answer
stringlengths
17
10.2M
package org.neo4j.kernel.impl.util;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * Minimal file-backed message logger. One {@code StringLogger} instance exists
 * per file name, shared via a static registry; each log call writes a
 * timestamped line and flushes immediately.
 *
 * Thread-safety: all access to the shared registry and to the writer is
 * synchronized. (The original {@code getLogger} was unsynchronized while
 * {@code close} was, so two threads could race on the backing {@link HashMap}
 * and leak duplicate writers to the same file.)
 */
public class StringLogger
{
    private final PrintWriter out;

    /**
     * Opens the log file in append mode, creating parent directories as
     * needed. I/O failures are wrapped in a RuntimeException since callers
     * cannot meaningfully recover from an unopenable log file.
     */
    private StringLogger( String filename )
    {
        try
        {
            File file = new File( filename );
            file.getParentFile().mkdirs();
            // Append mode: existing log content is preserved across restarts.
            out = new PrintWriter( new FileWriter( file, true ) );
        }
        catch ( IOException e )
        {
            throw new RuntimeException( e );
        }
    }

    // Registry of open loggers, keyed by file name. HashMap is not
    // thread-safe, so every access below is synchronized on the class.
    private static final Map<String,StringLogger> loggers =
        new HashMap<String, StringLogger>();

    /**
     * Returns the shared logger for {@code filename}, creating and caching it
     * on first use. Synchronized so concurrent callers cannot both miss the
     * cache and create two writers for the same file.
     */
    public static synchronized StringLogger getLogger( String filename )
    {
        StringLogger logger = loggers.get( filename );
        if ( logger == null )
        {
            logger = new StringLogger( filename );
            loggers.put( filename, logger );
        }
        return logger;
    }

    /**
     * Writes a timestamped message and flushes so the line is visible even if
     * the process dies before {@link #close(String)}.
     */
    public synchronized void logMessage( String msg )
    {
        out.println( new Date() + ": " + msg );
        out.flush();
    }

    /**
     * Writes a timestamped message plus the cause's message and full stack
     * trace, then flushes.
     */
    public synchronized void logMessage( String msg, Throwable cause )
    {
        out.println( new Date() + ": " + msg + " " + cause.getMessage() );
        cause.printStackTrace( out );
        out.flush();
    }

    /**
     * Removes the logger for {@code filename} from the registry (if present)
     * and closes its underlying writer. A no-op for unknown file names.
     */
    public static synchronized void close( String filename )
    {
        StringLogger logger = loggers.remove( filename );
        if ( logger != null )
        {
            logger.out.close();
        }
    }
}
package org.junit.jupiter.api;

import static org.apiguardian.api.API.Status.STABLE;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.apiguardian.api.API;

/**
 * {@code @BeforeAll} is used to signal that the annotated method should be
 * executed <em>before</em> <strong>all</strong> tests in the current test class.
 *
 * <p>In contrast to {@link BeforeEach @BeforeEach} methods, {@code @BeforeAll}
 * methods are only executed once for a given test class.
 *
 * <h3>Method Signatures</h3>
 *
 * <p>{@code @BeforeAll} methods must have a {@code void} return type,
 * must not be {@code private}, and must be {@code static} by default.
 * Consequently, {@code @BeforeAll} methods are not
 * supported in {@link Nested @Nested} test classes or as <em>interface default
 * methods</em> unless the test class is annotated with
 * {@link TestInstance @TestInstance(Lifecycle.PER_CLASS)}. {@code @BeforeAll}
 * methods may optionally declare parameters to be resolved by
 * {@link org.junit.jupiter.api.extension.ParameterResolver ParameterResolvers}.
 *
 * <h3>Inheritance and Execution Order</h3>
 *
 * <p>{@code @BeforeAll} methods are inherited from superclasses as long as
 * they are not <em>hidden</em> or <em>overridden</em>. Furthermore,
 * {@code @BeforeAll} methods from superclasses will be executed before
 * {@code @BeforeAll} methods in subclasses.
 *
 * <p>Similarly, {@code @BeforeAll} methods declared in an interface are
 * inherited as long as they are not <em>hidden</em> or <em>overridden</em>,
 * and {@code @BeforeAll} methods from an interface will be executed before
 * {@code @BeforeAll} methods in the class that implements the interface.
 *
 * <p>JUnit Jupiter does not guarantee the execution order of multiple
 * {@code @BeforeAll} methods that are declared within a single test class or
 * test interface. While it may at times appear that these methods are invoked
 * in alphabetical order, they are in fact sorted using an algorithm that is
 * deterministic but intentionally non-obvious.
 *
 * <p>In addition, {@code @BeforeAll} methods are in no way linked to
 * {@code @AfterAll} methods. Consequently, there are no guarantees with regard
 * to their <em>wrapping</em> behavior. For example, given two {@code @BeforeAll}
 * methods {@code createA()} and {@code createB()} as well as two {@code @AfterAll}
 * methods {@code destroyA()} and {@code destroyB()}, the order in which the
 * {@code @BeforeAll} methods are executed (e.g. {@code createA()} before
 * {@code createB()}) does not imply any order for the seemingly corresponding
 * {@code @AfterAll} methods. In other words, {@code destroyA()} might be called
 * before <em>or</em> after {@code destroyB()}. The JUnit Team therefore recommends
 * that developers declare at most one {@code @BeforeAll} method and at most one
 * {@code @AfterAll} method per test class or test interface unless there are no
 * dependencies between the {@code @BeforeAll} methods or between the
 * {@code @AfterAll} methods.
 *
 * <h3>Composition</h3>
 *
 * <p>{@code @BeforeAll} may be used as a meta-annotation in order to create
 * a custom <em>composed annotation</em> that inherits the semantics of
 * {@code @BeforeAll}.
 *
 * @since 5.0
 * @see AfterAll
 * @see BeforeEach
 * @see AfterEach
 * @see Test
 * @see TestFactory
 * @see TestInstance
 */
@Target({ ElementType.ANNOTATION_TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@Documented
@API(status = STABLE, since = "5.0")
public @interface BeforeAll {
}
package ch.tkuhn.memetools; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import org.supercsv.io.CsvListReader; import org.supercsv.io.CsvListWriter; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; public class CalculatePaperSuccess { @Parameter(description = "chronologically-sorted-input-file", required = true) private List<String> parameters = new ArrayList<String>(); private File inputFile; @Parameter(names = "-o", description = "Output file") private File outputFile; @Parameter(names = "-t", description = "File with terms", required = true) private File termsFile; @Parameter(names = "-tcol", description = "Index or name of column to read terms (if term file is in CSV format)") private String termCol = "TERM"; @Parameter(names = "-d", description = "Set delta parameter (controlled noise level)") private int delta = 3; private File logFile; public static final void main(String[] args) { CalculatePaperSuccess obj = new CalculatePaperSuccess(); JCommander jc = new JCommander(obj); try { jc.parse(args); } catch (ParameterException ex) { jc.usage(); System.exit(1); } if (obj.parameters.size() != 1) { System.err.println("ERROR: Exactly one main argument is needed"); jc.usage(); System.exit(1); } obj.inputFile = new File(obj.parameters.get(0)); obj.run(); } private MemeScorer ms; private List<String> terms; private CsvListWriter csvWriter; private BufferedReader reader; private Map<String,String> cpyMapKeys; private Map<String,Long> cpyLastDay; private Map<String,Long> cpyPaperDays; private Map<String,Integer> cpyPaperCount; 
private Map<String,Integer> cpyCitationCount; public CalculatePaperSuccess() { } public void run() { init(); try { readTerms(); processEntries(); } catch (Throwable th) { log(th); System.exit(1); } log("Finished"); } private void init() { logFile = new File(MemeUtils.getLogDir(), getOutputFileName() + ".log"); log("=========="); if (outputFile == null) { outputFile = new File(MemeUtils.getOutputDataDir(), getOutputFileName() + ".csv"); } ms = new MemeScorer(MemeScorer.GIVEN_TERMLIST_MODE); terms = new ArrayList<String>(); cpyLastDay = new HashMap<String,Long>(); cpyPaperDays = new HashMap<String,Long>(); cpyPaperCount = new HashMap<String,Integer>(); cpyCitationCount = new HashMap<String,Integer>(); } private void readTerms() throws IOException { log("Reading terms from " + termsFile + " ..."); if (termsFile.toString().endsWith(".csv")) { readTermsCsv(); } else { readTermsTxt(); } log("Number of terms: " + terms.size()); } private void readTermsTxt() throws IOException { BufferedReader reader = new BufferedReader(new FileReader(termsFile)); String line; while ((line = reader.readLine()) != null) { String term = MemeUtils.normalize(line); ms.addTerm(term); terms.add(term); } reader.close(); } private void readTermsCsv() throws IOException { BufferedReader r = new BufferedReader(new FileReader(termsFile)); CsvListReader csvReader = new CsvListReader(r, MemeUtils.getCsvPreference()); List<String> header = csvReader.read(); int col; if (termCol.matches("[0-9]+")) { col = Integer.parseInt(termCol); } else { col = header.indexOf(termCol); } List<String> line; while ((line = csvReader.read()) != null) { String term = MemeUtils.normalize(line.get(col)); ms.addTerm(term); terms.add(term); } csvReader.close(); } private void processEntries() throws IOException { try { log("Processing entries and writing CSV file..."); Writer w = new BufferedWriter(new FileWriter(outputFile)); csvWriter = new CsvListWriter(w, MemeUtils.getCsvPreference()); csvWriter.write("ID", 
"JOURNAL-C/PY", "FIRSTAUTHOR-C/PY", "AUTHOR-MAX-C/PY"); //, "TOP-MS-" + delta); reader = new BufferedReader(new FileReader(inputFile)); int progress = 0; String line; while ((line = reader.readLine()) != null) { progress++; logProgress(progress); DataEntry d = new DataEntry(line); ms.recordTerms(d); long thisDay = getDayCount(d.getDate()); String doi = d.getId(); String[] authList = d.getAuthors().split(" "); String journal = PrepareApsData.getJournalFromDoi(doi); String journalKey = "J:" + journal; double journalCpy = updateCpyData(journalKey, thisDay); double firstAuthorCpy = -2.0; double authorMaxCpy = -2.0; for (String author : authList) { String authorKey = "A:" + author; double authorCpy = updateCpyData(authorKey, thisDay); if (firstAuthorCpy == -2.0) { firstAuthorCpy = authorCpy; } if (authorCpy > authorMaxCpy) { authorMaxCpy = authorCpy; } } // TODO write real data: csvWriter.write(doi, journalCpy, firstAuthorCpy, authorMaxCpy); addCpyPaper(journalKey); String cpyKeys = journalKey; for (String author : authList) { String authorKey = "A:" + author; addCpyPaper(authorKey); cpyKeys += " " + authorKey; } String[] citList = d.getCitations().split(" "); for (String cit : citList) { for (String k : cpyMapKeys.get(cit).split(" ")) { addCpyCitation(k); } } cpyMapKeys.put(doi, cpyKeys); } } finally { if (csvWriter != null) csvWriter.close(); if (reader != null) reader.close(); } } private double updateCpyData(String key, long thisDay) { Long lastDay = cpyLastDay.get(key); if (lastDay == null) lastDay = 0l; long dayDiff = thisDay - lastDay; Integer paperCount = cpyPaperCount.get(key); if (paperCount == null) paperCount = 0; Integer citationCount = cpyCitationCount.get(key); if (citationCount == null) citationCount = 0; Long paperDays = cpyPaperDays.get(key); if (paperDays == null) paperDays = 0l; paperDays = paperDays + paperCount*dayDiff; cpyPaperDays.put(key, paperDays); cpyLastDay.put(key, thisDay); if (paperDays == 0) return -1.0; return (double) 
citationCount/paperDays; } private void addCpyPaper(String key) { Integer paperCount = cpyPaperCount.get(key); if (paperCount == null) paperCount = 0; cpyPaperCount.put(key, paperCount + 1); } private void addCpyCitation(String key) { Integer citationCount = cpyCitationCount.get(key); if (citationCount == null) citationCount = 0; cpyCitationCount.put(key, citationCount + 1); } private static long getDayCount(String date) { return TimeUnit.DAYS.convert(parseDate(date).getTime(), TimeUnit.MILLISECONDS); } private static final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); private static Date parseDate(String s) { try { return formatter.parse(s); } catch (ParseException ex) { throw new RuntimeException(ex); } } private String getOutputFileName() { return "su-" + inputFile.getName().replaceAll("-chronologic", "").replaceAll("\\..*$", ""); } private void logProgress(int p) { if (p % 100000 == 0) log(p + "..."); } private void log(Object obj) { MemeUtils.log(logFile, obj); } }
package edu.ucla.cens.awserver.jee.servlet; import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; import org.springframework.context.ApplicationContext; import org.springframework.web.context.support.WebApplicationContextUtils; import edu.ucla.cens.awserver.controller.Controller; import edu.ucla.cens.awserver.jee.servlet.glue.AwRequestCreator; import edu.ucla.cens.awserver.request.AwRequest; import edu.ucla.cens.awserver.util.StringUtils; /** * Servlet for processing sensor data uploads. * * @author selsky */ @SuppressWarnings("serial") public class SensorUploadServlet extends AbstractAwHttpServlet { private static Logger _logger = Logger.getLogger(SensorUploadServlet.class); private Controller _controller; private AwRequestCreator _awRequestCreator; private List<String> _parameterList; /** * Default no-arg constructor. */ public SensorUploadServlet() { _parameterList = new ArrayList<String>(Arrays.asList(new String[]{"t","u","phv","prv","d"})); } /** * JavaEE-to-Spring glue code. When the web application starts up, the init method on all servlets is invoked by the Servlet * container (if load-on-startup for the Servlet > 0). In this method, names of Spring "beans" are pulled out of the * ServletConfig and the names are used to retrieve the beans out of the ApplicationContext. The basic design rule followed * is that only Servlet.init methods contain Spring Framework glue code. 
*/ public void init(ServletConfig config) throws ServletException { super.init(config); String servletName = config.getServletName(); String awRequestCreatorName = config.getInitParameter("awRequestCreatorName"); String controllerName = config.getInitParameter("controllerName"); if(StringUtils.isEmptyOrWhitespaceOnly(awRequestCreatorName)) { throw new ServletException("Invalid web.xml. Missing awRequestCreatorName init param. Servlet " + servletName + " cannot be initialized and put into service."); } if(StringUtils.isEmptyOrWhitespaceOnly(controllerName)) { throw new ServletException("Invalid web.xml. Missing controllerName init param. Servlet " + servletName + " cannot be initialized and put into service."); } // OK, now get the beans out of the Spring ApplicationContext // If the beans do not exist within the Spring configuration, Spring will throw a RuntimeException and initialization // of this Servlet will fail. (check catalina.out in addition to aw.log) ServletContext servletContext = config.getServletContext(); ApplicationContext applicationContext = WebApplicationContextUtils.getWebApplicationContext(servletContext); _awRequestCreator = (AwRequestCreator) applicationContext.getBean(awRequestCreatorName); _controller = (Controller) applicationContext.getBean(controllerName); } /** * Dispatches to a Controller to perform sensor data upload. If the upload fails, an error message is persisted to the response. * If the request is successful, allow Tomcat to simply return HTTP 200. */ protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Top-level security validation if(! 
prevalidate(request)) { response.sendError(HttpServletResponse.SC_NOT_FOUND); // if some entity is doing strange stuff, just respond with a 404 // in order not to give away too much about app processing return; } // Map data from the inbound request to our internal format AwRequest awRequest = _awRequestCreator.createFrom(request); Writer writer = new BufferedWriter(new OutputStreamWriter(getOutputStream(request, response))); try { // Execute feature-specific logic _controller.execute(awRequest); if(awRequest.isFailedRequest()) { response.setContentType("application/json"); writer.write(awRequest.getFailedRequestErrorMessage()); } // if the request is successful, just let Tomcat return a 200 } catch(Throwable t) { _logger.error("an error occurred on sensor data upload", t); } finally { if(null != writer) { writer.flush(); writer.close(); } request.getSession().invalidate(); // sensor data uploads only have state for the duration of a request } } /** * Dispatches to processRequest(). */ @Override protected final void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { processRequest(req, resp); } /** * Dispatches to processRequest(). 
*/ @Override protected final void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { processRequest(req, resp); } /** * Pre-validate to avoid situations where someone is sending purposefully malicious data */ private boolean prevalidate(HttpServletRequest request) { Map<?,?> parameterMap = request.getParameterMap(); // String, String[] // Check for missing or extra parameters if(parameterMap.size() != 5) { _logger.warn("an incorrect number of parameters was found on sensor upload: " + parameterMap.size()); return false; } // Check for duplicate parameters Iterator<?> iterator = parameterMap.keySet().iterator(); while(iterator.hasNext()) { String key = (String) iterator.next(); String[] valuesForKey = (String[]) parameterMap.get(key); if(valuesForKey.length != 1) { _logger.warn("an incorrect number of values (" + valuesForKey.length + ") was found for parameter " + key); return false; } } // Check for parameters with unknown names iterator = parameterMap.keySet().iterator(); // there is no way to reset the iterator so just obtain a new one while(iterator.hasNext()) { String name = (String) iterator.next(); if(! 
_parameterList.contains(name)) { _logger.warn("an incorrect parameter name was found: " + name); return false; } } String u = (String) request.getParameter("u"); String t = (String) request.getParameter("t"); String phv = (String) request.getParameter("phv"); String prv = (String) request.getParameter("prv"); // Check for abnormal lengths (buffer overflow attack) // 50 is an arbitrary number, but for these parameters it would be very strange if(greaterThanLength("user", "u", u, 50) || greaterThanLength("request type", "t", t, 50) || greaterThanLength("phone version", "phv", phv, 50) || greaterThanLength("protocol version", "prv", prv, 50) ) { return false; } // the JSON data is not checked because its length is so variable and potentially huge (700000+ characters) // it will be heavily validated once inside the main application validation layer return true; } private boolean greaterThanLength(String longName, String name, String value, int length) { if(null != value && value.length() > length) { _logger.warn("a " + longName + "(request parameter " + name + ") of " + value.length() + " characters was found"); return true; } return false; } }
// FitsReader.java

package loci.formats.in;

import java.io.IOException;

import loci.common.RandomAccessInputStream;
import loci.formats.FormatException;
import loci.formats.FormatReader;
import loci.formats.FormatTools;
import loci.formats.MetadataTools;
import loci.formats.meta.FilterMetadata;
import loci.formats.meta.MetadataStore;

/**
 * Reader for FITS (Flexible Image Transport System) files. The header is a
 * sequence of 80-character keyword records; pixel data begins at the first
 * 2880-byte block boundary after the header (see the seek arithmetic in
 * openBytes).
 */
public class FitsReader extends FormatReader {

  // -- Fields --

  /** Number of lines in the header. */
  private int count;

  // -- Constructor --

  /** Constructs a new FitsReader. */
  public FitsReader() {
    super("Flexible Image Transport System", "fits");
    domains = new String[] {FormatTools.ASTRONOMY_DOMAIN,
      FormatTools.GRAPHICS_DOMAIN};
  }

  // -- IFormatReader API methods --

  /**
   * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int)
   */
  public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
    throws FormatException, IOException
  {
    FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h);
    // Skip past the header: `count` records of 80 bytes each, rounded up to
    // the next 2880-byte block, where the pixel data starts.
    in.seek(2880 * ((((count * 80) - 1) / 2880) + 1));
    readPlane(in, x, y, w, h, buf);
    return buf;
  }

  /* @see loci.formats.IFormatReader#close(boolean) */
  public void close(boolean fileOnly) throws IOException {
    super.close(fileOnly);
    // Reset parsed-header state only on a full close.
    if (!fileOnly) count = 0;
  }

  // -- Internal FormatReader API methods --

  /* @see loci.formats.FormatReader#initFile(String) */
  protected void initFile(String id) throws FormatException, IOException {
    debug("FitsReader.initFile(" + id + ")");
    super.initFile(id);
    in = new RandomAccessInputStream(id);
    count = 1;
    // First 80-byte record must start with "SIMPLE" per the FITS format.
    String line = in.readString(80);
    if (!line.startsWith("SIMPLE")) {
      throw new FormatException("Unsupported FITS file.");
    }

    String key = "", value = "";
    while (true) {
      count++;
      line = in.readString(80);

      // parse key/value pair
      // Each record is "KEY = VALUE / comment"; text after '/' is a comment.
      int ndx = line.indexOf("=");
      int comment = line.indexOf("/", ndx);
      if (comment < 0) comment = line.length();

      if (ndx >= 0) {
        key = line.substring(0, ndx).trim();
        value = line.substring(ndx + 1, comment).trim();
      }
      else key = line.trim();

      // if the file has an extended header, "END" will appear twice
      // the first time marks the end of the extended header
      // the second time marks the end of the standard header
      // image dimensions are only populated by the standard header
      if (key.equals("END") && getSizeX() > 0) break;

      if (key.equals("BITPIX")) {
        // BITPIX encodes bits per pixel; negative values mean floating point.
        int bits = Integer.parseInt(value);
        switch (bits) {
          case 8:
            core[0].pixelType = FormatTools.UINT8;
            break;
          case 16:
            core[0].pixelType = FormatTools.INT16;
            break;
          case 32:
            core[0].pixelType = FormatTools.INT32;
            break;
          case -32:
            core[0].pixelType = FormatTools.FLOAT;
            break;
          case -64:
            core[0].pixelType = FormatTools.DOUBLE;
            break;
          default: throw new FormatException("Unsupported pixel type: " + bits);
        }
      }
      // NAXIS1/2/3 are the image width, height and plane count.
      else if (key.equals("NAXIS1")) core[0].sizeX = Integer.parseInt(value);
      else if (key.equals("NAXIS2")) core[0].sizeY = Integer.parseInt(value);
      else if (key.equals("NAXIS3")) core[0].sizeZ = Integer.parseInt(value);

      addGlobalMeta(key, value);
    }
    // Fill in the remaining core metadata with fixed defaults: single channel,
    // single timepoint, big-endian, non-interleaved grayscale planes.
    core[0].sizeC = 1;
    core[0].sizeT = 1;
    if (getSizeZ() == 0) core[0].sizeZ = 1;
    core[0].imageCount = core[0].sizeZ;
    core[0].rgb = false;
    core[0].littleEndian = false;
    core[0].interleaved = false;
    core[0].dimensionOrder = "XYZCT";
    core[0].indexed = false;
    core[0].falseColor = false;
    core[0].metadataComplete = true;

    MetadataStore store =
      new FilterMetadata(getMetadataStore(), isMetadataFiltered());
    MetadataTools.populatePixels(store, this);
    MetadataTools.setDefaultCreationDate(store, id, 0);
  }

}
package com.fsck.k9.controller; import java.io.CharArrayWriter; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import android.app.KeyguardManager; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.ContentResolver; import android.content.Context; import android.content.Intent; import android.content.pm.PackageInfo; import android.database.Cursor; import android.net.Uri; import android.os.Build; import android.os.PowerManager; import android.os.Process; import android.support.v4.app.NotificationCompat; import android.support.v4.app.TaskStackBuilder; import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.text.style.TextAppearanceSpan; import android.util.Log; import com.fsck.k9.Account; import com.fsck.k9.Account.DeletePolicy; import com.fsck.k9.Account.Expunge; import com.fsck.k9.AccountStats; import com.fsck.k9.K9; import com.fsck.k9.K9.NotificationHideSubject; import com.fsck.k9.K9.Intents; import com.fsck.k9.K9.NotificationQuickDelete; import com.fsck.k9.NotificationSetting; import com.fsck.k9.Preferences; import com.fsck.k9.R; import com.fsck.k9.activity.Accounts; import com.fsck.k9.activity.FolderList; import com.fsck.k9.activity.MessageList; import com.fsck.k9.activity.MessageReference; import com.fsck.k9.activity.NotificationDeleteConfirmation; import com.fsck.k9.activity.setup.AccountSetupCheckSettings.CheckDirection; import 
com.fsck.k9.activity.setup.AccountSetupIncoming; import com.fsck.k9.activity.setup.AccountSetupOutgoing; import com.fsck.k9.cache.EmailProviderCache; import com.fsck.k9.helper.Contacts; import com.fsck.k9.helper.MessageHelper; import com.fsck.k9.mail.power.TracingPowerManager; import com.fsck.k9.mail.power.TracingPowerManager.TracingWakeLock; import com.fsck.k9.mail.Address; import com.fsck.k9.mail.FetchProfile; import com.fsck.k9.mail.Flag; import com.fsck.k9.mail.Folder; import com.fsck.k9.mail.Folder.FolderType; import com.fsck.k9.mail.Message; import com.fsck.k9.mail.Message.RecipientType; import com.fsck.k9.mail.CertificateValidationException; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mail.Part; import com.fsck.k9.mail.PushReceiver; import com.fsck.k9.mail.Pusher; import com.fsck.k9.mail.Store; import com.fsck.k9.mail.Transport; import com.fsck.k9.mail.internet.MessageExtractor; import com.fsck.k9.mail.internet.MimeMessage; import com.fsck.k9.mail.internet.MimeMessageHelper; import com.fsck.k9.mail.internet.MimeUtility; import com.fsck.k9.mail.internet.TextBody; import com.fsck.k9.mailstore.MessageRemovalListener; import com.fsck.k9.mail.MessageRetrievalListener; import com.fsck.k9.mailstore.LocalFolder; import com.fsck.k9.mailstore.LocalMessage; import com.fsck.k9.mailstore.LocalStore; import com.fsck.k9.mailstore.LocalStore.PendingCommand; import com.fsck.k9.mail.store.pop3.Pop3Store; import com.fsck.k9.mailstore.UnavailableStorageException; import com.fsck.k9.provider.EmailProvider; import com.fsck.k9.provider.EmailProvider.StatsColumns; import com.fsck.k9.search.ConditionsTreeNode; import com.fsck.k9.search.LocalSearch; import com.fsck.k9.search.SearchAccount; import com.fsck.k9.search.SearchSpecification; import com.fsck.k9.search.SqlQueryBuilder; import com.fsck.k9.service.NotificationActionService; /** * Starts a long running (application) Thread that will run through commands * that require remote mailbox access. 
This class is used to serialize and * prioritize these commands. Each method that will submit a command requires a * MessagingListener instance to be provided. It is expected that that listener * has also been added as a registered listener using addListener(). When a * command is to be executed, if the listener that was provided with the command * is no longer registered the command is skipped. The design idea for the above * is that when an Activity starts it registers as a listener. When it is paused * it removes itself. Thus, any commands that that activity submitted are * removed from the queue once the activity is no longer active. */ public class MessagingController implements Runnable { public static final long INVALID_MESSAGE_ID = -1; /** * Immutable empty {@link String} array */ private static final String[] EMPTY_STRING_ARRAY = new String[0]; private static final String PENDING_COMMAND_MOVE_OR_COPY = "com.fsck.k9.MessagingController.moveOrCopy"; private static final String PENDING_COMMAND_MOVE_OR_COPY_BULK = "com.fsck.k9.MessagingController.moveOrCopyBulk"; private static final String PENDING_COMMAND_MOVE_OR_COPY_BULK_NEW = "com.fsck.k9.MessagingController.moveOrCopyBulkNew"; private static final String PENDING_COMMAND_EMPTY_TRASH = "com.fsck.k9.MessagingController.emptyTrash"; private static final String PENDING_COMMAND_SET_FLAG_BULK = "com.fsck.k9.MessagingController.setFlagBulk"; private static final String PENDING_COMMAND_SET_FLAG = "com.fsck.k9.MessagingController.setFlag"; private static final String PENDING_COMMAND_APPEND = "com.fsck.k9.MessagingController.append"; private static final String PENDING_COMMAND_MARK_ALL_AS_READ = "com.fsck.k9.MessagingController.markAllAsRead"; private static final String PENDING_COMMAND_EXPUNGE = "com.fsck.k9.MessagingController.expunge"; /** * Key to group stacked notifications on Android Wear. 
*/ private static final String NOTIFICATION_GROUP_KEY = "com.fsck.k9.MessagingController.notificationGroup"; public static class UidReverseComparator implements Comparator<Message> { @Override public int compare(Message o1, Message o2) { if (o1 == null || o2 == null || o1.getUid() == null || o2.getUid() == null) { return 0; } int id1, id2; try { id1 = Integer.parseInt(o1.getUid()); id2 = Integer.parseInt(o2.getUid()); } catch (NumberFormatException e) { return 0; } //reversed intentionally. if (id1 < id2) return 1; if (id1 > id2) return -1; return 0; } } /** * Maximum number of unsynced messages to store at once */ private static final int UNSYNC_CHUNK_SIZE = 5; private static MessagingController inst = null; private BlockingQueue<Command> mCommands = new PriorityBlockingQueue<Command>(); private Thread mThread; private Set<MessagingListener> mListeners = new CopyOnWriteArraySet<MessagingListener>(); private final ConcurrentHashMap<String, AtomicInteger> sendCount = new ConcurrentHashMap<String, AtomicInteger>(); ConcurrentHashMap<Account, Pusher> pushers = new ConcurrentHashMap<Account, Pusher>(); private final ExecutorService threadPool = Executors.newCachedThreadPool(); private MessagingListener checkMailListener = null; private MemorizingListener memorizingListener = new MemorizingListener(); private boolean mBusy; private Context context; /** * A holder class for pending notification data * * This class holds all pieces of information for constructing * a notification with message preview. */ private static class NotificationData { /** Number of unread messages before constructing the notification */ int unreadBeforeNotification; /** * List of messages that should be used for the inbox-style overview. * It's sorted from newest to oldest message. * Don't modify this list directly, but use {@link #addMessage(com.fsck.k9.mailstore.LocalMessage)} and * {@link #removeMatchingMessage(android.content.Context, com.fsck.k9.activity.MessageReference)} instead. 
*/ LinkedList<LocalMessage> messages; /** * Stacked notifications that share this notification as ther summary-notification. */ List<Integer> stackdNotifications; /** * List of references for messages that the user is still to be notified of, * but which don't fit into the inbox style anymore. It's sorted from newest * to oldest message. */ LinkedList<MessageReference> droppedMessages; /** * Maximum number of messages to keep for the inbox-style overview. * As of Jellybean, phone notifications show a maximum of 5 lines, while tablet * notifications show 7 lines. To make sure no lines are silently dropped, * we default to 5 lines. */ private final static int MAX_MESSAGES = 5; /** * Constructs a new data instance. * * @param unread Number of unread messages prior to instance construction */ public NotificationData(int unread) { unreadBeforeNotification = unread; droppedMessages = new LinkedList<MessageReference>(); messages = new LinkedList<LocalMessage>(); } /** * Adds a new message to the list of pending messages for this notification. * * The implementation will take care of keeping a meaningful amount of * messages in {@link #messages}. * * @param m The new message to add. */ public void addMessage(LocalMessage m) { while (messages.size() >= MAX_MESSAGES) { LocalMessage dropped = messages.removeLast(); droppedMessages.addFirst(dropped.makeMessageReference()); } messages.addFirst(m); } public void addStackedChildNotification(final int notificationId) { if (stackdNotifications == null) { stackdNotifications = new LinkedList<Integer>(); } stackdNotifications.add(new Integer(notificationId)); } public List<Integer> getStackedChildNotifications() { return stackdNotifications; }; /** * Remove a certain message from the message list. * * @param context A context. 
* @param ref Reference of the message to remove * @return true if message was found and removed, false otherwise */ public boolean removeMatchingMessage(Context context, MessageReference ref) { for (MessageReference dropped : droppedMessages) { if (dropped.equals(ref)) { droppedMessages.remove(dropped); return true; } } for (LocalMessage message : messages) { if (message.makeMessageReference().equals(ref)) { if (messages.remove(message) && !droppedMessages.isEmpty()) { LocalMessage restoredMessage = droppedMessages.getFirst().restoreToLocalMessage(context); if (restoredMessage != null) { messages.addLast(restoredMessage); droppedMessages.removeFirst(); } } return true; } } return false; } /** * Adds a list of references for all pending messages for the notification to the supplied * List. */ public void supplyAllMessageRefs(List<MessageReference> refs) { for (LocalMessage m : messages) { refs.add(m.makeMessageReference()); } refs.addAll(droppedMessages); } /** * Gets the total number of messages the user is to be notified of. * * @return Amount of new messages the notification notifies for */ public int getNewMessageCount() { return messages.size() + droppedMessages.size(); } }; // Key is accountNumber private final ConcurrentMap<Integer, NotificationData> notificationData = new ConcurrentHashMap<Integer, NotificationData>(); private static final Set<Flag> SYNC_FLAGS = EnumSet.of(Flag.SEEN, Flag.FLAGGED, Flag.ANSWERED, Flag.FORWARDED); private void suppressMessages(Account account, List<LocalMessage> messages) { EmailProviderCache cache = EmailProviderCache.getCache(account.getUuid(), context); cache.hideMessages(messages); } private void unsuppressMessages(Account account, List<? 
extends Message> messages) { EmailProviderCache cache = EmailProviderCache.getCache(account.getUuid(), context); cache.unhideMessages(messages); } private boolean isMessageSuppressed(LocalMessage message) { long messageId = message.getId(); long folderId = message.getFolder().getId(); EmailProviderCache cache = EmailProviderCache.getCache(message.getFolder().getAccountUuid(), context); return cache.isMessageHidden(messageId, folderId); } private void setFlagInCache(final Account account, final List<Long> messageIds, final Flag flag, final boolean newState) { EmailProviderCache cache = EmailProviderCache.getCache(account.getUuid(), context); String columnName = LocalStore.getColumnNameForFlag(flag); String value = Integer.toString((newState) ? 1 : 0); cache.setValueForMessages(messageIds, columnName, value); } private void removeFlagFromCache(final Account account, final List<Long> messageIds, final Flag flag) { EmailProviderCache cache = EmailProviderCache.getCache(account.getUuid(), context); String columnName = LocalStore.getColumnNameForFlag(flag); cache.removeValueForMessages(messageIds, columnName); } private void setFlagForThreadsInCache(final Account account, final List<Long> threadRootIds, final Flag flag, final boolean newState) { EmailProviderCache cache = EmailProviderCache.getCache(account.getUuid(), context); String columnName = LocalStore.getColumnNameForFlag(flag); String value = Integer.toString((newState) ? 
1 : 0);
    cache.setValueForThreads(threadRootIds, columnName, value);
}

// Clears the pending flag state for whole threads from the provider cache.
private void removeFlagForThreadsFromCache(final Account account, final List<Long> messageIds,
        final Flag flag) {
    EmailProviderCache cache = EmailProviderCache.getCache(account.getUuid(), context);
    String columnName = LocalStore.getColumnNameForFlag(flag);
    cache.removeValueForThreads(messageIds, columnName);
}

// NOTE(review): this constructor starts a thread running the not-yet-fully-constructed
// instance ("this" escapes before the constructor returns). It appears to work because
// run() only drains mCommands, but confirm before reordering anything here.
private MessagingController(Context context) {
    this.context = context;
    mThread = new Thread(this);
    mThread.setName("MessagingController");
    mThread.start();
    if (memorizingListener != null) {
        addListener(memorizingListener);
    }
}

// Lazily creates the process-wide singleton, bound to the application context.
public synchronized static MessagingController getInstance(Context context) {
    if (inst == null) {
        inst = new MessagingController(context.getApplicationContext());
    }
    return inst;
}

public boolean isBusy() {
    return mBusy;
}

// Command loop: runs forever on the thread started in the constructor, draining mCommands.
@Override
public void run() {
    Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
    while (true) {
        String commandDescription = null;
        try {
            final Command command = mCommands.take();

            if (command != null) { // BlockingQueue.take() never returns null; defensive check kept as-is.
                commandDescription = command.description;

                if (K9.DEBUG)
                    Log.i(K9.LOG_TAG, "Running " + (command.isForeground ? "Foreground" : "Background") +
                          " command '" + command.description + "', seq = " + command.sequence);

                mBusy = true;
                try {
                    command.runnable.run();
                } catch (UnavailableAccountException e) {
                    // retry later: re-queue the command from a throwaway thread after 30s.
                    new Thread() {
                        @Override
                        public void run() {
                            try {
                                sleep(30 * 1000);
                                mCommands.put(command);
                            } catch (InterruptedException e) {
                                Log.e(K9.LOG_TAG, "interrupted while putting a pending command for"
                                      + " an unavailable account back into the queue."
                                      + " THIS SHOULD NEVER HAPPEN.");
                            }
                        }
                    } .start();
                }

                if (K9.DEBUG)
                    Log.i(K9.LOG_TAG, (command.isForeground ?
"Foreground" : "Background") + " Command '" + command.description + "' completed"); for (MessagingListener l : getListeners(command.listener)) { l.controllerCommandCompleted(!mCommands.isEmpty()); } } } catch (Exception e) { Log.e(K9.LOG_TAG, "Error running command '" + commandDescription + "'", e); } mBusy = false; } } private void put(String description, MessagingListener listener, Runnable runnable) { putCommand(mCommands, description, listener, runnable, true); } private void putBackground(String description, MessagingListener listener, Runnable runnable) { putCommand(mCommands, description, listener, runnable, false); } private void putCommand(BlockingQueue<Command> queue, String description, MessagingListener listener, Runnable runnable, boolean isForeground) { int retries = 10; Exception e = null; while (retries try { Command command = new Command(); command.listener = listener; command.runnable = runnable; command.description = description; command.isForeground = isForeground; queue.put(command); return; } catch (InterruptedException ie) { try { Thread.sleep(200); } catch (InterruptedException ne) { } e = ie; } } throw new Error(e); } public void addListener(MessagingListener listener) { mListeners.add(listener); refreshListener(listener); } public void refreshListener(MessagingListener listener) { if (memorizingListener != null && listener != null) { memorizingListener.refreshOther(listener); } } public void removeListener(MessagingListener listener) { mListeners.remove(listener); } public Set<MessagingListener> getListeners() { return mListeners; } public Set<MessagingListener> getListeners(MessagingListener listener) { if (listener == null) { return mListeners; } Set<MessagingListener> listeners = new HashSet<MessagingListener>(mListeners); listeners.add(listener); return listeners; } /** * Lists folders that are available locally and remotely. 
This method calls
 * listFoldersCallback for local folders before it returns, and then for
 * remote folders at some later point. If there are no local folders
 * includeRemote is forced by this method. This method should be called from
 * a Thread as it may take several seconds to list the local folders.
 * TODO this needs to cache the remote folder list
 *
 * @param account the account whose folders to list
 * @param refreshRemote when true, always re-fetch the remote folder list
 * @param listener receives started/listed/finished callbacks
 */
public void listFolders(final Account account, final boolean refreshRemote,
        final MessagingListener listener) {
    threadPool.execute(new Runnable() {
        @Override
        public void run() {
            listFoldersSynchronous(account, refreshRemote, listener);
        }
    });
}

/**
 * Lists folders that are available locally and remotely. This method calls
 * listFoldersCallback for local folders before it returns, and then for
 * remote folders at some later point. If there are no local folders
 * includeRemote is forced by this method. This method is called in the
 * foreground.
 * TODO this needs to cache the remote folder list
 *
 * @param account the account whose folders to list
 * @param refreshRemote when true, delegate to doRefreshRemote instead of local data
 * @param listener receives started/listed/finished callbacks
 */
public void listFoldersSynchronous(final Account account, final boolean refreshRemote,
        final MessagingListener listener) {
    for (MessagingListener l : getListeners(listener)) {
        l.listFoldersStarted(account);
    }
    List <?
extends Folder > localFolders = null; if (!account.isAvailable(context)) { Log.i(K9.LOG_TAG, "not listing folders of unavailable account"); } else { try { Store localStore = account.getLocalStore(); localFolders = localStore.getPersonalNamespaces(false); if (refreshRemote || localFolders.isEmpty()) { doRefreshRemote(account, listener); return; } for (MessagingListener l : getListeners(listener)) { l.listFolders(account, localFolders); } } catch (Exception e) { for (MessagingListener l : getListeners(listener)) { l.listFoldersFailed(account, e.getMessage()); } addErrorMessage(account, null, e); return; } finally { if (localFolders != null) { for (Folder localFolder : localFolders) { closeFolder(localFolder); } } } } for (MessagingListener l : getListeners(listener)) { l.listFoldersFinished(account); } } private void doRefreshRemote(final Account account, final MessagingListener listener) { put("doRefreshRemote", listener, new Runnable() { @Override public void run() { List <? extends Folder > localFolders = null; try { Store store = account.getRemoteStore(); List <? extends Folder > remoteFolders = store.getPersonalNamespaces(false); LocalStore localStore = account.getLocalStore(); Set<String> remoteFolderNames = new HashSet<String>(); List<LocalFolder> foldersToCreate = new LinkedList<LocalFolder>(); localFolders = localStore.getPersonalNamespaces(false); Set<String> localFolderNames = new HashSet<String>(); for (Folder localFolder : localFolders) { localFolderNames.add(localFolder.getName()); } for (Folder remoteFolder : remoteFolders) { if (localFolderNames.contains(remoteFolder.getName()) == false) { LocalFolder localFolder = localStore.getFolder(remoteFolder.getName()); foldersToCreate.add(localFolder); } remoteFolderNames.add(remoteFolder.getName()); } localStore.createFolders(foldersToCreate, account.getDisplayCount()); localFolders = localStore.getPersonalNamespaces(false); /* * Clear out any folders that are no longer on the remote store. 
*/ for (Folder localFolder : localFolders) { String localFolderName = localFolder.getName(); // FIXME: This is a hack used to clean up when we accidentally created the // special placeholder folder "-NONE-". if (K9.FOLDER_NONE.equals(localFolderName)) { localFolder.delete(false); } if (!account.isSpecialFolder(localFolderName) && !remoteFolderNames.contains(localFolderName)) { localFolder.delete(false); } } localFolders = localStore.getPersonalNamespaces(false); for (MessagingListener l : getListeners(listener)) { l.listFolders(account, localFolders); } for (MessagingListener l : getListeners(listener)) { l.listFoldersFinished(account); } } catch (Exception e) { for (MessagingListener l : getListeners(listener)) { l.listFoldersFailed(account, ""); } addErrorMessage(account, null, e); } finally { if (localFolders != null) { for (Folder localFolder : localFolders) { closeFolder(localFolder); } } } } }); } /** * Find all messages in any local account which match the query 'query' * @throws MessagingException */ public void searchLocalMessages(final LocalSearch search, final MessagingListener listener) { threadPool.execute(new Runnable() { @Override public void run() { searchLocalMessagesSynchronous(search, listener); } }); } public void searchLocalMessagesSynchronous(final LocalSearch search, final MessagingListener listener) { final AccountStats stats = new AccountStats(); final Set<String> uuidSet = new HashSet<String>(Arrays.asList(search.getAccountUuids())); List<Account> accounts = Preferences.getPreferences(context).getAccounts(); boolean allAccounts = uuidSet.contains(SearchSpecification.ALL_ACCOUNTS); // for every account we want to search do the query in the localstore for (final Account account : accounts) { if (!allAccounts && !uuidSet.contains(account.getUuid())) { continue; } // Collecting statistics of the search result MessageRetrievalListener retrievalListener = new MessageRetrievalListener<LocalMessage>() { @Override public void messageStarted(String 
message, int number, int ofTotal) {}

            @Override
            public void messagesFinished(int number) {}

            // Tallies unread/flagged counts and forwards each hit to the listener,
            // skipping messages currently hidden by the EmailProviderCache.
            @Override
            public void messageFinished(LocalMessage message, int number, int ofTotal) {
                if (!isMessageSuppressed(message)) {
                    List<LocalMessage> messages = new ArrayList<LocalMessage>();

                    messages.add(message);
                    stats.unreadMessageCount += (!message.isSet(Flag.SEEN)) ? 1 : 0;
                    stats.flaggedMessageCount += (message.isSet(Flag.FLAGGED)) ? 1 : 0;
                    if (listener != null) {
                        listener.listLocalMessagesAddMessages(account, null, messages);
                    }
                }
            }
        };

        // alert everyone the search has started
        if (listener != null) {
            listener.listLocalMessagesStarted(account, null);
        }

        // build and do the query in the localstore
        try {
            LocalStore localStore = account.getLocalStore();
            localStore.searchForMessages(retrievalListener, search);
        } catch (Exception e) {
            if (listener != null) {
                listener.listLocalMessagesFailed(account, null, e.getMessage());
            }
            addErrorMessage(account, null, e);
        } finally {
            if (listener != null) {
                listener.listLocalMessagesFinished(account, null);
            }
        }
    }

    // publish the total search statistics
    if (listener != null) {
        listener.searchStats(stats);
    }
}

// Kicks off a server-side search on a background thread; returns the Future for cancellation.
public Future<?> searchRemoteMessages(final String acctUuid, final String folderName, final String query,
        final Set<Flag> requiredFlags, final Set<Flag> forbiddenFlags, final MessagingListener listener) {
    if (K9.DEBUG) {
        String msg = "searchRemoteMessages (" +
                     "acct=" + acctUuid +
                     ", folderName = " + folderName +
                     ", query = " + query +
                     ")";
        Log.i(K9.LOG_TAG, msg);
    }

    return threadPool.submit(new Runnable() {
        @Override
        public void run() {
            searchRemoteMessagesSynchronous(acctUuid, folderName, query, requiredFlags, forbiddenFlags, listener);
        }
    });
}

// Performs the server-side search and downloads headers for new hits, honoring the
// account's remote-search result limit.
public void searchRemoteMessagesSynchronous(final String acctUuid, final String folderName, final String query,
        final Set<Flag> requiredFlags, final Set<Flag> forbiddenFlags, final MessagingListener listener) {
    final Account acct = Preferences.getPreferences(context).getAccount(acctUuid);

    if (listener !=
null) {
        listener.remoteSearchStarted(folderName);
    }

    List<Message> extraResults = new ArrayList<Message>();
    try {
        Store remoteStore = acct.getRemoteStore();
        LocalStore localStore = acct.getLocalStore();

        if (remoteStore == null || localStore == null) {
            throw new MessagingException("Could not get store");
        }

        Folder remoteFolder = remoteStore.getFolder(folderName);
        LocalFolder localFolder = localStore.getFolder(folderName);
        if (remoteFolder == null || localFolder == null) {
            throw new MessagingException("Folder not found");
        }

        List<Message> messages = remoteFolder.search(query, requiredFlags, forbiddenFlags);

        if (K9.DEBUG) {
            Log.i("Remote Search", "Remote search got " + messages.size() + " results");
        }

        // There's no need to fetch messages already completely downloaded
        List<Message> remoteMessages = localFolder.extractNewMessages(messages);
        messages.clear();

        if (listener != null) {
            listener.remoteSearchServerQueryComplete(folderName, remoteMessages.size(), acct.getRemoteSearchNumResults());
        }

        Collections.sort(remoteMessages, new UidReverseComparator());

        // Trim to the configured limit; the overflow is handed to the listener as
        // "extra results" it can load on demand later.
        int resultLimit = acct.getRemoteSearchNumResults();
        if (resultLimit > 0 && remoteMessages.size() > resultLimit) {
            extraResults = remoteMessages.subList(resultLimit, remoteMessages.size());
            remoteMessages = remoteMessages.subList(0, resultLimit);
        }

        loadSearchResultsSynchronous(remoteMessages, localFolder, remoteFolder, listener);
    } catch (Exception e) {
        if (Thread.currentThread().isInterrupted()) {
            // A cancelled search interrupts this thread; not an error.
            Log.i(K9.LOG_TAG, "Caught exception on aborted remote search; safe to ignore.", e);
        } else {
            Log.e(K9.LOG_TAG, "Could not complete remote search", e);
            if (listener != null) {
                listener.remoteSearchFailed(null, e.getMessage());
            }
            addErrorMessage(acct, null, e);
        }
    } finally {
        if (listener != null) {
            listener.remoteSearchFinished(folderName, 0, acct.getRemoteSearchNumResults(), extraResults);
        }
    }
}

// Downloads the given remote search results into the local folder on a background thread,
// toggling the listener's progress indicator around the work.
public void loadSearchResults(final Account account, final String folderName,
        final List<Message> messages, final MessagingListener
listener) {
    threadPool.execute(new Runnable() {
        @Override
        public void run() {
            if (listener != null) {
                listener.enableProgressIndicator(true);
            }
            try {
                Store remoteStore = account.getRemoteStore();
                LocalStore localStore = account.getLocalStore();

                if (remoteStore == null || localStore == null) {
                    throw new MessagingException("Could not get store");
                }

                Folder remoteFolder = remoteStore.getFolder(folderName);
                LocalFolder localFolder = localStore.getFolder(folderName);
                if (remoteFolder == null || localFolder == null) {
                    throw new MessagingException("Folder not found");
                }

                loadSearchResultsSynchronous(messages, localFolder, remoteFolder, listener);
            } catch (MessagingException e) {
                Log.e(K9.LOG_TAG, "Exception in loadSearchResults: " + e);
                addErrorMessage(account, null, e);
            } finally {
                if (listener != null) {
                    listener.enableProgressIndicator(false);
                }
            }
        }
    });
}

// For each search hit not yet stored locally, fetches headers and structure from the
// remote folder, appends it locally, and reports it to the listener with progress.
public void loadSearchResultsSynchronous(List<Message> messages, LocalFolder localFolder, Folder remoteFolder,
        MessagingListener listener) throws MessagingException {
    final FetchProfile header = new FetchProfile();
    header.add(FetchProfile.Item.FLAGS);
    header.add(FetchProfile.Item.ENVELOPE);
    final FetchProfile structure = new FetchProfile();
    structure.add(FetchProfile.Item.STRUCTURE);

    int i = 0;
    for (Message message : messages) {
        i++;
        LocalMessage localMsg = localFolder.getMessage(message.getUid());

        if (localMsg == null) {
            remoteFolder.fetch(Collections.singletonList(message), header, null);
            //fun fact: ImapFolder.fetch can't handle getting STRUCTURE at same time as headers
            remoteFolder.fetch(Collections.singletonList(message), structure, null);
            localFolder.appendMessages(Collections.singletonList(message));
            localMsg = localFolder.getMessage(message.getUid());
        }

        if (listener != null) {
            listener.remoteSearchAddMessage(remoteFolder.getName(), localMsg, i, messages.size());
        }
    }
}

// Raises the folder's visible-message limit by the account's display count, then resyncs.
public void loadMoreMessages(Account account, String folder, MessagingListener listener) {
    try {
        LocalStore localStore = account.getLocalStore();
        LocalFolder localFolder = localStore.getFolder(folder);
        // A non-positive limit means "show everything"; nothing to raise in that case.
        if (localFolder.getVisibleLimit() > 0) {
            localFolder.setVisibleLimit(localFolder.getVisibleLimit() + account.getDisplayCount());
        }
        synchronizeMailbox(account, folder, listener, null);
    } catch (MessagingException me) {
        addErrorMessage(account, null, me);

        throw new RuntimeException("Unable to set visible limit on folder", me);
    }
}

// Resets every account's folders back to their default visible-message limits.
public void resetVisibleLimits(Collection<Account> accounts) {
    for (Account account : accounts) {
        account.resetVisibleLimits();
    }
}

/**
 * Start background synchronization of the specified folder.
 * @param account the account owning the folder
 * @param folder the folder name to synchronize
 * @param listener receives sync progress callbacks
 * @param providedRemoteFolder TODO
 */
public void synchronizeMailbox(final Account account, final String folder,
        final MessagingListener listener, final Folder providedRemoteFolder) {
    putBackground("synchronizeMailbox", listener, new Runnable() {
        @Override
        public void run() {
            synchronizeMailboxSynchronous(account, folder, listener, providedRemoteFolder);
        }
    });
}

/**
 * Start foreground synchronization of the specified folder. This is generally only called
 * by synchronizeMailbox.
 * @param account the account owning the folder
 * @param folder the folder name to synchronize
 *
 * TODO Break this method up into smaller chunks.
 * @param providedRemoteFolder TODO
 */
private void synchronizeMailboxSynchronous(final Account account, final String folder,
        final MessagingListener listener, Folder providedRemoteFolder) {
    Folder remoteFolder = null;
    LocalFolder tLocalFolder = null;

    if (K9.DEBUG)
        Log.i(K9.LOG_TAG, "Synchronizing folder " + account.getDescription() + ":" + folder);

    for (MessagingListener l : getListeners(listener)) {
        l.synchronizeMailboxStarted(account, folder);
    }
    /*
     * We don't ever sync the Outbox or errors folder
     */
    if (folder.equals(account.getOutboxFolderName()) || folder.equals(account.getErrorFolderName())) {
        for (MessagingListener l : getListeners(listener)) {
            l.synchronizeMailboxFinished(account, folder, 0, 0);
        }
        return;
    }

    Exception commandException = null;
    try {
        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "SYNC: About to process pending commands for account " + account.getDescription());

        // A failing pending command should not block the sync attempt; the
        // exception is remembered and reported after the sync completes.
        try {
            processPendingCommandsSynchronous(account);
        } catch (Exception e) {
            addErrorMessage(account, null, e);
            Log.e(K9.LOG_TAG, "Failure processing command, but allow message sync attempt", e);
            commandException = e;
        }

        /*
         * Get the message list from the local store and create an index of
         * the uids within the list.
         */
        if (K9.DEBUG)
            Log.v(K9.LOG_TAG, "SYNC: About to get local folder " + folder);

        final LocalStore localStore = account.getLocalStore();
        tLocalFolder = localStore.getFolder(folder);
        final LocalFolder localFolder = tLocalFolder;
        localFolder.open(Folder.OPEN_MODE_RW);
        localFolder.updateLastUid();
        List<? extends Message> localMessages = localFolder.getMessages(null);
        Map<String, Message> localUidMap = new HashMap<String, Message>();
        for (Message message : localMessages) {
            localUidMap.put(message.getUid(), message);
        }

        if (providedRemoteFolder != null) {
            if (K9.DEBUG)
                Log.v(K9.LOG_TAG, "SYNC: using providedRemoteFolder " + folder);
            remoteFolder = providedRemoteFolder;
        } else {
            Store remoteStore = account.getRemoteStore();

            if (K9.DEBUG)
                Log.v(K9.LOG_TAG, "SYNC: About to get remote folder " + folder);
            remoteFolder = remoteStore.getFolder(folder);

            if (! verifyOrCreateRemoteSpecialFolder(account, folder, remoteFolder, listener)) {
                return;
            }

            /*
             * Synchronization process:
             *
             * Open the folder
             * Upload any local messages that are marked as PENDING_UPLOAD (Drafts, Sent, Trash)
             * Get the message count
             * Get the list of the newest K9.DEFAULT_VISIBLE_LIMIT messages
             *   getMessages(messageCount - K9.DEFAULT_VISIBLE_LIMIT, messageCount)
             * See if we have each message locally, if not fetch it's flags and envelope
             * Get and update the unread count for the folder
             * Update the remote flags of any messages we have locally with an internal date newer than the remote message.
             * Get the current flags for any messages we have locally but did not just download
             *   Update local flags
             * For any message we have locally but not remotely, delete the local message to keep cache clean.
             * Download larger parts of any new messages.
             * (Optional) Download small attachments in the background.
             */

            /*
             * Open the remote folder. This pre-loads certain metadata like message count.
             */
            if (K9.DEBUG)
                Log.v(K9.LOG_TAG, "SYNC: About to open remote folder " + folder);

            remoteFolder.open(Folder.OPEN_MODE_RW);
            if (Expunge.EXPUNGE_ON_POLL == account.getExpungePolicy()) {
                if (K9.DEBUG)
                    Log.d(K9.LOG_TAG, "SYNC: Expunging folder " + account.getDescription() + ":" + folder);
                remoteFolder.expunge();
            }

        }

        /*
         * Get the remote message count.
         */
        int remoteMessageCount = remoteFolder.getMessageCount();

        int visibleLimit = localFolder.getVisibleLimit();

        if (visibleLimit < 0) {
            visibleLimit = K9.DEFAULT_VISIBLE_LIMIT;
        }

        final List<Message> remoteMessages = new ArrayList<Message>();
        Map<String, Message> remoteUidMap = new HashMap<String, Message>();

        if (K9.DEBUG)
            Log.v(K9.LOG_TAG, "SYNC: Remote message count for folder " + folder + " is " + remoteMessageCount);

        final Date earliestDate = account.getEarliestPollDate();

        if (remoteMessageCount > 0) {
            /* Message numbers start at 1. */
            int remoteStart;
            if (visibleLimit > 0) {
                remoteStart = Math.max(0, remoteMessageCount - visibleLimit) + 1;
            } else {
                remoteStart = 1;
            }
            int remoteEnd = remoteMessageCount;

            if (K9.DEBUG)
                Log.v(K9.LOG_TAG, "SYNC: About to get messages " + remoteStart + " through " + remoteEnd + " for folder " + folder);

            final AtomicInteger headerProgress = new AtomicInteger(0);
            for (MessagingListener l : getListeners(listener)) {
                l.synchronizeMailboxHeadersStarted(account, folder);
            }

            List<? extends Message> remoteMessageArray =
                    remoteFolder.getMessages(remoteStart, remoteEnd, earliestDate, null);

            int messageCount = remoteMessageArray.size();

            for (Message thisMess : remoteMessageArray) {
                headerProgress.incrementAndGet();
                for (MessagingListener l : getListeners(listener)) {
                    l.synchronizeMailboxHeadersProgress(account, folder, headerProgress.get(), messageCount);
                }
                Message localMessage = localUidMap.get(thisMess.getUid());
                // Keep only messages that are new locally or not older than the poll cutoff.
                if (localMessage == null || !localMessage.olderThan(earliestDate)) {
                    remoteMessages.add(thisMess);
                    remoteUidMap.put(thisMess.getUid(), thisMess);
                }
            }

            if (K9.DEBUG)
                Log.v(K9.LOG_TAG, "SYNC: Got " + remoteUidMap.size() + " messages for folder " + folder);

            for (MessagingListener l : getListeners(listener)) {
                l.synchronizeMailboxHeadersFinished(account, folder, headerProgress.get(), remoteUidMap.size());
            }

        } else if (remoteMessageCount < 0) {
            throw new Exception("Message count " + remoteMessageCount + " for folder " + folder);
        }

        /*
         * Remove any messages that are in the local store but no longer on the remote store or are too old
         */
        if (account.syncRemoteDeletions()) {
            List<Message> destroyMessages = new ArrayList<Message>();
            for (Message localMessage : localMessages) {
                if (remoteUidMap.get(localMessage.getUid()) == null) {
                    destroyMessages.add(localMessage);
                }
            }

            localFolder.destroyMessages(destroyMessages);

            for (Message destroyMessage : destroyMessages) {
                for (MessagingListener l : getListeners(listener)) {
                    l.synchronizeMailboxRemovedMessage(account, folder, destroyMessage);
                }
            }
        }
        localMessages = null;

        /*
         * Now we download the actual content of messages.
         */
        int newMessages = downloadMessages(account, remoteFolder, localFolder, remoteMessages, false);

        int unreadMessageCount = localFolder.getUnreadMessageCount();
        for (MessagingListener l : getListeners()) {
            l.folderStatusChanged(account, folder, unreadMessageCount);
        }

        /* Notify listeners that we're finally done. */
        localFolder.setLastChecked(System.currentTimeMillis());
        localFolder.setStatus(null);

        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "Done synchronizing folder " + account.getDescription() + ":" + folder +
                  " @ " + new Date() + " with " + newMessages + " new messages");

        for (MessagingListener l : getListeners(listener)) {
            l.synchronizeMailboxFinished(account, folder, remoteMessageCount, newMessages);
        }

        // Report the pending-command failure remembered at the top of the sync.
        if (commandException != null) {
            String rootMessage = getRootCauseMessage(commandException);
            Log.e(K9.LOG_TAG, "Root cause failure in " + account.getDescription() + ":" +
                  tLocalFolder.getName() + " was '" + rootMessage + "'");
            localFolder.setStatus(rootMessage);
            for (MessagingListener l : getListeners(listener)) {
                l.synchronizeMailboxFailed(account, folder, rootMessage);
            }
        }

        if (K9.DEBUG)
            Log.i(K9.LOG_TAG, "Done synchronizing folder " + account.getDescription() + ":" + folder);

    } catch (Exception e) {
        Log.e(K9.LOG_TAG, "synchronizeMailbox", e);
        // If we don't set the last checked, it can try too often during
        // failure conditions
        String rootMessage = getRootCauseMessage(e);
        if (tLocalFolder != null) {
            try {
                tLocalFolder.setStatus(rootMessage);
                tLocalFolder.setLastChecked(System.currentTimeMillis());
            } catch (MessagingException me) {
                Log.e(K9.LOG_TAG, "Could not set last checked on folder " + account.getDescription() + ":" +
                      tLocalFolder.getName(), e);
            }
        }

        for (MessagingListener l : getListeners(listener)) {
            l.synchronizeMailboxFailed(account, folder, rootMessage);
        }
        notifyUserIfCertificateProblem(context, e, account, true);
        addErrorMessage(account, null, e);
        Log.e(K9.LOG_TAG, "Failed synchronizing folder " + account.getDescription() + ":" + folder +
              " @ " + new Date());

    } finally {
        if (providedRemoteFolder == null) {
            closeFolder(remoteFolder);
        }

        closeFolder(tLocalFolder);
    }
}

// Null-safe close helper used for both local and remote folders.
private void closeFolder(Folder f) {
    if (f != null) {
        f.close();
    }
}


/*
 * If the folder is a "special" folder we need to see if it exists
 * on the remote server. If it does not exist we'll try to create it.
If we * can't create we'll abort. This will happen on every single Pop3 folder as * designed and on Imap folders during error conditions. This allows us * to treat Pop3 and Imap the same in this code. */ private boolean verifyOrCreateRemoteSpecialFolder(final Account account, final String folder, final Folder remoteFolder, final MessagingListener listener) throws MessagingException { if (folder.equals(account.getTrashFolderName()) || folder.equals(account.getSentFolderName()) || folder.equals(account.getDraftsFolderName())) { if (!remoteFolder.exists()) { if (!remoteFolder.create(FolderType.HOLDS_MESSAGES)) { for (MessagingListener l : getListeners(listener)) { l.synchronizeMailboxFinished(account, folder, 0, 0); } if (K9.DEBUG) Log.i(K9.LOG_TAG, "Done synchronizing folder " + folder); return false; } } } return true; } /** * Fetches the messages described by inputMessages from the remote store and writes them to * local storage. * * @param account * The account the remote store belongs to. * @param remoteFolder * The remote folder to download messages from. * @param localFolder * The {@link LocalFolder} instance corresponding to the remote folder. * @param inputMessages * A list of messages objects that store the UIDs of which messages to download. * @param flagSyncOnly * Only flags will be fetched from the remote store if this is {@code true}. * * @return The number of downloaded messages that are not flagged as {@link Flag#SEEN}. 
* * @throws MessagingException */ private int downloadMessages(final Account account, final Folder remoteFolder, final LocalFolder localFolder, List<Message> inputMessages, boolean flagSyncOnly) throws MessagingException { final Date earliestDate = account.getEarliestPollDate(); Date downloadStarted = new Date(); // now if (earliestDate != null) { if (K9.DEBUG) { Log.d(K9.LOG_TAG, "Only syncing messages after " + earliestDate); } } final String folder = remoteFolder.getName(); int unreadBeforeStart = 0; try { AccountStats stats = account.getStats(context); unreadBeforeStart = stats.unreadMessageCount; } catch (MessagingException e) { Log.e(K9.LOG_TAG, "Unable to getUnreadMessageCount for account: " + account, e); } List<Message> syncFlagMessages = new ArrayList<Message>(); List<Message> unsyncedMessages = new ArrayList<Message>(); final AtomicInteger newMessages = new AtomicInteger(0); List<Message> messages = new ArrayList<Message>(inputMessages); for (Message message : messages) { evaluateMessageForDownload(message, folder, localFolder, remoteFolder, account, unsyncedMessages, syncFlagMessages , flagSyncOnly); } final AtomicInteger progress = new AtomicInteger(0); final int todo = unsyncedMessages.size() + syncFlagMessages.size(); for (MessagingListener l : getListeners()) { l.synchronizeMailboxProgress(account, folder, progress.get(), todo); } if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Have " + unsyncedMessages.size() + " unsynced messages"); messages.clear(); final List<Message> largeMessages = new ArrayList<Message>(); final List<Message> smallMessages = new ArrayList<Message>(); if (!unsyncedMessages.isEmpty()) { /* * Reverse the order of the messages. Depending on the server this may get us * fetch results for newest to oldest. If not, no harm done. 
*/ Collections.sort(unsyncedMessages, new UidReverseComparator()); int visibleLimit = localFolder.getVisibleLimit(); int listSize = unsyncedMessages.size(); if ((visibleLimit > 0) && (listSize > visibleLimit)) { unsyncedMessages = unsyncedMessages.subList(0, visibleLimit); } FetchProfile fp = new FetchProfile(); if (remoteFolder.supportsFetchingFlags()) { fp.add(FetchProfile.Item.FLAGS); } fp.add(FetchProfile.Item.ENVELOPE); if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: About to fetch " + unsyncedMessages.size() + " unsynced messages for folder " + folder); fetchUnsyncedMessages(account, remoteFolder, localFolder, unsyncedMessages, smallMessages, largeMessages, progress, todo, fp); // If a message didn't exist, messageFinished won't be called, but we shouldn't try again // If we got here, nothing failed for (Message message : unsyncedMessages) { String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message); if (newPushState != null) { localFolder.setPushState(newPushState); } } if (K9.DEBUG) { Log.d(K9.LOG_TAG, "SYNC: Synced unsynced messages for folder " + folder); } } if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Have " + largeMessages.size() + " large messages and " + smallMessages.size() + " small messages out of " + unsyncedMessages.size() + " unsynced messages"); unsyncedMessages.clear(); /* * Grab the content of the small messages first. This is going to * be very fast and at very worst will be a single up of a few bytes and a single * download of 625k. */ FetchProfile fp = new FetchProfile(); fp.add(FetchProfile.Item.BODY); // fp.add(FetchProfile.Item.FLAGS); // fp.add(FetchProfile.Item.ENVELOPE); downloadSmallMessages(account, remoteFolder, localFolder, smallMessages, progress, unreadBeforeStart, newMessages, todo, fp); smallMessages.clear(); /* * Now do the large messages that require more round trips. 
*/ fp.clear(); fp.add(FetchProfile.Item.STRUCTURE); downloadLargeMessages(account, remoteFolder, localFolder, largeMessages, progress, unreadBeforeStart, newMessages, todo, fp); largeMessages.clear(); /* * Refresh the flags for any messages in the local store that we didn't just * download. */ refreshLocalMessageFlags(account, remoteFolder, localFolder, syncFlagMessages, progress, todo); if (K9.DEBUG) Log.d(K9.LOG_TAG, "SYNC: Synced remote messages for folder " + folder + ", " + newMessages.get() + " new messages"); localFolder.purgeToVisibleLimit(new MessageRemovalListener() { @Override public void messageRemoved(Message message) { for (MessagingListener l : getListeners()) { l.synchronizeMailboxRemovedMessage(account, folder, message); } } }); // If the oldest message seen on this sync is newer than // the oldest message seen on the previous sync, then // we want to move our high-water mark forward // this is all here just for pop which only syncs inbox // this would be a little wrong for IMAP (we'd want a folder-level pref, not an account level pref.) // fortunately, we just don't care. 
Long oldestMessageTime = localFolder.getOldestMessageDate();

if (oldestMessageTime != null) {
    Date oldestExtantMessage = new Date(oldestMessageTime);
    // Only advance the high-water mark for messages that predate this sync run, so a
    // message that just arrived cannot move the "old message seen" marker forward.
    if (oldestExtantMessage.before(downloadStarted) &&
            oldestExtantMessage.after(new Date(account.getLatestOldMessageSeenTime()))) {
        account.setLatestOldMessageSeenTime(oldestExtantMessage.getTime());
        account.save(Preferences.getPreferences(context));
    }
}

return newMessages.get();
}

/**
 * Decide how a single remote message should be handled by the sync: queued for a full
 * download ({@code unsyncedMessages}), queued for a flag-only refresh
 * ({@code syncFlagMessages}), or — when it is already partially/fully downloaded —
 * stored locally right away with listeners notified.
 */
private void evaluateMessageForDownload(final Message message, final String folder,
                                        final LocalFolder localFolder,
                                        final Folder remoteFolder,
                                        final Account account,
                                        final List<Message> unsyncedMessages,
                                        final List<Message> syncFlagMessages,
                                        boolean flagSyncOnly) throws MessagingException {
    // Deleted messages only ever need their flags synchronized.
    if (message.isSet(Flag.DELETED)) {
        syncFlagMessages.add(message);
        return;
    }

    Message localMessage = localFolder.getMessage(message.getUid());

    if (localMessage == null) {
        if (!flagSyncOnly) {
            if (!message.isSet(Flag.X_DOWNLOADED_FULL) && !message.isSet(Flag.X_DOWNLOADED_PARTIAL)) {
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " has not yet been downloaded");

                unsyncedMessages.add(message);
            } else {
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is partially or fully downloaded");

                // Store the updated message locally
                localFolder.appendMessages(Collections.singletonList(message));

                localMessage = localFolder.getMessage(message.getUid());

                // Carry the download-state flags over onto the freshly stored local copy.
                localMessage.setFlag(Flag.X_DOWNLOADED_FULL, message.isSet(Flag.X_DOWNLOADED_FULL));
                localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, message.isSet(Flag.X_DOWNLOADED_PARTIAL));

                for (MessagingListener l : getListeners()) {
                    l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
                    if (!localMessage.isSet(Flag.SEEN)) {
                        l.synchronizeMailboxNewMessage(account, folder, localMessage);
                    }
                }
            }
        }
    } else if (!localMessage.isSet(Flag.DELETED)) {
        if (K9.DEBUG)
            Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is present in the local store");

        if (!localMessage.isSet(Flag.X_DOWNLOADED_FULL) && !localMessage.isSet(Flag.X_DOWNLOADED_PARTIAL)) {
            if (K9.DEBUG)
                Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is not downloaded, even partially; trying again");

            unsyncedMessages.add(message);
        } else {
            String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message);
            if (newPushState != null) {
                localFolder.setPushState(newPushState);
            }
            syncFlagMessages.add(message);
        }
    }
}

/**
 * Fetch envelope data for the unsynced messages and partition them into
 * {@code smallMessages} and {@code largeMessages} based on the account's maximum
 * auto-download size. Messages with a usable subject and sender are batch-written
 * to the local store in chunks of {@code UNSYNC_CHUNK_SIZE}.
 */
private <T extends Message> void fetchUnsyncedMessages(final Account account, final Folder<T> remoteFolder,
        final LocalFolder localFolder,
        List<T> unsyncedMessages,
        final List<Message> smallMessages,
        final List<Message> largeMessages,
        final AtomicInteger progress,
        final int todo,
        FetchProfile fp) throws MessagingException {
    final String folder = remoteFolder.getName();

    final Date earliestDate = account.getEarliestPollDate();

    /*
     * Messages to be batch written
     */
    final List<Message> chunk = new ArrayList<Message>(UNSYNC_CHUNK_SIZE);

    remoteFolder.fetch(unsyncedMessages, fp,
    new MessageRetrievalListener<T>() {
        @Override
        public void messageFinished(T message, int number, int ofTotal) {
            try {
                String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message);
                if (newPushState != null) {
                    localFolder.setPushState(newPushState);
                }
                // Skip messages deleted on the server or older than the poll window,
                // but still report progress for them.
                if (message.isSet(Flag.DELETED) || message.olderThan(earliestDate)) {

                    if (K9.DEBUG) {
                        if (message.isSet(Flag.DELETED)) {
                            Log.v(K9.LOG_TAG, "Newly downloaded message " + account + ":" + folder + ":" + message.getUid()
                                  + " was marked deleted on server, skipping");
                        } else {
                            Log.d(K9.LOG_TAG, "Newly downloaded message " + message.getUid() + " is older than "
                                  + earliestDate + ", skipping");
                        }
                    }
                    progress.incrementAndGet();
                    for (MessagingListener l : getListeners()) {
                        l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
                    }
                    return;
                }

                // Partition by size: anything over the auto-download limit is "large".
                if (account.getMaximumAutoDownloadMessageSize() > 0 &&
                        message.getSize() > account.getMaximumAutoDownloadMessageSize()) {
                    largeMessages.add(message);
                } else {
                    smallMessages.add(message);
                }

                // And include it in the view
                if (message.getSubject() != null && message.getFrom() != null) {
                    /*
                     * We check to make sure that we got something worth
                     * showing (subject and from) because some protocols
                     * (POP) may not be able to give us headers for
                     * ENVELOPE, only size.
                     */

                    // keep message for delayed storing
                    chunk.add(message);

                    if (chunk.size() >= UNSYNC_CHUNK_SIZE) {
                        writeUnsyncedMessages(chunk, localFolder, account, folder);
                        chunk.clear();
                    }
                }
            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "Error while storing downloaded message.", e);
                addErrorMessage(account, null, e);
            }
        }

        @Override
        public void messageStarted(String uid, int number, int ofTotal) {}

        @Override
        public void messagesFinished(int total) {
            // FIXME this method is almost never invoked by various Stores! Don't rely on it unless fixed!!
        }
    });
    // Flush any remainder smaller than a full chunk.
    if (!chunk.isEmpty()) {
        writeUnsyncedMessages(chunk, localFolder, account, folder);
        chunk.clear();
    }
}

/**
 * Actual storing of messages
 *
 * <br>
 * FIXME: <strong>This method should really be moved in the above MessageRetrievalListener once {@link MessageRetrievalListener#messagesFinished(int)} is properly invoked by various stores</strong>
 *
 * @param messages Never <code>null</code>.
* @param localFolder
 * @param account
 * @param folder
 */
private void writeUnsyncedMessages(final List<Message> messages, final LocalFolder localFolder, final Account account, final String folder) {
    if (K9.DEBUG) {
        Log.v(K9.LOG_TAG, "Batch writing " + Integer.toString(messages.size()) + " messages");
    }
    try {
        // Store the new message locally
        localFolder.appendMessages(messages);

        for (final Message message : messages) {
            final LocalMessage localMessage = localFolder.getMessage(message.getUid());
            syncFlags(localMessage, message);
            if (K9.DEBUG)
                Log.v(K9.LOG_TAG, "About to notify listeners that we got a new unsynced message "
                      + account + ":" + folder + ":" + message.getUid());
            for (final MessagingListener l : getListeners()) {
                l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
            }
        }
    } catch (final Exception e) {
        Log.e(K9.LOG_TAG, "Error while storing downloaded message.", e);
        addErrorMessage(account, null, e);
    }
}

/**
 * Returns {@code false} (do not save) when the account can search by date and the
 * message is older than the account's earliest poll date; {@code true} otherwise.
 */
private boolean shouldImportMessage(final Account account, final String folder, final Message message,
                                    final AtomicInteger progress, final Date earliestDate) {

    if (account.isSearchByDateCapable() && message.olderThan(earliestDate)) {
        if (K9.DEBUG) {
            Log.d(K9.LOG_TAG, "Message " + message.getUid() + " is older than "
                  + earliestDate + ", hence not saving");
        }
        return false;
    }
    return true;
}

/**
 * Download and store the full bodies of the "small" messages (those at or under the
 * account's auto-download size limit), notifying listeners and raising notifications
 * for messages that warrant them.
 */
private <T extends Message> void downloadSmallMessages(final Account account, final Folder<T> remoteFolder,
        final LocalFolder localFolder,
        List<T> smallMessages,
        final AtomicInteger progress,
        final int unreadBeforeStart,
        final AtomicInteger newMessages,
        final int todo,
        FetchProfile fp) throws MessagingException {
    final String folder = remoteFolder.getName();

    final Date earliestDate = account.getEarliestPollDate();

    if (K9.DEBUG)
        Log.d(K9.LOG_TAG, "SYNC: Fetching small messages for folder " + folder);

    remoteFolder.fetch(smallMessages,
    fp, new MessageRetrievalListener<T>() {
        @Override
        public void messageFinished(final T message, int number, int ofTotal) {
            try {

                if (!shouldImportMessage(account, folder, message, progress, earliestDate)) {
                    progress.incrementAndGet();

                    return;
                }

                // Store the updated message locally; the Runnable bumps progress once
                // the store completes.
                final LocalMessage localMessage = localFolder.storeSmallMessage(message, new Runnable() {
                    @Override
                    public void run() {
                        progress.incrementAndGet();
                    }
                });

                // Increment the number of "new messages" if the newly downloaded message is
                // not marked as read.
                if (!localMessage.isSet(Flag.SEEN)) {
                    newMessages.incrementAndGet();
                }

                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "About to notify listeners that we got a new small message "
                          + account + ":" + folder + ":" + message.getUid());

                // Update the listener with what we've found
                for (MessagingListener l : getListeners()) {
                    l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
                    l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
                    if (!localMessage.isSet(Flag.SEEN)) {
                        l.synchronizeMailboxNewMessage(account, folder, localMessage);
                    }
                }
                // Send a notification of this message

                if (shouldNotifyForMessage(account, localFolder, message)) {
                    // Notify with the localMessage so that we don't have to recalculate the content preview.
                    notifyAccount(context, account, localMessage, unreadBeforeStart);
                }

            } catch (MessagingException me) {
                addErrorMessage(account, null, me);
                Log.e(K9.LOG_TAG, "SYNC: fetch small messages", me);
            }
        }

        @Override
        public void messageStarted(String uid, int number, int ofTotal) {}

        @Override
        public void messagesFinished(int total) {}
    });

    if (K9.DEBUG)
        Log.d(K9.LOG_TAG, "SYNC: Done fetching small messages for folder " + folder);
}

/**
 * Download the "large" messages (those over the account's auto-download size limit).
 * When the store can provide the message structure, only the text parts are fetched
 * now; otherwise a size-capped portion of the body is fetched and the message is
 * flagged as partially downloaded.
 */
private <T extends Message> void downloadLargeMessages(final Account account, final Folder<T> remoteFolder,
        final LocalFolder localFolder,
        List<T> largeMessages,
        final AtomicInteger progress,
        final int unreadBeforeStart,
        final AtomicInteger newMessages,
        final int todo,
        FetchProfile fp) throws MessagingException {
    final String folder = remoteFolder.getName();

    final Date earliestDate = account.getEarliestPollDate();

    if (K9.DEBUG)
        Log.d(K9.LOG_TAG, "SYNC: Fetching large messages for folder " + folder);

    remoteFolder.fetch(largeMessages, fp, null);
    for (T message : largeMessages) {

        if (!shouldImportMessage(account, folder, message, progress, earliestDate)) {
            progress.incrementAndGet();
            continue;
        }

        if (message.getBody() == null) {
            /*
             * The provider was unable to get the structure of the message, so
             * we'll download a reasonable portion of the message and mark it as
             * incomplete so the entire thing can be downloaded later if the user
             * wishes to download it.
             */
            fp.clear();
            fp.add(FetchProfile.Item.BODY_SANE);
            /*
             * TODO a good optimization here would be to make sure that all Stores set
             * the proper size after this fetch and compare the before and after size.
* If they equal we can mark this SYNCHRONIZED instead of PARTIALLY_SYNCHRONIZED
             */
            remoteFolder.fetch(Collections.singletonList(message), fp, null);

            // Store the updated message locally
            localFolder.appendMessages(Collections.singletonList(message));

            Message localMessage = localFolder.getMessage(message.getUid());

            // Certain (POP3) servers give you the whole message even when you ask for only the first x Kb
            if (!message.isSet(Flag.X_DOWNLOADED_FULL)) {
                /*
                 * Mark the message as fully downloaded if the message size is smaller than
                 * the account's autodownload size limit, otherwise mark as only a partial
                 * download.  This will prevent the system from downloading the same message
                 * twice.
                 *
                 * If there is no limit on autodownload size, that's the same as the message
                 * being smaller than the max size
                 */
                if (account.getMaximumAutoDownloadMessageSize() == 0 || message.getSize() < account.getMaximumAutoDownloadMessageSize()) {
                    localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
                } else {
                    // Set a flag indicating that the message has been partially downloaded and
                    // is ready for view.
                    localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, true);
                }
            }
        } else {
            /*
             * We have a structure to deal with, from which
             * we can pull down the parts we want to actually store.
             * Build a list of parts we are interested in. Text parts will be downloaded
             * right now, attachments will be left for later.
             */

            Set<Part> viewables = MessageExtractor.collectTextParts(message);

            /*
             * Now download the parts we're interested in storing.
             */
            for (Part part : viewables) {
                remoteFolder.fetchPart(message, part, null);
            }
            // Store the updated message locally
            localFolder.appendMessages(Collections.singletonList(message));

            Message localMessage = localFolder.getMessage(message.getUid());

            // Only the text parts were fetched above (attachments are left for later),
            // so the message is flagged as partially — not fully — downloaded.
            localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, true);
        }
        if (K9.DEBUG)
            Log.v(K9.LOG_TAG, "About to notify listeners that we got a new large message "
                  + account + ":" + folder + ":" + message.getUid());

        // Update the listener with what we've found
        progress.incrementAndGet();
        // TODO do we need to re-fetch this here?
        LocalMessage localMessage = localFolder.getMessage(message.getUid());

        // Increment the number of "new messages" if the newly downloaded message is
        // not marked as read.
        if (!localMessage.isSet(Flag.SEEN)) {
            newMessages.incrementAndGet();
        }

        for (MessagingListener l : getListeners()) {
            l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
            l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
            if (!localMessage.isSet(Flag.SEEN)) {
                l.synchronizeMailboxNewMessage(account, folder, localMessage);
            }
        }
        // Send a notification of this message
        if (shouldNotifyForMessage(account, localFolder, message)) {
            // Notify with the localMessage so that we don't have to recalculate the content preview.
            notifyAccount(context, account, localMessage, unreadBeforeStart);
        }
    }//for large messages

    if (K9.DEBUG)
        Log.d(K9.LOG_TAG, "SYNC: Done fetching large messages for folder " + folder);
}

/**
 * Re-fetch flags from the server for messages that are already downloaded locally,
 * applying any changes via {@link #syncFlags} and updating listeners/notifications
 * accordingly. No-op when the store cannot fetch flags.
 */
private void refreshLocalMessageFlags(final Account account, final Folder remoteFolder,
                                      final LocalFolder localFolder,
                                      List<Message> syncFlagMessages,
                                      final AtomicInteger progress,
                                      final int todo
                                     ) throws MessagingException {

    final String folder = remoteFolder.getName();
    if (remoteFolder.supportsFetchingFlags()) {
        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "SYNC: About to sync flags for "
                  + syncFlagMessages.size() + " remote messages for folder " + folder);

        FetchProfile fp = new FetchProfile();
        fp.add(FetchProfile.Item.FLAGS);

        // Messages deleted on the server are excluded from the flag fetch.
        List<Message> undeletedMessages = new LinkedList<Message>();
        for (Message message : syncFlagMessages) {
            if (!message.isSet(Flag.DELETED)) {
                undeletedMessages.add(message);
            }
        }

        remoteFolder.fetch(undeletedMessages, fp, null);
        for (Message remoteMessage : syncFlagMessages) {
            LocalMessage localMessage = localFolder.getMessage(remoteMessage.getUid());
            boolean messageChanged = syncFlags(localMessage, remoteMessage);
            if (messageChanged) {
                boolean shouldBeNotifiedOf = false;
                if (localMessage.isSet(Flag.DELETED) || isMessageSuppressed(localMessage)) {
                    for (MessagingListener l : getListeners()) {
                        l.synchronizeMailboxRemovedMessage(account, folder, localMessage);
                    }
                } else {
                    for (MessagingListener l : getListeners()) {
                        l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
                    }
                    if (shouldNotifyForMessage(account, localFolder, localMessage)) {
                        shouldBeNotifiedOf = true;
                    }
                }

                // we're only interested in messages that need removing
                if (!shouldBeNotifiedOf) {
                    NotificationData data = getNotificationData(account, null);
                    if (data != null) {
                        synchronized (data) {
                            MessageReference ref = localMessage.makeMessageReference();
                            if (data.removeMatchingMessage(context, ref)) {
                                notifyAccountWithDataLocked(context, account, null, data);
                            }
                        }
                    }
                }
            }
            progress.incrementAndGet();
            for (MessagingListener l : getListeners()) {
                l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
            }
        }
    }
}

/**
 * Copy the server-side flags from {@code remoteMessage} onto {@code localMessage}.
 * A remote DELETED flag is only applied when the local folder syncs remote deletions.
 *
 * @return {@code true} when at least one local flag actually changed.
 */
private boolean syncFlags(LocalMessage localMessage, Message remoteMessage) throws MessagingException {
    boolean messageChanged = false;
    if (localMessage == null || localMessage.isSet(Flag.DELETED)) {
        return false;
    }
    if (remoteMessage.isSet(Flag.DELETED)) {
        if (localMessage.getFolder().syncRemoteDeletions()) {
            localMessage.setFlag(Flag.DELETED, true);
            messageChanged = true;
        }
    } else {
        for (Flag flag : MessagingController.SYNC_FLAGS) {
            if (remoteMessage.isSet(flag) != localMessage.isSet(flag)) {
                localMessage.setFlag(flag, remoteMessage.isSet(flag));
                messageChanged = true;
            }
        }
    }
    return messageChanged;
}

/**
 * Walk the cause chain of {@code t} to its deepest cause and return a short,
 * human-readable message for it.
 */
private String getRootCauseMessage(Throwable t) {
    Throwable rootCause = t;
    Throwable nextCause = rootCause;
    do {
        nextCause = rootCause.getCause();
        if (nextCause != null) {
            rootCause = nextCause;
        }
    } while (nextCause != null);
    if (rootCause instanceof MessagingException) {
        return rootCause.getMessage();
    } else {
        // Remove the namespace on the exception so we have a fighting chance of seeing more of the error in the
        // notification.
        return (rootCause.getLocalizedMessage() != null) ?
(rootCause.getClass().getSimpleName() + ": " + rootCause.getLocalizedMessage()) :
               rootCause.getClass().getSimpleName();
    }
}

/**
 * Persist a pending command to the account's local store so it survives restarts
 * and can be replayed against the server later.
 */
private void queuePendingCommand(Account account, PendingCommand command) {
    try {
        LocalStore localStore = account.getLocalStore();
        localStore.addPendingCommand(command);
    } catch (Exception e) {
        addErrorMessage(account, null, e);

        throw new RuntimeException("Unable to enqueue pending command", e);
    }
}

/**
 * Run {@link #processPendingCommandsSynchronous(Account)} on a background thread,
 * swallowing MessagingExceptions (commands are retried on the next round) but
 * surfacing storage unavailability as an UnavailableAccountException.
 */
private void processPendingCommands(final Account account) {
    putBackground("processPendingCommands", null, new Runnable() {
        @Override
        public void run() {
            try {
                processPendingCommandsSynchronous(account);
            } catch (UnavailableStorageException e) {
                Log.i(K9.LOG_TAG, "Failed to process pending command because storage is not available - trying again later.");
                throw new UnavailableAccountException(e);
            } catch (MessagingException me) {
                Log.e(K9.LOG_TAG, "processPendingCommands", me);

                addErrorMessage(account, null, me);

                /*
                 * Ignore any exceptions from the commands. Commands will be processed
                 * on the next round.
                 */
            }
        }
    });
}

/**
 * Replay all queued pending commands for the account, in order, dispatching each to
 * its handler by command name. Permanent failures are dropped from the queue;
 * transient failures abort processing so order is preserved.
 */
private void processPendingCommandsSynchronous(Account account) throws MessagingException {
    LocalStore localStore = account.getLocalStore();
    List<PendingCommand> commands = localStore.getPendingCommands();

    int progress = 0;
    int todo = commands.size();
    if (todo == 0) {
        return;
    }

    for (MessagingListener l : getListeners()) {
        l.pendingCommandsProcessing(account);
        l.synchronizeMailboxProgress(account, null, progress, todo);
    }

    PendingCommand processingCommand = null;
    try {
        for (PendingCommand command : commands) {
            processingCommand = command;
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "Processing pending command '" + command + "'");

            // The listener-facing title is the last dot-separated component of the command name.
            String[] components = command.command.split("\\.");
            String commandTitle = components[components.length - 1];
            for (MessagingListener l : getListeners()) {
                l.pendingCommandStarted(account, commandTitle);
            }
            /*
             * We specifically do not catch any exceptions here. If a command fails it is
             * most likely due to a server or IO error and it must be retried before any
             * other command processes. This maintains the order of the commands.
             */
            try {
                if (PENDING_COMMAND_APPEND.equals(command.command)) {
                    processPendingAppend(command, account);
                } else if (PENDING_COMMAND_SET_FLAG_BULK.equals(command.command)) {
                    processPendingSetFlag(command, account);
                } else if (PENDING_COMMAND_SET_FLAG.equals(command.command)) {
                    processPendingSetFlagOld(command, account);
                } else if (PENDING_COMMAND_MARK_ALL_AS_READ.equals(command.command)) {
                    processPendingMarkAllAsRead(command, account);
                } else if (PENDING_COMMAND_MOVE_OR_COPY_BULK.equals(command.command)) {
                    processPendingMoveOrCopyOld2(command, account);
                } else if (PENDING_COMMAND_MOVE_OR_COPY_BULK_NEW.equals(command.command)) {
                    processPendingMoveOrCopy(command, account);
                } else if (PENDING_COMMAND_MOVE_OR_COPY.equals(command.command)) {
                    processPendingMoveOrCopyOld(command, account);
                } else if (PENDING_COMMAND_EMPTY_TRASH.equals(command.command)) {
                    processPendingEmptyTrash(command, account);
                } else if (PENDING_COMMAND_EXPUNGE.equals(command.command)) {
                    processPendingExpunge(command, account);
                }
                localStore.removePendingCommand(command);
                if (K9.DEBUG)
                    Log.d(K9.LOG_TAG, "Done processing pending command '" + command + "'");
            } catch (MessagingException me) {
                if (me.isPermanentFailure()) {
                    // A permanent failure will never succeed on retry; drop the command.
                    addErrorMessage(account, null, me);
                    Log.e(K9.LOG_TAG, "Failure of command '" + command + "' was permanent, removing command from queue");
                    localStore.removePendingCommand(processingCommand);
                } else {
                    throw me;
                }
            } finally {
                progress++;
                for (MessagingListener l : getListeners()) {
                    l.synchronizeMailboxProgress(account, null, progress, todo);
                    l.pendingCommandCompleted(account, commandTitle);
                }
            }
        }
    } catch (MessagingException me) {
        notifyUserIfCertificateProblem(context, me, account, true);
        addErrorMessage(account, null, me);
        Log.e(K9.LOG_TAG, "Could not process command '" + processingCommand + "'", me);
        throw me;
    } finally {
for (MessagingListener l : getListeners()) {
            l.pendingCommandsFinished(account);
        }
    }
}

/**
 * Process a pending append message command. This command uploads a local message to the
 * server, first checking to be sure that the server message is not newer than
 * the local message. Once the local message is successfully processed it is deleted so
 * that the server message will be synchronized down without an additional copy being
 * created.
 * TODO update the local message UID instead of deleteing it
 *
 * @param command arguments = (String folder, String uid)
 * @param account
 * @throws MessagingException
 */
private void processPendingAppend(PendingCommand command, Account account) throws MessagingException {
    Folder remoteFolder = null;
    LocalFolder localFolder = null;
    try {

        String folder = command.arguments[0];
        String uid = command.arguments[1];

        if (account.getErrorFolderName().equals(folder)) {
            return;
        }

        LocalStore localStore = account.getLocalStore();
        localFolder = localStore.getFolder(folder);
        LocalMessage localMessage = localFolder.getMessage(uid);

        if (localMessage == null) {
            return;
        }

        Store remoteStore = account.getRemoteStore();
        remoteFolder = remoteStore.getFolder(folder);
        if (!remoteFolder.exists()) {
            if (!remoteFolder.create(FolderType.HOLDS_MESSAGES)) {
                return;
            }
        }
        remoteFolder.open(Folder.OPEN_MODE_RW);
        if (remoteFolder.getMode() != Folder.OPEN_MODE_RW) {
            return;
        }

        // Only look for the message remotely if the local UID is a real server UID.
        Message remoteMessage = null;
        if (!localMessage.getUid().startsWith(K9.LOCAL_UID_PREFIX)) {
            remoteMessage = remoteFolder.getMessage(localMessage.getUid());
        }

        if (remoteMessage == null) {
            if (localMessage.isSet(Flag.X_REMOTE_COPY_STARTED)) {
                // A previous append attempt may have succeeded without us recording the
                // new UID — look the message up by Message-ID to avoid a duplicate copy.
                Log.w(K9.LOG_TAG, "Local message with uid " + localMessage.getUid() +
                      " has flag " + Flag.X_REMOTE_COPY_STARTED + " already set, checking for remote message with " +
                      " same message id");
                String rUid = remoteFolder.getUidFromMessageId(localMessage);
                if (rUid != null) {
                    Log.w(K9.LOG_TAG, "Local message has flag " + Flag.X_REMOTE_COPY_STARTED + " already set, and there is a remote message with " +
                          " uid " + rUid + ", assuming message was already copied and aborting this copy");

                    String oldUid = localMessage.getUid();
                    localMessage.setUid(rUid);
                    localFolder.changeUid(localMessage);
                    for (MessagingListener l : getListeners()) {
                        l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
                    }
                    return;
                } else {
                    Log.w(K9.LOG_TAG, "No remote message with message-id found, proceeding with append");
                }
            }

            /*
             * If the message does not exist remotely we just upload it and then
             * update our local copy with the new uid.
             */
            FetchProfile fp = new FetchProfile();
            fp.add(FetchProfile.Item.BODY);
            localFolder.fetch(Collections.singletonList(localMessage) , fp, null);
            String oldUid = localMessage.getUid();
            localMessage.setFlag(Flag.X_REMOTE_COPY_STARTED, true);
            remoteFolder.appendMessages(Collections.singletonList(localMessage));

            localFolder.changeUid(localMessage);
            for (MessagingListener l : getListeners()) {
                l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
            }
        } else {
            /*
             * If the remote message exists we need to determine which copy to keep.
             */
            /*
             * See if the remote message is newer than ours.
             */
            FetchProfile fp = new FetchProfile();
            fp.add(FetchProfile.Item.ENVELOPE);
            remoteFolder.fetch(Collections.singletonList(remoteMessage), fp, null);
            Date localDate = localMessage.getInternalDate();
            Date remoteDate = remoteMessage.getInternalDate();
            if (remoteDate != null && remoteDate.compareTo(localDate) > 0) {
                /*
                 * If the remote message is newer than ours we'll just
                 * delete ours and move on. A sync will get the server message
                 * if we need to be able to see it.
                 */
                localMessage.destroy();
            } else {
                /*
                 * Otherwise we'll upload our message and then delete the remote message.
                 */
                // NOTE(review): fp.clear() immediately followed by reassigning a fresh
                // FetchProfile is redundant — one of the two would suffice.
                fp.clear();
                fp = new FetchProfile();
                fp.add(FetchProfile.Item.BODY);
                localFolder.fetch(Collections.singletonList(localMessage), fp, null);
                String oldUid = localMessage.getUid();

                localMessage.setFlag(Flag.X_REMOTE_COPY_STARTED, true);

                remoteFolder.appendMessages(Collections.singletonList(localMessage));
                localFolder.changeUid(localMessage);
                for (MessagingListener l : getListeners()) {
                    l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
                }
                if (remoteDate != null) {
                    remoteMessage.setFlag(Flag.DELETED, true);
                    if (Expunge.EXPUNGE_IMMEDIATELY == account.getExpungePolicy()) {
                        remoteFolder.expunge();
                    }
                }
            }
        }
    } finally {
        closeFolder(remoteFolder);
        closeFolder(localFolder);
    }
}

/**
 * Queue a bulk move/copy command (no local→remote UID map). Arguments are laid out as
 * (srcFolder, destFolder, isCopy, hasNewUids=false, uid...).
 */
private void queueMoveOrCopy(Account account, String srcFolder, String destFolder, boolean isCopy, String uids[]) {
    if (account.getErrorFolderName().equals(srcFolder)) {
        return;
    }
    PendingCommand command = new PendingCommand();
    command.command = PENDING_COMMAND_MOVE_OR_COPY_BULK_NEW;

    int length = 4 + uids.length;
    command.arguments = new String[length];
    command.arguments[0] = srcFolder;
    command.arguments[1] = destFolder;
    command.arguments[2] = Boolean.toString(isCopy);
    command.arguments[3] = Boolean.toString(false);
    System.arraycopy(uids, 0, command.arguments, 4, uids.length);
    queuePendingCommand(account, command);
}

/**
 * Queue a bulk move/copy command carrying a UID map; the map's keys and values are
 * flattened into the argument array (keys first, then values) after the 4 header slots.
 */
private void queueMoveOrCopy(Account account, String srcFolder, String destFolder, boolean isCopy, String uids[], Map<String, String> uidMap) {
    if (uidMap == null || uidMap.isEmpty()) {
        queueMoveOrCopy(account, srcFolder, destFolder, isCopy, uids);
    } else {
        if (account.getErrorFolderName().equals(srcFolder)) {
            return;
        }
        PendingCommand command = new PendingCommand();
        command.command = PENDING_COMMAND_MOVE_OR_COPY_BULK_NEW;

        int length = 4 + uidMap.keySet().size() + uidMap.values().size();
        command.arguments = new String[length];
        command.arguments[0] = srcFolder;
        command.arguments[1] = destFolder;
        command.arguments[2] = Boolean.toString(isCopy);
        command.arguments[3] =
Boolean.toString(true);
        // NOTE(review): keySet() and values() are copied separately; this relies on both
        // views sharing the same iteration order, which holds only because the map is not
        // modified between the two calls — confirm before changing the map implementation.
        System.arraycopy(uidMap.keySet().toArray(), 0, command.arguments, 4, uidMap.keySet().size());
        System.arraycopy(uidMap.values().toArray(), 0, command.arguments, 4 + uidMap.keySet().size(), uidMap.values().size());
        queuePendingCommand(account, command);
    }
}

/**
 * Convert pending command to new format and call
 * {@link #processPendingMoveOrCopy(PendingCommand, Account)}.
 *
 * <p>
 * TODO: This method is obsolete and is only for transition from K-9 4.0 to K-9 4.2
 * Eventually, it should be removed.
 * </p>
 *
 * @param command
 *         Pending move/copy command in old format.
 * @param account
 *         The account the pending command belongs to.
 *
 * @throws MessagingException
 *         In case of an error.
 */
private void processPendingMoveOrCopyOld2(PendingCommand command, Account account) throws MessagingException {
    PendingCommand newCommand = new PendingCommand();
    int len = command.arguments.length;
    newCommand.command = PENDING_COMMAND_MOVE_OR_COPY_BULK_NEW;
    // New format inserts hasNewUids=false at index 3 and shifts the UIDs right by one.
    newCommand.arguments = new String[len + 1];
    newCommand.arguments[0] = command.arguments[0];
    newCommand.arguments[1] = command.arguments[1];
    newCommand.arguments[2] = command.arguments[2];
    newCommand.arguments[3] = Boolean.toString(false);
    System.arraycopy(command.arguments, 3, newCommand.arguments, 4, len - 3);

    processPendingMoveOrCopy(newCommand, account);
}

/**
 * Process a pending trash message command.
 *
 * @param command arguments = (String folder, String uid)
 * @param account
 * @throws MessagingException
 */
private void processPendingMoveOrCopy(PendingCommand command, Account account) throws MessagingException {
    Folder remoteSrcFolder = null;
    Folder remoteDestFolder = null;
    LocalFolder localDestFolder = null;
    try {
        String srcFolder = command.arguments[0];
        if (account.getErrorFolderName().equals(srcFolder)) {
            return;
        }
        String destFolder = command.arguments[1];
        String isCopyS = command.arguments[2];
        String hasNewUidsS = command.arguments[3];

        boolean hasNewUids = false;
        if (hasNewUidsS != null) {
            hasNewUids = Boolean.parseBoolean(hasNewUidsS);
        }

        Store remoteStore = account.getRemoteStore();
        remoteSrcFolder = remoteStore.getFolder(srcFolder);

        Store localStore = account.getLocalStore();
        localDestFolder = (LocalFolder) localStore.getFolder(destFolder);
        List<Message> messages = new ArrayList<Message>();

        /*
         * We split up the localUidMap into two parts while sending the command, here we assemble it back.
         */
        Map<String, String> localUidMap = new HashMap<String, String>();
        if (hasNewUids) {

            // Keys occupy arguments[4 .. 4+offset), values the following offset slots.
            int offset = (command.arguments.length - 4) / 2;

            for (int i = 4; i < 4 + offset; i++) {
                localUidMap.put(command.arguments[i], command.arguments[i + offset]);

                String uid = command.arguments[i];
                if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) {
                    messages.add(remoteSrcFolder.getMessage(uid));
                }
            }

        } else {
            for (int i = 4; i < command.arguments.length; i++) {
                String uid = command.arguments[i];
                if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) {
                    messages.add(remoteSrcFolder.getMessage(uid));
                }
            }
        }

        boolean isCopy = false;
        if (isCopyS != null) {
            isCopy = Boolean.parseBoolean(isCopyS);
        }

        if (!remoteSrcFolder.exists()) {
            throw new MessagingException("processingPendingMoveOrCopy: remoteFolder " + srcFolder + " does not exist", true);
        }
        remoteSrcFolder.open(Folder.OPEN_MODE_RW);
        if (remoteSrcFolder.getMode() != Folder.OPEN_MODE_RW) {
            throw new MessagingException("processingPendingMoveOrCopy: could not open remoteSrcFolder " + srcFolder + " read/write", true);
        }

        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "processingPendingMoveOrCopy: source folder = " + srcFolder
                  + ", " + messages.size() + " messages, destination folder = " + destFolder + ", isCopy = " + isCopy);

        Map <String, String> remoteUidMap = null;

        // A move to the trash folder is handled by the store's delete() fast path.
        if (!isCopy && destFolder.equals(account.getTrashFolderName())) {
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "processingPendingMoveOrCopy doing special case for deleting message");

            String destFolderName = destFolder;
            if (K9.FOLDER_NONE.equals(destFolderName)) {
                destFolderName = null;
            }
            remoteSrcFolder.delete(messages, destFolderName);
        } else {
            remoteDestFolder = remoteStore.getFolder(destFolder);

            if (isCopy) {
                remoteUidMap = remoteSrcFolder.copyMessages(messages, remoteDestFolder);
            } else {
                remoteUidMap = remoteSrcFolder.moveMessages(messages, remoteDestFolder);
            }
        }
        if (!isCopy && Expunge.EXPUNGE_IMMEDIATELY == account.getExpungePolicy()) {
            if (K9.DEBUG)
                Log.i(K9.LOG_TAG, "processingPendingMoveOrCopy expunging folder " + account.getDescription() + ":" + srcFolder);

            remoteSrcFolder.expunge();
        }

        /*
         * This next part is used to bring the local UIDs of the local destination folder
         * upto speed with the remote UIDs of remote destination folder.
         */
        if (!localUidMap.isEmpty() && remoteUidMap != null && !remoteUidMap.isEmpty()) {
            for (Map.Entry<String, String> entry : remoteUidMap.entrySet()) {
                String remoteSrcUid = entry.getKey();
                String localDestUid = localUidMap.get(remoteSrcUid);
                String newUid = entry.getValue();

                Message localDestMessage = localDestFolder.getMessage(localDestUid);
                if (localDestMessage != null) {
                    localDestMessage.setUid(newUid);
                    localDestFolder.changeUid((LocalMessage)localDestMessage);
                    for (MessagingListener l : getListeners()) {
                        l.messageUidChanged(account, destFolder, localDestUid, newUid);
                    }
                }
            }
        }
    } finally {
        closeFolder(remoteSrcFolder);
        closeFolder(remoteDestFolder);
    }
}

/**
 * Queue a bulk set-flag command for the given UIDs and kick off pending-command
 * processing in the background.
 */
private void queueSetFlag(final Account account, final String folderName,
                          final String newState, final String flag, final String[] uids) {
    putBackground("queueSetFlag " + account.getDescription() + ":" + folderName, null, new Runnable() {
        @Override
        public void run() {
            PendingCommand command = new PendingCommand();
            command.command = PENDING_COMMAND_SET_FLAG_BULK;
            int length = 3 + uids.length;
            command.arguments = new String[length];
            command.arguments[0] = folderName;
            command.arguments[1] = newState;
            command.arguments[2] = flag;
            System.arraycopy(uids, 0, command.arguments, 3, uids.length);
            queuePendingCommand(account, command);
            processPendingCommands(account);
        }
    });
}

/**
 * Processes a pending mark read or unread command.
* * @param command arguments = (String folder, String uid, boolean read) * @param account */ private void processPendingSetFlag(PendingCommand command, Account account) throws MessagingException { String folder = command.arguments[0]; if (account.getErrorFolderName().equals(folder)) { return; } boolean newState = Boolean.parseBoolean(command.arguments[1]); Flag flag = Flag.valueOf(command.arguments[2]); Store remoteStore = account.getRemoteStore(); Folder remoteFolder = remoteStore.getFolder(folder); if (!remoteFolder.exists() || !remoteFolder.isFlagSupported(flag)) { return; } try { remoteFolder.open(Folder.OPEN_MODE_RW); if (remoteFolder.getMode() != Folder.OPEN_MODE_RW) { return; } List<Message> messages = new ArrayList<Message>(); for (int i = 3; i < command.arguments.length; i++) { String uid = command.arguments[i]; if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) { messages.add(remoteFolder.getMessage(uid)); } } if (messages.isEmpty()) { return; } remoteFolder.setFlags(messages, Collections.singleton(flag), newState); } finally { closeFolder(remoteFolder); } } // TODO: This method is obsolete and is only for transition from K-9 2.0 to K-9 2.1 // Eventually, it should be removed private void processPendingSetFlagOld(PendingCommand command, Account account) throws MessagingException { String folder = command.arguments[0]; String uid = command.arguments[1]; if (account.getErrorFolderName().equals(folder)) { return; } if (K9.DEBUG) Log.d(K9.LOG_TAG, "processPendingSetFlagOld: folder = " + folder + ", uid = " + uid); boolean newState = Boolean.parseBoolean(command.arguments[2]); Flag flag = Flag.valueOf(command.arguments[3]); Folder remoteFolder = null; try { Store remoteStore = account.getRemoteStore(); remoteFolder = remoteStore.getFolder(folder); if (!remoteFolder.exists()) { return; } remoteFolder.open(Folder.OPEN_MODE_RW); if (remoteFolder.getMode() != Folder.OPEN_MODE_RW) { return; } Message remoteMessage = null; if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) { 
remoteMessage = remoteFolder.getMessage(uid);
            }
            if (remoteMessage == null) {
                return;
            }
            remoteMessage.setFlag(flag, newState);
        } finally {
            closeFolder(remoteFolder);
        }
    }

    /**
     * Enqueues a background PENDING_COMMAND_EXPUNGE command for the folder and
     * immediately triggers pending-command processing for the account.
     */
    private void queueExpunge(final Account account, final String folderName) {
        putBackground("queueExpunge " + account.getDescription() + ":" + folderName, null, new Runnable() {
            @Override
            public void run() {
                PendingCommand command = new PendingCommand();
                command.command = PENDING_COMMAND_EXPUNGE;

                command.arguments = new String[1];

                command.arguments[0] = folderName;
                queuePendingCommand(account, command);
                processPendingCommands(account);
            }
        });
    }

    /**
     * Expunges the remote folder named in the command's arguments.
     * No-op for the local error folder, a missing remote folder, or a folder
     * that cannot be opened read/write.
     */
    private void processPendingExpunge(PendingCommand command, Account account)
    throws MessagingException {
        String folder = command.arguments[0];

        if (account.getErrorFolderName().equals(folder)) {
            return;
        }
        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "processPendingExpunge: folder = " + folder);

        Store remoteStore = account.getRemoteStore();
        Folder remoteFolder = remoteStore.getFolder(folder);
        try {
            if (!remoteFolder.exists()) {
                return;
            }
            remoteFolder.open(Folder.OPEN_MODE_RW);
            if (remoteFolder.getMode() != Folder.OPEN_MODE_RW) {
                return;
            }
            remoteFolder.expunge();
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "processPendingExpunge: complete for folder = " + folder);
        } finally {
            closeFolder(remoteFolder);
        }
    }

    // TODO: This method is obsolete and is only for transition from K-9 2.0 to K-9 2.1
    // Eventually, it should be removed
    private void processPendingMoveOrCopyOld(PendingCommand command, Account account)
    throws MessagingException {
        String srcFolder = command.arguments[0];
        String uid = command.arguments[1];
        String destFolder = command.arguments[2];
        String isCopyS = command.arguments[3];

        boolean isCopy = false;
        if (isCopyS != null) {
            isCopy = Boolean.parseBoolean(isCopyS);
        }

        if (account.getErrorFolderName().equals(srcFolder)) {
            return;
        }

        Store remoteStore = account.getRemoteStore();
        Folder remoteSrcFolder = remoteStore.getFolder(srcFolder);
        Folder remoteDestFolder =
remoteStore.getFolder(destFolder);

        if (!remoteSrcFolder.exists()) {
            throw new MessagingException("processPendingMoveOrCopyOld: remoteFolder " + srcFolder + " does not exist", true);
        }
        remoteSrcFolder.open(Folder.OPEN_MODE_RW);
        if (remoteSrcFolder.getMode() != Folder.OPEN_MODE_RW) {
            throw new MessagingException("processPendingMoveOrCopyOld: could not open remoteSrcFolder " + srcFolder + " read/write", true);
        }

        Message remoteMessage = null;
        // Messages with a local-only UID were never uploaded, so there is nothing to move.
        if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) {
            remoteMessage = remoteSrcFolder.getMessage(uid);
        }
        if (remoteMessage == null) {
            throw new MessagingException("processPendingMoveOrCopyOld: remoteMessage " + uid + " does not exist", true);
        }

        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "processPendingMoveOrCopyOld: source folder = " + srcFolder
                  + ", uid = " + uid + ", destination folder = " + destFolder + ", isCopy = " + isCopy);

        // Special case: a (non-copy) move into Trash is performed as a delete on the source.
        if (!isCopy && destFolder.equals(account.getTrashFolderName())) {
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "processPendingMoveOrCopyOld doing special case for deleting message");

            remoteMessage.delete(account.getTrashFolderName());
            remoteSrcFolder.close();
            return;
        }

        remoteDestFolder.open(Folder.OPEN_MODE_RW);
        if (remoteDestFolder.getMode() != Folder.OPEN_MODE_RW) {
            // NOTE(review): this error message interpolates srcFolder but describes
            // remoteDestFolder — looks like it should use destFolder instead.
            throw new MessagingException("processPendingMoveOrCopyOld: could not open remoteDestFolder " + srcFolder + " read/write", true);
        }

        if (isCopy) {
            remoteSrcFolder.copyMessages(Collections.singletonList(remoteMessage), remoteDestFolder);
        } else {
            remoteSrcFolder.moveMessages(Collections.singletonList(remoteMessage), remoteDestFolder);
        }
        remoteSrcFolder.close();
        remoteDestFolder.close();
    }

    /**
     * Marks every message in the named local folder as seen, then mirrors the
     * change to the corresponding remote folder when the store supports it.
     */
    private void processPendingMarkAllAsRead(PendingCommand command, Account account) throws MessagingException {
        String folder = command.arguments[0];
        Folder remoteFolder = null;
        LocalFolder localFolder = null;
        try {
            Store localStore = account.getLocalStore();
            localFolder = (LocalFolder) localStore.getFolder(folder);
            localFolder.open(Folder.OPEN_MODE_RW);
            List<?
extends Message> messages = localFolder.getMessages(null, false);
            // Locally mark everything unseen as seen, notifying listeners per message.
            for (Message message : messages) {
                if (!message.isSet(Flag.SEEN)) {
                    message.setFlag(Flag.SEEN, true);
                    for (MessagingListener l : getListeners()) {
                        l.listLocalMessagesUpdateMessage(account, folder, message);
                    }
                }
            }

            for (MessagingListener l : getListeners()) {
                l.folderStatusChanged(account, folder, 0);
            }

            // The error folder is local-only; never push the change to a server.
            if (account.getErrorFolderName().equals(folder)) {
                return;
            }

            Store remoteStore = account.getRemoteStore();
            remoteFolder = remoteStore.getFolder(folder);
            if (!remoteFolder.exists() || !remoteFolder.isFlagSupported(Flag.SEEN)) {
                return;
            }
            remoteFolder.open(Folder.OPEN_MODE_RW);
            if (remoteFolder.getMode() != Folder.OPEN_MODE_RW) {
                return;
            }

            remoteFolder.setFlags(Collections.singleton(Flag.SEEN), true);
            remoteFolder.close();
        } catch (UnsupportedOperationException uoe) {
            Log.w(K9.LOG_TAG, "Could not mark all server-side as read because store doesn't support operation", uoe);
        } finally {
            closeFolder(localFolder);
            closeFolder(remoteFolder);
        }
    }

    /**
     * Shows a notification prompting the user to review server settings when the
     * given exception is a CertificateValidationException that needs user attention.
     *
     * @param incoming {@code true} for the incoming server, {@code false} for outgoing.
     */
    void notifyUserIfCertificateProblem(Context context, Exception e, Account account, boolean incoming) {
        if (!(e instanceof CertificateValidationException)) {
            return;
        }

        CertificateValidationException cve = (CertificateValidationException) e;
        if (!cve.needsUserAttention()) {
            return;
        }

        // Distinct notification id per account and per direction (incoming/outgoing).
        final int id = incoming
                ? K9.CERTIFICATE_EXCEPTION_NOTIFICATION_INCOMING + account.getAccountNumber()
                : K9.CERTIFICATE_EXCEPTION_NOTIFICATION_OUTGOING + account.getAccountNumber();
        final Intent i = incoming ?
AccountSetupIncoming.intentActionEditIncomingSettings(context, account)
                : AccountSetupOutgoing.intentActionEditOutgoingSettings(context, account);
        final PendingIntent pi = PendingIntent.getActivity(context,
                account.getAccountNumber(), i, PendingIntent.FLAG_UPDATE_CURRENT);
        final String title = context.getString(
                R.string.notification_certificate_error_title, account.getDescription());

        final NotificationCompat.Builder builder = new NotificationCompat.Builder(context);
        builder.setSmallIcon(platformSupportsLockScreenNotifications()
                ? R.drawable.ic_notify_new_mail_vector
                : R.drawable.ic_notify_new_mail);
        builder.setWhen(System.currentTimeMillis());
        builder.setAutoCancel(true);
        builder.setTicker(title);
        builder.setContentTitle(title);
        builder.setContentText(context.getString(R.string.notification_certificate_error_text));
        builder.setContentIntent(pi);
        builder.setVisibility(NotificationCompat.VISIBILITY_PUBLIC);

        configureNotification(builder, null, null, K9.NOTIFICATION_LED_FAILURE_COLOR,
                K9.NOTIFICATION_LED_BLINK_FAST, true);

        final NotificationManager nm = (NotificationManager)
                context.getSystemService(Context.NOTIFICATION_SERVICE);
        nm.notify(null, id, builder.build());
    }

    /**
     * Cancels the certificate-error notification raised by
     * notifyUserIfCertificateProblem() for the given account and direction.
     */
    public void clearCertificateErrorNotifications(Context context,
            final Account account, CheckDirection direction) {
        final NotificationManager nm = (NotificationManager)
                context.getSystemService(Context.NOTIFICATION_SERVICE);

        if (direction == CheckDirection.INCOMING) {
            nm.cancel(null, K9.CERTIFICATE_EXCEPTION_NOTIFICATION_INCOMING + account.getAccountNumber());
        } else {
            nm.cancel(null, K9.CERTIFICATE_EXCEPTION_NOTIFICATION_OUTGOING + account.getAccountNumber());
        }
    }

    // NOTE(review): uidfill is not referenced anywhere in this part of the file —
    // confirm it is still used before relying on it.
    static long uidfill = 0;

    // Re-entrancy guard for addErrorMessage(); see the compareAndSet call there.
    static AtomicBoolean loopCatch = new AtomicBoolean();

    /**
     * Renders the throwable (plus app version and device info) to text and stores
     * it as a message in the account's error folder via
     * {@link #addErrorMessage(Account, String, String)}. A {@code null} throwable
     * is ignored; a {@code null} subject defaults to the root-cause message.
     */
    public void addErrorMessage(Account account, String subject, Throwable t) {
        try {
            if (t == null) {
                return;
            }

            CharArrayWriter baos = new CharArrayWriter(t.getStackTrace().length * 10);
            PrintWriter ps = new PrintWriter(baos);
            try {
                PackageInfo packageInfo =
context.getPackageManager().getPackageInfo(
                        context.getPackageName(), 0);
                ps.format("K9-Mail version: %s\r\n", packageInfo.versionName);
            } catch (Exception e) {
                // ignore
            }
            ps.format("Device make: %s\r\n", Build.MANUFACTURER);
            ps.format("Device model: %s\r\n", Build.MODEL);
            ps.format("Android version: %s\r\n\r\n", Build.VERSION.RELEASE);
            t.printStackTrace(ps);
            ps.close();

            if (subject == null) {
                subject = getRootCauseMessage(t);
            }

            addErrorMessage(account, subject, baos.toString());
        } catch (Throwable it) {
            Log.e(K9.LOG_TAG, "Could not save error message to " + account.getErrorFolderName(), it);
        }
    }

    /**
     * Appends a debug-only error message to the account's error folder.
     * Only active when K9.DEBUG is set; guarded against re-entrant calls by
     * loopCatch. Error messages older than 15 minutes are pruned afterwards.
     */
    public void addErrorMessage(Account account, String subject, String body) {
        if (!K9.DEBUG) {
            return;
        }

        // If another addErrorMessage() is already in progress (e.g. this call was
        // triggered while saving a previous error), bail out to avoid looping.
        if (!loopCatch.compareAndSet(false, true)) {
            return;
        }
        try {
            if (body == null || body.length() < 1) {
                return;
            }

            Store localStore = account.getLocalStore();
            LocalFolder localFolder = (LocalFolder)localStore.getFolder(account.getErrorFolderName());
            MimeMessage message = new MimeMessage();

            MimeMessageHelper.setBody(message, new TextBody(body));
            message.setFlag(Flag.X_DOWNLOADED_FULL, true);
            message.setSubject(subject);

            long nowTime = System.currentTimeMillis();
            Date nowDate = new Date(nowTime);
            message.setInternalDate(nowDate);
            message.addSentDate(nowDate, K9.hideTimeZone());
            message.setFrom(new Address(account.getEmail(), "K9mail internal"));

            localFolder.appendMessages(Collections.singletonList(message));
            // Keep the error folder small: drop entries older than 15 minutes.
            localFolder.clearMessagesOlderThan(nowTime - (15 * 60 * 1000));
        } catch (Throwable it) {
            Log.e(K9.LOG_TAG, "Could not save error message to " + account.getErrorFolderName(), it);
        } finally {
            loopCatch.set(false);
        }
    }

    /**
     * Queues a PENDING_COMMAND_MARK_ALL_AS_READ command for the folder and starts
     * processing pending commands for the account.
     */
    public void markAllMessagesRead(final Account account, final String folder) {
        if (K9.DEBUG)
            Log.i(K9.LOG_TAG, "Marking all messages in " + account.getDescription() + ":" + folder + " as read");
        List<String> args = new ArrayList<String>();
        args.add(folder);
        PendingCommand command = new PendingCommand();
        command.command = PENDING_COMMAND_MARK_ALL_AS_READ;
command.arguments = args.toArray(EMPTY_STRING_ARRAY);
        queuePendingCommand(account, command);
        processPendingCommands(account);
    }

    /**
     * Sets or removes a flag on the given messages (by database id), updating the
     * flag cache immediately and doing the local/remote work on a background thread.
     */
    public void setFlag(final Account account, final List<Long> messageIds, final Flag flag,
            final boolean newState) {

        setFlagInCache(account, messageIds, flag, newState);

        threadPool.execute(new Runnable() {
            @Override
            public void run() {
                setFlagSynchronous(account, messageIds, flag, newState, false);
            }
        });
    }

    /**
     * Sets or removes a flag on whole threads (by thread root id); otherwise the
     * same as {@link #setFlag(Account, List, Flag, boolean)}.
     */
    public void setFlagForThreads(final Account account, final List<Long> threadRootIds,
            final Flag flag, final boolean newState) {

        setFlagForThreadsInCache(account, threadRootIds, flag, newState);

        threadPool.execute(new Runnable() {
            @Override
            public void run() {
                setFlagSynchronous(account, threadRootIds, flag, newState, true);
            }
        });
    }

    /**
     * Applies the flag change to the local database first (fast, so the UI can
     * refresh), then queues the change for each affected remote folder.
     *
     * @param threadedList {@code true} if {@code ids} are thread root ids,
     *                     {@code false} if they are message ids.
     */
    private void setFlagSynchronous(final Account account, final List<Long> ids,
            final Flag flag, final boolean newState, final boolean threadedList) {

        LocalStore localStore;
        try {
            localStore = account.getLocalStore();
        } catch (MessagingException e) {
            Log.e(K9.LOG_TAG, "Couldn't get LocalStore instance", e);
            return;
        }

        // Update affected messages in the database. This should be as fast as possible so the UI
        // can be updated with the new state.
        try {
            if (threadedList) {
                localStore.setFlagForThreads(ids, flag, newState);
                removeFlagForThreadsFromCache(account, ids, flag);
            } else {
                localStore.setFlag(ids, flag, newState);
                removeFlagFromCache(account, ids, flag);
            }
        } catch (MessagingException e) {
            Log.e(K9.LOG_TAG, "Couldn't set flags in local database", e);
        }

        // Read folder name and UID of messages from the database
        Map<String, List<String>> folderMap;
        try {
            folderMap = localStore.getFoldersAndUids(ids, threadedList);
        } catch (MessagingException e) {
            Log.e(K9.LOG_TAG, "Couldn't get folder name and UID of messages", e);
            return;
        }

        // Loop over all folders
        for (Entry<String, List<String>> entry : folderMap.entrySet()) {
            String folderName = entry.getKey();

            // Notify listeners of changed folder status
            LocalFolder localFolder = localStore.getFolder(folderName);
            try {
                int unreadMessageCount = localFolder.getUnreadMessageCount();
                for (MessagingListener l : getListeners()) {
                    l.folderStatusChanged(account, folderName, unreadMessageCount);
                }
            } catch (MessagingException e) {
                Log.w(K9.LOG_TAG, "Couldn't get unread count for folder: " + folderName, e);
            }

            // The error folder is always a local folder
            // TODO: Skip the remote part for all local-only folders
            if (account.getErrorFolderName().equals(folderName)) {
                continue;
            }

            // Send flag change to server
            String[] uids = entry.getValue().toArray(EMPTY_STRING_ARRAY);
            queueSetFlag(account, folderName, Boolean.toString(newState), flag.toString(), uids);
            processPendingCommands(account);
        }
    }

    /**
     * Set or remove a flag for a set of messages in a specific folder.
     *
     * <p>
     * The {@link Message} objects passed in are updated to reflect the new flag state.
     * </p>
     *
     * @param account
     *         The account the folder containing the messages belongs to.
     * @param folderName
     *         The name of the folder.
     * @param messages
     *         The messages to change the flag for.
     * @param flag
     *         The flag to change.
     * @param newState
     *         {@code true}, if the flag should be set. {@code false} if it should be removed.
*/
    public void setFlag(Account account, String folderName, List<? extends Message> messages,
            Flag flag, boolean newState) {
        // TODO: Put this into the background, but right now some callers depend on the message
        // objects being modified right after this method returns.
        Folder localFolder = null;
        try {
            Store localStore = account.getLocalStore();
            localFolder = localStore.getFolder(folderName);
            localFolder.open(Folder.OPEN_MODE_RW);

            // Allows for re-allowing sending of messages that could not be sent
            if (flag == Flag.FLAGGED && !newState &&
                    account.getOutboxFolderName().equals(folderName)) {
                for (Message message : messages) {
                    String uid = message.getUid();
                    if (uid != null) {
                        // Reset the per-message send-attempt counter.
                        sendCount.remove(uid);
                    }
                }
            }

            // Update the messages in the local store
            localFolder.setFlags(messages, Collections.singleton(flag), newState);

            int unreadMessageCount = localFolder.getUnreadMessageCount();
            for (MessagingListener l : getListeners()) {
                l.folderStatusChanged(account, folderName, unreadMessageCount);
            }

            /*
             * Handle the remote side
             */

            // The error folder is always a local folder
            // TODO: Skip the remote part for all local-only folders
            if (account.getErrorFolderName().equals(folderName)) {
                return;
            }

            String[] uids = new String[messages.size()];
            for (int i = 0, end = uids.length; i < end; i++) {
                uids[i] = messages.get(i).getUid();
            }

            queueSetFlag(account, folderName, Boolean.toString(newState), flag.toString(), uids);
            processPendingCommands(account);
        } catch (MessagingException me) {
            addErrorMessage(account, null, me);
            throw new RuntimeException(me);
        } finally {
            closeFolder(localFolder);
        }
    }

    /**
     * Set or remove a flag for a message referenced by message UID.
     *
     * @param account
     *         The account the folder containing the message belongs to.
     * @param folderName
     *         The name of the folder.
     * @param uid
     *         The UID of the message to change the flag for.
     * @param flag
     *         The flag to change.
     * @param newState
     *         {@code true}, if the flag should be set. {@code false} if it should be removed.
*/
    public void setFlag(Account account, String folderName, String uid, Flag flag,
            boolean newState) {
        Folder localFolder = null;
        try {
            LocalStore localStore = account.getLocalStore();
            localFolder = localStore.getFolder(folderName);
            localFolder.open(Folder.OPEN_MODE_RW);

            Message message = localFolder.getMessage(uid);
            if (message != null) {
                // Delegate to the list-based overload, which also handles the remote side.
                setFlag(account, folderName, Collections.singletonList(message), flag, newState);
            }
        } catch (MessagingException me) {
            addErrorMessage(account, null, me);
            throw new RuntimeException(me);
        } finally {
            closeFolder(localFolder);
        }
    }

    /**
     * Removes every queued pending command for the account.
     */
    public void clearAllPending(final Account account) {
        try {
            Log.w(K9.LOG_TAG, "Clearing pending commands!");
            LocalStore localStore = account.getLocalStore();
            localStore.removePendingCommands();
        } catch (MessagingException me) {
            Log.e(K9.LOG_TAG, "Unable to clear pending command", me);
            addErrorMessage(account, null, me);
        }
    }

    /**
     * Asynchronous wrapper around loadMessageForViewRemoteSynchronous()
     * with {@code force} and {@code loadPartialFromSearch} both {@code false}.
     */
    public void loadMessageForViewRemote(final Account account, final String folder,
            final String uid, final MessagingListener listener) {
        put("loadMessageForViewRemote", listener, new Runnable() {
            @Override
            public void run() {
                loadMessageForViewRemoteSynchronous(account, folder, uid, listener, false, false);
            }
        });
    }

    /**
     * Makes sure the message is fully downloaded into the local store (fetching
     * from the remote folder when necessary), then notifies the listeners that
     * headers/body are available.
     *
     * @return {@code true} on success, {@code false} when loading failed.
     */
    public boolean loadMessageForViewRemoteSynchronous(final Account account, final String folder,
            final String uid, final MessagingListener listener, final boolean force,
            final boolean loadPartialFromSearch) {
        Folder remoteFolder = null;
        LocalFolder localFolder = null;
        try {
            LocalStore localStore = account.getLocalStore();
            localFolder = localStore.getFolder(folder);
            localFolder.open(Folder.OPEN_MODE_RW);

            LocalMessage message = localFolder.getMessage(uid);

            if (uid.startsWith(K9.LOCAL_UID_PREFIX)) {
                Log.w(K9.LOG_TAG, "Message has local UID so cannot download fully.");
                // ASH move toast
                android.widget.Toast.makeText(context,
                        "Message has local UID so cannot download fully",
                        android.widget.Toast.LENGTH_LONG).show();
                // TODO: Using X_DOWNLOADED_FULL is wrong because it's only a partial message. But
                // one we can't download completely. Maybe add a new flag; X_PARTIAL_MESSAGE ?
                message.setFlag(Flag.X_DOWNLOADED_FULL, true);
                message.setFlag(Flag.X_DOWNLOADED_PARTIAL, false);
            }
            /* commented out because this was pulled from another unmerged branch:
            } else if (localFolder.isLocalOnly() && !force) {
                Log.w(K9.LOG_TAG, "Message in local-only folder so cannot download fully.");
                // ASH move toast
                android.widget.Toast.makeText(mApplication,
                        "Message in local-only folder so cannot download fully",
                        android.widget.Toast.LENGTH_LONG).show();
                message.setFlag(Flag.X_DOWNLOADED_FULL, true);
                message.setFlag(Flag.X_DOWNLOADED_PARTIAL, false);
            }*/

            if (message.isSet(Flag.X_DOWNLOADED_FULL)) {
                /*
                 * If the message has been synchronized since we were called we'll
                 * just hand it back cause it's ready to go.
                 */
                FetchProfile fp = new FetchProfile();
                fp.add(FetchProfile.Item.ENVELOPE);
                fp.add(FetchProfile.Item.BODY);
                localFolder.fetch(Collections.singletonList(message), fp, null);
            } else {
                /*
                 * At this point the message is not available, so we need to download it
                 * fully if possible.
*/
                Store remoteStore = account.getRemoteStore();
                remoteFolder = remoteStore.getFolder(folder);
                remoteFolder.open(Folder.OPEN_MODE_RW);

                // Get the remote message and fully download it
                Message remoteMessage = remoteFolder.getMessage(uid);
                FetchProfile fp = new FetchProfile();
                fp.add(FetchProfile.Item.BODY);
                remoteFolder.fetch(Collections.singletonList(remoteMessage), fp, null);

                // Store the message locally and load the stored message into memory
                localFolder.appendMessages(Collections.singletonList(remoteMessage));
                if (loadPartialFromSearch) {
                    fp.add(FetchProfile.Item.BODY);
                }
                fp.add(FetchProfile.Item.ENVELOPE);
                message = localFolder.getMessage(uid);
                localFolder.fetch(Collections.singletonList(message), fp, null);

                // Mark that this message is now fully synched
                if (account.isMarkMessageAsReadOnView()) {
                    message.setFlag(Flag.SEEN, true);
                }
                message.setFlag(Flag.X_DOWNLOADED_FULL, true);
            }

            // now that we have the full message, refresh the headers
            for (MessagingListener l : getListeners(listener)) {
                l.loadMessageForViewHeadersAvailable(account, folder, uid, message);
            }

            for (MessagingListener l : getListeners(listener)) {
                l.loadMessageForViewBodyAvailable(account, folder, uid, message);
            }

            for (MessagingListener l : getListeners(listener)) {
                l.loadMessageForViewFinished(account, folder, uid, message);
            }
            return true;
        } catch (Exception e) {
            for (MessagingListener l : getListeners(listener)) {
                l.loadMessageForViewFailed(account, folder, uid, e);
            }
            notifyUserIfCertificateProblem(context, e, account, true);
            addErrorMessage(account, null, e);
            return false;
        } finally {
            closeFolder(remoteFolder);
            closeFolder(localFolder);
        }
    }

    /**
     * Loads a message for display on a background thread, notifying the listeners
     * as headers and body become available. Falls back to a remote download when
     * the message is not yet downloaded (fully or partially).
     */
    public void loadMessageForView(final Account account, final String folder, final String uid,
            final MessagingListener listener) {
        for (MessagingListener l : getListeners(listener)) {
            l.loadMessageForViewStarted(account, folder, uid);
        }
        threadPool.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(folder);
                    localFolder.open(Folder.OPEN_MODE_RW);

                    LocalMessage message = localFolder.getMessage(uid);
                    if (message == null || message.getId() == 0) {
                        throw new IllegalArgumentException("Message not found: folder=" + folder + ", uid=" + uid);
                    }

                    // IMAP search results will usually need to be downloaded before viewing.
                    // TODO: limit by account.getMaximumAutoDownloadMessageSize().
                    if (!message.isSet(Flag.X_DOWNLOADED_FULL) &&
                            !message.isSet(Flag.X_DOWNLOADED_PARTIAL)) {
                        if (loadMessageForViewRemoteSynchronous(account, folder, uid, listener,
                                false, true)) {
                            markMessageAsReadOnView(account, message);
                        }
                        return;
                    }

                    for (MessagingListener l : getListeners(listener)) {
                        l.loadMessageForViewHeadersAvailable(account, folder, uid, message);
                    }

                    FetchProfile fp = new FetchProfile();
                    fp.add(FetchProfile.Item.ENVELOPE);
                    fp.add(FetchProfile.Item.BODY);
                    localFolder.fetch(Collections.singletonList(message), fp, null);
                    localFolder.close();

                    for (MessagingListener l : getListeners(listener)) {
                        l.loadMessageForViewBodyAvailable(account, folder, uid, message);
                    }

                    for (MessagingListener l : getListeners(listener)) {
                        l.loadMessageForViewFinished(account, folder, uid, message);
                    }

                    markMessageAsReadOnView(account, message);
                } catch (Exception e) {
                    for (MessagingListener l : getListeners(listener)) {
                        l.loadMessageForViewFailed(account, folder, uid, e);
                    }
                    addErrorMessage(account, null, e);
                }
            }
        });
    }

    /**
     * Synchronously loads a message from the local store, fetches its body,
     * optionally marks it read (per account setting), and returns it.
     *
     * @throws IllegalArgumentException if the message is not present locally.
     */
    public LocalMessage loadMessage(Account account, String folderName, String uid) throws MessagingException {
        LocalStore localStore = account.getLocalStore();
        LocalFolder localFolder = localStore.getFolder(folderName);
        localFolder.open(Folder.OPEN_MODE_RW);

        LocalMessage message = localFolder.getMessage(uid);
        if (message == null || message.getId() == 0) {
            throw new IllegalArgumentException("Message not found: folder=" + folderName + ", uid=" + uid);
        }

        FetchProfile fp = new FetchProfile();
        fp.add(FetchProfile.Item.BODY);
localFolder.fetch(Collections.singletonList(message), fp, null);
        localFolder.close();

        markMessageAsReadOnView(account, message);

        return message;
    }

    /**
     * Marks the message seen — locally and (via setFlag) remotely — when the
     * account is configured to mark messages read on view and it is still unread.
     */
    private void markMessageAsReadOnView(Account account, LocalMessage message)
            throws MessagingException {

        if (account.isMarkMessageAsReadOnView() && !message.isSet(Flag.SEEN)) {
            List<Long> messageIds = Collections.singletonList(message.getId());
            setFlag(account, messageIds, Flag.SEEN, true);

            message.setFlagInternal(Flag.SEEN, true);
        }
    }

    /**
     * Downloads a single message part (attachment) from the server, stores it on
     * the local message, and notifies the listeners on success or failure.
     */
    public void loadAttachment(final Account account, final LocalMessage message, final Part part,
            final MessagingListener listener) {

        put("loadAttachment", listener, new Runnable() {
            @Override
            public void run() {
                Folder remoteFolder = null;
                LocalFolder localFolder = null;
                try {
                    String folderName = message.getFolder().getName();

                    LocalStore localStore = account.getLocalStore();
                    localFolder = localStore.getFolder(folderName);

                    Store remoteStore = account.getRemoteStore();
                    remoteFolder = remoteStore.getFolder(folderName);
                    remoteFolder.open(Folder.OPEN_MODE_RW);

                    Message remoteMessage = remoteFolder.getMessage(message.getUid());
                    remoteFolder.fetchPart(remoteMessage, part, null);

                    localFolder.addPartToMessage(message, part);

                    for (MessagingListener l : getListeners(listener)) {
                        l.loadAttachmentFinished(account, message, part);
                    }
                } catch (MessagingException me) {
                    if (K9.DEBUG)
                        Log.v(K9.LOG_TAG, "Exception loading attachment", me);

                    for (MessagingListener l : getListeners(listener)) {
                        l.loadAttachmentFailed(account, message, part, me.getMessage());
                    }
                    notifyUserIfCertificateProblem(context, me, account, true);
                    addErrorMessage(account, null, me);
                } finally {
                    closeFolder(localFolder);
                    closeFolder(remoteFolder);
                }
            }
        });
    }

    /**
     * Stores the given message in the Outbox and starts a sendPendingMessages command to
     * attempt to send the message.
* @param account
     * @param message
     * @param listener
     */
    public void sendMessage(final Account account,
            final Message message,
            MessagingListener listener) {
        try {
            LocalStore localStore = account.getLocalStore();
            LocalFolder localFolder = localStore.getFolder(account.getOutboxFolderName());
            localFolder.open(Folder.OPEN_MODE_RW);
            localFolder.appendMessages(Collections.singletonList(message));
            Message localMessage = localFolder.getMessage(message.getUid());
            localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
            localFolder.close();
            sendPendingMessages(account, listener);
        } catch (Exception e) {
            /*
            for (MessagingListener l : getListeners()) {
                // TODO general failed
            }
            */
            addErrorMessage(account, null, e);
        }
    }

    /**
     * Attempts to send pending messages for every available account.
     */
    public void sendPendingMessages(MessagingListener listener) {
        final Preferences prefs = Preferences.getPreferences(context);
        for (Account account : prefs.getAvailableAccounts()) {
            sendPendingMessages(account, listener);
        }
    }

    /**
     * Attempt to send any messages that are sitting in the Outbox.
     * @param account
     * @param listener
     */
    public void sendPendingMessages(final Account account,
            MessagingListener listener) {
        putBackground("sendPendingMessages", listener, new Runnable() {
            @Override
            public void run() {
                if (!account.isAvailable(context)) {
                    throw new UnavailableAccountException();
                }
                if (messagesPendingSend(account)) {

                    notifyWhileSending(account);

                    try {
                        sendPendingMessagesSynchronous(account);
                    } finally {
                        // Always clear the ongoing notification, even on failure.
                        notifyWhileSendingDone(account);
                    }
                }
            }
        });
    }

    /** Cancels the notification with the given id. */
    private void cancelNotification(int id) {
        NotificationManager notifMgr =
            (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);

        notifMgr.cancel(id);
    }

    /** Removes the ongoing "sending" notification shown by notifyWhileSending(). */
    private void notifyWhileSendingDone(Account account) {
        if (account.isShowOngoing()) {
            cancelNotification(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber());
        }
    }

    /**
     * Display an ongoing notification while a message is being sent.
     *
     * @param account
     *         The account the message is sent from. Never {@code null}.
*/
    private void notifyWhileSending(Account account) {
        if (!account.isShowOngoing()) {
            return;
        }

        NotificationManager notifMgr =
            (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);

        NotificationCompat.Builder builder = new NotificationCompat.Builder(context);
        builder.setSmallIcon(R.drawable.ic_notify_check_mail);
        builder.setWhen(System.currentTimeMillis());
        builder.setOngoing(true);

        // Fall back to the e-mail address when the account has no description.
        String accountDescription = account.getDescription();
        String accountName = (TextUtils.isEmpty(accountDescription)) ?
                account.getEmail() : accountDescription;
        builder.setTicker(context.getString(R.string.notification_bg_send_ticker,
                accountName));
        builder.setContentTitle(context.getString(R.string.notification_bg_send_title));
        builder.setContentText(account.getDescription());

        TaskStackBuilder stack = buildMessageListBackStack(context, account,
                account.getInboxFolderName());
        builder.setContentIntent(stack.getPendingIntent(0, 0));
        builder.setVisibility(NotificationCompat.VISIBILITY_PUBLIC);

        if (K9.NOTIFICATION_LED_WHILE_SYNCING) {
            configureNotification(builder, null, null,
                    account.getNotificationSetting().getLedColor(),
                    K9.NOTIFICATION_LED_BLINK_FAST, true);
        }

        notifMgr.notify(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber(),
                builder.build());
    }

    /** Temporary send failure: the notification points at the Outbox, where the message stays. */
    private void notifySendTempFailed(Account account, Exception lastFailure) {
        notifySendFailed(account, lastFailure, account.getOutboxFolderName());
    }

    /** Permanent send failure: the notification points at the Drafts folder. */
    private void notifySendPermFailed(Account account, Exception lastFailure) {
        notifySendFailed(account, lastFailure, account.getDraftsFolderName());
    }

    /**
     * Display a notification when sending a message has failed.
     *
     * @param account
     *         The account that was used to sent the message.
     * @param lastFailure
     *         The {@link Exception} instance that indicated sending the message has failed.
     * @param openFolder
     *         The name of the folder to open when the notification is clicked.
*/
    private void notifySendFailed(Account account, Exception lastFailure, String openFolder) {
        // NOTE(review): the openFolder parameter is not used anywhere in this body —
        // the content intent opens the folder list instead. Confirm whether that is intended.
        NotificationManager notifMgr =
            (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);

        NotificationCompat.Builder builder = new NotificationCompat.Builder(context);
        builder.setSmallIcon(platformSupportsLockScreenNotifications()
                ? R.drawable.ic_notify_new_mail_vector
                : R.drawable.ic_notify_new_mail);
        builder.setWhen(System.currentTimeMillis());
        builder.setAutoCancel(true);
        builder.setTicker(context.getString(R.string.send_failure_subject));
        builder.setContentTitle(context.getString(R.string.send_failure_subject));
        builder.setContentText(getRootCauseMessage(lastFailure));

        TaskStackBuilder stack = buildFolderListBackStack(context, account);
        builder.setContentIntent(stack.getPendingIntent(0, 0));
        builder.setVisibility(NotificationCompat.VISIBILITY_PUBLIC);

        configureNotification(builder, null, null, K9.NOTIFICATION_LED_FAILURE_COLOR,
                K9.NOTIFICATION_LED_BLINK_FAST, true);

        notifMgr.notify(K9.SEND_FAILED_NOTIFICATION - account.getAccountNumber(),
                builder.build());
    }

    /**
     * Display an ongoing notification while checking for new messages on the server.
     *
     * @param account
     *         The account that is checked for new messages. Never {@code null}.
     * @param folder
     *         The folder that is being checked for new messages. Never {@code null}.
*/
    private void notifyFetchingMail(final Account account, final Folder folder) {
        if (!account.isShowOngoing()) {
            return;
        }

        final NotificationManager notifMgr =
            (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);

        NotificationCompat.Builder builder = new NotificationCompat.Builder(context);
        builder.setSmallIcon(R.drawable.ic_notify_check_mail);
        builder.setWhen(System.currentTimeMillis());
        builder.setOngoing(true);
        builder.setTicker(context.getString(
                R.string.notification_bg_sync_ticker, account.getDescription(), folder.getName()));
        builder.setContentTitle(context.getString(R.string.notification_bg_sync_title));
        builder.setContentText(account.getDescription() +
                context.getString(R.string.notification_bg_title_separator) +
                folder.getName());

        TaskStackBuilder stack = buildMessageListBackStack(context, account,
                account.getInboxFolderName());
        builder.setContentIntent(stack.getPendingIntent(0, 0));
        builder.setVisibility(NotificationCompat.VISIBILITY_PUBLIC);

        if (K9.NOTIFICATION_LED_WHILE_SYNCING) {
            configureNotification(builder, null, null,
                    account.getNotificationSetting().getLedColor(),
                    K9.NOTIFICATION_LED_BLINK_FAST, true);
        }

        notifMgr.notify(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber(),
                builder.build());
    }

    /** Cancels the ongoing "checking mail" notification for the account. */
    private void notifyFetchingMailCancel(final Account account) {
        if (account.isShowOngoing()) {
            cancelNotification(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber());
        }
    }

    /**
     * Returns {@code true} if the account's Outbox exists and contains at least
     * one message. Errors while checking are logged and treated as nothing pending.
     */
    public boolean messagesPendingSend(final Account account) {
        Folder localFolder = null;
        try {
            localFolder = account.getLocalStore().getFolder(
                              account.getOutboxFolderName());
            if (!localFolder.exists()) {
                return false;
            }

            localFolder.open(Folder.OPEN_MODE_RW);

            if (localFolder.getMessageCount() > 0) {
                return true;
            }
        } catch (Exception e) {
            Log.e(K9.LOG_TAG, "Exception while checking for unsent messages", e);
        } finally {
            closeFolder(localFolder);
        }
        return false;
    }

    /**
     * Attempt to send any messages that are sitting in the Outbox.
* @param account
     */
    public void sendPendingMessagesSynchronous(final Account account) {
        Folder localFolder = null;
        Exception lastFailure = null;
        try {
            Store localStore = account.getLocalStore();
            localFolder = localStore.getFolder(
                              account.getOutboxFolderName());
            if (!localFolder.exists()) {
                // No Outbox means nothing to send.
                return;
            }
            for (MessagingListener l : getListeners()) {
                l.sendPendingMessagesStarted(account);
            }
            localFolder.open(Folder.OPEN_MODE_RW);

            List<? extends Message> localMessages = localFolder.getMessages(null);
            int progress = 0;
            int todo = localMessages.size();
            for (MessagingListener l : getListeners()) {
                l.synchronizeMailboxProgress(account, account.getSentFolderName(), progress, todo);
            }
            /*
             * The profile we will use to pull all of the content
             * for a given local message into memory for sending.
             */
            FetchProfile fp = new FetchProfile();
            fp.add(FetchProfile.Item.ENVELOPE);
            fp.add(FetchProfile.Item.BODY);

            if (K9.DEBUG)
                Log.i(K9.LOG_TAG, "Scanning folder '" + account.getOutboxFolderName() + "' (" + ((LocalFolder)localFolder).getId() + ") for messages to send");

            Transport transport = Transport.getInstance(K9.app, account);
            for (Message message : localMessages) {
                if (message.isSet(Flag.DELETED)) {
                    message.destroy();
                    continue;
                }
                try {
                    // Track per-message send attempts so a permanently failing
                    // message cannot block the Outbox forever.
                    AtomicInteger count = new AtomicInteger(0);
                    AtomicInteger oldCount = sendCount.putIfAbsent(message.getUid(), count);
                    if (oldCount != null) {
                        count = oldCount;
                    }

                    if (K9.DEBUG)
                        Log.i(K9.LOG_TAG, "Send count for message " + message.getUid() + " is " + count.get());

                    if (count.incrementAndGet() > K9.MAX_SEND_ATTEMPTS) {
                        Log.e(K9.LOG_TAG, "Send count for message " + message.getUid() + " can't be delivered after " + K9.MAX_SEND_ATTEMPTS + " attempts. Giving up until the user restarts the device");
                        notifySendTempFailed(account, new MessagingException(message.getSubject()));
                        continue;
                    }

                    localFolder.fetch(Collections.singletonList(message), fp, null);
                    try {
                        // The identity header is only present on drafts; never send those.
                        if (message.getHeader(K9.IDENTITY_HEADER) != null) {
                            Log.v(K9.LOG_TAG, "The user has set the Outbox and Drafts folder to the same thing. " +
                                  "This message appears to be a draft, so K-9 will not send it");
                            continue;
                        }

                        message.setFlag(Flag.X_SEND_IN_PROGRESS, true);
                        if (K9.DEBUG)
                            Log.i(K9.LOG_TAG, "Sending message with UID " + message.getUid());
                        transport.sendMessage(message);
                        message.setFlag(Flag.X_SEND_IN_PROGRESS, false);
                        message.setFlag(Flag.SEEN, true);
                        progress++;
                        for (MessagingListener l : getListeners()) {
                            l.synchronizeMailboxProgress(account, account.getSentFolderName(), progress, todo);
                        }
                        if (!account.hasSentFolder()) {
                            if (K9.DEBUG)
                                Log.i(K9.LOG_TAG, "Account does not have a sent mail folder; deleting sent message");
                            message.setFlag(Flag.DELETED, true);
                        } else {
                            LocalFolder localSentFolder = (LocalFolder) localStore.getFolder(account.getSentFolderName());
                            if (K9.DEBUG)
                                Log.i(K9.LOG_TAG, "Moving sent message to folder '" + account.getSentFolderName() + "' (" + localSentFolder.getId() + ") ");

                            localFolder.moveMessages(Collections.singletonList(message), localSentFolder);

                            if (K9.DEBUG)
                                Log.i(K9.LOG_TAG, "Moved sent message to folder '" + account.getSentFolderName() + "' (" + localSentFolder.getId() + ") ");

                            // Mirror the move on the server by queuing an APPEND
                            // of the sent message into the remote Sent folder.
                            PendingCommand command = new PendingCommand();
                            command.command = PENDING_COMMAND_APPEND;
                            command.arguments = new String[] { localSentFolder.getName(), message.getUid() };
                            queuePendingCommand(account, command);
                            processPendingCommands(account);
                        }
                    } catch (Exception e) {
                        // 5.x.x errors from the SMTP server are "PERMFAIL"
                        // move the message over to drafts rather than leaving it in the outbox
                        // This is a complete hack, but is worlds better than the previous
                        // "don't even bother" functionality
                        if (getRootCauseMessage(e).startsWith("5")) {
localFolder.moveMessages(Collections.singletonList(message), (LocalFolder) localStore.getFolder(account.getDraftsFolderName())); } notifyUserIfCertificateProblem(context, e, account, false); addErrorMessage(account, "Failed to send message", e); message.setFlag(Flag.X_SEND_FAILED, true); Log.e(K9.LOG_TAG, "Failed to send message", e); for (MessagingListener l : getListeners()) { l.synchronizeMailboxFailed(account, localFolder.getName(), getRootCauseMessage(e)); } lastFailure = e; } } catch (Exception e) { Log.e(K9.LOG_TAG, "Failed to fetch message for sending", e); for (MessagingListener l : getListeners()) { l.synchronizeMailboxFailed(account, localFolder.getName(), getRootCauseMessage(e)); } addErrorMessage(account, "Failed to fetch message for sending", e); lastFailure = e; } } for (MessagingListener l : getListeners()) { l.sendPendingMessagesCompleted(account); } if (lastFailure != null) { if (getRootCauseMessage(lastFailure).startsWith("5")) { notifySendPermFailed(account, lastFailure); } else { notifySendTempFailed(account, lastFailure); } } } catch (UnavailableStorageException e) { Log.i(K9.LOG_TAG, "Failed to send pending messages because storage is not available - trying again later."); throw new UnavailableAccountException(e); } catch (Exception e) { for (MessagingListener l : getListeners()) { l.sendPendingMessagesFailed(account); } addErrorMessage(account, null, e); } finally { if (lastFailure == null) { cancelNotification(K9.SEND_FAILED_NOTIFICATION - account.getAccountNumber()); } closeFolder(localFolder); } } public void getAccountStats(final Context context, final Account account, final MessagingListener listener) { threadPool.execute(new Runnable() { @Override public void run() { try { AccountStats stats = account.getStats(context); listener.accountStatusChanged(account, stats); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Count not get unread count for account " + account.getDescription(), me); } } }); } public void 
getSearchAccountStats(final SearchAccount searchAccount, final MessagingListener listener) { threadPool.execute(new Runnable() { @Override public void run() { getSearchAccountStatsSynchronous(searchAccount, listener); } }); } public AccountStats getSearchAccountStatsSynchronous(final SearchAccount searchAccount, final MessagingListener listener) { Preferences preferences = Preferences.getPreferences(context); LocalSearch search = searchAccount.getRelatedSearch(); // Collect accounts that belong to the search String[] accountUuids = search.getAccountUuids(); List<Account> accounts; if (search.searchAllAccounts()) { accounts = preferences.getAccounts(); } else { accounts = new ArrayList<Account>(accountUuids.length); for (int i = 0, len = accountUuids.length; i < len; i++) { String accountUuid = accountUuids[i]; accounts.set(i, preferences.getAccount(accountUuid)); } } ContentResolver cr = context.getContentResolver(); int unreadMessageCount = 0; int flaggedMessageCount = 0; String[] projection = { StatsColumns.UNREAD_COUNT, StatsColumns.FLAGGED_COUNT }; for (Account account : accounts) { StringBuilder query = new StringBuilder(); List<String> queryArgs = new ArrayList<String>(); ConditionsTreeNode conditions = search.getConditions(); SqlQueryBuilder.buildWhereClause(account, conditions, query, queryArgs); String selection = query.toString(); String[] selectionArgs = queryArgs.toArray(EMPTY_STRING_ARRAY); Uri uri = Uri.withAppendedPath(EmailProvider.CONTENT_URI, "account/" + account.getUuid() + "/stats"); // Query content provider to get the account stats Cursor cursor = cr.query(uri, projection, selection, selectionArgs, null); try { if (cursor.moveToFirst()) { unreadMessageCount += cursor.getInt(0); flaggedMessageCount += cursor.getInt(1); } } finally { cursor.close(); } } // Create AccountStats instance... 
AccountStats stats = new AccountStats(); stats.unreadMessageCount = unreadMessageCount; stats.flaggedMessageCount = flaggedMessageCount; // ...and notify the listener if (listener != null) { listener.accountStatusChanged(searchAccount, stats); } return stats; } public void getFolderUnreadMessageCount(final Account account, final String folderName, final MessagingListener l) { Runnable unreadRunnable = new Runnable() { @Override public void run() { int unreadMessageCount = 0; try { Folder localFolder = account.getLocalStore().getFolder(folderName); unreadMessageCount = localFolder.getUnreadMessageCount(); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Count not get unread count for account " + account.getDescription(), me); } l.folderStatusChanged(account, folderName, unreadMessageCount); } }; put("getFolderUnread:" + account.getDescription() + ":" + folderName, l, unreadRunnable); } public boolean isMoveCapable(Message message) { return !message.getUid().startsWith(K9.LOCAL_UID_PREFIX); } public boolean isCopyCapable(Message message) { return isMoveCapable(message); } public boolean isMoveCapable(final Account account) { try { Store localStore = account.getLocalStore(); Store remoteStore = account.getRemoteStore(); return localStore.isMoveCapable() && remoteStore.isMoveCapable(); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Exception while ascertaining move capability", me); return false; } } public boolean isCopyCapable(final Account account) { try { Store localStore = account.getLocalStore(); Store remoteStore = account.getRemoteStore(); return localStore.isCopyCapable() && remoteStore.isCopyCapable(); } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Exception while ascertaining copy capability", me); return false; } } public void moveMessages(final Account account, final String srcFolder, final List<LocalMessage> messages, final String destFolder, final MessagingListener listener) { suppressMessages(account, messages); 
    putBackground("moveMessages", null, new Runnable() {
        @Override
        public void run() {
            moveOrCopyMessageSynchronous(account, srcFolder, messages, destFolder, false, listener);
        }
    });
}

// Moves every message belonging to the threads of the given messages.
public void moveMessagesInThread(final Account account, final String srcFolder,
                                 final List<LocalMessage> messages, final String destFolder) {
    suppressMessages(account, messages);

    putBackground("moveMessagesInThread", null, new Runnable() {
        @Override
        public void run() {
            try {
                List<Message> messagesInThreads = collectMessagesInThreads(account, messages);
                moveOrCopyMessageSynchronous(account, srcFolder, messagesInThreads, destFolder,
                        false, null);
            } catch (MessagingException e) {
                addErrorMessage(account, "Exception while moving messages", e);
            }
        }
    });
}

// Single-message convenience wrapper around moveMessages().
public void moveMessage(final Account account, final String srcFolder, final LocalMessage message,
                        final String destFolder, final MessagingListener listener) {
    moveMessages(account, srcFolder, Collections.singletonList(message), destFolder, listener);
}

// Copies messages in the background (no suppression: originals stay visible).
public void copyMessages(final Account account, final String srcFolder,
                         final List<? extends Message> messages, final String destFolder,
                         final MessagingListener listener) {
    putBackground("copyMessages", null, new Runnable() {
        @Override
        public void run() {
            moveOrCopyMessageSynchronous(account, srcFolder, messages, destFolder, true, listener);
        }
    });
}

// Copies every message belonging to the threads of the given messages.
public void copyMessagesInThread(final Account account, final String srcFolder,
                                 final List<? extends Message> messages, final String destFolder) {
    putBackground("copyMessagesInThread", null, new Runnable() {
        @Override
        public void run() {
            try {
                List<Message> messagesInThreads = collectMessagesInThreads(account, messages);
                moveOrCopyMessageSynchronous(account, srcFolder, messagesInThreads, destFolder,
                        true, null);
            } catch (MessagingException e) {
                addErrorMessage(account, "Exception while copying messages", e);
            }
        }
    });
}

// Single-message convenience wrapper around copyMessages().
public void copyMessage(final Account account, final String srcFolder, final Message message,
                        final String destFolder, final MessagingListener listener) {
    copyMessages(account, srcFolder, Collections.singletonList(message), destFolder, listener);
}

// Performs the local move/copy, notifies listeners, and queues the matching remote command.
private void moveOrCopyMessageSynchronous(final Account account, final String srcFolder,
        final List<? extends Message> inMessages, final String destFolder, final boolean isCopy,
        MessagingListener listener) {

    try {
        Map<String, String> uidMap = new HashMap<String, String>();
        Store localStore = account.getLocalStore();
        Store remoteStore = account.getRemoteStore();
        // Bail out silently if either store cannot perform the requested operation.
        if (!isCopy && (!remoteStore.isMoveCapable() || !localStore.isMoveCapable())) {
            return;
        }
        if (isCopy && (!remoteStore.isCopyCapable() || !localStore.isCopyCapable())) {
            return;
        }

        Folder localSrcFolder = localStore.getFolder(srcFolder);
        Folder localDestFolder = localStore.getFolder(destFolder);

        boolean unreadCountAffected = false;
        List<String> uids = new LinkedList<String>();
        for (Message message : inMessages) {
            String uid = message.getUid();
            // Local-only UIDs were never uploaded; they cannot be moved/copied remotely.
            if (!uid.startsWith(K9.LOCAL_UID_PREFIX)) {
                uids.add(uid);
            }

            if (!unreadCountAffected && !message.isSet(Flag.SEEN)) {
                unreadCountAffected = true;
            }
        }

        List<? extends Message> messages = localSrcFolder.getMessages(uids.toArray(EMPTY_STRING_ARRAY), null);
        if (messages.size() > 0) {
            // Remember the original UIDs: a move assigns new UIDs in the destination.
            Map<String, Message> origUidMap = new HashMap<String, Message>();

            for (Message message : messages) {
                origUidMap.put(message.getUid(), message);
            }

            if (K9.DEBUG)
                Log.i(K9.LOG_TAG, "moveOrCopyMessageSynchronous: source folder = " + srcFolder
                      + ", " + messages.size() + " messages, " + ", destination folder = " + destFolder + ", isCopy = " + isCopy);

            if (isCopy) {
                FetchProfile fp = new FetchProfile();
                fp.add(FetchProfile.Item.ENVELOPE);
                fp.add(FetchProfile.Item.BODY);
                localSrcFolder.fetch(messages, fp, null);
                uidMap = localSrcFolder.copyMessages(messages, localDestFolder);

                if (unreadCountAffected) {
                    // If this copy operation changes the unread count in the destination
                    // folder, notify the listeners.
                    int unreadMessageCount = localDestFolder.getUnreadMessageCount();
                    for (MessagingListener l : getListeners()) {
                        l.folderStatusChanged(account, destFolder, unreadMessageCount);
                    }
                }
            } else {
                uidMap = localSrcFolder.moveMessages(messages, localDestFolder);
                for (Map.Entry<String, Message> entry : origUidMap.entrySet()) {
                    String origUid = entry.getKey();
                    Message message = entry.getValue();
                    for (MessagingListener l : getListeners()) {
                        l.messageUidChanged(account, srcFolder, origUid, message.getUid());
                    }
                }
                unsuppressMessages(account, messages);

                if (unreadCountAffected) {
                    // If this move operation changes the unread count, notify the listeners
                    // that the unread count changed in both the source and destination folder.
                    int unreadMessageCountSrc = localSrcFolder.getUnreadMessageCount();
                    int unreadMessageCountDest = localDestFolder.getUnreadMessageCount();
                    for (MessagingListener l : getListeners()) {
                        l.folderStatusChanged(account, srcFolder, unreadMessageCountSrc);
                        l.folderStatusChanged(account, destFolder, unreadMessageCountDest);
                    }
                }
            }

            // Replay the same move/copy on the server via the pending-command queue.
            queueMoveOrCopy(account, srcFolder, destFolder, isCopy,
                    origUidMap.keySet().toArray(EMPTY_STRING_ARRAY), uidMap);
        }

        processPendingCommands(account);
    } catch (UnavailableStorageException e) {
        Log.i(K9.LOG_TAG, "Failed to move/copy message because storage is not available - trying again later.");
        throw new UnavailableAccountException(e);
    } catch (MessagingException me) {
        addErrorMessage(account, null, me);

        throw new RuntimeException("Error moving message", me);
    }
}

// Queues an expunge of the given folder on the server.
public void expunge(final Account account, final String folder, final MessagingListener listener) {
    putBackground("expunge", null, new Runnable() {
        @Override
        public void run() {
            queueExpunge(account, folder);
        }
    });
}

// Deletes the draft with the given database id, if it still exists.
public void deleteDraft(final Account account, long id) {
    LocalFolder localFolder = null;
    try {
        LocalStore localStore = account.getLocalStore();
        localFolder = localStore.getFolder(account.getDraftsFolderName());
        localFolder.open(Folder.OPEN_MODE_RW);
        String uid = localFolder.getMessageUidById(id);
        if (uid != null) {
            LocalMessage message = localFolder.getMessage(uid);
            if (message != null) {
                deleteMessages(Collections.singletonList(message), null);
            }
        }
    } catch (MessagingException me) {
        addErrorMessage(account, null, me);
    } finally {
        closeFolder(localFolder);
    }
}

// Deletes the complete threads of the given messages, grouped by account/folder.
public void deleteThreads(final List<LocalMessage> messages) {
    actOnMessages(messages, new MessageActor() {

        @Override
        public void act(final Account account, final Folder folder,
                        final List<Message> accountMessages) {

            suppressMessages(account, messages);

            putBackground("deleteThreads", null, new Runnable() {
                @Override
                public void run() {
                    deleteThreadsSynchronous(account, folder.getName(), accountMessages);
                }
            });
        }
    });
}
// Expands the given messages to their full threads, then deletes everything found.
public void deleteThreadsSynchronous(Account account, String folderName,
        List<Message> messages) {

    try {
        List<Message> messagesToDelete = collectMessagesInThreads(account, messages);

        deleteMessagesSynchronous(account, folderName,
                messagesToDelete, null);
    } catch (MessagingException e) {
        Log.e(K9.LOG_TAG, "Something went wrong while deleting threads", e);
    }
}

// Returns every message belonging to the threads of the given messages.
public List<Message> collectMessagesInThreads(Account account, List<? extends Message> messages)
        throws MessagingException {

    LocalStore localStore = account.getLocalStore();

    List<Message> messagesInThreads = new ArrayList<Message>();
    for (Message message : messages) {
        LocalMessage localMessage = (LocalMessage) message;
        // A rootId of -1 means this message IS the thread root.
        long rootId = localMessage.getRootId();
        long threadId = (rootId == -1) ? localMessage.getThreadId() : rootId;

        List<? extends Message> messagesInThread = localStore.getMessagesInThread(threadId);

        messagesInThreads.addAll(messagesInThread);
    }

    return messagesInThreads;
}

// Deletes messages in the background, grouped by account/folder.
public void deleteMessages(final List<LocalMessage> messages, final MessagingListener listener) {
    actOnMessages(messages, new MessageActor() {

        @Override
        public void act(final Account account, final Folder folder,
        final List<Message> accountMessages) {
            suppressMessages(account, messages);

            putBackground("deleteMessages", null, new Runnable() {
                @Override
                public void run() {
                    deleteMessagesSynchronous(account, folder.getName(),
                                              accountMessages, listener);
                }
            });
        }

    });
}

// Deletes messages locally (flag or move to Trash) and queues the matching remote
// commands according to the account's delete policy.
private void deleteMessagesSynchronous(final Account account, final String folder,
        final List<? extends Message> messages,
        MessagingListener listener) {
    Folder localFolder = null;
    Folder localTrashFolder = null;
    String[] uids = getUidsFromMessages(messages);
    try {
        //We need to make these callbacks before moving the messages to the trash
        //as messages get a new UID after being moved
        for (Message message : messages) {
            for (MessagingListener l : getListeners(listener)) {
                l.messageDeleted(account, folder, message);
            }
        }
        Store localStore = account.getLocalStore();
        localFolder = localStore.getFolder(folder);
        Map<String, String> uidMap = null;
        if (folder.equals(account.getTrashFolderName()) || !account.hasTrashFolder()) {
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "Deleting messages in trash folder or trash set to -None-, not copying");

            localFolder.setFlags(messages, Collections.singleton(Flag.DELETED), true);
        } else {
            localTrashFolder = localStore.getFolder(account.getTrashFolderName());
            if (!localTrashFolder.exists()) {
                localTrashFolder.create(Folder.FolderType.HOLDS_MESSAGES);
            }
            if (localTrashFolder.exists()) {
                if (K9.DEBUG)
                    Log.d(K9.LOG_TAG, "Deleting messages in normal folder, moving");

                uidMap = localFolder.moveMessages(messages, localTrashFolder);
            }
        }

        for (MessagingListener l : getListeners()) {
            l.folderStatusChanged(account, folder, localFolder.getUnreadMessageCount());
            if (localTrashFolder != null) {
                l.folderStatusChanged(account, account.getTrashFolderName(),
                                      localTrashFolder.getUnreadMessageCount());
            }
        }

        if (K9.DEBUG)
            Log.d(K9.LOG_TAG, "Delete policy for account " + account.getDescription()
                  + " is " + account.getDeletePolicy());

        if (folder.equals(account.getOutboxFolderName())) {
            for (Message message : messages) {
                // If the message was in the Outbox, then it has been copied to local Trash, and has
                // to be copied to remote trash
                PendingCommand command = new PendingCommand();
                command.command = PENDING_COMMAND_APPEND;
                command.arguments =
                    new String[] {
                    account.getTrashFolderName(),
                    message.getUid()
                };
                queuePendingCommand(account, command);
            }
            processPendingCommands(account);
        } else if (account.getDeletePolicy() == DeletePolicy.ON_DELETE) {
            if (folder.equals(account.getTrashFolderName())) {
                // Deleting from Trash itself: flag as deleted on the server.
                queueSetFlag(account, folder, Boolean.toString(true), Flag.DELETED.toString(), uids);
            } else {
                // Otherwise mirror the local move to Trash on the server.
                queueMoveOrCopy(account, folder, account.getTrashFolderName(), false, uids, uidMap);
            }
            processPendingCommands(account);
        } else if (account.getDeletePolicy() == DeletePolicy.MARK_AS_READ) {
            queueSetFlag(account, folder, Boolean.toString(true), Flag.SEEN.toString(), uids);
            processPendingCommands(account);
        } else {
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "Delete policy " + account.getDeletePolicy()
                      + " prevents delete from server");
        }

        unsuppressMessages(account, messages);
    } catch (UnavailableStorageException e) {
        Log.i(K9.LOG_TAG, "Failed to delete message because storage is not available - trying again later.");
        throw new UnavailableAccountException(e);
    } catch (MessagingException me) {
        addErrorMessage(account, null, me);

        throw new RuntimeException("Error deleting message from local store.", me);
    } finally {
        closeFolder(localFolder);
        closeFolder(localTrashFolder);
    }
}

private String[] getUidsFromMessages(List <?
        extends Message> messages) {
    // Collects the UIDs of the given messages into a plain array.
    String[] uids = new String[messages.size()];
    for (int i = 0; i < messages.size(); i++) {
        uids[i] = messages.get(i).getUid();
    }
    return uids;
}

// Handles the queued EMPTY_TRASH command: flags all remote Trash messages deleted,
// optionally expunges, then re-syncs so local deletes get cleaned up.
private void processPendingEmptyTrash(PendingCommand command, Account account) throws MessagingException {
    Store remoteStore = account.getRemoteStore();

    Folder remoteFolder = remoteStore.getFolder(account.getTrashFolderName());
    try {
        if (remoteFolder.exists()) {
            remoteFolder.open(Folder.OPEN_MODE_RW);
            remoteFolder.setFlags(Collections.singleton(Flag.DELETED), true);
            if (Expunge.EXPUNGE_IMMEDIATELY == account.getExpungePolicy()) {
                remoteFolder.expunge();
            }

            // When we empty trash, we need to actually synchronize the folder
            // or local deletes will never get cleaned up
            synchronizeFolder(account, remoteFolder, true, 0, null);
            compact(account, null);
        }
    } finally {
        closeFolder(remoteFolder);
    }
}

// Empties the local Trash folder and, unless Trash is local-only (POP3),
// queues an EMPTY_TRASH command for the server.
public void emptyTrash(final Account account, MessagingListener listener) {
    putBackground("emptyTrash", listener, new Runnable() {
        @Override
        public void run() {
            LocalFolder localFolder = null;
            try {
                Store localStore = account.getLocalStore();
                localFolder = (LocalFolder) localStore.getFolder(account.getTrashFolderName());
                localFolder.open(Folder.OPEN_MODE_RW);

                boolean isTrashLocalOnly = isTrashLocalOnly(account);
                if (isTrashLocalOnly) {
                    localFolder.clearAllMessages();
                } else {
                    localFolder.setFlags(Collections.singleton(Flag.DELETED), true);
                }

                for (MessagingListener l : getListeners()) {
                    l.emptyTrashCompleted(account);
                }

                if (!isTrashLocalOnly) {
                    List<String> args = new ArrayList<String>();
                    PendingCommand command = new PendingCommand();
                    command.command = PENDING_COMMAND_EMPTY_TRASH;
                    command.arguments = args.toArray(EMPTY_STRING_ARRAY);
                    queuePendingCommand(account, command);
                    processPendingCommands(account);
                }
            } catch (UnavailableStorageException e) {
                Log.i(K9.LOG_TAG, "Failed to empty trash because storage is not available - trying again later.");
                throw new UnavailableAccountException(e);
            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "emptyTrash failed", e);
                addErrorMessage(account, null, e);
            } finally {
                closeFolder(localFolder);
            }
        }
    });
}

/**
 * Find out whether the account type only supports a local Trash folder.
 *
 * <p>Note: Currently this is only the case for POP3 accounts.</p>
 *
 * @param account
 *         The account to check.
 *
 * @return {@code true} if the account only has a local Trash folder that is not synchronized
 *         with a folder on the server. {@code false} otherwise.
 *
 * @throws MessagingException
 *          In case of an error.
 */
private boolean isTrashLocalOnly(Account account) throws MessagingException {
    // TODO: Get rid of the tight coupling once we properly support local folders
    return (account.getRemoteStore() instanceof Pop3Store);
}

// Loads the full message, then hands its text and addressing off to another app
// via an ACTION_SEND chooser.
public void sendAlternate(final Context context, Account account, Message message) {
    if (K9.DEBUG)
        Log.d(K9.LOG_TAG, "About to load message " + account.getDescription() + ":" + message.getFolder().getName()
              + ":" + message.getUid() + " for sendAlternate");

    loadMessageForView(account, message.getFolder().getName(),
    message.getUid(), new MessagingListener() {
        @Override
        public void loadMessageForViewBodyAvailable(Account account, String folder, String uid,
        Message message) {
            if (K9.DEBUG)
                Log.d(K9.LOG_TAG, "Got message " + account.getDescription() + ":" + folder
                      + ":" + message.getUid() + " for sendAlternate");

            try {
                Intent msg = new Intent(Intent.ACTION_SEND);
                String quotedText = null;
                // Prefer the plain-text body; fall back to HTML.
                Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain");
                if (part == null) {
                    part = MimeUtility.findFirstPartByMimeType(message, "text/html");
                }
                if (part != null) {
                    quotedText = MessageExtractor.getTextFromPart(part);
                }
                if (quotedText != null) {
                    msg.putExtra(Intent.EXTRA_TEXT, quotedText);
                }
                msg.putExtra(Intent.EXTRA_SUBJECT, message.getSubject());

                Address[] from = message.getFrom();
                String[] senders = new String[from.length];
                for (int i = 0; i < from.length; i++) {
                    senders[i] = from[i].toString();
                }
                msg.putExtra(Intents.Share.EXTRA_FROM, senders);

                Address[] to = message.getRecipients(RecipientType.TO);
                String[] recipientsTo = new String[to.length];
                for (int i = 0; i < to.length; i++) {
                    recipientsTo[i] = to[i].toString();
                }
                msg.putExtra(Intent.EXTRA_EMAIL, recipientsTo);

                Address[] cc = message.getRecipients(RecipientType.CC);
                String[] recipientsCc = new String[cc.length];
                for (int i = 0; i < cc.length; i++) {
                    recipientsCc[i] = cc[i].toString();
                }
                msg.putExtra(Intent.EXTRA_CC, recipientsCc);

                msg.setType("text/plain");
                context.startActivity(Intent.createChooser(msg, context.getString(R.string.send_alternate_chooser_title)));
            } catch (MessagingException me) {
                Log.e(K9.LOG_TAG, "Unable to send email through alternate program", me);
            }
        }
    });
}

/**
 * Checks mail for one or multiple accounts. If account is null all accounts
 * are checked.
 *
 * @param context
 * @param account
 * @param listener
 */
public void checkMail(final Context context, final Account account,
                      final boolean ignoreLastCheckedTime,
                      final boolean useManualWakeLock,
                      final MessagingListener listener) {

    TracingWakeLock twakeLock = null;
    if (useManualWakeLock) {
        // Hold a time-limited wake lock so a manual check survives the screen turning off.
        TracingPowerManager pm = TracingPowerManager.getPowerManager(context);

        twakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "K9 MessagingController.checkMail");
        twakeLock.setReferenceCounted(false);
        twakeLock.acquire(K9.MANUAL_WAKE_LOCK_TIMEOUT);
    }
    final TracingWakeLock wakeLock = twakeLock;

    for (MessagingListener l : getListeners()) {
        l.checkMailStarted(context, account);
    }
    putBackground("checkMail", listener, new Runnable() {
        @Override
        public void run() {

            try {
                if (K9.DEBUG)
                    Log.i(K9.LOG_TAG, "Starting mail check");
                Preferences prefs = Preferences.getPreferences(context);

                Collection<Account> accounts;
                if (account != null) {
                    accounts = new ArrayList<Account>(1);
                    accounts.add(account);
                } else {
                    accounts = prefs.getAvailableAccounts();
                }

                for (final Account account : accounts) {
                    checkMailForAccount(context, account, ignoreLastCheckedTime, prefs, listener);
                }

            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "Unable to synchronize mail", e);
                addErrorMessage(account, null, e);
            }
            // Always queue the cleanup step, even when the check itself failed,
            // so the wake lock is released and listeners are notified.
            putBackground("finalize sync", null, new Runnable() {
                @Override
                public void run() {

                    if (K9.DEBUG)
                        Log.i(K9.LOG_TAG, "Finished mail sync");

                    if (wakeLock != null) {
                        wakeLock.release();
                    }
                    for (MessagingListener l : getListeners()) {
                        l.checkMailFinished(context, account);
                    }

                }
            }
                         );
        }
    });
}

// Syncs all eligible folders of one account: skips unavailable accounts, honors the
// check interval, sends pending messages first, and clears the notification flag afterwards.
private void checkMailForAccount(final Context context, final Account account,
                                 final boolean ignoreLastCheckedTime,
                                 final Preferences prefs,
                                 final MessagingListener listener) {
    if (!account.isAvailable(context)) {
        if (K9.DEBUG) {
            Log.i(K9.LOG_TAG, "Skipping synchronizing unavailable account " + account.getDescription());
        }
        return;
    }
    final long accountInterval = account.getAutomaticCheckIntervalMinutes() * 60 * 1000;
    if (!ignoreLastCheckedTime && accountInterval <= 0) {
        if (K9.DEBUG)
            Log.i(K9.LOG_TAG, "Skipping synchronizing account " + account.getDescription());
        return;
    }

    if (K9.DEBUG)
        Log.i(K9.LOG_TAG, "Synchronizing account " + account.getDescription());

    account.setRingNotified(false);

    sendPendingMessages(account, listener);

    try {
        Account.FolderMode aDisplayMode = account.getFolderDisplayMode();
        Account.FolderMode aSyncMode = account.getFolderSyncMode();

        Store localStore = account.getLocalStore();
        for (final Folder folder : localStore.getPersonalNamespaces(false)) {
            folder.open(Folder.OPEN_MODE_RW);

            Folder.FolderClass fDisplayClass = folder.getDisplayClass();
            Folder.FolderClass fSyncClass = folder.getSyncClass();

            if (modeMismatch(aDisplayMode, fDisplayClass)) {
                // Never sync a folder that isn't displayed
                /*
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() +
                          " which is in display mode " + fDisplayClass + " while account is in display mode " + aDisplayMode);
                */

                continue;
            }

            if (modeMismatch(aSyncMode, fSyncClass)) {
                // Do not sync folders in the wrong class
                /*
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() +
                          " which is in sync mode " + fSyncClass + " while account is in sync mode " + aSyncMode);
                */

                continue;
            }
            synchronizeFolder(account, folder, ignoreLastCheckedTime, accountInterval, listener);
        }
    } catch (MessagingException e) {
        Log.e(K9.LOG_TAG, "Unable to synchronize account " + account.getName(), e);
        addErrorMessage(account, null, e);
    } finally {
        putBackground("clear notification flag for " + account.getDescription(), null, new Runnable() {
            @Override
            public void run() {
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Clearing notification flag for " + account.getDescription());
                account.setRingNotified(false);
                try {
                    AccountStats stats = account.getStats(context);
                    if (stats == null || stats.unreadMessageCount == 0) {
                        notifyAccountCancel(context, account);
                    }
                } catch (MessagingException e) {
                    Log.e(K9.LOG_TAG, "Unable to getUnreadMessageCount for account: " + account, e);
                }
            }
        }
                     );
    }
}

// Queues a sync for one folder unless it was checked within the account interval;
// the interval is re-checked inside the Runnable in case multiple commands were enqueued.
private void synchronizeFolder(
    final Account account,
    final Folder folder,
    final boolean ignoreLastCheckedTime,
    final long accountInterval,
    final MessagingListener listener) {

    if (K9.DEBUG)
        Log.v(K9.LOG_TAG, "Folder " + folder.getName() + " was last synced @ " +
              new Date(folder.getLastChecked()));

    if (!ignoreLastCheckedTime && folder.getLastChecked() >
            (System.currentTimeMillis() - accountInterval)) {
        if (K9.DEBUG)
            Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName()
                  + ", previously synced @ " + new Date(folder.getLastChecked())
                  + " which would be too recent for the account period");

        return;
    }
    putBackground("sync" + folder.getName(), null, new Runnable() {
        @Override
        public void run() {
            LocalFolder tLocalFolder = null;
            try {
                // In case multiple Commands get enqueued, don't run more than
                // once
                final LocalStore localStore = account.getLocalStore();
                tLocalFolder = localStore.getFolder(folder.getName());
                tLocalFolder.open(Folder.OPEN_MODE_RW);

                if (!ignoreLastCheckedTime && tLocalFolder.getLastChecked() >
                (System.currentTimeMillis() - accountInterval)) {
                    if (K9.DEBUG)
                        Log.v(K9.LOG_TAG, "Not running Command for folder " + folder.getName()
                              + ", previously synced @ " + new Date(folder.getLastChecked())
                              + " which would be too recent for the account period");
                    return;
                }
                // Show the ongoing notification only while the sync is actually running.
                notifyFetchingMail(account, folder);
                try {
                    synchronizeMailboxSynchronous(account, folder.getName(), listener, null);
                } finally {
                    notifyFetchingMailCancel(account);
                }
            } catch (Exception e) {

                Log.e(K9.LOG_TAG, "Exception while processing folder " +
                      account.getDescription() + ":" + folder.getName(), e);
                addErrorMessage(account, null, e);
            } finally {
                closeFolder(tLocalFolder);
            }
        }
    }
                 );


}

// Compacts the account's local store and reports the size change to listeners.
public void compact(final Account account, final MessagingListener ml) {
    putBackground("compact:" + account.getDescription(), ml, new Runnable() {
        @Override
        public void run() {
            try {
                LocalStore localStore = account.getLocalStore();
                long oldSize = localStore.getSize();
                localStore.compact();
                long newSize = localStore.getSize();
                for (MessagingListener l : getListeners(ml)) {
                    l.accountSizeChanged(account, oldSize, newSize);
                }
            } catch (UnavailableStorageException e) {
                Log.i(K9.LOG_TAG, "Failed to compact account because storage is not available - trying again later.");
                throw new UnavailableAccountException(e);
            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "Failed to compact account " + account.getDescription(), e);
            }
        }
    });
}

// Clears the account's local store (all messages) and resets visible limits,
// then reports zeroed stats to listeners.
public void clear(final Account account, final MessagingListener ml) {
    putBackground("clear:" + account.getDescription(), ml, new Runnable() {
        @Override
        public void run() {
            try {
                LocalStore localStore = account.getLocalStore();
                long oldSize = localStore.getSize();
                localStore.clear();
                localStore.resetVisibleLimits(account.getDisplayCount());
                long newSize = localStore.getSize();
                AccountStats stats = new AccountStats();
                stats.size = newSize;
                stats.unreadMessageCount = 0;
                stats.flaggedMessageCount = 0;
                for (MessagingListener l : getListeners(ml)) {
                    l.accountSizeChanged(account, oldSize, newSize);
                    l.accountStatusChanged(account, stats);
                }
            } catch (UnavailableStorageException e) {
                Log.i(K9.LOG_TAG, "Failed to clear account because storage is not available - 
trying again later."); throw new UnavailableAccountException(e); } catch (Exception e) { Log.e(K9.LOG_TAG, "Failed to clear account " + account.getDescription(), e); } } }); } public void recreate(final Account account, final MessagingListener ml) { putBackground("recreate:" + account.getDescription(), ml, new Runnable() { @Override public void run() { try { LocalStore localStore = account.getLocalStore(); long oldSize = localStore.getSize(); localStore.recreate(); localStore.resetVisibleLimits(account.getDisplayCount()); long newSize = localStore.getSize(); AccountStats stats = new AccountStats(); stats.size = newSize; stats.unreadMessageCount = 0; stats.flaggedMessageCount = 0; for (MessagingListener l : getListeners(ml)) { l.accountSizeChanged(account, oldSize, newSize); l.accountStatusChanged(account, stats); } } catch (UnavailableStorageException e) { Log.i(K9.LOG_TAG, "Failed to recreate an account because storage is not available - trying again later."); throw new UnavailableAccountException(e); } catch (Exception e) { Log.e(K9.LOG_TAG, "Failed to recreate account " + account.getDescription(), e); } } }); } private boolean shouldNotifyForMessage(Account account, LocalFolder localFolder, Message message) { // If we don't even have an account name, don't show the notification. // (This happens during initial account setup) if (account.getName() == null) { return false; } // Do not notify if the user does not have notifications enabled or if the message has // been read. 
if (!account.isNotifyNewMail() || message.isSet(Flag.SEEN)) {
        return false;
    }

    Account.FolderMode aDisplayMode = account.getFolderDisplayMode();
    Account.FolderMode aNotifyMode = account.getFolderNotifyNewMailMode();
    Folder.FolderClass fDisplayClass = localFolder.getDisplayClass();
    Folder.FolderClass fNotifyClass = localFolder.getNotifyClass();

    if (modeMismatch(aDisplayMode, fDisplayClass)) {
        // Never notify a folder that isn't displayed
        return false;
    }

    if (modeMismatch(aNotifyMode, fNotifyClass)) {
        // Do not notify folders in the wrong class
        return false;
    }

    // If the account is a POP3 account and the message is older than the oldest message we've
    // previously seen, then don't notify about it.
    if (account.getStoreUri().startsWith("pop3") &&
            message.olderThan(new Date(account.getLatestOldMessageSeenTime()))) {
        return false;
    }

    // No notification for new messages in Trash, Drafts, Spam or Sent folder.
    // But do notify if it's the INBOX (see issue 1817).
    Folder folder = message.getFolder();
    if (folder != null) {
        String folderName = folder.getName();
        if (!account.getInboxFolderName().equals(folderName) &&
                (account.getTrashFolderName().equals(folderName)
                 || account.getDraftsFolderName().equals(folderName)
                 || account.getSpamFolderName().equals(folderName)
                 || account.getSentFolderName().equals(folderName))) {
            return false;
        }
    }

    // Skip messages we have already seen: compare against the highest UID known for this
    // folder. Only works for numeric UIDs (e.g. IMAP); non-numeric UIDs fall through.
    if (message.getUid() != null && localFolder.getLastUid() != null) {
        try {
            Integer messageUid = Integer.parseInt(message.getUid());
            if (messageUid <= localFolder.getLastUid()) {
                if (K9.DEBUG)
                    Log.d(K9.LOG_TAG, "Message uid is " + messageUid + ", max message uid is " +
                          localFolder.getLastUid() + ".  Skipping notification.");
                return false;
            }
        } catch (NumberFormatException e) {
            // Nothing to be done here.
        }
    }

    // Don't notify if the sender address matches one of our identities and the user chose not
    // to be notified for such messages.
    if (account.isAnIdentity(message.getFrom()) && !account.isNotifySelfNewMail()) {
        return false;
    }

    return true;
}

/**
 * Get the pending notification data for an account.
 * See {@link NotificationData}.
 *
 * @param account The account to retrieve the pending data for
 * @param previousUnreadMessageCount The number of currently pending messages, which will be used
 *                                   if there's no pending data yet. If passed as null, a new instance
 *                                   won't be created if currently not existent.
 * @return A pending data instance, or null if one doesn't exist and
 *         previousUnreadMessageCount was passed as null.
 */
private NotificationData getNotificationData(Account account, Integer previousUnreadMessageCount) {
    NotificationData data;

    // notificationData is the lock guarding the per-account map; all map reads/writes
    // for pending-notification state should go through it.
    synchronized (notificationData) {
        data = notificationData.get(account.getAccountNumber());
        if (data == null && previousUnreadMessageCount != null) {
            data = new NotificationData(previousUnreadMessageCount);
            notificationData.put(account.getAccountNumber(), data);
        }
    }

    return data;
}

/**
 * Build a display string for the sender of a message, for use in a notification.
 * Returns the friendly "From" name, or "To: <recipient>" when the message was sent by
 * one of the account's own identities, or null if sender info cannot be determined.
 */
private CharSequence getMessageSender(Context context, Account account, Message message) {
    try {
        boolean isSelf = false;
        final Contacts contacts = K9.showContactName() ?
Contacts.getInstance(context) : null;
        final Address[] fromAddrs = message.getFrom();

        if (fromAddrs != null) {
            isSelf = account.isAnIdentity(fromAddrs);
            if (!isSelf && fromAddrs.length > 0) {
                return MessageHelper.toFriendly(fromAddrs[0], contacts).toString();
            }
        }

        if (isSelf) {
            // show To: if the message was sent from me
            Address[] rcpts = message.getRecipients(Message.RecipientType.TO);

            if (rcpts != null && rcpts.length > 0) {
                return context.getString(R.string.message_to_fmt,
                        MessageHelper.toFriendly(rcpts[0], contacts).toString());
            }

            return context.getString(R.string.general_no_sender);
        }
    } catch (MessagingException e) {
        Log.e(K9.LOG_TAG, "Unable to get sender information for notification.", e);
    }

    return null;
}

/** Subject of the message, or the localized "(no subject)" placeholder when empty. */
private CharSequence getMessageSubject(Context context, Message message) {
    String subject = message.getSubject();
    if (!TextUtils.isEmpty(subject)) {
        return subject;
    }

    return context.getString(R.string.general_no_subject);
}

// Lazily-created, cached span used to emphasize the subject/sender portion of
// notification text. Shared across calls; created on first use.
private static TextAppearanceSpan sEmphasizedSpan;

private TextAppearanceSpan getEmphasizedSpan(Context context) {
    if (sEmphasizedSpan == null) {
        sEmphasizedSpan = new TextAppearanceSpan(context,
                R.style.TextAppearance_StatusBar_EventContent_Emphasized);
    }
    return sEmphasizedSpan;
}

/**
 * Two-line preview for the big-text notification: emphasized subject, newline, snippet.
 * Falls back to whichever of the two is non-empty.
 */
private CharSequence getMessagePreview(Context context, Message message) {
    CharSequence subject = getMessageSubject(context, message);
    String snippet = message.getPreview();

    if (TextUtils.isEmpty(subject)) {
        return snippet;
    } else if (TextUtils.isEmpty(snippet)) {
        return subject;
    }

    SpannableStringBuilder preview = new SpannableStringBuilder();
    preview.append(subject);
    preview.append('\n');
    preview.append(snippet);

    preview.setSpan(getEmphasizedSpan(context), 0, subject.length(), 0);

    return preview;
}

/** One-line "sender subject" summary with the sender emphasized; just the subject if sender is null. */
private CharSequence buildMessageSummary(Context context, CharSequence sender, CharSequence subject) {
    if (sender == null) {
        return subject;
    }

    SpannableStringBuilder summary = new SpannableStringBuilder();
    summary.append(sender);
    summary.append(" ");
    summary.append(subject);

    summary.setSpan(getEmphasizedSpan(context), 0, sender.length(), 0);

    return summary;
}

public static final boolean platformSupportsExtendedNotifications() {
    // supported in Jellybean
    // TODO: use constant once target SDK is set to >= 16
    return Build.VERSION.SDK_INT >= 16;
}

public static boolean platformSupportsLockScreenNotifications() {
    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
}

/**
 * Pick the message to (re)notify for: the newest still-pending message, or the newest
 * dropped (overflowed) one restored from the local store; null if both lists are empty.
 * Caller must hold the lock on the enclosing NotificationData.
 */
private LocalMessage findNewestMessageForNotificationLocked(Context context, NotificationData data) {
    if (!data.messages.isEmpty()) {
        return data.messages.getFirst();
    }

    if (!data.droppedMessages.isEmpty()) {
        return data.droppedMessages.getFirst().restoreToLocalMessage(context);
    }

    return null;
}

/**
 * Creates a notification of a newly received message.
 */
private void notifyAccount(Context context, Account account,
        LocalMessage message, int previousUnreadMessageCount) {
    final NotificationData data = getNotificationData(account, previousUnreadMessageCount);
    synchronized (data) {
        notifyAccountWithDataLocked(context, account, message, data);
    }
}

// Maximum number of senders to display in a lock screen notification.
private static final int NUM_SENDERS_IN_LOCK_SCREEN_NOTIFICATION = 5;

/**
 * Build the specific notification actions for a single message on Android Wear.
 * Wraps the message into single-element collections and delegates to the multi-message overload.
 */
private void addWearActions(final NotificationCompat.Builder builder, final Account account, final Message messages) {
    ArrayList<MessageReference> subAllRefs = new ArrayList<MessageReference>();
    subAllRefs.add(new MessageReference(account.getUuid(), messages.getFolder().getName(), messages.getUid(),
            messages.getFlags().size() == 0 ? null : messages.getFlags().iterator().next()));
    LinkedList<Message> msgList = new LinkedList<Message>();
    msgList.add(messages);
    addWearActions(builder, 1, account, subAllRefs, msgList);
}

/**
 * Build the specific notification actions for a single or multiple message on Android Wear.
*/
private void addWearActions(final NotificationCompat.Builder builder, final int msgCount,
        final Account account, final ArrayList<MessageReference> allRefs,
        final List<? extends Message> messages) {
    // we need a new wearableExtender for each notification
    final NotificationCompat.WearableExtender wearableExtender = new NotificationCompat.WearableExtender();

    NotificationQuickDelete deleteOption = K9.getNotificationQuickDeleteBehaviour();
    boolean showDeleteAction = deleteOption == NotificationQuickDelete.ALWAYS ||
            (deleteOption == NotificationQuickDelete.FOR_SINGLE_MSG && msgCount == 1);

    // note: while we are limited to 3 actions on the phone,
    // this does not seem to be a limit on Android Wear devices.
    // Tested on Moto 360, 8 actions seem to be no problem.

    if (showDeleteAction) {
        // Delete on wear only if no confirmation is required
        // because they would have to be confirmed on the phone, not the wear device
        if (!K9.confirmDeleteFromNotification()) {
            NotificationCompat.Action wearActionDelete =
                    new NotificationCompat.Action.Builder(
                            R.drawable.ic_action_delete_dark,
                            context.getString(R.string.notification_action_delete),
                            NotificationDeleteConfirmation.getIntent(context, account, allRefs))
                    .build();
            builder.extend(wearableExtender.addAction(wearActionDelete));
        }
    }
    if (NotificationActionService.isArchiveAllMessagesWearAvaliable(context, account, messages)) {
        // Archive on wear
        // NOTE(review): archive action reuses the delete drawable — presumably a placeholder icon; verify.
        NotificationCompat.Action wearActionArchive =
                new NotificationCompat.Action.Builder(
                        R.drawable.ic_action_delete_dark,
                        context.getString(R.string.notification_action_archive),
                        NotificationActionService.getArchiveAllMessagesIntent(context, account, allRefs))
                .build();
        builder.extend(wearableExtender.addAction(wearActionArchive));
    }
    if (NotificationActionService.isSpamAllMessagesWearAvaliable(context, account, messages)) {
        // Spam on wear
        // NOTE(review): spam action also reuses the delete drawable; verify.
        NotificationCompat.Action wearActionSpam =
                new NotificationCompat.Action.Builder(
                        R.drawable.ic_action_delete_dark,
                        context.getString(R.string.notification_action_spam),
                        NotificationActionService.getSpamAllMessagesIntent(context, account, allRefs))
                .build();
        builder.extend(wearableExtender.addAction(wearActionSpam));
    }
}

/**
 * Build and post the new-mail notification for an account.
 * Caller must hold the lock on {@code data} (see notifyAccount).
 *
 * @param message the newly arrived message, or null to silently refresh an existing
 *                notification (e.g. after a remote read/delete)
 */
private void notifyAccountWithDataLocked(Context context, final Account account,
        LocalMessage message, NotificationData data) {
    boolean updateSilently = false;

    if (message == null) {
        /* this can happen if a message we previously notified for is read or deleted remotely */
        message = findNewestMessageForNotificationLocked(context, data);
        updateSilently = true;
        if (message == null) {
            // seemingly both the message list as well as the overflow list is empty;
            // it probably is a good idea to cancel the notification in that case
            notifyAccountCancel(context, account);
            return;
        }
    } else {
        data.addMessage(message);
    }

    final KeyguardManager keyguardService = (KeyguardManager) context.getSystemService(Context.KEYGUARD_SERVICE);
    final CharSequence sender = getMessageSender(context, account, message);
    final CharSequence subject = getMessageSubject(context, message);
    CharSequence summary = buildMessageSummary(context, sender, subject);

    // Hide content entirely when privacy mode applies (always, or while the keyguard is up).
    boolean privacyModeEnabled =
            (K9.getNotificationHideSubject() == NotificationHideSubject.ALWAYS) ||
            (K9.getNotificationHideSubject() == NotificationHideSubject.WHEN_LOCKED &&
             keyguardService.inKeyguardRestrictedInputMode());

    if (privacyModeEnabled || summary.length() == 0) {
        summary = context.getString(R.string.notification_new_title);
    }

    NotificationManager notifMgr =
            (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);

    NotificationCompat.Builder builder = new NotificationCompat.Builder(context);
    builder.setSmallIcon(R.drawable.ic_notify_new_mail);
    builder.setWhen(System.currentTimeMillis());
    if (!updateSilently) {
        builder.setTicker(summary);
    }

    final int newMessages = data.getNewMessageCount();
    final int unreadCount = data.unreadBeforeNotification + newMessages;

    builder.setNumber(unreadCount);

    String accountDescr =
(account.getDescription() != null) ? account.getDescription() : account.getEmail();
    final ArrayList<MessageReference> allRefs = new ArrayList<MessageReference>();
    data.supplyAllMessageRefs(allRefs);

    if (platformSupportsExtendedNotifications() && !privacyModeEnabled) {
        if (newMessages > 1) {
            //TODO: Stacked notifications for Android Wear
            // multiple messages pending, show inbox style
            NotificationCompat.InboxStyle style = new NotificationCompat.InboxStyle(builder);
            int nID = account.getAccountNumber();
            for (Message m : data.messages) {
                style.addLine(buildMessageSummary(context,
                        getMessageSender(context, account, m),
                        getMessageSubject(context, m)));

                // build child-notifications for Android Wear,
                // so the grouped notification can be opened to
                // reveal the individual messages and their actions.
                NotificationCompat.Builder subBuilder = new NotificationCompat.Builder(context);
                subBuilder.setSmallIcon(R.drawable.ic_notify_new_mail);
                subBuilder.setWhen(System.currentTimeMillis());
                subBuilder.setGroup(NOTIFICATION_GROUP_KEY); // same group as the GroupSummary notification
                subBuilder.setGroupSummary(false); // this is not the summary

                // set content
                // FIX: use the loop message 'm' (not the newest 'message') so each stacked
                // Wear child shows its own sender/subject, consistent with style.addLine above.
                setNotificationContent(context, m, getMessageSender(context, account, m),
                        getMessageSubject(context, m), subBuilder, accountDescr);

                // set actions
                addWearActions(subBuilder, account, m);
                if (m.isSet(Flag.FLAGGED)) {
                    subBuilder.setPriority(NotificationCompat.PRIORITY_HIGH);
                }
                // no sound, no vibrate, no LED because these are for the summary notification only
                // and depend on quiet time and user settings

                // this must be done before the summary notification
                // (nID accumulates: 1000+n, 2000+n, ... yielding a unique id per child)
                nID = 1000 + nID;
                notifMgr.notify(nID, subBuilder.build());
                data.addStackedChildNotification(nID);
            }
            if (!data.droppedMessages.isEmpty()) {
                style.setSummaryText(context.getString(R.string.notification_additional_messages,
                        data.droppedMessages.size(), accountDescr));
            }
            final String title = context.getResources().getQuantityString(
                    R.plurals.notification_new_messages_title, newMessages, newMessages);
            style.setBigContentTitle(title);
            builder.setContentTitle(title);
            builder.setSubText(accountDescr);
            builder.setStyle(style);
        } else {
            // single message pending, show big text
            setNotificationContent(context, message, sender, subject, builder, accountDescr);

            builder.addAction(
                    platformSupportsLockScreenNotifications()
                    ? R.drawable.ic_action_single_message_options_dark_vector
                    : R.drawable.ic_action_single_message_options_dark,
                    context.getString(R.string.notification_action_reply),
                    NotificationActionService.getReplyIntent(context, account, message.makeMessageReference()));
        }

        // Mark Read on phone
        builder.addAction(
                platformSupportsLockScreenNotifications()
                ? R.drawable.ic_action_mark_as_read_dark_vector
                : R.drawable.ic_action_mark_as_read_dark,
                context.getString(R.string.notification_action_mark_as_read),
                NotificationActionService.getReadAllMessagesIntent(context, account, allRefs));

        NotificationQuickDelete deleteOption = K9.getNotificationQuickDeleteBehaviour();
        boolean showDeleteAction = deleteOption == NotificationQuickDelete.ALWAYS ||
                (deleteOption == NotificationQuickDelete.FOR_SINGLE_MSG && newMessages == 1);

        // add (different) actions to show on connected Android Wear devices
        addWearActions(builder, newMessages, account, allRefs, data.messages);

        if (showDeleteAction) {
            // we need to pass the action directly to the activity, otherwise the
            // status bar won't be pulled up and we won't see the confirmation (if used)

            // Delete on phone
            builder.addAction(
                    platformSupportsLockScreenNotifications()
                    ? R.drawable.ic_action_delete_dark_vector
                    : R.drawable.ic_action_delete_dark,
                    context.getString(R.string.notification_action_delete),
                    NotificationDeleteConfirmation.getIntent(context, account, allRefs));
        }

        // this may be a summary notification for multiple stacked notifications
        // for each individual mail, shown on Android Wear
        // The phone will only show the summary as it's the last notification given
        // to notifMgr with this account's key
        builder.setGroup(NOTIFICATION_GROUP_KEY);
        builder.setGroupSummary(true);
    } else {
        // no extended notifications supported
        String accountNotice = context.getString(R.string.notification_new_one_account_fmt,
                unreadCount, accountDescr);
        builder.setContentTitle(accountNotice);
        builder.setContentText(summary);
    }

    // Bump priority when any pending message is flagged.
    for (Message m : data.messages) {
        if (m.isSet(Flag.FLAGGED)) {
            builder.setPriority(NotificationCompat.PRIORITY_HIGH);
            break;
        }
    }

    TaskStackBuilder stack = buildNotificationNavigationStack(context, account, message,
            newMessages, unreadCount, allRefs);

    builder.setContentIntent(stack.getPendingIntent(
            account.getAccountNumber(),
            PendingIntent.FLAG_CANCEL_CURRENT | PendingIntent.FLAG_ONE_SHOT));
    builder.setDeleteIntent(NotificationActionService.getAcknowledgeIntent(context, account));

    // Only ring or vibrate if we have not done so already on this account and fetch
    boolean ringAndVibrate = false;
    if (!updateSilently && !account.isRingNotified()) {
        account.setRingNotified(true);
        ringAndVibrate = true;
    }

    NotificationSetting n = account.getNotificationSetting();

    configureLockScreenNotification(builder, context, account, newMessages, unreadCount,
            accountDescr, sender, data.messages);

    configureNotification(
            builder,
            (n.shouldRing()) ? n.getRingtone() : null,
            (n.shouldVibrate()) ? n.getVibration() : null,
            (n.isLed()) ?
Integer.valueOf(n.getLedColor()) : null,
            K9.NOTIFICATION_LED_BLINK_SLOW,
            ringAndVibrate);

    notifMgr.notify(account.getAccountNumber(), builder.build());
}

/**
 * Builds the TaskStack of a notification using either buildMessageViewBackStack
 * or buildUnreadBackStack or buildMessageListBackStack depending on the
 * behavior we have on this device generation.
 *
 * @param context     Android context
 * @param account     account being notified for
 * @param message     (only used if there is only 1 new message)
 * @param newMessages (used on newer platforms)
 * @param unreadCount (used on platforms that support no extended notifications)
 * @param allRefs     references to all pending messages; used to decide whether
 *                    they all live in the same folder
 * @return the back stack to attach to the notification's content intent
 */
private TaskStackBuilder buildNotificationNavigationStack(Context context, Account account,
        LocalMessage message, int newMessages, int unreadCount, ArrayList<MessageReference> allRefs) {
    TaskStackBuilder stack;
    boolean treatAsSingleMessageNotification;

    if (platformSupportsExtendedNotifications()) {
        // in the new-style notifications, we focus on the new messages, not the unread ones
        treatAsSingleMessageNotification = newMessages == 1;
    } else {
        // in the old-style notifications, we focus on unread messages, as we don't have a
        // good way to express the new message count
        treatAsSingleMessageNotification = unreadCount == 1;
    }

    if (treatAsSingleMessageNotification) {
        stack = buildMessageViewBackStack(context, message.makeMessageReference());
    } else if (account.goToUnreadMessageSearch()) {
        stack = buildUnreadBackStack(context, account);
    } else {
        String initialFolder = message.getFolder().getName();
        /* only go to folder if all messages are in the same folder, else go to folder list */
        for (MessageReference ref : allRefs) {
            if (!TextUtils.equals(initialFolder, ref.getFolderName())) {
                initialFolder = null;
                break;
            }
        }
        stack = buildMessageListBackStack(context, account, initialFolder);
    }
    return stack;
}

/**
 * Set the content of a notification for a single message.
 * @see #getMessagePreview(android.content.Context, com.fsck.k9.mail.Message)
 * @param context      Android context
 * @param message      the message to render
 * @param sender       pre-formatted sender text
 * @param subject      pre-formatted subject text
 * @param builder      builder to populate
 * @param accountDescr account display name used as sub-text
 * @return the same builder, for chaining
 */
private NotificationCompat.Builder setNotificationContent(Context context,
        /*Local*/Message message, CharSequence sender, CharSequence subject,
        NotificationCompat.Builder builder, String accountDescr) {
    NotificationCompat.BigTextStyle style = new NotificationCompat.BigTextStyle(builder);
    CharSequence preview = getMessagePreview(context, message);
    if (preview != null) {
        style.bigText(preview);
    }
    builder.setContentText(subject);
    builder.setSubText(accountDescr);
    builder.setContentTitle(sender);
    builder.setStyle(style);
    return builder;
}

/** Root of every notification back stack: the Accounts screen, unless there is only one account. */
private TaskStackBuilder buildAccountsBackStack(Context context) {
    TaskStackBuilder stack = TaskStackBuilder.create(context);

    if (!skipAccountsInBackStack(context)) {
        stack.addNextIntent(new Intent(context, Accounts.class).putExtra(Accounts.EXTRA_STARTUP, false));
    }

    return stack;
}

/** Accounts screen (maybe) -> folder list for the given account. */
private TaskStackBuilder buildFolderListBackStack(Context context, Account account) {
    TaskStackBuilder stack = buildAccountsBackStack(context);
    stack.addNextIntent(FolderList.actionHandleAccountIntent(context, account, false));

    return stack;
}

/** Accounts screen (maybe) -> the account's "unread" search results. */
private TaskStackBuilder buildUnreadBackStack(Context context, final Account account) {
    TaskStackBuilder stack = buildAccountsBackStack(context);
    LocalSearch search = Accounts.createUnreadSearch(context, account);
    stack.addNextIntent(MessageList.intentDisplaySearch(context, search, true, false, false));

    return stack;
}

/**
 * Back stack ending at a folder's message list; {@code folder} may be null to land on
 * the folder list instead.
 */
private TaskStackBuilder buildMessageListBackStack(Context context, Account account, String folder) {
    TaskStackBuilder stack = skipFolderListInBackStack(context, account, folder) ?
*/
private void configureNotification(NotificationCompat.Builder builder, String ringtone,
        long[] vibrationPattern, Integer ledColor, int ledSpeed, boolean ringAndVibrate) {

    // if it's quiet time, then we shouldn't be ringing, buzzing or flashing
    if (K9.isQuietTime()) {
        return;
    }

    if (ringAndVibrate) {
        if (ringtone != null && !TextUtils.isEmpty(ringtone)) {
            builder.setSound(Uri.parse(ringtone));
        }

        if (vibrationPattern != null) {
            builder.setVibrate(vibrationPattern);
        }
    }

    if (ledColor != null) {
        int ledOnMS;
        int ledOffMS;
        if (ledSpeed == K9.NOTIFICATION_LED_BLINK_SLOW) {
            ledOnMS = K9.NOTIFICATION_LED_ON_TIME;
            ledOffMS = K9.NOTIFICATION_LED_OFF_TIME;
        } else {
            ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME;
            ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME;
        }

        builder.setLights(ledColor, ledOnMS, ledOffMS);
    }
}

/**
 * Configure lock screen notifications on platforms that support it
 *
 * @param builder Unlocked notification
 * @param context Context
 * @param account Account being notified
 * @param newMessages Number of new messages being notified for
 * @param unreadCount Total number of unread messages in this account
 * @param accountDescription Formatted account name for display
 * @param formattedSender Formatted sender name for display
 * @param messages List of messages if notifying for multiple messages. Null otherwise.
 */
private void configureLockScreenNotification(NotificationCompat.Builder builder,
        Context context, Account account, int newMessages, int unreadCount,
        CharSequence accountDescription, CharSequence formattedSender,
        List<? extends Message> messages) {
    if (!platformSupportsLockScreenNotifications()) {
        return;
    }

    builder.setSmallIcon(R.drawable.ic_notify_new_mail_vector);
    builder.setColor(account.getChipColor());

    // The "public version" is what is shown on the secure lock screen; it carries
    // only as much detail as the user's lock-screen visibility setting allows.
    NotificationCompat.Builder publicNotification = new NotificationCompat.Builder(context);
    publicNotification.setSmallIcon(R.drawable.ic_notify_new_mail_vector);
    publicNotification.setColor(account.getChipColor());
    publicNotification.setNumber(unreadCount);

    final String title = context.getResources().getQuantityString(
            R.plurals.notification_new_messages_title, newMessages, newMessages);
    publicNotification.setContentTitle(title);

    switch (K9.getLockScreenNotificationVisibility()) {
        case NOTHING:
            builder.setVisibility(NotificationCompat.VISIBILITY_SECRET);
            break;
        case APP_NAME:
            // This is the Android default, but we should be explicit in case that changes in the future.
            builder.setVisibility(NotificationCompat.VISIBILITY_PRIVATE);
            break;
        case SENDERS:
            if (newMessages == 1) {
                publicNotification.setContentText(formattedSender);
            } else {
                // Use a LinkedHashSet so that we preserve ordering (newest to oldest), but still remove duplicates
                Set<CharSequence> senders = new LinkedHashSet<CharSequence>(NUM_SENDERS_IN_LOCK_SCREEN_NOTIFICATION);
                for (Message message : messages) {
                    senders.add(getMessageSender(context, account, message));
                    if (senders.size() == NUM_SENDERS_IN_LOCK_SCREEN_NOTIFICATION) {
                        break;
                    }
                }
                publicNotification.setContentText(TextUtils.join(", ", senders));
            }

            builder.setPublicVersion(publicNotification.build());
            break;
        case EVERYTHING:
            builder.setVisibility(NotificationCompat.VISIBILITY_PUBLIC);
            break;
        case MESSAGE_COUNT:
        default:
            publicNotification.setContentText(accountDescription);

            builder.setPublicVersion(publicNotification.build());
            break;
    }
}

/**
 * Cancel a notification of new email messages
 * @param account all notifications for this account will be canceled and removed
 */
public void notifyAccountCancel(final Context context, final Account account) {
NotificationManager notificationManager = (NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE); notificationManager.cancel(account.getAccountNumber()); notificationManager.cancel(-1000 - account.getAccountNumber()); // cancel stacked notifications on Android Wear that share this as a summary notification NotificationData data = notificationData.get(account.getAccountNumber()); if (data != null) { List<Integer> stackedChildNotifications = data.getStackedChildNotifications(); if (stackedChildNotifications != null) { for (Integer stackedNotificationId : stackedChildNotifications) { notificationManager.cancel(stackedNotificationId); } } } notificationData.remove(account.getAccountNumber()); } public void deleteAccount(Context context, Account account) { notifyAccountCancel(context, account); memorizingListener.removeAccount(account); } /** * Save a draft message. * @param account Account we are saving for. * @param message Message to save. * @return Message representing the entry in the local store. */ public Message saveDraft(final Account account, final Message message, long existingDraftId) { Message localMessage = null; try { LocalStore localStore = account.getLocalStore(); LocalFolder localFolder = localStore.getFolder(account.getDraftsFolderName()); localFolder.open(Folder.OPEN_MODE_RW); if (existingDraftId != INVALID_MESSAGE_ID) { String uid = localFolder.getMessageUidById(existingDraftId); message.setUid(uid); } // Save the message to the store. localFolder.appendMessages(Collections.singletonList(message)); // Fetch the message back from the store. This is the Message that's returned to the caller. 
localMessage = localFolder.getMessage(message.getUid()); localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true); PendingCommand command = new PendingCommand(); command.command = PENDING_COMMAND_APPEND; command.arguments = new String[] { localFolder.getName(), localMessage.getUid() }; queuePendingCommand(account, command); processPendingCommands(account); } catch (MessagingException e) { Log.e(K9.LOG_TAG, "Unable to save message as draft.", e); addErrorMessage(account, null, e); } return localMessage; } public long getId(Message message) { long id; if (message instanceof LocalMessage) { id = ((LocalMessage) message).getId(); } else { Log.w(K9.LOG_TAG, "MessagingController.getId() called without a LocalMessage"); id = INVALID_MESSAGE_ID; } return id; } public boolean modeMismatch(Account.FolderMode aMode, Folder.FolderClass fMode) { if (aMode == Account.FolderMode.NONE || (aMode == Account.FolderMode.FIRST_CLASS && fMode != Folder.FolderClass.FIRST_CLASS) || (aMode == Account.FolderMode.FIRST_AND_SECOND_CLASS && fMode != Folder.FolderClass.FIRST_CLASS && fMode != Folder.FolderClass.SECOND_CLASS) || (aMode == Account.FolderMode.NOT_SECOND_CLASS && fMode == Folder.FolderClass.SECOND_CLASS)) { return true; } else { return false; } } static AtomicInteger sequencing = new AtomicInteger(0); static class Command implements Comparable<Command> { public Runnable runnable; public MessagingListener listener; public String description; boolean isForeground; int sequence = sequencing.getAndIncrement(); @Override public int compareTo(Command other) { if (other.isForeground && !isForeground) { return 1; } else if (!other.isForeground && isForeground) { return -1; } else { return (sequence - other.sequence); } } } public MessagingListener getCheckMailListener() { return checkMailListener; } public void setCheckMailListener(MessagingListener checkMailListener) { if (this.checkMailListener != null) { removeListener(this.checkMailListener); } this.checkMailListener = checkMailListener; if 
(this.checkMailListener != null) {
        addListener(this.checkMailListener);
    }
}

public Collection<Pusher> getPushers() {
    return pushers.values();
}

/**
 * (Re)start push (IDLE-style) monitoring for an account: stops any existing pusher,
 * selects the pushable folders per the account's display/push folder modes, and starts
 * a new pusher on them.
 *
 * @return true if a pusher was started, false otherwise (no folders, store not push
 *         capable, or an error occurred).
 */
public boolean setupPushing(final Account account) {
    try {
        Pusher previousPusher = pushers.remove(account);
        if (previousPusher != null) {
            previousPusher.stop();
        }
        Account.FolderMode aDisplayMode = account.getFolderDisplayMode();
        Account.FolderMode aPushMode = account.getFolderPushMode();

        List<String> names = new ArrayList<String>();

        Store localStore = account.getLocalStore();
        for (final Folder folder : localStore.getPersonalNamespaces(false)) {
            if (folder.getName().equals(account.getErrorFolderName())
                    || folder.getName().equals(account.getOutboxFolderName())) {
                /*
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
                          " which should never be pushed");
                */
                continue;
            }
            folder.open(Folder.OPEN_MODE_RW);

            Folder.FolderClass fDisplayClass = folder.getDisplayClass();
            Folder.FolderClass fPushClass = folder.getPushClass();

            if (modeMismatch(aDisplayMode, fDisplayClass)) {
                // Never push a folder that isn't displayed
                /*
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
                          " which is in display class " + fDisplayClass + " while account is in display mode " + aDisplayMode);
                */
                continue;
            }

            if (modeMismatch(aPushMode, fPushClass)) {
                // Do not push folders in the wrong class
                /*
                if (K9.DEBUG)
                    Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
                          " which is in push mode " + fPushClass + " while account is in push mode " + aPushMode);
                */
                continue;
            }
            if (K9.DEBUG)
                Log.i(K9.LOG_TAG, "Starting pusher for " + account.getDescription() + ":" + folder.getName());

            names.add(folder.getName());
        }

        if (!names.isEmpty()) {
            PushReceiver receiver = new MessagingControllerPushReceiver(context, account, this);
            int maxPushFolders = account.getMaxPushFolders();

            // Cap the number of pushed folders at the account's configured limit.
            if (names.size() > maxPushFolders) {
                if (K9.DEBUG)
                    Log.i(K9.LOG_TAG, "Count of folders to push for account " + account.getDescription() + " is " +
                          names.size() + ", greater than limit of " + maxPushFolders + ", truncating");

                names = names.subList(0, maxPushFolders);
            }

            try {
                Store store = account.getRemoteStore();
                if (!store.isPushCapable()) {
                    if (K9.DEBUG)
                        Log.i(K9.LOG_TAG, "Account " + account.getDescription() + " is not push capable, skipping");

                    return false;
                }
                Pusher pusher = store.getPusher(receiver);
                if (pusher != null) {
                    // putIfAbsent guards against a concurrent setupPushing for the same account;
                    // only the winner starts its pusher.
                    Pusher oldPusher = pushers.putIfAbsent(account, pusher);
                    if (oldPusher == null) {
                        pusher.start(names);
                    }
                }
            } catch (Exception e) {
                Log.e(K9.LOG_TAG, "Could not get remote store", e);
                return false;
            }

            return true;
        } else {
            if (K9.DEBUG)
                Log.i(K9.LOG_TAG, "No folders are configured for pushing in account " + account.getDescription());
            return false;
        }

    } catch (Exception e) {
        Log.e(K9.LOG_TAG, "Got exception while setting up pushing", e);
    }
    return false;
}

/** Stop and deregister every active pusher. */
public void stopAllPushing() {
    if (K9.DEBUG)
        Log.i(K9.LOG_TAG, "Stopping all pushers");

    Iterator<Pusher> iter = pushers.values().iterator();
    while (iter.hasNext()) {
        Pusher pusher = iter.next();
        iter.remove();
        pusher.stop();
    }
}

/**
 * Called by a pusher when new messages arrive. Downloads them into the local folder on a
 * background thread and BLOCKS the calling (pusher) thread until that work completes,
 * via the CountDownLatch.
 */
public void messagesArrived(final Account account, final Folder remoteFolder, final List<Message> messages, final boolean flagSyncOnly) {
    if (K9.DEBUG)
        Log.i(K9.LOG_TAG, "Got new pushed email messages for account " + account.getDescription()
              + ", folder " + remoteFolder.getName());

    final CountDownLatch latch = new CountDownLatch(1);
    putBackground("Push messageArrived of account " + account.getDescription()
    + ", folder " + remoteFolder.getName(), null, new Runnable() {
        @Override
        public void run() {
            LocalFolder localFolder = null;
            try {
                LocalStore localStore = account.getLocalStore();
                localFolder = localStore.getFolder(remoteFolder.getName());
                localFolder.open(Folder.OPEN_MODE_RW);

                account.setRingNotified(false);
                int newCount = downloadMessages(account, remoteFolder, localFolder, messages, flagSyncOnly);

                int unreadMessageCount = localFolder.getUnreadMessageCount();

                localFolder.setLastPush(System.currentTimeMillis());
                localFolder.setStatus(null);

                if (K9.DEBUG)
                    Log.i(K9.LOG_TAG, "messagesArrived newCount = " + newCount +
                          ", unread count = " + unreadMessageCount);

                if (unreadMessageCount == 0) {
                    notifyAccountCancel(context, account);
                }

                for (MessagingListener l : getListeners()) {
                    l.folderStatusChanged(account, remoteFolder.getName(), unreadMessageCount);
                }

            } catch (Exception e) {
                String rootMessage = getRootCauseMessage(e);
                String errorMessage = "Push failed: " + rootMessage;
                try {
                    // Oddly enough, using a local variable gets rid of a
                    // potential null pointer access warning with Eclipse.
                    LocalFolder folder = localFolder;
                    folder.setStatus(errorMessage);
                } catch (Exception se) {
                    Log.e(K9.LOG_TAG, "Unable to set failed status on localFolder", se);
                }
                for (MessagingListener l : getListeners()) {
                    l.synchronizeMailboxFailed(account, remoteFolder.getName(), errorMessage);
                }
                addErrorMessage(account, null, e);
            } finally {
                closeFolder(localFolder);
                latch.countDown();
            }

        }
    });

    try {
        // NOTE(review): an InterruptedException here is swallowed without re-interrupting
        // the thread (Thread.currentThread().interrupt()) — confirm whether pusher threads
        // rely on the interrupt flag.
        latch.await();
    } catch (Exception e) {
        Log.e(K9.LOG_TAG, "Interrupted while awaiting latch release", e);
    }
    if (K9.DEBUG)
        Log.i(K9.LOG_TAG, "MessagingController.messagesArrivedLatch released");
}

public void systemStatusChanged() {
    for (MessagingListener l : getListeners()) {
        l.systemStatusChanged();
    }
}

// Lifecycle states remembered per folder/operation so late-attaching listeners can be
// brought up to date (see MemorizingListener.refreshOther).
enum MemorizingState { STARTED, FINISHED, FAILED }

/** Last-known progress/state of sync, send, push and pending-command processing for one folder. */
static class Memory {
    Account account;
    String folderName;
    MemorizingState syncingState = null;
    MemorizingState sendingState = null;
    MemorizingState pushingState = null;
    MemorizingState processingState = null;
    String failureMessage = null;

    int syncingTotalMessagesInMailbox;
    int syncingNumNewMessages;

    int folderCompleted = 0;
    int folderTotal = 0;
    String processingCommandTitle = null;

    Memory(Account nAccount, String nFolderName) {
        account = nAccount;
        folderName = nFolderName;
    }

    String getKey() {
        return getMemoryKey(account, folderName);
    }
}

// Map key for a Memory entry: "<account description>:<folder name>".
static String getMemoryKey(Account taccount, String tfolderName) {
    return taccount.getDescription() + ":" + tfolderName;
}

static
class MemorizingListener extends MessagingListener { Map<String, Memory> memories = new HashMap<String, Memory>(31); Memory getMemory(Account account, String folderName) { Memory memory = memories.get(getMemoryKey(account, folderName)); if (memory == null) { memory = new Memory(account, folderName); memories.put(memory.getKey(), memory); } return memory; } synchronized void removeAccount(Account account) { Iterator<Entry<String, Memory>> memIt = memories.entrySet().iterator(); while (memIt.hasNext()) { Entry<String, Memory> memoryEntry = memIt.next(); String uuidForMemory = memoryEntry.getValue().account.getUuid(); if (uuidForMemory.equals(account.getUuid())) { memIt.remove(); } } } @Override public synchronized void synchronizeMailboxStarted(Account account, String folder) { Memory memory = getMemory(account, folder); memory.syncingState = MemorizingState.STARTED; memory.folderCompleted = 0; memory.folderTotal = 0; } @Override public synchronized void synchronizeMailboxFinished(Account account, String folder, int totalMessagesInMailbox, int numNewMessages) { Memory memory = getMemory(account, folder); memory.syncingState = MemorizingState.FINISHED; memory.syncingTotalMessagesInMailbox = totalMessagesInMailbox; memory.syncingNumNewMessages = numNewMessages; } @Override public synchronized void synchronizeMailboxFailed(Account account, String folder, String message) { Memory memory = getMemory(account, folder); memory.syncingState = MemorizingState.FAILED; memory.failureMessage = message; } synchronized void refreshOther(MessagingListener other) { if (other != null) { Memory syncStarted = null; Memory sendStarted = null; Memory processingStarted = null; for (Memory memory : memories.values()) { if (memory.syncingState != null) { switch (memory.syncingState) { case STARTED: syncStarted = memory; break; case FINISHED: other.synchronizeMailboxFinished(memory.account, memory.folderName, memory.syncingTotalMessagesInMailbox, memory.syncingNumNewMessages); break; case 
FAILED: other.synchronizeMailboxFailed(memory.account, memory.folderName, memory.failureMessage); break; } } if (memory.sendingState != null) { switch (memory.sendingState) { case STARTED: sendStarted = memory; break; case FINISHED: other.sendPendingMessagesCompleted(memory.account); break; case FAILED: other.sendPendingMessagesFailed(memory.account); break; } } if (memory.pushingState != null) { switch (memory.pushingState) { case STARTED: other.setPushActive(memory.account, memory.folderName, true); break; case FINISHED: other.setPushActive(memory.account, memory.folderName, false); break; case FAILED: break; } } if (memory.processingState != null) { switch (memory.processingState) { case STARTED: processingStarted = memory; break; case FINISHED: case FAILED: other.pendingCommandsFinished(memory.account); break; } } } Memory somethingStarted = null; if (syncStarted != null) { other.synchronizeMailboxStarted(syncStarted.account, syncStarted.folderName); somethingStarted = syncStarted; } if (sendStarted != null) { other.sendPendingMessagesStarted(sendStarted.account); somethingStarted = sendStarted; } if (processingStarted != null) { other.pendingCommandsProcessing(processingStarted.account); if (processingStarted.processingCommandTitle != null) { other.pendingCommandStarted(processingStarted.account, processingStarted.processingCommandTitle); } else { other.pendingCommandCompleted(processingStarted.account, processingStarted.processingCommandTitle); } somethingStarted = processingStarted; } if (somethingStarted != null && somethingStarted.folderTotal > 0) { other.synchronizeMailboxProgress(somethingStarted.account, somethingStarted.folderName, somethingStarted.folderCompleted, somethingStarted.folderTotal); } } } @Override public synchronized void setPushActive(Account account, String folderName, boolean active) { Memory memory = getMemory(account, folderName); memory.pushingState = (active ? 
MemorizingState.STARTED : MemorizingState.FINISHED); } @Override public synchronized void sendPendingMessagesStarted(Account account) { Memory memory = getMemory(account, null); memory.sendingState = MemorizingState.STARTED; memory.folderCompleted = 0; memory.folderTotal = 0; } @Override public synchronized void sendPendingMessagesCompleted(Account account) { Memory memory = getMemory(account, null); memory.sendingState = MemorizingState.FINISHED; } @Override public synchronized void sendPendingMessagesFailed(Account account) { Memory memory = getMemory(account, null); memory.sendingState = MemorizingState.FAILED; } @Override public synchronized void synchronizeMailboxProgress(Account account, String folderName, int completed, int total) { Memory memory = getMemory(account, folderName); memory.folderCompleted = completed; memory.folderTotal = total; } @Override public synchronized void pendingCommandsProcessing(Account account) { Memory memory = getMemory(account, null); memory.processingState = MemorizingState.STARTED; memory.folderCompleted = 0; memory.folderTotal = 0; } @Override public synchronized void pendingCommandsFinished(Account account) { Memory memory = getMemory(account, null); memory.processingState = MemorizingState.FINISHED; } @Override public synchronized void pendingCommandStarted(Account account, String commandTitle) { Memory memory = getMemory(account, null); memory.processingCommandTitle = commandTitle; } @Override public synchronized void pendingCommandCompleted(Account account, String commandTitle) { Memory memory = getMemory(account, null); memory.processingCommandTitle = null; } } private void actOnMessages(List<LocalMessage> messages, MessageActor actor) { Map<Account, Map<Folder, List<Message>>> accountMap = new HashMap<Account, Map<Folder, List<Message>>>(); for (LocalMessage message : messages) { if ( message == null) { continue; } Folder folder = message.getFolder(); Account account = message.getAccount(); Map<Folder, List<Message>> 
folderMap = accountMap.get(account); if (folderMap == null) { folderMap = new HashMap<Folder, List<Message>>(); accountMap.put(account, folderMap); } List<Message> messageList = folderMap.get(folder); if (messageList == null) { messageList = new LinkedList<Message>(); folderMap.put(folder, messageList); } messageList.add(message); } for (Map.Entry<Account, Map<Folder, List<Message>>> entry : accountMap.entrySet()) { Account account = entry.getKey(); //account.refresh(Preferences.getPreferences(K9.app)); Map<Folder, List<Message>> folderMap = entry.getValue(); for (Map.Entry<Folder, List<Message>> folderEntry : folderMap.entrySet()) { Folder folder = folderEntry.getKey(); List<Message> messageList = folderEntry.getValue(); actor.act(account, folder, messageList); } } } interface MessageActor { public void act(final Account account, final Folder folder, final List<Message> messages); } }
package ch.uzh.ddis.katts.bolts; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import ch.uzh.ddis.katts.query.stream.Stream; import ch.uzh.ddis.katts.query.stream.Variable; /** * The variable bindings are used to exchange data between the different nodes (bolts). A object of variable bindings * contains a set of variable and always a start and end date. Additionally also a sequence number. * * This sequence number is unique between two instance of a certain bolt. If a bolt is parallelized then the sequence * number can help to synchronize certain tasks. Important is that this sequence number is not globally nor unique * between two bolt types (classes). It is only unique between a direct stream from one bolt instance to the other. That * means it is unique on direct connections of bolts. * * @author Thomas Hunziker * */ public class VariableBindings { /** The stream on which the variable bindings of this object should be emitted on. */ private Stream stream; private Emitter emitter = null; private Map<Variable, Object> variableData = new HashMap<Variable, Object>(); private Event anchorEvent; private Date startDate; private Date endDate; public VariableBindings(Stream stream, Emitter emitter, Event anchorEvent) { this.stream = stream; this.emitter = emitter; this.anchorEvent = anchorEvent; if (anchorEvent == null) { throw new NullPointerException("anchorEvent was null"); } } /** * Adds a variable to the variable binding. * * @param variable * The variable to add. * @param value * The value for this variable in the variable binding. * */ public void add(Variable variable, Object value) { variableData.put(variable, value); } /** * Adds a new variable to the variable binding by the given reference on identifier. The reference name identifies * the internal name of a variable. 
* * @param referenceName * The reference name for which the variable value should be set for. * @param value * The value to set. */ public void add(String referenceName, Object value) { this.add(stream.getVariableByReferenceName(referenceName), value); } public Stream getStream() { return stream; } public void setStream(Stream stream) { this.stream = stream; } public void emit() { emitter.emit(this); } /** * Build the outgoing tuple for storm with the correct ordering of values. * * @param sequenceNumber * The sequence number of the variable binding. * @return Tuple for emitting to Storm */ public List<Object> getDataListSorted(long sequenceNumber) { List<Object> list = new ArrayList<Object>(); list.add(sequenceNumber); list.add(this.getStartDate()); list.add(this.getEndDate()); for (Variable var : this.getStream().getAllVariables()) { list.add(this.variableData.get(var)); } return list; } /** * The start date indicates, when the variable binding starts. * * @return */ public Date getStartDate() { return startDate; } public void setStartDate(Date startDate) { this.startDate = startDate; } /** * The end date indicates, when the variable binding ends. * * @return */ public Date getEndDate() { return endDate; } public void setEndDate(Date endDate) { this.endDate = endDate; } public Event getAnchorEvent() { return anchorEvent; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append("Emit On: ").append(stream.getId()); builder.append("\n\tVariables:\n"); for (Entry<Variable, Object> entry : this.variableData.entrySet()) { builder.append("\t"); builder.append(entry.getKey()); // builder.append("(").append(entry.getKey().getReferencesTo()).append(")"); builder.append(": "); builder.append(entry.getValue()); builder.append("\n"); } return builder.toString(); } }
package io.permazen.kv.raft; import com.google.common.collect.Iterables; import com.google.common.primitives.Bytes; import io.permazen.kv.KVTransactionException; import io.permazen.kv.KeyRange; import io.permazen.kv.RetryTransactionException; import io.permazen.kv.mvcc.Mutations; import io.permazen.kv.mvcc.Reads; import io.permazen.kv.mvcc.Writes; import io.permazen.kv.raft.msg.AppendRequest; import io.permazen.kv.raft.msg.AppendResponse; import io.permazen.kv.raft.msg.CommitRequest; import io.permazen.kv.raft.msg.CommitResponse; import io.permazen.kv.raft.msg.GrantVote; import io.permazen.kv.raft.msg.InstallSnapshot; import io.permazen.kv.raft.msg.Message; import io.permazen.kv.raft.msg.PingRequest; import io.permazen.kv.raft.msg.PingResponse; import io.permazen.kv.raft.msg.RequestVote; import io.permazen.util.LongEncoder; import java.io.IOException; import java.util.AbstractMap; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import org.slf4j.Logger; /** * Common superclass for the three roles played by a Raft node: * {@linkplain LeaderRole leader}, {@linkplain FollowerRole follower}, and {@linkplain CandidateRole candidate}. */ public abstract class Role { final Logger log; final RaftKVDatabase raft; final Service checkReadyTransactionsService = new Service(this, "check ready transactions") { @Override public void run() { Role.this.checkReadyTransactions(); } }; final Service checkWaitingTransactionsService = new Service(this, "check waiting transactions") { @Override public void run() { Role.this.checkWaitingTransactions(); } }; // NOTE: use of this service requires that 'checkWaitingTransactionsService' be scheduled first! 
final Service applyCommittedLogEntriesService = new Service(this, "apply committed logs") { @Override public void run() { Role.this.applyCommittedLogEntries(); } }; final Service triggerKeyWatchesService = new Service(this, "trigger key watches") { @Override public void run() { Role.this.triggerKeyWatches(); } }; // Constructors Role(RaftKVDatabase raft) { this.raft = raft; this.log = this.raft.log; assert Thread.holdsLock(this.raft); } // Status /** * Get the {@link RaftKVDatabase} with which this instance is associated. * * @return associated database */ public RaftKVDatabase getKVDatabase() { return this.raft; } // Lifecycle void setup() { assert Thread.holdsLock(this.raft); this.raft.requestService(this.checkReadyTransactionsService); this.raft.requestService(this.checkWaitingTransactionsService); this.raft.requestService(this.applyCommittedLogEntriesService); } void shutdown() { // Sanity check assert Thread.holdsLock(this.raft); // Fail any (read-only) transactions with a minimum lease timeout, because they won't be valid for a new leader for (RaftKVTransaction tx : new ArrayList<>(this.raft.openTransactions.values())) { if (!tx.getState().equals(TxState.COMPLETED) && tx.getCommitLeaderLeaseTimeout() != null) { assert tx.hasCommitInfo(); this.raft.fail(tx, new RetryTransactionException(tx, "leader was deposed during leader lease timeout wait")); } } // Cleanup role-specific state for (RaftKVTransaction tx : this.raft.openTransactions.values()) this.cleanupForTransaction(tx); } // Service abstract void outputQueueEmpty(String address); /** * Check transactions in the {@link TxState#COMMIT_READY} state to see if we can advance them. 
*/ void checkReadyTransactions() { assert Thread.holdsLock(this.raft); for (RaftKVTransaction tx : new ArrayList<>(this.raft.openTransactions.values())) { if (TxState.COMMIT_READY.equals(tx.getState())) new CheckReadyTransactionService(this, tx).run(); } } /** * Check transactions in the {@link TxState#COMMIT_WAITING} state to see if they are committed yet. * We invoke this service method whenever our {@code commitIndex} advances. */ void checkWaitingTransactions() { assert Thread.holdsLock(this.raft); for (RaftKVTransaction tx : new ArrayList<>(this.raft.openTransactions.values())) { if (TxState.COMMIT_WAITING.equals(tx.getState())) new CheckWaitingTransactionService(this, tx).run(); } } /** * Apply committed but unapplied log entries to the state machine. * We invoke this service method whenever log entries are added or our {@code commitIndex} advances. * * <p> * Note: checkWaitingTransactions() must have been invoked already when this method is invoked. */ void applyCommittedLogEntries() { // Sanity check assert Thread.holdsLock(this.raft); assert this.checkRebasableAndCommittableUpToDate(); // Determine how many committed log entries we can apply to the state machine at this time int numEntriesToApply = 0; while (this.raft.lastAppliedIndex + numEntriesToApply < this.raft.commitIndex && this.mayApplyLogEntry(this.raft.raftLog.get(numEntriesToApply))) numEntriesToApply++; final long maxAppliedIndex = this.raft.lastAppliedIndex + numEntriesToApply; assert maxAppliedIndex <= this.raft.commitIndex; // Apply committed log entries to the state machine while (this.raft.lastAppliedIndex < maxAppliedIndex) { // Grab the first unwritten log entry final LogEntry logEntry = this.raft.raftLog.get(0); assert logEntry.getIndex() == this.raft.lastAppliedIndex + 1; // Get the current config as of the log entry we're about to apply final HashMap<String, String> logEntryConfig = new HashMap<>(this.raft.lastAppliedConfig); logEntry.applyConfigChange(logEntryConfig); // Prepare 
combined Mutations containing prefixed log entry changes plus my own final Writes logWrites = logEntry.getWrites(); final Writes myWrites = new Writes(); myWrites.getPuts().put(RaftKVDatabase.LAST_APPLIED_TERM_KEY, LongEncoder.encode(logEntry.getTerm())); myWrites.getPuts().put(RaftKVDatabase.LAST_APPLIED_INDEX_KEY, LongEncoder.encode(logEntry.getIndex())); myWrites.getPuts().put(RaftKVDatabase.LAST_APPLIED_CONFIG_KEY, this.raft.encodeConfig(logEntryConfig)); final byte[] stateMachinePrefix = this.raft.getStateMachinePrefix(); final Mutations mutations = new Mutations() { @Override public Iterable<KeyRange> getRemoveRanges() { return Iterables.transform(logWrites.getRemoveRanges(), range -> range.prefixedBy(stateMachinePrefix)); } @Override public Iterable<Map.Entry<byte[], byte[]>> getPutPairs() { return Iterables.concat( Iterables.transform(logWrites.getPutPairs(), entry -> new AbstractMap.SimpleEntry<>(Bytes.concat(stateMachinePrefix, entry.getKey()), entry.getValue())), myWrites.getPutPairs()); } @Override public Iterable<Map.Entry<byte[], Long>> getAdjustPairs() { return Iterables.transform(logWrites.getAdjustPairs(), entry -> new AbstractMap.SimpleEntry<>(Bytes.concat(stateMachinePrefix, entry.getKey()), entry.getValue())); } }; // Apply updates to the key/value store; when applying the last one, durably persist if (this.log.isDebugEnabled()) this.debug("applying committed log entry " + logEntry + " to key/value store"); try { this.raft.kv.mutate(mutations, !this.raft.disableSync && this.raft.lastAppliedIndex == maxAppliedIndex); } catch (Exception e) { if (e instanceof RuntimeException && e.getCause() instanceof IOException) e = (IOException)e.getCause(); this.error("error applying log entry " + logEntry + " to key/value store", e); break; } // Update in-memory state assert logEntry.getIndex() == this.raft.lastAppliedIndex + 1; this.raft.incrementLastAppliedIndex(logEntry.getTerm()); logEntry.applyConfigChange(this.raft.lastAppliedConfig); assert 
this.raft.currentConfig.equals(this.raft.buildCurrentConfig()); // Delete the log entry this.raft.raftLog.remove(0); this.raft.deleteFile(logEntry.getFile(), "applied log file"); } } // Assertion check boolean checkRebasableAndCommittableUpToDate() { for (RaftKVTransaction tx : this.raft.openTransactions.values()) this.checkRebasableAndCommittableUpToDate(tx); return true; } // Assertion check boolean checkRebasableAndCommittableUpToDate(RaftKVTransaction tx) { // A rebasable transactions should be fully rebased assert !tx.isRebasable() || tx.getBaseIndex() == this.raft.getLastLogIndex() : "rebasable check failed for " + tx; // A committable transaction should be marked as such if (!tx.isCommittable()) { try { assert !this.checkCommittable(tx); } catch (KVTransactionException e) { // ok - it's not committable because it's broken } } return true; } /** * Determine whether the given log entry may be applied to the state machine. * This method can assume that the log entry is already committed. * * @param logEntry log entry to apply */ final boolean mayApplyLogEntry(LogEntry logEntry) { assert Thread.holdsLock(this.raft); // Are we running out of memory, or keeping around too many log entries? If so, go ahead no matter what the subclass says. final long logEntryMemoryUsage = this.raft.getUnappliedLogMemoryUsage(); if (logEntryMemoryUsage > this.raft.maxUnappliedLogMemory || this.raft.raftLog.size() > this.raft.maxUnappliedLogEntries) { if (this.log.isTraceEnabled()) { this.trace("allowing log entry " + logEntry + " to be applied because memory usage " + logEntryMemoryUsage + " > " + this.raft.maxUnappliedLogMemory + " and/or log length " + this.raft.raftLog.size() + " > " + this.raft.maxUnappliedLogEntries); } return true; } // Check with subclass return this.roleMayApplyLogEntry(logEntry); } /** * Role-specific hook to determine whether the given log entry should be applied to the state machine. * This method can assume that the log entry is already committed. 
* * @param logEntry log entry to apply */ boolean roleMayApplyLogEntry(LogEntry logEntry) { return true; } /** * Trigger any key watches for changes in log entries committed since the last time we checked. * * <p> * This should be invoked: * <ul> * <li>After advancing the commitIndex</li> * <li>After resetting the state machine</li> * <li>After installing a snapshot</li> * </ul> */ void triggerKeyWatches() { // Sanity check assert Thread.holdsLock(this.raft); assert this.raft.commitIndex >= this.raft.lastAppliedIndex; assert this.raft.commitIndex <= this.raft.lastAppliedIndex + this.raft.raftLog.size(); assert this.raft.keyWatchIndex <= this.raft.commitIndex; // If nobody is watching, don't bother if (this.raft.keyWatchTracker == null) return; // If we have recevied a snapshot install, we may not be able to tell which keys have changed since last notification; // in that case, trigger all key watches; otherwise, trigger the keys affected by newly committed log entries if (this.raft.keyWatchIndex < this.raft.lastAppliedIndex) { this.raft.keyWatchTracker.triggerAll(); this.raft.keyWatchIndex = this.raft.commitIndex; } else { while (this.raft.keyWatchIndex < this.raft.commitIndex) this.raft.keyWatchTracker.trigger(this.raft.getLogEntryAtIndex(++this.raft.keyWatchIndex).getWrites()); } } // Transactions /** * Handle the situation where a {@link Consistency#LINEARIZABLE} transaction in state {@link TxState#EXECUTING} * transitions from read-write to read-only. */ void handleLinearizableReadOnlyChange(RaftKVTransaction tx) { // Sanity check assert Thread.holdsLock(this.raft); assert tx.getState().equals(TxState.EXECUTING); assert tx.getConsistency().equals(Consistency.LINEARIZABLE); assert tx.isReadOnly(); assert !tx.hasCommitInfo(); assert tx.isRebasable(); assert !tx.isCommittable(); assert this.checkRebasableAndCommittableUpToDate(tx); } /** * Check a transaction that is ready to be committed (in the {@link TxState#COMMIT_READY} state). 
* * <p> * This should be invoked: * <ul> * <li>After changing roles</li> * <li>After a transaction has entered the {@link TxState#COMMIT_READY} state</li> * <li>After the leader is newly known (in {@link FollowerRole})</li> * <li>After the leader's output queue goes from non-empty to empty (in {@link FollowerRole})</li> * <li>After the leader's {@code commitIndex} has advanced, in case a config change transaction * is waiting on a previous config change transaction (in {@link LeaderRole})</li> * </ul> * * @param tx the transaction * @throws KVTransactionException if an error occurs */ final void checkReadyTransaction(RaftKVTransaction tx) { // Sanity check assert Thread.holdsLock(this.raft); assert tx.getState().equals(TxState.COMMIT_READY); // If transaction already has a commit term & index, proceed to COMMIT_WAITING if (tx.hasCommitInfo()) { this.advanceReadyTransaction(tx); return; } // Requires leader communication to acquire commit term+index - let subclass handle it assert !tx.isCommittable(); assert tx.getConsistency().equals(Consistency.LINEARIZABLE); this.checkReadyTransactionNeedingCommitInfo(tx); } /** * Handle a linearizable transaction that is ready to be committed (in the {@link TxState#COMMIT_READY} state) but * does not yet have a commit term &amp; index and therefore requires communication with the leader. * * @param tx the transaction * @throws KVTransactionException if an error occurs */ void checkReadyTransactionNeedingCommitInfo(RaftKVTransaction tx) { // Sanity check assert Thread.holdsLock(this.raft); assert tx.getState().equals(TxState.COMMIT_READY); assert tx.getConsistency().equals(Consistency.LINEARIZABLE); assert !tx.hasCommitInfo(); assert !tx.isCommittable(); assert this.checkRebasableAndCommittableUpToDate(tx); } /** * Advance a transaction from the {@link TxState#COMMIT_READY} state to the {@link TxState#COMMIT_WAITING} state. 
* * @param tx the transaction * @param commitTerm term of log entry that must be committed before the transaction may succeed * @param commitIndex index of log entry that must be committed before the transaction may succeed * @param commitLeaderLeaseTimeout if not null, minimum leader lease timeout we must see before commit may succeed */ final void advanceReadyTransactionWithCommitInfo(RaftKVTransaction tx, long commitTerm, long commitIndex, Timestamp commitLeaderLeaseTimeout) { // Sanity check assert Thread.holdsLock(this.raft); assert tx.getState().equals(TxState.COMMIT_READY); assert !tx.hasCommitInfo(); // Set commit term & index tx.setCommitInfo(commitTerm, commitIndex, commitLeaderLeaseTimeout); // Advance to COMMIT_WAITING this.advanceReadyTransaction(tx); } /** * Advance a transaction from the {@link TxState#COMMIT_READY} state to the {@link TxState#COMMIT_WAITING} state. * * <p> * This assumes the commit info is already set. * * @param tx the transaction */ final void advanceReadyTransaction(RaftKVTransaction tx) { // Sanity check assert Thread.holdsLock(this.raft); assert tx.getState().equals(TxState.COMMIT_READY); assert tx.hasCommitInfo(); // Update state if (this.log.isTraceEnabled()) this.trace("advancing " + tx + " to " + TxState.COMMIT_WAITING); tx.setState(TxState.COMMIT_WAITING); tx.setNoLongerRebasable(); this.checkCommittable(tx); // Check this transaction to see if it can be committed new CheckWaitingTransactionService(this, tx).run(); } /** * Check a transaction waiting for its log entry to be committed (in the {@link TxState#COMMIT_WAITING} state). 
     *
     * <p>
     * This should be invoked:
     * <ul>
     * <li>After changing roles</li>
     * <li>After a transaction has entered the {@link TxState#COMMIT_WAITING} state</li>
     * <li>After advancing my {@code commitIndex} (as leader or follower)</li>
     * <li>After receiving an updated {@linkplain AppendResponse#getLeaderLeaseTimeout leader lease timeout}
     * (in {@link FollowerRole})</li>
     * </ul>
     *
     * @param tx the transaction
     * @throws KVTransactionException if an error occurs
     */
    final void checkWaitingTransaction(RaftKVTransaction tx) {

        // Sanity check
        assert Thread.holdsLock(this.raft);
        assert tx.getConsistency().isGuaranteesUpToDateReads();

        // Is transaction committable? If not (or it failed the check), there is nothing more to do here.
        if (!this.checkCommittable(tx))
            return;

        // Is there a required minimum leader lease timeout associated with the transaction? If so, we must wait for it.
        final Timestamp commitLeaderLeaseTimeout = tx.getCommitLeaderLeaseTimeout();
        if (commitLeaderLeaseTimeout != null && !this.isLeaderLeaseActiveAt(commitLeaderLeaseTimeout)) {
            if (this.log.isTraceEnabled())
                this.trace("committable " + tx + " must wait for leader lease timeout " + commitLeaderLeaseTimeout);
            return;
        }

        // Allow transaction commit to complete
        if (this.log.isTraceEnabled())
            this.trace("commit successful for " + tx);
        this.raft.succeed(tx);
    }

    /**
     * Detect newly-committable transactions.
     *
     * <p>
     * This should be invoked after advancing my {@code commitIndex} (as leader or follower).
     *
     * @throws KVTransactionException if an error occurs
     */
    void checkCommittables() {

        // Sanity check
        assert Thread.holdsLock(this.raft);

        // Check which transactions are now committable.
        // Iterate over a snapshot copy: failing a transaction may modify raft.openTransactions.
        for (RaftKVTransaction tx : new ArrayList<>(this.raft.openTransactions.values())) {
            try {
                this.checkCommittable(tx);
            } catch (KVTransactionException e) {
                this.raft.fail(tx, e);
            } catch (Exception | Error e) {
                this.raft.error("error checking committable for transaction " + tx, e);
                this.raft.fail(tx, new KVTransactionException(tx, e));
            }
        }
    }

    /**
     * Determine if a transaction has become committable, and mark it so if so.
     *
     * <p>
     * This should be invoked after advancing my {@code commitIndex} (as leader or follower), after setting
     * the commit info for a transaction, or after rebasing a transaction that has commit info already.
     *
     * <p>
     * Note: "committable" means ready to commit except any required wait for {@code tx.commitLeaderLeaseTimeout}.
     * In particular, the commit term+index is known, the corresponding log entry has been committed, and if rebasable
     * the transaction is rebased up through the commit term+index.
     *
     * @param tx the transaction
     * @return true if the transaction is (or already was) committable, otherwise false
     * @throws KVTransactionException if an error occurs
     */
    boolean checkCommittable(RaftKVTransaction tx) {

        // Sanity check
        assert Thread.holdsLock(this.raft);

        // Already checked?
        if (tx.isCommittable())
            return true;

        // Has the transaction's commit info been determined yet?
        // NOTE(review): zero appears to be used as a "not yet assigned" sentinel for commit index — confirm
        // that zero is never a valid log index.
        final long commitIndex = tx.getCommitIndex();
        final long commitTerm = tx.getCommitTerm();
        if (commitIndex == 0)
            return false;

        // Has the transaction's commit log entry been added yet?
        final long lastIndex = this.raft.getLastLogIndex();
        if (commitIndex > lastIndex)
            return false;

        // Compare commit term to the actual term of the commit log entry
        final long commitIndexActualTerm = this.raft.getLogTermAtIndexIfKnown(commitIndex);
        if (commitIndexActualTerm == 0) {

            // The commit log entry has already been applied to the state machine and its term forgotten.
            // This can happen if we lose contact and by the time we're back the log entry has
            // already been applied to the state machine on some leader and that leader sent
            // us an InstallSnapshot message. We don't know whether it actually got committed
            // or not, so the transaction must be retried.
            throw new RetryTransactionException(tx, "commit index " + commitIndex
              + " < last applied log index " + this.raft.lastAppliedIndex);
        }

        // Verify the term of the committed log entry; if not what we expect, the log entry was overwritten by a new leader.
        // (The "NtM" notation in these messages is the file's convention for "index N at term M".)
        if (commitTerm != commitIndexActualTerm) {
            throw new RetryTransactionException(tx,
              "leader was deposed during commit and transaction's commit log entry " + commitIndex + "t" + commitTerm
              + " overwritten by " + commitIndex + "t" + commitIndexActualTerm);
        }

        // Has the transaction's commit log entry been committed yet?
        if (commitIndex > this.raft.commitIndex)
            return false;

        // If transaction is rebasable, it must be rebased at least up through its commit index
        if (tx.isRebasable() && tx.getBaseIndex() < commitIndex)
            return false;

        // The transaction's commit log entry is committed, so mark the transaction as committable
        if (this.log.isTraceEnabled())
            this.trace(tx + " is now committable: " + this.raft.commitIndex + " >= " + commitIndex + "t" + commitTerm);
        tx.setCommittable();
        if (tx.isRebasable())
            tx.setNoLongerRebasable();
        return true;
    }

    /**
     * Rebase all rebasable transactions up to through the last log entry.
     *
     * <p>
     * We only rebase {@link Consistency#LINEARIZABLE} transactions that are either non-mutating or have not
     * yet had a {@link CommitRequest} sent to the leader.
     *
     * <p>
     * This should be invoked after appending a new Raft log entry.
     *
     * @param highPrioAlreadyChecked if the high priority transaction is already checked for conflicts
     * @throws KVTransactionException if an error occurs
     */
    void rebaseTransactions(boolean highPrioAlreadyChecked) {

        // Sanity check
        assert Thread.holdsLock(this.raft);
        assert !highPrioAlreadyChecked || this.raft.highPrioTx != null;
        assert !highPrioAlreadyChecked || Thread.holdsLock(this.raft.highPrioTx.view);

        // Rebase all rebasable transactions.
        // Iterate over a snapshot copy: failing a transaction may modify raft.openTransactions.
        for (RaftKVTransaction tx : new ArrayList<>(this.raft.openTransactions.values())) {
            if (!tx.isRebasable())
                continue;
            try {
                // Skip the (already performed) conflict check only for the high-priority transaction itself
                this.rebaseTransaction(tx, highPrioAlreadyChecked && tx == this.raft.highPrioTx);
            } catch (KVTransactionException e) {
                this.raft.fail(tx, e);
            } catch (Exception | Error e) {
                this.raft.error("error rebasing transaction " + tx, e);
                this.raft.fail(tx, new KVTransactionException(tx, e));
            }
        }
    }

    /**
     * Rebase the given transaction so that its base log entry is the last log entry or its commit log entry,
     * whichever is lower.
     *
     * <p>
     * This should be invoked for each {@linkplain RaftKVTransaction#isRebasable rebasable} transaction
     * after appending a new log entry.
     *
     * <p>
     * This method assumes that the given transaction is {@linkplain RaftKVTransaction#isRebasable rebasable}.
     *
     * @param tx the transaction
     * @param skipConflictCheck true to skip the conflict check because we've already done it
     * @throws KVTransactionException if an error occurs
     */
    private void rebaseTransaction(RaftKVTransaction tx, boolean skipConflictCheck) {

        // Sanity check
        assert Thread.holdsLock(this.raft);
        assert tx.isRebasable();
        assert tx.getFailure() == null;
        assert tx.getBaseIndex() >= this.raft.lastAppliedIndex;
        assert !tx.hasCommitInfo() || tx.getCommitIndex() > tx.getBaseIndex();
        assert !tx.hasCommitInfo() || !tx.addsLogEntry();

        // Anything to do?
        long baseIndex = tx.getBaseIndex();
        final long lastIndex = this.raft.getLastLogIndex();
        if (baseIndex == lastIndex)
            return;

        // Lock the mutable view so the rebase appears to happen instantaneously to any threads viewing the transaction
        synchronized (tx.view) {

            // Check for conflicts between transaction reads and newly committed log entries
            while (baseIndex < lastIndex) {

                // Check for conflicts (the assert repeats the check so it only runs when assertions are enabled)
                final LogEntry logEntry = this.raft.getLogEntryAtIndex(++baseIndex);
                assert !skipConflictCheck || !tx.view.getReads().isConflict(logEntry.getWrites());
                if (!skipConflictCheck && tx.view.getReads().isConflict(logEntry.getWrites())) {
                    if (this.log.isDebugEnabled())
                        this.debug("cannot rebase " + tx + " past " + logEntry + " due to conflicts, failing");
                    if (this.raft.dumpConflicts) {
                        this.dumpConflicts(tx.view.getReads(), logEntry.getWrites(),
                          "local txId=" + tx.txId + " fails due to conflicts with " + logEntry);
                    }
                    throw new RetryTransactionException(tx, "writes of committed transaction at index " + baseIndex
                      + " conflict with transaction reads from transaction base index " + tx.getBaseIndex());
                }

                // If we reach the transaction's commit log entry (if any), we can stop
                if (baseIndex == tx.getCommitIndex()) {
                    tx.setNoLongerRebasable();
                    break;
                }
            }

            // Update transaction
            final long baseTerm = this.raft.getLogTermAtIndex(baseIndex);
            if (this.log.isDebugEnabled()) {
                this.debug("rebased " + tx + " from " + tx.getBaseIndex() + "t" + tx.getBaseTerm()
                  + " -> " + baseIndex + "t" + baseTerm);
            }
            switch (tx.getState()) {
            case EXECUTING:

                // An executing transaction also needs its key/value view and snapshot moved to the new base entry
                assert !tx.hasCommitInfo() || tx.isReadOnly();
                final MostRecentView view = new MostRecentView(this.raft, baseIndex);
                assert view.getTerm() == baseTerm;
                assert view.getIndex() == baseIndex;
                tx.rebase(baseTerm, baseIndex, view.getView().getKVStore(), view.getSnapshot());
                break;
            case COMMIT_READY:
                tx.rebase(baseTerm, baseIndex);
                break;
            case COMMIT_WAITING:
                tx.rebase(baseTerm, baseIndex);
                this.checkWaitingTransaction(tx);                       // transaction might have become committable
                break;
            default:
                throw new RuntimeException("internal error");
            }
        }

        // Check whether transaction has become committable
        if (baseIndex == tx.getCommitIndex())
            this.checkCommittable(tx);
    }

    /**
     * Log the list of read/write conflicts between the given reads and writes, prefixed by the given description.
     *
     * @param reads transaction reads
     * @param writes committed writes to compare against
     * @param description prefix identifying the failing transaction/log entry
     */
    void dumpConflicts(Reads reads, Writes writes, String description) {
        final StringBuilder buf = new StringBuilder();
        buf.append(description).append(':');
        for (String conflict : reads.getConflicts(writes))
            buf.append("\n  ").append(conflict);
        this.info(buf.toString());
    }

    /**
     * Get the leader's lease timeout, if known.
     *
     * <p>
     * This base implementation always returns null; presumably role subclasses that track the
     * leader lease override it — TODO confirm against {@code FollowerRole}/{@code LeaderRole}.
     *
     * @return leader lease timeout, or null if unknown
     */
    Timestamp getLeaderLeaseTimeout() {
        return null;
    }

    /**
     * Determine whether the leader's lease timeout extends past the current time, that is, it is known that if
     * the current leader is deposed by a new leader, then that deposition must occur after now.
     *
     * @return true if it is known that no other leader can possibly have been elected at the current time, otherwise false
     */
    protected boolean isLeaderLeaseActiveNow() {
        return this.isLeaderLeaseActiveAt(new Timestamp());
    }

    /**
     * Determine whether the leader's lease timeout extends past the given time, that is, it is known that if
     * the current leader is deposed by a new leader, then that deposition must occur after the given time.
     *
     * @param time leader timestamp
     * @return true if it is known that no other leader can possibly have been elected at the given time, otherwise false
     */
    protected boolean isLeaderLeaseActiveAt(Timestamp time) {
        final Timestamp leaderLeaseTimeout = this.getLeaderLeaseTimeout();
        return leaderLeaseTimeout != null && leaderLeaseTimeout.compareTo(time) > 0;
    }

    /**
     * Perform any role-specific transaction cleanup for the given transaction.
     *
     * <p>
     * Invoked either when transaction is completed OR this role is being shutdown.
     * This method MAY be invoked more than once for the same transaction; it should be idempotent.
     *
     * <p>
     * Subclasses should invoke this method if overridden.
     *
     * @param tx the transaction
     */
    void cleanupForTransaction(RaftKVTransaction tx) {
        assert Thread.holdsLock(this.raft);
    }

// Messages

    // This is a package access version of "implements MessageSwitch"

    abstract void caseAppendRequest(AppendRequest msg, NewLogEntry newLogEntry);

    abstract void caseAppendResponse(AppendResponse msg);

    abstract void caseCommitRequest(CommitRequest msg, NewLogEntry newLogEntry);

    abstract void caseCommitResponse(CommitResponse msg);

    abstract void caseGrantVote(GrantVote msg);

    abstract void caseInstallSnapshot(InstallSnapshot msg);

    abstract void caseRequestVote(RequestVote msg);

    /**
     * Reply to a ping with a pong carrying the same timestamp so the sender can measure round-trip time.
     * If we are not in a cluster yet (clusterId == 0), echo back the cluster ID from the request.
     *
     * @param msg the incoming ping
     */
    void casePingRequest(PingRequest msg) {
        assert Thread.holdsLock(this.raft);
        final int responseClusterId = this.raft.clusterId != 0 ? this.raft.clusterId : msg.getClusterId();
        this.raft.sendMessage(new PingResponse(responseClusterId,
          this.raft.identity, msg.getSenderId(), this.raft.currentTerm, msg.getTimestamp()));
    }

    /**
     * Handle a ping response; this base implementation ignores it. Subclasses may override.
     *
     * @param msg the incoming pong
     */
    void casePingResponse(PingResponse msg) {
        assert Thread.holdsLock(this.raft);                             // ignore by default
    }

    /**
     * Determine whether receiving the given message may cause us to advance our current term.
     * This base implementation always allows it.
     *
     * @param msg the incoming message
     * @return true to allow advancing the current term
     */
    boolean mayAdvanceCurrentTerm(Message msg) {
        return true;
    }

    /**
     * Log (and otherwise ignore) a message that is not expected in this role.
     *
     * @param msg the unexpected message
     */
    void failUnexpectedMessage(Message msg) {
        this.warn("rec'd unexpected message " + msg + " while in role " + this + "; ignoring");
    }

// Debug

    abstract boolean checkState();

    void checkTransaction(RaftKVTransaction tx) {
        this.checkRebasableAndCommittableUpToDate(tx);
    }

// Logging
    // Thin wrappers delegating to the owning RaftKVDatabase's logger

    void trace(String msg, Throwable t) {
        this.raft.trace(msg, t);
    }

    void trace(String msg) {
        this.raft.trace(msg);
    }

    void debug(String msg, Throwable t) {
        this.raft.debug(msg, t);
    }

    void debug(String msg) {
        this.raft.debug(msg);
    }

    void info(String msg, Throwable t) {
        this.raft.info(msg, t);
    }

    void info(String msg) {
        this.raft.info(msg);
    }

    void warn(String msg, Throwable t) {
        this.raft.warn(msg, t);
    }

    void warn(String msg) {
        this.raft.warn(msg);
    }

    void error(String msg, Throwable t) {
        this.raft.error(msg, t);
    }

    void error(String msg) {
        this.raft.error(msg);
    }

// Object

    @Override
    public abstract String toString();

    /**
     * Build the common prefix used by role {@link #toString} implementations:
     * role name plus current term, last applied index/term, commit index, and log contents.
     *
     * @return descriptive prefix string
     */
    String toStringPrefix() {
        assert Thread.holdsLock(this.raft);
        return this.getClass().getSimpleName()
          + "[term=" + this.raft.currentTerm
          + ",applied=" + this.raft.lastAppliedIndex + "t" + this.raft.lastAppliedTerm
          + ",commit=" + this.raft.commitIndex
          + ",log=" + this.raft.raftLog
          + "]";
    }
}
/* Smooth, backgrounded saving of changed issues. * Every time this task runs, one issue will have changes saved to disk. * The frequency with which this task runs can be changed to improve performance. */ package me.makskay.bukkit.tidy.tasks; import me.makskay.bukkit.tidy.ConfigAccessor; import me.makskay.bukkit.tidy.IssueManager; import me.makskay.bukkit.tidy.IssueReport; import me.makskay.bukkit.tidy.TidyPlugin; public class SaveChangedIssuesTask implements Runnable { private ConfigAccessor issuesYml; private IssueManager issueManager; public SaveChangedIssuesTask(TidyPlugin plugin) { this.issuesYml = plugin.getIssuesYml(); this.issueManager = plugin.getIssueManager(); } public void run() { IssueReport issue = null; boolean delete = false; for (IssueReport iss : issueManager.getCachedIssues()) { if (iss.hasChanged()) { issue = iss; break; } delete = iss.shouldBeDeleted(); if (delete) { issue = iss; break; } } if (issue == null) { return; // there were no issues with changes waiting to be saved } String path = "issues." + issue.getUid(); if (delete) { issuesYml.getConfig().set(path, null); } else { issuesYml.getConfig().set(path + ".open", issue.isOpen()); issuesYml.getConfig().set(path + ".sticky", issue.isSticky()); issuesYml.getConfig().set(path + ".comments", issue.getComments()); issuesYml.getConfig().set(path + ".timestamp", System.currentTimeMillis()); } issuesYml.saveConfig(); issuesYml.reloadConfig(); } }
package wraith.library.WindowUtil;

import java.awt.Color;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.util.Random;
import javax.swing.JPanel;

/**
 * Swing panel that renders a simple multi-series line graph with row/column
 * labels, per-point value labels, and a color-keyed legend.
 *
 * <p>Values are expected in the range [0, 1] (0 = bottom, 1 = top of the plot
 * area). Series colors come from {@link #PRIME_COLORS} while there are enough
 * of them; otherwise they are generated from a fixed random seed so that the
 * plot and the legend stay in sync.
 */
public class LineGraph extends JPanel{
	private String[] rowNames = new String[0];
	private String[] colNames = new String[0];
	private double[][] values = new double[0][];
	private String[][] valueNames = new String[0][];
	private String[] key = new String[0];
	// Fixed palette used for the first few series (made final; contents never change)
	private static final Color[] PRIME_COLORS = {
			Color.RED,
			Color.BLUE,
			Color.GREEN,
			Color.ORANGE,
			Color.YELLOW,
			Color.CYAN,
			Color.MAGENTA,
			Color.PINK,
			new Color(151, 35, 201),
			new Color(152, 235, 29),
			new Color(75, 81, 255)
	};
	// Seed for the fallback color generator; identical seed is reused for the
	// legend so generated colors match the plotted lines.
	private static final int COLOR_GENERATOR = 2;
	private static final int PIXEL_TEXT_BUFFER = 10;
	/**
	 * Set the data series and their per-point labels, then repaint.
	 *
	 * @param values one array of y-values (0..1) per series
	 * @param valueNames matching labels drawn next to each point
	 */
	public void setValues(double[][] values, String[][] valueNames){
		this.values=values;
		this.valueNames=valueNames;
		repaint();
	}
	/**
	 * Paint the graph.
	 *
	 * <p>Fixes vs. the previous version: this now overrides
	 * {@code paintComponent} (the idiomatic Swing hook; {@code JComponent.paint}
	 * still delegates here, so external callers are unaffected) and no longer
	 * calls {@code dispose()} on the passed-in {@link Graphics} — a component
	 * must never dispose a graphics context it did not create, since Swing
	 * reuses it to paint borders, children, and sibling components.
	 */
	@Override
	protected void paintComponent(Graphics g1){
		super.paintComponent(g1);
		Graphics2D g = (Graphics2D)g1;
		// Background
		g.setColor(Color.gray);
		g.fillRect(0, 0, getWidth(), getHeight());
		FontMetrics fm = g.getFontMetrics();
		// Reserve margins for row labels (left), column labels (bottom), legend (right)
		int rowNameBuffer = calculateRowNameBuffer(fm)+PIXEL_TEXT_BUFFER;
		int colNameBuffer = fm.getHeight()+PIXEL_TEXT_BUFFER;
		int keyBuffer = calculateKeyBuffer(fm)+PIXEL_TEXT_BUFFER;
		int width = getWidth()-rowNameBuffer-keyBuffer;
		int height = getHeight()-colNameBuffer;
		// Axis labels
		g.setColor(Color.black);
		for(int i = 0; i<rowNames.length; i++)g.drawString(rowNames[i], (rowNameBuffer-fm.stringWidth(rowNames[i]))/2, (int)(height/(double)rowNames.length*((rowNames.length-i))));
		for(int i = 0; i<colNames.length; i++)g.drawString(colNames[i], (int)(width/(double)colNames.length*(i+0.5)-fm.stringWidth(colNames[i])/2), getHeight()-colNameBuffer/2+fm.getAscent()/2);
		// Grid lines
		g.setColor(Color.darkGray);
		for(int i = 0; i<=rowNames.length; i++)g.drawLine(rowNameBuffer, (int)(height/(double)rowNames.length*i), getWidth()-keyBuffer, (int)(height/(double)rowNames.length*i));
		for(int i = 0; i<colNames.length; i++)g.drawLine((int)(width/(double)colNames.length*(i+0.5)), 0, (int)(width/(double)colNames.length*(i+0.5)), height);
		// Data series
		Color c;
		Random colorGen = new Random(COLOR_GENERATOR);
		for(int a = 0; a<values.length; a++){
			c = seriesColor(a, colorGen);
			g.setColor(c);
			if(values[a].length==1){
				// Single-point series: draw as a horizontal reference line
				g.drawLine(rowNameBuffer, (int)(height*(1-values[a][0])), getWidth()-keyBuffer, (int)(height*(1-values[a][0])));
				g.setColor(Color.white);
				g.drawString(valueNames[a][0], (width-fm.stringWidth(valueNames[a][0]))/2, (int)(height*(1-values[a][0])));
				g.setColor(c);
			}else{
				for(int b = 0; b<values[a].length; b++){
					if(b==0){
						// First point has no predecessor to connect to; just label it
						g.setColor(Color.white);
						g.drawString(valueNames[a][b], (int)(width/(double)colNames.length*(b+0.5))-fm.stringWidth(valueNames[a][b])/2, (int)(height*(1-values[a][b]))-2);
						g.setColor(c);
						continue;
					}
					g.drawLine((int)(width/(double)colNames.length*(b-0.5)), (int)(height*(1-values[a][b-1])), (int)(width/(double)colNames.length*(b+0.5)), (int)(height*(1-values[a][b])));
					g.setColor(Color.white);
					g.drawString(valueNames[a][b], (int)(width/(double)colNames.length*(b+0.5))-fm.stringWidth(valueNames[a][b])/2, (int)(height*(1-values[a][b]))-2);
					g.setColor(c);
				}
			}
		}
		// Legend; reuse the same seed so generated colors match the series above
		Random keyGen = new Random(COLOR_GENERATOR);
		int sampleX = getWidth()-keyBuffer+PIXEL_TEXT_BUFFER/2;
		for(int i = 0; i<key.length; i++){
			c = seriesColor(i, keyGen);
			g.setColor(c);
			g.fillRect(sampleX, i*(fm.getHeight()+5)+PIXEL_TEXT_BUFFER/2, 10, 10);
			g.setColor(Color.black);
			g.drawRect(sampleX, i*(fm.getHeight()+5)+PIXEL_TEXT_BUFFER/2, 10, 10);
			g.setColor(Color.white);
			g.drawString(key[i], sampleX+10+PIXEL_TEXT_BUFFER/2, i*(fm.getHeight()+5)+PIXEL_TEXT_BUFFER/2+10);
		}
		// NOTE: intentionally NOT calling g.dispose() — Swing owns this Graphics.
	}
	/**
	 * Pick the color for series/legend entry {@code index}.
	 *
	 * <p>Also fixes an {@code ArrayIndexOutOfBoundsException} the old code had:
	 * the legend loop indexed {@code PRIME_COLORS[i]} for every legend entry
	 * whenever {@code values.length < PRIME_COLORS.length}, crashing when the
	 * key had more entries than the palette. The palette is now only indexed
	 * when the index is in range; otherwise a color is drawn from {@code gen}.
	 */
	private Color seriesColor(int index, Random gen){
		if(values.length<PRIME_COLORS.length && index<PRIME_COLORS.length) return PRIME_COLORS[index];
		return new Color(gen.nextFloat(), gen.nextFloat(), gen.nextFloat());
	}
	/** Widest row label, used to size the left margin. */
	private int calculateRowNameBuffer(FontMetrics fm){
		int l = 0;
		int c;
		for(String s : rowNames)if((c=fm.stringWidth(s))>l)l=c;
		return l;
	}
	/** Set the row (y-axis) labels and repaint. */
	public void setRowNames(String[] rowNames){
		this.rowNames=rowNames;
		repaint();
	}
	/** Set the column (x-axis) labels and repaint. */
	public void setColNames(String[] colNames){
		this.colNames=colNames;
		repaint();
	}
	/** Set the legend entries and repaint. */
	public void setKey(String[] key){
		this.key=key;
		repaint();
	}
	/** Widest legend label plus swatch space, used to size the right margin. */
	private int calculateKeyBuffer(FontMetrics fm){
		int l = 0;
		int c;
		for(String s : key)if((c=fm.stringWidth(s))>l)l=c;
		return l+10+PIXEL_TEXT_BUFFER/2;
	}
}
package org.aksw.kbox.kibe;

import java.net.MalformedURLException;
import java.net.URL;

import org.aksw.kbox.kns.CustomKNSServerList;
import org.aksw.kbox.kns.KNSServerListVisitor;

/**
 * KNS server list that visits the built-in default KNS table first and,
 * if the visitor asks to continue, the user's custom KNS servers afterwards.
 */
public class DefaultKNSServerList extends URLKNSServerList {
	
	// Default KNS table URL
	// NOTE(review): this points at a personal fork on a dev branch
	// ("sahandilshan/KBox/dev") — confirm this is the intended upstream for releases.
	private final static String DEFAULT_KNS_TABLE_URL = "https://raw.githubusercontent.com/sahandilshan/KBox/dev/kns/2.0/";

	private CustomKNSServerList customKNSServerList = new CustomKNSServerList();
	
	public DefaultKNSServerList() throws MalformedURLException {
		super(new URL(DEFAULT_KNS_TABLE_URL));
	}
	
	@Override
	public boolean visit(KNSServerListVisitor visitor) throws Exception {
		// Only fall through to the custom servers when the default list
		// was fully visited and the visitor wants to keep going.
		boolean keepGoing = super.visit(visitor);
		return keepGoing ? customKNSServerList.visit(visitor) : false;
	}
}
package aimax.osm.viewer.swing;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.RenderingHints;

import aimax.osm.viewer.UColor;
import aimax.osm.viewer.UnifiedImageBuilder;

/**
 * Specialized image builder for AWT images.
 *
 * <p>All drawing primitives delegate to a {@link Graphics2D} obtained from the
 * image supplied to {@link #initImage(Image)}. Rectangles, ovals, and polygons
 * are filled instead of outlined while area-fill mode is on.
 *
 * @author Ruediger Lunde
 */
public class AWTImageBuilder implements UnifiedImageBuilder {

	private Image result;
	private Graphics2D g2;
	boolean areaFillMode;

	/** Targets drawing at the given image and enables antialiasing. */
	public void initImage(Image image) {
		result = image;
		g2 = (Graphics2D) image.getGraphics();
		g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
				RenderingHints.VALUE_ANTIALIAS_ON);
	}

	/** Returns the width of the image under construction. */
	@Override
	public int getWidth() {
		return result.getWidth(null);
	}

	/** Returns the height of the image under construction. */
	@Override
	public int getHeight() {
		return result.getHeight(null);
	}

	@Override
	public void drawLine(int x1, int y1, int x2, int y2) {
		g2.drawLine(x1, y1, x2, y2);
	}

	@Override
	public void drawRect(int x, int y, int width, int height) {
		if (areaFillMode) {
			g2.fillRect(x, y, width, height);
		} else {
			g2.drawRect(x, y, width, height);
		}
	}

	@Override
	public void drawOval(int x, int y, int width, int height) {
		if (areaFillMode) {
			g2.fillOval(x, y, width, height);
		} else {
			g2.drawOval(x, y, width, height);
		}
	}

	@Override
	public void drawPolyline(int[] xPoints, int[] yPoints, int nPoints) {
		g2.drawPolyline(xPoints, yPoints, nPoints);
	}

	@Override
	public void drawPolygon(int[] xPoints, int[] yPoints, int nPoints) {
		if (areaFillMode) {
			g2.fillPolygon(xPoints, yPoints, nPoints);
		} else {
			g2.drawPolygon(xPoints, yPoints, nPoints);
		}
	}

	@Override
	public void drawString(String text, int x, int y) {
		g2.drawString(text, x, y);
	}

	/** Returns the image being built. */
	@Override
	public Image getResult() {
		return result;
	}

	/** Converts the unified color into an AWT color and installs it. */
	@Override
	public void setColor(UColor color) {
		g2.setColor(new Color(color.getRed(), color.getGreen(),
				color.getBlue(), color.getAlpha()));
	}

	/** Installs a round-capped stroke; dashed strokes use a dash twice the width. */
	@Override
	public void setLineStyle(boolean dashed, float width) {
		float[] dashPattern = dashed ? new float[] { width * 2f } : null;
		g2.setStroke(new BasicStroke(width, BasicStroke.CAP_ROUND,
				BasicStroke.JOIN_ROUND, 10.0f, dashPattern, 0.0f));
	}

	@Override
	public void setAreaFilled(boolean value) {
		areaFillMode = value;
	}

	@Override
	public void setFontSize(float size) {
		g2.setFont(g2.getFont().deriveFont(size));
	}

	@Override
	public float getFontSize() {
		return g2.getFont().getSize();
	}
}
package com.abstractthis.consoul;

//of this software and associated documentation files (the "Software"), to deal
//in the Software without restriction, including without limitation the rights
//copies of the Software, and to permit persons to whom the Software is
//furnished to do so, subject to the following conditions:

//all copies or substantial portions of the Software.

//IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
//THE SOFTWARE.

import java.util.concurrent.atomic.AtomicReference;

/**
 * Thread-safe holder for a single, mutable context value.
 *
 * <p>An "empty" variable holds {@code null}. All access is funneled through an
 * {@link AtomicReference}, so reads and writes are atomic and visible across
 * threads.
 *
 * @param <T> the type of the held value
 */
public class ContextVariable<T> {
	// Backing reference; the reference itself is never null — an empty
	// variable simply holds null. Made final: it is never reassigned.
	private final AtomicReference<T> varRef;

	/** Creates an empty variable (content is {@code null}). */
	public ContextVariable() {
		this.varRef = new AtomicReference<T>();
	}

	/**
	 * Creates a variable initialized to the given content.
	 *
	 * <p>Initializes the field directly rather than calling the overridable
	 * {@link #setContent(Object)} — calling an overridable method from a
	 * constructor would let a subclass override observe a partially
	 * constructed instance.
	 *
	 * @param t the initial content (may be {@code null})
	 */
	public ContextVariable(T t) {
		this.varRef = new AtomicReference<T>(t);
	}

	/** Empties the variable (content becomes {@code null}). */
	public void clear() {
		varRef.set(null);
	}

	/** @return the current content, or {@code null} if empty */
	public T getContent() {
		return varRef.get();
	}

	/**
	 * Replaces the current content.
	 *
	 * @param t the new content (may be {@code null} to empty the variable)
	 */
	public void setContent(T t) {
		varRef.set(t);
	}
}
package org.akvo.rsr.android.service;

import java.net.URL;

import org.akvo.rsr.android.domain.User;
import org.akvo.rsr.android.util.ConstantUtil;
import org.akvo.rsr.android.util.SettingsUtil;
import org.akvo.rsr.android.xml.Downloader;

import android.app.IntentService;
import android.content.Intent;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;

/**
 * Background service that attempts to sign a user in against the configured
 * RSR host and broadcasts the result locally.
 *
 * <p>The result intent carries {@code SERVICE_ERRMSG_KEY} only on failure;
 * receivers presumably treat the absence of that extra as success — confirm
 * against the broadcast receiver.
 */
public class SignInService extends IntentService {
	private static final String TAG = "SignInService";

	public SignInService() {
		super(TAG);
	}

	/**
	 * Handles one sign-in request.
	 *
	 * <p>Reads username/password extras from the triggering intent, attempts
	 * authorization against the server, persists the signed-in user on
	 * success (or signs out on a rejected login), and always broadcasts a
	 * completion intent — with an error-message extra iff sign-in failed.
	 */
	@Override
	protected void onHandleIntent(Intent intent) {
		String username = intent.getStringExtra(ConstantUtil.USERNAME_KEY);
		String password = intent.getStringExtra(ConstantUtil.PASSWORD_KEY);
		// Result intent; stays extra-free on the success path
		Intent i2 = new Intent(ConstantUtil.AUTHORIZATION_RESULT_ACTION);
		Downloader dl = new Downloader();
		User user = new User();   // populated by authorize() on success
		try {
			if (dl.authorize(new URL(SettingsUtil.host(this) + ConstantUtil.AUTH_URL), username, password, user)) {
				//Yes!
				SettingsUtil.signIn(this,user);
				//TODO get project list from API and set other projects invisible
				//dl.enableAuthorizedProjects(this, new URL(SettingsUtil.host(this) + String.format(ConstantUtil.FETCH_PROJ_URL_PATTERN, SettingsUtil.Read(this, "authorized_orgid"))));
			} else {
				// Server rejected the credentials: report and clear any stale session
				i2.putExtra(ConstantUtil.SERVICE_ERRMSG_KEY, "Wrong password and/or username");
				SettingsUtil.signOut(this);
			}
		} catch (Exception e) {
			// Network/parse/URL failures all land here; report but do not sign out
			i2.putExtra(ConstantUtil.SERVICE_ERRMSG_KEY, "Unable to authorize: " + e.getMessage());
			Log.e(TAG,"SignIn() error:",e);
		}
		//broadcast completion
		LocalBroadcastManager.getInstance(this).sendBroadcast(i2);
	}
}
package invtweaks.forge.asm;

import cpw.mods.fml.common.asm.transformers.deobf.FMLDeobfuscatingRemapper;
import cpw.mods.fml.relauncher.FMLRelaunchLog;
import cpw.mods.fml.relauncher.IClassTransformer;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.tree.*;

import java.util.HashMap;
import java.util.Map;

/**
 * FML class transformer that injects InvTweaks helper methods
 * ({@code invtweaks$*}) into Minecraft container classes at load time,
 * using the ASM tree API.
 */
public class ContainerTransformer implements IClassTransformer {
    // Names of the synthetic methods injected into container classes
    public static final String VALID_INVENTORY_METHOD = "invtweaks$validInventory";
    public static final String VALID_CHEST_METHOD = "invtweaks$validChest";
    public static final String STANDARD_INVENTORY_METHOD = "invtweaks$standardInventory";
    public static final String ROW_SIZE_METHOD = "invtweaks$rowSize";
    public static final String SLOT_MAP_METHOD = "invtweaks$slotMap";
    public static final String CONTAINER_CLASS_INTERNAL = "net/minecraft/inventory/Container";
    public static final String SLOT_MAPS_VANILLA_CLASS = "invtweaks/containers/VanillaSlotMaps";

    // NOTE(review): static map populated from the instance constructor — if FML ever
    // creates more than one transformer instance, entries are re-put (harmlessly
    // overwriting, but worth confirming the single-instance assumption).
    private static Map<String, ContainerInfo> standardClasses = new HashMap<String, ContainerInfo>();
    private String containerClassName;

    /** Registers per-class transformation info for the known vanilla containers. */
    public ContainerTransformer() {
        // TODO: ContainerCreative handling
        // Standard non-chest type
        standardClasses.put("net.minecraft.inventory.ContainerPlayer",
                new ContainerInfo(true, true, false, getVanillaSlotMapInfo("containerPlayerSlots")));
        standardClasses.put("net.minecraft.inventory.ContainerMerchant", new ContainerInfo(true, true, false));
        standardClasses.put("net.minecraft.inventory.ContainerRepair",
                new ContainerInfo(true, true, false, getVanillaSlotMapInfo("containerPlayerSlots")));
        standardClasses.put("net.minecraft.inventory.ContainerHopper", new ContainerInfo(true, true, false));
        standardClasses.put("net.minecraft.inventory.ContainerBeacon", new ContainerInfo(true, true, false));
        standardClasses.put("net.minecraft.inventory.ContainerBrewingStand",
                new ContainerInfo(true, true, false, getVanillaSlotMapInfo("containerBrewingSlots")));
        standardClasses.put("net.minecraft.inventory.ContainerWorkbench",
                new ContainerInfo(true, true, false, getVanillaSlotMapInfo("containerWorkbenchSlots")));
        standardClasses.put("net.minecraft.inventory.ContainerEnchantment",
                new ContainerInfo(true, true, false, getVanillaSlotMapInfo("containerEnchantmentSlots")));
        standardClasses.put("net.minecraft.inventory.ContainerFurnace",
                new ContainerInfo(true, true, false, getVanillaSlotMapInfo("containerFurnaceSlots")));

        // Chest-type
        standardClasses.put("net.minecraft.inventory.ContainerDispenser",
                new ContainerInfo(false, false, true, (short) 3, getVanillaSlotMapInfo("containerChestDispenserSlots")));
        standardClasses.put("net.minecraft.inventory.ContainerChest",
                new ContainerInfo(false, false, true, getVanillaSlotMapInfo("containerChestDispenserSlots")));
    }

    /**
     * Entry point called by FML for every loaded class; returns (possibly
     * rewritten) bytecode.
     *
     * @param name obfuscated/runtime class name
     * @param transformedName deobfuscated class name
     * @param bytes original class bytes
     * @return transformed class bytes, or {@code bytes} unchanged
     */
    @Override
    public byte[] transform(String name, String transformedName, byte[] bytes) {
        // Resolve the (possibly obfuscated) runtime name of Container once, lazily
        if(containerClassName == null) {
            if(FMLPlugin.runtimeDeobfEnabled) {
                containerClassName = FMLDeobfuscatingRemapper.INSTANCE.unmap(CONTAINER_CLASS_INTERNAL);
            } else {
                containerClassName = CONTAINER_CLASS_INTERNAL;
            }
        }

        // NOTE(review): this logs at INFO for EVERY class loaded by the game —
        // looks like debug leftover; consider removing or demoting.
        FMLRelaunchLog.info(String.format("%s = %s", name, transformedName));
        if("net.minecraft.inventory.Container".equals(transformedName)) {
            ClassReader cr = new ClassReader(bytes);
            ClassNode cn = new ClassNode(Opcodes.ASM4);
            cr.accept(cn, 0);

            transformBaseContainer(cn);

            ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
            cn.accept(cw);
            return cw.toByteArray();
        }

        // Transform classes with explicitly specified information
        ContainerInfo info = standardClasses.get(transformedName);
        if(info != null) {
            ClassReader cr = new ClassReader(bytes);
            ClassNode cn = new ClassNode(Opcodes.ASM4);
            cr.accept(cn, 0);

            transformContainer(cn, info);

            ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
            cn.accept(cw);
            return cw.toByteArray();
        }

        // Redirect InvTweaks's own reflection helpers to the injected invtweaks$* methods
        if("invtweaks.InvTweaksObfuscation".equals(transformedName)) {
            ClassReader cr = new ClassReader(bytes);
            ClassNode cn = new ClassNode(Opcodes.ASM4);
            cr.accept(cn, 0);

            Type containertype = Type.getObjectType(containerClassName);
            for(MethodNode method : cn.methods) {
                if("isValidChest".equals(method.name)) {
                    replaceSelfForwardingMethod(method, VALID_CHEST_METHOD, containertype);
                } else if("isValidInventory".equals(method.name)) {
                    replaceSelfForwardingMethod(method, VALID_INVENTORY_METHOD, containertype);
                } else if("isStandardInventory".equals(method.name)) {
                    replaceSelfForwardingMethod(method, STANDARD_INVENTORY_METHOD, containertype);
                } else if("getSpecialChestRowSize".equals(method.name)) {
                    replaceSelfForwardingMethod(method, ROW_SIZE_METHOD, containertype);
                } else if("getContainerSlotMap".equals(method.name)) {
                    replaceSelfForwardingMethod(method, SLOT_MAP_METHOD, containertype);
                }
            }

            ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
            cn.accept(cw);
            return cw.toByteArray();
        }
        return bytes;
    }

    /**
     * Alter class to contain information contained by ContainerInfo
     *
     * @param clazz Class to alter
     * @param info  Information used to alter class
     */
    public static void transformContainer(ClassNode clazz, ContainerInfo info) {
        generateBooleanMethodConst(clazz, STANDARD_INVENTORY_METHOD, info.standardInventory);
        generateBooleanMethodConst(clazz, VALID_INVENTORY_METHOD, info.validInventory);
        generateBooleanMethodConst(clazz, VALID_CHEST_METHOD, info.validChest);
        generateIntegerMethodConst(clazz, ROW_SIZE_METHOD, info.rowSize);
        if(info.slotMapMethod.isStatic) {
            generateForwardingToStaticMethod(clazz, SLOT_MAP_METHOD, info.slotMapMethod.methodName,
                                             info.slotMapMethod.methodType.getReturnType(), info.slotMapMethod.methodClass,
                                             info.slotMapMethod.methodType.getArgumentTypes()[0]);
        } else {
            generateSelfForwardingMethod(clazz, SLOT_MAP_METHOD, info.slotMapMethod.methodName,
                                         info.slotMapMethod.methodType);
        }
    }

    /**
     * Alter class to contain default implementations of added methods.
     *
     * @param clazz Class to alter
     */
    public static void transformBaseContainer(ClassNode clazz) {
        generateBooleanMethodConst(clazz, STANDARD_INVENTORY_METHOD, false);
        generateDefaultInventoryCheck(clazz);
        generateBooleanMethodConst(clazz, VALID_CHEST_METHOD, false);
        generateIntegerMethodConst(clazz, ROW_SIZE_METHOD, (short) 9);
        generateForwardingToStaticMethod(clazz, SLOT_MAP_METHOD, "unknownContainerSlots",
                                         Type.getObjectType("java/util/Map"),
                                         Type.getObjectType(SLOT_MAPS_VANILLA_CLASS));
    }

    /**
     * Generate a new method "boolean invtweaks$validInventory()", returning true if the size of the container is large
     * enough to hold the player inventory.
     *
     * @param clazz Class to add method to
     */
    public static void generateDefaultInventoryCheck(ClassNode clazz) {
        MethodNode method = new MethodNode(Opcodes.ASM4, Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                                           VALID_INVENTORY_METHOD, "()Z", null, null);
        InsnList code = method.instructions;

        // Emits: return this.inventorySlots.size() > 36;
        // ("field_75151_b" is the SRG name of Container's slot list — TODO confirm against current mappings)
        code.add(new VarInsnNode(Opcodes.ALOAD, 0));
        code.add(new FieldInsnNode(Opcodes.GETFIELD, clazz.name, "field_75151_b", "Ljava/util/List;"));
        code.add(new MethodInsnNode(Opcodes.INVOKEINTERFACE, "java/util/List", "size", "()I"));
        code.add(new IntInsnNode(Opcodes.BIPUSH, 36));  // TODO: Load Static InvTweaksConst.INVENTORY_SIZE
        LabelNode l1 = new LabelNode();
        code.add(new JumpInsnNode(Opcodes.IF_ICMPLE, l1));
        code.add(new InsnNode(Opcodes.ICONST_1));
        LabelNode l2 = new LabelNode();
        code.add(new JumpInsnNode(Opcodes.GOTO, l2));
        code.add(l1);
        code.add(new InsnNode(Opcodes.ICONST_0));
        code.add(l2);
        code.add(new InsnNode(Opcodes.IRETURN));

        clazz.methods.add(method);
    }

    /**
     * Generate a new method "boolean name()", returning a constant value
     *
     * @param clazz  Class to add method to
     * @param name   Name of method
     * @param retval Return value of method
     */
    public static void generateBooleanMethodConst(ClassNode clazz, String name, boolean retval) {
        MethodNode method = new MethodNode(Opcodes.ASM4, Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                                           name, "()Z", null, null);
        InsnList code = method.instructions;

        code.add(new InsnNode(retval ? Opcodes.ICONST_1 : Opcodes.ICONST_0));
        code.add(new InsnNode(Opcodes.IRETURN));

        clazz.methods.add(method);
    }

    /**
     * Generate a new method "int name()", returning a constant value
     *
     * @param clazz  Class to add method to
     * @param name   Name of method
     * @param retval Return value of method
     */
    public static void generateIntegerMethodConst(ClassNode clazz, String name, short retval) {
        MethodNode method = new MethodNode(Opcodes.ASM4, Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                                           name, "()I", null, null);
        InsnList code = method.instructions;

        // Probably doesn't make a huge difference, but use BIPUSH if the value is small enough.
        if(retval >= Byte.MIN_VALUE && retval <= Byte.MAX_VALUE) {
            code.add(new IntInsnNode(Opcodes.BIPUSH, retval));
        } else {
            code.add(new IntInsnNode(Opcodes.SIPUSH, retval));
        }
        code.add(new InsnNode(Opcodes.IRETURN));

        clazz.methods.add(method);
    }

    /**
     * Generate a forwarding method of the form "T name() { return this.forward(); }
     *
     * @param clazz       Class to generate new method on
     * @param name        Name of method to generate
     * @param forwardname Name of method to call
     * @param rettype     Return type of method
     */
    public static void generateSelfForwardingMethod(ClassNode clazz, String name, String forwardname, Type rettype) {
        MethodNode method = new MethodNode(Opcodes.ASM4, Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                                           name, "()" + rettype.getDescriptor(), null, null);

        populateSelfForwardingMethod(method, forwardname, rettype, Type.getObjectType(clazz.name));

        clazz.methods.add(method);
    }

    /**
     * Generate a forwarding method of the form "static T name(S object) { return object.forward(); }
     *
     * <p>NOTE(review): the generated descriptor is "()" + rettype — it takes NO argument even though
     * the javadoc and the {@code argtype} parameter (which is passed as the receiver type to
     * {@code populateSelfForwardingMethod}) imply a one-argument static method. A static method whose
     * body loads slot 0 but whose descriptor declares no parameters would fail verification —
     * suspected bug; appears unused from the visible code. Confirm before relying on it.
     *
     * @param clazz       Class to generate new method on
     * @param name        Name of method to generate
     * @param forwardname Name of method to call
     * @param rettype     Return type of method
     * @param argtype     Type of the argument to forward on
     */
    public static void generateStaticForwardingMethod(ClassNode clazz, String name, String forwardname, Type rettype,
                                                      Type argtype) {
        MethodNode method = new MethodNode(Opcodes.ASM4,
                                           Opcodes.ACC_STATIC | Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                                           name, "()" + rettype.getDescriptor(), null, null);

        populateSelfForwardingMethod(method, forwardname, rettype, argtype);

        clazz.methods.add(method);
    }

    /**
     * Generate a forwarding method of the form "T name() { return Class.forward(this); }
     *
     * @param clazz       Class to generate new method on
     * @param name        Name of method to generate
     * @param forwardname Name of method to call
     * @param rettype     Return type of method
     * @param fowardtype  Class owning the static method to call
     */
    public static void generateForwardingToStaticMethod(ClassNode clazz, String name, String forwardname, Type rettype,
                                                        Type fowardtype) {
        MethodNode method = new MethodNode(Opcodes.ASM4, Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                                           name, "()" + rettype.getDescriptor(), null, null);

        populateForwardingToStaticMethod(method, forwardname, rettype, Type.getObjectType(clazz.name), fowardtype);

        clazz.methods.add(method);
    }

    /**
     * Generate a forwarding method of the form "T name() { return Class.forward(this); }
     *
     * @param clazz       Class to generate new method on
     * @param name        Name of method to generate
     * @param forwardname Name of method to call
     * @param rettype     Return type of method
     * @param fowardtype  Class owning the static method to call
     * @param thistype    Type to treat 'this' as for overload searching purposes
     */
    public static void generateForwardingToStaticMethod(ClassNode clazz, String name, String forwardname, Type rettype,
                                                        Type fowardtype, Type thistype) {
        MethodNode method = new MethodNode(Opcodes.ASM4, Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                                           name, "()" + rettype.getDescriptor(), null, null);

        populateForwardingToStaticMethod(method, forwardname, rettype, thistype, fowardtype);

        clazz.methods.add(method);
    }

    /**
     * Replace a method's code with a forward to another method on itself (or the first argument of a static method as
     * the argument takes the place of this)
     *
     * @param method      Method to replace code of
     * @param forwardname Name of method to forward to
     * @param thistype    Type of object method is being replaced on
     */
    public static void replaceSelfForwardingMethod(MethodNode method, String forwardname, Type thistype) {
        Type methodType = Type.getMethodType(method.desc);

        method.instructions.clear();

        populateSelfForwardingMethod(method, forwardname, methodType.getReturnType(), thistype);
    }

    /**
     * Generate a forwarding method of the form "T name(S object) { return object.forward(); }
     *
     * @param clazz       Class to generate new method on
     * @param name        Name of method to generate
     * @param forwardname Name of method to call
     * @param rettype     Return type of method
     * @param argtype     Type of object to call method on
     */
    public static void generateForwardingMethod(ClassNode clazz, String name, String forwardname, Type rettype,
                                                Type argtype) {
        MethodNode method = new MethodNode(Opcodes.ASM4, Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC,
                                           name, "()" + rettype.getDescriptor(), null, null);

        populateForwardingMethod(method, forwardname, rettype, argtype, Type.getObjectType(clazz.name));

        clazz.methods.add(method);
    }

    /**
     * Replace a method's code with a forward to an method on its first argument
     *
     * @param method      Method to replace code of
     * @param forwardname Name of method to forward to
     * @param thistype    Type of object method is being replaced on
     */
    public static void replaceForwardingMethod(MethodNode method, String forwardname, Type thistype) {
        Type methodType = Type.getMethodType(method.desc);

        method.instructions.clear();

        populateForwardingMethod(method, forwardname, methodType.getReturnType(), methodType.getArgumentTypes()[0],
                                 thistype);
    }

    /**
     * Populate a forwarding method of the form "T name() { return Class.forward(this); }"
     *
     * @param method      Method to generate code for
     * @param forwardname Name of method to call
     * @param rettype     Return type of method
     * @param thistype    Type of object method is being generated on
     * @param forwardtype Type to forward method to
     */
    public static void populateForwardingToStaticMethod(MethodNode method, String forwardname, Type rettype,
                                                        Type thistype, Type forwardtype) {
        InsnList code = method.instructions;

        code.add(new VarInsnNode(thistype.getOpcode(Opcodes.ILOAD), 0));
        code.add(new MethodInsnNode(Opcodes.INVOKESTATIC, forwardtype.getInternalName(), forwardname,
                                    Type.getMethodDescriptor(rettype, thistype)));
        code.add(new InsnNode(rettype.getOpcode(Opcodes.IRETURN)));
    }

    /**
     * Populate a forwarding method of the form "T name() { return this.forward(); }" This is also valid for methods of
     * the form "static T name(S object) { return object.forward() }"
     *
     * @param method      Method to generate code for
     * @param forwardname Name of method to call
     * @param rettype     Return type of method
     * @param thistype    Type of object method is being generated on
     */
    public static void populateSelfForwardingMethod(MethodNode method, String forwardname, Type rettype,
                                                    Type thistype) {
        InsnList code = method.instructions;

        code.add(new VarInsnNode(thistype.getOpcode(Opcodes.ILOAD), 0));
        code.add(new MethodInsnNode(Opcodes.INVOKEVIRTUAL, thistype.getInternalName(), forwardname,
                                    "()" + rettype.getDescriptor()));
        code.add(new InsnNode(rettype.getOpcode(Opcodes.IRETURN)));
    }

    /**
     * Populate a forwarding method of the form "T name(S object) { return object.forward(); }"
     *
     * @param method      Method to generate code for
     * @param forwardname Name of method to call
     * @param rettype     Return type of method
     * @param argtype     Type of object to call method on
     * @param thistype    Type of object method is being generated on
     */
    public static void populateForwardingMethod(MethodNode method, String forwardname, Type rettype, Type argtype,
                                                Type thistype) {
        InsnList code = method.instructions;

        code.add(new VarInsnNode(argtype.getOpcode(Opcodes.ILOAD), 1));
        code.add(new MethodInsnNode(Opcodes.INVOKEVIRTUAL, argtype.getInternalName(), forwardname,
                                    "()" + rettype.getDescriptor()));
        code.add(new InsnNode(rettype.getOpcode(Opcodes.IRETURN)));
    }

    private MethodInfo getVanillaSlotMapInfo(String name) { return
getSlotMapInfo(Type.getObjectType(SLOT_MAPS_VANILLA_CLASS), name, true); } private MethodInfo getSlotMapInfo(Type mClass, String name, boolean isStatic) { return new MethodInfo(Type.getMethodType( Type.getObjectType("java/util/Map"), Type.getObjectType(containerClassName)), mClass, name, true); } class MethodInfo { Type methodType; Type methodClass; String methodName; boolean isStatic = false; MethodInfo(Type mType, Type mClass, String name) { methodType = mType; methodClass = mClass; methodName = name; } MethodInfo(Type mType, Type mClass, String name, boolean stat) { methodType = mType; methodClass = mClass; methodName = name; isStatic = stat; } } class ContainerInfo { boolean standardInventory = false; boolean validInventory = false; boolean validChest = false; short rowSize = 9; MethodInfo slotMapMethod = getVanillaSlotMapInfo("unknownContainerSlots"); ContainerInfo() { } ContainerInfo(boolean standard, boolean validInv, boolean validCh) { standardInventory = standard; validInventory = validInv; validChest = validCh; } ContainerInfo(boolean standard, boolean validInv, boolean validCh, MethodInfo slotMap) { standardInventory = standard; validInventory = validInv; validChest = validCh; slotMapMethod = slotMap; } ContainerInfo(boolean standard, boolean validInv, boolean validCh, short rowS) { standardInventory = standard; validInventory = validInv; validChest = validCh; rowSize = rowS; } ContainerInfo(boolean standard, boolean validInv, boolean validCh, short rowS, MethodInfo slotMap) { standardInventory = standard; validInventory = validInv; validChest = validCh; rowSize = rowS; slotMapMethod = slotMap; } } }
package annis.gui.exporter;

import annis.CommonHelper;
import annis.libgui.Helper;
import annis.model.AnnisConstants;
import annis.model.Annotation;
import annis.model.RelannisNodeFeature;
import java.io.IOException;
import java.io.Writer;
import java.util.List;
import java.util.Map;
import annis.service.objects.SubgraphFilter;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import net.xeoh.plugins.base.annotations.PluginImplementation;
import org.apache.commons.lang3.StringUtils;
import org.corpus_tools.salt.common.SDocumentGraph;
import org.corpus_tools.salt.core.SAnnotation;
import org.corpus_tools.salt.core.SNode;

/**
 * CSV exporter that also handles corpora with multiple tokenizations.
 *
 * @author barteld
 */
@PluginImplementation
public class CSVMultiTokExporter extends SaltBasedExporter
{
  @Override
  public String getHelpMessage()
  {
    return "The CSV MultiTok Exporter exports only the "
      + "values of the elements searched for by the user, ignoring the context "
      + "around search results. The values for all annotations of each of the "
      + "found nodes is given in a comma-separated table (CSV). <br/><br/>"
      + "This exporter will take more time than the normal CSV Exporter "
      + "but it is able to export the underlying text for spans "
      + "if the corpus contains multiple tokenizations. <br/><br/>"
      + "Parameters: <br/>"
      + "<em>metakeys</em> - comma seperated list of all meta data to include in the result (e.g. "
      + "<code>metakeys=title,documentname</code>)";
  }

  @Override
  public SubgraphFilter getSubgraphFilter()
  {
    return SubgraphFilter.all;
  }

  @Override
  public String getFileEnding()
  {
    return "csv";
  }

  // Meta-data keys requested by the user (parsed from the "metakeys" parameter).
  private Set<String> metakeys;

  // For every matched node index: the sorted set of "namespace::name" annotation
  // keys seen across all matches; determines the CSV columns.
  private SortedMap<Integer, TreeSet<String>> annotationsForMatchedNodes;

  /**
   * First pass over each match: collects the meta keys (on the first match) and
   * the set of annotation names per matched node so that the header can be built.
   */
  @Override
  public void createAdjacencyMatrix(SDocumentGraph graph, Map<String, String> args,
    int matchNumber, int nodeCount) throws IOException, IllegalArgumentException
  {
    // first match
    if (matchNumber == 0)
    {
      // get list of metakeys to export
      metakeys = new HashSet<>();
      if (args.containsKey("metakeys"))
      {
        metakeys.addAll(Arrays.asList(args.get("metakeys").split(",")));
      }
      // initialize list of annotations for the matched nodes
      annotationsForMatchedNodes = new TreeMap<>();
    }
    // get list of annotations for the nodes in the current match
    Set<String> matchIDs = new HashSet<>(Arrays.asList(graph
      .getFeature(AnnisConstants.ANNIS_NS, AnnisConstants.FEAT_MATCHEDIDS)
      .getValue_STEXT().split(",")));
    for (String matchID : matchIDs)
    {
      matchID = URLDecoder.decode(matchID, "UTF-8");
      SNode node = graph.getNode(matchID);
      int node_id = node
        .getFeature(AnnisConstants.ANNIS_NS, AnnisConstants.FEAT_MATCHEDNODE)
        .getValue_SNUMERIC().intValue();
      if (!annotationsForMatchedNodes.containsKey(node_id))
      {
        annotationsForMatchedNodes.put(node_id, new TreeSet<String>());
      }
      Set<String> annoNames = annotationsForMatchedNodes.get(node_id);
      for (SAnnotation annot : new ArrayList<>(node.getAnnotations()))
      {
        annoNames.add(annot.getNamespace() + "::" + annot.getName());
      }
    }
  }

  /**
   * Second pass: writes the header (on the first match) and one tab-separated
   * row of id/span/annotation columns per matched node, followed by the
   * requested meta-data columns.
   */
  @Override
  public void outputText(SDocumentGraph graph, boolean alignmc, int matchNumber, Writer out)
    throws IOException, IllegalArgumentException
  {
    // first match: output header
    if (matchNumber == 0)
    {
      List<String> headerLine = new ArrayList<>();
      for (Map.Entry<Integer, TreeSet<String>> match : annotationsForMatchedNodes.entrySet())
      {
        int node_id = match.getKey();
        headerLine.add(String.valueOf(node_id) + "_id");
        headerLine.add(String.valueOf(node_id) + "_span");
        for (String annoName : match.getValue())
        {
          headerLine.add(String.valueOf(node_id) + "_anno_" + annoName);
        }
      }
      for (String key : metakeys)
      {
        headerLine.add("meta_" + key);
      }
      out.append(StringUtils.join(headerLine, "\t"));
      out.append("\n");
    }

    // output nodes in the order of the matches
    SortedMap<Integer, String> contentLine = new TreeMap<>();
    Set<String> matchIDs = new HashSet<>(Arrays.asList(graph
      .getFeature(AnnisConstants.ANNIS_NS, AnnisConstants.FEAT_MATCHEDIDS)
      .getValue_STEXT().split(",")));
    for (String matchID : matchIDs)
    {
      matchID = URLDecoder.decode(matchID, "UTF-8");
      List<String> nodeLine = new ArrayList<>();
      SNode node = graph.getNode(matchID);
      // export id
      RelannisNodeFeature feats = RelannisNodeFeature.extract(node);
      nodeLine.add(String.valueOf(feats.getInternalID()));
      // export spanned text
      String span = graph.getText(node);
      nodeLine.add(span != null ? span : "");
      // export annotations
      int node_id = node
        .getFeature(AnnisConstants.ANNIS_NS, AnnisConstants.FEAT_MATCHEDNODE)
        .getValue_SNUMERIC().intValue();
      for (String annoName : annotationsForMatchedNodes.get(node_id))
      {
        SAnnotation anno = node.getAnnotation(annoName);
        nodeLine.add(anno != null ? anno.getValue_STEXT() : "'NULL'");
      }
      // BUGFIX: register the line once per node, after all annotation columns
      // have been collected; previously this happened inside the annotation
      // loop, so nodes without any annotation were silently dropped.
      contentLine.put(node_id, StringUtils.join(nodeLine, "\t"));
    }
    out.append(StringUtils.join(contentLine.values(), "\t"));

    // export metadata
    // TODO cache the metadata
    if (!metakeys.isEmpty())
    {
      // TODO is this the best way to get the corpus name?
      String corpus_name = CommonHelper.getCorpusPath(
        java.net.URI.create(graph.getDocument().getId())).get(0);
      List<Annotation> asList = Helper.getMetaData(corpus_name, graph.getDocument().getName());
      // BUGFIX: emit meta columns in the same order as the header (iteration
      // order of 'metakeys') and emit an empty column for missing keys, so
      // rows stay aligned with the header. Previously values were appended in
      // annotation-list order and absent keys produced no column at all.
      Map<String, String> metaValues = new TreeMap<>();
      for (Annotation anno : asList)
      {
        if (metakeys.contains(anno.getName()))
        {
          metaValues.put(anno.getName(), anno.getValue());
        }
      }
      for (String key : metakeys)
      {
        out.append("\t" + metaValues.getOrDefault(key, ""));
      }
    }
    out.append("\n");
  }

  @Override
  public void getOrderedMatchNumbers()
  {
    // TODO
  }

  @Override
  public boolean isAlignable()
  {
    return false;
  }
}
package com.austinv11.etf.util.parsing; import com.austinv11.etf.erlang.*; import com.austinv11.etf.util.BertCompatible; import com.austinv11.etf.util.ETFConstants; import com.austinv11.etf.util.ETFException; import java.io.UnsupportedEncodingException; import java.math.BigInteger; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.*; import java.util.zip.DataFormatException; import java.util.zip.Inflater; import static com.austinv11.etf.common.TermTypes.*; /** * This represents a utility parser for parsing data from an etf object. */ public class ETFParser { private final byte[] data; private int offset = 0; private final int expectedVersion; private final boolean bert; public ETFParser(byte[] data) { this(data, ETFConstants.VERSION); } public ETFParser(byte[] data, ETFParser parent) { this(data, parent.expectedVersion, parent.bert, true); } public ETFParser(byte[] data, boolean bert) { this(data, ETFConstants.VERSION, bert, false); } public ETFParser(byte[] data, int expectedVersion) { this(data, expectedVersion, false, false); } public ETFParser(byte[] data, int expectedVersion, boolean bert, boolean partial) { this.expectedVersion = expectedVersion; this.bert = bert; int initialOffset = 0; if (Byte.toUnsignedInt(data[initialOffset]) == expectedVersion) //Skip the version number initialOffset++; if (!partial) { if (data[initialOffset] != HEADER) throw new ETFException("Missing header! 
Is this data malformed?"); initialOffset++; int uncompressedSize = wrap(data, initialOffset, (initialOffset += 4)).getInt(); byte[] inflatedData = new byte[uncompressedSize]; Inflater inflater = new Inflater(); inflater.setInput(Arrays.copyOfRange(data, offset, data.length)); try { inflater.inflate(inflatedData); } catch (DataFormatException e) { throw new ETFException(e); } if (!inflater.finished()) throw new ETFException("Inflater not finished, is the distribution header wrong?"); this.data = inflatedData; } else { this.data = data; } } private static ByteBuffer wrap(byte[] array) { return ByteBuffer.wrap(array).order(ByteOrder.BIG_ENDIAN); } private static ByteBuffer wrap(byte[] array, int offset, int length) { return ByteBuffer.wrap(array, offset, length).order(ByteOrder.BIG_ENDIAN); } private void skipVersion() { if (Byte.toUnsignedInt(data[offset]) == expectedVersion) offset++; } /** * This gets the number of bytes in the uncompressed term data. * * @return The number of bytes. */ public int getSize() { return data.length; } /** * Gets the version of etf this is using. * * @return The version. */ public int getVersion() { return expectedVersion; } /** * Checks if this parser is BERT compatible. * * @return True when BERT compatible, false if otherwise. */ public boolean isBert() { return bert; } /** * This gets the raw term data (excluding the initial distribution header. * * @return The raw data. */ public byte[] getRawData() { return data; } /** * This gets the current position the parser is at in the raw data array. * * @return The current array offset. */ public int getPosition() { return offset; } /** * This checks if there is no more data to read. * * @return True when there is no more data to read, false when otherwise. 
*/ public boolean isFinished() { return offset >= data.length; } private void checkPreconditions() throws ETFException { checkPreconditions(null); } private void checkPreconditions(byte type) throws ETFException { checkPreconditions(type, null); } private void checkPreconditions(Boolean bertStatus) throws ETFException { checkPreconditions(-1, bertStatus); } private void checkPreconditions(int type, Boolean bertStatus) throws ETFException { if (bertStatus != null) { //bert status is relevant if (bertStatus != isBert()) throw new ETFException("BERT vs ETF spec mismatch"); } if (isFinished()) { throw new ETFException("No more data to read!"); } skipVersion(); if (type != -1) { if (type != peek()) { throw new ETFException("ETF Term type mismatch!"); } else { offset++; } } } /** * This peeks at the type of the next term. * * @return The type of the next term. * * @see com.austinv11.etf.common.TermTypes */ public byte peek() { checkPreconditions(); return data[offset]; } /** * This gets the next distribution header. * * @return The next header. */ public DistributionHeader nextDistributionHeader() { //TODO? throw new UnsupportedOperationException("Not implemented"); } /** * This gets an index referring to an atom cache reference in the distribution header. * * @return The index. * * @see #nextDistributionHeader() */ public int nextAtomCacheIndex() { checkPreconditions(ATOM_CACHE_REF, false); return data[offset++]; } /** * This gets the next small integer (unsigned 8 bit int). * * @return The int. */ @BertCompatible public int nextSmallInt() { checkPreconditions(SMALL_INTEGER_EXT); return Byte.toUnsignedInt(data[offset++]); } /** * This gets the next large integer (signed 32 bit). * * @return The int. */ @BertCompatible public int nextLargeInt() { checkPreconditions(INTEGER_EXT); int integer = wrap(data, offset, 4).getInt(); offset += 4; return integer; } /** * This gets the next large or small integer. * * @return The int. 
*/ @BertCompatible public int nextInt() { byte type = peek(); if (type == SMALL_INTEGER_EXT) { return nextSmallInt(); } else { return nextLargeInt(); } } /** * This gets the next old formatted float. * * @return The float. */ @BertCompatible public float nextOldFloat() { checkPreconditions(FLOAT_EXT); return Float.parseFloat(new String(Arrays.copyOfRange(data, offset, (offset += 31)))); } /** * This gets the next new formatted float. * * @return The float. */ public float nextNewFloat() { checkPreconditions(NEW_FLOAT_EXT, false); float num = wrap(data, offset, 8).getFloat(); offset += 8; return num; } /** * This gets the next new or old float. * * @return The float. */ public float nextFloat() { byte version = peek(); if (version == FLOAT_EXT) { return nextOldFloat(); } else { return nextNewFloat(); } } /** * Gets the next large Latin-1 encoded atom. * * @return The atom name. */ @BertCompatible public String nextLargeAtom() { checkPreconditions(ATOM_EXT); char len = wrap(data, offset, 2).getChar(); //Because we don't have unsigned shorts offset += 2; try { return new String(Arrays.copyOfRange(data, offset, (offset += len)), "ISO-8859-1" /*Latin-1 charset*/); } catch (UnsupportedEncodingException e) { throw new ETFException(e); } } /** * Gets the next small Latin-1 encoded atom. * * @return The atom name. */ public String nextSmallAtom() { checkPreconditions(SMALL_ATOM_EXT, false); int len = Byte.toUnsignedInt(data[offset++]); try { return new String(Arrays.copyOfRange(data, offset, (offset += len)), "ISO-8859-1" /*Latin-1 charset*/); } catch (UnsupportedEncodingException e) { throw new ETFException(e); } } /** * Gets the next large UTF-8 encoded atom. * * @return The atom name. 
*/ public String nextLargeUTF8Atom() { checkPreconditions(ATOM_UTF8_EXT, false); char len = wrap(data, offset, 2).getChar(); //Because we don't have unsigned shorts offset += 2; try { return new String(Arrays.copyOfRange(data, offset, (offset += len)), "UTF8"); } catch (UnsupportedEncodingException e) { throw new ETFException(e); } } /** * Gets the next small UTF-8 encoded atom. * * @return The atom name. */ public String nextSmallUTF8Atom() { checkPreconditions(SMALL_ATOM_UTF8_EXT, false); int len = Byte.toUnsignedInt(data[offset++]); try { return new String(Arrays.copyOfRange(data, offset, (offset += len)), "UTF8"); } catch (UnsupportedEncodingException e) { throw new ETFException(e); } } /** * Gets the next atom (small or large and latin-1 or utf-8). * * @return The atom name. */ public String nextAtom() { byte type = peek(); if (type == SMALL_ATOM_EXT) { return nextSmallAtom(); } else if (type == ATOM_EXT) { return nextLargeAtom(); } else if (type == ATOM_UTF8_EXT) { return nextLargeUTF8Atom(); } else { return nextSmallUTF8Atom(); } } /** * Gets the next "string". NOTE: Erlang doesn't natively support strings, strings are actually just unsigned byte * lists (or char list in java). So the string might be nonsensical. * * @return The string. * * @see String#toCharArray() */ @BertCompatible public String nextString() { checkPreconditions(STRING_EXT); char len = wrap(data, offset, 2).getChar(); //Because we don't have unsigned shorts offset += 2; return new String(wrap(data, offset, len).asCharBuffer().array()); } /** * This gets the next atom or string. * * @return The atom or string. 
*/ public String nextAtomOrString() { byte type = peek(); if (type == STRING_EXT) { return nextString(); } else { return nextAtom(); } } private Node nextNode() { int type = peek(); String atom = null; int index = -1; if (type == ATOM_EXT) { //Only supports standard atoms + atom index atom = nextLargeAtom(); } else if (type == SMALL_ATOM_EXT) { atom = nextSmallAtom(); } else { index = nextAtomCacheIndex(); } if (index != -1) { return new Node(index); } else { return new Node(atom); } } /** * Gets the next port object. * * @return The port object. */ public Port nextPort() { //Pretty much identical to #nextReference checkPreconditions(PORT_EXT, false); Node node = nextNode(); int id = wrap(data, offset, 4).getInt(); offset += 4; byte creation = data[offset++]; if (node.isRef()) { return new Port(node.ref, id, creation); } else { return new Port(node.atom, id, creation); } } /** * Gets the next pid object. * * @return The pid object. */ public PID nextPID() { checkPreconditions(PID_EXT, false); Node node = nextNode(); int id = wrap(data, offset, 4).getInt(); offset += 4; int serial = wrap(data, offset, 4).getInt(); offset += 4; byte creation = data[offset++]; if (node.isRef()) { return new PID(node.ref, id, serial, creation); } else { return new PID(node.atom, id, serial, creation); } } private Tuple findTuple(long arity) { Object[] data = new Object[(int)arity]; for (int i = 0; i < arity; i++) { data[i] = next(); } return new Tuple(data); } /** * Gets the next small tuple. * * @return The tuple. */ public Tuple nextSmallTuple() { checkPreconditions(SMALL_TUPLE_EXT); return findTuple(Byte.toUnsignedInt(data[offset++])); } /** * Gets the next large tuple. * * @return The tuple. */ @BertCompatible public Tuple nextLargeTuple() { checkPreconditions(LARGE_TUPLE_EXT); Tuple tuple = findTuple(Integer.toUnsignedLong(wrap(data, offset, 4).getInt())); offset += 4; return tuple; } /** * Gets the next small or large tuple. * * @return The tuple. 
*/ @BertCompatible public Tuple nextTuple() { byte type = peek(); if (type == SMALL_TUPLE_EXT) { return nextSmallTuple(); } else { return nextLargeTuple(); } } /** * Gets the next map. * * @return The map. */ @BertCompatible public ErlangMap nextMap() { checkPreconditions(MAP_EXT); long arity = Integer.toUnsignedLong(wrap(data, offset, 4).getInt()); offset += 4; Map<Object, Object> map = new HashMap<>(); for (long i = 0; i < arity; i++) { map.put(next(), next()); } return new ErlangMap(map); } /** * Checks if the next term is nil. * * @return True if the next term is nil, false if otherwise. */ @BertCompatible public boolean isNil() { return peek() == NIL_EXT; } /** * Gets the next nil. */ @BertCompatible public void nextNil() { checkPreconditions(NIL_EXT); //Offset should be incremented here } /** * Gets the next proper or improper list. * * @return The list. */ @BertCompatible public ErlangList nextList() { checkPreconditions(LIST_EXT); long len = Integer.toUnsignedLong(wrap(data, offset, 4).getInt()); offset += 4; Object[] list = new Object[(int) len]; for (int i = 0; i < len; i++) { list[i] = next(); } Object tail; if (isNil()) { //Proper list nextNil(); tail = null; } else { tail = next(); } return new ErlangList(list, tail); } /** * This gets the next binary representation of a list or term. * * @return The binary data. */ @BertCompatible public byte[] nextBinary() { checkPreconditions(BINARY_EXT); long len = Integer.toUnsignedLong(wrap(data, offset, 4).getInt()); offset += 4; byte[] bytes = wrap(data, offset, (int) len).array(); offset += len; return bytes; } /** * This gets the next bitstring. * * @return The binary data. 
*/ public long[] nextBitBinary() { checkPreconditions(BIT_BINARY_EXT); long len = Integer.toUnsignedLong(wrap(data, offset, 4).getInt()); offset += 4; byte bits = data[offset++]; long[] bytes = new long[(int) len]; for (int i = 0; i < len; i++) { int val = wrap(data, offset, 4).getInt(); offset += 4; if (i == len-1) //Tail val >>>= 8-bits; //bits = # of significant bits from 1-8, so we remove the insignificant ones bytes[i] = Integer.toUnsignedLong(val); } bytes[bytes.length-1] = bytes[bytes.length-1] >> 8-len; offset += len; return bytes; } private BigInteger nextBig(long len) { int sign = Byte.toUnsignedInt(data[offset++]); BigInteger total = BigInteger.valueOf(0); //Sorry for this algorithm but its what the docs say to do for (long i = 0; i < len; i++) { total = total.add(BigInteger.valueOf(Byte.toUnsignedInt(data[offset++]) * (long)Math.pow(256, i))); } if (sign == 0) { //Positive if (total.signum() == -1) total = total.negate(); } else if (sign == 1) { //Negative if (total.signum() == 1) total = total.negate(); } return total; } /** * Gets the next small big number. * * @return The small big number. */ @BertCompatible public BigInteger nextSmallBig() { checkPreconditions(SMALL_BIG_EXT); return nextBig(Byte.toUnsignedInt(data[offset++])); } /** * Gets the next large big number. * * @return The large big number. */ @BertCompatible public BigInteger nextLargeBig() { checkPreconditions(LARGE_BIG_EXT); long len = Integer.toUnsignedLong(wrap(data, offset, 4).getInt()); offset += 4; return nextBig(len); } /** * Gets the next big number. * * @return The big number. */ @BertCompatible public BigInteger nextBigNumber() { if (peek() == SMALL_BIG_EXT) { return nextSmallBig(); } else { return nextLargeBig(); } } /** * Gets the next old reference object. * * @return The old reference object. 
*/ public Reference nextOldReference() { checkPreconditions(REFERENCE_EXT, false); Node node = nextNode(); long id = Integer.toUnsignedLong(wrap(data, offset, 4).getInt()); offset += 4; byte creation = data[offset++]; if (node.isRef()) { return new Reference(node.ref, id, creation); } else { return new Reference(node.atom, id, creation); } } /** * Gets the next new reference object. * * @return The new reference object. */ public Reference nextNewReference() { checkPreconditions(NEW_REFERENCE_EXT, false); char len = wrap(data, offset, 2).getChar(); //Because we don't have unsigned shorts offset += 2; Node node = nextNode(); byte creation = data[offset++]; long[] id = new long[len]; for (char i = 0; i < len; i++) { id[i] = Integer.toUnsignedLong(wrap(data, offset, 4).getInt()); offset += 4; } if (node.isRef()) { return new Reference(node.ref, creation, id); } else { return new Reference(node.atom, creation, id); } } /** * Gets the next reference object. * * @return The reference object. */ public Reference nextReference() { if (peek() == REFERENCE_EXT) { return nextOldReference(); } else { return nextNewReference(); } } /** * Gets the next old function reference. * * @return The function object. */ public Fun nextOldFun() { throw new UnsupportedOperationException("Not implemented"); //TODO? } /** * Gets the next new function reference. * * @return The function object. */ public Fun nextNewFun() { throw new UnsupportedOperationException("Not implemented"); //TODO? } /** * Gets the next export function reference. * * @return The function object. */ public Fun nextExport() { throw new UnsupportedOperationException("Not implemented"); //TODO? } /** * Gets the next old/new/export function reference. * * @return The function object. 
*/ public Fun nextFun() { byte type = peek(); if (type == FUN_EXT) { return nextOldFun(); } else if (type == NEW_FUN_EXT) { return nextNewFun(); } else { return nextExport(); } } //TODO: Implement advanced BERT objs /** * This gets the next generic term. * * @return The next term. */ @BertCompatible public Object next() { switch (peek()) { case HEADER: throw new ETFException("Nested header found! Is the data malformed?"); case DISTRIBUTION_HEADER: return nextDistributionHeader(); case ATOM_CACHE_REF: return nextAtomCacheIndex(); case SMALL_INTEGER_EXT: return nextSmallInt(); case INTEGER_EXT: return nextLargeInt(); case FLOAT_EXT: return nextOldFloat(); case ATOM_EXT: return nextLargeAtom(); case REFERENCE_EXT: return nextOldReference(); case PORT_EXT: return nextPort(); case PID_EXT: return nextPID(); case SMALL_TUPLE_EXT: return nextSmallTuple(); case LARGE_TUPLE_EXT: return nextLargeTuple(); case MAP_EXT: return nextMap(); case NIL_EXT: nextNil(); return null; case STRING_EXT: return nextString(); case LIST_EXT: return nextList(); case BINARY_EXT: return nextBinary(); case SMALL_BIG_EXT: return nextSmallBig(); case LARGE_BIG_EXT: return nextLargeBig(); case NEW_REFERENCE_EXT: return nextNewReference(); case SMALL_ATOM_EXT: return nextSmallAtom(); case FUN_EXT: return nextOldFun(); case NEW_FUN_EXT: return nextNewFun(); case EXPORT_EXT: return nextExport(); case BIT_BINARY_EXT: return nextBitBinary(); case NEW_FLOAT_EXT: return nextNewFloat(); case ATOM_UTF8_EXT: return nextLargeUTF8Atom(); case SMALL_ATOM_UTF8_EXT: return nextSmallUTF8Atom(); default: throw new ETFException("Unidentified type " + peek() + " is the data malformed?"); } } /** * This reads all of the terms in the provided etf data from the current offset. * * @return The list of all remaining terms. 
*/ public List<Object> readFully() { List<Object> terms = new ArrayList<>(); while (!isFinished()) { terms.add(next()); } return terms; } @Override public String toString() { StringBuilder builder = new StringBuilder("<"); for (int i = 0; i < data.length; i++) { builder.append(data[i]); if (i+1 != data.length) builder.append(", "); } builder.append(">"); return builder.toString(); } //Internal use only, we don't actually provide a Node object private class Node { final String atom; final int ref; public Node(String atom) { this.atom = atom; this.ref = -1; } public Node(int ref) { this.ref = ref; this.atom = null; } boolean isRef() { return ref != -1; } } }
package org.grobid.core.data;

import org.junit.Test;

import java.util.Arrays;
import java.util.List;

import static org.hamcrest.Matchers.hasSize;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.hamcrest.CoreMatchers.*;

/**
 * Unit tests for {@link Date}: merging of partially specified dates
 * ({@code Date.merge}) and ISO-8601 formatting ({@code Date.toISOString}).
 */
public class DateTest {

    // Fixtures reused by the merge tests: 'target' is merged with 'other'.
    Date target;
    Date other;

    @Test
    public void testDateMerging_yearVsYearMonth_shouldReturnYearMonth() {
        // "2010" "2010-10" -> "2010-10"
        target = new Date();
        target.setYear(2010);

        other = new Date();
        other.setYear(2010);
        other.setMonth(10);

        Date merged = Date.merge(target, other);
        assertThat(merged.getYear(), is(2010));
        assertThat(merged.getMonth(), is(10));
    }

    @Test
    public void testDateMerging_yearVsYearMonthDay_shouldReturnYearMonthDay() {
        // "2010" "2010-10-27" -> "2010-10-27"
        target = new Date();
        target.setYear(2010);

        other = new Date();
        other.setYear(2010);
        other.setMonth(10);
        other.setDay(27);

        Date merged = Date.merge(target, other);
        assertThat(merged.getYear(), is(2010));
        assertThat(merged.getMonth(), is(10));
        assertThat(merged.getDay(), is(27));
    }

    @Test
    public void testDateMerging_yearMonthVsYearMonthDay_shouldReturnYearMonthDay() {
        // "2010-10" "2010-10-27" -> "2010-10-27"
        target = new Date();
        target.setYear(2010);
        target.setMonth(10);

        other = new Date();
        other.setYear(2010);
        other.setMonth(10);
        other.setDay(27);

        Date merged = Date.merge(target, other);
        assertThat(merged.getYear(), is(2010));
        assertThat(merged.getMonth(), is(10));
        assertThat(merged.getDay(), is(27));
    }

    @Test
    public void testDateMerging_YearMonthDayVsYearMonth_shouldReturnYearMonthDay() {
        // "2010-10-27" "2010-10" -> "2010-10-27"
        // The already more specific target wins over a less specific other.
        target = new Date();
        target.setYear(2010);
        target.setMonth(10);
        target.setDay(27);

        other = new Date();
        other.setYear(2010);
        other.setMonth(10);

        Date merged = Date.merge(target, other);
        assertThat(merged.getYear(), is(2010));
        assertThat(merged.getMonth(), is(10));
        assertThat(merged.getDay(), is(27));
    }

    @Test
    public void testDateMerging_differentDates_yearMonth_shouldReturnOriginal() {
        // "2011-10" "2010-10-27" -> "2011-10"
        // Conflicting dates: merge keeps the original target untouched.
        target = new Date();
        target.setYear(2011);
        target.setMonth(10);

        other = new Date();
        other.setYear(2010);
        other.setMonth(10);
        other.setDay(27);

        Date merged = Date.merge(target, other);
        assertThat(merged.getYear(), is(2011));
        assertThat(merged.getMonth(), is(10));
    }

    @Test
    public void testDateMerging_differentDates_year_shouldReturnOriginal() {
        // "2010" "2016-10-27" -> "2010"
        target = new Date();
        target.setYear(2010);

        other = new Date();
        other.setYear(2016);
        other.setMonth(10);
        other.setDay(27);

        Date merged = Date.merge(target, other);
        assertThat(merged.getYear(), is(2010));
    }

    @Test
    public void testDateMerging_differentDates_onlyYear_shouldReturnOriginal() {
        // "2011" "2010" -> 2011
        target = new Date();
        target.setYear(2011);

        other = new Date();
        other.setYear(2010);

        Date merged = Date.merge(target, other);
        assertThat(merged.getYear(), is(2011));
    }

    // NOTE(review): name says "onlyYear" but a complete date is set — the
    // method actually tests full-date formatting; consider renaming.
    @Test
    public void testToISOString_onlyYear() {
        Date date = new Date();
        date.setYear(2016);
        date.setMonth(10);
        date.setDay(27);

        assertThat(Date.toISOString(date), is("2016-10-27"));
    }

    // Two-digit years are zero-padded to four digits.
    @Test
    public void testToISOString_onlyYear_WithoutPrefix() {
        Date date = new Date();
        date.setYear(16);
        date.setMonth(10);
        date.setDay(27);

        assertThat(Date.toISOString(date), is("0016-10-27"));
    }

    // A missing month truncates the output to the year only, even if a day is set.
    @Test
    public void testToISOString_completeDate_missingMonth() {
        Date date = new Date();
        date.setYear(2016);
        date.setDay(27);

        assertThat(Date.toISOString(date), is("2016"));
    }

    // Without a year nothing can be formatted: empty string.
    @Test
    public void testToISOString_onlyDay() {
        Date date = new Date();
        date.setDay(27);

        assertThat(Date.toISOString(date), is(""));
    }
}
package com.commafeed.frontend.rest;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;

import javax.ws.rs.Consumes;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;

import org.apache.http.HttpHeaders;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * JAX-RS provider that (de)serializes entities as JSON via a shared Jackson {@link ObjectMapper}.
 * On write it forces a UTF-8 content type and disables caching; {@code String} entities are
 * written verbatim instead of being JSON-encoded.
 */
@Provider
@Consumes(MediaType.WILDCARD)
@Produces(MediaType.WILDCARD)
public class JsonProvider implements MessageBodyReader<Object>, MessageBodyWriter<Object> {

	private static final String CONTENT_TYPE_VALUE_SUFFIX = ";charset=UTF-8";
	private static final String CACHE_CONTROL_VALUE = "no-cache";

	// ObjectMapper is thread-safe once configured, so a single shared instance is fine.
	private static final ObjectMapper MAPPER = new ObjectMapper();

	@Override
	public void writeTo(Object value, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
			MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException {
		// Append the charset to whatever media type was negotiated, and disable caching.
		httpHeaders.putSingle(HttpHeaders.CONTENT_TYPE, mediaType.toString() + CONTENT_TYPE_VALUE_SUFFIX);
		httpHeaders.putSingle(HttpHeaders.CACHE_CONTROL, CACHE_CONTROL_VALUE);
		httpHeaders.putSingle(HttpHeaders.PRAGMA, CACHE_CONTROL_VALUE);

		if (type.equals(String.class)) {
			// Plain strings are written as-is (not JSON-quoted).
			// Use java.nio StandardCharsets instead of the deprecated commons-io Charsets.
			entityStream.write(value.toString().getBytes(StandardCharsets.UTF_8));
		} else {
			getMapper().writeValue(entityStream, value);
		}
	}

	@Override
	public Object readFrom(Class<Object> type, Type genericType, Annotation[] annotations, MediaType mediaType,
			MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException, WebApplicationException {
		return getMapper().readValue(entityStream, type);
	}

	@Override
	public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
		return true;
	}

	@Override
	public long getSize(Object t, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
		// -1 tells JAX-RS the length is unknown (chunked/streamed response).
		return -1;
	}

	@Override
	public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
		return true;
	}

	public static ObjectMapper getMapper() {
		return MAPPER;
	}
}
package com.intellij.util.ui; import com.intellij.codeInspection.InspectionProfileEntry; import com.intellij.util.ReflectionUtil; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.awt.event.ItemEvent; public class CheckBox extends JCheckBox { public CheckBox(@NotNull @Nls String label, @NotNull InspectionProfileEntry owner, @NonNls String property) { this(label, (Object)owner, property); } /** * @param property field must be non-private (or ensure that it won't be scrambled by other means) */ public CheckBox(@NotNull @Nls String label, @NotNull Object owner, @NonNls String property) { super(label, getPropertyValue(owner, property)); addItemListener(e -> ReflectionUtil.setField(owner.getClass(), owner, boolean.class, property, e.getStateChange() == ItemEvent.SELECTED)); } private static boolean getPropertyValue(Object owner, String property) { final Boolean value = ReflectionUtil.getField(owner.getClass(), owner, boolean.class, property); assert value != null; return value; } }
package com.conveyal.gtfs.loader; import com.conveyal.gtfs.model.Entity; import com.conveyal.gtfs.model.PatternStop; import com.conveyal.gtfs.model.StopTime; import com.conveyal.gtfs.storage.StorageException; import com.conveyal.gtfs.util.InvalidNamespaceException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; import gnu.trove.iterator.TIntIterator; import gnu.trove.list.TIntList; import gnu.trove.list.array.TIntArrayList; import gnu.trove.set.TIntSet; import gnu.trove.set.hash.TIntHashSet; import org.apache.commons.dbutils.DbUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.sql.DataSource; import java.io.IOException; import java.sql.Connection; import java.sql.JDBCType; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import static com.conveyal.gtfs.loader.JdbcGtfsLoader.INSERT_BATCH_SIZE; import static com.conveyal.gtfs.util.Util.ensureValidNamespace; /** * This wraps a single database table and provides methods to modify GTFS entities. 
*/ public class JdbcTableWriter implements TableWriter { private static final Logger LOG = LoggerFactory.getLogger(JdbcTableWriter.class); private final DataSource dataSource; private final Table specTable; private final String tablePrefix; private static final ObjectMapper mapper = new ObjectMapper(); private final Connection connection; private static final String RECONCILE_STOPS_ERROR_MSG = "Changes to trip pattern stops must be made one at a time if pattern contains at least one trip."; public JdbcTableWriter(Table table, DataSource datasource, String namespace) throws InvalidNamespaceException { this(table, datasource, namespace, null); } /** * Enum containing available methods for updating in SQL. */ private enum SqlMethod { DELETE, UPDATE, CREATE } public JdbcTableWriter ( Table specTable, DataSource dataSource, String tablePrefix, Connection optionalConnection ) throws InvalidNamespaceException { // verify tablePrefix (namespace) is ok to use for constructing dynamic sql statements ensureValidNamespace(tablePrefix); this.tablePrefix = tablePrefix; this.dataSource = dataSource; // TODO: verify specTable.name is ok to use for constructing dynamic sql statements this.specTable = specTable; Connection connection1; try { connection1 = dataSource.getConnection(); } catch (SQLException e) { e.printStackTrace(); connection1 = null; } if (optionalConnection != null) { DbUtils.closeQuietly(connection1); } this.connection = optionalConnection == null ? connection1 : optionalConnection; } /** * Wrapper method to call Jackson to deserialize a JSON string into JsonNode. */ private static JsonNode getJsonNode (String json) throws IOException { try { return mapper.readTree(json); } catch (IOException e) { LOG.error("Bad JSON syntax", e); throw e; } } /** * Create a new entity in the database from the provided JSON string. Note, any call to create or update must provide * a JSON string with the full set of fields matching the definition of the GTFS table in the Table class. 
     */
    @Override
    public String create(String json, boolean autoCommit) throws SQLException, IOException {
        // Creation is an update with no pre-existing ID.
        return update(null, json, autoCommit);
    }

    /**
     * Update entity for a given ID with the provided JSON string. This update and any potential cascading updates to
     * referencing tables all happens in a single transaction. Note, any call to create or update must provide
     * a JSON string with the full set of fields matching the definition of the GTFS table in the Table class.
     *
     * @param id         database ID of the entity to update, or null when creating a new entity
     * @param json       full JSON representation of the entity (or a JSON array of such entities)
     * @param autoCommit whether to commit (and close the connection) once this call completes
     * @return the updated entity (or array of entities) serialized back to JSON, with the "id" field set
     */
    @Override
    public String update(Integer id, String json, boolean autoCommit) throws SQLException, IOException {
        final boolean isCreating = id == null;
        JsonNode jsonNode = getJsonNode(json);
        try {
            if (jsonNode.isArray()) {
                // If an array of objects is passed in as the JSON input, update them all in a single transaction, only
                // committing once all entities have been updated.
                List<String> updatedObjects = new ArrayList<>();
                for (JsonNode node : jsonNode) {
                    JsonNode idNode = node.get("id");
                    Integer nodeId = idNode == null || isCreating ? null : idNode.asInt();
                    String updatedObject = update(nodeId, node.toString(), false);
                    updatedObjects.add(updatedObject);
                }
                if (autoCommit) connection.commit();
                return mapper.writeValueAsString(updatedObjects);
            }
            // Cast JsonNode to ObjectNode to allow mutations (e.g., updating the ID field).
            ObjectNode jsonObject = (ObjectNode) jsonNode;
            // Ensure that the key field is unique and that referencing tables are updated if the value is updated.
            ensureReferentialIntegrity(connection, jsonObject, tablePrefix, specTable, id);
            // Parse the fields/values into a Field -> String map (drops ALL fields not explicitly listed in spec table's
            // fields)
            // Note, this must follow referential integrity check because some tables will modify the jsonObject (e.g.,
            // adding trip ID if it is null).
            // LOG.info("JSON to {} entity: {}", isCreating ? "create" : "update", jsonObject.toString());
            PreparedStatement preparedStatement = createPreparedUpdate(id, isCreating, jsonObject, specTable, connection, false);
            // ID from create/update result
            long newId = handleStatementExecution(preparedStatement, isCreating);
            // At this point, the transaction was successful (but not yet committed). Now we should handle any update
            // logic that applies to child tables. For example, after saving a trip, we need to store its stop times.
            Set<Table> referencingTables = getReferencingTables(specTable);
            // FIXME: hacky hack hack to add shapes table if we're updating a pattern.
            if (specTable.name.equals("patterns")) {
                referencingTables.add(Table.SHAPES);
            }
            // Iterate over referencing (child) tables and update those rows that reference the parent entity with the
            // JSON array for the key that matches the child table's name (e.g., trip.stop_times array will trigger
            // update of stop_times with matching trip_id).
            for (Table referencingTable : referencingTables) {
                Table parentTable = referencingTable.getParentTable();
                if (parentTable != null && parentTable.name.equals(specTable.name) || referencingTable.name.equals("shapes")) {
                    // If a referencing table has the current table as its parent, update child elements.
                    JsonNode childEntities = jsonObject.get(referencingTable.name);
                    if (childEntities == null || childEntities.isNull() || !childEntities.isArray()) {
                        throw new SQLException(String.format("Child entities %s must be an array and not null", referencingTable.name));
                    }
                    int entityId = isCreating ? (int) newId : id;
                    // Cast child entities to array node to iterate over.
                    ArrayNode childEntitiesArray = (ArrayNode)childEntities;
                    updateChildTable(childEntitiesArray, entityId, isCreating, referencingTable, connection);
                }
            }
            // Iterate over table's fields and apply linked values to any tables
            if ("routes".equals(specTable.name)) {
                updateLinkedFields(specTable, jsonObject, "trips", "route_id", "wheelchair_accessible");
            } else if ("patterns".equals(specTable.name)) {
                updateLinkedFields(specTable, jsonObject, "trips", "pattern_id", "direction_id");
            }
            if (autoCommit) {
                // If nothing failed up to this point, it is safe to assume there were no problems updating/creating the
                // main entity and any of its children, so we commit the transaction.
                LOG.info("Committing transaction.");
                connection.commit();
            }
            // Add new ID to JSON object.
            jsonObject.put("id", newId);
            // FIXME: Should this return the entity freshly queried from the database rather than just updating the ID?
            return jsonObject.toString();
        } catch (Exception e) {
            LOG.error("Error {} {} entity", isCreating ? "creating" : "updating", specTable.name);
            // NOTE(review): prefer passing the exception to LOG.error instead of printStackTrace.
            e.printStackTrace();
            throw e;
        } finally {
            if (autoCommit) {
                // Always rollback and close in finally in case of early returns or exceptions.
                connection.rollback();
                connection.close();
            }
        }
    }

    /**
     * Updates linked fields with values from entity being updated. This is used to update identical fields in related
     * tables (for now just fields in trips and stop_times) where the reference table's value should take precedence over
     * the related table (e.g., pattern_stop#timepoint should update all of its related stop_times).
     *
     * @param referenceTable table whose field values take precedence (e.g., pattern_stops)
     * @param jsonObject     entity being updated, supplying the new values and the key/order values
     * @param tableName      name of the related table receiving the updates (e.g., trips or stop_times)
     * @param keyField       field linking the reference table to the related table (e.g., pattern_id)
     * @param fieldNames     names of the identical fields to copy across
     */
    private void updateLinkedFields(Table referenceTable, ObjectNode jsonObject, String tableName, String keyField, String ...fieldNames) throws SQLException {
        // Collect fields, the JSON values for these fields, and the strings to add to the prepared statement into Lists.
        List<Field> fields = new ArrayList<>();
        List<JsonNode> values = new ArrayList<>();
        List<String> fieldStrings = new ArrayList<>();
        for (String field : fieldNames) {
            fields.add(referenceTable.getFieldForName(field));
            values.add(jsonObject.get(field));
            fieldStrings.add(String.format("%s = ?", field));
        }
        String setFields = String.join(", ", fieldStrings);
        // If updating stop_times, use a more complex query that joins trips to stop_times in order to match on pattern_id
        boolean updatingStopTimes = "stop_times".equals(tableName);
        Field orderField = updatingStopTimes ? referenceTable.getFieldForName(referenceTable.getOrderFieldName()) : null;
        String sql = updatingStopTimes
            ? String.format("update %s.stop_times st set %s from %s.trips t " +
                "where st.trip_id = t.trip_id AND t.%s = ? AND st.%s = ?",
                tablePrefix, setFields, tablePrefix, keyField, orderField.name)
            : String.format("update %s.%s set %s where %s = ?", tablePrefix, tableName, setFields, keyField);
        // Prepare the statement and set statement parameters
        PreparedStatement statement = connection.prepareStatement(sql);
        int oneBasedIndex = 1;
        // Iterate over list of fields that need to be updated and set params.
        for (int i = 0; i < fields.size(); i++) {
            Field field = fields.get(i);
            String newValue = values.get(i).isNull() ? null : values.get(i).asText();
            if (newValue == null) field.setNull(statement, oneBasedIndex++);
            else field.setParameter(statement, oneBasedIndex++, newValue);
        }
        // Set "where clause" with value for key field (e.g., set values where pattern_id = '3')
        statement.setString(oneBasedIndex++, jsonObject.get(keyField).asText());
        if (updatingStopTimes) {
            // If updating stop times set the order field parameter (stop_sequence)
            String orderValue = jsonObject.get(orderField.name).asText();
            orderField.setParameter(statement, oneBasedIndex++, orderValue);
        }
        // Log query, execute statement, and log result.
        LOG.debug(statement.toString());
        int entitiesUpdated = statement.executeUpdate();
        LOG.debug("{} {} linked fields updated", entitiesUpdated, tableName);
    }

    /**
     * Creates a prepared statement for an entity create or update operation. If not performing a batch operation, the
     * method will set parameters for the prepared statement with values found in the provided JSON ObjectNode. The Table
     * object here is provided as a positional argument (rather than provided via the JdbcTableWriter instance field)
     * because this method is used to update both the specTable for the primary entity and any relevant child entities.
     *
     * @param id         ID of the entity being updated (ignored when creating)
     * @param isCreating true to generate an INSERT statement, false to generate an UPDATE
     * @param batch      when true, parameters are NOT set here; the caller sets them per batch item
     */
    private PreparedStatement createPreparedUpdate(Integer id, boolean isCreating, ObjectNode jsonObject, Table table, Connection connection, boolean batch) throws SQLException {
        String statementString;
        if (isCreating) {
            statementString = table.generateInsertSql(tablePrefix, true);
        } else {
            statementString = table.generateUpdateSql(tablePrefix, id);
        }
        // Set the RETURN_GENERATED_KEYS flag on the PreparedStatement because it may be creating new rows, in which
        // case we need to know the auto-generated IDs of those new rows.
        PreparedStatement preparedStatement = connection.prepareStatement(
            statementString,
            Statement.RETURN_GENERATED_KEYS);
        if (!batch) {
            setStatementParameters(jsonObject, table, preparedStatement, connection);
        }
        return preparedStatement;
    }

    /**
     * Given a prepared statement (for update or create), set the parameters of the statement based on string values
     * taken from JSON. Note, string values are used here in order to take advantage of setParameter method on
     * individual fields, which handles parsing string and non-string values into the appropriate SQL field types.
     */
    private void setStatementParameters(ObjectNode jsonObject, Table table, PreparedStatement preparedStatement, Connection connection) throws SQLException {
        // JDBC SQL statements use a one-based index for setting fields/parameters
        List<String> missingFieldNames = new ArrayList<>();
        int index = 1;
        for (Field field : table.editorFields()) {
            if (!jsonObject.has(field.name)) {
                // If there is a field missing from the JSON string and it is required to write to an editor table,
                // throw an exception (handled after the fields iteration). In an effort to keep the database integrity
                // intact, every update/create operation should have all fields defined by the spec table.
                // FIXME: What if someone wants to make updates to non-editor feeds? In this case, the table may not
                // have all of the required fields, yet this would prohibit such an update. Further, an update on such
                // a table that DID have all of the spec table fields would fail because they might be missing from
                // the actual database table.
                missingFieldNames.add(field.name);
                continue;
            }
            JsonNode value = jsonObject.get(field.name);
            LOG.debug("{}={}", field.name, value);
            try {
                if (value == null || value.isNull()) {
                    if (field.isRequired() && !field.isEmptyValuePermitted()) {
                        // Only register the field as missing if the value is null, the field is required, and empty
                        // values are not permitted. For example, a null value for fare_attributes#transfers should not
                        // trigger a missing field exception.
                        missingFieldNames.add(field.name);
                        continue;
                    }
                    // Handle setting null value on statement
                    field.setNull(preparedStatement, index);
                } else {
                    List<String> values = new ArrayList<>();
                    if (value.isArray()) {
                        // Array values are flattened into a single comma-separated string parameter.
                        for (JsonNode node : value) {
                            values.add(node.asText());
                        }
                        field.setParameter(preparedStatement, index, String.join(",", values));
                    } else {
                        field.setParameter(preparedStatement, index, value.asText());
                    }
                }
            } catch (StorageException e) {
                LOG.warn("Could not set field {} to value {}. Attempting to parse integer seconds.", field.name, value);
                if (field.name.contains("_time")) {
                    // FIXME: This is a hack to get arrival and departure time into the right format. Because the UI
                    // currently returns them as seconds since midnight rather than the Field-defined format HH:MM:SS.
                    try {
                        if (value == null || value.isNull()) {
                            if (field.isRequired()) {
                                missingFieldNames.add(field.name);
                                continue;
                            }
                            field.setNull(preparedStatement, index);
                        } else {
                            // Try to parse integer seconds value
                            preparedStatement.setInt(index, Integer.parseInt(value.asText()));
                            LOG.info("Parsing value {} for field {} successful!", value, field.name);
                        }
                    } catch (NumberFormatException ex) {
                        // Attempt to set arrival or departure time via integer seconds failed. Rollback.
                        connection.rollback();
                        LOG.error("Bad column: {}={}", field.name, value);
                        ex.printStackTrace();
                        throw ex;
                    }
                } else {
                    // Rollback transaction and throw exception
                    connection.rollback();
                    throw e;
                }
            }
            index += 1;
        }
        if (missingFieldNames.size() > 0) {
            // String joinedFieldNames = missingFieldNames.stream().collect(Collectors.joining(", "));
            throw new SQLException(String.format("The following field(s) are missing from JSON %s object: %s", table.name, missingFieldNames.toString()));
        }
    }

    /**
     * This updates those tables that depend on the table currently being updated. For example, if updating/creating a
     * pattern, this method handles deleting any pattern stops and shape points. For trips, this would handle updating
     * the trips' stop times.
     *
     * This method should only be used on tables that have a single foreign key reference to another table, i.e., they
     * have a hierarchical relationship.
     * FIXME develop a better way to update tables with foreign keys to the table being updated.
     */
    private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreatingNewEntity, Table subTable, Connection connection) throws SQLException, IOException {
        // Get parent table's key field
        Field keyField;
        String keyValue;
        // Primary key fields are always referenced by foreign key fields with the same name.
        keyField = specTable.getFieldForName(subTable.getKeyFieldName());
        // Get parent entity's key value
        keyValue = getValueForId(id, keyField.name, tablePrefix, specTable, connection);
        String childTableName = String.join(".", tablePrefix, subTable.name);
        // FIXME: add check for pattern stop consistency.
        // FIXME: re-order stop times if pattern stop order changes.
        // Reconciling pattern stops MUST happen before original pattern stops are deleted in below block (with
        // getUpdateReferencesSql)
        if ("pattern_stops".equals(subTable.name)) {
            List<PatternStop> newPatternStops = new ArrayList<>();
            // Clean up pattern stop ID fields (passed in as string ID from datatools-ui to avoid id collision)
            for (JsonNode node : subEntities) {
                ObjectNode objectNode = (ObjectNode) node;
                if (!objectNode.get("id").isNumber()) {
                    // Set ID to zero. ID is ignored entirely here. When the pattern stops are stored in the database,
                    // the ID values are determined by auto-incrementation.
                    objectNode.put("id", 0);
                }
                // Accumulate new pattern stop objects from JSON.
                newPatternStops.add(mapper.readValue(objectNode.toString(), PatternStop.class));
            }
            reconcilePatternStops(keyValue, newPatternStops, connection);
        }
        // FIXME: allow shapes to be updated on pattern geometry change.
        if (!isCreatingNewEntity) {
            // Delete existing sub-entities for given entity ID if the parent entity is not being newly created.
            String deleteSql = getUpdateReferencesSql(SqlMethod.DELETE, childTableName, keyField, keyValue, null);
            LOG.info(deleteSql);
            Statement statement = connection.createStatement();
            // FIXME: Use copy on update for a pattern's shape instead of deleting the previous shape and replacing it.
            // This would better account for GTFS data loaded from a file where multiple patterns reference a single
            // shape.
            int result = statement.executeUpdate(deleteSql);
            LOG.info("Deleted {} {}", result, subTable.name);
            // FIXME: are there cases when an update should not return zero?
            // if (result == 0) throw new SQLException("No stop times found for trip ID");
        }
        int entityCount = 0;
        PreparedStatement insertStatement = null;
        // Iterate over the entities found in the array and add to batch for inserting into table.
        String orderFieldName = subTable.getOrderFieldName();
        boolean hasOrderField = orderFieldName != null;
        int previousOrder = -1;
        TIntSet orderValues = new TIntHashSet();
        Multimap<Table, String> referencesPerTable = HashMultimap.create();
        for (JsonNode entityNode : subEntities) {
            // Cast entity node to ObjectNode to allow mutations (JsonNode is immutable).
            ObjectNode subEntity = (ObjectNode)entityNode;
            // Always override the key field (shape_id for shapes, pattern_id for patterns) regardless of the entity's
            // actual value.
            subEntity.put(keyField.name, keyValue);
            // Check any references the sub entity might have. For example, this checks that stop_id values on
            // pattern_stops refer to entities that actually exist in the stops table. NOTE: This skips the "specTable",
            // i.e., for pattern stops it will not check pattern_id references. This is enforced above with the put key
            // field statement above.
            for (Field field : subTable.specFields()) {
                if (field.referenceTable != null && !field.referenceTable.name.equals(specTable.name)) {
                    JsonNode refValueNode = subEntity.get(field.name);
                    // NOTE(review): refValueNode will be null (NPE on the next line) if the field is entirely absent
                    // from the JSON — confirm upstream guarantees presence of all spec fields.
                    // Skip over references that are null but not required (e.g., route_id in fare_rules).
                    if (refValueNode.isNull() && !field.isRequired()) continue;
                    String refValue = refValueNode.asText();
                    referencesPerTable.put(field.referenceTable, refValue);
                }
            }
            // Insert new sub-entity.
            if (entityCount == 0) {
                // If handling first iteration, create the prepared statement (later iterations will add to batch).
                insertStatement = createPreparedUpdate(id, true, subEntity, subTable, connection, true);
            }
            // Update linked stop times fields for updated pattern stop (e.g., timepoint, pickup/drop off type).
            if ("pattern_stops".equals(subTable.name)) {
                updateLinkedFields(
                    subTable,
                    subEntity,
                    "stop_times",
                    "pattern_id",
                    "timepoint",
                    "drop_off_type",
                    "pickup_type",
                    "shape_dist_traveled"
                );
            }
            setStatementParameters(subEntity, subTable, insertStatement, connection);
            if (hasOrderField) {
                // If the table has an order field, check that it is zero-based and incrementing for all sub entities.
                // NOTE: Rather than coercing the order values to conform to the sequence in which they are found, we
                // check the values here as a sanity check.
                int orderValue = subEntity.get(orderFieldName).asInt();
                boolean orderIsUnique = orderValues.add(orderValue);
                boolean valuesAreIncrementing = ++previousOrder == orderValue;
                if (!orderIsUnique || !valuesAreIncrementing) {
                    throw new SQLException(String.format(
                        "%s %s values must be zero-based, unique, and incrementing. Entity at index %d had %s value of %d",
                        subTable.name,
                        orderFieldName,
                        entityCount,
                        previousOrder == 0 ? "non-zero" : !valuesAreIncrementing ? "non-incrementing" : "duplicate",
                        orderValue)
                    );
                }
            }
            // Log statement on first iteration so that it is not logged for each item in the batch.
            if (entityCount == 0) LOG.info(insertStatement.toString());
            insertStatement.addBatch();
            // Prefix increment count and check whether to execute batched update.
            if (++entityCount % INSERT_BATCH_SIZE == 0) {
                LOG.info("Executing batch insert ({}/{}) for {}", entityCount, subEntities.size(), childTableName);
                int[] newIds = insertStatement.executeBatch();
                LOG.info("Updated {}", newIds.length);
            }
        }
        // Check that accumulated references all exist in reference tables.
        verifyReferencesExist(subTable.name, referencesPerTable);
        // execute any remaining prepared statement calls
        LOG.info("Executing batch insert ({}/{}) for {}", entityCount, subEntities.size(), childTableName);
        if (insertStatement != null) {
            // If insert statement is null, an empty array was passed for the child table, so the child elements have
            // been wiped.
            int[] newIds = insertStatement.executeBatch();
            LOG.info("Updated {} {} child entities", newIds.length, subTable.name);
        } else {
            LOG.info("No inserts to execute. Empty array found in JSON for child table {}", childTableName);
        }
    }

    /**
     * Checks that a set of string references to a set of reference tables are all valid. For each set of references
     * mapped to a reference table, the method queries for all of the references. If there are any references that were
     * not returned in the query, one of the original references was invalid and an exception is thrown.
     * @param referringTableName name of the table which contains references for logging/exception message only
     * @param referencesPerTable string references mapped to the tables to which they refer
     * @throws SQLException if any reference is not found in its reference table
     */
    private void verifyReferencesExist(String referringTableName, Multimap<Table, String> referencesPerTable) throws SQLException {
        for (Table referencedTable: referencesPerTable.keySet()) {
            LOG.info("Checking {} references to {}", referringTableName, referencedTable.name);
            Collection<String> referenceStrings = referencesPerTable.get(referencedTable);
            String referenceFieldName = referencedTable.getKeyFieldName();
            // Build an IN (...) clause with one placeholder per reference.
            String questionMarks = String.join(", ", Collections.nCopies(referenceStrings.size(), "?"));
            String checkCountSql = String.format(
                "select %s from %s.%s where %s in (%s)",
                referenceFieldName,
                tablePrefix,
                referencedTable.name,
                referenceFieldName,
                questionMarks);
            PreparedStatement preparedStatement = connection.prepareStatement(checkCountSql);
            int oneBasedIndex = 1;
            for (String ref : referenceStrings) {
                preparedStatement.setString(oneBasedIndex++, ref);
            }
            LOG.info(preparedStatement.toString());
            ResultSet resultSet = preparedStatement.executeQuery();
            Set<String> foundReferences = new HashSet<>();
            while (resultSet.next()) {
                String referenceValue = resultSet.getString(1);
                foundReferences.add(referenceValue);
            }
            // Determine if any references were not found.
            // NOTE(review): referenceStrings is a live view backed by the multimap (Guava Multimap#get), so this
            // removeAll also mutates referencesPerTable — presumably intentional; verify if the map is reused.
            referenceStrings.removeAll(foundReferences);
            if (referenceStrings.size() > 0) {
                throw new SQLException(
                    String.format(
                        "%s entities must contain valid %s references. (Invalid references: %s)",
                        referringTableName,
                        referenceFieldName,
                        String.join(", ", referenceStrings)));
            } else {
                LOG.info("All {} {} {} references are valid.", foundReferences.size(), referencedTable.name, referenceFieldName);
            }
        }
    }

    /**
     * Reconciles an updated list of pattern stops against the pattern stops currently stored for the pattern,
     * adjusting the stop_times of the pattern's trips accordingly. (Method continues beyond this excerpt.)
     */
    private void reconcilePatternStops(String patternId, List<PatternStop> newStops, Connection connection) throws SQLException {
        LOG.info("Reconciling pattern stops for pattern ID={}", patternId);
        // Collect the original list of pattern stop IDs.
        String getStopIdsSql = String.format("select stop_id from %s.pattern_stops where pattern_id = ? order by stop_sequence", tablePrefix);
        PreparedStatement getStopsStatement = connection.prepareStatement(getStopIdsSql);
        getStopsStatement.setString(1, patternId);
        LOG.info(getStopsStatement.toString());
        ResultSet stopsResults = getStopsStatement.executeQuery();
        List<String> originalStopIds = new ArrayList<>();
        while (stopsResults.next()) {
            originalStopIds.add(stopsResults.getString(1));
        }
        // Collect all trip IDs so that we can insert new stop times (with the appropriate trip ID value) if a pattern
        // stop is added.
        // Collect all trip IDs on this pattern; each trip's stop_times must be kept consistent with the edited
        // pattern stop list below.
        String getTripIdsSql = String.format("select trip_id from %s.trips where pattern_id = ?", tablePrefix);
        PreparedStatement getTripsStatement = connection.prepareStatement(getTripIdsSql);
        getTripsStatement.setString(1, patternId);
        ResultSet tripsResults = getTripsStatement.executeQuery();
        List<String> tripsForPattern = new ArrayList<>();
        while (tripsResults.next()) {
            tripsForPattern.add(tripsResults.getString(1));
        }
        if (tripsForPattern.size() == 0) {
            // If there are no trips for the pattern, there is no need to reconcile stop times to modified pattern stops.
            // This permits the creation of patterns without stops, reversing the stops on existing patterns, and
            // duplicating patterns.
            // For new patterns, this short circuit is required to prevent the transposition conditional check from
            // throwing an IndexOutOfBoundsException when it attempts to access index 0 of a list with no items.
            return;
        }
        // Prepare SQL fragment to filter for all stop times for all trips on a certain pattern.
        // NOTE(review): patternId is interpolated directly into this fragment rather than bound as a parameter —
        // presumably pattern IDs are server-generated and quote-free, but consider parameterizing. TODO confirm.
        String joinToTrips = String.format("%s.trips.trip_id = %s.stop_times.trip_id AND %s.trips.pattern_id = '%s'",
                tablePrefix, tablePrefix, tablePrefix, patternId);
        // ADDITIONS (IF DIFF == 1)
        if (originalStopIds.size() == newStops.size() - 1) {
            // We have an addition; find it.
            int differenceLocation = -1;
            for (int i = 0; i < newStops.size(); i++) {
                if (differenceLocation != -1) {
                    // we've already found the addition
                    if (i < originalStopIds.size() && !originalStopIds.get(i).equals(newStops.get(i + 1).stop_id)) {
                        // there's another difference, which we weren't expecting
                        throw new IllegalStateException("Multiple differences found when trying to detect stop addition");
                    }
                }
                // if we've reached where one trip has an extra stop, or if the stops at this position differ
                else if (i == newStops.size() - 1 || !originalStopIds.get(i).equals(newStops.get(i).stop_id)) {
                    // we have found the difference
                    differenceLocation = i;
                }
            }
            // Increment sequences for stops that follow the inserted location (including the stop at the changed index).
            // NOTE: This should happen before the blank stop time insertion for logical consistency.
            String updateSql = String.format("update %s.stop_times set stop_sequence = stop_sequence + 1 from %s.trips where stop_sequence >= %d AND %s",
                    tablePrefix, tablePrefix, differenceLocation, joinToTrips);
            LOG.info(updateSql);
            PreparedStatement updateStatement = connection.prepareStatement(updateSql);
            int updated = updateStatement.executeUpdate();
            LOG.info("Updated {} stop times", updated);
            // Insert a skipped stop at the difference location
            insertBlankStopTimes(tripsForPattern, newStops, differenceLocation, 1, connection);
        }
        // DELETIONS
        else if (originalStopIds.size() == newStops.size() + 1) {
            // We have a deletion; find it
            int differenceLocation = -1;
            for (int i = 0; i < originalStopIds.size(); i++) {
                if (differenceLocation != -1) {
                    if (!originalStopIds.get(i).equals(newStops.get(i - 1).stop_id)) {
                        // There is another difference, which we were not expecting
                        throw new IllegalStateException("Multiple differences found when trying to detect stop removal");
                    }
                }
                else if (i == originalStopIds.size() - 1 || !originalStopIds.get(i).equals(newStops.get(i).stop_id)) {
                    // We've reached the end and the only difference is length (so the last stop is the different one)
                    // or we've found the difference.
                    differenceLocation = i;
                }
            }
            // Delete stop at difference location
            String deleteSql = String.format("delete from %s.stop_times using %s.trips where stop_sequence = %d AND %s",
                    tablePrefix, tablePrefix, differenceLocation, joinToTrips);
            LOG.info(deleteSql);
            PreparedStatement deleteStatement = connection.prepareStatement(deleteSql);
            // Decrement all stops with sequence greater than difference location
            String updateSql = String.format("update %s.stop_times set stop_sequence = stop_sequence - 1 from %s.trips where stop_sequence > %d AND %s",
                    tablePrefix, tablePrefix, differenceLocation, joinToTrips);
            LOG.info(updateSql);
            PreparedStatement updateStatement = connection.prepareStatement(updateSql);
            int deleted = deleteStatement.executeUpdate();
            int updated = updateStatement.executeUpdate();
            LOG.info("Deleted {} stop times, updated sequence for {} stop times", deleted, updated);
            // FIXME: Should we be handling bad stop time delete? I.e., we could query for stop times to be deleted and
            // if any of them have different stop IDs than the pattern stop, we could raise a warning for the user.
            String removedStopId = originalStopIds.get(differenceLocation);
            // StopTime removed = trip.stopTimes.remove(differenceLocation);
            // // the removed stop can be null if it was skipped. trip.stopTimes.remove will throw an exception
            // // rather than returning null if we try to do a remove out of bounds.
            // if (removed != null && !removed.stop_id.equals(removedStopId)) {
        }
        // TRANSPOSITIONS
        else if (originalStopIds.size() == newStops.size()) {
            // Imagine the trip patterns pictured below (where .
is a stop, and lines indicate the same stop) // the original trip pattern is on top, the new below // also imagine that the two that are unmarked are the same // (the limitations of ascii art, this is prettier on my whiteboard) // There are three regions: the beginning and end, where stopSequences are the same, and the middle, where they are not // The same is true of trips where stops were moved backwards // find the left bound of the changed region int firstDifferentIndex = 0; while (originalStopIds.get(firstDifferentIndex).equals(newStops.get(firstDifferentIndex).stop_id)) { firstDifferentIndex++; if (firstDifferentIndex == originalStopIds.size()) // trip patterns do not differ at all, nothing to do return; } // find the right bound of the changed region int lastDifferentIndex = originalStopIds.size() - 1; while (originalStopIds.get(lastDifferentIndex).equals(newStops.get(lastDifferentIndex).stop_id)) { lastDifferentIndex } // TODO: write a unit test for this if (firstDifferentIndex == lastDifferentIndex) { throw new IllegalStateException( "Pattern stop substitutions are not supported, region of difference must have length > 1."); } String conditionalUpdate; // figure out whether a stop was moved left or right // note that if the stop was only moved one position, it's impossible to tell, and also doesn't matter, // because the requisite operations are equivalent int from, to; // Ensure that only a single stop has been moved (i.e. verify stop IDs inside changed region remain unchanged) if (originalStopIds.get(firstDifferentIndex).equals(newStops.get(lastDifferentIndex).stop_id)) { // Stop was moved from beginning of changed region to end of changed region (-->) from = firstDifferentIndex; to = lastDifferentIndex; verifyInteriorStopsAreUnchanged(originalStopIds, newStops, firstDifferentIndex, lastDifferentIndex, true); conditionalUpdate = String.format("update %s.stop_times set stop_sequence = case " + // if sequence = fromIndex, update to toIndex. 
"when stop_sequence = %d then %d " + // if sequence is greater than fromIndex and less than or equal to toIndex, decrement "when stop_sequence > %d AND stop_sequence <= %d then stop_sequence - 1 " + // Otherwise, sequence remains untouched "else stop_sequence " + "end " + "from %s.trips where %s", tablePrefix, from, to, from, to, tablePrefix, joinToTrips); } else if (newStops.get(firstDifferentIndex).stop_id.equals(originalStopIds.get(lastDifferentIndex))) { // Stop was moved from end of changed region to beginning of changed region (<--) from = lastDifferentIndex; to = firstDifferentIndex; verifyInteriorStopsAreUnchanged(originalStopIds, newStops, firstDifferentIndex, lastDifferentIndex, false); conditionalUpdate = String.format("update %s.stop_times set stop_sequence = case " + // if sequence = fromIndex, update to toIndex. "when stop_sequence = %d then %d " + // if sequence is less than fromIndex and greater than or equal to toIndex, increment "when stop_sequence < %d AND stop_sequence >= %d then stop_sequence + 1 " + // Otherwise, sequence remains untouched "else stop_sequence " + "end " + "from %s.trips where %s", tablePrefix, from, to, from, to, tablePrefix, joinToTrips); } else { throw new IllegalStateException("not a simple, single move!"); } // Update the stop sequences for the stop that was moved and the other stops within the changed region. 
PreparedStatement updateStatement = connection.prepareStatement(conditionalUpdate); LOG.info(updateStatement.toString()); int updated = updateStatement.executeUpdate(); LOG.info("Updated {} stop_times.", updated); } // CHECK IF SET OF STOPS ADDED TO END OF ORIGINAL LIST else if (originalStopIds.size() < newStops.size()) { // find the left bound of the changed region to check that no stops have changed in between int firstDifferentIndex = 0; while ( firstDifferentIndex < originalStopIds.size() && originalStopIds.get(firstDifferentIndex).equals(newStops.get(firstDifferentIndex).stop_id) ) { firstDifferentIndex++; } if (firstDifferentIndex != originalStopIds.size()) throw new IllegalStateException("When adding multiple stops to patterns, new stops must all be at the end"); // insert a skipped stop for each new element in newStops int stopsToInsert = newStops.size() - firstDifferentIndex; // FIXME: Should we be inserting blank stop times at all? Shouldn't these just inherit the arrival times // from the pattern stops? LOG.info("Adding {} stop times to existing {} stop times. Starting at {}", stopsToInsert, originalStopIds.size(), firstDifferentIndex); insertBlankStopTimes(tripsForPattern, newStops, firstDifferentIndex, stopsToInsert, connection); } // ANY OTHER TYPE OF MODIFICATION IS NOT SUPPORTED else { throw new IllegalStateException(RECONCILE_STOPS_ERROR_MSG); } } /** * Check the stops in the changed region to ensure they remain in the same order. If not, throw an exception to * cancel the transaction. */ private static void verifyInteriorStopsAreUnchanged(List<String> originalStopIds, List<PatternStop> newStops, int firstDifferentIndex, int lastDifferentIndex, boolean movedRight) { //Stops mapped to list of stop IDs simply for easier viewing/comparison with original IDs while debugging with // breakpoints. 
        List<String> newStopIds = newStops.stream().map(s -> s.stop_id).collect(Collectors.toList());
        // Determine the bounds of the region that should be identical between the two lists.
        int beginRegion = movedRight ? firstDifferentIndex : firstDifferentIndex + 1;
        int endRegion = movedRight ? lastDifferentIndex - 1 : lastDifferentIndex;
        for (int i = beginRegion; i <= endRegion; i++) {
            // Shift index when selecting stop from original list to account for displaced stop.
            int shiftedIndex = movedRight ? i + 1 : i - 1;
            String newStopId = newStopIds.get(i);
            String originalStopId = originalStopIds.get(shiftedIndex);
            if (!newStopId.equals(originalStopId)) {
                // If stop ID for new stop at the given index does not match the original stop ID, the order of at least
                // one interior stop has changed as well, which violates the rule enforcing
                // only a single addition, deletion, or transposition per update.
                throw new IllegalStateException(RECONCILE_STOPS_ERROR_MSG);
            }
        }
    }

    /**
     * You must call this method after updating sequences for any stop times following the starting stop sequence to
     * avoid overwriting these other stop times.
     */
    private void insertBlankStopTimes(List<String> tripIds, List<PatternStop> newStops, int startingStopSequence, int stopTimesToAdd, Connection connection) throws SQLException {
        if (tripIds.isEmpty()) {
            // There is no need to insert blank stop times if there are no trips for the pattern.
            return;
        }
        String insertSql = Table.STOP_TIMES.generateInsertSql(tablePrefix, true);
        PreparedStatement insertStatement = connection.prepareStatement(insertSql);
        // Count of rows added to the current batch (used to decide when to flush) and of rows flushed so far.
        int count = 0;
        int totalRowsUpdated = 0;
        // Create a new stop time for each sequence value (times each trip ID) that needs to be inserted.
for (int i = startingStopSequence; i < stopTimesToAdd + startingStopSequence; i++) { PatternStop patternStop = newStops.get(i); StopTime stopTime = new StopTime(); stopTime.stop_id = patternStop.stop_id; stopTime.drop_off_type = patternStop.drop_off_type; stopTime.pickup_type = patternStop.pickup_type; stopTime.timepoint = patternStop.timepoint; stopTime.shape_dist_traveled = patternStop.shape_dist_traveled; stopTime.stop_sequence = i; // Update stop time with each trip ID and add to batch. for (String tripId : tripIds) { stopTime.trip_id = tripId; stopTime.setStatementParameters(insertStatement, true); insertStatement.addBatch(); if (count % INSERT_BATCH_SIZE == 0) { int[] rowsUpdated = insertStatement.executeBatch(); totalRowsUpdated += rowsUpdated.length; } } } int[] rowsUpdated = insertStatement.executeBatch(); totalRowsUpdated += rowsUpdated.length; LOG.info("{} blank stop times inserted", totalRowsUpdated); } /** * For a given condition (fieldName = 'value'), delete all entities that match the condition. Because this uses the * primary delete method, it also will delete any "child" entities that reference any entities matching the original * query. */ @Override public int deleteWhere(String fieldName, String value, boolean autoCommit) throws SQLException { try { String tableName = String.join(".", tablePrefix, specTable.name); // Get the IDs for entities matching the where condition TIntSet idsToDelete = getIdsForCondition(tableName, fieldName, value, connection); TIntIterator iterator = idsToDelete.iterator(); TIntList results = new TIntArrayList(); while (iterator.hasNext()) { // For all entity IDs that match query, delete from referencing tables. int id = iterator.next(); // FIXME: Should this be a where in clause instead of iterating over IDs? 
                // Delete each entity and its referencing (child) entities
                int result = delete(id, false);
                if (result != 1) {
                    throw new SQLException("Could not delete entity with ID " + id);
                }
                results.add(result);
            }
            if (autoCommit) connection.commit();
            LOG.info("Deleted {} {} entities", results.size(), specTable.name);
            return results.size();
        } catch (Exception e) {
            // Undo any partial deletes before propagating.
            connection.rollback();
            LOG.error("Could not delete {} entity where {}={}", specTable.name, fieldName, value);
            e.printStackTrace();
            throw e;
        } finally {
            if (autoCommit) {
                // Always rollback and close if auto-committing. (Rollback after a successful commit is a no-op;
                // it only undoes work left uncommitted by an error path.)
                connection.rollback();
                connection.close();
            }
        }
    }

    /**
     * Deletes an entity for the specified ID.
     */
    @Override
    public int delete(Integer id, boolean autoCommit) throws SQLException {
        try {
            // Handle "cascading" delete or constraints on deleting entities that other entities depend on
            // (e.g., keep a calendar from being deleted if trips reference it).
            // FIXME: actually add "cascading"? Currently, it just deletes one level down.
            deleteFromReferencingTables(tablePrefix, specTable, connection, id);
            PreparedStatement statement = connection.prepareStatement(specTable.generateDeleteSql(tablePrefix));
            statement.setInt(1, id);
            LOG.info(statement.toString());
            // Execute query
            int result = statement.executeUpdate();
            if (result == 0) {
                LOG.error("Could not delete {} entity with id: {}", specTable.name, id);
                throw new SQLException("Could not delete entity");
            }
            if (autoCommit) connection.commit();
            // FIXME: change return message based on result value
            return result;
        } catch (Exception e) {
            LOG.error("Could not delete {} entity with id: {}", specTable.name, id);
            e.printStackTrace();
            throw e;
        } finally {
            if (autoCommit) {
                // Always rollback and close if auto-committing. (No-op after a successful commit; see deleteWhere.)
                connection.rollback();
                connection.close();
            }
        }
    }

    @Override
    public void commit() throws SQLException {
        // FIXME: should this take a connection and commit it?
        // Commits the writer's own connection and releases it; the writer must not be used afterwards.
        connection.commit();
        connection.close();
    }

    /**
     * Delete entities from any referencing tables (if required). This method is defined for convenience and clarity, but
     * essentially just runs updateReferencingTables with a null value for newKeyValue param.
     */
    private static void deleteFromReferencingTables(String namespace, Table table, Connection connection, int id) throws SQLException {
        updateReferencingTables(namespace, table, connection, id, null);
    }

    /**
     * Handle executing a prepared statement and return the ID for the newly-generated or updated entity.
     */
    private static long handleStatementExecution(PreparedStatement statement, boolean isCreating) throws SQLException {
        // Log the SQL for the prepared statement
        LOG.info(statement.toString());
        int affectedRows = statement.executeUpdate();
        // Determine operation-specific action for any error messages
        String messageAction = isCreating ? "Creating" : "Updating";
        if (affectedRows == 0) {
            // No update occurred.
            // TODO: add some clarity on cause (e.g., where clause found no entity with provided ID)?
            throw new SQLException(messageAction + " entity failed, no rows affected.");
        }
        try (ResultSet generatedKeys = statement.getGeneratedKeys()) {
            if (generatedKeys.next()) {
                // Get the auto-generated ID from the update execution
                long newId = generatedKeys.getLong(1);
                return newId;
            } else {
                throw new SQLException(messageAction + " entity failed, no ID obtained.");
            }
        } catch (SQLException e) {
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Checks for modification of GTFS key field (e.g., stop_id, route_id) in supplied JSON object and ensures
     * both uniqueness and that referencing tables are appropriately updated.
     *
     * FIXME: add more detail/precise language on what this method actually does
     */
    private static void ensureReferentialIntegrity(Connection connection, ObjectNode jsonObject, String namespace, Table table, Integer id) throws SQLException {
        // A null id means the caller is creating a new entity rather than updating an existing one.
        final boolean isCreating = id == null;
        String keyField = table.getKeyFieldName();
        String tableName = String.join(".", namespace, table.name);
        if (jsonObject.get(keyField) == null || jsonObject.get(keyField).isNull()) {
            // FIXME: generate key field automatically for certain entities (e.g., trip ID). Maybe this should be
            // generated for all entities if null?
            if ("trip_id".equals(keyField)) {
                jsonObject.put(keyField, UUID.randomUUID().toString());
            } else if ("agency_id".equals(keyField)) {
                // A null agency_id is tolerated only while at most one agency exists.
                LOG.warn("agency_id field for agency id={} is null.", id);
                int rowSize = getRowCount(tableName, connection);
                if (rowSize > 1 || (isCreating && rowSize > 0)) {
                    throw new SQLException("agency_id must not be null if more than one agency exists.");
                }
            } else {
                throw new SQLException(String.format("Key field %s must not be null", keyField));
            }
        }
        String keyValue = jsonObject.get(keyField).asText();
        // If updating key field, check that there is no ID conflict on value (e.g., stop_id or route_id)
        TIntSet uniqueIds = getIdsForCondition(tableName, keyField, keyValue, connection);
        int size = uniqueIds.size();
        if (size == 0 || (size == 1 && id != null && uniqueIds.contains(id))) {
            if (size == 0 && !isCreating) {
                // FIXME: Need to update referencing tables because entity has changed ID.
                // Entity key value is being changed to an entirely new one. If there are entities that
                // reference this value, we need to update them.
                updateReferencingTables(namespace, table, connection, id, keyValue);
            }
        } else {
            // Conflict. The different conflict conditions are outlined below.
            if (size == 1) {
                // There was one match found.
                if (isCreating) {
                    // Under no circumstance should a new entity have a conflict with existing key field.
                    throw new SQLException("New entity's key field must not match existing value.");
                }
                if (!uniqueIds.contains(id)) {
                    // There are two circumstances we could encounter here.
                    // 1. The key value for this entity has been updated to match some other entity's key value (conflict).
                    // 2. The int ID provided in the request parameter does not match any rows in the table.
                    throw new SQLException("Key field must be unique and request parameter ID must exist.");
                }
            } else if (size > 1) {
                // FIXME: Handle edge case where original data set contains duplicate values for key field and this is an
                // attempt to rectify bad data.
                String message = String.format(
                        "%d %s entities shares the same key field (%s=%s)! Key field must be unique.",
                        size,
                        table.name,
                        keyField,
                        keyValue);
                LOG.error(message);
                throw new SQLException(message);
            }
        }
    }

    /**
     * Get number of rows for a table. This is currently just used to check the number of entities for the agency table.
     */
    private static int getRowCount(String tableName, Connection connection) throws SQLException {
        String rowCountSql = String.format("SELECT COUNT(*) FROM %s", tableName);
        LOG.info(rowCountSql);
        // Create statement for counting rows selected
        Statement statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery(rowCountSql);
        if (resultSet.next()) return resultSet.getInt(1);
        else return 0;
    }

    /**
     * For some condition (where field = string value), return the set of unique int IDs for the records that match.
     */
    private static TIntSet getIdsForCondition(String tableName, String keyField, String keyValue, Connection connection) throws SQLException {
        String idCheckSql = String.format("select id from %s where %s = ?", tableName, keyField);
        // Create statement for counting rows selected
        PreparedStatement statement = connection.prepareStatement(idCheckSql);
        statement.setString(1, keyValue);
        LOG.info(statement.toString());
        ResultSet resultSet = statement.executeQuery();
        // Keep track of number of records found with key field
        TIntSet uniqueIds = new TIntHashSet();
        while (resultSet.next()) {
            int uniqueId = resultSet.getInt(1);
            uniqueIds.add(uniqueId);
            LOG.info("entity id: {}, where {}: {}", uniqueId, keyField, keyValue);
        }
        return uniqueIds;
    }

    /**
     * Finds the set of tables that reference the parent entity being updated.
     */
    private static Set<Table> getReferencingTables(Table table) {
        String keyField = table.getKeyFieldName();
        Set<Table> referencingTables = new HashSet<>();
        for (Table gtfsTable : Table.tablesInOrder) {
            // IMPORTANT: Skip the table for the entity we're modifying or if loop table does not have field.
            if (table.name.equals(gtfsTable.name)) continue; // || !gtfsTable.hasField(keyField)
            for (Field field : gtfsTable.fields) {
                if (field.isForeignReference() && field.referenceTable.name.equals(table.name)) {
                    // If any of the table's fields are foreign references to the specified table, add to the return set.
                    referencingTables.add(gtfsTable);
                }
            }
            // Field tableField = gtfsTable.getFieldForName(keyField);
            // // If field is not a foreign reference, continue. (This should probably never be the case because a field
            // // that shares the key field's name ought to refer to the key field.
            // if (!tableField.isForeignReference()) continue;
        }
        return referencingTables;
    }

    /**
     * For a given integer ID, return the value for the specified field name for that entity.
     */
    private static String getValueForId(int id, String fieldName, String namespace, Table table, Connection connection) throws SQLException {
        String tableName = String.join(".", namespace, table.name);
        // id is an int, so direct interpolation here cannot inject arbitrary SQL.
        String selectIdSql = String.format("select %s from %s where id = %d", fieldName, tableName, id);
        LOG.info(selectIdSql);
        Statement selectIdStatement = connection.createStatement();
        ResultSet selectResults = selectIdStatement.executeQuery(selectIdSql);
        // Returns null when no row matches the given id.
        String keyValue = null;
        while (selectResults.next()) {
            keyValue = selectResults.getString(1);
        }
        return keyValue;
    }

    /**
     * Updates any foreign references that exist should a GTFS key field (e.g., stop_id or route_id) be updated via an
     * HTTP request for a given integer ID. First, all GTFS tables are filtered to find referencing tables. Then records
     * in these tables that match the old key value are modified to match the new key value.
     *
     * The function determines whether the method is update or delete depending on the presence of the newKeyValue
     * parameter (if null, the method is DELETE). Custom logic/hooks could be added here to check if there are entities
     * referencing the entity being updated.
     *
     * FIXME: add custom logic/hooks. Right now entity table checks are hard-coded in (e.g., if Agency, skip all. OR if
     * Calendar, rollback transaction if there are referencing trips).
     *
     * FIXME: Do we need to clarify the impact of the direction of the relationship (e.g., if we delete a trip, that should
     * not necessarily delete a shape that is shared by multiple trips)? I think not because we are skipping foreign refs
     * found in the table for the entity being updated/deleted. [Leaving this comment in place for now though.]
     */
    private static void updateReferencingTables(String namespace, Table table, Connection connection, int id, String newKeyValue) throws SQLException {
        Field keyField = table.getFieldForName(table.getKeyFieldName());
        Class<? extends Entity> entityClass = table.getEntityClass();
        // Determine method (update vs.
        // delete) depending on presence of newKeyValue field.
        SqlMethod sqlMethod = newKeyValue != null ? SqlMethod.UPDATE : SqlMethod.DELETE;
        Set<Table> referencingTables = getReferencingTables(table);
        // If there are no referencing tables, there is no need to update any values (nothing references this table).
        if (referencingTables.size() == 0) return;
        String keyValue = getValueForId(id, keyField.name, namespace, table, connection);
        if (keyValue == null) {
            // FIXME: should we still check referencing tables for null value?
            LOG.warn("Entity {} to {} has null value for {}. Skipping references check.", id, sqlMethod, keyField);
            return;
        }
        for (Table referencingTable : referencingTables) {
            // Update/delete foreign references that match the key value.
            String refTableName = String.join(".", namespace, referencingTable.name);
            for (Field field : referencingTable.editorFields()) {
                if (field.isForeignReference() && field.referenceTable.name.equals(table.name)) {
                    // FIXME: Are there other references that are not being captured???
                    // Cascade delete stop times and frequencies for trips. This must happen before trips are deleted
                    // below. Otherwise, there are no trips with which to join.
                    if ("trips".equals(referencingTable.name)) {
                        String stopTimesTable = String.join(".", namespace, "stop_times");
                        String frequenciesTable = String.join(".", namespace, "frequencies");
                        String tripsTable = String.join(".", namespace, "trips");
                        // Delete stop times and frequencies for trips for pattern
                        // NOTE(review): keyValue is interpolated directly into these statements — presumably key
                        // values are validated/server-generated, but parameter binding would be safer. TODO confirm.
                        String deleteStopTimes = String.format(
                                "delete from %s using %s where %s.trip_id = %s.trip_id and %s.pattern_id = '%s'",
                                stopTimesTable, tripsTable, stopTimesTable, tripsTable, tripsTable, keyValue);
                        LOG.info(deleteStopTimes);
                        PreparedStatement deleteStopTimesStatement = connection.prepareStatement(deleteStopTimes);
                        int deletedStopTimes = deleteStopTimesStatement.executeUpdate();
                        LOG.info("Deleted {} stop times for pattern {}", deletedStopTimes, keyValue);
                        String deleteFrequencies = String.format(
                                "delete from %s using %s where %s.trip_id = %s.trip_id and %s.pattern_id = '%s'",
                                frequenciesTable, tripsTable, frequenciesTable, tripsTable, tripsTable, keyValue);
                        LOG.info(deleteFrequencies);
                        PreparedStatement deleteFrequenciesStatement = connection.prepareStatement(deleteFrequencies);
                        int deletedFrequencies = deleteFrequenciesStatement.executeUpdate();
                        LOG.info("Deleted {} frequencies for pattern {}", deletedFrequencies, keyValue);
                    }
                    // Get unique IDs before delete (for logging/message purposes).
                    // TIntSet uniqueIds = getIdsForCondition(refTableName, keyField, keyValue, connection);
                    String updateRefSql = getUpdateReferencesSql(sqlMethod, refTableName, field, keyValue, newKeyValue);
                    LOG.info(updateRefSql);
                    Statement updateStatement = connection.createStatement();
                    int result = updateStatement.executeUpdate(updateRefSql);
                    if (result > 0) {
                        // FIXME: is this where a delete hook should go? (E.g., CalendarController subclass would override
                        // deleteEntityHook).
                        // deleteEntityHook();
                        if (sqlMethod.equals(SqlMethod.DELETE)) {
                            // Check for restrictions on delete.
                            if (table.isCascadeDeleteRestricted()) {
                                // The entity must not have any referencing entities in order to delete it.
                                connection.rollback();
                                // List<String> idStrings = new ArrayList<>();
                                // uniqueIds.forEach(uniqueId -> idStrings.add(String.valueOf(uniqueId)));
                                // String message = String.format("Cannot delete %s %s=%s. %d %s reference this %s (%s).", entityClass.getSimpleName(), keyField, keyValue, result, referencingTable.name, entityClass.getSimpleName(), String.join(",", idStrings));
                                String message = String.format(
                                        "Cannot delete %s %s=%s. %d %s reference this %s.",
                                        entityClass.getSimpleName(),
                                        keyField.name,
                                        keyValue,
                                        result,
                                        referencingTable.name,
                                        entityClass.getSimpleName());
                                LOG.warn(message);
                                throw new SQLException(message);
                            }
                        }
                        LOG.info("{} reference(s) in {} {}D!", result, refTableName, sqlMethod);
                    } else {
                        LOG.info("No references in {} found!", refTableName);
                    }
                }
            }
        }
    }

    /**
     * Constructs SQL string based on method provided.
     */
    private static String getUpdateReferencesSql(SqlMethod sqlMethod, String refTableName, Field keyField, String keyValue, String newKeyValue) throws SQLException {
        boolean isArrayField = keyField.getSqlType().equals(JDBCType.ARRAY);
        switch (sqlMethod) {
            case DELETE:
                if (isArrayField) {
                    return String.format("delete from %s where %s @> ARRAY['%s']::text[]", refTableName, keyField.name, keyValue);
                } else {
                    return String.format("delete from %s where %s = '%s'", refTableName, keyField.name, keyValue);
                }
            case UPDATE:
                if (isArrayField) {
                    // If the field to be updated is an array field (of which there are only text[] types in the db),
                    // replace the old value with the new value using array contains clause.
                    // FIXME This is probably horribly postgres specific.
                    return String.format("update %s set %s = array_replace(%s, '%s', '%s') where %s @> ARRAY['%s']::text[]", refTableName, keyField.name, keyField.name, keyValue, newKeyValue, keyField.name, keyValue);
                } else {
                    // Scalar foreign-key column: rewrite old key value to the new one directly.
                    return String.format("update %s set %s = '%s' where %s = '%s'", refTableName, keyField.name, newKeyValue, keyField.name, keyValue);
                }
            // case CREATE:
            //     return String.format("insert into %s ");
            default:
                throw new SQLException("SQL Method must be DELETE or UPDATE.");
        }
    }
}
package com.conveyal.taui.analysis.broker;

import com.conveyal.r5.analyst.WorkerCategory;
import com.conveyal.r5.analyst.cluster.RegionalTask;
import com.conveyal.r5.analyst.cluster.RegionalWorkResult;
import com.conveyal.r5.analyst.cluster.WorkerStatus;
import com.conveyal.taui.AnalysisServerConfig;
import com.conveyal.taui.AnalysisServerException;
import com.conveyal.taui.GridResultAssembler;
import com.conveyal.taui.analysis.RegionalAnalysisStatus;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import gnu.trove.TCollections;
import gnu.trove.map.TObjectLongMap;
import gnu.trove.map.hash.TObjectLongHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * This class distributes the tasks making up regional jobs to workers.
 *
 * It should aim to draw tasks fairly from all organizations, and fairly from all jobs within each organization,
 * while attempting to respect the transport network affinity of each worker, giving the worker tasks that require
 * the same network it has been using recently.
 *
 * Previously workers long-polled for work, holding lots of connections open. Now they short-poll and sleep for a while
 * if there's no work. This is simpler and allows us to work within much more standard HTTP frameworks.
 *
 * The fact that workers continuously re-poll for work every 10-30 seconds serves as a signal to the broker that
 * they are still alive and waiting. This also allows the broker to maintain a catalog of active workers.
* * Because (at least currently) two organizations never share the same graph, we can get by with pulling tasks * cyclically or randomly from all the jobs, and actively shape the number of workers with affinity for each graph by * forcing some of them to accept tasks on graphs other than the one they have declared affinity for. * * This could be thought of as "affinity homeostasis". We will constantly keep track of the ideal proportion of workers * by graph (based on active jobs), and the true proportion of consumers by graph (based on incoming polling), then * we can decide when a worker's graph affinity should be ignored and what it should be forced to. * * It may also be helpful to mark jobs every time they are skipped in the LRU queue. Each time a job is serviced, * it is taken out of the queue and put at its end. Jobs that have not been serviced float to the top. * * Most methods on this class are synchronized, because they can be called from many HTTP handler threads at once. * TODO evaluate whether synchronizing all the functions to make this threadsafe is a performance issue. */ public class Broker { private static final Logger LOG = LoggerFactory.getLogger(Broker.class); public final ListMultimap<WorkerCategory, Job> jobs = MultimapBuilder.hashKeys().arrayListValues().build(); /** The most tasks to deliver to a worker at a time. */ public final int MAX_TASKS_PER_WORKER = 16; /** Used when auto-starting spot instances. Set to a smaller value to increase the number of workers requested * automatically*/ public final int TARGET_TASKS_PER_WORKER = 400; /** We want to request spot instances to "boost" regional analyses after a few regional task results are received * for a given workerCategory. 
Do so after receiving results for an arbitrary task toward the beginning of the job*/ public final int AUTO_START_SPOT_INSTANCES_AT_TASK = MAX_TASKS_PER_WORKER * 2 + 10; /** The maximum number of spot instances allowable in an automatic request */ public final int MAX_WORKERS_PER_CATEGORY = 100; /** * How long to give workers to start up (in ms) before assuming that they have started (and starting more * on a given graph if they haven't. */ public static final long WORKER_STARTUP_TIME = 60 * 60 * 1000; /** Maximum number of workers allowed */ private int maxWorkers; /** The configuration that will be applied to workers launched by this broker. */ private Properties workerConfig; /** Keeps track of all the workers that have contacted this broker recently asking for work. */ protected WorkerCatalog workerCatalog = new WorkerCatalog(); /** If true, avoid using remote hosted services. */ private boolean workOffline; /** Amazon AWS SDK client. */ private EC2Launcher launcher; /** These objects piece together results received from workers into one regional analysis result file per job. */ private static Map<String, GridResultAssembler> resultAssemblers = new HashMap<>(); /** * keep track of which graphs we have launched workers on and how long ago we launched them, * so that we don't re-request workers which have been requested. */ public TObjectLongMap<WorkerCategory> recentlyRequestedWorkers = TCollections.synchronizedMap(new TObjectLongHashMap<>()); public Broker () { // print out date on startup so that CloudWatch logs has a unique fingerprint LOG.info("Analyst broker starting at {}", LocalDateTime.now().format(DateTimeFormatter.ISO_DATE_TIME)); this.workOffline = AnalysisServerConfig.offline; if (!workOffline){ this.launcher = new EC2Launcher(); } this.maxWorkers = AnalysisServerConfig.maxWorkers; } /** * Enqueue a set of tasks for a regional analysis. * Only a single task is passed in, which the broker will expand into all the individual tasks for a regional job. 
* We pass in the group and user only to tag any newly created workers. This should probably be done in the caller. * TODO push the creation of the TemplateTask down into this method, to avoid last two parameters? * TODO make the tags a simple Map from String -> String here and for worker startup. */ public synchronized void enqueueTasksForRegionalJob (RegionalTask templateTask, String accessGroup, String createdBy) { LOG.info("Enqueuing tasks for job {} using template task.", templateTask.jobId); if (findJob(templateTask.jobId) != null) { LOG.error("Someone tried to enqueue job {} but it already exists.", templateTask.jobId); throw new RuntimeException("Enqueued duplicate job " + templateTask.jobId); } Job job = new Job(templateTask, accessGroup, createdBy); jobs.put(job.workerCategory, job); // Register the regional job so results received from multiple workers can be assembled into one file. resultAssemblers.put(templateTask.jobId, new GridResultAssembler(templateTask, AnalysisServerConfig.resultsBucket)); if (AnalysisServerConfig.testTaskRedelivery) { // This is a fake job for testing, don't confuse the worker startup code below with null graph ID. return; } if (workerCatalog.noWorkersAvailable(job.workerCategory, workOffline)) { createOnDemandWorkerInCategory(job.workerCategory, accessGroup, createdBy); } else { // Workers exist in this category, clear out any record that we're waiting for one to start up. recentlyRequestedWorkers.remove(job.workerCategory); } } /** * Create on-demand worker for a given job. 
* @param user only used to tag the newly created instance * @param group only used to tag the newly created instance */ public void createOnDemandWorkerInCategory(WorkerCategory category, String group, String user){ createWorkersInCategory(category, group, user, 1, 0); } /** * Create on-demand/spot workers for a given job, after certain checks * @param user only used to tag the newly created instance * @param group only used to tag the newly created instance * @param nOnDemand EC2 on-demand instances to request * @param nSpot EC2 spot instances to request */ public void createWorkersInCategory (WorkerCategory category, String group, String user, int nOnDemand, int nSpot) { if (workOffline) { LOG.info("Work offline enabled, not creating workers for {}", category); return; } if (nOnDemand < 0 || nSpot < 0){ LOG.info("Negative number of workers requested, not starting any"); return; } if (workerCatalog.totalWorkerCount() + nOnDemand + nSpot >= maxWorkers) { throw AnalysisServerException.forbidden("\"Maximum of {} workers already started, not starting more; jobs" + " will not complete on {}\", maxWorkers, category"); } // If workers have already been started up, don't repeat the operation. if (recentlyRequestedWorkers.containsKey(category) && recentlyRequestedWorkers.get(category) >= System.currentTimeMillis() - WORKER_STARTUP_TIME){ LOG.info("Workers still starting on {}, not starting more", category); return; } EC2RequestConfiguration config = new EC2RequestConfiguration(category, group, user); launcher.launch(config, nOnDemand, nSpot); // Record the fact that we've requested an on-demand worker so we don't do it repeatedly. if (nOnDemand > 0) { recentlyRequestedWorkers.put(category, System.currentTimeMillis()); } LOG.info("Requested {} on-demand and {} spot workers on {}", nOnDemand, nSpot, config); } /** * Attempt to find some tasks that match what a worker is requesting. * Always returns a list, which may be empty if there is nothing to deliver. 
*/ public synchronized List<RegionalTask> getSomeWork (WorkerCategory workerCategory) { Job job; if (AnalysisServerConfig.offline) { // Working in offline mode; get tasks from the first job that has any tasks to deliver. job = jobs.values().stream() .filter(j -> j.hasTasksToDeliver()).findFirst().orElse(null); } else { // This worker has a preferred network, get tasks from a job on that network. job = jobs.get(workerCategory).stream() .filter(j -> j.hasTasksToDeliver()).findFirst().orElse(null); } if (job == null) { // No matching job was found. return Collections.EMPTY_LIST; } // Return up to N tasks that are waiting to be processed. return job.generateSomeTasksToDeliver(MAX_TASKS_PER_WORKER); } /** * Take a normal (non-priority) task out of a job queue, marking it as completed so it will not be re-delivered. * The result of the computation is supplied. * TODO separate completion out from returning the work product, since they have different synchronization requirements * this would also allow returning errors as JSON and the grid result separately. * @return whether the task was found and removed. */ public synchronized boolean markTaskCompleted (RegionalWorkResult workResult) { String jobId = workResult.jobId; int taskId = workResult.taskId; Job job = findJob(jobId); if (job == null) { LOG.error("Could not find a job with ID {} and therefore could not mark the task as completed.", jobId); return false; } if (!job.markTaskCompleted(taskId)) { LOG.error("Failed to mark task {} completed on job {}.", taskId, jobId); } // Once the last task is marked as completed, the job is finished. Purge it from the list to free memory. if (job.isComplete()) { job.verifyComplete(); jobs.remove(job.workerCategory, job); // This method is called after the regional work results are handled, finishing and closing the local file. // So we can harmlessly remove the GridResultAssembler now that the job is removed. 
resultAssemblers.remove(jobId); } return true; } /** Find the job for the given jobId, returning null if that job does not exist. */ public Job findJob (String jobId) { return jobs.values().stream().filter(job -> job.jobId.equals(jobId)).findFirst().orElse(null); } /** * Delete the job with the given ID. */ public synchronized boolean deleteJob (String jobId) { // Remove the job from the broker so we stop distributing its tasks to workers. Job job = findJob(jobId); if (job == null) return false; boolean success = jobs.remove(job.workerCategory, job); // Shut down the object used for assembling results, removing its associated temporary disk file. // TODO just put the assembler in the Job object GridResultAssembler assembler = resultAssemblers.remove(jobId); try { assembler.terminate(); } catch (Exception e) { LOG.error("Could not terminate grid result assembler, this may waste disk space. Reason: {}", e.toString()); success = false; } // TODO where do we delete the regional analysis from Persistence so it doesn't show up in the UI after deletion? return success; } /** * Given a worker commit ID and transport network, return the IP or DNS name of a worker that has that software * and network already loaded. If none exist, return null and try to start one. */ public synchronized String getWorkerAddress(WorkerCategory workerCategory) { if (workOffline) { return "localhost"; } // First try to get a worker that's already loaded the right network. // This value will be null if no workers exist in this category - caller should attempt to create some. String workerAddress = workerCatalog.getSinglePointWorkerAddressForCategory(workerCategory); return workerAddress; } /** * Get a collection of all the workers that have recently reported to this broker. * The returned objects are designed to be serializable so they can be returned over an HTTP API. 
*/ public Collection<WorkerObservation> getWorkerObservations () { return workerCatalog.getAllWorkerObservations(); } /** * Get a collection of all unfinished jobs being managed by this broker. * The returned objects are designed to be serializable so they can be returned over an HTTP API. */ public Collection<JobStatus> getJobSummary() { List<JobStatus> jobStatusList = new ArrayList<>(); for (Job job : this.jobs.values()) { jobStatusList.add(new JobStatus(job)); } // Add a summary of all jobs to the list. jobStatusList.add(new JobStatus(jobStatusList)); return jobStatusList; } /** * Record information that a worker sent about itself. */ public void recordWorkerObservation(WorkerStatus workerStatus) { workerCatalog.catalog(workerStatus); } /** * Slots a single regional work result received from a worker into the appropriate position in the appropriate file. * Also considers requesting extra spot instances after a few results have been received. The checks in place * should prevent an unduly large number of workers from proliferating, assuming jobs for a given worker category (transport * network + R5 version) are completed sequentially. * @param workResult an object representing accessibility results for a single-origin, sent by a worker. 
*/ public void handleRegionalWorkResult (RegionalWorkResult workResult) { GridResultAssembler assembler = resultAssemblers.get(workResult.jobId); if (assembler == null) { LOG.error("Received result for unrecognized job ID {}, discarding.", workResult.jobId); } else { assembler.handleMessage(workResult); // When results for the task with the magic number are received, consider boosting the job by starting EC2 // spot instances if (workResult.taskId == AUTO_START_SPOT_INSTANCES_AT_TASK) { requestExtraWorkersIfAppropriate(workResult); } } } private void requestExtraWorkersIfAppropriate(RegionalWorkResult workResult) { Job job = findJob(workResult.jobId); WorkerCategory workerCategory = job.workerCategory; int categoryWorkersAlreadyRunning = workerCatalog.countWorkersInCategory(workerCategory); if (categoryWorkersAlreadyRunning < MAX_WORKERS_PER_CATEGORY) { // Start a number of workers that scales with the number of total tasks, up to a fixed number. // TODO more refined determination of number of workers to start (e.g. using tasks per minute) int nSpot = Math.min(MAX_WORKERS_PER_CATEGORY, job.nTasksTotal / TARGET_TASKS_PER_WORKER) - categoryWorkersAlreadyRunning; createWorkersInCategory(job.workerCategory, job.accessGroup, job.createdBy, 0, nSpot); } } /** * Returns a simple status object intended to inform the UI of job progress. 
*/ public RegionalAnalysisStatus getJobStatus (String jobId) { GridResultAssembler gridResultAssembler = resultAssemblers.get(jobId); if (gridResultAssembler == null) { return null; } else { return new RegionalAnalysisStatus(gridResultAssembler); } } public File getPartialRegionalAnalysisResults (String jobId) { GridResultAssembler gridResultAssembler = resultAssemblers.get(jobId); if (gridResultAssembler == null) { return null; } else { return gridResultAssembler.getBufferFile(); } } public boolean anyJobsActive () { for (Job job : jobs.values()) { if (!job.isComplete()) return true; } return false; } public void logJobStatus() { for (Job job : jobs.values()) { LOG.info(job.toString()); } } }
/**
 * <b>{@link com.datatorrent.common.util}</b> is a library of open-source utility classes shared by all other Malhar libraries.
 */
package com.datatorrent.common.util;
package com.elmakers.mine.bukkit.api.spell; import com.elmakers.mine.bukkit.api.magic.MageController; import org.bukkit.Location; import org.bukkit.command.CommandSender; import org.bukkit.entity.Entity; import org.bukkit.entity.Player; import org.bukkit.util.Vector; /** * Represents a Spell that may be cast by a Mage. * * Each Spell is based on a SpellTemplate, which are defined * by the spells configuration files. * * Every spell uses a specific Class that must extend from * com.elmakers.mine.bukkit.plugins.magic.spell.Spell. * * To create a new custom spell from scratch, you must also * implement the MageSpell interface. */ public interface Spell extends SpellTemplate { public MageController getController(); public boolean cast(); public boolean cast(String[] parameters); public boolean cast(String[] parameters, Location defaultLocation); public Location getLocation(); public void target(); public Location getTargetLocation(); public Entity getTargetEntity(); public Vector getDirection(); public boolean canTarget(Entity entity); public boolean isActive(); public boolean hasBrushOverride(); public boolean canCast(Location location); public long getRemainingCooldown(); public CastingCost getRequiredCost(); public void messageTargets(String messageKey); public void playEffects(String effectName); public void playEffects(String effectName, float scale, Location source, Entity sourceEntity, Location target, Entity targetEntity); }
package netspy.components.gui.components.frame;

import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;

import javax.swing.DefaultListModel;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.ScrollPaneConstants;

import netspy.components.config.ConfigPropertiesManager;
import netspy.components.gui.components.frame.components.LogBox;
import netspy.components.gui.components.listeners.NetSpyActionListener;

/**
 * The main application window of NetSpy: a form of four path-chooser rows
 * (mail, blackword, log, quarantine), scan/log control buttons, and a
 * scrollable log box.
 */
public class NetSpyFrame extends JFrame {

    /** The Constant serialVersionUID. */
    private static final long serialVersionUID = -2357381332647405895L;

    /** The Constant APPLICATION_TITLE. */
    public static final String APPLICATION_TITLE = "NetSpy 2";

    /** The Constant INPUT_ID_LOG_PATH. */
    private static final String INPUT_ID_LOG_PATH = "input_log_path";

    /** The Constant INPUT_ID_MAIL_PATH. */
    private static final String INPUT_ID_MAIL_PATH = "input_mail_path";

    /** The Constant INPUT_ID_BLACKWORD_PATH. */
    private static final String INPUT_ID_BLACKWORD_PATH = "input_blackword_path";

    /** The Constant INPUT_ID_QUARANTINE_PATH. */
    private static final String INPUT_ID_QUARANTINE_PATH = "input_quarantine_path";

    /** The Constant LABEL_QUARANTAENE_PATH. */
    private static final String LABEL_QUARANTAENE_PATH = "Quarantäne-Verzeichnis:";

    /** The Constant LABEL_LOG_PATH. */
    private static final String LABEL_LOG_PATH = "Log-Verzeichnis:";

    /** The Constant LABEL_BLACKWORD_PATH. */
    private static final String LABEL_BLACKWORD_PATH = "Blackword-Datei:";

    /** The Constant LABEL_MAIL_PATH. */
    private static final String LABEL_MAIL_PATH = "Mail-Verzeichnis:";

    /** The Constant BUTTON_LABEL_HIDE_LOGBOX. */
    public static final String BUTTON_LABEL_HIDE_LOGBOX = "Log ausblenden";

    /** The Constant BUTTON_LABEL_CLEAR_LOGBOX. */
    private static final String BUTTON_LABEL_CLEAR_LOGBOX = "Log leeren";

    /** The Constant BUTTON_LABEL_SHOW_LOGBOX. */
    public static final String BUTTON_LABEL_SHOW_LOGBOX = "Log einblenden";

    /** The Constant BUTTON_LABEL_START_SCAN. */
    private static final String BUTTON_LABEL_START_SCAN = "Start scan";

    /** The Constant BUTTON_LABEL_SEARCH_FILE. */
    private static final String BUTTON_LABEL_SEARCH_FILE = "Durchsuchen";

    /** The Constant BUTTON_ID_MAIL_PATH. */
    public static final String BUTTON_ID_MAIL_PATH = "button_mail_path";

    /** The Constant BUTTON_ID_BLACKWORD_PATH. */
    public static final String BUTTON_ID_BLACKWORD_PATH = "button_blackword_path";

    /** The Constant BUTTON_ID_QUARANTINE_PATH. */
    public static final String BUTTON_ID_QUARANTINE_PATH = "button_quarantine_path";

    /** The Constant BUTTON_ID_LOG_PATH. */
    public static final String BUTTON_ID_LOG_PATH = "button_log_path";

    /** The Constant BUTTON_ID_START_SCAN. */
    public static final String BUTTON_ID_START_SCAN = "button_start_scan";

    /** The Constant BUTTON_ID_TOGGLE_LOGBOX. */
    public static final String BUTTON_ID_TOGGLE_LOGBOX = "toggle_logbox";

    /** The Constant BUTTON_ID_CLEAR_LOGBOX. */
    public static final String BUTTON_ID_CLEAR_LOGBOX = "clear_logbox";

    /** The Constant DIMENSION_TEXTFIELD_SIZE. */
    private static final Dimension DIMENSION_TEXTFIELD_SIZE = new Dimension(250, 25);

    /** The Constant DIMENSION_LABEL_SIZE. */
    private static final Dimension DIMENSION_LABEL_SIZE = new Dimension(50, 25);

    /** The Constant DIMENSION_BUTTON_SIZE. */
    private static final Dimension DIMENSION_BUTTON_SIZE = new Dimension(120, 25);

    /** The action listener shared by all buttons of this frame. */
    private NetSpyActionListener actionListener = new NetSpyActionListener(this);

    /** The Input mail path. */
    private JTextField inputMailPath;

    /** The Input blackword path. */
    private JTextField inputBlackwordPath;

    /** The Input quarantine path. */
    private JTextField inputQuarantinePath;

    /** The Input log path. */
    private JTextField inputLogPath;

    /** The log box. */
    private LogBox logBox = new LogBox();

    /** The main panel. */
    private JPanel mainPanel = new JPanel();

    /** The empty row. */
    private JPanel emptyRow = new JPanel();

    /** The shared GridBagConstraints, mutated as components are added. */
    private GridBagConstraints gbc = new GridBagConstraints();

    /** The prop conf. */
    ConfigPropertiesManager propConf;

    /**
     * Instantiates a new NetSpy frame, loading configuration and building the UI.
     */
    public NetSpyFrame() {
        super();
        initConf();
        initialize();
    }

    /**
     * Initializes the configuration manager backed by the log box.
     */
    private void initConf() {
        propConf = new ConfigPropertiesManager(this.logBox);
        propConf.init();
    }

    /**
     * Builds and shows the frame: window properties, icon, layout and content.
     */
    private void initialize() {
        // general configuration of frame
        this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        this.setTitle(APPLICATION_TITLE);
        this.setBackground(Color.RED);
        this.setBounds(500, 200, 1024, 768);
        this.setResizable(false);

        // Application Icon
        ImageIcon appIcon = new ImageIcon(System.getProperty("user.dir") + "/resources/img/system_search.png");
        this.setIconImage(appIcon.getImage());

        // Layout
        this.mainPanel.setLayout(new GridBagLayout());
        gbc.insets = new Insets(5, 5, 5, 5);

        // Background color
        this.mainPanel.setBackground(Color.WHITE);

        // create content
        this.setTitlePanel();
        this.setFormLayout();
        this.setInfoBox();

        this.add(this.mainPanel);
        this.pack();
        this.setVisible(true);
    }

    /**
     * Adds the title panel spanning the full width of row 0.
     */
    private void setTitlePanel() {
        // y = 0, x = 0-7
        gbc.gridx = 0;
        gbc.gridy = 0;
        gbc.gridwidth = 8;
        gbc.anchor = GridBagConstraints.CENTER;
        gbc.fill = GridBagConstraints.HORIZONTAL;

        final JPanel titlePanel = new JPanel();
        titlePanel.setBackground(Color.WHITE);
        titlePanel.add(new JLabel(APPLICATION_TITLE));
        this.mainPanel.add(titlePanel, gbc);
    }

    /**
     * Adds one standard form row: a label (x 0-1), a non-editable path text field
     * (x 2-5) and a "Durchsuchen" browse button (x 6-7).
     *
     * @param row        grid row (gbc.gridy) the components go into
     * @param labelText  text of the row label
     * @param inputName  component name of the text field (used by the action listener)
     * @param inputText  initial text of the field, taken from the configuration
     * @param tooltip    tooltip shown on the text field
     * @param buttonId   component name of the browse button
     * @return the created text field, so the caller can keep a reference
     */
    private JTextField createPathRow(final int row, final String labelText, final String inputName,
            final String inputText, final String tooltip, final String buttonId) {
        // LABEL: x = 0-1, fill none
        gbc.gridx = 0;
        gbc.gridy = row;
        gbc.gridwidth = 2;
        gbc.anchor = GridBagConstraints.LINE_START;
        gbc.fill = GridBagConstraints.NONE;
        final JLabel label = new JLabel(labelText);
        label.setSize(DIMENSION_LABEL_SIZE);
        this.mainPanel.add(label, gbc);

        // INPUT: x = 2-5, fill horizontal
        gbc.gridx = 2;
        gbc.gridwidth = 4;
        gbc.fill = GridBagConstraints.HORIZONTAL;
        final JTextField input = new JTextField();
        input.setText(inputText);
        input.setPreferredSize(DIMENSION_TEXTFIELD_SIZE);
        input.setEditable(false);
        input.setName(inputName);
        input.setToolTipText(tooltip);
        this.mainPanel.add(input, gbc);

        // BUTTON FOR CHOOSER: x = 6-7, fill horizontal
        gbc.gridx = 6;
        gbc.gridwidth = 2;
        gbc.fill = GridBagConstraints.HORIZONTAL;
        final JButton button = new JButton(BUTTON_LABEL_SEARCH_FILE);
        button.setName(buttonId);
        button.addActionListener(actionListener);
        button.setPreferredSize(DIMENSION_BUTTON_SIZE);
        this.mainPanel.add(button, gbc);

        return input;
    }

    /**
     * Builds the form: four path-chooser rows, a spacer row, and the
     * clear-log / start-scan buttons. The repeated row construction has been
     * factored into {@link #createPathRow}.
     */
    private void setFormLayout() {
        // MAIL PATH (row 1)
        this.inputMailPath = createPathRow(1, LABEL_MAIL_PATH, INPUT_ID_MAIL_PATH,
                propConf.getInboxPath(),
                "Wähle eine konkrete .eml-Datei oder ein\n Verzeichnis, in dem alle .eml-Dateien durchsucht werden sollen.",
                BUTTON_ID_MAIL_PATH);

        // TODO: @Kevin insert DefaultListModel

        // BLACKWORD PATH (row 2)
        this.inputBlackwordPath = createPathRow(2, LABEL_BLACKWORD_PATH, INPUT_ID_BLACKWORD_PATH,
                propConf.getBlackwordPath(),
                "Wähle die blacklist.txt-Datei aus, anhand welcher die Emails überprüft werden sollen.",
                BUTTON_ID_BLACKWORD_PATH);

        // LOG PATH (row 3)
        this.inputLogPath = createPathRow(3, LABEL_LOG_PATH, INPUT_ID_LOG_PATH,
                propConf.getLogPath(),
                "Wähle das Log-Verzeichnis aus. Dort werden die Informationen über verdächtige Emails gespeichert.",
                BUTTON_ID_LOG_PATH);

        // QUARANTINE PATH (row 4)
        this.inputQuarantinePath = createPathRow(4, LABEL_QUARANTAENE_PATH, INPUT_ID_QUARANTINE_PATH,
                propConf.getQuarantinePath(),
                "Wähle das Quarantäne-Verzeichnis aus, in welches die verdächtigen Emails gespeichert werden.",
                BUTTON_ID_QUARANTINE_PATH);

        // EMPTY ROW
        // let row with index 5 empty: workaround
        // y = 5, x = undefined, fill none
        gbc.gridy = 5;
        gbc.fill = GridBagConstraints.NONE;
        this.emptyRow = new JPanel();
        emptyRow.setBackground(Color.WHITE);
        this.mainPanel.add(emptyRow, gbc);

        // BUTTON CLEAR LOGBOX
        // y = 6, x = 1-3, fill horizontal
        gbc.gridx = 1;
        gbc.gridy = 6;
        gbc.gridwidth = 3;
        gbc.fill = GridBagConstraints.HORIZONTAL;
        final JButton btnClearLogBox = new JButton(BUTTON_LABEL_CLEAR_LOGBOX);
        btnClearLogBox.setName(BUTTON_ID_CLEAR_LOGBOX);
        btnClearLogBox.setPreferredSize(DIMENSION_BUTTON_SIZE);
        btnClearLogBox.addActionListener(actionListener);
        this.mainPanel.add(btnClearLogBox, gbc);

        // BUTTON START SCAN
        // y = 6, x = 6-7, fill horizontal
        gbc.gridx = 6;
        gbc.gridy = 6;
        gbc.gridwidth = 2;
        gbc.fill = GridBagConstraints.HORIZONTAL;
        final JButton btnStartScan = new JButton(BUTTON_LABEL_START_SCAN);
        btnStartScan.setName(BUTTON_ID_START_SCAN);
        btnStartScan.addActionListener(actionListener);
        btnStartScan.setPreferredSize(DIMENSION_BUTTON_SIZE);
        this.mainPanel.add(btnStartScan, gbc);
    }

    /**
     * Adds the scrollable log box spanning rows 7-12.
     */
    private void setInfoBox() {
        // y = 7-12, x = 1-8, fill both
        gbc.gridy = 7;
        gbc.gridx = 1;
        gbc.gridwidth = 8;
        gbc.gridheight = 6;
        gbc.anchor = GridBagConstraints.LINE_START;
        gbc.fill = GridBagConstraints.BOTH;
        final JScrollPane infoBoxScrollable = new JScrollPane(this.logBox);
        infoBoxScrollable.setPreferredSize(new Dimension(500, 200));
        // Use ScrollPaneConstants for both policies (the original mixed it with JScrollPane,
        // which merely inherits the same constants).
        infoBoxScrollable.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED);
        infoBoxScrollable.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED);
        this.mainPanel.add(infoBoxScrollable, gbc);
    }

    /**
     * Sets the input mail path.
     *
     * @param inputMailPath the new input mail path
     */
    public void setInputMailPath(final JTextField inputMailPath) {
        this.inputMailPath = inputMailPath;
    }

    /**
     * Gets the input mail path.
     *
     * @return the input mail path
     */
    public JTextField getInputMailPath() {
        return this.inputMailPath;
    }

    /**
     * Sets the input blackword path.
     *
     * @param inputBlackwordPath the new input blackword path
     */
    public void setInputBlackwordPath(final JTextField inputBlackwordPath) {
        this.inputBlackwordPath = inputBlackwordPath;
    }

    /**
     * Gets the input blackword path.
     *
     * @return the input blackword path
     */
    public JTextField getInputBlackwordPath() {
        return this.inputBlackwordPath;
    }

    /**
     * Sets the input log path.
     *
     * @param inputLogPath the new input log path
     */
    public void setInputLogPath(final JTextField inputLogPath) {
        this.inputLogPath = inputLogPath;
    }

    /**
     * Gets the input log path.
     *
     * @return the input log path
     */
    public JTextField getInputLogPath() {
        return this.inputLogPath;
    }

    /**
     * Sets the input quarantine path.
     *
     * @param inputQuarantinePath the new input quarantine path
     */
    public void setInputQuarantinePath(final JTextField inputQuarantinePath) {
        this.inputQuarantinePath = inputQuarantinePath;
    }

    /**
     * Gets the input quarantine path.
     *
     * @return the input quarantine path
     */
    public JTextField getInputQuarantinePath() {
        return this.inputQuarantinePath;
    }

    /**
     * Gets the log box.
     *
     * @return the log box
     */
    public LogBox getLogBox() {
        return this.logBox;
    }

    /**
     * Sets the log box.
     *
     * @param logBox the new log box
     */
    public void setLogBox(LogBox logBox) {
        this.logBox = logBox;
    }

    /**
     * Gets the main panel.
     *
     * @return the main panel
     */
    public JPanel getMainPanel() {
        return this.mainPanel;
    }

    /**
     * Sets the main panel.
     *
     * @param mainPanel the new main panel
     */
    public void setMainPanel(JPanel mainPanel) {
        this.mainPanel = mainPanel;
    }

    /** @return the shared GridBagConstraints instance */
    public GridBagConstraints getGbc() {
        return this.gbc;
    }

    /** @param gbc the GridBagConstraints instance to use */
    public void setGbc(GridBagConstraints gbc) {
        this.gbc = gbc;
    }
}
package mod._sc;

import java.io.PrintWriter;

import lib.StatusException;
import lib.TestCase;
import lib.TestEnvironment;
import lib.TestParameters;
import util.SOfficeFactory;

import com.sun.star.container.XEnumerationAccess;
import com.sun.star.container.XIndexAccess;
import com.sun.star.lang.XComponent;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.sheet.XCellFormatRangesSupplier;
import com.sun.star.sheet.XSpreadsheet;
import com.sun.star.sheet.XSpreadsheetDocument;
import com.sun.star.sheet.XSpreadsheets;
import com.sun.star.uno.AnyConverter;
import com.sun.star.uno.Type;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;

/**
 * Test for object which is represented by service
 * <code>com.sun.star.sheet.CellFormatRangesEnumeration</code>. <p>
 * Object implements the following interfaces :
 * <ul>
 *  <li> <code>com::sun::star::container::XEnumeration</code></li>
 * </ul>
 * @see com.sun.star.sheet.CellFormatRangesEnumeration
 * @see com.sun.star.container.XEnumeration
 * @see ifc.container._XEnumeration
 */
public class ScCellFormatsEnumeration extends TestCase {

    // Spreadsheet document created in initialize() and closed in cleanup().
    XSpreadsheetDocument xSheetDoc = null;

    /**
     * Creates the Spreadsheet document all tests run against.
     */
    protected void initialize( TestParameters tParam, PrintWriter log ) {
        // get a soffice factory object
        SOfficeFactory SOF = SOfficeFactory.getFactory(
            (XMultiServiceFactory)tParam.getMSF());

        try {
            log.println( "creating a sheetdocument" );
            // Fixed: a stray empty statement (";;") followed this call.
            xSheetDoc = SOF.createCalcDoc(null);
        } catch (com.sun.star.uno.Exception e) {
            // Some exception occurred. FAILED
            e.printStackTrace( log );
            throw new StatusException( "Couldn't create document", e);
        }
    }

    /**
     * Disposes the Spreadsheet document.
     */
    protected void cleanup( TestParameters tParam, PrintWriter log ) {
        log.println( "    disposing xSheetDoc " );
        XComponent oComp = (XComponent)
            UnoRuntime.queryInterface(XComponent.class, xSheetDoc);
        util.DesktopTools.closeDoc(oComp);
    }

    /**
     * Creating a Testenvironment for the interfaces to be tested.
     * Retrieves a collection of spreadsheets from a document,
     * and takes one of them. Then retrieves a collection of cell format range
     * using the interface <code>XCellFormatRangesSupplier</code>, creates the
     * enumeration of this collection using interface
     * <code>XEnumerationAccess</code>. This enumeration is the instance of
     * the service <code>com.sun.star.sheet.CellFormatRangesEnumeration</code>.
     * Object relations created :
     * <ul>
     *  <li> <code>'ENUM'</code> for
     *      {@link ifc.container._XEnumeration} (type of
     *      <code>XEnumerationAccess</code> that was queried from the
     *      collection of cell format range)</li>
     * </ul>
     * @see com.sun.star.sheet.CellFormatRangesEnumeration
     * @see com.sun.star.sheet.XCellFormatRangesSupplier
     * @see com.sun.star.container.XEnumerationAccess
     */
    protected synchronized TestEnvironment createTestEnvironment(
        TestParameters Param, PrintWriter log) {

        XInterface oObj = null;

        log.println("getting sheets");
        XSpreadsheets xSpreadsheets = (XSpreadsheets)xSheetDoc.getSheets();

        log.println("getting a sheet");
        XSpreadsheet oSheet = null;
        XIndexAccess oIndexAccess = (XIndexAccess)
            UnoRuntime.queryInterface(XIndexAccess.class, xSpreadsheets);
        try {
            oSheet = (XSpreadsheet) AnyConverter.toObject(
                new Type(XSpreadsheet.class), oIndexAccess.getByIndex(0));
        } catch (com.sun.star.lang.WrappedTargetException e) {
            e.printStackTrace(log);
            throw new StatusException( "Couldn't get a spreadsheet", e);
        } catch (com.sun.star.lang.IndexOutOfBoundsException e) {
            e.printStackTrace(log);
            throw new StatusException( "Couldn't get a spreadsheet", e);
        } catch (com.sun.star.lang.IllegalArgumentException e) {
            e.printStackTrace(log);
            throw new StatusException( "Couldn't get a spreadsheet", e);
        }

        log.println("getting CellFormats");
        XCellFormatRangesSupplier xCFRS = (XCellFormatRangesSupplier)
            UnoRuntime.queryInterface(XCellFormatRangesSupplier.class, oSheet);
        XIndexAccess formats = xCFRS.getCellFormatRanges();

        log.println("getting Enumeration");
        XEnumerationAccess oEnum = (XEnumerationAccess)
            UnoRuntime.queryInterface(XEnumerationAccess.class, formats);
        oObj = oEnum.createEnumeration();

        log.println("creating a new environment for object");
        TestEnvironment tEnv = new TestEnvironment(oObj);
        tEnv.addObjRelation("ENUM", oEnum);

        return tEnv;
    }
}    // finish class ScCellFormatsEnumeration
package nu.validator.xml; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.servlet.http.HttpServletRequest; import com.cybozu.labs.langdetect.Detector; import com.cybozu.labs.langdetect.DetectorFactory; import com.cybozu.labs.langdetect.LangDetectException; import com.cybozu.labs.langdetect.Language; import com.ibm.icu.util.ULocale; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.DTDHandler; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.LocatorImpl; public final class LanguageDetectingXMLReaderWrapper implements XMLReader, ContentHandler { private static final String languageList = "nu/validator/localentities/files/" + "language-profiles-list.txt"; private static final String profilesDir = "nu/validator/localentities/files/" + "language-profiles/"; private static List<String> profiles = new ArrayList<>(); private static List<String> languageTags = new ArrayList<>(); public static void initialize() throws LangDetectException { try { BufferedReader br = new BufferedReader(new InputStreamReader( LanguageDetectingXMLReaderWrapper.class.getClassLoader().getResourceAsStream( languageList))); String languageTagAndName = br.readLine(); while (languageTagAndName != null) { languageTags.add(languageTagAndName.split("\t")[0]); languageTagAndName = br.readLine(); } for (String languageTag : languageTags) { profiles.add((new BufferedReader(new InputStreamReader( 
LanguageDetectingXMLReaderWrapper.class.getClassLoader().getResourceAsStream( profilesDir + languageTag)))).readLine()); } DetectorFactory.clear(); DetectorFactory.loadProfile(profiles); } catch (IOException e) { throw new RuntimeException(e); } } private final XMLReader wrappedReader; private ContentHandler contentHandler; private ErrorHandler errorHandler; private HttpServletRequest request; private String systemId; private Locator locator = null; private Locator htmlStartTagLocator; private StringBuilder elementContent; private StringBuilder documentContent; private String httpContentLangHeader; private String langAttrValue; private boolean hasLang; private String dirAttrValue; private boolean hasDir; private boolean inBody; private boolean collectingCharacters; private int characterCount; private static final int MAX_CHARS = 35840; private static final int MIN_CHARS = 512; private static final double MIN_PROBABILITY = .90; private static final String[] RTL_LANGS = { "ar", "ckb", "fa", "he", "pnb", "ps", "sd", "ug", "ur" }; private static final String[] COMMON_LANGS = { "ar", "bg", "ca", "cs", "da", "de", "el", "en", "es", "et", "fa", "fi", "fr", "he", "hi", "hu", "id", "it", "ja", "ka", "ko", "lt", "lv", "ms", "nl", "no", "pl", "pt", "ro", "ru", "sh", "sk", "sq", "sv", "th", "tr", "uk", "vi", "zh-hans", "zh-hant" }; public LanguageDetectingXMLReaderWrapper(XMLReader wrappedReader, HttpServletRequest request, ErrorHandler errorHandler, String httpContentLangHeader, String systemId) { this.wrappedReader = wrappedReader; this.contentHandler = wrappedReader.getContentHandler(); this.errorHandler = errorHandler; this.request = request; this.systemId = systemId; this.htmlStartTagLocator = null; this.inBody = false; this.collectingCharacters = false; this.characterCount = 0; this.elementContent = new StringBuilder(); this.documentContent = new StringBuilder(); this.httpContentLangHeader = httpContentLangHeader; this.hasLang = false; this.langAttrValue = ""; 
this.hasDir = false; this.dirAttrValue = ""; wrappedReader.setContentHandler(this); } /** * @see org.xml.sax.helpers.XMLFilterImpl#characters(char[], int, int) */ @Override public void characters(char[] ch, int start, int length) throws SAXException { if (contentHandler == null) { return; } if (collectingCharacters && characterCount < MAX_CHARS) { characterCount += length; elementContent.append(ch, start, length); } contentHandler.characters(ch, start, length); } /** * @see org.xml.sax.helpers.XMLFilterImpl#endElement(java.lang.String, * java.lang.String, java.lang.String) */ @Override public void endElement(String uri, String localName, String qName) throws SAXException { if (contentHandler == null) { return; } if (characterCount < MAX_CHARS) { documentContent.append(elementContent); elementContent.setLength(0); } contentHandler.endElement(uri, localName, qName); } /** * @see org.xml.sax.helpers.XMLFilterImpl#startDocument() */ @Override public void startDocument() throws SAXException { if (contentHandler == null) { return; } contentHandler.startDocument(); } /** * @see org.xml.sax.helpers.XMLFilterImpl#startElement(java.lang.String, * java.lang.String, java.lang.String, org.xml.sax.Attributes) */ @Override public void startElement(String uri, String localName, String qName, Attributes atts) throws SAXException { if (contentHandler == null) { return; } if ("html".equals(localName)) { htmlStartTagLocator = new LocatorImpl(locator); for (int i = 0; i < atts.getLength(); i++) { if ("lang".equals(atts.getLocalName(i))) { if (request != null) { request.setAttribute( "http://validator.nu/properties/lang-found", true); } hasLang = true; langAttrValue = atts.getValue(i); } else if ("dir".equals(atts.getLocalName(i))) { hasDir = true; dirAttrValue = atts.getValue(i); } } } else if ("body".equals(localName)) { inBody = true; } collectingCharacters = false; if (inBody && !"script".equals(localName) && !"style".equals(localName)) { collectingCharacters = true; } 
contentHandler.startElement(uri, localName, qName, atts); } /** * @see org.xml.sax.helpers.XMLFilterImpl#setDocumentLocator(org.xml.sax.Locator) */ @Override public void setDocumentLocator(Locator locator) { if (contentHandler == null) { return; } this.locator = locator; contentHandler.setDocumentLocator(locator); } @Override public ContentHandler getContentHandler() { return contentHandler; } /** * @throws SAXException * @see org.xml.sax.ContentHandler#endDocument() */ @Override public void endDocument() throws SAXException { if (contentHandler == null) { return; } detectLanguageAndCheckAgainstDeclaredLanguage(); contentHandler.endDocument(); } public void detectLanguageAndCheckAgainstDeclaredLanguage() throws SAXException { try { if (characterCount < MIN_CHARS) { contentHandler.endDocument(); return; } String textContent = documentContent.toString(); String detectedLanguage = ""; Detector detector = DetectorFactory.create(); detector.append(textContent); detector.getProbabilities(); ArrayList<Language> possibleLanguages = detector.getProbabilities(); for (Language possibility : possibleLanguages) { ULocale plocale = new ULocale(possibility.lang); if (Arrays.binarySearch(COMMON_LANGS, possibility.lang) < 0 && systemId != null) { try (FileWriter fw = new FileWriter("language-log.txt", true); BufferedWriter bw = new BufferedWriter(fw); PrintWriter out = new PrintWriter(bw)) { out.println(String.format("%s %s %s", plocale.getDisplayName(), possibility.prob, systemId)); } catch (IOException e) { throw new RuntimeException(e); } } if (possibility.prob > MIN_PROBABILITY) { detectedLanguage = possibility.lang; if (request != null) { request.setAttribute( "http://validator.nu/properties/document-language", detectedLanguage); } } } if ("".equals(detectedLanguage)) { if (!hasLang && errorHandler != null) { String message = "Consider adding a \u201Clang\u201D" + " attribute to the \u201Chtml\u201D" + " start tag to declare the language" + " of this document."; 
SAXParseException spe = new SAXParseException(message, htmlStartTagLocator); errorHandler.warning(spe); } contentHandler.endDocument(); return; } String detectedLanguageName = ""; String preferredLanguageCode = ""; ULocale locale = new ULocale(detectedLanguage); String detectedLanguageCode = locale.getLanguage(); if ("zh-hans".equals(detectedLanguage)) { detectedLanguageName = "Simplified Chinese"; preferredLanguageCode = "zh-hans"; } else if ("zh-hant".equals(detectedLanguage)) { detectedLanguageName = "Traditional Chinese"; preferredLanguageCode = "zh-hant"; } else if ("mhr".equals(detectedLanguage)) { detectedLanguageName = "Meadow Mari"; preferredLanguageCode = "mhr"; } else if ("mrj".equals(detectedLanguage)) { detectedLanguageName = "Hill Mari"; preferredLanguageCode = "mrj"; } else if ("nah".equals(detectedLanguage)) { detectedLanguageName = "Nahuatl"; preferredLanguageCode = "nah"; } else if ("pnb".equals(detectedLanguage)) { detectedLanguageName = "Western Panjabi"; preferredLanguageCode = "pnb"; } else { detectedLanguageName = locale.getDisplayName(); preferredLanguageCode = detectedLanguageCode; } checkLangAttribute(detectedLanguage, detectedLanguageName, detectedLanguageCode, preferredLanguageCode); checkDirAttribute(detectedLanguage, detectedLanguageName, detectedLanguageCode, preferredLanguageCode); checkContentLanguageHeader(detectedLanguage, detectedLanguageName, detectedLanguageCode, preferredLanguageCode); } catch (LangDetectException e) { } } public void checkLangAttribute(String detectedLanguage, String detectedLanguageName, String detectedLanguageCode, String preferredLanguageCode) throws SAXException { String langWarning = ""; String lowerCaseLang = langAttrValue.toLowerCase(); String declaredLangCode = new ULocale(langAttrValue).getLanguage(); if (!hasLang) { langWarning = String.format( "This document appears to be written in %s." 
+ " Consider adding \u201Clang=\"%s\"\u201D" + " (or variant) to the \u201Chtml\u201D" + " start tag.", detectedLanguageName, preferredLanguageCode); } else { if (request != null) { if ("".equals(lowerCaseLang)) { request.setAttribute( "http://validator.nu/properties/lang-empty", true); if (systemId != null) { try (FileWriter fw = new FileWriter("language-log.txt", true); BufferedWriter bw = new BufferedWriter(fw); PrintWriter out = new PrintWriter(bw)) { out.println(String.format("*** EMPTY LANG %s", systemId)); } catch (IOException e) { throw new RuntimeException(e); } } } else { request.setAttribute( "http://validator.nu/properties/lang-value", lowerCaseLang); } } if ("tl".equals(detectedLanguageCode) && ("ceb".equals(declaredLangCode) || "ilo".equals(declaredLangCode) || "pag".equals(declaredLangCode) || "war".equals(declaredLangCode))) { return; } if ("id".equals(detectedLanguageCode) && "min".equals(declaredLangCode)) { return; } if ("hr".equals(detectedLanguageCode) && "sh".equals(declaredLangCode)) { return; } if ("de".equals(detectedLanguageCode) && ("bar".equals(declaredLangCode) || "gsw".equals(declaredLangCode) || "lb".equals(declaredLangCode))) { return; } if ("zh".equals(detectedLanguageCode) && "yue".equals(lowerCaseLang)) { return; } if ("sh".equals(detectedLanguageCode) && ("sr".equals(declaredLangCode) || "hr".equals(declaredLangCode) || "bs".equals(declaredLangCode))) { return; } if ("es".equals(detectedLanguageCode) && "ar".equals(declaredLangCode)) { return; } if ("it".equals(detectedLanguageCode) && ("co".equals(declaredLangCode) || "pms".equals(declaredLangCode) || "vec".equals(declaredLangCode) || "lmo".equals(declaredLangCode) || "scn".equals(declaredLangCode) || "nap".equals(declaredLangCode))) { return; } if ("rw".equals(detectedLanguageCode) && "rn".equals(declaredLangCode)) { return; } String message = "This document appears to be written in %s" + " but the \u201Chtml\u201D start tag has %s. 
Consider" + " using \u201Clang=\"%s\"\u201D (or variant) instead."; if (zhSubtagMismatch(detectedLanguage, lowerCaseLang) || !declaredLangCode.equals(detectedLanguageCode)) { if (request != null) { request.setAttribute( "http://validator.nu/properties/lang-wrong", true); } langWarning = String.format(message, detectedLanguageName, getAttValueExpr("lang", langAttrValue), preferredLanguageCode); } } if (!"".equals(langWarning)) { warn(langWarning); } } public void checkContentLanguageHeader(String detectedLanguage, String detectedLanguageName, String detectedLanguageCode, String preferredLanguageCode) throws SAXException { if ("".equals(httpContentLangHeader) || httpContentLangHeader.contains(",")) { return; } String message = ""; String lowerCaseContentLang = httpContentLangHeader.toLowerCase(); String contentLangCode = new ULocale( lowerCaseContentLang).getLanguage(); if ("tl".equals(detectedLanguageCode) && ("ceb".equals(contentLangCode) || "war".equals(contentLangCode))) { return; } if ("id".equals(detectedLanguageCode) && "min".equals(contentLangCode)) { return; } if ("ms".equals(detectedLanguageCode) && "min".equals(contentLangCode)) { return; } if ("hr".equals(detectedLanguageCode) && "sh".equals(contentLangCode)) { return; } if ("zh".equals(detectedLanguageCode) && "yue".equals(lowerCaseContentLang)) { return; } if ("sh".equals(detectedLanguageCode) && ("sr".equals(lowerCaseContentLang) || "hr".equals(lowerCaseContentLang) || "bs".equals(lowerCaseContentLang))) { return; } if (zhSubtagMismatch(detectedLanguage, lowerCaseContentLang) || !contentLangCode.equals(detectedLanguageCode)) { message = "This document appears to be written in %s but the value" + " of the HTTP \u201CContent-Language\u201D header is" + " \u201C%s\u201D. 
Consider changing it to" + " \u201C%s\u201D (or variant)."; String warning = String.format(message, detectedLanguageName, lowerCaseContentLang, preferredLanguageCode, preferredLanguageCode); if (errorHandler != null) { SAXParseException spe = new SAXParseException(warning, null); errorHandler.warning(spe); } } if (hasLang) { message = "The value of the HTTP \u201CContent-Language\u201D" + " header is \u201C%s\u201D but it will be ignored because" + " the \u201Chtml\u201D start tag has %s."; String lowerCaseLang = langAttrValue.toLowerCase(); String declaredLangCode = new ULocale(langAttrValue).getLanguage(); if (hasLang) { if (zhSubtagMismatch(lowerCaseContentLang, lowerCaseLang) || !contentLangCode.equals(declaredLangCode)) { warn(String.format(message, httpContentLangHeader, getAttValueExpr("lang", langAttrValue))); } } } } public void checkDirAttribute(String detectedLanguage, String detectedLanguageName, String detectedLanguageCode, String preferredLanguageCode) throws SAXException { if (Arrays.binarySearch(RTL_LANGS, detectedLanguageCode) < 0) { return; } String dirWarning = ""; if (!hasDir) { dirWarning = String.format( "This document appears to be written in %s." + " Consider adding \u201Cdir=\"rtl\"\u201D" + " to the \u201Chtml\u201D start tag.", detectedLanguageName, preferredLanguageCode); } else if (!"rtl".equals(dirAttrValue)) { String message = "This document appears to be written in %s" + " but the \u201Chtml\u201D start tag has %s." 
+ " Consider using \u201Cdir=\"rtl\"\u201D instead.";
            dirWarning = String.format(message, detectedLanguageName,
                    getAttValueExpr("dir", dirAttrValue));
        }
        if (!"".equals(dirWarning)) {
            warn(dirWarning);
        }
    }

    // True when one side is zh-hans while the other declares a
    // Traditional-Chinese tag, or vice versa.
    private boolean zhSubtagMismatch(String expectedLanguage,
            String declaredLanguage) {
        return (("zh-hans".equals(expectedLanguage)
                && (declaredLanguage.contains("zh-tw")
                        || declaredLanguage.contains("zh-hant")))
                || ("zh-hant".equals(expectedLanguage)
                        && (declaredLanguage.contains("zh-cn")
                                || declaredLanguage.contains("zh-hans"))));
    }

    // Renders an attribute for inclusion in a warning message, with a
    // special phrasing for an empty value.
    private String getAttValueExpr(String attName, String attValue) {
        if ("".equals(attValue)) {
            return String.format("an empty \u201c%s\u201d attribute", attName);
        } else {
            return String.format("\u201C%s=\"%s\"\u201D", attName, attValue);
        }
    }

    // Emits a warning located at the "html" start tag, if a handler is set.
    private void warn(String message) throws SAXException {
        if (errorHandler != null) {
            SAXParseException spe = new SAXParseException(message,
                    htmlStartTagLocator);
            errorHandler.warning(spe);
        }
    }

    /**
     * Forwards the event to the downstream handler, if any.
     *
     * @param prefix
     * @throws SAXException
     * @see org.xml.sax.ContentHandler#endPrefixMapping(java.lang.String)
     */
    @Override
    public void endPrefixMapping(String prefix) throws SAXException {
        if (contentHandler == null) {
            return;
        }
        contentHandler.endPrefixMapping(prefix);
    }

    /**
     * Forwards the event to the downstream handler, if any.
     *
     * @param ch
     * @param start
     * @param length
     * @throws SAXException
     * @see org.xml.sax.ContentHandler#ignorableWhitespace(char[], int, int)
     */
    @Override
    public void ignorableWhitespace(char[] ch, int start, int length)
            throws SAXException {
        if (contentHandler == null) {
            return;
        }
        contentHandler.ignorableWhitespace(ch, start, length);
    }

    /**
     * Forwards the event to the downstream handler, if any.
     *
     * @param target
     * @param data
     * @throws SAXException
     * @see org.xml.sax.ContentHandler#processingInstruction(java.lang.String,
     *      java.lang.String)
     */
    @Override
    public void processingInstruction(String target, String data)
            throws SAXException {
        if (contentHandler == null) {
            return;
        }
        contentHandler.processingInstruction(target, data);
    }

    /**
     * Forwards the event to the downstream handler, if any.
     *
     * @param name
     * @throws SAXException
     * @see org.xml.sax.ContentHandler#skippedEntity(java.lang.String)
     */
    @Override
    public void skippedEntity(String name) throws SAXException {
        if (contentHandler == null) {
            return;
        }
        contentHandler.skippedEntity(name);
    }

    /**
     * Forwards the event to the downstream handler, if any.
     *
     * @param prefix
     * @param uri
     * @throws SAXException
     * @see org.xml.sax.ContentHandler#startPrefixMapping(java.lang.String,
     *      java.lang.String)
     */
    @Override
    public void startPrefixMapping(String prefix, String uri)
            throws SAXException {
        if (contentHandler == null) {
            return;
        }
        contentHandler.startPrefixMapping(prefix, uri);
    }

    /**
     * @return the wrapped reader's DTD handler
     * @see org.xml.sax.XMLReader#getDTDHandler()
     */
    @Override
    public DTDHandler getDTDHandler() {
        return wrappedReader.getDTDHandler();
    }

    /**
     * @return the wrapped reader's entity resolver
     * @see org.xml.sax.XMLReader#getEntityResolver()
     */
    @Override
    public EntityResolver getEntityResolver() {
        return wrappedReader.getEntityResolver();
    }

    /**
     * Note: returns this wrapper's own error handler, not the wrapped
     * reader's.
     *
     * @return the error handler passed to the constructor or to
     *         setErrorHandler
     * @see org.xml.sax.XMLReader#getErrorHandler()
     */
    @Override
    public ErrorHandler getErrorHandler() {
        return errorHandler;
    }

    /**
     * @param name
     * @return the feature value from the wrapped reader
     * @throws SAXNotRecognizedException
     * @throws SAXNotSupportedException
     * @see org.xml.sax.XMLReader#getFeature(java.lang.String)
     */
    @Override
    public boolean getFeature(String name)
            throws SAXNotRecognizedException, SAXNotSupportedException {
        return wrappedReader.getFeature(name);
    }

    /**
     * @param name
     * @return the property value from the wrapped reader
     * @throws SAXNotRecognizedException
     * @throws SAXNotSupportedException
     * @see org.xml.sax.XMLReader#getProperty(java.lang.String)
     */
    @Override
    public Object getProperty(String name)
            throws SAXNotRecognizedException, SAXNotSupportedException {
        return wrappedReader.getProperty(name);
    }

    /**
     * Delegates parsing to the wrapped reader; this wrapper receives the
     * resulting events because it registered itself as content handler.
     *
     * @param input
     * @throws IOException
     * @throws SAXException
     * @see org.xml.sax.XMLReader#parse(org.xml.sax.InputSource)
     */
    @Override
    public void parse(InputSource input) throws IOException, SAXException {
        wrappedReader.parse(input);
    }

    /**
     * Delegates parsing to the wrapped reader.
     *
     * @param systemId
     * @throws IOException
     * @throws SAXException
     * @see org.xml.sax.XMLReader#parse(java.lang.String)
     */
    @Override
    public void parse(String systemId) throws IOException, SAXException {
        wrappedReader.parse(systemId);
    }

    /**
     * Note: replaces the downstream handler this wrapper forwards to;
     * the wrapped reader keeps delivering events to this wrapper.
     *
     * @param handler
     * @see org.xml.sax.XMLReader#setContentHandler(org.xml.sax.ContentHandler)
     */
    @Override
    public void setContentHandler(ContentHandler handler) {
        contentHandler = handler;
    }

    /**
     * @param handler
     * @see org.xml.sax.XMLReader#setDTDHandler(org.xml.sax.DTDHandler)
     */
    @Override
    public void setDTDHandler(DTDHandler handler) {
        wrappedReader.setDTDHandler(handler);
    }

    /**
     * @param resolver
     * @see org.xml.sax.XMLReader#setEntityResolver(org.xml.sax.EntityResolver)
     */
    @Override
    public void setEntityResolver(EntityResolver resolver) {
        wrappedReader.setEntityResolver(resolver);
    }

    /**
     * @param handler
     * @see org.xml.sax.XMLReader#setErrorHandler(org.xml.sax.ErrorHandler)
     */
    @Override
    public void setErrorHandler(ErrorHandler handler) {
        wrappedReader.setErrorHandler(handler);
    }

    /**
     * @param name
     * @param value
     * @throws SAXNotRecognizedException
     * @throws SAXNotSupportedException
     * @see org.xml.sax.XMLReader#setFeature(java.lang.String, boolean)
     */
    @Override
    public void setFeature(String name, boolean value)
            throws SAXNotRecognizedException, SAXNotSupportedException {
        wrappedReader.setFeature(name, value);
    }

    /**
     * @param name
     * @param value
     * @throws SAXNotRecognizedException
     * @throws SAXNotSupportedException
     * @see org.xml.sax.XMLReader#setProperty(java.lang.String,
     *      java.lang.Object)
     */
    @Override
    public void setProperty(String name, Object value)
            throws SAXNotRecognizedException, SAXNotSupportedException {
        wrappedReader.setProperty(name, value);
    }

}
package org.intermine.task;

import java.util.Properties;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.Iterator;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.IOException;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;

import org.intermine.objectstore.query.iql.IqlQuery;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.QueryReference;
import org.intermine.objectstore.query.QueryObjectReference;
import org.intermine.objectstore.query.QueryCollectionReference;
import org.intermine.objectstore.query.ContainsConstraint;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreFactory;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.query.Results;
import org.intermine.objectstore.intermine.ObjectStoreInterMineImpl;
import org.intermine.metadata.Model;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.MetaDataException;

/**
 * A Task that reads a list of queries from a properties file (eg.
 * testmodel_precompute.properties) and calls
 * ObjectStoreInterMineImpl.precompute() using the Query.
 *
 * @author Kim Rutherford
 */
public class PrecomputeTask extends Task
{
    String alias;
    String modelName;
    boolean testMode;

    // read by readProperties()
    Properties precomputeProperties = null;

    // set by setModel()
    Model model = null;

    /**
     * Set the ObjectStore alias
     * @param alias the ObjectStore alias
     */
    public void setAlias(String alias) {
        this.alias = alias;
    }

    /**
     * Set the model name - used to create the name of the properties file to
     * search for.
     * @param modelName the model name
     */
    public void setModelName(String modelName) {
        this.modelName = modelName;
    }

    /**
     * Set the mode of operation - if true run and time a set of test queries
     * after each call to precompute().
     * @param testMode set test mode on if and only if this is true
     */
    public void setTestMode(Boolean testMode) {
        this.testMode = testMode.booleanValue();
    }

    /**
     * @see Task#execute
     */
    public void execute() throws BuildException {
        if (alias == null) {
            throw new BuildException("alias attribute is not set");
        }
        if (modelName == null) {
            throw new BuildException("modelName attribute is not set");
        }

        setModel();
        readProperties();

        ObjectStore os = null;

        try {
            os = ObjectStoreFactory.getObjectStore(alias);
        } catch (Exception e) {
            throw new BuildException("Exception while creating ObjectStore", e);
        }

        if (!(os instanceof ObjectStoreInterMineImpl)) {
            throw new BuildException(alias + " isn't an ObjectStoreInterMineImpl");
        }

        if (testMode) {
            runTimedTestQueries(os, "Running tests before precomputing:");
        }

        Iterator iter = getPrecomputeQueries().entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry entry = (Map.Entry) iter.next();
            String key = (String) entry.getKey();
            Query query = (Query) entry.getValue();

            try {
                ((ObjectStoreInterMineImpl) os).precompute(query);
            } catch (ObjectStoreException e) {
                throw new BuildException("Exception while precomputing query: " + query, e);
            }

            if (testMode) {
                runTimedTestQueries(os, "Running tests after precomputing " + key + ":");
            }
        }
    }

    /**
     * Run the test queries once, printing the given label first and the total
     * elapsed time afterwards.  (Extracted from two identical copies of this
     * code in execute().)
     * @param os the ObjectStore to run the queries against
     * @param label line printed before the test run
     */
    private void runTimedTestQueries(ObjectStore os, String label) {
        PrintStream outputStream = System.out;
        long start = System.currentTimeMillis();
        outputStream.println(label);
        runTestQueries(os);
        outputStream.println("tests took: "
                             + (System.currentTimeMillis() - start) / 1000 + " seconds");
    }

    private static final String TEST_QUERY_PREFIX = "test.query.";

    /**
     * Get a Map of keys (from the precomputeProperties file) to Query objects
     * to precompute.
     * @return a Map of keys to Query objects
     * @throws BuildException if the query cannot be constructed (for example
     * when a class or the collection doesn't exist)
     */
    protected Map getPrecomputeQueries() throws BuildException {
        Map returnList = new TreeMap();

        Iterator iter = precomputeProperties.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry entry = (Map.Entry) iter.next();
            String precomputeKey = (String) entry.getKey();

            if (precomputeKey.startsWith("precompute.query")) {
                String iqlQueryString = (String) entry.getValue();
                Query query = parseQuery(iqlQueryString, precomputeKey);
                returnList.put(precomputeKey, query);
            } else if (precomputeKey.startsWith("precompute.constructquery")) {
                // value is "ObjectClass connectingField SubjectClass"
                String[] queryBits = ((String) entry.getValue()).split("[ \t]");
                if (queryBits.length == 3) {
                    String objectClassName = queryBits[0];
                    String connectingField = queryBits[1];
                    String subjectClassName = queryBits[2];
                    Query constructedQuery =
                        constructCollectionQuery(objectClassName, connectingField,
                                                 subjectClassName);
                    returnList.put(precomputeKey, constructedQuery);
                } else {
                    throw new BuildException(precomputeKey + " should have three fields "
                                             + "(ie. class fieldname class)");
                }
            } else if (!precomputeKey.startsWith(TEST_QUERY_PREFIX)) {
                throw new BuildException("unknown key in properties file "
                                         + getPropertiesFileName());
            }
        }

        return returnList;
    }

    /**
     * Take two class names and a connecting collection field name and create
     * a new Query.  Eg. for "Company", "departments", "Department", create:
     * SELECT DISTINCT a1_, a2_ FROM org.intermine.model.testmodel.Company
     * AS a1_, org.intermine.model.testmodel.Department AS a2_ WHERE
     * a1_.departments CONTAINS a2_ ORDER BY a1_
     * @param objectClassName the class on the "one" end of the relation
     * @param connectingFieldname the reference or collection field connecting
     * the two classes
     * @param subjectClassName the class on the other end of the relation
     * @return the new Query
     * @throws BuildException if the query cannot be constructed (for example
     * when a class or the collection doesn't exist)
     */
    private Query constructCollectionQuery(String objectClassName, String connectingFieldname,
                                           String subjectClassName) throws BuildException {
        Query q = new Query();
        q.setDistinct(true);

        Class objectClass;
        Class subjectClass;

        try {
            // unqualified names are resolved against the model's package
            if (objectClassName.indexOf(".") == -1) {
                objectClassName = model.getPackageName() + "." + objectClassName;
            }
            objectClass = Class.forName(objectClassName);
        } catch (ClassNotFoundException e) {
            throw new BuildException("Class " + objectClassName + " not found (read name from "
                                     + getPropertiesFileName() + ")", e);
        }
        try {
            if (subjectClassName.indexOf(".") == -1) {
                subjectClassName = model.getPackageName() + "." + subjectClassName;
            }
            subjectClass = Class.forName(subjectClassName);
        } catch (ClassNotFoundException e) {
            throw new BuildException("Class " + subjectClassName + " not found (read name from "
                                     + getPropertiesFileName() + ")", e);
        }

        QueryClass qcObj = new QueryClass(objectClass);
        q.addFrom(qcObj);
        q.addToSelect(qcObj);
        QueryClass qcSub = new QueryClass(subjectClass);
        q.addFrom(qcSub);
        q.addToSelect(qcSub);
        q.addToOrderBy(qcObj);

        ClassDescriptor objectCD = model.getClassDescriptorByName(objectClassName);
        if (objectCD == null) {
            throw new BuildException("cannot find ClassDescriptor for " + objectClassName);
        }
        FieldDescriptor fd = objectCD.getFieldDescriptorByName(connectingFieldname);
        if (fd == null) {
            throw new BuildException("cannot find FieldDescriptor for " + connectingFieldname
                                     + " in " + objectClassName);
        }

        // the connecting field may be a 1:1 reference or a 1:N collection
        QueryReference ref;
        if (fd.isReference()) {
            ref = new QueryObjectReference(qcObj, connectingFieldname);
        } else {
            ref = new QueryCollectionReference(qcObj, connectingFieldname);
        }

        ContainsConstraint cc = new ContainsConstraint(ref, ConstraintOp.CONTAINS, qcSub);
        q.setConstraint(cc);

        return q;
    }

    /**
     * For a given IQL query, return a Query object.
     * @param iqlQueryString the IQL String
     * @param key the key from the properties file
     * @return a Query object
     * @throws BuildException if the IQL String cannot be parsed
     */
    protected Query parseQuery(String iqlQueryString, String key) throws BuildException {
        IqlQuery iqlQuery = new IqlQuery(iqlQueryString, model.getPackageName());

        try {
            return iqlQuery.toQuery();
        } catch (IllegalArgumentException e) {
            throw new BuildException("Exception while parsing query: " + key
                                     + " = " + iqlQueryString, e);
        }
    }

    /**
     * Run all the test queries specified in precomputeProperties.
     * @param os the ObjectStore to run the queries against.
     * @throws BuildException if there is an error while running the queries.
     */
    protected void runTestQueries(ObjectStore os) throws BuildException {
        TreeMap sortedPrecomputeProperties = new TreeMap(precomputeProperties);
        Iterator iter = sortedPrecomputeProperties.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry entry = (Map.Entry) iter.next();
            String testqueryKey = (String) entry.getKey();
            if (testqueryKey.startsWith(TEST_QUERY_PREFIX)) {
                String iqlQueryString = (String) entry.getValue();
                Query query = parseQuery(iqlQueryString, testqueryKey);

                long start = System.currentTimeMillis();
                PrintStream outputStream = System.out;
                outputStream.println(" running test " + testqueryKey + ":");
                Results results;
                try {
                    results = os.execute(query);
                } catch (ObjectStoreException e) {
                    throw new BuildException("problem executing " + testqueryKey + " test", e);
                }
                int resultsSize = results.size();
                outputStream.println(" got size " + resultsSize + " in "
                                     + (System.currentTimeMillis() - start) / 1000 + " seconds");
                if (resultsSize > 0) {
                    // the gets force the first and last rows to be fetched so
                    // that the fetch times can be reported; the values
                    // themselves are intentionally unused
                    List resultsRow1 = (List) results.get(0);
                    outputStream.println(" first row in "
                                         + (System.currentTimeMillis() - start) / 1000
                                         + " seconds");
                    List resultsRow2 = (List) results.get(resultsSize - 1);
                    outputStream.println(" last row in "
                                         + (System.currentTimeMillis() - start) / 1000
                                         + " seconds");
                }
            }
        }
    }

    /**
     * Set model using modelName.
     */
    private void setModel() {
        try {
            model = Model.getInstanceByName(modelName);
        } catch (MetaDataException e) {
            throw new BuildException("Failed to find model for " + modelName, e);
        }
    }

    /**
     * Set precomputeProperties by reading from propertiesFileName.
     * @throws BuildException if the file cannot be read.
     */
    private void readProperties() throws BuildException {
        String propertiesFileName = getPropertiesFileName();

        InputStream is =
            PrecomputeTask.class.getClassLoader().getResourceAsStream(propertiesFileName);

        if (is == null) {
            throw new BuildException("Cannot find " + propertiesFileName
                                     + " in the class path");
        }

        try {
            precomputeProperties = new Properties();
            precomputeProperties.load(is);
        } catch (IOException e) {
            // Fixed: error message said "while creating reading properties"
            throw new BuildException("Exception while reading properties from "
                                     + propertiesFileName, e);
        } finally {
            // Fixed: the stream was never closed
            try {
                is.close();
            } catch (IOException e) {
                // ignore failure to close; the properties were already loaded
            }
        }
    }

    /**
     * Return the name of the properties file, derived from modelName.
     * @return the properties file name
     */
    protected String getPropertiesFileName() {
        return modelName + "_precompute.properties";
    }
}
package com.frameworkium.tests.internal;

import com.frameworkium.capture.ScreenshotCapture;
import com.frameworkium.config.DriverSetup;
import com.frameworkium.config.DriverType;
import com.frameworkium.config.WebDriverWrapper;
import com.frameworkium.listeners.*;
import com.frameworkium.reporting.AllureProperties;
import com.saucelabs.common.SauceOnDemandAuthentication;
import com.saucelabs.common.SauceOnDemandSessionIdProvider;
import com.saucelabs.testng.SauceOnDemandAuthenticationProvider;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.remote.SessionId;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Listeners;
import ru.yandex.qatools.allure.Allure;
import ru.yandex.qatools.allure.annotations.Issue;
import ru.yandex.qatools.allure.annotations.TestCaseId;
import ru.yandex.qatools.allure.events.StepFinishedEvent;
import ru.yandex.qatools.allure.events.StepStartedEvent;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Base class for all tests: wires up per-thread WebDriver instances,
 * optional screenshot capture and the Allure/SauceLabs integration.
 */
@Listeners({CaptureListener.class, ScreenshotListener.class, MethodInterceptor.class,
        SauceLabsListener.class, TestListener.class, ResultLoggerListener.class})
public abstract class BaseTest
        implements SauceOnDemandSessionIdProvider, SauceOnDemandAuthenticationProvider {

    private static ThreadLocal<Boolean> requiresReset;
    private static ThreadLocal<ScreenshotCapture> capture;
    private static ThreadLocal<DriverType> driverType;
    // ThreadLocal.initialValue() runs on each TestNG worker thread, so this
    // list is appended to concurrently - it must be a synchronized list.
    private static final List<DriverType> activeDriverTypes =
            Collections.synchronizedList(new ArrayList<DriverType>());
    private static final Logger logger = LogManager.getLogger(BaseTest.class);

    public static String userAgent;

    /**
     * Method which runs first upon running a test, it will do the following:
     * - Retrieve the desired driver type and initialise the driver
     * - Initialise whether the browser needs resetting
     * - Initialise the screenshot capture
     * - Configure the browser based on parameters (maximise window, session resets, user agent)
     */
    @BeforeSuite(alwaysRun = true)
    public static void instantiateDriverObject() {
        driverType = new ThreadLocal<DriverType>() {
            @Override
            protected DriverType initialValue() {
                DriverType driverType = new DriverSetup().returnDesiredDriverType();
                driverType.instantiate();
                activeDriverTypes.add(driverType);
                return driverType;
            }
        };
        requiresReset = new ThreadLocal<Boolean>() {
            @Override
            protected Boolean initialValue() {
                return Boolean.FALSE;
            }
        };
        capture = new ThreadLocal<ScreenshotCapture>() {
            @Override
            protected ScreenshotCapture initialValue() {
                return null;
            }
        };
    }

    /**
     * The methods which configure the browser once a test runs
     * - Maximises browser based on the driver type
     * - Initialises screenshot capture if needed
     * - Clears the session if another test ran prior
     * - Sets the user agent of the browser
     *
     * @param testMethod - The test method name of the test
     */
    @BeforeMethod(alwaysRun = true)
    public static void configureBrowserBeforeTest(Method testMethod) {
        configureDriverBasedOnParams();
        initialiseNewScreenshotCapture(testMethod);
    }

    /**
     * Initialise the screenshot capture and link to issue/test case id.
     * Precedence for the capture ID: TestCaseId annotation, then Issue
     * annotation, then the bare method name, then "n/a".
     *
     * @param testMethod - Test method passed from the test script
     */
    private static void initialiseNewScreenshotCapture(Method testMethod) {
        if (ScreenshotCapture.isRequired()) {
            String testID = "n/a";
            // Explicit null checks instead of catching NullPointerException:
            // annotations are simply absent, not exceptional.
            if (testMethod == null) {
                logger.debug("No test method defined.");
            } else {
                testID = testMethod.getName();
                Issue issueAnnotation = testMethod.getAnnotation(Issue.class);
                if (issueAnnotation != null) {
                    testID = issueAnnotation.value();
                } else {
                    logger.debug("No Issue defined.");
                }
                TestCaseId testCaseIdAnnotation = testMethod.getAnnotation(TestCaseId.class);
                if (testCaseIdAnnotation != null) {
                    testID = testCaseIdAnnotation.value();
                } else {
                    logger.debug("No Test Case ID defined.");
                }
            }
            capture.set(new ScreenshotCapture(testID, driverType.get().getDriver()));
        }
    }

    /**
     * Ran as part of the initialiseDriverObject, configures parts of the driver
     */
    private static void configureDriverBasedOnParams() {
        requiresReset.set(driverType.get().resetBrowser(requiresReset.get()));
        driverType.get().maximiseBrowserWindow();
        setUserAgent();
    }

    /**
     * Returns the webdriver object for that given thread
     *
     * @return - WebDriver object
     */
    public static WebDriverWrapper getDriver() {
        return driverType.get().getDriver();
    }

    /**
     * Sets the user agent of the browser for the test run
     */
    private static void setUserAgent() {
        userAgent = getUserAgent();
    }

    /**
     * Loops through all active driver types and tears down the driver object
     */
    @AfterSuite(alwaysRun = true)
    public static void closeDriverObject() {
        try {
            // Iteration over a synchronized list must itself be synchronized.
            synchronized (activeDriverTypes) {
                for (DriverType driverType : activeDriverTypes) {
                    driverType.tearDownDriver();
                }
            }
        } catch (Exception e) {
            logger.warn("Session quit unexpectedly.", e);
        }
    }

    /**
     * Creates the allure properties for the report, after the test run
     */
    @AfterSuite(alwaysRun = true)
    public static void createAllureProperties() {
        AllureProperties.create();
    }

    /** @return the Job id for the current thread */
    @Override
    public String getSessionId() {
        WebDriverWrapper driver = getDriver();
        SessionId sessionId = driver.getWrappedRemoteWebDriver().getSessionId();
        return (sessionId == null) ? null : sessionId.toString();
    }

    /**
     * Retrieves the user agent from the browser
     *
     * @return - String of the user agent
     */
    private static String getUserAgent() {
        String ua;
        JavascriptExecutor js = getDriver();
        try {
            ua = (String) js.executeScript("return navigator.userAgent;");
        } catch (Exception e) {
            ua = "Unable to fetch UserAgent";
        }
        logger.debug("User agent is: '" + ua + "'");
        return ua;
    }

    /**
     * @return the {@link SauceOnDemandAuthentication} instance containing the Sauce username/access key
     */
    @Override
    public SauceOnDemandAuthentication getAuthentication() {
        return new SauceOnDemandAuthentication();
    }

    /**
     * @return - Screenshot capture object for the current test
     */
    public static ScreenshotCapture getCapture() {
        return capture.get();
    }

    /** Fires an Allure step-started event with the given step name. */
    public static void startStep(String stepName) {
        Allure.LIFECYCLE.fire(new StepStartedEvent(stepName));
    }

    /** Fires an Allure step-finished event for the most recent step. */
    public static void finishStep() {
        Allure.LIFECYCLE.fire(new StepFinishedEvent());
    }
}
package org.intermine.web; import javax.servlet.ServletContext; import javax.servlet.http.HttpSession; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.Action; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionMessages; import org.apache.struts.action.ActionMessage; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionError; import org.apache.struts.action.ActionErrors; import org.apache.struts.Globals; import org.intermine.objectstore.ObjectStore; import org.intermine.objectstore.ObjectStoreException; /** * Implementation of <strong>Action</strong> that saves a Query from a session. * * @author Richard Smith * @author Matthew Wakeling */ public class SaveQueryAction extends Action { /** * Process the specified HTTP request, and create the corresponding HTTP * response (or forward to another web component that will create it). * Return an <code>ActionForward</code> instance describing where and how * control should be forwarded, or <code>null</code> if the response has * already been completed. 
* * @param mapping The ActionMapping used to select this instance * @param form The optional ActionForm bean for this request (if any) * @param request The HTTP request we are processing * @param response The HTTP response we are creating * @return an ActionForward object defining where control goes next * * @exception Exception if the application business logic throws * an exception */ public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { HttpSession session = request.getSession(); ServletContext servletContext = session.getServletContext(); Profile profile = (Profile) session.getAttribute(Constants.PROFILE); ObjectStore os = (ObjectStore) servletContext.getAttribute(Constants.OBJECTSTORE); PathQuery query = (PathQuery) session.getAttribute(Constants.QUERY); String queryName = ((SaveQueryForm) form).getQueryName(); try { if (query.getInfo() == null) { query.setInfo(os.estimate(MainHelper.makeQuery(query, profile.getSavedBags()))); } } catch (ObjectStoreException e) { ActionErrors actionErrors = new ActionErrors(); actionErrors.add(ActionErrors.GLOBAL_ERROR, new ActionError("errors.query.objectstoreerror")); saveErrors(request, actionErrors); } SessionMethods.saveQuery(request, queryName, query); ActionMessages messages = (ActionMessages) request.getAttribute(Globals.MESSAGE_KEY); if (messages == null) { messages = new ActionMessages(); } messages.add("saveQuery", new ActionMessage("saveQuery.message", queryName)); request.setAttribute(Globals.MESSAGE_KEY, messages); return mapping.findForward("query"); } }
package com.github.dozedoff.commonj.util;

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import com.google.common.collect.EvictingQueue;

/**
 * Accumulates integer deltas and folds them, one {@link #sample()} call at a
 * time, into a fixed-size ring buffer of the most recent samples.
 *
 * <p>Thread-safety: deltas are accumulated with an {@link AtomicInteger} and
 * all ring-buffer access is synchronized on the buffer itself.
 */
public class Sampler {
    private final EvictingQueue<Integer> ringBuffer;
    private final AtomicInteger deltaSum = new AtomicInteger();

    /**
     * @param size maximum number of samples retained; older samples are
     *             evicted as new ones arrive
     */
    public Sampler(int size) {
        ringBuffer = EvictingQueue.create(size);
    }

    /**
     * Adds a delta to the running sum that the next {@link #sample()} call
     * will record.
     *
     * @param delta amount to add (may be negative)
     */
    public void addDelta(int delta) {
        deltaSum.addAndGet(delta);
    }

    /**
     * Records the accumulated delta sum as one sample and atomically resets
     * the sum to zero.
     */
    public void sample() {
        synchronized (ringBuffer) {
            ringBuffer.add(deltaSum.getAndSet(0));
        }
    }

    /**
     * @return the mean of the retained samples, or 0 if none have been
     *         recorded yet
     */
    public double getAverage() {
        synchronized (ringBuffer) {
            if (ringBuffer.isEmpty()) {
                return 0;
            }

            // Accumulate in a long: an int sum could overflow when many
            // large samples are retained.
            long sum = 0;
            for (int sample : ringBuffer) {
                sum += sample;
            }

            return sum / (double) ringBuffer.size();
        }
    }

    /**
     * @return a snapshot of the currently retained samples, oldest first
     */
    public List<Integer> getSamples() {
        Integer[] samples;
        synchronized (ringBuffer) {
            samples = new Integer[ringBuffer.size()];
            ringBuffer.toArray(samples);
        }
        return Arrays.asList(samples);
    }
}
package org.apache.xerces.validators.schema; import org.apache.xerces.framework.XMLErrorReporter; import org.apache.xerces.validators.common.Grammar; import org.apache.xerces.validators.common.GrammarResolver; import org.apache.xerces.validators.common.GrammarResolverImpl; import org.apache.xerces.validators.common.XMLElementDecl; import org.apache.xerces.validators.common.XMLAttributeDecl; import org.apache.xerces.validators.schema.SchemaSymbols; import org.apache.xerces.validators.schema.XUtil; import org.apache.xerces.validators.datatype.DatatypeValidator; import org.apache.xerces.validators.datatype.DatatypeValidatorFactoryImpl; import org.apache.xerces.validators.datatype.InvalidDatatypeValueException; import org.apache.xerces.utils.StringPool; import org.w3c.dom.Element; //REVISIT: for now, import everything in the DOM package import org.w3c.dom.*; import java.util.*; import java.net.URL; import java.net.MalformedURLException; //Unit Test import org.apache.xerces.parsers.DOMParser; import org.apache.xerces.validators.common.XMLValidator; import org.apache.xerces.validators.datatype.DatatypeValidator.*; import org.apache.xerces.validators.datatype.InvalidDatatypeValueException; import org.apache.xerces.framework.XMLContentSpec; import org.apache.xerces.utils.QName; import org.apache.xerces.utils.NamespacesScope; import org.apache.xerces.parsers.SAXParser; import org.apache.xerces.framework.XMLParser; import org.apache.xerces.framework.XMLDocumentScanner; import org.xml.sax.InputSource; import org.xml.sax.SAXParseException; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import java.io.IOException; import org.w3c.dom.Document; import org.apache.xml.serialize.OutputFormat; import org.apache.xml.serialize.XMLSerializer; import org.apache.xerces.validators.schema.SchemaSymbols; /** * Instances of this class get delegated to Traverse the Schema and * to populate the Grammar internal representation by * instances 
of Grammar objects. * Traverse a Schema Grammar: * As of April 07, 2000 the following is the * XML Representation of Schemas and Schema components, * Chapter 4 of W3C Working Draft. * <schema * attributeFormDefault = qualified | unqualified * blockDefault = #all or (possibly empty) subset of {equivClass, extension, restriction} * elementFormDefault = qualified | unqualified * finalDefault = #all or (possibly empty) subset of {extension, restriction} * id = ID * targetNamespace = uriReference * version = string> * Content: ((include | import | annotation)* , ((simpleType | complexType | element | group | attribute | attributeGroup | notation) , annotation*)+) * </schema> * * * <attribute * form = qualified | unqualified * id = ID * name = NCName * ref = QName * type = QName * use = default | fixed | optional | prohibited | required * value = string> * Content: (annotation? , simpleType?) * </> * * <element * abstract = boolean * block = #all or (possibly empty) subset of {equivClass, extension, restriction} * default = string * equivClass = QName * final = #all or (possibly empty) subset of {extension, restriction} * fixed = string * form = qualified | unqualified * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * nullable = boolean * ref = QName * type = QName> * Content: (annotation? , (simpleType | complexType)? , (unique | key | keyref)*) * </> * * * <complexType * abstract = boolean * base = QName * block = #all or (possibly empty) subset of {extension, restriction} * content = elementOnly | empty | mixed | textOnly * derivedBy = extension | restriction * final = #all or (possibly empty) subset of {extension, restriction} * id = ID * name = NCName> * Content: (annotation? 
, (((minExclusive | minInclusive | maxExclusive | maxInclusive | precision | scale | length | minLength | maxLength | encoding | period | duration | enumeration | pattern)* | (element | group | all | choice | sequence | any)*) , ((attribute | attributeGroup)* , anyAttribute?))) * </> * * * <attributeGroup * id = ID * name = NCName * ref = QName> * Content: (annotation?, (attribute|attributeGroup), anyAttribute?) * </> * * <anyAttribute * id = ID * namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace}> * Content: (annotation?) * </anyAttribute> * * <group * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * ref = QName> * Content: (annotation? , (element | group | all | choice | sequence | any)*) * </> * * <all * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? , (element | group | choice | sequence | any)*) * </all> * * <choice * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? , (element | group | choice | sequence | any)*) * </choice> * * <sequence * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? , (element | group | choice | sequence | any)*) * </sequence> * * * <any * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace} * processContents = lax | skip | strict> * Content: (annotation?) * </any> * * <unique * id = ID * name = NCName> * Content: (annotation? , (selector , field+)) * </unique> * * <key * id = ID * name = NCName> * Content: (annotation? , (selector , field+)) * </key> * * <keyref * id = ID * name = NCName * refer = QName> * Content: (annotation? 
, (selector , field+)) * </keyref> * * <selector> * Content: XPathExprApprox : An XPath expression * </selector> * * <field> * Content: XPathExprApprox : An XPath expression * </field> * * * <notation * id = ID * name = NCName * public = A public identifier, per ISO 8879 * system = uriReference> * Content: (annotation?) * </notation> * * <annotation> * Content: (appinfo | documentation)* * </annotation> * * <include * id = ID * schemaLocation = uriReference> * Content: (annotation?) * </include> * * <import * id = ID * namespace = uriReference * schemaLocation = uriReference> * Content: (annotation?) * </import> * * <simpleType * abstract = boolean * base = QName * derivedBy = | list | restriction : restriction * id = ID * name = NCName> * Content: ( annotation? , ( minExclusive | minInclusive | maxExclusive | maxInclusive | precision | scale | length | minLength | maxLength | encoding | period | duration | enumeration | pattern )* ) * </simpleType> * * <length * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </length> * * <minLength * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </minLength> * * <maxLength * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </maxLength> * * * <pattern * id = ID * value = string> * Content: ( annotation? ) * </pattern> * * * <enumeration * id = ID * value = string> * Content: ( annotation? ) * </enumeration> * * <maxInclusive * id = ID * value = string> * Content: ( annotation? ) * </maxInclusive> * * <maxExclusive * id = ID * value = string> * Content: ( annotation? ) * </maxExclusive> * * <minInclusive * id = ID * value = string> * Content: ( annotation? ) * </minInclusive> * * * <minExclusive * id = ID * value = string> * Content: ( annotation? ) * </minExclusive> * * <precision * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </precision> * * <scale * id = ID * value = nonNegativeInteger> * Content: ( annotation? 
) * </scale> * * <encoding * id = ID * value = | hex | base64 > * Content: ( annotation? ) * </encoding> * * * <duration * id = ID * value = timeDuration> * Content: ( annotation? ) * </duration> * * <period * id = ID * value = timeDuration> * Content: ( annotation? ) * </period> * * * @author Eric Ye, Jeffrey Rodriguez, Andy Clark * * @see org.apache.xerces.validators.common.Grammar * * @version $Id$ */ public class TraverseSchema implements NamespacesScope.NamespacesHandler{ //CONSTANTS private static final int TOP_LEVEL_SCOPE = -1; //debuggin private static boolean DEBUGGING = false; //private data members private XMLErrorReporter fErrorReporter = null; private StringPool fStringPool = null; private GrammarResolver fGrammarResolver = null; private SchemaGrammar fSchemaGrammar = null; private Element fSchemaRootElement; private DatatypeValidatorFactoryImpl fDatatypeRegistry = DatatypeValidatorFactoryImpl.getDatatypeRegistry(); private Hashtable fComplexTypeRegistry = new Hashtable(); private Hashtable fAttributeDeclRegistry = new Hashtable(); private Vector fIncludeLocations = new Vector(); private Vector fImportLocations = new Vector(); private int fAnonTypeCount =0; private int fScopeCount=0; private int fCurrentScope=TOP_LEVEL_SCOPE; private int fSimpleTypeAnonCount = 0; private Stack fCurrentTypeNameStack = new Stack(); private Hashtable fElementRecurseComplex = new Hashtable(); private boolean fElementDefaultQualified = false; private boolean fAttributeDefaultQualified = false; private int fTargetNSURI; private String fTargetNSURIString = ""; private NamespacesScope fNamespacesScope = null; private String fCurrentSchemaURL = ""; private XMLAttributeDecl fTempAttributeDecl = new XMLAttributeDecl(); private XMLElementDecl fTempElementDecl = new XMLElementDecl(); // REVISIT: maybe need to be moved into SchemaGrammar class public class ComplexTypeInfo { public String typeName; public DatatypeValidator baseDataTypeValidator; public ComplexTypeInfo 
baseComplexTypeInfo; public int derivedBy = 0; public int blockSet = 0; public int finalSet = 0; public boolean isAbstract = false; public int scopeDefined = -1; public int contentType; public int contentSpecHandle = -1; public int templateElementIndex = -1; public int attlistHead = -1; public DatatypeValidator datatypeValidator; } //REVISIT: verify the URI. public final static String SchemaForSchemaURI = "http: private TraverseSchema( ) { // new TraverseSchema() is forbidden; } public void setGrammarResolver(GrammarResolver grammarResolver){ fGrammarResolver = grammarResolver; } public void startNamespaceDeclScope(int prefix, int uri){ //TO DO } public void endNamespaceDeclScope(int prefix){ //TO DO, do we need to do anything here? } private String resolvePrefixToURI (String prefix) throws Exception { String uriStr = fStringPool.toString(fNamespacesScope.getNamespaceForPrefix(fStringPool.addSymbol(prefix))); if (uriStr == null) { // REVISIT: Localize reportGenericSchemaError("prefix : [" + prefix +"] can not be resolved to a URI"); return ""; } //REVISIT, !!!! a hack: needs to be updated later, cause now we only use localpart to key build-in datatype. 
if ( prefix.length()==0 && uriStr.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && fTargetNSURIString.length() == 0) { uriStr = ""; } return uriStr; } public TraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver, XMLErrorReporter errorReporter, String schemaURL ) throws Exception { fErrorReporter = errorReporter; fCurrentSchemaURL = schemaURL; doTraverseSchema(root, stringPool, schemaGrammar, grammarResolver); } public TraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver ) throws Exception { doTraverseSchema(root, stringPool, schemaGrammar, grammarResolver); } public void doTraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver) throws Exception { fNamespacesScope = new NamespacesScope(this); fSchemaRootElement = root; fStringPool = stringPool; fSchemaGrammar = schemaGrammar; fGrammarResolver = grammarResolver; if (root == null) { // REVISIT: Anything to do? return; } //Make sure namespace binding is defaulted String rootPrefix = root.getPrefix(); if( rootPrefix == null || rootPrefix.length() == 0 ){ String xmlns = root.getAttribute("xmlns"); if( xmlns.length() == 0 ) root.setAttribute("xmlns", SchemaSymbols.URI_SCHEMAFORSCHEMA ); } //Retrieve the targetnamespace URI information fTargetNSURIString = root.getAttribute(SchemaSymbols.ATT_TARGETNAMESPACE); if (fTargetNSURIString==null) { fTargetNSURIString=""; } fTargetNSURI = fStringPool.addSymbol(fTargetNSURIString); if (fGrammarResolver == null) { // REVISIT: Localize reportGenericSchemaError("Internal error: don't have a GrammarResolver for TraverseSchema"); } else{ // for complex type registry, attribute decl registry and // namespace mapping, needs to check whether the passed in // Grammar was a newly instantiated one. 
if (fSchemaGrammar.getComplexTypeRegistry() == null ) { fSchemaGrammar.setComplexTypeRegistry(fComplexTypeRegistry); } else { fComplexTypeRegistry = fSchemaGrammar.getComplexTypeRegistry(); } if (fSchemaGrammar.getAttirubteDeclRegistry() == null ) { fSchemaGrammar.setAttributeDeclRegistry(fAttributeDeclRegistry); } else { fAttributeDeclRegistry = fSchemaGrammar.getAttirubteDeclRegistry(); } if (fSchemaGrammar.getNamespacesScope() == null ) { fSchemaGrammar.setNamespacesScope(fNamespacesScope); } else { fNamespacesScope = fSchemaGrammar.getNamespacesScope(); } fSchemaGrammar.setDatatypeRegistry(fDatatypeRegistry); fSchemaGrammar.setTargetNamespaceURI(fTargetNSURIString); fGrammarResolver.putGrammar(fTargetNSURIString, fSchemaGrammar); } // Retrived the Namespace mapping from the schema element. NamedNodeMap schemaEltAttrs = root.getAttributes(); int i = 0; Attr sattr = null; boolean seenXMLNS = false; while ((sattr = (Attr)schemaEltAttrs.item(i++)) != null) { String attName = sattr.getName(); if (attName.startsWith("xmlns:")) { String attValue = sattr.getValue(); String prefix = attName.substring(attName.indexOf(":")+1); fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(prefix), fStringPool.addSymbol(attValue) ); } if (attName.equals("xmlns")) { String attValue = sattr.getValue(); fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""), fStringPool.addSymbol(attValue) ); seenXMLNS = true; } } if (!seenXMLNS && fTargetNSURIString.length() == 0 ) { fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""), fStringPool.addSymbol("") ); } fElementDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ELEMENTFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); fAttributeDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ATTRIBUTEFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); //REVISIT, really sticky when noTargetNamesapce, for now, we assume everyting is in the same name space); if (fTargetNSURI == StringPool.EMPTY_STRING) { 
fElementDefaultQualified = true; //fAttributeDefaultQualified = true; } //fScopeCount++; fCurrentScope = -1; checkTopLevelDuplicateNames(root); //extract all top-level attribute, attributeGroup, and group Decls and put them in the 3 hasn table in the SchemaGrammar. extractTopLevel3Components(root); for (Element child = XUtil.getFirstChildElement(root); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getNodeName(); if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) { traverseAnnotationDecl(child); } else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE )) { traverseSimpleTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_COMPLEXTYPE )) { traverseComplexTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_ELEMENT )) { traverseElementDecl(child); } else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { //traverseAttributeGroupDecl(child); } else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) { traverseAttributeDecl( child, null ); } else if (name.equals( SchemaSymbols.ELT_WILDCARD) ) { traverseWildcardDecl( child); } else if (name.equals(SchemaSymbols.ELT_GROUP) && child.getAttribute(SchemaSymbols.ATT_REF).equals("")) { //traverseGroupDecl(child); } else if (name.equals(SchemaSymbols.ELT_NOTATION)) { ; //TO DO } else if (name.equals(SchemaSymbols.ELT_INCLUDE)) { traverseInclude(child); } else if (name.equals(SchemaSymbols.ELT_IMPORT)) { traverseImport(child); } } // for each child node } // traverseSchema(Element) private void checkTopLevelDuplicateNames(Element root) { //TO DO : !!! 
} private void extractTopLevel3Components(Element root){ for (Element child = XUtil.getFirstChildElement(root); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getNodeName(); if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { fSchemaGrammar.topLevelAttrGrpDecls.put(name, child); } else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) { fSchemaGrammar.topLevelAttrDecls.put(name, child); } else if (name.equals(SchemaSymbols.ELT_GROUP) && child.getAttribute(SchemaSymbols.ATT_REF).equals("")) { fSchemaGrammar.topLevelGroupDecls.put(name, child); } } // for each child node } /** * Expands a system id and returns the system id as a URL, if * it can be expanded. A return value of null means that the * identifier is already expanded. An exception thrown * indicates a failure to expand the id. * * @param systemId The systemId to be expanded. * * @return Returns the URL object representing the expanded system * identifier. A null value indicates that the given * system identifier is already expanded. * */ private String expandSystemId(String systemId, String currentSystemId) throws Exception{ String id = systemId; // check for bad parameters id if (id == null || id.length() == 0) { return systemId; } // if id already expanded, return try { URL url = new URL(id); if (url != null) { return systemId; } } catch (MalformedURLException e) { // continue on... } // normalize id id = fixURI(id); // normalize base URL base = null; URL url = null; try { if (currentSystemId == null) { String dir; try { dir = fixURI(System.getProperty("user.dir")); } catch (SecurityException se) { dir = ""; } if (!dir.endsWith("/")) { dir = dir + "/"; } base = new URL("file", "", dir); } else { base = new URL(currentSystemId); } // expand id url = new URL(base, id); } catch (Exception e) { // let it go through } if (url == null) { return systemId; } return url.toString(); } /** * Fixes a platform dependent filename to standard URI form. * * @param str The string to fix. 
* * @return Returns the fixed URI string. */ private static String fixURI(String str) { // handle platform dependent strings str = str.replace(java.io.File.separatorChar, '/'); // Windows fix if (str.length() >= 2) { char ch1 = str.charAt(1); if (ch1 == ':') { char ch0 = Character.toUpperCase(str.charAt(0)); if (ch0 >= 'A' && ch0 <= 'Z') { str = "/" + str; } } } // done return str; } private void traverseInclude(Element includeDecl) throws Exception { //TO DO: !!!!! location needs to be resolved first. String location = includeDecl.getAttribute(SchemaSymbols.ATT_SCHEMALOCATION); location = expandSystemId(location, fCurrentSchemaURL); if (fIncludeLocations.contains((Object)location)) { return; } fIncludeLocations.addElement((Object)location); DOMParser parser = new DOMParser() { public void ignorableWhitespace(char ch[], int start, int length) {} public void ignorableWhitespace(int dataIdx) {} }; parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() ); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://xml.org/sax/features/namespaces", true); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } try { parser.parse( location); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { //e.printStackTrace(); } Document document = parser.getDocument(); //Our Grammar Element root = null; if (document != null) { root = document.getDocumentElement(); } if (root != null) { String targetNSURI = root.getAttribute(SchemaSymbols.ATT_TARGETNAMESPACE); if (targetNSURI.length() > 0 && !targetNSURI.equals(fTargetNSURIString) ) { // REVISIT: Localize reportGenericSchemaError("included schema '"+location+"' has a different targetNameSpace '" +targetNSURI+"'"); } else { boolean saveElementDefaultQualified = 
fElementDefaultQualified;
        boolean saveAttributeDefaultQualified = fAttributeDefaultQualified;
        int saveScope = fCurrentScope;
        String savedSchemaURL = fCurrentSchemaURL;
        Element saveRoot = fSchemaRootElement;
        // Point the traversal state at the included document, recurse, then
        // restore every saved field so the including schema's traversal
        // continues unaffected.
        fSchemaRootElement = root;
        fCurrentSchemaURL = location;
        traverseIncludedSchema(root);
        fCurrentSchemaURL = savedSchemaURL;
        fCurrentScope = saveScope;
        fElementDefaultQualified = saveElementDefaultQualified;
        fAttributeDefaultQualified = saveAttributeDefaultQualified;
        fSchemaRootElement = saveRoot;
        }
        }
    }

    /**
     * Traverses the root element of an included schema document: rebuilds the
     * prefix-to-uri mappings from its xmlns attributes, resets the form
     * defaults and scope, then dispatches each top-level child to the
     * appropriate traverseXXX method.  Mutates the shared traversal state;
     * callers (traverseInclude above) are responsible for saving/restoring it.
     */
    private void traverseIncludedSchema(Element root) throws Exception {
        // Retrieve the Namespace mapping from the schema element.
        NamedNodeMap schemaEltAttrs = root.getAttributes();
        int i = 0;
        Attr sattr = null;
        boolean seenXMLNS = false;
        // NamedNodeMap.item returns null past the end, terminating the loop.
        while ((sattr = (Attr)schemaEltAttrs.item(i++)) != null) {
            String attName = sattr.getName();
            if (attName.startsWith("xmlns:")) {
                String attValue = sattr.getValue();
                String prefix = attName.substring(attName.indexOf(":")+1);
                fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(prefix),
                                                        fStringPool.addSymbol(attValue) );
            }
            if (attName.equals("xmlns")) {
                String attValue = sattr.getValue();
                fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""),
                                                        fStringPool.addSymbol(attValue) );
                seenXMLNS = true;
            }
        }
        // No default xmlns declaration and no target namespace: map the empty
        // prefix to the empty uri so later lookups succeed.
        if (!seenXMLNS && fTargetNSURIString.length() == 0 ) {
            fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""),
                                                    fStringPool.addSymbol("") );
        }

        fElementDefaultQualified =
            root.getAttribute(SchemaSymbols.ATT_ELEMENTFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED);
        fAttributeDefaultQualified =
            root.getAttribute(SchemaSymbols.ATT_ATTRIBUTEFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED);

        //REVISIT, really sticky when noTargetNamesapce, for now, we assume everyting is in the same name space);
        if (fTargetNSURI == StringPool.EMPTY_STRING) {
            fElementDefaultQualified = true;
            //fAttributeDefaultQualified = true;
        }

        //fScopeCount++;
        fCurrentScope = -1;

        checkTopLevelDuplicateNames(root);

        //extract all top-level attribute, attributeGroup, and group Decls and
        //put them in the 3 hash tables in the SchemaGrammar.
        extractTopLevel3Components(root);

        // Dispatch each top-level child to its traverser.  attributeGroup and
        // group declarations were already registered by
        // extractTopLevel3Components, hence the commented-out calls.
        for (Element child = XUtil.getFirstChildElement(root); child != null;
             child = XUtil.getNextSiblingElement(child)) {
            String name = child.getNodeName();
            if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) {
                traverseAnnotationDecl(child);
            }
            else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE )) {
                traverseSimpleTypeDecl(child);
            }
            else if (name.equals(SchemaSymbols.ELT_COMPLEXTYPE )) {
                traverseComplexTypeDecl(child);
            }
            else if (name.equals(SchemaSymbols.ELT_ELEMENT )) {
                traverseElementDecl(child);
            }
            else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) {
                //traverseAttributeGroupDecl(child);
            }
            else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) {
                traverseAttributeDecl( child, null );
            }
            else if (name.equals( SchemaSymbols.ELT_WILDCARD) ) {
                traverseWildcardDecl( child);
            }
            else if (name.equals(SchemaSymbols.ELT_GROUP) && child.getAttribute(SchemaSymbols.ATT_REF).equals("")) {
                //traverseGroupDecl(child);
            }
            else if (name.equals(SchemaSymbols.ELT_NOTATION)) {
                ; //TO DO
            }
            else if (name.equals(SchemaSymbols.ELT_INCLUDE)) {
                traverseInclude(child);
            }
            else if (name.equals(SchemaSymbols.ELT_IMPORT)) {
                traverseImport(child);
            }
        } // for each child node
    }

    /**
     * Traverses an &lt;import&gt; declaration: resolves and parses the
     * imported schema document, verifies that its targetNamespace matches the
     * declared namespace attribute, and traverses it into the grammar
     * registered (or newly created) for that namespace.  Already-imported
     * locations are skipped via fImportLocations.
     *
     * NOTE(review): parser setup and parse failures are only printed via
     * printStackTrace rather than reported through the error reporter —
     * presumably best-effort by design; confirm before changing.
     */
    private void traverseImport(Element importDecl) throws Exception {
        String location = importDecl.getAttribute(SchemaSymbols.ATT_SCHEMALOCATION);
        location = expandSystemId(location, fCurrentSchemaURL);
        String namespaceString = importDecl.getAttribute(SchemaSymbols.ATT_NAMESPACE);
        // Reuse the grammar already registered for this namespace if any,
        // otherwise start a fresh one.
        SchemaGrammar importedGrammar = new SchemaGrammar();
        if (fGrammarResolver.getGrammar(namespaceString) != null) {
            importedGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(namespaceString);
        }
        // Guard against importing the same document twice.
        if (fImportLocations.contains((Object)location)) {
            return;
        }
        fImportLocations.addElement((Object)location);
        // Anonymous subclass drops ignorable whitespace so the DOM stays compact.
        DOMParser parser = new DOMParser() {
            public void ignorableWhitespace(char ch[], int start, int length) {}
            public void ignorableWhitespace(int dataIdx) {}
        };
        parser.setEntityResolver( new Resolver() );
        parser.setErrorHandler(  new ErrorHandler() );

        try {
            parser.setFeature("http://xml.org/sax/features/validation", false);
            parser.setFeature("http://xml.org/sax/features/namespaces", true);
            parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false);
        }catch(  org.xml.sax.SAXNotRecognizedException e ) {
            e.printStackTrace();
        }catch( org.xml.sax.SAXNotSupportedException e ) {
            e.printStackTrace();
        }

        try {
            parser.parse( location);
        }catch( IOException e ) {
            e.printStackTrace();
        }catch( SAXException e ) {
            e.printStackTrace();
        }

        Document     document   = parser.getDocument(); //Our Grammar

        Element root = null;
        if (document != null) {
            root = document.getDocumentElement();
        }

        if (root != null) {
            String targetNSURI = root.getAttribute(SchemaSymbols.ATT_TARGETNAMESPACE);
            if (!targetNSURI.equals(namespaceString) ) {
                // REVISIT: Localize
                reportGenericSchemaError("imported schema '"+location+"' has a different targetNameSpace '"
                                         +targetNSURI+"' from what is declared '"+namespaceString+"'.");
            }
            else
                // Traversing the imported document populates importedGrammar
                // as a side effect of the constructor.
                new TraverseSchema(root, fStringPool, importedGrammar, fGrammarResolver, fErrorReporter, location);
        }
        else {
            reportGenericSchemaError("Could not get the doc root for imported Schema file: "+location);
        }
    }

    /**
     * No-op - Traverse Annotation Declaration
     *
     * @param comment the annotation element (currently ignored)
     */
    private void traverseAnnotationDecl(Element comment) {
        //TO DO
        return ;
    }

    /**
     * Traverse SimpleType declaration:
     * <simpleType
     *         abstract = boolean
     *         base = QName
     *         derivedBy = | list | restriction  : restriction
     *         id = ID
     *         name = NCName>
     *   Content: ( annotation?
, ( minExclusive | minInclusive | maxExclusive | maxInclusive
     *     | precision | scale | length | minLength | maxLength | encoding
     *     | period | duration | enumeration | pattern )* )
     * </simpleType>
     *
     * @param simpleTypeDecl the simpleType element to traverse
     * @return the string-pool symbol for the (possibly namespace-prefixed)
     *         type name, or -1 if the base type could not be resolved
     */
    private int traverseSimpleTypeDecl( Element simpleTypeDecl ) throws Exception {

        // derivedBy defaults to "restriction" when absent.
        String varietyProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_DERIVEDBY );
        if (varietyProperty.length() == 0) {
            varietyProperty = SchemaSymbols.ATTVAL_RESTRICTION;
        }
        String nameProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME );
        String baseTypeQNameProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE );
        String abstractProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_ABSTRACT );

        int newSimpleTypeName = -1;
        if ( nameProperty.equals("")) { // anonymous simpleType
            // Generate a unique internal name for anonymous types.
            newSimpleTypeName = fStringPool.addSymbol( "#S#"+fSimpleTypeAnonCount++ );
        }
        else
            newSimpleTypeName = fStringPool.addSymbol( nameProperty );

        int basetype;
        DatatypeValidator baseValidator = null;

        // Resolve the base type's validator; if it is a not-yet-compiled
        // top-level simpleType, traverse it first, then retry the lookup.
        if( baseTypeQNameProperty!= null ) {
            basetype = fStringPool.addSymbol( baseTypeQNameProperty );
            String prefix = "";
            String localpart = baseTypeQNameProperty;
            int colonptr = baseTypeQNameProperty.indexOf(":");
            if ( colonptr > 0) {
                prefix = baseTypeQNameProperty.substring(0,colonptr);
                localpart = baseTypeQNameProperty.substring(colonptr+1);
            }
            String uri = resolvePrefixToURI(prefix);
            baseValidator = getDatatypeValidator(uri, localpart);
            if (baseValidator == null) {
                Element baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart);
                if (baseTypeNode != null) {
                    traverseSimpleTypeDecl( baseTypeNode );
                    baseValidator = getDatatypeValidator(uri, localpart);
                    if (baseValidator == null) {
                        reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype,
                                          new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ),
                                              simpleTypeDecl.getAttribute(SchemaSymbols.ATT_NAME) });
                        return -1;
                        //reportGenericSchemaError("Base type could not be found : " + baseTypeQNameProperty);
                    }
                }
                else {
                    reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype,
                                      new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ),
                                          simpleTypeDecl.getAttribute(SchemaSymbols.ATT_NAME) });
                    return -1;
                    //reportGenericSchemaError("Base type could not be found : " + baseTypeQNameProperty);
                }
            }
        }

        // Any Children if so then check Content otherwise bail out
        Element content = XUtil.getFirstChildElement( simpleTypeDecl );
        int numFacets = 0;
        Hashtable facetData = null;
        if( content != null ) {
            //Content follows: ( annotation? , facets* )

            //annotation ? ( 0 or 1 )
            if( content.getNodeName().equals( SchemaSymbols.ELT_ANNOTATION ) ){
                traverseAnnotationDecl( content );
                content = XUtil.getNextSiblingElement(content);
            }

            //TODO: If content is annotation again should raise validation error
            // if( content.getNodeName().equal( SchemaSymbols.ELT_ANNOTATIO ) {
            //     throw ValidationException(); }

            //facets    * ( 0 or more )
            // enumeration values are collected in enumData; every other facet
            // goes into facetData keyed by its element name.
            int numEnumerationLiterals = 0;
            facetData        = new Hashtable();
            Vector enumData  = new Vector();

            while (content != null) {
                if (content.getNodeType() == Node.ELEMENT_NODE) {
                    Element facetElt = (Element) content;
                    numFacets++;
                    if (facetElt.getNodeName().equals(SchemaSymbols.ELT_ENUMERATION)) {
                        numEnumerationLiterals++;
                        String enumVal = facetElt.getAttribute(SchemaSymbols.ATT_VALUE);
                        enumData.addElement(enumVal);
                        //Enumerations can have annotations ? ( 0 | 1 )
                        // NOTE(review): the `enumContent != null` test is
                        // duplicated, and traverseAnnotationDecl is passed
                        // `content` rather than `enumContent` — looks like a
                        // copy/paste slip; harmless today since
                        // traverseAnnotationDecl is a no-op, but confirm
                        // before relying on it.
                        Element enumContent =  XUtil.getFirstChildElement( facetElt );
                        if( enumContent != null && enumContent != null &&
                            enumContent.getNodeName().equals( SchemaSymbols.ELT_ANNOTATION ) ){
                            traverseAnnotationDecl( content );
                        }
                        //TODO: If enumContent is encounter again should raise validation error
                        // enumContent.getNextSibling();
                        // if( enumContent.getNodeName().equal( SchemaSymbols.ELT_ANNOTATIO ) {
                        //     throw ValidationException(); }
                    }
                    else {
                        facetData.put(facetElt.getNodeName(),facetElt.getAttribute( SchemaSymbols.ATT_VALUE ));
                    }
                }
                //content = (Element) content.getNextSibling();
                content = XUtil.getNextSiblingElement(content);
            }
            if (numEnumerationLiterals > 0) {
                facetData.put(SchemaSymbols.ELT_ENUMERATION, enumData);
            }
        }

        // create & register validator for "generated" type if it doesn't exist
        String nameOfType = fStringPool.toString( newSimpleTypeName);
        if (fTargetNSURIString.length () != 0) {
            nameOfType = fTargetNSURIString+","+nameOfType;
        }
        try {
            DatatypeValidator newValidator =
                fDatatypeRegistry.getDatatypeValidator( nameOfType );
            if( newValidator == null ) { // not previously registered
                boolean derivedByList =
                    varietyProperty.equals( SchemaSymbols.ATTVAL_LIST ) ? true:false;
                fDatatypeRegistry.createDatatypeValidator( nameOfType, baseValidator,
                                                           facetData, derivedByList );
            }
        } catch (Exception e) {
            //e.printStackTrace(System.err);
            reportSchemaError(SchemaMessageProvider.DatatypeError,new Object [] { e.getMessage() });
        }
        return fStringPool.addSymbol(nameOfType);
    }

    /*
     * <any
     *   id = ID
     *   maxOccurs = string
     *   minOccurs = nonNegativeInteger
     *   namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace}
     *   processContents = lax | skip | strict>
     *   Content: (annotation?)
* </any> */ private int traverseAny(Element child) throws Exception { int anyIndex = -1; String namespace = child.getAttribute(SchemaSymbols.ATT_NAMESPACE).trim(); String processContents = child.getAttribute("processContents").trim(); int processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY; int processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER; int processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_LOCAL; if (processContents.length() > 0 && !processContents.equals("strict")) { if (processContents.equals("lax")) { processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY_LAX; processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER_LAX; processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_LOCAL_LAX; } else if (processContents.equals("skip")) { processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY_SKIP; processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER_SKIP; processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_LOCAL_SKIP; } } if (namespace.length() == 0 || namespace.equals("##any")) { anyIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, -1, false); } else if (namespace.equals("##other")) { String uri = child.getOwnerDocument().getDocumentElement().getAttribute("targetNamespace"); int uriIndex = fStringPool.addSymbol(uri); anyIndex = fSchemaGrammar.addContentSpecNode(processContentsAnyOther, -1, uriIndex, false); } else if (namespace.equals("##local")) { anyIndex = fSchemaGrammar.addContentSpecNode(processContentsAnyLocal, -1, -1, false); } else if (namespace.length() > 0) { StringTokenizer tokenizer = new StringTokenizer(namespace); Vector tokens = new Vector(); while (tokenizer.hasMoreElements()) { String token = tokenizer.nextToken(); if (token.equals("##targetNamespace")) { token = child.getOwnerDocument().getDocumentElement().getAttribute("targetNamespace"); } tokens.addElement(token); } String uri = (String)tokens.elementAt(0); int uriIndex = fStringPool.addSymbol(uri); int 
leafIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, uriIndex, false); int valueIndex = leafIndex; int count = tokens.size(); if (count > 1) { uri = (String)tokens.elementAt(1); uriIndex = fStringPool.addSymbol(uri); leafIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, uriIndex, false); int otherValueIndex = leafIndex; int choiceIndex = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, valueIndex, otherValueIndex, false); for (int i = 2; i < count; i++) { uri = (String)tokens.elementAt(i); uriIndex = fStringPool.addSymbol(uri); leafIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, uriIndex, false); otherValueIndex = leafIndex; choiceIndex = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, choiceIndex, otherValueIndex, false); } anyIndex = choiceIndex; } else { anyIndex = leafIndex; } } else { // REVISIT: Localize reportGenericSchemaError("Empty namespace attribute for any element"); } return anyIndex; } public DatatypeValidator getDatatypeValidator(String uri, String localpart) { DatatypeValidator dv = null; if (uri.length()==0 || uri.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)) { dv = fDatatypeRegistry.getDatatypeValidator( localpart ); } else { dv = fDatatypeRegistry.getDatatypeValidator( uri+","+localpart ); } return dv; } /* * <anyAttribute * id = ID * namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace}> * Content: (annotation?) 
 * </anyAttribute>
 */
    /**
     * Traverses an &lt;anyAttribute&gt; wildcard into an XMLAttributeDecl:
     * sets the wildcard type from the namespace attribute and the
     * processContents default (strict unless skip/lax is specified).
     *
     * @param anyAttributeDecl the &lt;anyAttribute&gt; element
     * @return a freshly built XMLAttributeDecl describing the wildcard
     */
    private XMLAttributeDecl traverseAnyAttribute(Element anyAttributeDecl) throws Exception {
        XMLAttributeDecl anyAttDecl = new XMLAttributeDecl();
        String processContents = anyAttributeDecl.getAttribute(SchemaSymbols.ATT_PROCESSCONTENTS).trim();
        String namespace = anyAttributeDecl.getAttribute(SchemaSymbols.ATT_NAMESPACE).trim();
        String curTargetUri = anyAttributeDecl.getOwnerDocument().getDocumentElement().getAttribute("targetNamespace");

        if ( namespace.length() == 0 || namespace.equals(SchemaSymbols.ATTVAL_TWOPOUNDANY) ) {
            anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_ANY;
        }
        else if (namespace.equals(SchemaSymbols.ATTVAL_TWOPOUNDOTHER)) {
            // ##other: record the excluded (target) namespace in name.uri.
            anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_OTHER;
            anyAttDecl.name.uri = fStringPool.addSymbol(curTargetUri);
        }
        else if (namespace.equals(SchemaSymbols.ATTVAL_TWOPOUNDLOCAL)) {
            anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_LOCAL;
        }
        else if (namespace.length() > 0){
            // Explicit uri list: stored as a string-pool string list in
            // anyAttDecl.enumeration; ##targetNamespace is substituted.
            anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_LIST;

            StringTokenizer tokenizer = new StringTokenizer(namespace);
            int aStringList = fStringPool.startStringList();
            Vector tokens = new Vector();
            while (tokenizer.hasMoreElements()) {
                String token = tokenizer.nextToken();
                if (token.equals("##targetNamespace")) {
                    token = curTargetUri;
                }
                if (!fStringPool.addStringToList(aStringList, fStringPool.addSymbol(token))){
                    reportGenericSchemaError("Internal StringPool error when reading the "+
                                             "namespace attribute for anyattribute declaration");
                }
            }
            fStringPool.finishStringList(aStringList);

            anyAttDecl.enumeration = aStringList;
        }
        else {
            // REVISIT: Localize
            reportGenericSchemaError("Empty namespace attribute for anyattribute declaration");
        }

        // default processContents is "strict";
        anyAttDecl.defaultType = XMLAttributeDecl.PROCESSCONTENTS_STRICT;
        if (processContents.equals(SchemaSymbols.ATTVAL_SKIP)){
            anyAttDecl.defaultType = XMLAttributeDecl.PROCESSCONTENTS_SKIP;
        }
        else if (processContents.equals(SchemaSymbols.ATTVAL_LAX)) {
            anyAttDecl.defaultType = XMLAttributeDecl.PROCESSCONTENTS_LAX;
        }

        return anyAttDecl;
    }

    /**
     * Computes the intersection of two attribute wildcards, case by case over
     * the wildcard kinds (ANY / OTHER / LOCAL / LIST).  A type of -1 marks an
     * empty (unsatisfiable) intersection.  NOTE: may mutate and return one of
     * its arguments rather than allocating a fresh declaration.
     */
    private XMLAttributeDecl mergeTwoAnyAttribute(XMLAttributeDecl oneAny, XMLAttributeDecl anotherAny) {
        // An already-empty wildcard absorbs everything.
        if (oneAny.type == -1) {
            return oneAny;
        }
        if (anotherAny.type == -1) {
            return anotherAny;
        }
        // ANY is the identity element: the other operand wins.
        if (oneAny.type == XMLAttributeDecl.TYPE_ANY_ANY) {
            return anotherAny;
        }
        if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_ANY) {
            return oneAny;
        }
        if (oneAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) {
            if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) {
                // OTHER ∩ OTHER: equal excluded uris intersect to themselves,
                // different excluded uris yield the empty wildcard.
                if ( anotherAny.name.uri == oneAny.name.uri ) {
                    return oneAny;
                }
                else {
                    oneAny.type = -1;
                    return oneAny;
                }
            }
            else if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_LOCAL) {
                return anotherAny;
            }
            else if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST) {
                // OTHER ∩ LIST: drop the excluded uri from the list if present.
                if (!fStringPool.stringInList(anotherAny.enumeration, oneAny.name.uri) ) {
                    return anotherAny;
                }
                else {
                    int[] anotherAnyURIs = fStringPool.stringListAsIntArray(anotherAny.enumeration);
                    int newList = fStringPool.startStringList();
                    for (int i=0; i< anotherAnyURIs.length; i++) {
                        if (anotherAnyURIs[i] != oneAny.name.uri ) {
                            fStringPool.addStringToList(newList, anotherAnyURIs[i]);
                        }
                    }
                    fStringPool.finishStringList(newList);
                    anotherAny.enumeration = newList;
                    return anotherAny;
                }
            }
        }
        if (oneAny.type == XMLAttributeDecl.TYPE_ANY_LOCAL) {
            if ( anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER
                 || anotherAny.type == XMLAttributeDecl.TYPE_ANY_LOCAL) {
                return oneAny;
            }
            else if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST) {
                oneAny.type = -1;
                return oneAny;
            }
        }
        if (oneAny.type == XMLAttributeDecl.TYPE_ANY_LIST) {
            if ( anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER){
                // LIST ∩ OTHER: symmetric to the OTHER ∩ LIST case above.
                if (!fStringPool.stringInList(oneAny.enumeration, anotherAny.name.uri) ) {
                    return oneAny;
                }
                else {
                    int[] oneAnyURIs = fStringPool.stringListAsIntArray(oneAny.enumeration);
                    int newList = fStringPool.startStringList();
                    for (int i=0; i< oneAnyURIs.length; i++) {
                        if (oneAnyURIs[i] != anotherAny.name.uri ) {
                            fStringPool.addStringToList(newList, oneAnyURIs[i]);
} } fStringPool.finishStringList(newList); oneAny.enumeration = newList; return oneAny; } } else if ( anotherAny.type == XMLAttributeDecl.TYPE_ANY_LOCAL) { oneAny.type = -1; return oneAny; } else if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { int[] result = intersect2sets( fStringPool.stringListAsIntArray(oneAny.enumeration), fStringPool.stringListAsIntArray(anotherAny.enumeration)); int newList = fStringPool.startStringList(); for (int i=0; i<result.length; i++) { fStringPool.addStringToList(newList, result[i]); } fStringPool.finishStringList(newList); oneAny.enumeration = newList; return oneAny; } } // should never go there; return oneAny; } int[] intersect2sets(int[] one, int[] theOther){ int[] result = new int[(one.length>theOther.length?one.length:theOther.length)]; // simple implemention, int count = 0; for (int i=0; i<one.length; i++) { for(int j=0; j<theOther.length; j++) { if (one[i]==theOther[j]) { result[count++] = one[i]; } } } int[] result2 = new int[count]; System.arraycopy(result, 0, result2, 0, count); return result2; } /** * Traverse ComplexType Declaration. * * <complexType * abstract = boolean * base = QName * block = #all or (possibly empty) subset of {extension, restriction} * content = elementOnly | empty | mixed | textOnly * derivedBy = extension | restriction * final = #all or (possibly empty) subset of {extension, restriction} * id = ID * name = NCName> * Content: (annotation? , (((minExclusive | minInclusive | maxExclusive * | maxInclusive | precision | scale | length | minLength * | maxLength | encoding | period | duration | enumeration * | pattern)* | (element | group | all | choice | sequence | any)*) , * ((attribute | attributeGroup)* , anyAttribute?))) * </complexType> * @param complexTypeDecl * @return */ //REVISIT: TO DO, base and derivation ??? 
private int traverseComplexTypeDecl( Element complexTypeDecl ) throws Exception { String isAbstract = complexTypeDecl.getAttribute( SchemaSymbols.ATT_ABSTRACT ); String base = complexTypeDecl.getAttribute(SchemaSymbols.ATT_BASE); String blockSet = complexTypeDecl.getAttribute( SchemaSymbols.ATT_BLOCK ); String content = complexTypeDecl.getAttribute(SchemaSymbols.ATT_CONTENT); String derivedBy = complexTypeDecl.getAttribute( SchemaSymbols.ATT_DERIVEDBY ); String finalSet = complexTypeDecl.getAttribute( SchemaSymbols.ATT_FINAL ); String typeId = complexTypeDecl.getAttribute( SchemaSymbols.ATTVAL_ID ); String typeName = complexTypeDecl.getAttribute(SchemaSymbols.ATT_NAME); boolean isNamedType = false; if ( DEBUGGING ) System.out.println("traversing complex Type : " + typeName +","+base+","+content+"."); if (typeName.equals("")) { // gensym a unique name typeName = "#"+fAnonTypeCount++; } else { fCurrentTypeNameStack.push(typeName); isNamedType = true; } if (isTopLevel(complexTypeDecl)) { String fullName = fTargetNSURIString+","+typeName; ComplexTypeInfo temp = (ComplexTypeInfo) fComplexTypeRegistry.get(fullName); if (temp != null ) { return fStringPool.addSymbol(fullName); } } int scopeDefined = fScopeCount++; int previousScope = fCurrentScope; fCurrentScope = scopeDefined; Element child = null; int contentSpecType = -1; int csnType = 0; int left = -2; int right = -2; ComplexTypeInfo baseTypeInfo = null; //if base is a complexType; DatatypeValidator baseTypeValidator = null; //if base is a simple type or a complex type derived from a simpleType DatatypeValidator simpleTypeValidator = null; int baseTypeSymbol = -1; String fullBaseName = ""; boolean baseIsSimpleSimple = false; boolean baseIsComplexSimple = false; boolean baseFromAnotherSchema = false; String baseTypeSchemaURI = null; boolean derivedByRestriction = true; boolean derivedByExtension = false; int baseContentSpecHandle = -1; Element baseTypeNode = null; //int parsedderivedBy = 
parseComplexDerivedBy(derivedBy); //handle the inhreitance here. if (base.length()>0) { //first check if derivedBy is present if (derivedBy.length() == 0) { // REVISIT: Localize reportGenericSchemaError("derivedBy must be present when base is present in " +SchemaSymbols.ELT_COMPLEXTYPE +" "+ typeName); derivedBy = SchemaSymbols.ATTVAL_EXTENSION; } if (derivedBy.equals(SchemaSymbols.ATTVAL_EXTENSION)) { derivedByRestriction = false; } String prefix = ""; String localpart = base; int colonptr = base.indexOf(":"); if ( colonptr > 0) { prefix = base.substring(0,colonptr); localpart = base.substring(colonptr+1); } int localpartIndex = fStringPool.addSymbol(localpart); String typeURI = resolvePrefixToURI(prefix); // check if the base type is from the same Schema; if ( ! typeURI.equals(fTargetNSURIString) && ! typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && typeURI.length() != 0 ) /*REVISIT, !!!! a hack: for schema that has no target namespace, e.g. personal-schema.xml*/{ baseFromAnotherSchema = true; baseTypeSchemaURI = typeURI; baseTypeInfo = getTypeInfoFromNS(typeURI, localpart); if (baseTypeInfo == null) { baseTypeValidator = getTypeValidatorFromNS(typeURI, localpart); if (baseTypeValidator == null) { //TO DO: report error here; System.out.println("Could not find base type " +localpart + " in schema " + typeURI); } else{ baseIsSimpleSimple = true; } } } else { fullBaseName = typeURI+","+localpart; // assume the base is a complexType and try to locate the base type first baseTypeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fullBaseName); // if not found, 2 possibilities: 1: ComplexType in question has not been compiled yet; // 2: base is SimpleTYpe; if (baseTypeInfo == null) { baseTypeValidator = getDatatypeValidator(typeURI, localpart); if (baseTypeValidator == null) { baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_COMPLEXTYPE,localpart); if (baseTypeNode != null) { baseTypeSymbol = traverseComplexTypeDecl( baseTypeNode ); baseTypeInfo = 
(ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(baseTypeSymbol)); //REVISIT: should it be fullBaseName; } else { baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (baseTypeNode != null) { baseTypeSymbol = traverseSimpleTypeDecl( baseTypeNode ); simpleTypeValidator = baseTypeValidator = getDatatypeValidator(typeURI, localpart); if (simpleTypeValidator == null) { //TO DO: signal error here. } baseIsSimpleSimple = true; } else { // REVISIT: Localize reportGenericSchemaError("Base type could not be found : " + base); } } } else { simpleTypeValidator = baseTypeValidator; baseIsSimpleSimple = true; } } } //Schema Spec : 5.11: Complex Type Definition Properties Correct : 2 if (baseIsSimpleSimple && derivedByRestriction) { // REVISIT: Localize reportGenericSchemaError("base is a simpledType, can't derive by restriction in " + typeName); } //if the base is a complexType if (baseTypeInfo != null ) { //Schema Spec : 5.11: Derivation Valid ( Extension ) 1.1.1 // 5.11: Derivation Valid ( Restriction, Complex ) 1.2.1 if (derivedByRestriction) { //REVISIT: check base Type's finalset does not include "restriction" } else { //REVISIT: check base Type's finalset doest not include "extension" } if ( baseTypeInfo.contentSpecHandle > -1) { if (derivedByRestriction) { //REVISIT: !!! 
really hairy staff to check the particle derivation OK in 5.10 checkParticleDerivationOK(complexTypeDecl, baseTypeNode); } baseContentSpecHandle = baseTypeInfo.contentSpecHandle; } else if ( baseTypeInfo.datatypeValidator != null ) { baseTypeValidator = baseTypeInfo.datatypeValidator; baseIsComplexSimple = true; } } //Schema Spec : 5.11: Derivation Valid ( Extension ) 1.1.1 if (baseIsComplexSimple && !derivedByRestriction ) { // REVISIT: Localize reportGenericSchemaError("base is ComplexSimple, can't derive by extension in " + typeName); } } // END of if (base.length() > 0) {} // skip refinement and annotations child = null; if (baseIsComplexSimple) { contentSpecType = XMLElementDecl.TYPE_SIMPLE; int numEnumerationLiterals = 0; int numFacets = 0; Hashtable facetData = new Hashtable(); Vector enumData = new Vector(); //REVISIT: there is a better way to do this, for (child = XUtil.getFirstChildElement(complexTypeDecl); child != null && (child.getNodeName().equals(SchemaSymbols.ELT_MINEXCLUSIVE) || child.getNodeName().equals(SchemaSymbols.ELT_MININCLUSIVE) || child.getNodeName().equals(SchemaSymbols.ELT_MAXEXCLUSIVE) || child.getNodeName().equals(SchemaSymbols.ELT_MAXINCLUSIVE) || child.getNodeName().equals(SchemaSymbols.ELT_PRECISION) || child.getNodeName().equals(SchemaSymbols.ELT_SCALE) || child.getNodeName().equals(SchemaSymbols.ELT_LENGTH) || child.getNodeName().equals(SchemaSymbols.ELT_MINLENGTH) || child.getNodeName().equals(SchemaSymbols.ELT_MAXLENGTH) || child.getNodeName().equals(SchemaSymbols.ELT_ENCODING) || child.getNodeName().equals(SchemaSymbols.ELT_PERIOD) || child.getNodeName().equals(SchemaSymbols.ELT_DURATION) || child.getNodeName().equals(SchemaSymbols.ELT_ENUMERATION) || child.getNodeName().equals(SchemaSymbols.ELT_PATTERN) || child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION)); child = XUtil.getNextSiblingElement(child)) { if ( child.getNodeType() == Node.ELEMENT_NODE ) { Element facetElt = (Element) child; numFacets++; if 
(facetElt.getNodeName().equals(SchemaSymbols.ELT_ENUMERATION)) { numEnumerationLiterals++; enumData.addElement(facetElt.getAttribute(SchemaSymbols.ATT_VALUE)); //Enumerations can have annotations ? ( 0 | 1 ) Element enumContent = XUtil.getFirstChildElement( facetElt ); if( enumContent != null && enumContent.getNodeName().equals( SchemaSymbols.ELT_ANNOTATION ) ){ traverseAnnotationDecl( child ); } // TO DO: if Jeff check in new changes to TraverseSimpleType, copy them over } else { facetData.put(facetElt.getNodeName(),facetElt.getAttribute( SchemaSymbols.ATT_VALUE )); } } } if (numEnumerationLiterals > 0) { facetData.put(SchemaSymbols.ELT_ENUMERATION, enumData); } //if (numFacets > 0) // baseTypeValidator.setFacets(facetData, derivedBy ); if (numFacets > 0) { simpleTypeValidator = fDatatypeRegistry.createDatatypeValidator( typeName, baseTypeValidator, facetData, false ); } else simpleTypeValidator = baseTypeValidator; if (child != null) { // REVISIT: Localize reportGenericSchemaError("Invalid child '"+child.getNodeName()+"' in complexType : '" + typeName + "', because it restricts another complexSimpleType"); } } // if content = textonly, base is a datatype if (content.equals(SchemaSymbols.ATTVAL_TEXTONLY)) { //TO DO if (base.length() == 0) { simpleTypeValidator = baseTypeValidator = getDatatypeValidator("", SchemaSymbols.ATTVAL_STRING); } else if ( baseTypeValidator == null && baseTypeInfo != null && baseTypeInfo.datatypeValidator==null ) // must be datatype reportSchemaError(SchemaMessageProvider.NotADatatype, new Object [] { base }); //REVISIT check forward refs //handle datatypes contentSpecType = XMLElementDecl.TYPE_SIMPLE; /** * Traverses Schema attribute declaration. * * <attribute * form = qualified | unqualified * id = ID * name = NCName * ref = QName * type = QName * use = default | fixed | optional | prohibited | required * value = string> * Content: (annotation? , simpleType?) 
 * <attribute/>
     *
     * @param attributeDecl
     * @return -1 in all paths (the attribute is registered via side effects
     *         on fSchemaGrammar / fAttributeDeclRegistry)
     * @exception Exception
     */
    private int traverseAttributeDecl( Element attrDecl, ComplexTypeInfo typeInfo ) throws Exception {
        String attNameStr = attrDecl.getAttribute(SchemaSymbols.ATT_NAME);
        int attName = fStringPool.addSymbol(attNameStr);// attribute name
        String isQName = attrDecl.getAttribute(SchemaSymbols.ATT_FORM);//form attribute

        DatatypeValidator dv = null;
        // attribute type
        int attType = -1;
        boolean attIsList = false;
        int dataTypeSymbol = -1;

        String ref = attrDecl.getAttribute(SchemaSymbols.ATT_REF);
        String datatype = attrDecl.getAttribute(SchemaSymbols.ATT_TYPE);
        String localpart = null;

        // ref= form: resolve against a top-level attribute, possibly from
        // another schema, and delegate; ref and inline content are exclusive.
        if (!ref.equals("")) {
            if (XUtil.getFirstChildElement(attrDecl) != null)
                reportSchemaError(SchemaMessageProvider.NoContentForRef, null);
            String prefix = "";
            localpart = ref;
            int colonptr = ref.indexOf(":");
            if ( colonptr > 0) {
                prefix = ref.substring(0,colonptr);
                localpart = ref.substring(colonptr+1);
            }
            String uriStr = resolvePrefixToURI(prefix);
            if (!uriStr.equals(fTargetNSURIString)) {
                addAttributeDeclFromAnotherSchema(localpart, uriStr, typeInfo);
                return -1;
            }
            Element referredAttribute = getTopLevelComponentByName(SchemaSymbols.ELT_ATTRIBUTE,localpart);
            if (referredAttribute != null) {
                traverseAttributeDecl(referredAttribute, typeInfo);
            }
            else {
                if (fAttributeDeclRegistry.get(localpart) != null) {
                    addAttributeDeclFromAnotherSchema(localpart, uriStr, typeInfo);
                }
                else
                    // REVISIT: Localize
                    reportGenericSchemaError ( "Couldn't find top level attribute " + ref);
            }
            return -1;
        }

        if (datatype.equals("")) {
            // No type attribute: use an inline <simpleType> child if present,
            // otherwise default to string.
            Element child = XUtil.getFirstChildElement(attrDecl);
            while (child != null && !child.getNodeName().equals(SchemaSymbols.ELT_SIMPLETYPE))
                child = XUtil.getNextSiblingElement(child);
            if (child != null && child.getNodeName().equals(SchemaSymbols.ELT_SIMPLETYPE)) {
                attType = XMLAttributeDecl.TYPE_SIMPLE;
                dataTypeSymbol = traverseSimpleTypeDecl(child);
                localpart = fStringPool.toString(dataTypeSymbol);
            }
            else {
                attType = XMLAttributeDecl.TYPE_SIMPLE;
                localpart = "string";
                dataTypeSymbol = fStringPool.addSymbol(localpart);
            }
            localpart = fStringPool.toString(dataTypeSymbol);
            dv = fDatatypeRegistry.getDatatypeValidator(localpart);
        }
        else {
            String prefix = "";
            localpart = datatype;
            dataTypeSymbol = fStringPool.addSymbol(localpart);
            int colonptr = datatype.indexOf(":");
            if ( colonptr > 0) {
                prefix = datatype.substring(0,colonptr);
                localpart = datatype.substring(colonptr+1);
            }
            String typeURI = resolvePrefixToURI(prefix);

            if ( typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)
                 || typeURI.length()==0) {
                dv = getDatatypeValidator("", localpart);
                // Map the built-in schema types onto the DTD-style attribute
                // type codes; *S variants are list-valued.
                if (localpart.equals("ID")) {
                    attType = XMLAttributeDecl.TYPE_ID;
                } else if (localpart.equals("IDREF")) {
                    attType = XMLAttributeDecl.TYPE_IDREF;
                } else if (localpart.equals("IDREFS")) {
                    attType = XMLAttributeDecl.TYPE_IDREF;
                    attIsList = true;
                } else if (localpart.equals("ENTITY")) {
                    attType = XMLAttributeDecl.TYPE_ENTITY;
                } else if (localpart.equals("ENTITIES")) {
                    attType = XMLAttributeDecl.TYPE_ENTITY;
                    attIsList = true;
                } else if (localpart.equals("NMTOKEN")) {
                    attType = XMLAttributeDecl.TYPE_NMTOKEN;
                } else if (localpart.equals("NMTOKENS")) {
                    attType = XMLAttributeDecl.TYPE_NMTOKEN;
                    attIsList = true;
                } else if (localpart.equals(SchemaSymbols.ELT_NOTATION)) {
                    attType = XMLAttributeDecl.TYPE_NOTATION;
                } else {
                    attType = XMLAttributeDecl.TYPE_SIMPLE;
                    // Unknown local name in the no-namespace case: try to
                    // compile the named top-level simpleType lazily.
                    if (dv == null && typeURI.length() == 0) {
                        Element topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart);
                        if (topleveltype != null) {
                            traverseSimpleTypeDecl( topleveltype );
                            dv = getDatatypeValidator(typeURI, localpart);
                        }else {
                            // REVISIT: Localize
                            reportGenericSchemaError("simpleType not found : " + localpart);
                        }
                    }
                }
            } else {
                // check if the type is from the same Schema
                dv = getDatatypeValidator(typeURI, localpart);
                if (dv == null && typeURI.equals(fTargetNSURIString) ) {
                    Element topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart);
                    if (topleveltype != null) {
                        traverseSimpleTypeDecl( topleveltype );
                        dv = getDatatypeValidator(typeURI, localpart);
                    }else {
                        // REVISIT: Localize
                        reportGenericSchemaError("simpleType not found : " + localpart);
                    }
                }
                attType = XMLAttributeDecl.TYPE_SIMPLE;
            }
        }

        // attribute default type
        int attDefaultType = -1;
        int attDefaultValue = -1;

        String use = attrDecl.getAttribute(SchemaSymbols.ATT_USE);
        boolean required = use.equals(SchemaSymbols.ATTVAL_REQUIRED);

        if (dv == null) {
            // REVISIT: Localize
            reportGenericSchemaError("could not resolve the type or get a null validator for datatype : "
                                     + fStringPool.toString(dataTypeSymbol));
        }

        if (required) {
            attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_REQUIRED;
        } else {
            if (use.equals(SchemaSymbols.ATTVAL_FIXED)) {
                String fixed = attrDecl.getAttribute(SchemaSymbols.ATT_VALUE);
                if (!fixed.equals("")) {
                    attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_FIXED;
                    attDefaultValue = fStringPool.addString(fixed);
                }
            }
            else if (use.equals(SchemaSymbols.ATTVAL_DEFAULT)) {
                // attribute default value
                String defaultValue = attrDecl.getAttribute(SchemaSymbols.ATT_VALUE);
                if (!defaultValue.equals("")) {
                    attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_DEFAULT;
                    attDefaultValue = fStringPool.addString(defaultValue);
                }
            }
            else if (use.equals(SchemaSymbols.ATTVAL_PROHIBITED)) {
                //REVISIT, TO DO. !!!
                attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_IMPLIED;
                //attDefaultValue = fStringPool.addString("");
            }
            else {
                attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_IMPLIED;
            }

            // check default value is valid for the datatype.
            if (attType == XMLAttributeDecl.TYPE_SIMPLE && attDefaultValue != -1) {
                try {
                    if (dv != null)
                        //REVISIT
                        dv.validate(fStringPool.toString(attDefaultValue), null);
                    else
                        reportSchemaError(SchemaMessageProvider.NoValidatorFor,
                                          new Object [] { datatype });
                } catch (InvalidDatatypeValueException idve) {
                    reportSchemaError(SchemaMessageProvider.IncorrectDefaultType,
                                      new Object [] { attrDecl.getAttribute(SchemaSymbols.ATT_NAME), idve.getMessage() });
                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("Internal error in attribute datatype validation");
                }
            }
        }

        // Qualify the attribute name when form="qualified", the schema's
        // attributeFormDefault is qualified, or this is a top-level attribute.
        int uriIndex = -1;
        if ( isQName.equals(SchemaSymbols.ATTVAL_QUALIFIED)||
             fAttributeDefaultQualified || isTopLevel(attrDecl) ) {
            uriIndex = fTargetNSURI;
        }
        QName attQName = new QName(-1,attName,attName,uriIndex);
        if ( DEBUGGING )
            System.out.println(" the dataType Validator for " + fStringPool.toString(attName) + " is " + dv);

        //put the top-levels in the attribute decl registry.
        if (isTopLevel(attrDecl)) {
            fTempAttributeDecl.datatypeValidator = dv;
            fTempAttributeDecl.name.setValues(attQName);
            fTempAttributeDecl.type = attType;
            fTempAttributeDecl.defaultType = attDefaultType;
            fTempAttributeDecl.list = attIsList;
            if (attDefaultValue != -1 ) {
                fTempAttributeDecl.defaultValue = new String(fStringPool.toString(attDefaultValue));
            }
            fAttributeDeclRegistry.put(attNameStr, new XMLAttributeDecl(fTempAttributeDecl));
        }

        // add attribute to attr decl pool in fSchemaGrammar,
        if (typeInfo != null) {
            fSchemaGrammar.addAttDef( typeInfo.templateElementIndex,
                                      attQName, attType,
                                      dataTypeSymbol, attDefaultType,
                                      fStringPool.toString( attDefaultValue), dv, attIsList);
        }
        return -1;
    } // end of method traverseAttribute

    /**
     * Copies a named top-level attribute declaration out of the grammar
     * registered for another schema (uriStr) into typeInfo's attribute list.
     *
     * @return 0 on success, -1 if the grammar or attribute cannot be found
     */
    private int addAttributeDeclFromAnotherSchema( String name, String uriStr, ComplexTypeInfo typeInfo) throws Exception {
        SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr);
        if (uriStr == null || !
(aGrammar instanceof SchemaGrammar) ) {
            // REVISIT: Localize
            reportGenericSchemaError("!!Schema not found in #addAttributeDeclFromAnotherSchema, schema uri : " + uriStr);
            return -1;
        }

        Hashtable attrRegistry = aGrammar.getAttirubteDeclRegistry();
        if (attrRegistry == null) {
            // REVISIT: Localize
            reportGenericSchemaError("no attribute was defined in schema : " + uriStr);
            return -1;
        }

        XMLAttributeDecl tempAttrDecl = (XMLAttributeDecl) attrRegistry.get(name);

        if (tempAttrDecl == null) {
            // REVISIT: Localize
            reportGenericSchemaError( "no attribute named \"" + name
                                      + "\" was defined in schema : " + uriStr);
            return -1;
        }

        if (typeInfo!= null) {
            // Note: dataTypeSymbol is passed as -1 here; the validator carries
            // the type information for the foreign attribute.
            fSchemaGrammar.addAttDef( typeInfo.templateElementIndex,
                                      tempAttrDecl.name, tempAttrDecl.type,
                                      -1, tempAttrDecl.defaultType,
                                      tempAttrDecl.defaultValue, tempAttrDecl.datatypeValidator, tempAttrDecl.list);
        }

        return 0;
    }

    /*
     *
     * <attributeGroup
     *   id = ID
     *   name = NCName
     *   ref = QName>
     *   Content: (annotation?, (attribute|attributeGroup), anyAttribute?)
     * </>
     *
     */
    /**
     * Traverses an attributeGroup declaration, adding each contained
     * attribute (and nested attributeGroup) to typeInfo, and collecting any
     * anyAttribute wildcard into anyAttDecls.  A ref= form delegates to the
     * referenced top-level group, possibly from another schema.
     *
     * @return always -1; results are delivered via side effects
     */
    private int traverseAttributeGroupDecl( Element attrGrpDecl, ComplexTypeInfo typeInfo, Vector anyAttDecls ) throws Exception {
        // attribute name
        int attGrpName = fStringPool.addSymbol(attrGrpDecl.getAttribute(SchemaSymbols.ATT_NAME));

        String ref = attrGrpDecl.getAttribute(SchemaSymbols.ATT_REF);

        // attribute type
        int attType = -1;
        int enumeration = -1;

        if (!ref.equals("")) {
            if (XUtil.getFirstChildElement(attrGrpDecl) != null)
                reportSchemaError(SchemaMessageProvider.NoContentForRef, null);
            String prefix = "";
            String localpart = ref;
            int colonptr = ref.indexOf(":");
            if ( colonptr > 0) {
                prefix = ref.substring(0,colonptr);
                localpart = ref.substring(colonptr+1);
            }
            String uriStr = resolvePrefixToURI(prefix);
            if (!uriStr.equals(fTargetNSURIString)) {
                traverseAttributeGroupDeclFromAnotherSchema(localpart, uriStr, typeInfo, anyAttDecls);
                return -1;
                // TO DO
                // REVISIST: different NS, not supported yet.
                // REVISIT: Localize
                //reportGenericSchemaError("Feature not supported: see an attribute from different NS");
            }
            Element referredAttrGrp = getTopLevelComponentByName(SchemaSymbols.ELT_ATTRIBUTEGROUP,localpart);
            if (referredAttrGrp != null) {
                traverseAttributeGroupDecl(referredAttrGrp, typeInfo, anyAttDecls);
            }
            else {
                // REVISIT: Localize
                reportGenericSchemaError ( "Couldn't find top level attributegroup " + ref);
            }
            return -1;
        }

        for ( Element child = XUtil.getFirstChildElement(attrGrpDecl);
              child != null ; child = XUtil.getNextSiblingElement(child)) {
            if ( child.getNodeName().equals(SchemaSymbols.ELT_ATTRIBUTE) ){
                traverseAttributeDecl(child, typeInfo);
            }
            else if ( child.getNodeName().equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) ) {
                traverseAttributeGroupDecl(child, typeInfo,anyAttDecls);
            }
            else if ( child.getNodeName().equals(SchemaSymbols.ELT_ANYATTRIBUTE) ) {
                // anyAttribute is last in the content model, so stop here.
                anyAttDecls.addElement(traverseAnyAttribute(child));
                break;
            }
            else if (child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION) ) {
                // REVISIT: what about appInfo
            }
        }
        return -1;
    } // end of method traverseAttributeGroup

    /**
     * Traverses a named top-level attributeGroup defined in another schema's
     * grammar: temporarily switches fTargetNSURI / fNamespacesScope to the
     * foreign schema's, walks the group's children, then restores both.
     *
     * @return always -1; results are delivered via side effects
     */
    private int traverseAttributeGroupDeclFromAnotherSchema( String attGrpName , String uriStr,
                                                             ComplexTypeInfo typeInfo,
                                                             Vector anyAttDecls ) throws Exception {
        SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr);
        if (uriStr == null || aGrammar == null || ! (aGrammar instanceof SchemaGrammar) ) {
            // REVISIT: Localize
            reportGenericSchemaError("!!Schema not found in #traverseAttributeGroupDeclFromAnotherSchema, schema uri : " + uriStr);
            return -1;
        }
        // attribute name
        Element attGrpDecl = (Element) aGrammar.topLevelAttrGrpDecls.get((Object)attGrpName);
        if (attGrpDecl == null) {
            // REVISIT: Localize
            reportGenericSchemaError( "no attribute group named \"" + attGrpName
                                      + "\" was defined in schema : " + uriStr);
            return -1;
        }

        NamespacesScope saveNSMapping = fNamespacesScope;
        int saveTargetNSUri = fTargetNSURI;
        fTargetNSURI = fStringPool.addSymbol(aGrammar.getTargetNamespaceURI());
        fNamespacesScope = aGrammar.getNamespacesScope();

        // attribute type
        int attType = -1;
        int enumeration = -1;

        for ( Element child = XUtil.getFirstChildElement(attGrpDecl);
              child != null ; child = XUtil.getNextSiblingElement(child)) {

            //child attribute couldn't be a top-level attribute DEFINITION,
            if ( child.getNodeName().equals(SchemaSymbols.ELT_ATTRIBUTE) ){
                String childAttName = child.getAttribute(SchemaSymbols.ATT_NAME);
                if ( childAttName.length() > 0 ) {
                    Hashtable attDeclRegistry = aGrammar.getAttirubteDeclRegistry();
                    if (attDeclRegistry != null) {
                        if (attDeclRegistry.get((Object)childAttName) != null ){
                            // NOTE(review): returns after the first registered
                            // named attribute, skipping the group's remaining
                            // children — confirm this is intentional.
                            addAttributeDeclFromAnotherSchema(childAttName, uriStr, typeInfo);
                            return -1;
                        }
                    }
                }
                else
                    traverseAttributeDecl(child, typeInfo);
            }
            else if ( child.getNodeName().equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) ) {
                traverseAttributeGroupDecl(child, typeInfo, anyAttDecls);
            }
            else if ( child.getNodeName().equals(SchemaSymbols.ELT_ANYATTRIBUTE) ) {
                anyAttDecls.addElement(traverseAnyAttribute(child));
                break;
            }
            else if (child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION) ) {
                // REVISIT: what about appInfo
            }
        }
        fNamespacesScope = saveNSMapping;
        fTargetNSURI = saveTargetNSUri;
        return -1;
    } // end of method traverseAttributeGroupFromAnotherSchema

    /**
     * Traverse element declaration:
     *  <element
     *         abstract = boolean
     *         block = #all or (possibly empty) subset of
{equivClass, extension, restriction} * default = string * equivClass = QName * final = #all or (possibly empty) subset of {extension, restriction} * fixed = string * form = qualified | unqualified * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * nullable = boolean * ref = QName * type = QName> * Content: (annotation? , (simpleType | complexType)? , (unique | key | keyref)*) * </element> * * * The following are identity-constraint definitions * <unique * id = ID * name = NCName> * Content: (annotation? , (selector , field+)) * </unique> * * <key * id = ID * name = NCName> * Content: (annotation? , (selector , field+)) * </key> * * <keyref * id = ID * name = NCName * refer = QName> * Content: (annotation? , (selector , field+)) * </keyref> * * <selector> * Content: XPathExprApprox : An XPath expression * </selector> * * <field> * Content: XPathExprApprox : An XPath expression * </field> * * * @param elementDecl * @return * @exception Exception */ private QName traverseElementDecl(Element elementDecl) throws Exception { int contentSpecType = -1; int contentSpecNodeIndex = -1; int typeNameIndex = -1; int scopeDefined = -2; //signal a error if -2 gets gets through //cause scope can never be -2. 
DatatypeValidator dv = null; String name = elementDecl.getAttribute(SchemaSymbols.ATT_NAME); if ( DEBUGGING ) System.out.println("traversing element decl : " + name ); String ref = elementDecl.getAttribute(SchemaSymbols.ATT_REF); String type = elementDecl.getAttribute(SchemaSymbols.ATT_TYPE); String minOccurs = elementDecl.getAttribute(SchemaSymbols.ATT_MINOCCURS); String maxOccurs = elementDecl.getAttribute(SchemaSymbols.ATT_MAXOCCURS); String dflt = elementDecl.getAttribute(SchemaSymbols.ATT_DEFAULT); String fixed = elementDecl.getAttribute(SchemaSymbols.ATT_FIXED); String equivClass = elementDecl.getAttribute(SchemaSymbols.ATT_EQUIVCLASS); // form attribute String isQName = elementDecl.getAttribute(SchemaSymbols.ATT_FORM); String fromAnotherSchema = null; if (isTopLevel(elementDecl)) { int nameIndex = fStringPool.addSymbol(name); int eltKey = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, nameIndex,TOP_LEVEL_SCOPE); if (eltKey > -1 ) { return new QName(-1,nameIndex,nameIndex,fTargetNSURI); } } // parse out 'block', 'final', 'nullable', 'abstract' int blockSet = parseBlockSet(elementDecl.getAttribute(SchemaSymbols.ATT_BLOCK)); int finalSet = parseFinalSet(elementDecl.getAttribute(SchemaSymbols.ATT_FINAL)); boolean isNullable = elementDecl.getAttribute (SchemaSymbols.ATT_NULLABLE).equals(SchemaSymbols.ATTVAL_TRUE)? true:false; boolean isAbstract = elementDecl.getAttribute (SchemaSymbols.ATT_ABSTRACT).equals(SchemaSymbols.ATTVAL_TRUE)? 
true:false; int elementMiscFlags = 0; if (isNullable) { elementMiscFlags += SchemaSymbols.NULLABLE; } if (isAbstract) { elementMiscFlags += SchemaSymbols.ABSTRACT; } //if this is a reference to a global element int attrCount = 0; if (!ref.equals("")) attrCount++; if (!type.equals("")) attrCount++; //REVISIT top level check for ref & archref if (attrCount > 1) reportSchemaError(SchemaMessageProvider.OneOfTypeRefArchRef, null); if (!ref.equals("")) { if (XUtil.getFirstChildElement(elementDecl) != null) reportSchemaError(SchemaMessageProvider.NoContentForRef, null); String prefix = ""; String localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } int localpartIndex = fStringPool.addSymbol(localpart); String uriString = resolvePrefixToURI(prefix); QName eltName = new QName(prefix != null ? fStringPool.addSymbol(prefix) : -1, localpartIndex, fStringPool.addSymbol(ref), uriString != null ? fStringPool.addSymbol(uriString) : -1); //if from another schema, just return the element QName if (! uriString.equals(fTargetNSURIString) ) { return eltName; } int elementIndex = fSchemaGrammar.getElementDeclIndex(eltName, TOP_LEVEL_SCOPE); //if not found, traverse the top level element that if referenced if (elementIndex == -1 ) { Element targetElement = getTopLevelComponentByName(SchemaSymbols.ELT_ELEMENT,localpart); if (targetElement == null ) { // REVISIT: Localize reportGenericSchemaError("Element " + localpart + " not found in the Schema"); //REVISIT, for now, the QName anyway return eltName; //return new QName(-1,fStringPool.addSymbol(localpart), -1, fStringPool.addSymbol(uriString)); } else { // do nothing here, other wise would cause infinite loop for // <element name="recur"><complexType><element ref="recur"> ... 
//eltName= traverseElementDecl(targetElement); } } return eltName; } // Handle the equivClass Element equivClassElementDecl = null; int equivClassElementDeclIndex = -1; boolean noErrorSoFar = true; String equivClassUri = null; String equivClassLocalpart = null; String equivClassFullName = null; ComplexTypeInfo equivClassEltTypeInfo = null; DatatypeValidator equivClassEltDV = null; if ( equivClass.length() > 0 ) { equivClassUri = resolvePrefixToURI(getPrefix(equivClass)); equivClassLocalpart = getLocalPart(equivClass); equivClassFullName = equivClassUri+","+equivClassLocalpart; if ( !equivClassUri.equals(fTargetNSURIString) ) { equivClassEltTypeInfo = getElementDeclTypeInfoFromNS(equivClassUri, equivClassLocalpart); if (equivClassEltTypeInfo == null) { equivClassEltDV = getElementDeclTypeValidatorFromNS(equivClassUri, equivClassLocalpart); if (equivClassEltDV == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type for element '" +equivClassLocalpart + "' in schema '" + equivClassUri+"'"); } } } else { equivClassElementDecl = getTopLevelComponentByName(SchemaSymbols.ELT_ELEMENT, equivClassLocalpart); if (equivClassElementDecl == null) { equivClassElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, getLocalPartIndex(equivClass),TOP_LEVEL_SCOPE); if ( equivClassElementDeclIndex == -1) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("Equivclass affiliation element " +equivClass +" in element declaration " +name); } } else { equivClassElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, getLocalPartIndex(equivClass),TOP_LEVEL_SCOPE); if ( equivClassElementDeclIndex == -1) { traverseElementDecl(equivClassElementDecl); equivClassElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, getLocalPartIndex(equivClass),TOP_LEVEL_SCOPE); } } if (equivClassElementDeclIndex != -1) { equivClassEltTypeInfo = fSchemaGrammar.getElementComplexTypeInfo( 
equivClassElementDeclIndex ); if (equivClassEltTypeInfo == null) { fSchemaGrammar.getElementDecl(equivClassElementDeclIndex, fTempElementDecl); equivClassEltDV = fTempElementDecl.datatypeValidator; if (equivClassEltDV == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type for element '" +equivClassLocalpart + "' in schema '" + equivClassUri+"'"); } } } } } // resolving the type for this element right here ComplexTypeInfo typeInfo = null; // element has a single child element, either a datatype or a type, null if primitive Element child = XUtil.getFirstChildElement(elementDecl); while (child != null && child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); boolean haveAnonType = false; // Handle Anonymous type if there is one if (child != null) { String childName = child.getNodeName(); if (childName.equals(SchemaSymbols.ELT_COMPLEXTYPE)) { if (child.getAttribute(SchemaSymbols.ATT_NAME).length() > 0) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("anonymous complexType in element '" + name +"' has a name attribute"); } else typeNameIndex = traverseComplexTypeDecl(child); if (typeNameIndex != -1 ) { typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(typeNameIndex)); } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("traverse complexType error in element '" + name +"'"); } haveAnonType = true; } else if (childName.equals(SchemaSymbols.ELT_SIMPLETYPE)) { // TO DO: the Default and fixed attribute handling should be here. 
if (child.getAttribute(SchemaSymbols.ATT_NAME).length() > 0) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("anonymous simpleType in element '" + name +"' has a name attribute"); } else typeNameIndex = traverseSimpleTypeDecl(child); if (typeNameIndex != -1) { dv = fDatatypeRegistry.getDatatypeValidator(fStringPool.toString(typeNameIndex)); } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("traverse simpleType error in element '" + name +"'"); } contentSpecType = XMLElementDecl.TYPE_SIMPLE; haveAnonType = true; } else if (type.equals("")) { // "ur-typed" leaf contentSpecType = XMLElementDecl.TYPE_ANY; //REVISIT: is this right? //contentSpecType = fStringPool.addSymbol("UR_TYPE"); // set occurrence count contentSpecNodeIndex = -1; } else { System.out.println("unhandled case in TraverseElementDecl"); } } // handle type="" here if (haveAnonType && (type.length()>0)) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError( "Element '"+ name + "' have both a type attribute and a annoymous type child" ); } // type specified as an attribute and no child is type decl. else if (!type.equals("")) { if (equivClassElementDecl != null) { checkEquivClassOK(elementDecl, equivClassElementDecl); } String prefix = ""; String localpart = type; int colonptr = type.indexOf(":"); if ( colonptr > 0) { prefix = type.substring(0,colonptr); localpart = type.substring(colonptr+1); } String typeURI = resolvePrefixToURI(prefix); // check if the type is from the same Schema if ( !typeURI.equals(fTargetNSURIString) && !typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && typeURI.length() != 0) { // REVISIT, only needed because of resolvePrifixToURI. 
fromAnotherSchema = typeURI; typeInfo = getTypeInfoFromNS(typeURI, localpart); if (typeInfo == null) { dv = getTypeValidatorFromNS(typeURI, localpart); if (dv == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type " +localpart + " in schema " + typeURI); } } } else { typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(typeURI+","+localpart); if (typeInfo == null) { dv = getDatatypeValidator(typeURI, localpart); if (dv == null ) if (typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && !fTargetNSURIString.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("type not found : " + typeURI+":"+localpart); } else { Element topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_COMPLEXTYPE,localpart); if (topleveltype != null) { if (fCurrentTypeNameStack.search((Object)localpart) > - 1) { //then we found a recursive element using complexType. // REVISIT: this will be broken when recursing happens between 2 schemas int uriInd = -1; if ( isQName.equals(SchemaSymbols.ATTVAL_QUALIFIED)|| fElementDefaultQualified) { uriInd = fTargetNSURI; } int nameIndex = fStringPool.addSymbol(name); QName tempQName = new QName(fCurrentScope, nameIndex, nameIndex, uriInd); fElementRecurseComplex.put(tempQName, localpart); return new QName(-1, nameIndex, nameIndex, uriInd); } else { typeNameIndex = traverseComplexTypeDecl( topleveltype ); typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(typeNameIndex)); } } else { topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (topleveltype != null) { typeNameIndex = traverseSimpleTypeDecl( topleveltype ); dv = getDatatypeValidator(typeURI, localpart); // TO DO: the Default and fixed attribute handling should be here. 
} else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("type not found : " + typeURI+":"+localpart); } } } } } } else if (haveAnonType){ if (equivClassElementDecl != null ) { checkEquivClassOK(elementDecl, equivClassElementDecl); } } // this element is ur-type, check its equivClass afficliation. else { // if there is equivClass affiliation and not type defintion found for this element, // then grab equivClass affiliation's type and give it to this element if ( typeInfo == null && dv == null ) typeInfo = equivClassEltTypeInfo; if ( typeInfo == null && dv == null ) dv = equivClassEltDV; } if (typeInfo == null && dv==null) { if (noErrorSoFar) { // Actually this Element's type definition is ur-type; contentSpecType = XMLElementDecl.TYPE_ANY; // REVISIT, need to wait till we have wildcards implementation. // ADD attribute wildcards here } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError ("untyped element : " + name ); } } // if element belongs to a compelx type if (typeInfo!=null) { contentSpecNodeIndex = typeInfo.contentSpecHandle; contentSpecType = typeInfo.contentType; scopeDefined = typeInfo.scopeDefined; dv = typeInfo.datatypeValidator; } // if element belongs to a simple type if (dv!=null) { contentSpecType = XMLElementDecl.TYPE_SIMPLE; if (typeInfo == null) { fromAnotherSchema = null; // not to switch schema in this case } } // key/keyref/unique processing\ child = XUtil.getFirstChildElement(elementDecl); Vector idConstraints = null; while (child != null){ String childName = child.getNodeName(); /** * Traverse attributeGroup Declaration * * <attributeGroup * id = ID * ref = QName> * Content: (annotation?) 
* </> * * @param elementDecl * @exception Exception */ /*private int traverseAttributeGroupDecl( Element attributeGroupDecl ) throws Exception { int attributeGroupID = fStringPool.addSymbol( attributeGroupDecl.getAttribute( SchemaSymbols.ATTVAL_ID )); int attributeGroupName = fStringPool.addSymbol( attributeGroupDecl.getAttribute( SchemaSymbols.ATT_NAME )); return -1; }*/ /** * Traverse Group Declaration. * * <group * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * ref = QName> * Content: (annotation? , (element | group | all | choice | sequence | any)*) * <group/> * * @param elementDecl * @return * @exception Exception */ private int traverseGroupDecl( Element groupDecl ) throws Exception { String groupName = groupDecl.getAttribute(SchemaSymbols.ATT_NAME); String ref = groupDecl.getAttribute(SchemaSymbols.ATT_REF); if (!ref.equals("")) { if (XUtil.getFirstChildElement(groupDecl) != null) reportSchemaError(SchemaMessageProvider.NoContentForRef, null); String prefix = ""; String localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } int localpartIndex = fStringPool.addSymbol(localpart); String uriStr = resolvePrefixToURI(prefix); if (!uriStr.equals(fTargetNSURIString)) { return traverseGroupDeclFromAnotherSchema(localpart, uriStr); } int contentSpecIndex = -1; Element referredGroup = getTopLevelComponentByName(SchemaSymbols.ELT_GROUP,localpart); if (referredGroup == null) { // REVISIT: Localize reportGenericSchemaError("Group " + localpart + " not found in the Schema"); //REVISIT, this should be some custom Exception throw new Exception("Group " + localpart + " not found in the Schema"); } else { contentSpecIndex = traverseGroupDecl(referredGroup); } return contentSpecIndex; } boolean traverseElt = true; if (fCurrentScope == TOP_LEVEL_SCOPE) { traverseElt = false; } Element child = XUtil.getFirstChildElement(groupDecl); while (child != null && 
child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); int contentSpecType = 0; int csnType = 0; int allChildren[] = null; int allChildCount = 0; csnType = XMLContentSpec.CONTENTSPECNODE_SEQ; contentSpecType = XMLElementDecl.TYPE_CHILDREN; int left = -2; int right = -2; boolean hadContent = false; boolean seeAll = false; boolean seeParticle = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; hadContent = true; boolean illegalChild = false; String childName = child.getNodeName(); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = traverseGroupDecl(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ALL)) { index = traverseAll(child); //seeParticle = true; seeAll = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { illegalChild = true; reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if ( ! 
illegalChild ) { index = expandContentModel( index, child); } if (seeParticle && seeAll) { reportSchemaError( SchemaMessageProvider.GroupContentRestricted, new Object [] { "'all' needs to be 'the' only Child", childName}); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent && right != -2) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); return left; } private int traverseGroupDeclFromAnotherSchema( String groupName , String uriStr ) throws Exception { SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr); if (uriStr == null || aGrammar==null ||! (aGrammar instanceof SchemaGrammar) ) { // REVISIT: Localize reportGenericSchemaError("!!Schema not found in #traverseGroupDeclFromAnotherSchema, "+ "schema uri: " + uriStr +", groupName: " + groupName); return -1; } Element groupDecl = (Element) aGrammar.topLevelGroupDecls.get((Object)groupName); if (groupDecl == null) { // REVISIT: Localize reportGenericSchemaError( "no group named \"" + groupName + "\" was defined in schema : " + uriStr); return -1; } NamespacesScope saveNSMapping = fNamespacesScope; int saveTargetNSUri = fTargetNSURI; fTargetNSURI = fStringPool.addSymbol(aGrammar.getTargetNamespaceURI()); fNamespacesScope = aGrammar.getNamespacesScope(); boolean traverseElt = true; if (fCurrentScope == TOP_LEVEL_SCOPE) { traverseElt = false; } Element child = XUtil.getFirstChildElement(groupDecl); while (child != null && child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); int contentSpecType = 0; int csnType = 0; int allChildren[] = null; int allChildCount = 0; csnType = XMLContentSpec.CONTENTSPECNODE_SEQ; contentSpecType = XMLElementDecl.TYPE_CHILDREN; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = 
-2; hadContent = true; boolean seeParticle = false; String childName = child.getNodeName(); int childNameIndex = fStringPool.addSymbol(childName); String formAttrVal = child.getAttribute(SchemaSymbols.ATT_FORM); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = traverseGroupDecl(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ALL)) { index = traverseAll(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if (seeParticle) { index = expandContentModel( index, child); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent && right != -2) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); fNamespacesScope = saveNSMapping; fTargetNSURI = saveTargetNSUri; return left; } // end of method traverseGroupDeclFromAnotherSchema /** * * Traverse the Sequence declaration * * <sequence * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? 
, (element | group | choice | sequence | any)*) * </sequence> * **/ int traverseSequence (Element sequenceDecl) throws Exception { Element child = XUtil.getFirstChildElement(sequenceDecl); while (child != null && child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); int contentSpecType = 0; int csnType = 0; csnType = XMLContentSpec.CONTENTSPECNODE_SEQ; contentSpecType = XMLElementDecl.TYPE_CHILDREN; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; hadContent = true; boolean seeParticle = false; String childName = child.getNodeName(); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = traverseGroupDecl(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if (seeParticle) { index = expandContentModel( index, child); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent && right != -2) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); return left; } /** * * Traverse the Sequence declaration * * <choice * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? 
, (element | group | choice | sequence | any)*) * </choice> * **/ int traverseChoice (Element choiceDecl) throws Exception { // REVISIT: traverseChoice, traverseSequence can be combined Element child = XUtil.getFirstChildElement(choiceDecl); while (child != null && child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); int contentSpecType = 0; int csnType = 0; csnType = XMLContentSpec.CONTENTSPECNODE_CHOICE; contentSpecType = XMLElementDecl.TYPE_CHILDREN; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; hadContent = true; boolean seeParticle = false; String childName = child.getNodeName(); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = traverseGroupDecl(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if (seeParticle) { index = expandContentModel( index, child); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent && right != -2) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); return left; } /** * * Traverse the "All" declaration * * <all * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: 
(annotation? , (element | group | choice | sequence | any)*)
 * </all>
 **/
int traverseAll( Element allDecl) throws Exception {

    // Skip leading <annotation> children; they do not contribute particles.
    Element child = XUtil.getFirstChildElement(allDecl);
    while (child != null &&
           child.getNodeName().equals(SchemaSymbols.ELT_ANNOTATION)) {
        child = XUtil.getNextSiblingElement(child);
    }

    // Collect the content-spec index of every particle; the array grows
    // on demand. Stays null when <all> has no particle children, which
    // buildAllModel(null, 0) tolerates (returns -1 without indexing).
    int allChildren[] = null;
    int allChildCount = 0;
    int left = -2;

    for (; child != null; child = XUtil.getNextSiblingElement(child)) {
        int index = -2;
        boolean seeParticle = false;
        String childName = child.getNodeName();

        if (childName.equals(SchemaSymbols.ELT_ELEMENT)) {
            QName eltQName = traverseElementDecl(child);
            index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF,
                                                       eltQName.localpart,
                                                       eltQName.uri,
                                                       false);
            seeParticle = true;
        }
        else if (childName.equals(SchemaSymbols.ELT_GROUP)) {
            index = traverseGroupDecl(child);
            seeParticle = true;
        }
        else if (childName.equals(SchemaSymbols.ELT_CHOICE)) {
            index = traverseChoice(child);
            seeParticle = true;
        }
        else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) {
            index = traverseSequence(child);
            seeParticle = true;
        }
        else if (childName.equals(SchemaSymbols.ELT_ANY)) {
            index = traverseAny(child);
            seeParticle = true;
        }
        else {
            reportSchemaError(SchemaMessageProvider.GroupContentRestricted,
                              new Object [] { "group", childName });
        }

        // minOccurs/maxOccurs expansion applies only to recognized particles.
        if (seeParticle) {
            index = expandContentModel( index, child);
        }

        // BUG FIX: the original grew the array via a catch of
        // ArrayIndexOutOfBoundsException that copied into newArray but never
        // assigned it back to allChildren, then indexed the old (full) array
        // again — rethrowing AIOOBE uncaught past the 32nd particle.
        // Grow explicitly instead of using exceptions for control flow.
        if (allChildren == null) {
            allChildren = new int[32];
        }
        else if (allChildCount == allChildren.length) {
            int[] newArray = new int[allChildren.length * 2];
            System.arraycopy(allChildren, 0, newArray, 0, allChildren.length);
            allChildren = newArray;
        }
        allChildren[allChildCount] = index;
        allChildCount++;
    }

    left = buildAllModel(allChildren,allChildCount);
    return left;
}

/** builds the all content model */
private int buildAllModel(int children[], int count) throws Exception {
    // build all model
    if (count > 1) {
        // create and initialize singletons
        XMLContentSpec choice =
new XMLContentSpec(); choice.type = XMLContentSpec.CONTENTSPECNODE_CHOICE; choice.value = -1; choice.otherValue = -1; int[] exactChildren = new int[count]; System.arraycopy(children,0,exactChildren,0,count); // build all model sort(exactChildren, 0, count); int index = buildAllModel(exactChildren, 0, choice); return index; } if (count > 0) { return children[0]; } return -1; } /** Builds the all model. */ private int buildAllModel(int src[], int offset, XMLContentSpec choice) throws Exception { // swap last two places if (src.length - offset == 2) { int seqIndex = createSeq(src); if (choice.value == -1) { choice.value = seqIndex; } else { if (choice.otherValue != -1) { choice.value = fSchemaGrammar.addContentSpecNode(choice.type, choice.value, choice.otherValue, false); } choice.otherValue = seqIndex; } swap(src, offset, offset + 1); seqIndex = createSeq(src); if (choice.value == -1) { choice.value = seqIndex; } else { if (choice.otherValue != -1) { choice.value = fSchemaGrammar.addContentSpecNode(choice.type, choice.value, choice.otherValue, false); } choice.otherValue = seqIndex; } return fSchemaGrammar.addContentSpecNode(choice.type, choice.value, choice.otherValue, false); } // recurse for (int i = offset; i < src.length - 1; i++) { choice.value = buildAllModel(src, offset + 1, choice); choice.otherValue = -1; sort(src, offset, src.length - offset); shift(src, offset, i + 1); } int choiceIndex = buildAllModel(src, offset + 1, choice); sort(src, offset, src.length - offset); return choiceIndex; } // buildAllModel(int[],int,ContentSpecNode,ContentSpecNode):int /** Creates a sequence. 
*/ private int createSeq(int src[]) throws Exception { int left = src[0]; int right = src[1]; for (int i = 2; i < src.length; i++) { left = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ, left, right, false); right = src[i]; } return fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ, left, right, false); } // createSeq(int[]):int /** Shifts a value into position. */ private void shift(int src[], int pos, int offset) { int temp = src[offset]; for (int i = offset; i > pos; i src[i] = src[i - 1]; } src[pos] = temp; } // shift(int[],int,int) /** Simple sort. */ private void sort(int src[], final int offset, final int length) { for (int i = offset; i < offset + length - 1; i++) { int lowest = i; for (int j = i + 1; j < offset + length; j++) { if (src[j] < src[lowest]) { lowest = j; } } if (lowest != i) { int temp = src[i]; src[i] = src[lowest]; src[lowest] = temp; } } } // sort(int[],int,int) /** Swaps two values. */ private void swap(int src[], int i, int j) { int temp = src[i]; src[i] = src[j]; src[j] = temp; } // swap(int[],int,int) /** * Traverse Wildcard declaration * * <any * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace} * processContents = lax | skip | strict> * Content: (annotation?) 
* </any> * @param elementDecl * @return * @exception Exception */ private int traverseWildcardDecl( Element wildcardDecl ) throws Exception { int wildcardID = fStringPool.addSymbol( wildcardDecl.getAttribute( SchemaSymbols.ATTVAL_ID )); int wildcardMaxOccurs = fStringPool.addSymbol( wildcardDecl.getAttribute( SchemaSymbols.ATT_MAXOCCURS )); int wildcardMinOccurs = fStringPool.addSymbol( wildcardDecl.getAttribute( SchemaSymbols.ATT_MINOCCURS )); int wildcardNamespace = fStringPool.addSymbol( wildcardDecl.getAttribute( SchemaSymbols.ATT_NAMESPACE )); int wildcardProcessContents = fStringPool.addSymbol( wildcardDecl.getAttribute( SchemaSymbols.ATT_PROCESSCONTENTS )); int wildcardContent = fStringPool.addSymbol( wildcardDecl.getAttribute( SchemaSymbols.ATT_CONTENT )); return -1; } // utilities from Tom Watson's SchemaParser class // TO DO: Need to make this more conformant with Schema int type parsing private int parseInt (String intString) throws Exception { if ( intString.equals("*") ) { return SchemaSymbols.INFINITY; } else { return Integer.parseInt (intString); } } private int parseSimpleDerivedBy (String derivedByString) throws Exception { if ( derivedByString.equals (SchemaSymbols.ATTVAL_LIST) ) { return SchemaSymbols.LIST; } else if ( derivedByString.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { return SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ("SimpleType: Invalid value for 'derivedBy'"); return -1; } } private int parseComplexDerivedBy (String derivedByString) throws Exception { if ( derivedByString.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { return SchemaSymbols.EXTENSION; } else if ( derivedByString.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { return SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "ComplexType: Invalid value for 'derivedBy'" ); return -1; } } private int parseSimpleFinal (String finalString) throws Exception { if ( finalString.equals (SchemaSymbols.ATTVAL_POUNDALL) 
) { return SchemaSymbols.ENUMERATION+SchemaSymbols.RESTRICTION+SchemaSymbols.LIST+SchemaSymbols.REPRODUCTION; } else { int enumerate = 0; int restrict = 0; int list = 0; int reproduce = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ("restriction in set twice"); } } else if ( token.equals (SchemaSymbols.ATTVAL_LIST) ) { if ( list == 0 ) { list = SchemaSymbols.LIST; } else { // REVISIT: Localize reportGenericSchemaError ("list in set twice"); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid value (" + finalString + ")" ); } } return enumerate+restrict+list+reproduce; } } private int parseComplexContent (String contentString) throws Exception { if ( contentString.equals (SchemaSymbols.ATTVAL_EMPTY) ) { return XMLElementDecl.TYPE_EMPTY; } else if ( contentString.equals (SchemaSymbols.ATTVAL_ELEMENTONLY) ) { return XMLElementDecl.TYPE_CHILDREN; } else if ( contentString.equals (SchemaSymbols.ATTVAL_TEXTONLY) ) { return XMLElementDecl.TYPE_SIMPLE; } else if ( contentString.equals (SchemaSymbols.ATTVAL_MIXED) ) { return XMLElementDecl.TYPE_MIXED; } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid value for content" ); return -1; } } private int parseDerivationSet (String finalString) throws Exception { if ( finalString.equals ("#all") ) { return SchemaSymbols.EXTENSION+SchemaSymbols.RESTRICTION+SchemaSymbols.REPRODUCTION; } else { int extend = 0; int restrict = 0; int reproduce = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "extension already in set" ); } 
} else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "restriction already in set" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid final value (" + finalString + ")" ); } } return extend+restrict+reproduce; } } private int parseBlockSet (String finalString) throws Exception { if ( finalString.equals ("#all") ) { return SchemaSymbols.EQUIVCLASS+SchemaSymbols.EXTENSION+SchemaSymbols.LIST+SchemaSymbols.RESTRICTION+SchemaSymbols.REPRODUCTION; } else { int extend = 0; int restrict = 0; int reproduce = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_EQUIVCLASS) ) { if ( extend == 0 ) { extend = SchemaSymbols.EQUIVCLASS; } else { // REVISIT: Localize reportGenericSchemaError ( "'equivClass' already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "extension already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_LIST) ) { if ( extend == 0 ) { extend = SchemaSymbols.LIST; } else { // REVISIT: Localize reportGenericSchemaError ( "'list' already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "restriction already in set" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid final value (" + finalString + ")" ); } } return extend+restrict+reproduce; } } private int parseFinalSet (String finalString) throws Exception { if ( finalString.equals ("#all") ) { return SchemaSymbols.EQUIVCLASS+SchemaSymbols.EXTENSION+SchemaSymbols.LIST+SchemaSymbols.RESTRICTION+SchemaSymbols.REPRODUCTION; } else { int extend = 0; 
int restrict = 0; int reproduce = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_EQUIVCLASS) ) { if ( extend == 0 ) { extend = SchemaSymbols.EQUIVCLASS; } else { // REVISIT: Localize reportGenericSchemaError ( "'equivClass' already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "extension already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_LIST) ) { if ( extend == 0 ) { extend = SchemaSymbols.LIST; } else { // REVISIT: Localize reportGenericSchemaError ( "'list' already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "restriction already in set" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid final value (" + finalString + ")" ); } } return extend+restrict+reproduce; } } private void reportGenericSchemaError (String error) throws Exception { if (fErrorReporter == null) { System.err.println("__TraverseSchemaError__ : " + error); } else { reportSchemaError (SchemaMessageProvider.GenericError, new Object[] { error }); } } private void reportSchemaError(int major, Object args[]) throws Exception { if (fErrorReporter == null) { System.out.println("__TraverseSchemaError__ : " + SchemaMessageProvider.fgMessageKeys[major]); for (int i=0; i< args.length ; i++) { System.out.println((String)args[i]); } } else { fErrorReporter.reportError(fErrorReporter.getLocator(), SchemaMessageProvider.SCHEMA_DOMAIN, major, SchemaMessageProvider.MSG_NONE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } //Unit Test here public static void main(String args[] ) { if( args.length != 1 ) { System.out.println( "Error: Usage java TraverseSchema 
yourFile.xsd" );
        System.exit(0);
    }

    // Build a DOM of the schema document with whitespace reporting disabled.
    DOMParser parser = new DOMParser() {
        public void ignorableWhitespace(char ch[], int start, int length) {}
        public void ignorableWhitespace(int dataIdx) {}
    };
    parser.setEntityResolver( new Resolver() );
    parser.setErrorHandler( new ErrorHandler() );

    try {
        parser.setFeature("http://xml.org/sax/features/validation", false);
        parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false);
    }catch(  org.xml.sax.SAXNotRecognizedException e ) {
        e.printStackTrace();
    }catch( org.xml.sax.SAXNotSupportedException e ) {
        e.printStackTrace();
    }

    try {
        parser.parse( args[0]);
    }catch( IOException e ) {
        e.printStackTrace();
    }catch( SAXException e ) {
        e.printStackTrace();
    }

    Document     document   = parser.getDocument(); //Our Grammar

    OutputFormat    format  = new OutputFormat( document );
    java.io.StringWriter outWriter = new java.io.StringWriter();
    XMLSerializer    serial = new XMLSerializer( outWriter,format);

    TraverseSchema tst = null;
    try {
        Element root   = document.getDocumentElement();// This is what we pass to TraverserSchema
        //serial.serialize( root );
        //System.out.println(outWriter.toString());

        tst = new TraverseSchema( root, new StringPool(), new SchemaGrammar(), (GrammarResolver) new GrammarResolverImpl() );
    }
    catch (Exception e) {
        e.printStackTrace(System.err);
    }

    parser.getDocument();
}

/** Resolves the schema DTD system ids to classpath resources. */
static class Resolver implements EntityResolver {
    // NOTE(review): the three SYSTEM entries below are truncated in this
    // copy of the file — everything after "http: (the rest of each URL and
    // the closing quote) was stripped by the same mangling seen elsewhere
    // in this dump.  As written these literals are unterminated and the
    // class does not compile; restore the DTD URLs from version control.
    private static final String SYSTEM[] = {
        "http:
        "http:
        "http:
    };
    private static final String PATH[] = {
        "structures.dtd",
        "datatypes.dtd",
        "versionInfo.ent",
    };

    public InputSource resolveEntity(String publicId, String systemId)
        throws IOException {

        // looking for the schema DTDs?
        for (int i = 0; i < SYSTEM.length; i++) {
            if (systemId.equals(SYSTEM[i])) {
                InputSource source = new InputSource(getClass().getResourceAsStream(PATH[i]));
                source.setPublicId(publicId);
                source.setSystemId(systemId);
                return source;
            }
        }

        // use default resolution
        return null;

    } // resolveEntity(String,String):InputSource

} // class Resolver

/** SAX error handler: warnings/errors are logged; fatal errors rethrow. */
static class ErrorHandler implements org.xml.sax.ErrorHandler {

    /** Warning. */
    public void warning(SAXParseException ex) {
        System.err.println("[Warning] "+
                           getLocationString(ex)+": "+
                           ex.getMessage());
    }

    /** Error. */
    public void error(SAXParseException ex) {
        System.err.println("[Error] "+
                           getLocationString(ex)+": "+
                           ex.getMessage());
    }

    /** Fatal error. */
    public void fatalError(SAXParseException ex) throws SAXException {
        System.err.println("[Fatal Error] "+
                           getLocationString(ex)+": "+
                           ex.getMessage());
        throw ex;
    }

    // Private methods

    /** Returns a string of the location, as "systemId:line:column". */
    private String getLocationString(SAXParseException ex) {
        StringBuffer str = new StringBuffer();

        String systemId_ = ex.getSystemId();
        if (systemId_ != null) {
            // keep only the file name portion of the system id
            int index = systemId_.lastIndexOf('/');
            if (index != -1)
                systemId_ = systemId_.substring(index + 1);
            str.append(systemId_);
        }
        str.append(':');
        str.append(ex.getLineNumber());
        str.append(':');
        str.append(ex.getColumnNumber());

        return str.toString();

    } // getLocationString(SAXParseException):String

}
}
package com.github.nwillc.jmeterout;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import static com.github.nwillc.jmeterout.Stats.avg;
import static com.github.nwillc.jmeterout.Stats.percentile;

/**
 * Accumulates the observed sample times, failure count and thread count for
 * a single URL, and renders them as one CSV row via {@link #toString()}.
 */
class RequestEntry {
    private final String url;
    // Number of failed samples recorded against this URL.
    int failures = 0;
    // Thread count associated with this URL.  NOTE(review): written by
    // callers of this class; exact semantics assumed from the name.
    int threads = 0;
    // Raw sample times (sorted in place by toString()).
    final List<Integer> times = new LinkedList<>();

    RequestEntry(String url) {
        this.url = url;
    }

    /**
     * Renders: url, count, min, max, avg, 95th percentile, failures, threads.
     *
     * FIX: the original called {@code times.get(0)} and
     * {@code times.get(times.size() - 1)} unconditionally and threw
     * IndexOutOfBoundsException when no sample had been recorded; an empty
     * entry now renders zeros for the time columns instead.
     */
    @Override
    public String toString() {
        if (times.isEmpty()) {
            return url + ", 0, 0, 0, 0, 0, " + failures + ", " + threads;
        }
        Collections.sort(times);
        return url + ", " + times.size()
                + ", " + times.get(0)
                + ", " + times.get(times.size() - 1)
                + ", " + avg(times)
                + ", " + percentile(times, 95)
                + ", " + failures
                + ", " + threads;
    }
}
package algorithms; import algorithms.interfaces.IGenerator; @SuppressWarnings("UnusedDeclaration") public class LinearFeedbackShiftRegister implements IGenerator { private static final int M = 32; private static boolean[] bits = new boolean[M + 1]; // hard-coded for 32-bits private static final int[] TAPS = {1, 2, 22, 32}; public LinearFeedbackShiftRegister() { long seed = System.currentTimeMillis() + System.identityHashCode(new Object()); for (int i = 0; i < M; i++) { bits[i] = (((1 << i) & seed) >>> i) == 1; } } private static void generateLFSR(long seed) { for (int i = 0; i < M; i++) { bits[i] = (((1 << i) & seed) >>> i) == 1; } } /* generate a random int uniformly on the interval [-2^31 + 1, 2^31 - 1] */ public long getRandomNumber() { generateLSFR(System.currentTimeMillis() + System.identityHashCode(new Object())); //printBits(); // calculate the integer value from the registers int next = 0; for (int i = 0; i < M; i++) { next |= (bits[i] ? 1 : 0) << i; } // allow for zero without allowing for -2^31 if (next < 0) next++; // calculate the last register from all the preceding bits[M] = false; for (int TAP : TAPS) { bits[M] ^= bits[M - TAP]; } // shift all the registers System.arraycopy(bits, 1, bits, 0, M); return Math.abs(next); } /** * returns random double uniformly over [0, 1) */ public double nextDouble() { return ((getRandomNumber() / (Integer.MAX_VALUE + 1.0)) + 1.0) / 2.0; } /** * returns random boolean */ public boolean nextBoolean() { long rand = getRandomNumber(); return rand % 2 == 0; } private void printBits() { System.out.print(bits[M] ? 1 : 0); System.out.print(" -> "); for (int i = M - 1; i >= 0; i System.out.print(bits[i] ? 1 : 0); } System.out.println(); } public long getRandomNumber(long maxValue) { long randomNumber = getRandomNumber(); return randomNumber % (maxValue + 1); } public long getRandomNumber(long minValue, long maxValue) { long randomNumber = getRandomNumber(); return minValue + (randomNumber % (maxValue - minValue + 1)); } }
package org.helioviewer.jhv.opengl.text;

import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.image.*;

import com.jogamp.opengl.*;
import com.jogamp.opengl.util.texture.*;
import com.jogamp.opengl.util.texture.awt.*;

import org.helioviewer.jhv.math.Transform;

/**
 * Renders Java 2D content into an OpenGL {@link Texture}.
 *
 * A {@link BufferedImage} serves as the backing store; callers draw into it
 * via {@link #createGraphics}, mark the touched area with {@link #markDirty},
 * and the dirty portion is flushed to the OpenGL texture on the next
 * {@link #getTexture} call.  All texture operations require a current
 * OpenGL context.
 */
class JhvTextureRenderer {
    // Backing store drawn into with Java 2D and uploaded on demand.
    private BufferedImage image;

    private Texture texture;
    private final AWTTextureData textureData;
    // When set, the OpenGL texture is thrown away and recreated on next use.
    private boolean mustReallocateTexture;
    // Union of all regions touched since the last upload; null when clean.
    private Rectangle dirtyRegion;

    private final int width;
    private final int height;

    /**
     * Creates a renderer whose backing store has the given dimensions.
     *
     * @param _width  width of the texture to render into
     * @param _height height of the texture to render into
     */
    JhvTextureRenderer(int _width, int _height) {
        width = _width;
        height = _height;

        int internalFormat = GL2.GL_RGBA; // force for high version OpenGL
        int imageType = BufferedImage.TYPE_INT_ARGB_PRE;
        image = new BufferedImage(width, height, imageType);

        // The TextureData only references the image contents; it is needed
        // for sub-region updates of the underlying texture.
        final GL2 gl = (GL2) GLContext.getCurrentGL();
        textureData = new AWTTextureData(gl.getGLProfile(), internalFormat, 0, true, image);
        // Start with a fresh OpenGL texture on first use.
        mustReallocateTexture = true;
    }

    /** @return width of the backing store */
    public int getWidth() {
        return width;
    }

    /** @return height of the backing store */
    public int getHeight() {
        return height;
    }

    /**
     * Creates a {@link Graphics2D} for drawing into the backing store.
     * Dispose of it with {@link java.awt.Graphics#dispose()} when done.
     *
     * @return a new Graphics2D over the backing store
     */
    public Graphics2D createGraphics() {
        return image.createGraphics();
    }

    /** Returns the underlying Java 2D {@link Image} being rendered into. */
    public Image getImage() {
        return image;
    }

    /**
     * Marks a region (Java 2D coordinates, origin at the upper left) as out
     * of sync with the OpenGL texture; it is uploaded on the next
     * {@link #getTexture} call.  No OpenGL context is required here.
     *
     * @param x      x coordinate of the region to update
     * @param y      y coordinate of the region to update
     * @param width  width of the region to update
     * @param height height of the region to update
     */
    public void markDirty(final int x, final int y, final int width, final int height) {
        final Rectangle touched = new Rectangle(x, y, width, height);
        if (dirtyRegion == null)
            dirtyRegion = touched;
        else
            dirtyRegion.add(touched);
    }

    /**
     * Returns the OpenGL texture, first flushing any dirty region of the
     * backing store to it.
     *
     * @throws GLException if no OpenGL context is current
     */
    public Texture getTexture() throws GLException {
        if (dirtyRegion != null) {
            sync(dirtyRegion.x, dirtyRegion.y, dirtyRegion.width, dirtyRegion.height);
            dirtyRegion = null;
        }
        ensureTexture();
        return texture;
    }

    /**
     * Releases the OpenGL texture and the backing store.  The renderer must
     * not be used afterwards.
     *
     * @throws GLException if no OpenGL context is current
     */
    public void dispose() throws GLException {
        if (texture != null) {
            texture.destroy(GLContext.getCurrentGL());
            texture = null;
        }
        if (image != null) {
            image.flush();
            image = null;
        }
    }

    /**
     * Sets up state for drawing the texture as a flat screen overlay:
     * disables the depth test, installs an orthographic projection from
     * (0, 0) lower-left to (width, height) upper-right, and binds the
     * texture.  Pair with {@link #endOrthoRendering}.
     *
     * @param width  width of the current on-screen OpenGL drawable
     * @param height height of the current on-screen OpenGL drawable
     * @throws GLException if no OpenGL context is current
     */
    public void beginOrthoRendering(final int width, final int height) throws GLException {
        beginRendering(true, width, height);
    }

    /**
     * Sets up state for drawing the texture as 2D quads in 3D space: binds
     * the texture without touching the depth test or the view/projection
     * matrices.  Pair with {@link #end3DRendering}.
     *
     * @throws GLException if no OpenGL context is current
     */
    public void begin3DRendering() throws GLException {
        beginRendering(false, 0, 0);
    }

    /**
     * Restores the state changed by {@link #beginOrthoRendering}.
     *
     * @throws GLException if no OpenGL context is current
     */
    public void endOrthoRendering() throws GLException {
        endRendering(true);
    }

    /**
     * Restores the state changed by {@link #begin3DRendering}.
     *
     * @throws GLException if no OpenGL context is current
     */
    public void end3DRendering() throws GLException {
        endRendering(false);
    }

    // Internals only below this point

    // Shared setup: optionally install an ortho projection, then enable and
    // bind the (freshly synced) texture.
    private void beginRendering(final boolean ortho, final int width, final int height) {
        final GL2 gl = (GL2) GLContext.getCurrentGL();
        if (ortho) {
            gl.glDisable(GL2.GL_DEPTH_TEST);
            Transform.pushProjection();
            Transform.setOrthoProjection(0, width, 0, height, -1, 1);
            Transform.pushView();
            Transform.setIdentityView();
        }
        final Texture tex = getTexture();
        tex.enable(gl);
        tex.bind(gl);
    }

    // Shared teardown: disable the texture and, for ortho mode, restore the
    // depth test and the saved matrices.
    private void endRendering(final boolean ortho) {
        final GL2 gl = (GL2) GLContext.getCurrentGL();
        final Texture tex = getTexture();
        tex.disable(gl);
        if (ortho) {
            gl.glEnable(GL2.GL_DEPTH_TEST);
            Transform.popView();
            Transform.popProjection();
        }
    }

    // Uploads the given backing-store region (Java 2D coordinates) to the
    // OpenGL texture.  A texture that was just (re)allocated by
    // ensureTexture() already carries the full image, so the upload is
    // skipped in that case.  The BufferedImage-based TextureData leaves the
    // contents untouched, so no Y-flip of the coordinates is needed.
    private void sync(final int x, final int y, final int width, final int height) throws GLException {
        if (!ensureTexture()) {
            final GL2 gl = (GL2) GLContext.getCurrentGL();
            texture.updateSubImage(gl, textureData, 0, x, y, x, y, width, height);
            gl.glGenerateMipmap(GL2.GL_TEXTURE_2D);
        }
    }

    // Allocates the OpenGL texture if needed; returns true exactly when a
    // new texture was created.
    private boolean ensureTexture() {
        if (mustReallocateTexture) {
            final GL2 gl = (GL2) GLContext.getCurrentGL();
            if (texture != null) {
                texture.destroy(gl);
                texture = null;
            }
            mustReallocateTexture = false;
        }

        if (texture == null) {
            final GL2 gl = (GL2) GLContext.getCurrentGL();
            texture = TextureIO.newTexture(textureData);
            // Mipmapped, linearly filtered, edge-clamped sampling.
            texture.setTexParameteri(gl, GL2.GL_TEXTURE_BASE_LEVEL, 0);
            texture.setTexParameteri(gl, GL2.GL_TEXTURE_MAX_LEVEL, 15);
            texture.setTexParameteri(gl, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_LINEAR_MIPMAP_LINEAR);
            texture.setTexParameteri(gl, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_LINEAR);
            texture.setTexParameteri(gl, GL2.GL_TEXTURE_WRAP_S, GL2.GL_CLAMP_TO_EDGE);
            texture.setTexParameteri(gl, GL2.GL_TEXTURE_WRAP_T, GL2.GL_CLAMP_TO_EDGE);
            return true;
        }
        return false;
    }
}
package org.openqa.selenium.rc;

import com.google.common.base.Throwables;
import com.google.common.io.Files;
import org.json.JSONObject;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.Build;
import org.openqa.selenium.Pages;
import org.openqa.selenium.Proxy;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.environment.GlobalTestEnvironment;
import org.openqa.selenium.environment.InProcessTestEnvironment;
import org.openqa.selenium.environment.TestEnvironment;
import org.openqa.selenium.io.TemporaryFilesystem;
import org.openqa.selenium.net.NetworkUtils;
import org.openqa.selenium.net.PortProber;
import org.openqa.selenium.os.CommandLine;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.HttpRequest;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.InProject;
import org.openqa.selenium.testing.SeleniumTestRunner;
import org.openqa.selenium.testing.drivers.WebDriverBuilder;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;

import static org.junit.Assert.assertTrue;
import static org.openqa.selenium.remote.CapabilityType.PROXY;
import static org.openqa.selenium.remote.HttpRequest.Method.DELETE;
import static org.openqa.selenium.remote.HttpRequest.Method.GET;
import static org.openqa.selenium.remote.HttpRequest.Method.POST;
import static org.openqa.selenium.remote.HttpRequest.Method.PUT;
import static org.openqa.selenium.testing.Ignore.Driver.ANDROID;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.IPHONE;
import static org.openqa.selenium.testing.Ignore.Driver.SELENESE;

/**
 * Verifies that the PROXY capability routes browser traffic through a
 * BrowserMob proxy, configured either directly or via a PAC file.  The proxy
 * runs out of process; each test gets a fresh proxy instance whose recorded
 * HAR tells us whether traffic actually went through it.
 */
@Ignore(value = {ANDROID, IE, IPHONE, SELENESE},
    reason = "Not tested on these browsers yet.")
@RunWith(SeleniumTestRunner.class)
public class SetProxyTest {

  private static BrowserMobProxyServer proxyServer;
  private static Pages pages;

  // Fresh per-test proxy allocated in newProxyInstance().
  private ProxyInstance instance;

  @BeforeClass
  public static void startProxy() {
    TestEnvironment environment =
        GlobalTestEnvironment.get(InProcessTestEnvironment.class);
    pages = new Pages(environment.getAppServer());
    proxyServer = new BrowserMobProxyServer();
  }

  // NOTE(review): method name is misspelled ("detroy"); kept as-is because
  // JUnit invokes it via @AfterClass, but consider renaming to destroyProxy.
  @AfterClass
  public static void detroyProxy() {
    proxyServer.destroy();
  }

  @Before
  public void newProxyInstance() {
    instance = proxyServer.newInstance();
  }

  @After
  public void deleteProxyInstance() {
    instance.destroy();
  }

  @Test
  public void shouldAllowProxyToBeSetViaTheCapabilities() {
    Proxy proxy = instance.asProxy();

    DesiredCapabilities caps = new DesiredCapabilities();
    caps.setCapability(PROXY, proxy);

    WebDriver driver = new WebDriverBuilder().setCapabilities(caps).get();
    driver.get(pages.simpleTestPage);
    driver.quit();

    assertTrue(instance.hasBeenCalled());
  }

  @Test
  public void shouldAllowProxyToBeConfiguredAsAPac() throws IOException {
    String pac = String.format(
        "function FindProxyForURL(url, host) {\n" +
        "  return 'PROXY http://%s:%d';\n" +
        "}",
        new NetworkUtils().getPrivateLocalAddress(), instance.port);

    File tempDir = new File(System.getProperty("java.io.tmpdir"));
    TemporaryFilesystem tempFs = TemporaryFilesystem.getTmpFsBasedOn(tempDir);
    File pacFile = new File(tempDir, "proxy.pac");
    // Use the default platform charset because otherwise IE gets upset. Apparently.
    Files.write(pac, pacFile, Charset.defaultCharset());

    String autoConfUrl = pacFile.toURI().toString();
    // FIX(review): the two literals below were truncated to "file: in this
    // copy of the source (everything from "//" onward was stripped);
    // restored as the evident file:// scheme normalization — confirm
    // against version control.
    if (!autoConfUrl.startsWith("file://")) {
      autoConfUrl = autoConfUrl.replace("file:/", "file://");
    }

    Proxy proxy = new Proxy();
    proxy.setProxyAutoconfigUrl(autoConfUrl);

    DesiredCapabilities caps = new DesiredCapabilities();
    caps.setCapability(PROXY, proxy);

    WebDriver driver = new WebDriverBuilder().setCapabilities(caps).get();
    driver.get(pages.simpleTestPage);
    driver.quit();
    tempFs.deleteTemporaryFiles();

    assertTrue(instance.hasBeenCalled());
  }

  /** Wraps the out-of-process BrowserMob proxy server. */
  private static class BrowserMobProxyServer {
    private CommandLine process;
    private String proxyUrl;

    public BrowserMobProxyServer() {
      // We need to run out of process as the browsermob proxy has a dependency
      // on the Selenium Proxy interface, which may change.
      new Build().of("//third_party/java/browsermob_proxy:browsermob_proxy:uber").go();

      String browserBinary = InProject.locate(
          "build/third_party/java/browsermob_proxy/browsermob_proxy-standalone.jar")
          .getAbsolutePath();

      int port = PortProber.findFreePort();
      process = new CommandLine(
          "java", "-jar", browserBinary, "--port", String.valueOf(port));
      process.copyOutputTo(System.err);
      process.executeAsync();
      PortProber.pollPort(port);

      String address = new NetworkUtils().getPrivateLocalAddress();
      proxyUrl = String.format("http://%s:%d", address, port);
    }

    /** Allocates a fresh proxy port on the server and starts HAR recording. */
    public ProxyInstance newInstance() {
      try {
        HttpRequest request = new HttpRequest(POST, proxyUrl + "/proxy", null);

        JSONObject proxyDetails = new JSONObject(request.getResponse());
        int port = proxyDetails.getInt("port");

        // Wait until the proxy starts and is listening
        PortProber.pollPort(port);

        // Start recording requests
        new HttpRequest(PUT, String.format("%s/proxy/%d/har", proxyUrl, port), null);

        // FIX: use this instance's own proxyUrl rather than reaching through
        // the enclosing class's static proxyServer field (same value in
        // practice, but the indirection was misleading and fragile).
        return new ProxyInstance(proxyUrl, port);
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }
    }

    public void destroy() {
      process.destroy();
    }
  }

  /** Handle on one allocated proxy port of the BrowserMob server. */
  private static class ProxyInstance {
    private final String baseUrl;
    private final int port;

    public ProxyInstance(String baseUrl, int port) {
      this.baseUrl = baseUrl;
      this.port = port;
    }

    /** True once the recorded HAR is non-empty, i.e. traffic went through us. */
    public boolean hasBeenCalled() {
      String url = String.format("%s/proxy/%d/har", baseUrl, port);
      HttpRequest request = new HttpRequest(GET, url, null);
      String response = request.getResponse();
      return response.length() > 0;
    }

    public void destroy() {
      try {
        String url = String.format("%s/proxy/%d", baseUrl, port);
        new HttpRequest(DELETE, url, null);
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }
    }

    /** Expresses this instance as a Selenium HTTP Proxy capability. */
    public Proxy asProxy() {
      Proxy proxy = new Proxy();
      String address = new NetworkUtils().getPrivateLocalAddress();
      String format = String.format("%s:%d", address, port);
      proxy.setHttpProxy(format);
      return proxy;
    }
  }
}
package net.RPower.RPowermod.entity; import java.util.Random; import net.minecraft.block.Block; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.init.Blocks; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.world.World; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; import net.RPower.RPowermod.core.RPCore; import net.minecraft.world.gen.feature.*; import net.minecraft.world.storage.WorldInfo; public class EntityJadeTNTPrimed extends Entity { private int exXcord; private int exYcord; private int exZcord; /** How long the fuse is */ public int fuse; private EntityLivingBase tntPlacedBy; private boolean doBlockNotify; private static final String __OBFID = "CL_00001681"; public EntityJadeTNTPrimed(World par1World) { super(par1World); this.preventEntitySpawning = true; this.setSize(0.98F, 0.98F); this.yOffset = this.height / 2.0F; } public EntityJadeTNTPrimed(World par1World, double par2, double par4, double par6, EntityLivingBase par8EntityLivingBase) { this(par1World); this.setPosition(par2, par4, par6); float f = (float)(Math.random() * Math.PI * 2.0D); this.motionX = (double)(-((float)Math.sin((double)f)) * 0.02F); this.motionY = 0.20000000298023224D; this.motionZ = (double)(-((float)Math.cos((double)f)) * 0.02F); this.fuse = 80; this.prevPosX = par2; this.prevPosY = par4; this.prevPosZ = par6; this.tntPlacedBy = par8EntityLivingBase; } protected void entityInit() {} /** * returns if this entity triggers Block.onEntityWalking on the blocks they walk on. used for spiders and wolves to * prevent them from trampling crops */ protected boolean canTriggerWalking() { return false; } /** * Returns true if other Entities should be prevented from moving through this Entity. */ public boolean canBeCollidedWith() { return !this.isDead; } /** * Called to update the entity's position/logic. 
*/ public void onUpdate() { this.prevPosX = this.posX; this.prevPosY = this.posY; this.prevPosZ = this.posZ; this.motionY -= 0.03999999910593033D; this.moveEntity(this.motionX, this.motionY, this.motionZ); this.motionX *= 0.9800000190734863D; this.motionY *= 0.9800000190734863D; this.motionZ *= 0.9800000190734863D; if (this.onGround) { this.motionX *= 0.699999988079071D; this.motionZ *= 0.699999988079071D; this.motionY *= -0.5D; } if (this.fuse { this.setDead(); this.explode(); } else { this.worldObj.spawnParticle("smoke", this.posX, this.posY + 0.5D, this.posZ, 0.0D, 0.0D, 0.0D); this.worldObj.spawnParticle("smoke", this.posX, this.posY + 0.5D, this.posZ, 1D, 0.0D, 0.0D); this.worldObj.spawnParticle("smoke", this.posX, this.posY + 0.5D, this.posZ, 0.0D, 1D, 0.0D); this.worldObj.spawnParticle("smoke", this.posX, this.posY + 0.5D, this.posZ, 0.0D, 0.0D, 1D); this.worldObj.spawnParticle("smoke", this.posX, this.posY + 0.5D, this.posZ, 1D, 1D, 0.0D); this.worldObj.spawnParticle("smoke", this.posX, this.posY + 0.5D, this.posZ, 0.0D, 1D, 1D); this.worldObj.spawnParticle("smoke", this.posX, this.posY + 0.5D, this.posZ, 1D, 1D, 1D); } } private void explode() { if (this.posX < 0){ exXcord = (int) this.posX - 1; } else { exXcord = (int) this.posX; } exYcord = (int) this.posY; exZcord = (int) this.posZ; int radius = 4; int radiusSquared = (radius*radius)+1; for(int targetX=-(radius);targetX<=(radius); targetX++) { for(int targetY=-(radius);targetY<=(radius); targetY++) { for(int targetZ=-(radius);targetZ<=(radius); targetZ++) { if((((targetX*targetX)+(targetY*targetY))<radiusSquared)&&(((targetX*targetX)+(targetZ*targetZ))<radiusSquared)&&(((targetY*targetY)+(targetZ*targetZ))<radiusSquared)){ System.out.println("Testing:"+(exXcord+targetX)+","+(exYcord+targetY)+","+(exZcord+targetZ)+","); setBlockAndNotifyAdequately(exXcord+targetX, exYcord+targetY, exZcord+targetZ, RPCore.jadeBlock, 0); setBlockAndNotifyAdequately(exXcord+targetX, exYcord+targetY+20, exZcord+targetZ, 
Blocks.stone, 0); } } } } int radius2 = radius / 2; int radiusSquared2 = (radius2*radius2)+1; for(int targetX2=-(radius2);targetX2<=(radius2); targetX2++) { for(int targetY2=-(radius2);targetY2<=(radius2); targetY2++) { for(int targetZ2=-(radius2);targetZ2<=(radius2); targetZ2++) { if((((targetX2*targetX2)+(targetY2*targetY2))<=radiusSquared2)&&(((targetX2*targetX2)+(targetZ2*targetZ2))<=radiusSquared2)&&(((targetY2*targetY2)+(targetZ2*targetZ2))<=radiusSquared2)){ System.out.println("Testing:"+(exXcord+targetX2)+","+(exYcord+targetY2)+","+(exZcord+targetZ2)+","); setBlockAndNotifyAdequately(exXcord+targetX2, exYcord+targetY2, exZcord+targetZ2, RPCore.obsidianWhite, 0); } } } } setBlockAndNotifyAdequately(exXcord, exYcord+10, exZcord, RPCore.jadeBlock, 0); for(int targetX=-(radius);targetX<=(radius); targetX++) { for(int targetY=-(radius);targetY<=(radius); targetY++) { for(int targetZ=-(radius);targetZ<=(radius); targetZ++) { if((((targetX*targetX)+(targetY*targetY))<radiusSquared)&&(((targetX*targetX)+(targetZ*targetZ))<radiusSquared)&&(((targetY*targetY)+(targetZ*targetZ))<radiusSquared)){ System.out.println("Testing:"+(exXcord+targetX)+","+(exYcord+targetY)+","+(exZcord+targetZ)+","); System.out.println(this.worldObj.getBlock(exXcord+targetX, exYcord+targetY, exZcord+targetZ)); } } } } } /** * (abstract) Protected helper method to write subclass entity data to NBT. */ protected void writeEntityToNBT(NBTTagCompound par1NBTTagCompound) { par1NBTTagCompound.setByte("Fuse", (byte)this.fuse); } /** * (abstract) Protected helper method to read subclass entity data from NBT. 
*/ protected void readEntityFromNBT(NBTTagCompound par1NBTTagCompound) { this.fuse = par1NBTTagCompound.getByte("Fuse"); } @SideOnly(Side.CLIENT) public float getShadowSize() { return 0.0F; } /** * returns null or the entityliving it was placed or ignited by */ public EntityLivingBase getTntPlacedBy() { return this.tntPlacedBy; } protected void setBlockAndNotifyAdequately(int p_150516_2_, int p_150516_3_, int p_150516_4_, Block p_150516_5_, int p_150516_6_) { if (this.doBlockNotify) { worldObj.setBlock(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, p_150516_6_, 0); worldObj.setBlock(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, p_150516_6_, 1); worldObj.setBlock(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, p_150516_6_, 2); worldObj.setBlock(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, p_150516_6_, 3); worldObj.scheduleBlockUpdate(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, this.tickRate(worldObj)); } else { worldObj.setBlock(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, p_150516_6_, 0); worldObj.setBlock(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, p_150516_6_, 1); worldObj.setBlock(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, p_150516_6_, 2); worldObj.setBlock(p_150516_2_, p_150516_3_, p_150516_4_, p_150516_5_, p_150516_6_, 3); } } private int tickRate(World worldObj) { return 2; } }
package com.helloworld.controller;

import java.text.DateFormat;
import java.util.Date;
import java.util.Locale;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;

/**
 * Handles requests for the application home page.
 */
@Controller
public class HomeController {

    private static final Logger logger = LoggerFactory.getLogger(HomeController.class);

    /**
     * Renders the home view, exposing the current server time — formatted for
     * the client's locale — as the {@code "serverTime"} model attribute.
     *
     * @param locale the client's locale, resolved by Spring MVC
     * @param model  the view model to populate
     * @return the logical view name {@code "home"}
     */
    @RequestMapping(value = "/", method = RequestMethod.GET)
    public String home(Locale locale, Model model) {
        logger.info("Welcome home! The client locale is {}.", locale);

        DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG, locale);
        model.addAttribute("serverTime", formatter.format(new Date()));

        return "home";
    }
}
package org.nutz.ioc.loader.annotation;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

import org.nutz.castor.Castors;
import org.nutz.ioc.IocException;
import org.nutz.ioc.IocLoader;
import org.nutz.ioc.IocLoading;
import org.nutz.ioc.ObjectLoadException;
import org.nutz.ioc.annotation.InjectName;
import org.nutz.ioc.meta.IocEventSet;
import org.nutz.ioc.meta.IocField;
import org.nutz.ioc.meta.IocObject;
import org.nutz.ioc.meta.IocValue;
import org.nutz.json.Json;
import org.nutz.lang.Lang;
import org.nutz.lang.Mirror;
import org.nutz.lang.Strings;
import org.nutz.log.Log;
import org.nutz.log.Logs;
import org.nutz.resource.Scans;

/**
 * Annotation-driven IocLoader: scans the given base packages for classes
 * annotated with @IocBean and turns each into an IocObject definition
 * (constructor args, injected fields/setters, lifecycle events).
 *
 * @author wendal(wendal1985@gmail.com)
 *
 */
public class AnnotationIocLoader implements IocLoader {

    private static final Log log = Logs.get();

    // Bean name -> bean definition, populated once in the constructor.
    private HashMap<String, IocObject> map = new HashMap<String, IocObject>();

    /**
     * Scans every class in the given packages and registers those carrying
     * the @IocBean annotation. Logs a summary, or a warning when nothing
     * annotated was found.
     */
    public AnnotationIocLoader(String... packages) {
        for (String packageZ : packages)
            for (Class<?> classZ : Scans.me().scanPackage(packageZ))
                addClass(classZ);
        if (map.size() > 0) {
            if (log.isInfoEnabled())
                log.infof("Scan complete ! Found %s classes in %s base-packages!\nbeans = %s",
                          map.size(),
                          packages.length,
                          Castors.me().castToString(map.keySet()));
        } else {
            log.warn("NONE Annotation-Class found!! Check your configure or report a bug!! packages=" + Arrays.toString(packages));
        }
    }

    /**
     * Inspects one scanned class and, if it is a concrete public @IocBean,
     * builds its IocObject definition and stores it under the resolved bean
     * name. Classes without @IocBean are only checked for stray @Inject
     * fields (which get a warning).
     */
    private void addClass(Class<?> classZ) {
        // Only concrete, public, top-level classes can be beans.
        if (classZ.isInterface()
            || classZ.isMemberClass()
            || classZ.isEnum()
            || classZ.isAnnotation()
            || classZ.isAnonymousClass())
            return;
        int modify = classZ.getModifiers();
        if (Modifier.isAbstract(modify) || (!Modifier.isPublic(modify)))
            return;
        IocBean iocBean = classZ.getAnnotation(IocBean.class);
        if (iocBean != null) {
            if (log.isDebugEnabled())
                log.debugf("Found a Class with Ioc-Annotation : %s", classZ);

            // Bean name resolution order: @IocBean.name, then @InjectName,
            // then the lower-cased simple class name.
            String beanName = iocBean.name();
            if (Strings.isBlank(beanName)) {
                InjectName innm = classZ.getAnnotation(InjectName.class);
                if (null != innm && !Strings.isBlank(innm.value())) {
                    beanName = innm.value();
                }
                else {
                    beanName = Strings.lowerFirst(classZ.getSimpleName());
                }
            }

            // Bean names must be unique across all scanned packages.
            if (map.containsKey(beanName))
                throw Lang.makeThrow(IocException.class,
                                     "Duplicate beanName=%s, by %s !! Have been define by %s !!",
                                     beanName,
                                     classZ,
                                     map.get(beanName).getClass());
            IocObject iocObject = new IocObject();
            iocObject.setType(classZ);
            map.put(beanName, iocObject);

            iocObject.setSingleton(iocBean.singleton());
            if (!Strings.isBlank(iocBean.scope()))
                iocObject.setScope(iocBean.scope());

            // Constructor arguments declared on the annotation.
            String[] args = iocBean.args();
            // if (null == args || args.length == 0)
            // args = iocBean.param();
            if (null != args && args.length > 0)
                for (String value : args)
                    iocObject.addArg(convert(value));

            // Lifecycle events (create/depose/fetch method names).
            IocEventSet eventSet = new IocEventSet();
            iocObject.setEvents(eventSet);
            if (!Strings.isBlank(iocBean.create()))
                eventSet.setCreate(iocBean.create().trim().intern());
            if (!Strings.isBlank(iocBean.depose()))
                eventSet.setDepose(iocBean.depose().trim().intern());
            if (!Strings.isBlank(iocBean.fetch()))
                eventSet.setFetch(iocBean.fetch().trim().intern());

            // @Inject-annotated fields: blank value means "refer to a bean
            // with the same name as the field".
            List<String> fieldList = new ArrayList<String>();
            Mirror<?> mirror = Mirror.me(classZ);
            Field[] fields = mirror.getFields(Inject.class);
            for (Field field : fields) {
                Inject inject = field.getAnnotation(Inject.class);
                // if(fieldList.contains(field.getName()))
                // throw duplicateField(classZ,field.getName());
                IocField iocField = new IocField();
                iocField.setName(field.getName());
                IocValue iocValue;
                if (Strings.isBlank(inject.value())) {
                    iocValue = new IocValue();
                    iocValue.setType(IocValue.TYPE_REFER);
                    iocValue.setValue(field.getName());
                } else
                    iocValue = convert(inject.value());
                iocField.setValue(iocValue);
                iocObject.addField(iocField);
                fieldList.add(iocField.getName());
            }

            // @Inject-annotated public setter methods (setXxx with one arg).
            Method[] methods;
            try {
                methods = classZ.getMethods();
            } catch (Exception e) {
                // getMethods() can fail under a SecurityManager or with a
                // missing dependent class; treat as "no setters".
                log.info("Fail to call getMethods(), miss class or Security Limit, ignore it", e);
                methods = new Method[0];
            }
            for (Method method : methods) {
                Inject inject = method.getAnnotation(Inject.class);
                if (inject == null)
                    continue;
                int m = method.getModifiers();
                if (Modifier.isAbstract(m) || (!Modifier.isPublic(m)) || Modifier.isStatic(m))
                    continue;
                String methodName = method.getName();
                if (methodName.startsWith("set")
                    && methodName.length() > 3
                    && method.getParameterTypes().length == 1) {
                    IocField iocField = new IocField();
                    iocField.setName(Strings.lowerFirst(methodName.substring(3)));
                    if (fieldList.contains(iocField.getName()))
                        throw duplicateField(classZ, iocField.getName());
                    IocValue iocValue;
                    if (Strings.isBlank(inject.value())) {
                        iocValue = new IocValue();
                        iocValue.setType(IocValue.TYPE_REFER);
                        iocValue.setValue(Strings.lowerFirst(methodName.substring(3)));
                    } else
                        iocValue = convert(inject.value());
                    iocField.setValue(iocValue);
                    iocObject.addField(iocField);
                    fieldList.add(iocField.getName());
                }
            }

            // Extra fields declared on @IocBean.fields; "name:type:value"
            // entries go through convert(), bare names become refer values.
            String[] flds = iocBean.fields();
            if (flds != null && flds.length > 0) {
                for (String fieldInfo : flds) {
                    if (fieldList.contains(fieldInfo))
                        throw duplicateField(classZ, fieldInfo);
                    IocField iocField = new IocField();
                    if (fieldInfo.contains(":")) { // dao:jndi:dataSource/jdbc
                        String[] datas = fieldInfo.split(":", 2);
                        // explicit type prefix, same syntax as @Inject values
                        iocField.setName(datas[0]);
                        iocField.setValue(convert(datas[1]));
                        iocObject.addField(iocField);
                    } else {
                        // bare name: refer to the bean of the same name
                        iocField.setName(fieldInfo);
                        IocValue iocValue = new IocValue();
                        iocValue.setType(IocValue.TYPE_REFER);
                        iocValue.setValue(fieldInfo);
                        iocField.setValue(iocValue);
                        iocObject.addField(iocField);
                    }
                    fieldList.add(iocField.getName());
                }
            }
        } else {
            // Not a bean: warn if the author probably forgot @IocBean.
            if (log.isWarnEnabled()) {
                Field[] fields = classZ.getDeclaredFields();
                for (Field field : fields)
                    if (field.getAnnotation(Inject.class) != null) {
                        log.warnf("class(%s) don't has @IocBean, but field(%s) has @Inject! Miss @IocBean ??",
                                  classZ.getName(),
                                  field.getName());
                        break;
                    }
            }
        }
    }

    /**
     * Parses an annotation value string into an IocValue. A "type:value"
     * string sets both type and value; otherwise only the raw value is kept.
     */
    protected IocValue convert(String value) {
        IocValue iocValue = new IocValue();
        if (value.contains(":")) {
            iocValue.setType(value.substring(0, value.indexOf(':')));
            iocValue.setValue(value.substring(value.indexOf(':') + 1));
        } else {
            iocValue.setValue(value); // TODO refer?
        }
        return iocValue;
    }

    /** Returns all registered bean names. */
    public String[] getName() {
        return map.keySet().toArray(new String[map.size()]);
    }

    public boolean has(String name) {
        return map.containsKey(name);
    }

    /** Returns the stored definition; loading context is not used here. */
    public IocObject load(IocLoading loading, String name) throws ObjectLoadException {
        if (has(name))
            return map.get(name);
        throw new ObjectLoadException("Object '" + name + "' without define!");
    }

    private static final IocException duplicateField(Class<?> classZ, String name) {
        return Lang.makeThrow(IocException.class,
                              "Duplicate filed defined! Class=%s,FileName=%s",
                              classZ,
                              name);
    }

    public String toString() {
        return "/*AnnotationIocLoader*/\n" + Json.toJson(map);
    }
}
package org.pentaho.di.trans.steps.exceloutput;

import java.util.Locale;

import jxl.Workbook;
import jxl.WorkbookSettings;
import jxl.write.DateFormat;
import jxl.write.DateFormats;
import jxl.write.DateTime;
import jxl.write.Label;
import jxl.write.NumberFormat;
import jxl.write.WritableCellFormat;
import jxl.write.WritableFont;

import org.apache.commons.vfs.FileObject;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;

/**
 * Converts input rows to excel cells and then writes this information to one or more files.
 *
 * @author Matt
 * @since 7-sep-2006
 */
public class ExcelOutput extends BaseStep implements StepInterface
{
    private ExcelOutputMeta meta;
    private ExcelOutputData data;

    public ExcelOutput(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans)
    {
        super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
    }

    /**
     * Per-row entry point: on the first row, clones the input row meta and
     * opens the file / writes the header when deferred opening is configured;
     * handles file splitting every N rows; writes the row and passes it on.
     * Returns false when processing should stop (end of input or error).
     */
    public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
    {
        meta=(ExcelOutputMeta)smi;
        data=(ExcelOutputData)sdi;

        Object[] r=getRow();       // This also waits for a row to be finished.

        if (first && r != null)
        {
            // get the RowMeta, rowMeta is only set when a row is read
            data.previousMeta = getInputRowMeta().clone();
            //do not set first=false, below is another part that uses first

            if(meta.isDoNotOpenNewFileInit())
            {
                data.oneFileOpened=true;

                if (!openNewFile())
                {
                    logError("Couldn't open file "+meta.getFileName());
                    return false;
                }

                // If we need to write a header, do so...
                if(meta.isHeaderEnabled() && !data.headerWrote)
                {
                    writeHeader();
                    data.headerWrote=true;
                }
            }else
            {
                // If we need to write a header, do so...
                if(meta.isHeaderEnabled() && !data.headerWrote)
                {
                    writeHeader();
                    data.headerWrote=true;
                }
            }
        }

        // If we split the data stream in small XLS files, we need to do this here...
        if ( r!=null && getLinesOutput()>0 && meta.getSplitEvery()>0 && ((getLinesOutput()+1)%meta.getSplitEvery())==0)
        {
            // Not finished: open another file...
            if (r!=null)
            {
                closeFile();

                if (!openNewFile())
                {
                    logError("Unable to open new file (split #"+data.splitnr+"...");
                    setErrors(1);
                    return false;
                }

                // If we need to write a header, do so...
                if(meta.isHeaderEnabled() && !data.headerWrote)
                {
                    writeHeader();
                    data.headerWrote=true;
                }
            }
        }

        if (r==null)  // no more input to be expected...
        {
            setOutputDone();
            return false;
        }

        boolean result=writeRowToFile(r);
        if (!result)
        {
            setErrors(1);
            stopAll();
            return false;
        }

        putRow(data.previousMeta, r);       // in case we want it to go further...

        if (checkFeedback(getLinesOutput()))
        {
            if(log.isBasic()) logBasic("linenr "+getLinesOutput());
        }

        return result;
    }

    /**
     * Writes one row: on first use resolves the output-field indexes, then
     * emits either all stream values or only the configured fields, and
     * advances the write position to the next sheet row.
     */
    private boolean writeRowToFile(Object[] r)
    {
        Object v;

        try
        {
            if (first)
            {
                first=false;

                data.fieldnrs=new int[meta.getOutputFields().length];
                for (int i=0;i<meta.getOutputFields().length;i++)
                {
                    data.fieldnrs[i]=data.previousMeta.indexOfValue(meta.getOutputFields()[i].getName());
                    if (data.fieldnrs[i]<0)
                    {
                        logError("Field ["+meta.getOutputFields()[i].getName()+"] couldn't be found in the input stream!");
                        setErrors(1);
                        stopAll();
                        return false;
                    }
                }
            }

            if (meta.getOutputFields()==null || meta.getOutputFields().length==0)
            {
                /*
                 * Write all values in stream to text file.
                 */
                for (int i=0;i<data.previousMeta.size();i++)
                {
                    v=r[i];
                    if(!writeField(v, data.previousMeta.getValueMeta(i), null, i)) return false;
                }
                // go to the next line
                data.positionX = 0;
                data.positionY++;
            }
            else
            {
                /*
                 * Only write the fields specified!
                 */
                for (int i=0;i<meta.getOutputFields().length;i++)
                {
                    v=r[data.fieldnrs[i]];

                    if(!writeField(v, data.previousMeta.getValueMeta(data.fieldnrs[i]), meta.getOutputFields()[i], i)) return false;
                }

                // go to the next line
                data.positionX = 0;
                data.positionY++;
            }
        }
        catch(Exception e)
        {
            logError("Error writing line :"+e.toString());
            return false;
        }

        incrementLinesOutput();

        return true;
    }

    /**
     * Write a value to Excel, increasing data.positionX with one afterwards.
     * @param v The value to write
     * @param vMeta The valueMeta to write
     * @param excelField the field information (if any, otherwise : null)
     * @param column the excel column for getting the template format
     * @return
     */
    private boolean writeField(Object v, ValueMetaInterface vMeta, ExcelField excelField, int column)
    {
        return writeField(v, vMeta, excelField, column, false);
    }

    /**
     * Write a value to Excel, increasing data.positionX with one afterwards.
     * @param v The value to write
     * @param vMeta The valueMeta to write
     * @param excelField the field information (if any, otherwise : null)
     * @param column the excel column for getting the template format
     * @param isHeader true if this is part of the header/footer
     * @return
     */
    private boolean writeField(Object v, ValueMetaInterface vMeta, ExcelField excelField, int column, boolean isHeader)
    {
        WritableFont writableFont = new WritableFont(WritableFont.ARIAL, 10, WritableFont.NO_BOLD);
        try
        {
            // Cell formats are cached per field name; header cells share one entry.
            String hashName = vMeta.getName();
            if (isHeader) hashName = "____header_field____"; // all strings, can map to the same format.

            WritableCellFormat cellFormat=(WritableCellFormat) data.formats.get(hashName);

            // when template is used, take over the column format
            if (cellFormat==null && meta.isTemplateEnabled() && !isHeader)
            {
                try
                {
                    if (column<data.templateColumns)
                    {
                        cellFormat=new WritableCellFormat(data.sheet.getColumnView(column).getFormat());
                        data.formats.put(hashName, cellFormat); // save for next time around...
                    }
                }
                catch (RuntimeException e)
                {
                    //ignore if the column is not found, format as usual
                }
            }
            if(meta.isAutoSizeColums())
            {
                // prepare auto size colums
                int vlen=vMeta.getName().length();
                if(!isHeader && v!=null) vlen=v.toString().trim().length();
                if(vlen>0 && vlen>data.fieldsWidth[column]) data.fieldsWidth[column]=vlen+1;
            }
            switch(vMeta.getType())
            {
            case ValueMetaInterface.TYPE_DATE:
                {
                    if (v!=null && vMeta.getDate(v)!=null)
                    {
                        if (cellFormat==null)
                        {
                            if (excelField!=null && excelField.getFormat()!=null)
                            {
                                DateFormat dateFormat = new DateFormat(excelField.getFormat());
                                cellFormat=new WritableCellFormat(dateFormat);
                            }
                            else
                            {
                                cellFormat =  new WritableCellFormat(DateFormats.FORMAT9);
                            }
                            data.formats.put(hashName, cellFormat); // save for next time around...
                        }
                        DateTime dateTime = new DateTime(data.positionX, data.positionY, vMeta.getDate(v), cellFormat);
                        data.sheet.addCell(dateTime);
                    }
                    else
                    {
                        data.sheet.addCell(new Label(data.positionX, data.positionY, ""));
                    }
                }
                break;
            case ValueMetaInterface.TYPE_STRING:
            case ValueMetaInterface.TYPE_BOOLEAN:
            case ValueMetaInterface.TYPE_BINARY:
                {
                    if (v!=null)
                    {
                        if (cellFormat==null)
                        {
                            cellFormat = new WritableCellFormat(writableFont);
                            data.formats.put(hashName, cellFormat);
                        }
                        Label label = new Label(data.positionX, data.positionY, vMeta.getString(v), cellFormat);
                        data.sheet.addCell(label);
                    }
                    else
                    {
                        data.sheet.addCell(new Label(data.positionX, data.positionY, ""));
                    }
                }
                break;
            case ValueMetaInterface.TYPE_NUMBER:
            case ValueMetaInterface.TYPE_BIGNUMBER:
            case ValueMetaInterface.TYPE_INTEGER:
                {
                    if (v!=null)
                    {
                        if (cellFormat==null)
                        {
                            String format;
                            if (excelField!=null && excelField.getFormat()!=null)
                            {
                                format=excelField.getFormat();
                            }
                            else
                            {
                                // NOTE(review): the default-format string literal below is
                                // truncated in this copy of the file (the pattern after the
                                // opening quote was lost) and will not compile; restore it
                                // from upstream (Kettle uses a numeric mask like "###,###.00").
                                format = "
                            }
                            NumberFormat numberFormat = new NumberFormat(format);
                            cellFormat = new WritableCellFormat(numberFormat);
                            // NOTE(review): caches under vMeta.getName() while every other
                            // branch caches under hashName — inconsistent for header cells;
                            // confirm whether this should be hashName.
                            data.formats.put(vMeta.getName(), cellFormat); // save for next time around...
                        }
                        jxl.write.Number number = new jxl.write.Number(data.positionX, data.positionY, vMeta.getNumber(v), cellFormat);
                        data.sheet.addCell(number);
                    }
                    else
                    {
                        data.sheet.addCell(new Label(data.positionX, data.positionY, ""));
                    }
                }
                break;
            default: break;
            }
        }
        catch(Exception e)
        {
            logError("Error writing field ("+data.positionX+","+data.positionY+") : "+e.toString());
            logError(Const.getStackTracker(e));
            return false;
        }
        finally
        {
            data.positionX++; // always advance :-)
        }
        return true;
    }

    /**
     * Writes the header row (also reused as the footer by closeFile()).
     * NOTE(review): returns true on ERROR and false on success — the inverse
     * of the other boolean methods in this class; callers here ignore the
     * return value, but confirm before relying on it.
     */
    private boolean writeHeader()
    {
        boolean retval=false;

        try
        {
            // If we have fields specified: list them in this order!
            if (meta.getOutputFields()!=null && meta.getOutputFields().length>0)
            {
                if(meta.isAutoSizeColums()) data.fieldsWidth = new int[meta.getOutputFields().length];
                for (int i=0;i<meta.getOutputFields().length;i++)
                {
                    String fieldName = meta.getOutputFields()[i].getName();
                    ValueMetaInterface vMeta=new ValueMeta(fieldName, ValueMetaInterface.TYPE_STRING);
                    writeField(fieldName, vMeta, null, i, true);
                }
            }
            else
                if (data.previousMeta!=null)  // Just put all field names in the header/footer
                {
                    if(meta.isAutoSizeColums()) data.fieldsWidth = new int[data.previousMeta.size()];
                    for (int i=0;i<data.previousMeta.size();i++)
                    {
                        String fieldName = data.previousMeta.getFieldNames()[i];
                        ValueMetaInterface vMeta=new ValueMeta(fieldName, ValueMetaInterface.TYPE_STRING);
                        writeField(fieldName, vMeta, null, i, true);
                    }
                }
        }
        catch(Exception e)
        {
            logError("Error writing header line: "+e.toString());
            logError(Const.getStackTracker(e));
            retval=true;
        }
        finally
        {
            data.positionX=0;
            data.positionY++;
        }
        incrementLinesOutput();
        return retval;
    }

    /** Builds the output filename for the current copy and split number. */
    public String buildFilename()
    {
        return meta.buildFilename(this, getCopy(), data.splitnr);
    }

    /**
     * Opens the next output workbook: fresh file, append-to-existing, or
     * template-based, then applies sheet name/protection and resets the write
     * position. Increments data.splitnr even on failure.
     */
    public boolean openNewFile()
    {
        boolean retval=false;

        try
        {
            WorkbookSettings ws = new WorkbookSettings();
            ws.setLocale(Locale.getDefault());
            if (!Const.isEmpty(meta.getEncoding()))
            {
                ws.setEncoding(meta.getEncoding());
            }
            String filename=buildFilename();
            if(log.isDebug()) log.logDebug(toString(),Messages.getString("ExcelOutput.Log.OpeningFile",filename));
            data.file = KettleVFS.getFileObject(filename);
            if(meta.isAddToResultFiles())
            {
                // Add this to the result file names...
                ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, data.file, getTransMeta().getName(), getStepname());
                resultFile.setComment("This file was created with an Excel output step by Pentaho Data Integration");
                addResultFile(resultFile);
            }

            // Create the workboook
            if (!meta.isTemplateEnabled())
            {
                /*if (file.exists())
                {
                    // Attempts to load it from the local file failed in the past.
                    // As such we will try to remove the file first...
                    // file.delete();
                }*/

                if(meta.isAppend() && data.file.exists())
                {
                    boolean find=false;
                    int position=0;
                    // Update Workbook
                    data.workbook = Workbook.createWorkbook(KettleVFS.getOutputStream(data.file,false) ,Workbook.getWorkbook(KettleVFS.getInputStream(data.file)));
                    // get available sheets
                    String listSheets[]=data.workbook.getSheetNames();
                    // Let's see if this sheet already exist...
                    for (int i=0;i<listSheets.length;i++)
                    {
                        if(listSheets[i].equals(data.realSheetname))
                        {
                            // We find the sheet
                            find=true;
                            position=i;
                        }
                    }
                    if(find)
                    {
                        // let's delete sheet
                        data.workbook.removeSheet(position);
                    }
                    // and now .. we create the new sheet
                    data.sheet = data.workbook.createSheet(data.realSheetname,data.workbook.getNumberOfSheets()+1);
                }else{
                    // Create a new Workbook
                    data.workbook = Workbook.createWorkbook(KettleVFS.getOutputStream(data.file, false), ws);

                    // Create a sheet?
                    String sheetname = "Sheet1";
                    data.sheet = data.workbook.getSheet(sheetname);
                    if (data.sheet==null)
                    {
                        data.sheet = data.workbook.createSheet(sheetname, 0);
                    }
                }
            }
            else
            {
                FileObject fo = KettleVFS.getFileObject(environmentSubstitute(meta.getTemplateFileName()));
                // create the openFile from the template
                Workbook tmpWorkbook=Workbook.getWorkbook(KettleVFS.getInputStream(fo), ws);
                data.workbook = Workbook.createWorkbook(KettleVFS.getOutputStream(data.file,false), tmpWorkbook);
                tmpWorkbook.close();
                // use only the first sheet as template
                data.sheet = data.workbook.getSheet(0);
                // save inital number of columns
                data.templateColumns = data.sheet.getColumns();
            }

            // Rename Sheet
            if (!Const.isEmpty(environmentSubstitute(meta.getSheetname())))
            {
                data.sheet.setName(data.realSheetname);
            }

            if (meta.isSheetProtected())
            {
                // Protect Sheet by setting password
                data.sheet.getSettings().setProtected(true);
                data.sheet.getSettings().setPassword(environmentSubstitute(meta.getPassword()));
            }

            // Set the initial position...
            data.positionX = 0;
            if (meta.isTemplateEnabled() && meta.isTemplateAppend())
            {
                data.positionY = data.sheet.getRows();
            }
            else
            {
                data.positionY = 0;
            }

            // If we need to write a header, do so...
            /* if (meta.isHeaderEnabled())
            {
                writeHeader();
            }*/
            data.headerWrote=false;
            if(log.isDebug()) log.logDebug(toString(),Messages.getString("ExcelOutput.Log.FileOpened",filename));
            retval=true;
        }
        catch(Exception e)
        {
            logError("Error opening new file", e);
            setErrors(1);
        }
        // System.out.println("end of newFile(), splitnr="+splitnr);

        data.splitnr++;

        return retval;
    }

    /**
     * Writes the footer (if enabled), applies auto-sized column widths, then
     * writes and closes the workbook, output stream and file handle.
     */
    private boolean closeFile()
    {
        boolean retval=false;
        String filename=null;
        try
        {
            if (meta.isFooterEnabled())
            {
                writeHeader();
            }

            if ( data.workbook != null )
            {
                if(meta.isAutoSizeColums())
                {
                    // auto resize columns
                    int nrfields=data.fieldsWidth.length;
                    for(int i=0;i<nrfields;i++)
                    {
                        data.sheet.setColumnView(i,data.fieldsWidth[i]);
                    }
                }
                data.fieldsWidth=null;
                data.workbook.write();
                data.workbook.close();
                data.workbook = null;
                if(data.outputStream!=null)
                {
                    data.outputStream.close();
                    data.outputStream=null;
                }

                if (data.sheet!=null)
                {
                    data.sheet = null;
                }
                if(data.file!=null)
                {
                    filename=data.file.toString();
                    data.file.close();
                    data.file=null;
                }
            }
            //data.formats.clear();
            if(log.isDebug()) log.logDebug(toString(),Messages.getString("ExcelOutput.Log.FileClosed",filename));

            // Explicitly call garbage collect to have file handle
            // released. Bug tracker: PDI-48
            System.gc();
            retval=true;
        }
        catch(Exception e)
        {
            logError("Unable to close openFile file : " + data.file.toString(), e);
            setErrors(1);
        }

        return retval;
    }

    /**
     * Step initialisation: resolves the sheet name and, unless deferred
     * opening is configured, opens the first output file immediately.
     */
    public boolean init(StepMetaInterface smi, StepDataInterface sdi)
    {
        meta=(ExcelOutputMeta)smi;
        data=(ExcelOutputData)sdi;

        if (super.init(smi, sdi))
        {
            data.splitnr=0;
            data.realSheetname=environmentSubstitute(meta.getSheetname());

            if(!meta.isDoNotOpenNewFileInit())
            {
                data.oneFileOpened=true;
                if (openNewFile())
                {
                    return true;
                }
                else
                {
                    logError("Couldn't open file "+meta.getFileName());
                    setErrors(1L);
                    stopAll();
                }
            }else
            {
                return true;
            }
        }
        return false;
    }

    /** Step teardown: closes any open workbook/file before delegating. */
    public void dispose(StepMetaInterface smi, StepDataInterface sdi)
    {
        meta=(ExcelOutputMeta)smi;
        data=(ExcelOutputData)sdi;

        if(data.oneFileOpened) closeFile();
        if(data.file!=null)
        {
            try{
                data.file.close();
                data.file=null;
            }catch(Exception e){}
        }
        super.dispose(smi, sdi);
    }

    // Run is were the action happens!
    public void run()
    {
        BaseStep.runStepThread(this, meta, data);
    }
}
package org.openqa.grid.common;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.commons.lang.WordUtils;

/**
 * Loads the grid parameter descriptions from
 * {@code defaults/GridParameters.properties} and prints command-line help.
 */
public class GridDocHelper {
  private static Properties gridProperties = load("defaults/GridParameters.properties");

  /**
   * Prints usage for every grid parameter, preceded by the given error
   * message when {@code msg} is non-null.
   */
  public static void printHelp(String msg) {
    printHelpInConsole(gridProperties, msg);
  }

  /** Returns the help text for the given grid parameter. */
  public static String getGridParam(String param) {
    return getParam(gridProperties, param);
  }

  /**
   * Looks up the help text for {@code param} in {@code p}.
   *
   * @return the help string, a placeholder when no help is defined, or "" for a null param
   */
  private static String getParam(Properties p, String param) {
    if (param == null) {
      return "";
    }
    // BUGFIX: previously read from the static gridProperties field, silently
    // ignoring the Properties argument the callers pass in.
    String s = (String) p.get(param);
    if (s == null) {
      return "No help specified for " + param;
    } else {
      return s;
    }
  }

  /** Prints an optional error message plus a wrapped usage line per parameter. */
  private static void printHelpInConsole(Properties p, String msg) {
    if (msg != null) {
      System.out.println("Error building the config :" + msg);
    }
    System.out.println("Usage :");
    for (Object key : p.keySet()) {
      System.out.println("-" + key + ":\n\t"
          + WordUtils.wrap(getParam(p, key.toString()), 80, "\n\t", false));
    }
  }

  /**
   * Loads the given classpath resource into a Properties object.
   *
   * @throws RuntimeException if the resource is missing or unreadable
   */
  private static Properties load(String resource) {
    InputStream in =
        Thread.currentThread().getContextClassLoader().getResourceAsStream(resource);
    if (in == null) {
      throw new RuntimeException("bug." + resource + " cannot be loaded.");
    }
    Properties p = new Properties();
    try {
      p.load(in);
      return p;
    } catch (IOException e) {
      // Keep the original failure as the cause instead of discarding it.
      throw new RuntimeException("bug." + resource + " cannot be loaded.", e);
    } finally {
      try {
        in.close(); // BUGFIX: the stream was previously never closed
      } catch (IOException ignored) {
        // best-effort close; nothing useful left to do
      }
    }
  }
}
package com.ircclouds.irc.api.commands;

import java.util.*;

/**
 * CAP command to request 1 or more capabilities.
 *
 * @author Danny van Heumen
 */
public class CapReqCmd extends CapCmd
{
	private final List<String> extensions = new LinkedList<String>();

	/**
	 * Creates a CAP REQ command for one mandatory capability plus any number
	 * of additional ones.
	 */
	public CapReqCmd(String extension, String... extensions)
	{
		this.extensions.add(extension);
		Collections.addAll(this.extensions, extensions);
	}

	/**
	 * Renders the command as {@code CAP REQ :<ext> <ext> ...} terminated by
	 * CRNL. Every extension, including the last, is followed by a space.
	 */
	@Override
	public String asString()
	{
		final StringBuilder cmd = new StringBuilder("CAP REQ :");
		for (final String capability : this.extensions)
		{
			cmd.append(capability).append(' ');
		}
		cmd.append(CRNL);
		return cmd.toString();
	}
}
package com.jaguarlandrover.rvi;

import android.os.AsyncTask;
import android.util.Log;

import javax.net.SocketFactory;
import javax.net.ssl.*;
import java.io.*;
import java.security.*;
import java.security.cert.Certificate;
import java.util.Enumeration;

/**
 * The TCP/IP server @RemoteConnectionInterface implementation.
 *
 * Owns a single client-side TLS socket to the RVI server. Connecting,
 * listening and sending are each performed on their own AsyncTask; results
 * are reported to the registered RemoteConnectionListener.
 */
class ServerConnection implements RemoteConnectionInterface
{
    private final static String TAG = "RVI/ServerConnection___";

    // Receives connect/disconnect/send/receive callbacks; may be null until set.
    private RemoteConnectionListener mRemoteConnectionListener;

    // Remote endpoint to connect to.
    private String  mServerUrl;
    private Integer mServerPort;

    // Certificate presented by the remote peer during the TLS handshake;
    // set on successful connect, cleared on disconnect.
    private java.security.cert.Certificate mRemoteDeviceCertificate;

    // Trust material (server) and identity material (local device) for TLS.
    private KeyStore mServerKeyStore              = null;
    private KeyStore mLocalDeviceKeyStore         = null;
    private String   mLocalDeviceKeyStorePassword = null;

    private SSLSocket mSocket;

    /**
     * Serializes and sends the packet over the open socket on a serial
     * background executor. If the node is not configured or not connected the
     * failure is reported via the listener instead.
     * NOTE(review): when unconfigured/disconnected and the listener is null
     * this dereferences null — confirm a listener is always set before use.
     */
    @Override
    public void sendRviRequest(DlinkPacket dlinkPacket) {
        if (!isConfigured())
            mRemoteConnectionListener.onDidFailToSendDataToRemoteConnection(new Throwable("RVI node is not configured."));
        else if (!isConnected())
            mRemoteConnectionListener.onDidFailToSendDataToRemoteConnection(new Throwable("RVI node is not connected."));
        else
            new SendDataTask(dlinkPacket).executeOnExecutor(AsyncTask.SERIAL_EXECUTOR);
    }

    /** True when a socket exists and reports itself connected. */
    @Override
    public boolean isConnected() {
        return mSocket != null && mSocket.isConnected();
    }

    /**
     * True when url, port and both key stores have been supplied.
     * NOTE(review): {@code mServerPort == 0} auto-unboxes the Integer and will
     * throw a NullPointerException if the port was never set — confirm whether
     * a null check on mServerPort is needed here.
     */
    @Override
    public boolean isConfigured() {
        return !(mServerUrl == null || mServerUrl.isEmpty() || mServerPort == 0 || mServerKeyStore == null || mLocalDeviceKeyStore == null);
    }

    /** Closes any existing connection, then opens a new one asynchronously. */
    @Override
    public void connect() {
        if (isConnected()) disconnect(null);

        connectSocket();
    }

    /**
     * Closes the socket (best effort), clears connection state, and notifies
     * the listener with the triggering error (null for a deliberate close).
     * NOTE(review): e.getLocalizedMessage() may be null, and Log.d may reject
     * a null message — verify on target Android versions.
     */
    @Override
    public void disconnect(Throwable trigger) {
        try {
            if (mSocket != null)
                mSocket.close();
        } catch (Exception e) {
            Log.d(TAG, e.getLocalizedMessage());
        }

        mRemoteDeviceCertificate = null;
        mSocket = null;

        if (mRemoteConnectionListener != null) mRemoteConnectionListener.onRemoteConnectionDidDisconnect(trigger);
    }

    /** Registers the callback object for connection events. */
    @Override
    public void setRemoteConnectionListener(RemoteConnectionListener remoteConnectionListener) {
        mRemoteConnectionListener = remoteConnectionListener;
    }

    // Kicks off the ConnectTask with the current endpoint and key material.
    private void connectSocket() {
        Log.d(TAG, "Connecting the socket: " + mServerUrl + ":" + mServerPort);

        ConnectTask connectAndAuthorizeTask = new ConnectTask(mServerUrl, mServerPort, mServerKeyStore, mLocalDeviceKeyStore, mLocalDeviceKeyStorePassword);
        connectAndAuthorizeTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    }

    /**
     * Background task that builds an SSLContext from the supplied key stores,
     * opens the TLS socket and captures the peer certificate. Returns the
     * failure (or null on success) to onPostExecute.
     */
    private class ConnectTask extends AsyncTask<Void, String, Throwable>
    {
        /**
         * The destination address.
         */
        String dstAddress;
        /**
         * The destination port.
         */
        int dstPort;
        /**
         * The key store of the server certs.
         */
        KeyStore serverKeyStore;
        /**
         * The key store of the client certs.
         */
        KeyStore clientKeyStore;
        /**
         * The key store password of the client certs.
         */
        String clientKeyStorePassword;

        /**
         * Instantiates a new Connect task.
         * @param addr the addr
         * @param port the port
         * @param sks the server key store
         * @param cks the client key store
         * @param cksPass the client key store password
         */
        ConnectTask(String addr, int port, KeyStore sks, KeyStore cks, String cksPass) {
            dstAddress = addr;
            dstPort = port;
            clientKeyStore = cks;
            serverKeyStore = sks;
            clientKeyStorePassword = cksPass;
        }

        @Override
        protected Throwable doInBackground(Void... params) {
            try {
                // NOTE(review): this keystore is created and immediately
                // discarded — looks like dead code left from debugging.
                try {
                    KeyStore keyStore = KeyStore.getInstance("BKS", "BC");
                    keyStore.load(null, null);
                } catch (Exception e) {
                    e.printStackTrace();
                }

                Log.d(TAG, "Creating socket factory");

                // Trust managers come from the server key store, key managers
                // (client identity) from the local device key store.
                String trustManagerAlgorithm = "X509";
                TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(trustManagerAlgorithm);
                trustManagerFactory.init(serverKeyStore);

                String keyManagerAlgorithm = "X509";
                KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(keyManagerAlgorithm);
                keyManagerFactory.init(clientKeyStore, clientKeyStorePassword != null ? clientKeyStorePassword.toCharArray() : null);

                SSLContext context = SSLContext.getInstance("TLS");
                context.init(keyManagerFactory.getKeyManagers(), trustManagerFactory.getTrustManagers(), null);

                SocketFactory sf = context.getSocketFactory();

                Log.d(TAG, "Creating ssl socket");

                mSocket = (SSLSocket) sf.createSocket(dstAddress, dstPort);

                SSLSession session = mSocket.getSession();
                java.security.cert.Certificate[] peerCertificates = session.getPeerCertificates();

                // NOTE(review): the chain-length validation below is disabled;
                // only the first peer certificate is recorded. Confirm whether
                // the single-certificate check should be re-enabled.
                // if (peerCertificates == null || peerCertificates.length != 1) {
                //     throw new Exception("Remote certificate chain is null or contains more than 1 certificate");

                mRemoteDeviceCertificate = peerCertificates[0];

                Log.d(TAG, "Creating ssl socket complete");

            } catch (Exception e) {
                e.printStackTrace();
                return e;
            }

            return null;
        }

        @Override
        protected void onPostExecute(Throwable result) {
            super.onPostExecute(result);

            if (result == null) {
                // TODO: Does the input buffer stream cache data in the case that my async thread sends the auth command before the listener is set up?
                // On success: start the read loop, then notify the listener.
                ListenTask listenTask = new ListenTask();
                listenTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);

                if (mRemoteConnectionListener != null) mRemoteConnectionListener.onRemoteConnectionDidConnect();
            } else {
                if (mRemoteConnectionListener != null) mRemoteConnectionListener.onRemoteConnectionDidFailToConnect(result);

                mSocket = null;
            }
        }
    }

    /**
     * Background task that blocks on the socket's input stream and forwards
     * each received chunk to the listener (on the UI thread, via
     * publishProgress). Ends when the stream hits EOF or an error occurs,
     * after which the connection is torn down.
     */
    private class ListenTask extends AsyncTask<Void, String, Throwable>
    {
        @Override
        protected Throwable doInBackground(Void... params) {
            Log.d(TAG, "Listening on socket...");

            try {
                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(1024);

                byte[] buffer = new byte[1024];

                int bytesRead;
                InputStream inputStream = mSocket.getInputStream();

                // Each read is decoded and published as its own chunk.
                // NOTE(review): decoding per-read assumes UTF-8 sequences never
                // span a read boundary — a multi-byte character split across
                // two reads would be corrupted. Confirm the protocol framing.
                while ((bytesRead = inputStream.read(buffer)) != -1) {
                    byteArrayOutputStream.write(buffer, 0, bytesRead);

                    publishProgress(byteArrayOutputStream.toString("UTF-8"));

                    byteArrayOutputStream.reset();
                }
            } catch (Exception e) {
                e.printStackTrace();
                return e;
            }

            return null;
        }

        @Override
        protected void onProgressUpdate(String... params) {
            super.onProgressUpdate(params);

            String data = params[0];

            if (mRemoteConnectionListener != null) mRemoteConnectionListener.onRemoteConnectionDidReceiveData(data);
        }

        @Override
        protected void onPostExecute(Throwable result) {
            super.onPostExecute(result);

            // The read loop only exits on EOF (result == null) or error; either
            // way the connection is considered finished.
            disconnect(result);
        }
    }

    // TODO: Extract SendDataTask and ListenTask to their own classes that can be called by both BluetoothConnection and ServerConnection
    /**
     * Background task that writes one serialized packet to the socket and
     * reports success or failure to the listener. A failed write also tears
     * the connection down.
     */
    private class SendDataTask extends AsyncTask<Void, Void, Throwable>
    {
        private DlinkPacket mPacket;

        SendDataTask(DlinkPacket packet) {
            mPacket = packet;
        }

        @Override
        protected Throwable doInBackground(Void... params) {
            String data = mPacket.toJsonString();

            try {
                DataOutputStream wr = new DataOutputStream(mSocket.getOutputStream());

                wr.writeBytes(data);
                wr.flush();
            } catch (Exception e) {
                e.printStackTrace();
                return e;
            }

            return null;
        }

        @Override
        protected void onPostExecute(Throwable result) {
            if (result == null) {
                if (mRemoteConnectionListener != null) mRemoteConnectionListener.onDidSendDataToRemoteConnection(mPacket);
            } else {
                if (mRemoteConnectionListener != null) mRemoteConnectionListener.onDidFailToSendDataToRemoteConnection(result);

                disconnect(result);
            }
        }
    }

    /**
     * Sets server url.
     *
     * @param serverUrl the server url
     */
    void setServerUrl(String serverUrl) {
        mServerUrl = serverUrl;
    }

    /**
     * Sets server port.
     *
     * @param serverPort the server port
     */
    void setServerPort(Integer serverPort) {
        mServerPort = serverPort;
    }

    /** Sets the key store holding the trusted server certificates. */
    public void setServerKeyStore(KeyStore serverKeyStore) {
        mServerKeyStore = serverKeyStore;
    }

    /** Sets the key store holding this device's identity. */
    public void setLocalDeviceKeyStore(KeyStore localDeviceKeyStore) {
        mLocalDeviceKeyStore = localDeviceKeyStore;
    }

    /** Sets the password protecting the local device key store. */
    public void setLocalDeviceKeyStorePassword(String localDeviceKeyStorePassword) {
        mLocalDeviceKeyStorePassword = localDeviceKeyStorePassword;
    }

    /** Certificate captured from the peer on connect; null when disconnected. */
    public Certificate getRemoteDeviceCertificate() {
        return mRemoteDeviceCertificate;
    }

    /**
     * Returns this device's certificate from the first alias in the local
     * key store, or null on failure (in which case the connection is also
     * closed with the failure as trigger).
     */
    public Certificate getLocalDeviceCertificate() {
        try {
            if (mLocalDeviceKeyStore == null) throw new Exception("Device keystore is null");

            Enumeration<String> aliases = mLocalDeviceKeyStore.aliases();

            // Only the first alias is consulted.
            String alias = aliases.nextElement();

            KeyStore.PrivateKeyEntry entry = (KeyStore.PrivateKeyEntry) mLocalDeviceKeyStore.getEntry(alias, null);

            return entry.getCertificate();

        } catch (Exception e) {
            e.printStackTrace();

            disconnect(e);
        }

        return null;
    }

    /**
     * Returns the trusted server certificate from the first alias in the
     * server key store, or null on failure (the connection is then closed
     * with the failure as trigger).
     */
    public Certificate getServerCertificate() {
        try {
            if (mServerKeyStore == null) throw new Exception("Server keystore is null");

            Enumeration<String> aliases = mServerKeyStore.aliases();

            // Only the first alias is consulted.
            String alias = aliases.nextElement();

            KeyStore.TrustedCertificateEntry entry = (KeyStore.TrustedCertificateEntry) mServerKeyStore.getEntry(alias, null); // TODO: Maybe check here if have more than one entry?

            return entry.getTrustedCertificate();

        } catch (Exception e) {
            e.printStackTrace();

            disconnect(e);
        }

        return null;
    }
}
package com.kihira.corruption.common;

import com.kihira.corruption.Corruption;
import com.kihira.corruption.common.corruption.CorruptionRegistry;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.TickEvent;
import cpw.mods.fml.relauncher.Side;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * Per-player tick handler driving the corruption mechanic: accrues corruption
 * on the server, rolls for new corruption effects, expires finished effects,
 * ticks active effects on both sides, and spawns client-side footprints.
 */
public class TickHandler {

    // Corruption ceiling used as the dice range for random effect rolls.
    // The "24 hours" comment below suggests it corresponds to a day of ticks
    // at the 5-second accrual rate — TODO confirm.
    private final int CORRUPTION_MAX = 17280;

    @SubscribeEvent
    @SuppressWarnings("unchecked")
    public void onPlayerTick(TickEvent.PlayerTickEvent e) {
        // Only act at the END phase so vanilla player updates have completed.
        if (e.phase == TickEvent.Phase.END) {
            //Main corruption tick
            //Server
            if (e.side == Side.SERVER) {
                if (Corruption.isCorruptionActiveGlobal && CorruptionDataHelper.canBeCorrupted(e.player)) {
                    //5 second
                    //TODO: reduce this time for modjam only?
                    if (e.player.worldObj.getTotalWorldTime() % 100 == 0) {
                        // Accrue one corruption point every 100 ticks (~5s).
                        CorruptionDataHelper.increaseCorruptionForPlayer(e.player, 1);
                        //24 hours
                        // The more corrupted the player, the likelier a new
                        // random effect is applied on this roll.
                        if (e.player.worldObj.rand.nextInt(CORRUPTION_MAX) < CorruptionDataHelper.getCorruptionForPlayer(e.player)) {
                            String corrName = CorruptionRegistry.getRandomCorruptionEffect(e.player);
                            CorruptionRegistry.addCorruptionEffect(e.player, corrName);
                        }
                    }

                    // Expire active effects whose shouldContinue() reports done.
                    if (CorruptionRegistry.currentCorruption.containsKey(e.player.getCommandSenderName())) {
                        Set<String> corruptionNames = CorruptionRegistry.currentCorruption.get(e.player.getCommandSenderName());
                        List<String> toRemove = new ArrayList<String>();
                        for (String corrName : corruptionNames) {
                            if (CorruptionRegistry.corruptionHashMap.containsKey(corrName)) {
                                if (!CorruptionRegistry.corruptionHashMap.get(corrName).shouldContinue(e.player, FMLCommonHandler.instance().getEffectiveSide())) {
                                    toRemove.add(corrName);
                                }
                            }
                        }
                        //To prevent CME's
                        // Removal is deferred to a second pass so the set is
                        // not mutated while being iterated.
                        if (!toRemove.isEmpty()) {
                            for (String corrName : toRemove) {
                                CorruptionRegistry.removeCorruptionEffectFromPlayer(e.player.getCommandSenderName(), corrName);
                            }
                        }
                    }

                    //AfraidOfTheDark
                    // Every 200 ticks, players standing in low light (<= 8)
                    // with enough corruption may gain the afraidOfTheDark
                    // effect.
                    if (e.player.worldObj.getTotalWorldTime() % 200 == 0 && e.player.worldObj.getBlockLightValue((int) e.player.posX, (int) e.player.posY, (int) e.player.posZ) <= 8) {
                        if (CorruptionDataHelper.getCorruptionForPlayer(e.player) > 3000 && e.player.worldObj.rand.nextInt(CORRUPTION_MAX) < CorruptionDataHelper.getCorruptionForPlayer(e.player)) {
                            CorruptionRegistry.addCorruptionEffect(e.player, "afraidOfTheDark");
                        }
                    }
                }

                //Removing corruption
                // When corruption is disabled (globally or for this player),
                // decay their corruption rapidly instead of accruing it.
                if ((!Corruption.isCorruptionActiveGlobal || !CorruptionDataHelper.canBeCorrupted(e.player)) && CorruptionDataHelper.getCorruptionForPlayer(e.player) > 0
                        && e.player.worldObj.getTotalWorldTime() % 10 == 0) {
                    CorruptionDataHelper.decreaseCorruptionForPlayer(e.player, 300);
                }
            }

            //Common
            // Tick every active effect on both client and server.
            if (CorruptionRegistry.currentCorruption.containsKey(e.player.getCommandSenderName())) {
                Set<String> corruptionNames = CorruptionRegistry.currentCorruption.get(e.player.getCommandSenderName());
                for (String corrName : corruptionNames) {
                    if (CorruptionRegistry.corruptionHashMap.containsKey(corrName)) {
                        CorruptionRegistry.corruptionHashMap.get(corrName).onUpdate(e.player, FMLCommonHandler.instance().getEffectiveSide());
                    }
                }
            }

            //Client
            // Cosmetic footprints: probability scales with corruption, rolled
            // every other tick.
            if (e.player.worldObj.isRemote) {
                if (CorruptionDataHelper.canBeCorrupted(e.player) && e.player.worldObj.rand.nextInt(1200) < CorruptionDataHelper.getCorruptionForPlayer(e.player) && e.player.ticksExisted % 2 == 0) {
                    Corruption.proxy.spawnFootprint(e.player);
                }
            }
        }
    }
}
package com.lazerycode.jmeter.mojo;

import java.io.File;

import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;

import com.lazerycode.jmeter.configuration.JMeterArgumentsArray;
import com.lazerycode.jmeter.json.TestConfig;
import com.lazerycode.jmeter.testrunner.TestManager;

/**
 * Goal that runs jmeter based on configuration defined in your pom.<br/>
 * This goal runs within Lifecycle phase {@link LifecyclePhase#INTEGRATION_TEST}.
 */
@Mojo(name = "jmeter", defaultPhase = LifecyclePhase.INTEGRATION_TEST)
@Execute(goal = "configure")
public class RunJMeterMojo extends AbstractJMeterMojo {

	/**
	 * Run all the JMeter tests.
	 *
	 * Skips silently when the configured test files directory does not exist;
	 * otherwise builds the JMeter argument list from the test config written
	 * by the "configure" goal (forced @Execute above), runs the tests, and
	 * records the result-file locations back into the test config for
	 * downstream goals.
	 *
	 * @throws MojoExecutionException MojoExecutionException
	 */
	@Override
	public void doExecute() throws MojoExecutionException {
		getLog().info(" ");
		getLog().info(LINE_SEPARATOR);
		getLog().info(" P E R F O R M A N C E    T E S T S");
		getLog().info(LINE_SEPARATOR);

		// No test directory means nothing to run — not an error.
		if (!testFilesDirectory.exists()) {
			getLog().info("<testFilesDirectory>" + testFilesDirectory.getAbsolutePath() + "</testFilesDirectory> does not exist...");
			getLog().info("Performance tests skipped.");
			return;
		}

		// Derive CLI arguments from the generated test configuration.
		TestConfig testConfig = new TestConfig(new File(testConfigFile));
		JMeterArgumentsArray testArgs = computeJMeterArgumentsArray(true, testConfig.getResultsOutputIsCSVFormat());
		// Maven runs are always headless.
		jMeterProcessJVMSettings.forceHeadless();
		remoteConfig.setPropertiesMap(JMeterConfigurationHolder.getInstance().getPropertiesMap());
		// Test plans are copied to the build directory before execution.
		copyFilesInTestDirectory(testFilesDirectory, testFilesBuildDirectory);

		TestManager jMeterTestManager = new TestManager(testArgs, testFilesBuildDirectory, testFilesIncluded, testFilesExcluded, remoteConfig,
				suppressJMeterOutput, JMeterConfigurationHolder.getInstance().getWorkingDirectory(), jMeterProcessJVMSettings,
				JMeterConfigurationHolder.getInstance().getRuntimeJarName(), reportDirectory, generateReports);
		jMeterTestManager.setPostTestPauseInSeconds(postTestPauseInSeconds);
		getLog().info(" ");
		if (proxyConfig != null) {
			getLog().info(this.proxyConfig.toString());
		}

		// Execute and persist where the result files ended up so later goals
		// (e.g. results verification) can find them.
		testConfig.setResultsFileLocations(jMeterTestManager.executeTests());
		testConfig.writeResultFilesConfigTo(testConfigFile);
	}
}
package de.eightbitboy.ecorealms.entity;

import com.badlogic.ashley.core.Engine;
import com.badlogic.ashley.core.Entity;
import com.badlogic.ashley.core.Family;

/**
 * Thin wrapper around the Ashley entity-component {@link Engine}. Creates the
 * engine, seeds it with an example entity and forwards frame updates to it.
 */
public class EntityEngine {
	private Engine engine;

	public EntityEngine() {
		engine = new Engine();
		addExampleEntity();
		// FIX: removed the unused local
		//   Family positionFamily = Family.all(PositionComponent.class).get();
		// whose value was computed and immediately discarded.
	}

	/**
	 * Advances all entity systems by one frame.
	 *
	 * @param deltaTime elapsed time since the previous update
	 */
	public void update(float deltaTime) {
		engine.update(deltaTime);
	}

	// Registers a demo entity carrying a position and a model component.
	private void addExampleEntity() {
		ExampleEntity entity = new ExampleEntity();
		entity.add(new PositionComponent());
		entity.add(new ModelComponent());
		engine.addEntity(entity);
	}

	private class ExampleEntity extends Entity {
	}
}
package com.librato.metrics.client; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.*; import java.net.HttpURLConnection; import java.net.URL; import java.util.Map; import static java.util.concurrent.TimeUnit.MILLISECONDS; public class DefaultPoster implements IPoster { private static final Logger log = LoggerFactory.getLogger(DefaultPoster.class); @Override public HttpResponse post(String uri, Duration connectTimeout, Duration readTimeout, Map<String, String> headers, byte[] payload) { try { HttpURLConnection connection = open(uri); final int responseCode; final byte[] responseBody; connection.setDoOutput(true); connection.setDoInput(true); connection.setConnectTimeout((int) connectTimeout.to(MILLISECONDS)); connection.setReadTimeout((int) readTimeout.to(MILLISECONDS)); connection.setRequestMethod("POST"); connection.setInstanceFollowRedirects(false); for (String header : headers.keySet()) { connection.setRequestProperty(header, headers.get(header)); } connection.connect(); OutputStream outputStream = connection.getOutputStream(); try { outputStream.write(payload); } finally { close(outputStream); } responseCode = connection.getResponseCode(); InputStream responseStream; if (responseCode / 100 == 2) { responseStream = connection.getInputStream(); } else { responseStream = connection.getErrorStream(); } if(responseStream == null) { log.warn("responseStream null for {} responseCode {}", uri, responseCode); responseBody = new byte[0]; } else { responseBody = readResponse(responseStream); } return new HttpResponse() { @Override public int getResponseCode() { return responseCode; } @Override public byte[] getResponseBody() { return responseBody; } }; } catch (Exception e) { throw new RuntimeException(e); } } HttpURLConnection open(String url) throws IOException { try { return (HttpURLConnection) new URL(url).openConnection(); } catch (ClassCastException ignore) { throw new RuntimeException("URL " + url + " must use either http or https"); } } 
private byte[] readResponse(InputStream in) throws IOException { try { ByteArrayOutputStream bos = new ByteArrayOutputStream(); byte[] buffer = new byte[4096]; int bytesRead; while ((bytesRead = in.read(buffer)) > 0) { bos.write(buffer, 0, bytesRead); } return bos.toByteArray(); } finally { close(in); } } void close(Closeable closeable) { try { if (closeable != null) { closeable.close(); } } catch (IOException e) { log.warn("Could not close " + closeable, e); } } }
package com.gentics.mesh.etc; import static com.gentics.mesh.util.URIUtils.encodeFragment; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import javax.inject.Inject; import javax.inject.Singleton; import javax.naming.InvalidNameException; import com.gentics.mesh.Mesh; import io.vertx.core.Handler; import io.vertx.core.Vertx; import io.vertx.core.logging.Logger; import io.vertx.core.logging.LoggerFactory; import io.vertx.ext.web.Router; import io.vertx.ext.web.RoutingContext; import io.vertx.ext.web.handler.CookieHandler; import io.vertx.ext.web.handler.CorsHandler; import io.vertx.ext.web.handler.LoggerHandler; /** * Central storage for all vertx web request routers. * * Structure: * * <pre> * {@code * ROOT_ROUTER(:coreRouter) -> customRouters * -> apiRouters -> apiSubRouter (eg: /users.., /roles..) * -> projectRouters (eg: /Dummy/nodes) * } * </pre> * * Project routers are automatically bound to all projects. This way only a single node verticle is needed to handle all project requests. * */ @Singleton public class RouterStorage { private static final Logger log = LoggerFactory.getLogger(RouterStorage.class); private Vertx vertx; private static final String ROOT_ROUTER_KEY = "ROOT_ROUTER"; private static final String API_ROUTER_KEY = "API_ROUTER"; private static final String CUSTOM_ROUTER_KEY = "CUSTOM_ROUTER"; public static final String DEFAULT_API_MOUNTPOINT = "/api/v1"; public static final String DEFAULT_CUSTOM_MOUNTPOINT = "/custom"; public static final String PROJECT_CONTEXT_KEY = "mesh-project"; private static RouterStorage instance; @Inject public RouterStorage(CorsHandler corsHandler, Handler<RoutingContext> bodyHandler) { this.vertx = Mesh.vertx(); RouterStorage.instance = this; initAPIRouter(corsHandler, bodyHandler); } public static RouterStorage getIntance() { return instance; } /** * Core routers are routers that are responsible for dealing with routes that are no project routes. 
E.g: /api/v1/admin, /api/v1 */ private Map<String, Router> coreRouters = new HashMap<>(); /** * Custom routers. (E.g.: /demo) */ private Map<String, Router> customRouters = new HashMap<>(); /** * Project routers are routers that handle project rest api endpoints. E.g: /api/v1/dummy, /api/v1/yourprojectname */ private Map<String, Router> projectRouters = new HashMap<>(); /** * Project sub routers are routers that are mounted by project routers. E.g: /api/v1/dummy/nodes, /api/v1/yourprojectname/tagFamilies */ private Map<String, Router> projectSubRouters = new HashMap<>(); /** * The root {@link Router} is a core router that is used as a parent for all other routers. This method will create the root router if non is existing. * * @return the root router */ public Router getRootRouter() { Router rootRouter = coreRouters.get(ROOT_ROUTER_KEY); if (rootRouter == null) { rootRouter = Router.router(vertx); // Root handlers rootRouter.route().handler(LoggerHandler.create()); // TODO add a dedicated error for api router that informs about APPLICATION_JSON requirements. This may not be true for other routes (eg. custom // routes) rootRouter.route().last().handler(DefaultNotFoundHandler.create()); rootRouter.route().failureHandler(FailureHandler.create()); coreRouters.put(ROOT_ROUTER_KEY, rootRouter); } return rootRouter; } /** * Initialise the Root API router and add common handlers to the router. The API router is used to attach subrouters for routes like * /api/v1/[groups|users|roles] */ private void initAPIRouter(CorsHandler corsHandler, Handler<RoutingContext> bodyHandler) { Router router = getAPIRouter(); if (Mesh.mesh().getOptions().getHttpServerOptions().isCorsEnabled()) { router.route().handler(corsHandler); } router.route().handler(bodyHandler); router.route().handler(CookieHandler.create()); } /** * Return or create the custom router which will be the base router for all custom verticles which can be accessed using <code>/custom</code>. 
* * @return */ public Router getCustomRouter() { Router customRouter = coreRouters.get(CUSTOM_ROUTER_KEY); if (customRouter == null) { customRouter = Router.router(vertx); coreRouters.put(CUSTOM_ROUTER_KEY, customRouter); getRootRouter().mountSubRouter(DEFAULT_CUSTOM_MOUNTPOINT, customRouter); } return customRouter; } /** * The api router is a core router which is being used to identify the api and rest api version. This method will create a api router if non is existing. * * @return api router */ public Router getAPIRouter() { Router apiRouter = coreRouters.get(API_ROUTER_KEY); if (apiRouter == null) { apiRouter = Router.router(vertx); coreRouters.put(API_ROUTER_KEY, apiRouter); getRootRouter().mountSubRouter(DEFAULT_API_MOUNTPOINT, apiRouter); } return apiRouter; } /** * Return the map with core routers. * * @return */ public Map<String, Router> getCoreRouters() { return coreRouters; } /** * Get a core api subrouter. A new router will be created id no existing one could be found. * * @param mountPoint * @return existing or new router */ public Router getAPISubRouter(String mountPoint) { // TODO check for conflicting project routers Router apiSubRouter = coreRouters.get(mountPoint); if (apiSubRouter == null) { apiSubRouter = Router.router(vertx); if (log.isDebugEnabled()) { log.debug("Creating subrouter for {" + mountPoint + "}"); } getAPIRouter().mountSubRouter("/" + mountPoint, apiSubRouter); coreRouters.put(mountPoint, apiSubRouter); } return apiSubRouter; } public boolean removeProjectRouter(String name) { Router projectRouter = projectRouters.get(name); if (projectRouter != null) { // TODO umount router from api router? projectRouter.clear(); projectRouters.remove(name); // TODO remove from all routers? return true; } return false; } /** * Add a new project router with the given name to the api router. This method will return an existing router when one already has been setup. 
* * @param name * @return Router for the given project name * @throws InvalidNameException */ public Router addProjectRouter(String name) throws InvalidNameException { String encodedName = encodeFragment(name); if (coreRouters.containsKey(encodedName)) { throw new InvalidNameException("The project name {" + encodedName + "} is conflicting with a core router. Best guess is that an core verticle is already occupying the name. Please choose a different name or remove the conflicting core verticle."); } Router projectRouter = projectRouters.get(encodedName); // TODO synchronize access to projectRouters if (projectRouter == null) { projectRouter = Router.router(vertx); projectRouters.put(name, projectRouter); log.info("Added project router {" + name + "}"); projectRouter.route().handler(ctx -> { ctx.data().put(PROJECT_CONTEXT_KEY, name); ctx.next(); }); getAPIRouter().mountSubRouter("/" + encodedName, projectRouter); mountSubRoutersForProjectRouter(projectRouter, encodedName); } return projectRouter; } /** * Mount all registered project subrouters on the project router. * * @param projectRouter * @param projectRouterName * Name of the project router */ private void mountSubRoutersForProjectRouter(Router projectRouter, String projectRouterName) { for (String mountPoint : projectSubRouters.keySet()) { log.info("Mounting subrouter {" + mountPoint + "} onto given project router. 
{" + projectRouterName + "}"); Router projectSubRouter = projectSubRouters.get(mountPoint); projectRouter.mountSubRouter("/" + mountPoint, projectSubRouter); } } /** * Mounts the given router in all registered project routers * * @param localRouter * @param mountPoint */ public void mountRouterInProjects(Router localRouter, String mountPoint) { for (Entry<String, Router> projectRouterEntry : projectRouters.entrySet()) { log.info("Mounting router onto project router {" + projectRouterEntry.getKey() + "} with mountpoint {" + mountPoint + "}"); projectRouterEntry.getValue().mountSubRouter("/" + mountPoint, localRouter); } } /** * Return the registered project subrouter. * * @return the router or null if no router was found */ public Router getProjectSubRouter(String name) { Router router = projectSubRouters.get(name); if (router == null) { router = Router.router(vertx); log.info("Added project subrouter {" + name + "}"); projectSubRouters.put(name, router); } mountRouterInProjects(router, name); return router; } public Router getCustomSubRouter(String name) { Router router = customRouters.get(name); if (router == null) { router = Router.router(vertx); log.info("Added custom subrouter {" + name + "}"); customRouters.put(name, router); } getCustomRouter().mountSubRouter("/" + name, router); return router; } }
package com.lothrazar.samscontent.cfg; import net.minecraft.entity.passive.EntityChicken; import net.minecraft.entity.passive.EntityCow; import net.minecraft.entity.passive.EntityHorse; import net.minecraft.entity.passive.EntityPig; import net.minecraft.entity.passive.EntityRabbit; import net.minecraft.entity.passive.EntitySheep; import net.minecraft.init.Blocks; import net.minecraft.init.Items; import net.minecraft.item.Item; import net.minecraftforge.common.config.Configuration; import com.lothrazar.samscontent.command.*; import com.lothrazar.samscontent.item.*; public class ConfigFile { private Configuration instance; private String category = ""; public Configuration instance() { return instance; } public ConfigFile(Configuration c) { instance = c; mob_changes(); commands(); blocks(); recipes_new(); recipes_changes(); creative(); items(); harvesting_changes(); terrain_generation(); dungeon_chests(); debug_info(); mob_spawning(); potions(); nature(); pocket_edition(); category = "tweaks";//these are the misc. changes i made that have no clear category yet flintPumpkin = instance.getBoolean("flint_pumpkin",category, true, "Lighting a pumpkin with a flint and steel turns it into a lit pumpkin (jack-o-lantern). "); betterBonemeal = instance.getBoolean("better_bonemeal",category, true, "Bonemeal grows more things: lilypads, all flowers, and reeds. "); increasedStackSizes = instance.getBoolean("stack_size",category, true, "While true, most vanilla items and blocks have their max stack size increased to 64 (not tools/armor/potions). 
"); potionStackSize = instance.getInt("stack_size_potion",category, 1,1,3, "Potion stack size can be increased to three, but not by default."); moreFuel = instance.getBoolean("more_fuel",category, true, "More can be used as furnace fuel: seeds, leaves, paper, shrubs, and more."); swiftDeposit = instance.getBoolean("swift_deposit",category, true, "Punch a chest while sneaking to merge items from your inventory into existing item stacks in the chest." ); smartEnderchest = instance.getBoolean("smart_enderchest",category, true, "Attack with the ender chest to open it without placing it." ); skullSignNames = instance.getBoolean("skull_sign_names",category, true, "Hit a sign with a player skull to make the skull take on the name (skin) of the first word/line on the sign"); playerDeathCoordinates = instance.getBoolean("player_death_coordinates",category, true, "Players will have their death point coordinates broadcast in chat."); dropPlayerSkullOnDeath = instance.getBoolean("drop_player_skull",category, true, "Players will drop their skull when they die."); fragileTorches = instance.getBoolean("fragile_torches",category, true, "Torches have a chance to break when living entity colides with it (unless it is a sneaking player)."); if(instance.hasChanged()){ instance.save(); } } public void pocket_edition() { category = "pocket_edition"; beetroot = instance.getBoolean( "beetroot",category,true, "Add beetroot, similar to pocket edition. Use a golden hoe to get seeds. " ); } private void mob_changes() { category = "mob_changes"; livestockLootMultiplier = instance.getInt("livestock_multiplier",category, 3,1,10, "Factor to increase drops from livestock: including sheep, chicken, horse, cow, rabbit, and also pigs get double this factor again. Useful on servers because less animals being collected and bred = less lag. 
(use 1 for vanilla behavior)");
        petNametagDrops = instance.getBoolean("nametag_drops",category, true, "Some mobs that are named drop a name tag when they die (wolf, ocelot, villager, bat, rabbit, horse).");
        //TODO: RESPAWNING?
        petNametagChat = instance.getBoolean("nametag_death_messages",category, true, "Non-player entities that are named with a Name Tag send a chat death message when they die.");
        removeZombieCarrotPotato = instance.getBoolean("remove_zombie_carrot_potato",category, true, "Disable these zombie drops.");
        chanceZombieChildFeather = instance.getInt("chance_zombie_child_feather",category, 5,0,100, "Percent chance that a child zombie will drop a feather (so 0 for vanilla).");
        //TODO: zombie pigman rare pork chop. like beta
        chanceZombieVillagerEmerald = instance.getInt("chance_zombie_villager_emerald",category, 5,0,100, "Percent chance that a villager zombie will drop an emerald (so 0 for vanilla).");
        endermenDropCarryingBlock = instance.getBoolean("endermen_drop_carrying_block",category, true, "Endermen will always drop any block they are carrying.");
    }

    /** Loads sapling/plant behavior options from the "nature" config category. */
    private void nature() {
        category = "nature";
        plantDespawningSaplings = instance.getBoolean("sapling_plant_despawn",category, true, "When a sapling (or mushroom) despawns while sitting on grass or dirt, it will instead attempt to plant itself.");
        saplingGrowthRestricted = instance.getBoolean("sapling_biome_restricted",category, true, "Sapling growth is restricted to only their native biomes (for example, birch trees will not grow in roofed forests).");
        saplingAllNether = instance.getBoolean("sapling_nether",category, false, "If true, all saplings grow in the nether (ignoring sapling_biome_restricted).");
        saplingAllEnd = instance.getBoolean("sapling_end",category, false, "If true, all saplings grow in the end (ignoring sapling_biome_restricted)");
    }

    /** Loads world-generation toggles. */
    private void terrain_generation() {
        category = "terrain_generation";
        //TODO: also add dirt and sand!?!?!?
        worldGenClayOceans = instance.getBoolean("clay_oceans",category, true, "Clay can generate in oceans just like it used to in the old days. It replaces the gravel in patches.");
    }

    /** Loads potion effect IDs (exposed only to let users resolve ID conflicts with other mods) and tuning values. */
    private void potions() {
        category = "potions";
        potionIdWaterwalk = instance.getInt("potion_waterwalk_id",category, 40,33,200, "ID is only exposed to avoid conflicts with other mods.");
        potionIdSlowfall = instance.getInt("potion_slowfall_id",category, 41,33,200, "ID is only exposed to avoid conflicts with other mods.");
        potionIdFlying = instance.getInt("potion_flying_id",category, 42,33,200, "ID is only exposed to avoid conflicts with other mods. ");
        potionIdLavawalk = instance.getInt("potion_lavawalk_id",category, 43,33,200, "ID is only exposed to avoid conflicts with other mods.");
        potionIdEnder = instance.getInt("potion_ender_id",category, 44,33,200, "ID is only exposed to avoid conflicts with other mods.");
        potionIdFrozen = instance.getInt("potion_frost_id",category, 45,33,200, "ID is only exposed to avoid conflicts with other mods.");
        slowfallSpeed = instance.getFloat("potion_slowfall_speed",category, 0.41F,0.1F,1F, "This factor affects how much the slowfall potion slows down the entity.");
    }

    /** Loads toggles for items added to the creative inventory. */
    private void creative() {
        category = "creative_inventory_added";
        //no comment on purpose. more readable, less vertical space
        mushroomBlocksCreativeInventory = instance.get(category,"mushroomBlocks", true).getBoolean();
        barrierCreativeInventory = instance.get(category,"barrier", true).getBoolean();
        dragonEggCreativeInventory = instance.get(category,"dragonEgg", true).getBoolean();
        farmlandCreativeInventory = instance.get(category,"farmland", true).getBoolean();
        spawnerCreativeInventory = instance.get(category,"spawner", true).getBoolean();
    }

    /** Loads changes to existing vanilla recipes. */
    private void recipes_changes() {
        category = "recipes_changes";
        furnaceNeedsCoal = instance.getBoolean("furnace_coal",category, true, "If true, you cannot craft a furnace with only 8 cobblestone, it will also require one coal in the center.");
        smoothstoneToolsRequired = instance.getBoolean("smoothstone_tools",category, true, "If true, all stone tools will require smoothstone instead of cobble.");
        tieredArmor = instance.getBoolean("tiered_armor",category, true, "If true, crafting iron armor requires repaired leather armor as part of the recipe, AND diamond armor requires chain mail.");
    }

    /** Loads block-hardness overrides and harvest-tool changes. */
    private void harvesting_changes() {
        category = "harvesting_changes";
        /* String csv = instance.getString("harvestOnlyShovel",category, "minecraft:dirt,minecraft:sand", "If these blocks are not harvested by a shovel, they will break but have no drops.");
        HandlerPlayerHarvest.setShovelFromCSV(csv);
        String csvaxe = instance.getString("harvestOnlyAxe",category, "minecraft:log,minecraft:log2", "If these blocks are not harvested by an axe, they will break but have no drops.");
        HandlerPlayerHarvest.seAxeFromCSV(csvaxe); */
        harvestGlassPickaxe = instance.getBoolean("harvest_glass_pickaxe",category, true, "Sets the pickaxe as the correct tool to harvest glass (by default there is no correct glass tool).");
        obsidianHardness = instance.getInt("obsidian_hardness",category, 10,1,50, "Hardness level of Obsidian (vanilla is 50).");
        // NOTE(review): redstoneOreHardness is declared as float but loaded with getInt; the implicit
        // int-to-float widening is harmless, but the field type is inconsistent with its siblings.
        redstoneOreHardness = instance.getInt("redstone_ore_hardness",category, 6,1,50, "Hardness level of redstone ore (vanilla is 3).");
        diamondOreHardness = instance.getInt("diamond_ore_hardness",category, 10,1,50, "Hardness level of diamond ore (vanilla is 3).");
        emeraldOreHardness = instance.getInt("emerald_ore_hardness",category, 12,1,50, "Hardness level of emerald ore (vanilla is 3).");
        spawnerHardness = instance.getInt("spawner_hardness",category, 50,1,50, "Hardness level of mob spawners (vanilla is 5).");
    }

    /** Loads toggles for extra biome-specific natural mob spawns. */
    private void mob_spawning() {
        category = "mob_spawning";
        spawnBlazeDesertHills = instance.get(category,"blaze_desertHills", true).getBoolean();
        spawnMagmaCubeDesert = instance.get(category,"magmaCube_Desert", true).getBoolean();
        spawnCaveSpiderMesa = instance.get(category,"caveSpider_Mesa", true).getBoolean();
        spawnCaveSpiderRoofedForest = instance.get(category,"caveSpider_RoofedForest", true).getBoolean();
        spawnSnowgolemsIceMountains = instance.get(category,"snowgolems_IceMountains", true).getBoolean();
        spawnGhastDeepOcean = instance.get(category,"ghast_DeepOcean", true).getBoolean();
        spawnHorseIcePlains = instance.get(category,"horse_IcePlains", true).getBoolean();
        spawnHorseOceanIslands = instance.get(category,"horse_OceanIslands", true).getBoolean();
        spawnHorseExtremeHills = instance.get(category,"horse_ExtremeHills", true).getBoolean();
        spawnVillagerExtremeHills = instance.get(category,"villager_ExtremeHills", true).getBoolean();
        //"Villagers spawn naturally in Extreme Hills (not village buildings, it just rarely spawns a villager instead of another passive mob). ");
        spawnCaveSpiderJungle = instance.get(category,"caveSpider_Jungle", true).getBoolean();
    }

    /** Loads toggles for extra lines on the F3 debug screen. */
    private void debug_info() {
        category = "debug_screen_f3";
        debugClearRight = instance.getBoolean("clear_right",category, false, "Clears the right side. " );
        debugSlime = instance.getBoolean("slime",category, true, "Shows if you are standing in a slime chunk." );
        debugHorseInfo = instance.getBoolean("horse",category, true, "Shows info on any horse ridden including speed, jump height, species.");
        debugVillageInfo = instance.getBoolean("village",category, true, "Shows info on any village you are standing in.");
    }

    /** Loads toggles for extra dungeon-chest loot. */
    private void dungeon_chests() {
        category = "more_chest_loot";
        lootObsidian = instance.get(category,"obsidian", true).getBoolean();
        lootAllRecords = instance.get(category,"records", true).getBoolean();
        lootGlowstone = instance.get(category,"glowstone", true).getBoolean();
        lootQuartz = instance.get(category,"quartz", true).getBoolean();
    }

    /** Loads toggles and tuning for the mod's added items. */
    private void items() {
        category = "items";
        enderBook = instance.getBoolean( "ender_book",category,true, " Craft an ender book that lets you save a waypoint, and then teleport to it later (single use).");
        chest_sack = instance.getBoolean("chest_sack",category, true, "Craft an empty sack that can transport chests by turning them into sacks; place the full sack to re-create the full chest. Items with NBT data (enchantments and more) will pop out on the ground.");
        appleChocolate = instance.get( category,"apple_chocolate",true).getBoolean();
        appleEmerald = instance.get( category,"apple_emerald",true).getBoolean();
        appleLapis = instance.get(category, "apple_lapis",true).getBoolean();
        appleDiamond = instance.get(category, "apple_diamond",true).getBoolean();
        // NOTE(review): field is appleNetherStar but the config key is "apple_netherwart" -
        // key kept as-is so existing config files keep working; confirm which name is intended.
        appleNetherStar = instance.get(category, "apple_netherwart",true).getBoolean();
        fire_charge_throw = instance.getBoolean("fire_charge_throw",category, true, "Craft new version of the fire charge that is throwable (as if it came out of a dispenser).");
        frozen_snowball = instance.getBoolean("frozen_snowball",category, true, "Throw a frozen snowball that freezes water and causes a short icy potion effect to anything it hits.");
        carbon_paper = instance.getBoolean("carbon_paper",category, true, "Craft a wand that can copy and paste note blocks and signs.");
        ItemMagicHarvester.RADIUS = instance.getInt("harvest_charge.radius",category, 16,1,64, "Range in all directions.");
        harvest_charge = instance.getBoolean("harvest_charge",category, true, "This harvests a large area of crops at once while also replanting for you.");
        respawn_egg = instance.getBoolean("respawn_egg",category, true, "Use an empty respawn egg to turn an mob into a respawn egg. This works the same as a regular spawn egg, but does not interact with mob spawners. Works only on livestock/passive mobs, not hostiles.");
        ItemWandTransform.DURABILITY = instance.getInt("wand_transform.durability",category, 200,1,999, "Durability (number of uses in survival).");
        wandTransform = instance.getBoolean("wand_transform",category, true, "Craft a wand that will transform the targeted block by its metadata value. Does not work on every block in the game, but it does allow you to use otherwise obtainable values (mushroom blocks, logs, etc). ");
        /* category = parentCateory + ".wandProspect";
        ItemWandProspect.DURABILITY = instance.getInt("durability",category, 200,1,999, "Durability (number of uses in survival).");
        ItemWandProspect.RADIUS = instance.getInt("radius",category, 16,1,64, "Range in all directions.");
        wandProspect = instance.getBoolean("wandProspect",category, true, "Craft a wand that will prospect the nearby area for diamonds."); */
        wandBuilding = instance.getBoolean( "wand_building", category,true, "Can craft and use a building wand that can store many stacks of items, and replace blocks without mining.");
        ItemWandBuilding.DURABILITY = instance.getInt("wand_building.durability",category, 200,1,999, "Durability (number of uses in survival).");
        ItemWandBuilding.replaceBedrock = instance.getBoolean("wand_building.replaceBedrock", category ,true, "Set true to allow the building wand to affect bedrock. " );
        ItemWandBuilding.replaceObsidian = instance.getBoolean("wand_building.replaceObsidian", category ,true, "Set true to allow the building wand to affect obsidian. " );
        ItemWandBuilding.replaceTileEntities = instance.getBoolean("wand_building.replaceTileEntities", category ,true, "Set true to allow the building wand to affect Tile Entities - which is anything with an invnetory " + "(such as chest or dispenser). " );
        ItemWandWater.DURABILITY = instance.getInt("wand_water.durability",category, 50,1,999, "Durability (number of uses in survival).");
        wandWater = instance.getBoolean("wand_water",category, true, "Craft a wand that places water.");
        lightning_charge = instance.getBoolean("lightning_charge",category, true, "Works like a fire charge, but it spawns lightning instead of fire.");
    }

    /** Loads toggles for the mod's added blocks. */
    private void blocks() {
        category = "blocks";
        //TODO: one config for the blocks
        storeBucketsBlock = instance.getBoolean( "storeBuckets",category,true, "A block that stores any number of milk/water/lava buckets (click to insert / withdraw).");
        shearSheepBlock = instance.getBoolean( "shearSheep",category,true, "Shears adult sheep that collide with this block.");
        fishingNetBlock = instance.getBoolean( "fishing_net",category,true, "Place the fishing block in deep water and it will randomly spawn fish with the same odds as a pole (but no treasures or junk).");
        xRayBlock = instance.getBoolean( "chunk_error_xray",category,true, "Create an xray block to see through the world at the block location, in the same way a chunk error would. Intended for single player, not for cheating on servers.");
        weatherBlock = instance.getBoolean( "weather",category,true, "Craft block that will run /toggledownfall whenever it gets a redstone signal.");
        // NOTE(review): the descriptions of teleport_bed and teleport_spawn appear swapped
        // relative to the field names; left untouched pending confirmation against the block code.
        teleportBedBlock = instance.getBoolean( "teleport_bed",category,true, "Command block that teleports you to the world spawn");
        teleportSpawnBlock = instance.getBoolean( "teleport_spawn",category,true, "Command block that teleports you to your bed");
        gameruleBlockRegen = instance.getBoolean( "gamerule_naturalregen",category,true, "Craft blocks that toggle '/gamerule naturalRegenration' on redstone signal. (Can never be opened or edited like a regular command block).");
        gameruleBlockDaylight = instance.getBoolean( "gamerule_daylightcycle",category,true, "Craft blocks that toggle '/gamerule doDaylightCycle' on redstone signal. (Can never be opened or edited like a regular command block).");
        gameruleBlockFiretick = instance.getBoolean( "gamerule_firetick",category,true, "Craft blocks that toggle '/gamerule doFireTick' on redstone signal. (Can never be opened or edited like a regular command block).");
        gameruleBlockMobgrief = instance.getBoolean( "gamerule_mobgrief",category,true, "Craft blocks that toggle '/gamerule doMobGriefing' on redstone signal. (Can never be opened or edited like a regular command block).");
    }

    /** Loads toggles for brand-new recipes added by the mod. */
    private void recipes_new() {
        category = "recipes_new";
        netherwartPurpleDye = instance.getBoolean( "netherwart_purple_dye",category,true, "Craft bonemeal and netherwart into purple dye.");
        simpleDispenser = instance.getBoolean( "simple_dispenser",category,true, "Craft a dispenser with string in the center instead of a bow. (Since string is stackable, this makes crafting tons of them much faster and cheaper).");
        craftBooksWithoutLeather = instance.getBoolean( "books_without_leather",category,true, "This allows use the old book crafting recipe from previous versions of the game; three paper but no leather needed.");
        craftableTransmuteRecords = instance.getBoolean( "transmute_records",category,true, "This allows you to surround any record in emeralds to transmute it into a different record.");
        craftableBonemealColouredWool = instance.getBoolean( "bonemeal_coloured_wool",category,true ,"Allows you to dye coloured wool back to white using bonemeal" );
        uncraftGeneral = instance.getBoolean( "uncrafting",category,true, "uncrafting: craft or smelt blocks back into their ingredients. Often it is not a perfect trade. " + "Example: Craft stairs back into blocks using a 4x4 pattern." );
        craftableMushroomBlocks = instance.getBoolean( "mushroom_blocks",category,true ,"Craft mushroom blocks. ");
    }

    /** Loads the command toggles and their per-command needs_op flags. */
    private void commands() {
        category = "commands";
        kit = instance.getBoolean("kit",category, true, "Use /kit to give yourself kit items. Can only be done once each time you die.");
        String csv = instance.getString("kit.items",category, "minecraft:wooden_pickaxe,minecraft:wooden_sword", "Using /kit gives the following item. Each must have minecraft:item or modname:item, no spaces and split by commas.");
        CommandKit.setItemsFromString(csv);
        home = instance.getBoolean("home",category, true, "Use /home to go to the players spawn point, as defined by a bed.");
        // FIX(review): this assignment previously appeared twice with the identical key/default;
        // the redundant duplicate was removed. Behavior is unchanged.
        CommandHome.REQUIRES_OP = instance.getBoolean("home.needs_op",category, false, "Command is restricted to players with OP (or single player worlds with cheats enabled).");
        worldhome = instance.getBoolean("worldhome",category, true, "Use /worldhome to go to the worlds global spawn point.");
        // NOTE(review): "worldhomehome.needs_op" looks like a typo of "worldhome.needs_op";
        // key kept as-is so existing config files keep working.
        CommandWorldHome.REQUIRES_OP = instance.getBoolean("worldhomehome.needs_op",category, false, "Command is restricted to players with OP (or single player worlds with cheats enabled).");
        searchspawner = instance.getBoolean("searchspawner",category, true, "Players can search for spawners placed in the world. Result is only chat output.");
        CommandSearchSpawner.REQUIRES_OP = instance.getBoolean("searchspawner.needs_op",category, false, "Command is restricted to players with OP (or single player worlds with cheats enabled).");
        searchtrade = instance.getBoolean("searchtrade",category, true, "Players can search the trades of nearby villagers. Result is only chat output.");
        CommandSearchTrades.REQUIRES_OP = instance.getBoolean("searchtrade.needs_op",category, false, "Command is restricted to players with OP (or single player worlds with cheats enabled).");
        searchitem = instance.getBoolean("searchitem",category, true, "Players can search nearby chests for items. Result is only chat output." );
        CommandSearchItem.REQUIRES_OP = instance.getBoolean("searchitem.needs_op",category, false, "Command is restricted to players with OP (or single player worlds with cheats enabled).");
        enderchest = instance.getBoolean("enderchest",category, true, "Players can open their enderchest with a command, no item needed." );
        // enderchest is the only command whose needs_op defaults to true.
        CommandEnderChest.REQUIRES_OP = instance.getBoolean("enderchest.needs_op",category, true, "Command is restricted to players with OP (or single player worlds with cheats enabled).");
        simplewaypoint = instance.getBoolean("simplewaypoint",category, true, "Command that lets players save waypoints that then show up in the F3 debug screen, so we can navigate back to it (no tp)." );
        CommandSimpleWaypoints.REQUIRES_OP = instance.getBoolean("simplewaypoint.needs_op",category, false, "Command is restricted to players with OP (or single player worlds with cheats enabled).");
        todo = instance.getBoolean("todo",category, true, "Command that lets players use /todo myreminder text, which will then show whatever text they put on the F3 debug screen.");
        CommandTodoList.REQUIRES_OP = instance.getBoolean("todo.needs_op",category, false, "Command is restricted to players with OP (or single player worlds with cheats enabled).");
    }

    // --- config-backed fields, populated by the loader methods above ---
    public boolean swiftDeposit; public boolean smartEnderchest; public boolean increasedStackSizes; public boolean moreFuel;
    public boolean skullSignNames; public boolean craftableTransmuteRecords; public boolean craftableBonemealColouredWool;
    public boolean craftBooksWithoutLeather; public boolean betterBonemeal; public boolean decorativeBlocks; public boolean uncraftGeneral;
    public boolean fishingNetBlock; public boolean xRayBlock; public boolean enderBook; public boolean weatherBlock;
    public boolean craftableMushroomBlocks; public boolean searchtrade; public boolean searchitem; public boolean killall;
    public boolean enderchest; public boolean simplewaypoint; public boolean todo; public boolean kit; public boolean home;
    public boolean worldhome; public boolean lootObsidian; public boolean lootAllRecords; public boolean lootGlowstone;
    public boolean lootQuartz; public boolean appleDiamond; public boolean appleLapis; public boolean appleChocolate;
    public boolean appleEmerald; public boolean gameruleBlockRegen; public boolean gameruleBlockDaylight; public boolean gameruleBlockFiretick;
    public boolean gameruleBlockMobgrief; public boolean debugSlime; public boolean debugHorseInfo; public boolean debugClearRight;
    public boolean debugVillageInfo; public boolean spawnBlazeDesertHills; public boolean spawnMagmaCubeDesert; public boolean spawnCaveSpiderMesa;
    public boolean spawnCaveSpiderRoofedForest; public boolean spawnSnowgolemsIceMountains; public boolean spawnGhastDeepOcean;
    public boolean spawnHorseIcePlains; public boolean spawnHorseOceanIslands; public boolean spawnHorseExtremeHills;
    public boolean craftWoolDye8; public boolean craftRepeaterSimple; public boolean craftMinecartsSimple; public boolean petNametagDrops;
    public boolean spawnVillagerExtremeHills; public boolean teleportBedBlock; public boolean teleportSpawnBlock;
    public boolean spawnCaveSpiderJungle; public boolean appleNetherStar; public boolean smoothstoneToolsRequired; public boolean tieredArmor;
    public boolean furnaceNeedsCoal; public boolean plantDespawningSaplings; public boolean wandBuilding; public boolean simpleDispenser;
    public boolean dropPlayerSkullOnDeath; public boolean searchspawner; public boolean mushroomBlocksCreativeInventory;
    public boolean barrierCreativeInventory; public boolean dragonEggCreativeInventory; public boolean farmlandCreativeInventory;
    public boolean spawnerCreativeInventory; public boolean fragileTorches; public boolean removeZombieCarrotPotato;
    public boolean petNametagChat; public boolean playerDeathCoordinates;
    public int obsidianHardness; public int diamondOreHardness; public int emeraldOreHardness; public int spawnerHardness;
    public boolean chest_sack; public boolean carbon_paper; public boolean harvest_charge; public boolean respawn_egg;
    public boolean wandTransform; public int livestockLootMultiplier; public int potionIdWaterwalk; public int potionIdSlowfall;
    public int potionIdFlying; public float slowfallSpeed; public boolean flintTool; public int potionIdLavawalk;
    public boolean netherwartPurpleDye; public boolean worldGenClayOceans; public boolean saplingGrowthRestricted;
    public boolean saplingAllNether; public boolean saplingAllEnd; public int potionIdEnder; public boolean wandWater;
    public boolean harvestGlassPickaxe; public boolean lightning_charge; public boolean shearSheepBlock; public boolean storeBucketsBlock;
    public boolean beetroot; public boolean flintPumpkin; public boolean endermenDropCarryingBlock; public boolean fire_charge_throw;
    public boolean frozen_snowball; public int potionStackSize; public int potionIdFrozen; public int chanceZombieChildFeather;
    public int chanceZombieVillagerEmerald;
    // NOTE(review): declared float although loaded via getInt in harvesting_changes();
    // type left unchanged to avoid breaking external references.
    public float redstoneOreHardness;
}
package hex.genmodel.tools; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import hex.genmodel.MojoModel; import hex.genmodel.algos.tree.ConvertTreeOptions; import hex.genmodel.algos.gbm.GbmMojoModel; import hex.genmodel.algos.tree.SharedTreeGraph; import hex.genmodel.algos.tree.SharedTreeGraphConverter; import hex.genmodel.algos.tree.TreeBackedMojoModel; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.*; import java.util.List; import static water.util.JavaVersionUtils.JAVA_VERSION; /** * Print dot (graphviz) representation of one or more trees in a DRF or GBM model. */ public class PrintMojo implements MojoPrinter { protected MojoModel genModel; protected Format format = Format.dot; protected int treeToPrint = -1; protected int maxLevelsToPrintPerEdge = 10; protected boolean detail = false; protected String outputFileName = null; protected String optionalTitle = null; protected PrintTreeOptions pTreeOptions; protected boolean internal; protected final String tmpOutputFileName = "tmpOutputFileName.gv"; public static void main(String[] args) { MojoPrinter mojoPrinter = null; if (JAVA_VERSION.isKnown() && JAVA_VERSION.getMajor() > 7) { ServiceLoader<MojoPrinter> mojoPrinters = ServiceLoader.load(MojoPrinter.class); for (MojoPrinter printer : mojoPrinters) { if (printer.supportsFormat(getFormat(args))) { mojoPrinter = printer; } } if (mojoPrinter == null) { System.out.println("No supported MojoPrinter for format required found. 
Please make sure you are using h2o-genmodel.jar for executing this tool."); System.exit(1); } } else { mojoPrinter = new PrintMojo(); } // Parse command line arguments mojoPrinter.parseArgs(args); // Run the main program try { mojoPrinter.run(); } catch (Exception e) { e.printStackTrace(); System.exit(2); } // Success System.exit(0); } @Override public boolean supportsFormat(Format format) { if (Format.png.equals(format)){ return false; } else { return true; } } static Format getFormat(String[] args) { for (int i = 0; i < args.length; i++) { if (args[i].equals("--format")) { try { return Format.valueOf(args[++i]); } catch (Exception e) { // invalid format will be handled in parseArgs() return null; } } } return null; } private void loadMojo(String modelName) throws IOException { genModel = MojoModel.load(modelName); } protected static void usage() { System.out.println("Emit a human-consumable graph of a model for use with dot (graphviz)."); System.out.println("The currently supported model types are DRF, GBM and XGBoost."); System.out.println(); System.out.println("Usage: java [...java args...] hex.genmodel.tools.PrintMojo [--tree n] [--levels n] [--title sss] [-o outputFileName]"); System.out.println(); System.out.println(" --format Output format. For .png output at least Java 8 is required."); System.out.println(" dot|json|raw|png [default dot]"); System.out.println(); System.out.println(" --tree Tree number to print."); System.out.println(" [default all]"); System.out.println(); System.out.println(" --levels Number of levels per edge to print."); System.out.println(" [default 10]"); System.out.println(); System.out.println(" --title (Optional) Force title of tree graph."); System.out.println(); System.out.println(" --detail Specify to print additional detailed information like node numbers."); System.out.println(); System.out.println(" --input | -i Input mojo file."); System.out.println(); System.out.println(" --output | -o Output filename. 
Taken as a directory name in case of .png format and multiple trees to visualize."); System.out.println(" [default stdout]"); System.out.println(" --decimalplaces | -d Set decimal places of all numerical values."); System.out.println(); System.out.println(" --fontsize | -f Set font sizes of strings."); System.out.println(); System.out.println(" --internal Internal H2O representation of the decision tree (splits etc.) is used for generating the GRAPHVIZ format."); System.out.println(); System.out.println(); System.out.println("Example:"); System.out.println(); System.out.println(" (brew install graphviz)"); System.out.println(" java -cp h2o.jar hex.genmodel.tools.PrintMojo --tree 0 -i model_mojo.zip -o model.gv -f 20 -d 3"); System.out.println(" dot -Tpng model.gv -o model.png"); System.out.println(" open model.png"); System.out.println(); System.exit(1); } public void parseArgs(String[] args) { int nPlaces = -1; int fontSize = 14; // default size is 14 boolean setDecimalPlaces = false; try { for (int i = 0; i < args.length; i++) { String s = args[i]; switch (s) { case "--format": i++; if (i >= args.length) usage(); s = args[i]; try { format = Format.valueOf(s); } catch (Exception e) { System.out.println("ERROR: invalid --format argument (" + s + ")"); System.exit(1); } break; case "--tree": i++; if (i >= args.length) usage(); s = args[i]; try { treeToPrint = Integer.parseInt(s); } catch (Exception e) { System.out.println("ERROR: invalid --tree argument (" + s + ")"); System.exit(1); } break; case "--levels": i++; if (i >= args.length) usage(); s = args[i]; try { maxLevelsToPrintPerEdge = Integer.parseInt(s); } catch (Exception e) { System.out.println("ERROR: invalid --levels argument (" + s + ")"); System.exit(1); } break; case "--title": i++; if (i >= args.length) usage(); optionalTitle = args[i]; break; case "--detail": detail = true; break; case "--input": case "-i": i++; if (i >= args.length) usage(); s = args[i]; loadMojo(s); break; case "--fontsize": case 
"-f": i++; if (i >= args.length) usage(); s = args[i]; fontSize = Integer.parseInt(s); break; case "--decimalplaces": case "-d": i++; if (i >= args.length) usage(); setDecimalPlaces=true; s = args[i]; nPlaces = Integer.parseInt(s); break; case "--raw": format = Format.raw; break; case "--internal": internal = true; break; case "-o": case "--output": i++; if (i >= args.length) usage(); outputFileName = args[i]; break; default: System.out.println("ERROR: Unknown command line argument: " + s); usage(); break; } } pTreeOptions = new PrintTreeOptions(setDecimalPlaces, nPlaces, fontSize, internal); } catch (Exception e) { e.printStackTrace(); usage(); } } protected void validateArgs() { if (genModel == null) { System.out.println("ERROR: Must specify -i"); usage(); } } public void run() throws Exception { validateArgs(); PrintStream os; if (outputFileName != null) { os = new PrintStream(new FileOutputStream(new File(outputFileName))); } else { os = System.out; } if (genModel instanceof SharedTreeGraphConverter) { SharedTreeGraphConverter treeBackedModel = (SharedTreeGraphConverter) genModel; ConvertTreeOptions options = new ConvertTreeOptions().withTreeConsistencyCheckEnabled(); final SharedTreeGraph g = treeBackedModel.convert(treeToPrint, null, options); switch (format) { case raw: g.print(); break; case dot: g.printDot(os, maxLevelsToPrintPerEdge, detail, optionalTitle, pTreeOptions); break; case json: if (!(treeBackedModel instanceof TreeBackedMojoModel)) { System.out.println("ERROR: Printing XGBoost MOJO as JSON not supported"); System.exit(1); } printJson((TreeBackedMojoModel) treeBackedModel, g, os); break; } } else { System.out.println("ERROR: Unsupported MOJO type"); System.exit(1); } } private Map<String, Object> getParamsAsJson(TreeBackedMojoModel tree) { Map<String, Object> params = new LinkedHashMap<>(); params.put("h2o_version", genModel._h2oVersion); params.put("mojo_version", genModel._mojo_version); params.put("algo", genModel._algoName); 
params.put("model_category", genModel._category.toString()); params.put("classifier", genModel.isClassifier()); params.put("supervised", genModel._supervised); params.put("nfeatures", genModel._nfeatures); params.put("nclasses", genModel._nclasses); params.put("balance_classes", genModel._balanceClasses); params.put("n_tree_groups", tree.getNTreeGroups()); params.put("n_trees_in_group", tree.getNTreesPerGroup()); params.put("base_score", tree.getInitF()); if (genModel.isClassifier()) { String[] responseValues = genModel.getDomainValues(genModel.getResponseIdx()); params.put("class_labels", responseValues); } if (genModel instanceof GbmMojoModel) { GbmMojoModel m = (GbmMojoModel) genModel; params.put("family", m._family.toString()); params.put("link_function", m._link_function.toString()); } return params; } private List<Object> getDomainValuesAsJSON() { List<Object> domainValues = new ArrayList<>(); String[][] values = genModel.getDomainValues(); // each col except response for (int i = 0; i < values.length-1; i++) { if (values[i] == null) continue; Map<String, Object> colValuesObject = new LinkedHashMap<>(); colValuesObject.put("colId", i); colValuesObject.put("colName", genModel._names[i]); colValuesObject.put("values", values[i]); domainValues.add(colValuesObject); } return domainValues; } private void printJson(TreeBackedMojoModel mojo, SharedTreeGraph trees, PrintStream os) { Map<String, Object> json = new LinkedHashMap<>(); json.put("params", getParamsAsJson(mojo)); json.put("domainValues", getDomainValuesAsJSON()); json.put("trees", trees.toJson()); if (optionalTitle != null) { json.put("title", optionalTitle); } Gson gson = new GsonBuilder().setPrettyPrinting().create(); os.print(gson.toJson(json)); } public static class PrintTreeOptions { public boolean _setDecimalPlace; public int _nPlaces; public int _fontSize; public boolean _internal; public PrintTreeOptions(boolean setdecimalplaces, int nplaces, int fontsize, boolean internal) { _setDecimalPlace = 
setdecimalplaces; _nPlaces = _setDecimalPlace ? nplaces : _nPlaces; _fontSize = fontsize; _internal = internal; } public float roundNPlace(float value) { if (_nPlaces < 0) return value; double sc = Math.pow(10, _nPlaces); return (float) (Math.round(value*sc)/sc); } } }
package com.lowtuna.jsonblob.core; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.base.Stopwatch; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import io.dropwizard.util.Duration; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; @Slf4j @RequiredArgsConstructor public class BlobCleanupJob implements Runnable { private final Path blobDirectory; private final Duration blobAccessTtl; private final FileSystemJsonBlobManager fileSystemJsonBlobManager; private final ObjectMapper om; private final boolean deleteEnabled; @Override public void run() { Stopwatch stopwatch = new Stopwatch().start(); AtomicLong blobsRemoved = new AtomicLong(0); try { Files.walk(blobDirectory) .parallel() .filter(p -> !p.toFile().isDirectory()) .map(Path::getParent) .distinct() .forEach(dataDir -> { log.info("Checking for blobs not accessed in the last {} in {}", blobAccessTtl, dataDir.toAbsolutePath()); if (!dataDir.toFile().exists() || !dataDir.toFile().isDirectory()) { return; } try { List<File> files = Arrays.asList(dataDir.toFile().listFiles()).parallelStream().filter(File::exists).collect(Collectors.toList()); Set<String> blobs = Sets .newHashSet(Lists.transform(files, f -> f.getName().split("\\.", 2)[0])) .parallelStream() .filter(f -> fileSystemJsonBlobManager.resolveTimestamp(f).isPresent()).collect(Collectors.toSet()); log.info("Identified {} blobs in {}", blobs.size(), dataDir); Map<String, DateTime> lastAccessed = 
Maps.newHashMap(Maps.asMap(blobs, new Function<String, DateTime>() { @Nullable @Override public DateTime apply(@Nullable String input) { return fileSystemJsonBlobManager.resolveTimestamp(input).get(); } })); log.debug("Completed building map of {} last accessed timestamps in {}", lastAccessed.size(), dataDir); File metadataFile = fileSystemJsonBlobManager.getMetaDataFile(dataDir.toFile()); try { BlobMetadataContainer metadataContainer = metadataFile.exists() ? om.readValue(fileSystemJsonBlobManager.readFile(metadataFile), BlobMetadataContainer.class) : new BlobMetadataContainer(); log.debug("Adding {} last accessed timestamp from metadata {}", metadataContainer.getLastAccessedByBlobId().size(), metadataFile.getAbsolutePath()); lastAccessed.putAll(metadataContainer.getLastAccessedByBlobId()); log.debug("Determining which blobs to remove from {}", dataDir); Map<String, DateTime> toRemove = Maps.filterEntries(lastAccessed, input -> input.getValue().plusMillis((int) blobAccessTtl.toMilliseconds()).isBefore(DateTime.now())); log.info("Identified {} blobs to remove in {}", toRemove.size(), dataDir); toRemove.keySet().parallelStream().forEach(blobId -> { if (deleteEnabled) { log.debug("Deleting blob with id {}", blobId); try { fileSystemJsonBlobManager.deleteBlob(blobId); blobsRemoved.incrementAndGet(); } catch (BlobNotFoundException e) { log.debug("Couldn't delete blobId {} because it's already been deleted", blobId); } } }); } catch (IOException e) { log.warn("Couldn't load metadata file from {}", dataDir.toAbsolutePath(), e); } } catch (Exception e) { log.warn("Caught Exception while trying to remove un-accessed blobs in {}", dataDir, e); } }); log.info("Completed cleanup of {} blobs in {}ms", blobsRemoved.get(), stopwatch.elapsed(TimeUnit.MILLISECONDS)); } catch (Exception e) { log.warn("Couldn't remove old blobs", e); } } }
package gov.nih.nci.cananolab.ui.sample; import gov.nih.nci.cananolab.domain.particle.Sample; import gov.nih.nci.cananolab.dto.common.ExperimentConfigBean; import gov.nih.nci.cananolab.dto.common.FileBean; import gov.nih.nci.cananolab.dto.common.FindingBean; import gov.nih.nci.cananolab.dto.common.UserBean; import gov.nih.nci.cananolab.dto.particle.SampleBean; import gov.nih.nci.cananolab.dto.particle.characterization.CharacterizationBean; import gov.nih.nci.cananolab.dto.particle.characterization.CharacterizationSummaryViewBean; import gov.nih.nci.cananolab.service.sample.CharacterizationService; import gov.nih.nci.cananolab.service.sample.helper.CharacterizationServiceHelper; import gov.nih.nci.cananolab.service.sample.impl.CharacterizationServiceLocalImpl; import gov.nih.nci.cananolab.ui.core.BaseAnnotationAction; import gov.nih.nci.cananolab.ui.core.InitSetup; import gov.nih.nci.cananolab.ui.protocol.InitProtocolSetup; import gov.nih.nci.cananolab.util.Constants; import gov.nih.nci.cananolab.util.DateUtils; import gov.nih.nci.cananolab.util.ExportUtils; import gov.nih.nci.cananolab.util.StringUtils; import java.net.URL; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.Map; import java.util.SortedSet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.apache.struts.action.ActionMessages; import org.apache.struts.validator.DynaValidatorForm; /** * Base action for characterization actions * * @author pansu * */ public class CharacterizationAction extends BaseAnnotationAction { /** * Add or update the data to database * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward create(ActionMapping mapping, ActionForm form, 
HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean charBean = (CharacterizationBean) theForm .get("achar"); InitCharacterizationSetup.getInstance() .persistCharacterizationDropdowns(request, charBean); // TODO:: // if (!validateDerivedDatum(request, charBean)) { // return mapping.getInputForward(); saveCharacterization(request, theForm, charBean); ActionMessages msgs = new ActionMessages(); // validate number by javascript filterFloatingNumber // validateNumber(request, charBean, msgs); ActionMessage msg = new ActionMessage("message.addCharacterization", charBean.getCharacterizationName()); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); saveMessages(request, msgs); return summaryEdit(mapping, form, request, response); } /** * Set up the input form for adding new characterization * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward setupNew(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; setupInputForm(request, theForm); // reset characterizationBean CharacterizationBean charBean = new CharacterizationBean(); theForm.set("achar", charBean); String charType = request.getParameter("charType"); if (charType != null) { charBean.setCharacterizationType(charType); SortedSet<String> charNames = InitCharacterizationSetup .getInstance().getCharNamesByCharType(request, charBean.getCharacterizationType()); request.getSession().setAttribute("charTypeChars", charNames); } return mapping.getInputForward(); } /** * Set up drop-downs need for the input form * * @param request * @param theForm * @throws Exception */ private void setupInputForm(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { String sampleId = request.getParameter("sampleId"); String charType = 
request.getParameter("charType"); InitSampleSetup.getInstance().setSharedDropdowns(request); InitCharacterizationSetup.getInstance().setCharactierizationDropDowns( request, sampleId); InitExperimentConfigSetup.getInstance().setExperimentConfigDropDowns( request); if (charType != null) InitProtocolSetup.getInstance().getProtocolsByChar(request, charType); InitCharacterizationSetup.getInstance().setCharacterizationDropdowns( request); // String detailPage = setupDetailPage(charBean); // request.getSession().setAttribute("characterizationDetailPage", // detailPage); // set up other samples with the same primary point of contact InitSampleSetup.getInstance().getOtherSampleNames(request, sampleId); } /** * Set up the input form for editing existing characterization * * @param mapping * @param form * @param request * @param response * @return * @throws Exception */ public ActionForward setupUpdate(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; String charId = request.getParameter("charId"); CharacterizationService charService = new CharacterizationServiceLocalImpl(); UserBean user = (UserBean) request.getSession().getAttribute("user"); CharacterizationBean charBean = charService.findCharacterizationById( charId, user); // setup correct display for characterization name and characterization // type InitCharacterizationSetup.getInstance().setCharacterizationName( request, charBean); InitCharacterizationSetup.getInstance().setCharacterizationType( request, charBean); // setup dropdown for existing characterization InitCharacterizationSetup.getInstance().getCharNamesByCharType(request, charBean.getCharacterizationType()); InitCharacterizationSetup.getInstance().getAssayTypesByCharName( request, charBean.getCharacterizationName()); InitCharacterizationSetup.getInstance().getDatumNamesByCharName( request, charBean.getCharacterizationName()); 
request.setAttribute("achar", charBean); theForm.set("achar", charBean); setupInputForm(request, theForm); String detailPage = null; if (charBean.isWithProperties()) { detailPage = setupDetailPage(charBean); } request.setAttribute("characterizationDetailPage", detailPage); return mapping.getInputForward(); } private String setupDetailPage(CharacterizationBean charBean) { String includePage = null; if (charBean.getClassName().equals("PhysicalState") || charBean.getClassName().equals("Shape") || charBean.getClassName().equals("Solubility") || charBean.getClassName().equals("Surface")) { includePage = "physical/body" + charBean.getClassName() + "Info.jsp"; } else if (charBean.getClassName().equals("Cytotoxicity") || charBean.getClassName().equals("EnzymeInduction") || charBean.getClassName().equals("Transfection")) { includePage = "invitro/body" + charBean.getClassName() + "Info.jsp"; } return includePage; } private void saveToOtherSamples(HttpServletRequest request, CharacterizationBean copyBean, UserBean user, String sampleName, Sample[] otherSamples) throws Exception { CharacterizationService charService = new CharacterizationServiceLocalImpl(); for (Sample sample : otherSamples) { // replace file URI with new sample name for (FindingBean findingBean : copyBean.getFindings()) { for (FileBean fileBean : findingBean.getFiles()) { fileBean.getDomainFile().getUri().replace(sampleName, sample.getName()); } } charService.saveCharacterization(sample, copyBean, user); } } private void setupDomainChar(HttpServletRequest request, DynaValidatorForm theForm, CharacterizationBean charBean) throws Exception { UserBean user = (UserBean) request.getSession().getAttribute("user"); if (charBean.getClassName() == null || charBean.getClassName().length() == 0) { String className = InitSetup.getInstance().getClassName( charBean.getCharacterizationName(), request.getSession().getServletContext()); charBean.setClassName(className); } charBean.setupDomain(user.getLoginName()); } // TODO 
for datum and condition // protected boolean validateDerivedDatum(HttpServletRequest request, // CharacterizationBean charBean) throws Exception { // ActionMessages msgs = new ActionMessages(); // boolean noErrors = true; // for (DerivedBioAssayDataBean derivedBioassayDataBean : charBean // .getDerivedBioAssayDataList()) { // List<DerivedDatumBean> datumList = derivedBioassayDataBean // .getDatumList(); // FileBean lfBean = derivedBioassayDataBean.getFileBean(); // // error, if no data input from either the lab file or derived datum // boolean noFileError = true; // if (datumList == null || datumList.size() == 0) { // noFileError = validateFileBean(request, msgs, lfBean); // if (!noFileError) { // ActionMessage msg = new ActionMessage("errors.required", // "If no derived datum entered, the file data"); // msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); // this.saveErrors(request, msgs); // noErrors = false; // for (DerivedDatumBean datum : datumList) { // // if value field is populated, so does the name field. 
// if (datum.getDomainDerivedDatum().getName().length() == 0) { // ActionMessage msg = new ActionMessage("errors.required", // "Derived data name"); // msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); // this.saveErrors(request, msgs); // noErrors = false; // try { // Float value = new Float(datum.getValueStr()); // // for boolean type, the value must be 0/1 // if (datum.getDomainDerivedDatum().getValueType() // .equalsIgnoreCase("boolean") // && value != 0.0 && value != 1.0) { // ActionMessage msg = new ActionMessage( // "error.booleanValue", "Derived data value"); // msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); // saveErrors(request, msgs); // noErrors = false; // } catch (NumberFormatException e) { // // for boolean type, the value must be true/false // if (datum.getDomainDerivedDatum().getValueType() // .equalsIgnoreCase("boolean") // && !datum.getValueStr().equalsIgnoreCase("true") // && !datum.getValueStr().equalsIgnoreCase("false")) { // ActionMessage msg = new ActionMessage( // "error.booleanValue", "Derived data value"); // msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); // saveErrors(request, msgs); // noErrors = false; // } else if (!datum.getDomainDerivedDatum().getValueType() // .equalsIgnoreCase("boolean")) { // ActionMessage msg = new ActionMessage( // "error.derivedDatumValue", "Derived data value"); // msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); // saveErrors(request, msgs); // noErrors = false; // return noErrors; private void saveCharacterization(HttpServletRequest request, DynaValidatorForm theForm, CharacterizationBean charBean) throws Exception { SampleBean sampleBean = setupSample(theForm, request, Constants.LOCAL_SITE, false); UserBean user = (UserBean) request.getSession().getAttribute("user"); setupDomainChar(request, theForm, charBean); CharacterizationService charService = new CharacterizationServiceLocalImpl(); charService .saveCharacterization(sampleBean.getDomain(), charBean, user); // save to other samples Sample[] otherSamples = 
prepareCopy(request, theForm); if (otherSamples != null) { Boolean copyData = (Boolean) theForm.get("copyData"); charService.copyAndSaveCharacterization(charBean, sampleBean .getDomain(), otherSamples, copyData, user); } sampleBean = setupSample(theForm, request, Constants.LOCAL_SITE, false); request.setAttribute("sampleId", sampleBean.getDomain().getId()); request.setAttribute("location", Constants.LOCAL_SITE); } private void deleteCharacterization(HttpServletRequest request, DynaValidatorForm theForm, CharacterizationBean charBean, String createdBy) throws Exception { charBean.setupDomain(createdBy); UserBean user = (UserBean) request.getSession().getAttribute("user"); CharacterizationService charService = new CharacterizationServiceLocalImpl(); charService.deleteCharacterization(charBean.getDomainChar(), user); } public ActionForward delete(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean charBean = (CharacterizationBean) theForm .get("achar"); UserBean user = (UserBean) request.getSession().getAttribute("user"); deleteCharacterization(request, theForm, charBean, user.getLoginName()); ActionMessages msgs = new ActionMessages(); ActionMessage msg = new ActionMessage("message.deleteCharacterization"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); saveMessages(request, msgs); ActionForward forward = mapping.findForward("success"); return forward; } public void validateNumber(HttpServletRequest request, CharacterizationBean charBean, ActionMessages msgs) throws Exception { if (charBean.getSolubility().getCriticalConcentration() == 0.0) { ActionMessage msg = new ActionMessage("message.invalidNumber"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); } } private void setCharacterizationFileFullPath(HttpServletRequest request, CharacterizationBean charBean, String location) throws Exception { if 
(location.equals(Constants.LOCAL_SITE)) { // TODO:: // set file full path // for (DerivedBioAssayDataBean bioassayBean : charBean // .getDerivedBioAssayDataList()) { // if (bioassayBean.getFileBean() != null) { // FileBean fileBean = bioassayBean.getFileBean(); // if (!fileBean.getDomainFile().getUriExternal()) { // String fileRoot = PropertyReader.getProperty( // Constants.FILEUPLOAD_PROPERTY, // "fileRepositoryDir"); // fileBean.setFullPath(fileRoot + File.separator // + fileBean.getDomainFile().getUri()); // } else { // fileBean.setFullPath(fileBean.getDomainFile().getUri()); } else { String serviceUrl = InitSetup.getInstance().getGridServiceUrl( request, location); URL localURL = new URL(request.getRequestURL().toString()); String actionPath = localURL.getPath(); URL remoteUrl = new URL(serviceUrl); String remoteServerHostUrl = remoteUrl.getProtocol() + ": + remoteUrl.getHost() + ":" + remoteUrl.getPort(); String remoteDownloadUrlBase = remoteServerHostUrl + actionPath + "?dispatch=download&location=local&fileId="; // TODO:: // for (DerivedBioAssayDataBean bioassayBean : charBean // .getDerivedBioAssayDataList()) { // if (bioassayBean.getFileBean() != null) { // FileBean fileBean = bioassayBean.getFileBean(); // String remoteDownloadUrl = remoteDownloadUrlBase // + fileBean.getDomainFile().getId().toString(); // fileBean.setFullPath(remoteDownloadUrl); } } /** * summaryEdit() handles Edit request for Characterization Summary view. * * @param mapping * @param form * @param request * @param response * @return ActionForward * @throws Exception * if error occurred. */ public ActionForward summaryEdit(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { // Prepare data. this.prepareSummary(mapping, form, request, response); // "actionName" is for constructing the Print/Export URL. 
request.setAttribute("actionName", request.getRequestURL().toString()); return mapping.findForward("summaryEdit"); } /** * summaryView() handles View request for Characterization Summary report. * * @param mapping * @param form * @param request * @param response * @return ActionForward * @throws Exception * if error occurred. */ public ActionForward summaryView(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { // Prepare data. this.prepareSummary(mapping, form, request, response); this.prepareCharacterizationTypes(mapping, form, request, response); // "actionName" is for constructing the Print/Export URL. request.setAttribute("actionName", request.getRequestURL().toString()); return mapping.findForward("summaryView"); } /** * summaryPrint() handles Print request for Characterization Summary report. * * @param mapping * @param form * @param request * @param response * @return ActionForward * @throws Exception * if error occurred. */ public ActionForward summaryPrint(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { // Prepare data. this.prepareSummary(mapping, form, request, response); this.prepareCharacterizationTypes(mapping, form, request, response); // Filter out un-selected types. String type = request.getParameter("type"); if (!StringUtils.isEmpty(type)) { List<String> characterizationTypes = (List<String>) request .getAttribute("characterizationTypes"); characterizationTypes.clear(); characterizationTypes.add(type); } return mapping.findForward("summaryPrintView"); } /** * Shared function for summaryView(), summaryPrint() and summaryEdit(). * Prepare CharacterizationBean based on SampleId. * * @param mapping * @param form * @param request * @param response * @return ActionForward * @throws Exception * if error occurred. 
*/ private void prepareSummary(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; UserBean user = (UserBean) request.getSession().getAttribute("user"); String sampleId = theForm.getString("sampleId"); String location = theForm.getString("location"); setupSample(theForm, request, location, false); CharacterizationService service = null; if (Constants.LOCAL_SITE.equals(location)) { service = new CharacterizationServiceLocalImpl(); } else { // TODO model change // String serviceUrl = InitSetup.getInstance().getGridServiceUrl( // request, location); // service = new CharacterizationServiceRemoteImpl( // serviceUrl); } List<CharacterizationBean> charBeans = service.findCharsBySampleId( sampleId, user); // set characterization types and retrieve visibility for (CharacterizationBean charBean : charBeans) { InitCharacterizationSetup.getInstance().setCharacterizationType( request, charBean); InitCharacterizationSetup.getInstance().setCharacterizationName( request, charBean); } CharacterizationSummaryViewBean summaryView = new CharacterizationSummaryViewBean( charBeans); request.setAttribute("characterizationSummaryView", summaryView); InitCharacterizationSetup.getInstance().setCharactierizationDropDowns( request, sampleId); } /** * Shared function for summaryView() and summaryPrint(). Keep submitted * characterization types in the correct display order. Should be called * after calling prepareSummary(). * * @param mapping * @param form * @param request * @param response * @return ActionForward * @throws Exception * if error occurred. 
*/ private void prepareCharacterizationTypes(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { CharacterizationSummaryViewBean summaryView = (CharacterizationSummaryViewBean) request .getAttribute("characterizationSummaryView"); // Keep submitted characterization types in the correct display order List<String> allCharacterizationTypes = new ArrayList<String>( (List<? extends String>) request.getSession().getAttribute( "characterizationTypes")); List<String> characterizationTypes = new ArrayList<String>(); for (String charType : allCharacterizationTypes) { if (summaryView.getCharacterizationTypes().contains(charType) && !characterizationTypes.contains(charType)) { characterizationTypes.add(charType); } } request.setAttribute("characterizationTypes", characterizationTypes); } /** * summaryExport() handles Export request for Characterization Summary * report. * * @param mapping * @param form * @param request * @param response * @return ActionForward * @throws Exception * if error occurred. */ public ActionForward summaryExport(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { // Prepare data. this.prepareSummary(mapping, form, request, response); DynaValidatorForm theForm = (DynaValidatorForm) form; String location = theForm.getString("location"); SampleBean sampleBean = setupSample(theForm, request, location, false); CharacterizationSummaryViewBean charSummaryBean = (CharacterizationSummaryViewBean) request .getAttribute("characterizationSummaryView"); Map<String, SortedSet<CharacterizationBean>> charBeanMap = charSummaryBean .getType2Characterizations(); SortedSet<CharacterizationBean> charBeans = null; // Filter out un-selected types. 
String type = request.getParameter("type"); if (!StringUtils.isEmpty(type)) { charBeans = charBeanMap.get(type); if (charBeans != null) { charBeanMap.clear(); charBeanMap.put(type, charBeans); } } String fileName = this.getExportFileName(sampleBean.getDomain() .getName(), "CharacterizationSummaryView", type); ExportUtils.prepareReponseForExcell(response, fileName); CharacterizationService service = null; if (Constants.LOCAL_SITE.equals(location)) { service = new CharacterizationServiceLocalImpl(); } else { // TODO: Implement remote service. } CharacterizationServiceHelper.exportSummary(charSummaryBean, request, response .getOutputStream()); return null; } private String getExportFileName(String sampleName, String viewType, String subType) { List<String> nameParts = new ArrayList<String>(); nameParts.add(sampleName); nameParts.add(viewType); if (!StringUtils.isEmpty(subType)) { nameParts.add(StringUtils.getOneWordUpperCaseFirstLetter(subType)); } nameParts.add(DateUtils.convertDateToString(Calendar.getInstance() .getTime())); String exportFileName = StringUtils.join(nameParts, "_"); return exportFileName; } public ActionForward saveExperimentConfig(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); ExperimentConfigBean configBean = achar.getTheExperimentConfig(); UserBean user = (UserBean) request.getSession().getAttribute("user"); configBean.setupDomain(user.getLoginName()); CharacterizationService service = new CharacterizationServiceLocalImpl(); service.saveExperimentConfig(configBean, user); achar.addExperimentConfig(configBean); InitCharacterizationSetup.getInstance() .persistCharacterizationDropdowns(request, achar); InitExperimentConfigSetup.getInstance() .persistExperimentConfigDropdowns(request, configBean); // also save characterization 
saveCharacterization(request, theForm, achar); return mapping.getInputForward(); } public ActionForward getFinding(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; UserBean user = (UserBean) request.getSession().getAttribute("user"); String theFindingId = request.getParameter("findingId"); CharacterizationService service = new CharacterizationServiceLocalImpl(); FindingBean findingBean = service.findFindingById(theFindingId, user); CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); achar.setTheFinding(findingBean); request.setAttribute("anchor", "submitFinding"); return mapping.getInputForward(); } public ActionForward resetFinding(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; FindingBean findingBean = new FindingBean(); CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); achar.setTheFinding(findingBean); request.setAttribute("anchor", "submitFinding"); return mapping.getInputForward(); } public ActionForward saveFinding(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); FindingBean findingBean = achar.getTheFinding(); String theFindingId = (String) theForm.get("theFindingId"); if (theFindingId != null && !theFindingId.equals("null") && theFindingId.trim().length() > 0) { findingBean.getDomain().setId(new Long(theFindingId)); } UserBean user = (UserBean) request.getSession().getAttribute("user"); findingBean.setupDomain(user.getLoginName()); CharacterizationService service = new CharacterizationServiceLocalImpl(); service.saveFinding(findingBean, user); achar.addFinding(findingBean); 
InitCharacterizationSetup.getInstance() .persistCharacterizationDropdowns(request, achar); // also save characterization saveCharacterization(request, theForm, achar); request.setAttribute("anchor", "result"); return mapping.getInputForward(); } public ActionForward addFile(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); FindingBean findingBean = achar.getTheFinding(); FileBean theFile = findingBean.getTheFile(); int theFileIndex = findingBean.getTheFileIndex(); // create a new copy before adding to finding FileBean newFile = theFile.copy(); SampleBean sampleBean = setupSample(theForm, request, Constants.LOCAL_SITE, false); // setup domainFile uri for fileBeans String internalUriPath = Constants.FOLDER_PARTICLE + "/" + sampleBean.getDomain().getName() + "/" + StringUtils.getOneWordLowerCaseFirstLetter(achar .getCharacterizationName()); UserBean user = (UserBean) request.getSession().getAttribute("user"); newFile.setupDomainFile(internalUriPath, user.getLoginName(), 0); findingBean.addFile(newFile, theFileIndex); request.setAttribute("anchor", "submitFinding"); return mapping.getInputForward(); } public ActionForward removeFile(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); FindingBean findingBean = achar.getTheFinding(); int theFileIndex = findingBean.getTheFileIndex(); findingBean.removeFile(theFileIndex); findingBean.setTheFile(new FileBean()); request.setAttribute("anchor", "submitFinding"); return mapping.getInputForward(); } public ActionForward drawMatrix(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { 
DynaValidatorForm theForm = (DynaValidatorForm) form; CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); request.setAttribute("anchor", "result"); FindingBean findingBean = achar.getTheFinding(); if (request.getParameter("removeColumn") != null) { int columnToRemove = Integer.parseInt(request .getParameter("removeColumn")); findingBean.removeColumn(columnToRemove); return mapping.getInputForward(); } else if (request.getParameter("removeRow") != null) { int rowToRemove = Integer.parseInt(request .getParameter("removeRow")); findingBean.removeRow(rowToRemove); return mapping.getInputForward(); } int existingNumberOfColumns = findingBean.getColumnHeaders().size(); int existingNumberOfRows = findingBean.getRows().size(); if (existingNumberOfColumns > findingBean.getNumberOfColumns()) { ActionMessages msgs = new ActionMessages(); ActionMessage msg = new ActionMessage( "message.addCharacterization.removeMatrixColumn"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); saveMessages(request, msgs); findingBean.setNumberOfColumns(existingNumberOfColumns); return mapping.getInputForward(); } if (existingNumberOfRows > findingBean.getNumberOfRows()) { ActionMessages msgs = new ActionMessages(); ActionMessage msg = new ActionMessage( "message.addCharacterization.removeMatrixRow"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); saveMessages(request, msgs); findingBean.setNumberOfRows(existingNumberOfRows); return mapping.getInputForward(); } findingBean.updateMatrix(findingBean.getNumberOfColumns(), findingBean .getNumberOfRows()); request.setAttribute("anchor", "submitFinding"); return mapping.getInputForward(); } public ActionForward deleteFinding(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; UserBean user = (UserBean) request.getSession().getAttribute("user"); CharacterizationBean achar = (CharacterizationBean) theForm 
.get("achar"); FindingBean dataSetBean = achar.getTheFinding(); CharacterizationService service = new CharacterizationServiceLocalImpl(); service.deleteFinding(dataSetBean.getDomain(), user); achar.removeFinding(dataSetBean); InitCharacterizationSetup.getInstance() .persistCharacterizationDropdowns(request, achar); request.setAttribute("anchor", "result"); return mapping.getInputForward(); } public ActionForward deleteExperimentConfig(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; UserBean user = (UserBean) request.getSession().getAttribute("user"); CharacterizationBean achar = (CharacterizationBean) theForm .get("achar"); ExperimentConfigBean configBean = achar.getTheExperimentConfig(); CharacterizationService service = new CharacterizationServiceLocalImpl(); service.deleteExperimentConfig(configBean.getDomain(), user); achar.removeExperimentConfig(configBean); InitCharacterizationSetup.getInstance() .persistCharacterizationDropdowns(request, achar); InitExperimentConfigSetup.getInstance() .persistExperimentConfigDropdowns(request, configBean); // also save characterization saveCharacterization(request, theForm, achar); return mapping.getInputForward(); } }
package com.noveogroup.android.log;

import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * The logger manager.
 * <p/>
 * To configure this logger manager you can include an
 * {@code android-logger.properties} file in src directory.
 * The format of configuration file is:
 * <pre>
 * # root logger configuration
 * root=&lt;level&gt;:&lt;tag&gt;
 * # package / class logger configuration
 * logger.&lt;package or class name&gt;=&lt;level&gt;:&lt;tag&gt;
 * </pre>
 * You can use values of {@link Logger.Level} enum as level constants.
 * For example, the following configuration will
 * log all ERROR messages with tag "MyApplication" and all
 * messages from classes {@code com.example.server.*} with
 * tag "MyApplication-server":
 * <pre>
 * root=ERROR:MyApplication
 * logger.com.example.server=DEBUG:MyApplication-server
 * </pre>
 * <p/>
 */
public final class LoggerManager {

    // Utility class: never instantiated.
    private LoggerManager() {
        throw new UnsupportedOperationException();
    }

    /** Handler used when no configuration (or no matching entry) exists. */
    private static final Handler DEFAULT_HANDLER =
            new PatternHandler(Logger.Level.VERBOSE, "%logger", "%date %caller%n");

    // Logger used to report configuration problems of the manager itself.
    // NOTE(review): the "XXX" tag looks like a placeholder — confirm intended tag.
    private static final Logger DEFAULT_LOGGER = new SimpleLogger("XXX", DEFAULT_HANDLER);

    /** Maximum tag length accepted by Android's logging facility. */
    private static final int MAX_LOG_TAG_LENGTH = 23;

    /** Name of the configuration resource looked up on the classpath. */
    private static final String PROPERTIES_NAME = "android-logger.properties";
    /** Property key of the root logger configuration. */
    private static final String CONF_ROOT = "root";
    /** Prefix of per-package / per-class logger configuration keys. */
    private static final String CONF_LOGGER = "logger.";
    /** Property values look like "&lt;level&gt;:&lt;tag&gt;". */
    private static final Pattern CONF_LOGGER_REGEX = Pattern.compile("(.*?):(.*)");

    /**
     * Loads {@link #PROPERTIES_NAME} into {@code properties}, trying the
     * class's own class loader first and the system class loader second.
     * Missing resource is not an error: {@code properties} simply stays empty.
     *
     * @param properties the target properties object.
     * @throws IOException if the resource exists but cannot be read.
     */
    private static void loadProperties(Properties properties) throws IOException {
        InputStream inputStream = null;
        try {
            inputStream = LoggerManager.class.getClassLoader().getResourceAsStream(PROPERTIES_NAME);
            if (inputStream != null) {
                properties.load(inputStream);
            } else {
                inputStream = ClassLoader.getSystemClassLoader().getResourceAsStream(PROPERTIES_NAME);
                if (inputStream != null) {
                    properties.load(inputStream);
                }
            }
        } finally {
            if (inputStream != null) {
                inputStream.close();
            }
        }
    }

    /**
     * Decodes a configuration value of the form "&lt;level&gt;:&lt;tag&gt;"
     * into a {@link Handler}. Tags longer than {@link #MAX_LOG_TAG_LENGTH}
     * are trimmed (with a warning); an unparsable level falls back to
     * VERBOSE with the raw value as tag.
     *
     * @param handlerString the raw property value.
     * @return a handler, never {@code null}.
     */
    private static Handler decodeHandler(String handlerString) {
        // todo implement handler decoding from new format
        Matcher matcher = CONF_LOGGER_REGEX.matcher(handlerString);
        if (matcher.matches()) {
            String levelString = matcher.group(1);
            String tag = matcher.group(2);
            // FIX: compare against MAX_LOG_TAG_LENGTH instead of the magic
            // number 23 so the limit is defined in exactly one place.
            if (tag.length() > MAX_LOG_TAG_LENGTH) {
                String trimmedTag = tag.substring(0, MAX_LOG_TAG_LENGTH);
                DEFAULT_LOGGER.w(String.format("Android doesn't supports tags %d characters longer. Tag '%s' will be trimmed to '%s'",
                        MAX_LOG_TAG_LENGTH, tag, trimmedTag));
                tag = trimmedTag;
            }
            try {
                return new PatternHandler(Logger.Level.valueOf(levelString), tag, null);
            } catch (IllegalArgumentException e) {
                DEFAULT_LOGGER.w(String.format("Cannot parse '%s' as logging level. Only %s are allowed",
                        levelString, Arrays.toString(Logger.Level.values())));
                return new PatternHandler(Logger.Level.VERBOSE, handlerString, null);
            }
        } else {
            return new PatternHandler(Logger.Level.VERBOSE, handlerString, null);
        }
    }

    /**
     * Parses the properties file into a map from logger-name prefix to
     * handler. The root logger is stored under the {@code null} key; the map
     * is guaranteed to contain a root entry. On any configuration problem the
     * default configuration is used.
     *
     * @return the handler map, never empty.
     */
    private static Map<String, Handler> loadConfiguration() {
        Map<String, Handler> handlerMap = new HashMap<String, Handler>();

        // read properties file
        Properties properties = new Properties();
        try {
            loadProperties(properties);
        } catch (IOException e) {
            DEFAULT_LOGGER.e(String.format("Cannot configure logger from '%s'. Default configuration will be used", PROPERTIES_NAME), e);
            handlerMap.put(null, DEFAULT_HANDLER);
            return handlerMap;
        }

        // something is wrong if property file is empty
        if (!properties.propertyNames().hasMoreElements()) {
            DEFAULT_LOGGER.e("Logger configuration file is empty. Default configuration will be used");
            handlerMap.put(null, DEFAULT_HANDLER);
            return handlerMap;
        }

        // parse properties to logger map
        for (Enumeration<?> names = properties.propertyNames(); names.hasMoreElements(); ) {
            String propertyName = (String) names.nextElement();
            String propertyValue = properties.getProperty(propertyName);

            // todo fix: use Logger.ROOT_LOGGER_NAME and equalsIgnoreCase
            if (propertyName.equals(CONF_ROOT)) {
                handlerMap.put(null, decodeHandler(propertyValue));
            } else if (propertyName.startsWith(CONF_LOGGER)) {
                String loggerName = propertyName.substring(CONF_LOGGER.length());
                handlerMap.put(loggerName, decodeHandler(propertyValue));
            } else {
                DEFAULT_LOGGER.e(String.format("unknown key '%s' in '%s' file", propertyName, PROPERTIES_NAME));
            }
        }

        // logger map should have root logger (corresponding to "null" key)
        if (!handlerMap.containsKey(null)) {
            handlerMap.put(null, DEFAULT_HANDLER);
        }

        return handlerMap;
    }

    /** Immutable name-prefix -> handler map built once at class load. */
    private static final Map<String, Handler> HANDLER_MAP = Collections.unmodifiableMap(loadConfiguration());

    /**
     * Finds the handler whose configured prefix is the longest
     * package/class-boundary prefix of {@code name}; falls back to the root
     * handler (and finally to {@link #DEFAULT_HANDLER}).
     *
     * @param name the logger name, may be {@code null} (root).
     * @return the matching handler, never {@code null}.
     */
    private static Handler findHandler(String name) {
        String currentKey = null;
        if (name != null) {
            for (String key : HANDLER_MAP.keySet()) {
                if (key != null && name.startsWith(key)) {
                    // check that key corresponds to a name of sub-package
                    if (key.length() >= name.length()
                            || name.charAt(key.length()) == '.' || name.charAt(key.length()) == '$') {
                        // update current best matching key
                        if (currentKey == null || currentKey.length() < key.length()) {
                            currentKey = key;
                        }
                    }
                }
            }
        }
        Handler handler = HANDLER_MAP.get(currentKey);
        return handler != null ? handler : DEFAULT_HANDLER;
    }

    // NOTE(review): WeakHashMap keyed by String — interned literals are never
    // collected, so cache behavior depends on how callers build names; confirm
    // this is the intended caching policy.
    private static final Map<String, Logger> LOGGER_CACHE = new WeakHashMap<String, Logger>();

    /**
     * Returns logger corresponding to the specified name.
     *
     * @param name the name.
     * @return the {@link Logger} implementation.
     */
    public static Logger getLogger(String name) {
        Logger logger;

        // try to find a logger in the cache
        synchronized (LOGGER_CACHE) {
            logger = LOGGER_CACHE.get(name);
        }

        // load logger from configuration
        // (benign race: two threads may build the same logger; last write wins)
        if (logger == null) {
            logger = new SimpleLogger(name, findHandler(name));
            synchronized (LOGGER_CACHE) {
                LOGGER_CACHE.put(logger.getName(), logger);
            }
        }

        // return logger
        return logger;
    }

    /**
     * Returns logger corresponding to the specified class.
     *
     * @param aClass the class.
     * @return the {@link Logger} implementation.
     */
    public static Logger getLogger(Class<?> aClass) {
        return getLogger(aClass == null ? null : aClass.getName());
    }

    /**
     * Returns logger corresponding to the caller class.
     *
     * @return the {@link Logger} implementation.
     */
    public static Logger getLogger() {
        return getLogger(Utils.getCallerClassName());
    }

}
package com.zyeeda.framework.entities;

import java.io.Serializable;
import java.util.Date;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

/**
 * JPA entity mapped to the {@code ZDA_SYS_USER} table (the table name is
 * supplied via the {@code @Entity} name attribute rather than {@code @Table}).
 * Property-access mapping: all column annotations sit on the getters.
 */
@Entity(name = "ZDA_SYS_USER")
public class User/* extends SimpleDomainEntity*/ implements Serializable {

    private static final long serialVersionUID = -411862891641683217L;

    private String id;                 // primary key (F_ID)
    private String username;
    private String password;
    private String gender;
    private String position;
    private String degree;
    private String email;
    private String mobile;
    private Date birthday;
    private Date dateOfWork;
    private Boolean status;
    private Boolean postStatus;
    // private byte[] photo;
    private String departmentName;
    private String deptFullPath;
    private String departmentNo;

    @Id
    @Column(name = "F_ID")
    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    @Column(name = "F_USERNAME", nullable = false)
    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    @Column(name = "F_PASSWORD", nullable = false, length = 36)
    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    @Column(name = "F_GENDER", length = 4)
    public String getGender() {
        return gender;
    }

    public void setGender(String gender) {
        this.gender = gender;
    }

    @Column(name = "F_POSITION", length = 100)
    public String getPosition() {
        return position;
    }

    public void setPosition(String position) {
        this.position = position;
    }

    @Column(name = "F_DEGREE", length = 100)
    public String getDegree() {
        return degree;
    }

    public void setDegree(String degree) {
        this.degree = degree;
    }

    @Column(name = "F_EMAIL", length = 50)
    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    @Column(name = "F_MOBILE", length = 13)
    public String getMobile() {
        return mobile;
    }

    public void setMobile(String mobile) {
        this.mobile = mobile;
    }

    // NOTE(review): TemporalType.TIME persists only the time-of-day; a birthday
    // (and the column length of 19, i.e. a full timestamp) suggests
    // TemporalType.DATE or TIMESTAMP was intended — confirm against the schema
    // before changing the mapping.
    @Temporal(TemporalType.TIME)
    @Column(name = "F_BIRTHDAY", length = 19)
    public Date getBirthday() {
        return birthday;
    }

    public void setBirthday(Date birthday) {
        this.birthday = birthday;
    }

    // NOTE(review): same TemporalType.TIME concern as getBirthday() — verify.
    @Temporal(TemporalType.TIME)
    @Column(name = "F_DATEOFWORK", length = 19)
    public Date getDateOfWork() {
        return dateOfWork;
    }

    public void setDateOfWork(Date dateOfWork) {
        this.dateOfWork = dateOfWork;
    }

    // NOTE(review): STATUS / POSTSTATUS lack the F_ prefix used by every other
    // column — confirm the column names are intentional.
    @Column(name = "STATUS")
    public Boolean getStatus() {
        return status;
    }

    public void setStatus(Boolean status) {
        this.status = status;
    }

    @Column(name = "POSTSTATUS")
    public Boolean getPostStatus() {
        return postStatus;
    }

    public void setPostStatus(Boolean postStatus) {
        this.postStatus = postStatus;
    }

    // Commented-out photo accessors retained from the original (note the
    // missing closing braces inside the commented code):
    // public byte[] getPhoto() {
    // return photo;
    // public void setPhoto(byte[] photo) {
    // this.photo = photo;

    @Column(name = "F_DEPARTMENTNAME", length = 100)
    public String getDepartmentName() {
        return departmentName;
    }

    public void setDepartmentName(String departmentName) {
        this.departmentName = departmentName;
    }

    @Column(name = "F_DEPTFULLPATH", length = 100)
    public String getDeptFullPath() {
        return deptFullPath;
    }

    public void setDeptFullPath(String deptFullPath) {
        this.deptFullPath = deptFullPath;
    }

    @Column(name = "F_DEPARTMENT_NO", length = 100)
    public String getDepartmentNo() {
        return this.departmentNo;
    }

    public void setDepartmentNo(String departmentNo) {
        this.departmentNo = departmentNo;
    }
}
package gov.nih.nci.nautilus.ui.struts.form;

import gov.nih.nci.nautilus.constants.NautilusConstants;
import gov.nih.nci.nautilus.criteria.AllGenesCriteria;
import gov.nih.nci.nautilus.criteria.ArrayPlatformCriteria;
import gov.nih.nci.nautilus.criteria.CloneOrProbeIDCriteria;
import gov.nih.nci.nautilus.criteria.DiseaseOrGradeCriteria;
import gov.nih.nci.nautilus.criteria.FoldChangeCriteria;
import gov.nih.nci.nautilus.criteria.GeneIDCriteria;
import gov.nih.nci.nautilus.criteria.GeneOntologyCriteria;
import gov.nih.nci.nautilus.criteria.PathwayCriteria;
import gov.nih.nci.nautilus.criteria.RegionCriteria;
import gov.nih.nci.nautilus.criteria.UntranslatedRegionCriteria;
import gov.nih.nci.nautilus.criteria.SampleCriteria;
import gov.nih.nci.nautilus.de.ArrayPlatformDE;
import gov.nih.nci.nautilus.de.BasePairPositionDE;
import gov.nih.nci.nautilus.de.ChromosomeNumberDE;
import gov.nih.nci.nautilus.de.CloneIdentifierDE;
import gov.nih.nci.nautilus.de.CytobandDE;
import gov.nih.nci.nautilus.de.DiseaseNameDE;
import gov.nih.nci.nautilus.de.ExprFoldChangeDE;
import gov.nih.nci.nautilus.de.GeneIdentifierDE;
import gov.nih.nci.nautilus.de.GeneOntologyDE;
import gov.nih.nci.nautilus.de.PathwayDE;
import gov.nih.nci.nautilus.de.SampleIDDE;
import gov.nih.nci.nautilus.ui.bean.SessionQueryBag;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;

import javax.servlet.http.HttpServletRequest;

import org.apache.log4j.Logger;
import org.apache.struts.action.ActionError;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.upload.FormFile;
import org.apache.struts.util.LabelValueBean;

/**
 * Struts form bean for the Gene Expression query screen. Captures the raw
 * form fields, validates them, and (on successful validation) builds the
 * corresponding criteria objects from the *DomainMap lookup tables populated
 * by the setters.
 */
public class GeneExpressionForm extends BaseForm {

    // Variables

    /** geneOption property ("standard" by default) */
    private String geneOption = "standard";
    private String[] pathwayName;
    /** sampleList property */
    private String sampleList;
    /** geneList property */
    private String geneList;
    /** goClassification property */
    private String goClassification;
    /** goCellularComp property */
    private String goCellularComp;
    /** goMolecularFunction property */
    private String goMolecularFunction;
    /** goBiologicalProcess property */
    private String goBiologicalProcess;
    /** tumorGrade property */
    private String tumorGrade;
    /** region property */
    private String region;
    /** foldChangeValueDown property */
    private String foldChangeValueDown = "2";
    /** cytobandRegion property */
    private String cytobandRegion;
    /** cloneId property */
    private String cloneId;
    /** pathways property */
    private String pathways;
    /** tumorType property */
    private String tumorType;
    /** arrayPlatform property */
    private String arrayPlatform;
    /** cloneListFile property */
    private FormFile cloneListFile;
    /** cloneListSpecify property */
    private String cloneListSpecify;
    /** basePairEnd property */
    private String basePairEnd;
    /** chrosomeNumber property (sic: original spelling kept — it is part of the bean's public API) */
    private String chrosomeNumber;
    /** regulationStatus property */
    private String regulationStatus;
    /** foldChangeValueUnchangeFrom property */
    private String foldChangeValueUnchangeFrom = "0.8";
    /** foldChangeValueUnchangeTo property */
    private String foldChangeValueUnchangeTo = "1.2";
    /** foldChangeValueUp property */
    private String foldChangeValueUp = "2";
    /** geneType property */
    private String geneType;
    /** foldChangeValueUDUp property */
    private String foldChangeValueUDUp;
    /** resultView property */
    private String resultView;
    /** geneFile property */
    private FormFile geneFile;
    /** sampleFile property */
    private FormFile sampleFile;
    /** foldChangeValueUDDown property */
    private String foldChangeValueUDDown = "2";
    /** geneGroup property */
    private String geneGroup;
    /** sampleGroup property */
    private String sampleGroup;
    /** cloneList property */
    private String cloneList;
    /** queryName property */
    private String queryName;
    /** basePairStart property */
    private String basePairStart;

    // Collections used for Lookup values.
    //private ArrayList diseaseType;// moved this to the upperclass: BaseForm.java
    //private ArrayList geneTypeColl;// move this to the upperclass: BaseForm.java
    private ArrayList cloneTypeColl = new ArrayList();
    private ArrayList arrayPlatformTypeColl = new ArrayList();

    // Criteria objects rebuilt on every reset() and populated by validate().
    private DiseaseOrGradeCriteria diseaseOrGradeCriteria;
    private GeneIDCriteria geneCriteria;
    private AllGenesCriteria allGenesCriteria;
    private SampleCriteria sampleCriteria;
    private FoldChangeCriteria foldChangeCriteria;
    private RegionCriteria regionCriteria;
    private CloneOrProbeIDCriteria cloneOrProbeIDCriteria;
    private GeneOntologyCriteria geneOntologyCriteria;
    private PathwayCriteria pathwayCriteria;
    private ArrayPlatformCriteria arrayPlatformCriteria;

    // UntranslatedRegionCriteria: for both 5' and 3', "included" is used as
    // default, on the jsp, it may be commented out for now
    private UntranslatedRegionCriteria untranslatedRegionCriteria;

    // Hashmaps mapping a raw form value -> fully-qualified domain-element
    // class name; consumed reflectively by the create*CriteriaObject methods.
    private HashMap diseaseDomainMap = new HashMap();
    private HashMap geneDomainMap = new HashMap();
    private HashMap sampleDomainMap = new HashMap();
    private HashMap foldUpDomainMap = new HashMap();
    private HashMap foldDownDomainMap = new HashMap();
    private HashMap regionDomainMap = new HashMap();
    private HashMap cloneDomainMap = new HashMap();
    private HashMap geneOntologyDomainMap = new HashMap();
    private HashMap pathwayDomainMap = new HashMap();
    private HashMap arrayPlatformDomainMap = new HashMap();

    // Request captured in reset() so the setters can inspect sibling parameters.
    private HttpServletRequest thisRequest;
    private SessionQueryBag queryCollection;
    private boolean isAllGenes = false;

    private static Logger logger = Logger.getLogger(NautilusConstants.LOGGER);

    public GeneExpressionForm() {
        // Create Lookups for Gene Expression screens
        super();
        setGeneExpressionLookup();
    }

    /**
     * Method validate
     *
     * @param ActionMapping
     *            mapping
     * @param HttpServletRequest
     *            request
     *
     * @return ActionErrors
     */
    public ActionErrors validate(ActionMapping mapping,
            HttpServletRequest request) {
        ActionErrors errors = new ActionErrors();
        // Query Name cannot be blank
        errors = UIFormValidator.validateQueryName(queryName, errors);
        // Chromosomal region validations
        errors = UIFormValidator.validateChromosomalRegion(chrosomeNumber,
                region, cytobandRegion, basePairStart, basePairEnd, errors);
        // Validate Go Classification
        errors = UIFormValidator.validateGOClassification(goClassification,
                errors);
        // NOTE(review): despite the original "Gene List" comment, this call
        // validates the SAMPLE group/list/file fields.
        errors = UIFormValidator.validate(sampleGroup, sampleList, sampleFile,
                errors);
        // Make sure the cloneListFile uploaded is of type txt and MIME type is text/plain
        errors = UIFormValidator.validateTextFileType(cloneListFile, "cloneId",
                errors);
        // Make sure the geneGroup uploaded file is of type txt and MIME type is text/plain
        errors = UIFormValidator.validateTextFileType(geneFile, "geneGroup",
                errors);
        // Validate CloneId
        errors = UIFormValidator.validateCloneId(cloneId, cloneListSpecify,
                cloneListFile, errors);
        // Validate minimum criteria's for GE Query: a "standard" query must
        // specify at least one of gene group / clone id / chromosome / GO / pathway.
        if (this.getQueryName() != null && this.getQueryName().length() >= 1
                && this.getGeneOption().equalsIgnoreCase("standard")) {
            if ((this.getGeneGroup() == null || this.getGeneGroup().trim().length() < 1)
                    && (this.getCloneId() == null || this.getCloneId().trim().length() < 1)
                    && (this.getChrosomeNumber() == null || this.getChrosomeNumber().trim().length() < 1)
                    && (this.getGoClassification() == null || this.getGoClassification().trim().length() < 1)
                    && (this.getPathways() == null || this.getPathways().trim().length() < 1)) {
                errors.add(ActionErrors.GLOBAL_ERROR, new ActionError(
                        "gov.nih.nci.nautilus.ui.struts.form.ge.minimum.error"));
            }
        }
        // Only build criteria objects when the form is clean.
        if (errors.isEmpty()) {
            createDiseaseCriteriaObject();
            createSampleCriteriaObject();
            createAllGenesCriteriaObject();
            createGeneCriteriaObject();
            createFoldChangeCriteriaObject();
            createRegionCriteriaObject();
            createCloneOrProbeCriteriaObject();
            createGeneOntologyCriteriaObject();
            createPathwayCriteriaObject();
            createArrayPlatformCriteriaObject();
        }
        return errors;
    }

    /** Builds the all-genes criteria from the isAllGenes flag set by setGeneOption(). */
    private void createAllGenesCriteriaObject() {
        allGenesCriteria = new AllGenesCriteria(isAllGenes);
        if (allGenesCriteria.isEmpty())
            System.out.println("its empty");
    }

    /**
     * Builds disease criteria: each diseaseDomainMap key is passed to the
     * first constructor of the mapped DiseaseNameDE subclass (reflectively).
     */
    private void createDiseaseCriteriaObject() {
        //look thorugh the diseaseDomainMap to extract out the domain elements
        // and create respective Criteria Objects
        Set keys = diseaseDomainMap.keySet();
        Iterator iter = keys.iterator();
        while (iter.hasNext()) {
            Object key = iter.next();
            try {
                String strDiseaseDomainClass = (String) diseaseDomainMap
                        .get(key);//use key to get value
                Constructor[] diseaseConstructors = Class.forName(
                        strDiseaseDomainClass).getConstructors();
                Object[] parameterObjects = { key };
                DiseaseNameDE diseaseNameDEObj = (DiseaseNameDE) diseaseConstructors[0]
                        .newInstance(parameterObjects);
                diseaseOrGradeCriteria.setDisease(diseaseNameDEObj);
            } catch (Exception ex) {
                logger.error("Error in createDiseaseCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createDiseaseCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /** Builds gene criteria from geneDomainMap (same reflective pattern). */
    private void createGeneCriteriaObject() {
        // Loop thru the HashMap, extract the Domain elements and create
        // respective Criteria Objects
        Set keys = geneDomainMap.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + geneDomainMap.get(key));
            try {
                String strgeneDomainClass = (String) geneDomainMap.get(key);
                Constructor[] geneConstructors = Class.forName(
                        strgeneDomainClass).getConstructors();
                Object[] parameterObjects = { key };
                GeneIdentifierDE geneSymbolDEObj = (GeneIdentifierDE) geneConstructors[0]
                        .newInstance(parameterObjects);
                geneCriteria.setGeneIdentifier(geneSymbolDEObj);
                logger.debug("Gene Domain Element Value==> "
                        + geneSymbolDEObj.getValueObject());
            } catch (Exception ex) {
                // NOTE(review): logged at debug level while the sibling
                // builders use error — confirm whether this is intentional.
                logger.debug("Error in createGeneCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createGeneCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /** Builds sample criteria from sampleDomainMap (same reflective pattern). */
    private void createSampleCriteriaObject() {
        // Loop thru the HashMap, extract the Domain elements and create
        // respective Criteria Objects
        Set keys = sampleDomainMap.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + sampleDomainMap.get(key));
            try {
                String strSampleDomainClass = (String) sampleDomainMap.get(key);
                Constructor[] sampleConstructors = Class.forName(
                        strSampleDomainClass).getConstructors();
                Object[] parameterObjects = { key };
                SampleIDDE sampleIDDEObj = (SampleIDDE) sampleConstructors[0]
                        .newInstance(parameterObjects);
                sampleCriteria.setSampleID(sampleIDDEObj);
                logger.debug("Sample Domain Element Value==> "
                        + sampleIDDEObj.getValueObject());
            } catch (Exception ex) {
                logger.debug("Error in createSampleCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createSampleCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /**
     * Builds fold-change criteria: keys of foldUpDomainMap / foldDownDomainMap
     * are parsed as floats and fed to the mapped ExprFoldChangeDE subclass.
     */
    private void createFoldChangeCriteriaObject() {
        // For Fold Change Up
        Set keys = foldUpDomainMap.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + foldUpDomainMap.get(key));
            try {
                String strFoldDomainClass = (String) foldUpDomainMap.get(key);
                Constructor[] foldConstructors = Class.forName(
                        strFoldDomainClass).getConstructors();
                Object[] parameterObjects = { Float.valueOf((String) key) };
                ExprFoldChangeDE foldChangeDEObj = (ExprFoldChangeDE) foldConstructors[0]
                        .newInstance(parameterObjects);
                foldChangeCriteria.setFoldChangeObject(foldChangeDEObj);
                logger.debug("Fold Change Domain Element Value is ==>"
                        + foldChangeDEObj.getValueObject());
            } catch (Exception ex) {
                logger.error("Error in createFoldChangeCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createFoldChangeCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
        // For Fold Change Down
        keys = foldDownDomainMap.keySet();
        i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + foldDownDomainMap.get(key));
            try {
                String strFoldDomainClass = (String) foldDownDomainMap.get(key);
                Constructor[] foldConstructors = Class.forName(
                        strFoldDomainClass).getConstructors();
                Object[] parameterObjects = { Float.valueOf((String) key) };
                ExprFoldChangeDE foldChangeDEObj = (ExprFoldChangeDE) foldConstructors[0]
                        .newInstance(parameterObjects);
                foldChangeCriteria.setFoldChangeObject(foldChangeDEObj);
                logger.debug("Fold Change Domain Element Value is ==>"
                        + foldChangeDEObj.getValueObject());
            } catch (Exception ex) {
                logger.error("Error in createFoldChangeCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createFoldChangeCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /**
     * Builds region criteria; the mapped class-name suffix decides which
     * setter (cytoband / chromosome / start / end) receives the value.
     */
    private void createRegionCriteriaObject() {
        Set keys = regionDomainMap.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + regionDomainMap.get(key));
            try {
                String strRegionDomainClass = (String) regionDomainMap.get(key);
                Constructor[] regionConstructors = Class.forName(
                        strRegionDomainClass).getConstructors();
                if (strRegionDomainClass.endsWith("CytobandDE")) {
                    Object[] parameterObjects = { (String) key };
                    CytobandDE cytobandDEObj = (CytobandDE) regionConstructors[0]
                            .newInstance(parameterObjects);
                    regionCriteria.setCytoband(cytobandDEObj);
                    logger.debug("Test Cytoband Criteria"
                            + regionCriteria.getCytoband().getValue());
                }
                if (strRegionDomainClass.endsWith("ChromosomeNumberDE")) {
                    Object[] parameterObjects = { (String) key };
                    ChromosomeNumberDE chromosomeDEObj = (ChromosomeNumberDE) regionConstructors[0]
                            .newInstance(parameterObjects);
                    regionCriteria.setChromNumber(chromosomeDEObj);
                    logger.debug("Test Chromosome Criteria "
                            + regionCriteria.getChromNumber().getValue());
                }
                if (strRegionDomainClass.endsWith("StartPosition")) {
                    Object[] parameterObjects = { Integer.valueOf((String) key) };
                    BasePairPositionDE.StartPosition baseStartDEObj = (BasePairPositionDE.StartPosition) regionConstructors[0]
                            .newInstance(parameterObjects);
                    regionCriteria.setStart(baseStartDEObj);
                    logger.debug("Test Start Criteria"
                            + regionCriteria.getStart().getValue());
                }
                if (strRegionDomainClass.endsWith("EndPosition")) {
                    Object[] parameterObjects = { Integer.valueOf((String) key) };
                    BasePairPositionDE.EndPosition baseEndDEObj = (BasePairPositionDE.EndPosition) regionConstructors[0]
                            .newInstance(parameterObjects);
                    regionCriteria.setEnd(baseEndDEObj);
                    logger.debug("Test End Criteria"
                            + regionCriteria.getEnd().getValue());
                }
            } catch (Exception ex) {
                logger.error("Error in createRegionCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createRegionCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /** Builds clone/probe criteria from cloneDomainMap (reflective pattern). */
    private void createCloneOrProbeCriteriaObject() {
        // Loop thru the cloneDomainMap HashMap, extract the Domain elements and
        // create respective Criteria Objects
        Set keys = cloneDomainMap.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + cloneDomainMap.get(key));
            try {
                String strCloneDomainClass = (String) cloneDomainMap.get(key);
                Constructor[] cloneConstructors = Class.forName(
                        strCloneDomainClass).getConstructors();
                Object[] parameterObjects = { key };
                CloneIdentifierDE cloneIdentfierDEObj = (CloneIdentifierDE) cloneConstructors[0]
                        .newInstance(parameterObjects);
                cloneOrProbeIDCriteria.setCloneIdentifier(cloneIdentfierDEObj);
                logger.debug("Clone Domain Element Value==> "
                        + cloneIdentfierDEObj.getValueObject());
            } catch (Exception ex) {
                // NOTE(review): message names createGeneCriteriaObject — looks
                // like a copy/paste slip; kept verbatim (doc-only change).
                logger.error("Error in createGeneCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createGeneCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /** Builds Gene Ontology criteria from geneOntologyDomainMap. */
    private void createGeneOntologyCriteriaObject() {
        // Loop thru the geneOntologyDomainMap HashMap, extract the Domain
        // elements and create respective Criteria Objects
        Set keys = geneOntologyDomainMap.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + geneOntologyDomainMap.get(key));
            try {
                String strGeneOntologyDomainClass = (String) geneOntologyDomainMap
                        .get(key);
                Constructor[] geneOntologyConstructors = Class.forName(
                        strGeneOntologyDomainClass).getConstructors();
                Object[] parameterObjects = { key };
                GeneOntologyDE geneOntologyDEObj = (GeneOntologyDE) geneOntologyConstructors[0]
                        .newInstance(parameterObjects);
                geneOntologyCriteria.setGOIdentifier(geneOntologyDEObj);
                logger.debug("GO Domain Element Value==> "
                        + geneOntologyDEObj.getValueObject());
            } catch (Exception ex) {
                logger.error("Error in createGeneOntologyCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createGeneOntologyCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /** Builds pathway criteria from pathwayDomainMap. */
    private void createPathwayCriteriaObject() {
        // Loop thru the pathwayDomainMap HashMap, extract the Domain elements
        // and create respective Criteria Objects
        Set keys = pathwayDomainMap.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + pathwayDomainMap.get(key));
            try {
                String strPathwayDomainClass = (String) pathwayDomainMap
                        .get(key);
                logger.debug("strPathwayDomainClass is for pathway:"
                        + strPathwayDomainClass + strPathwayDomainClass.length());
                Constructor[] pathwayConstructors = Class.forName(
                        strPathwayDomainClass).getConstructors();
                Object[] parameterObjects = { key };
                PathwayDE pathwayDEObj = (PathwayDE) pathwayConstructors[0]
                        .newInstance(parameterObjects);
                pathwayCriteria.setPathwayName(pathwayDEObj);
                logger.debug("GO Domain Element Value==> "
                        + pathwayDEObj.getValueObject());
            } catch (Exception ex) {
                logger.error("Error in createGeneCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createGeneCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /** Builds array-platform criteria from arrayPlatformDomainMap. */
    private void createArrayPlatformCriteriaObject() {
        // Loop thru the pathwayDomainMap HashMap, extract the Domain elements
        // and create respective Criteria Objects
        Set keys = arrayPlatformDomainMap.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            Object key = i.next();
            logger.debug(key + "=>" + arrayPlatformDomainMap.get(key));
            try {
                String strArrayPlatformDomainClass = (String) arrayPlatformDomainMap
                        .get(key);
                Constructor[] arrayPlatformConstructors = Class.forName(
                        strArrayPlatformDomainClass).getConstructors();
                Object[] parameterObjects = { key };
                ArrayPlatformDE arrayPlatformDEObj = (ArrayPlatformDE) arrayPlatformConstructors[0]
                        .newInstance(parameterObjects);
                arrayPlatformCriteria.setPlatform(arrayPlatformDEObj);
                logger.debug("GO Domain Element Value==> "
                        + arrayPlatformDEObj.getValueObject());
            } catch (Exception ex) {
                logger.error("Error in createArrayPlatformCriteriaObject "
                        + ex.getMessage());
                ex.printStackTrace();
            } catch (LinkageError le) {
                logger.error("Linkage Error in createArrayPlatformCriteriaObject "
                        + le.getMessage());
                le.printStackTrace();
            }
        }
    }

    /** Populates the clone-type and array-platform dropdown lookup lists. */
    public void setGeneExpressionLookup() {
        //diseaseType = new ArrayList();// moved to the upper class: BaseForm.java
        //geneTypeColl = new ArrayList();// moved to the upper class: BaseForm.java
        cloneTypeColl = new ArrayList();
        arrayPlatformTypeColl = new ArrayList();
        // These are hardcoded but will come from DB
        /*
         * moved to the upperclass:: BaseForm.java — the original carried a
         * large commented-out block of diseaseType.add(new LabelValueBean(...))
         * calls (Astrocytic, Oligodendroglial, Ependymal cell, Mixed gliomas,
         * Neuroepithelial, Choroid Plexus, Neuronal and mixed neuronal-glial,
         * Pineal Parenchyma, Embryonal, Glioblastoma); see BaseForm.java.
         */
        //geneTypeColl.add( new LabelValueBean( "All Genes", "allgenes" ) );//moved to the upperclass:: BaseForm.java
        //geneTypeColl.add( new LabelValueBean( "Name/Symbol", "genesymbol" ) );//moved to the upperclass:: BaseForm.java
        //geneTypeColl.add( new LabelValueBean( "Locus Link Id", "genelocus" ) );//moved to the upperclass:: BaseForm.java
        //geneTypeColl.add( new LabelValueBean( "GenBank AccNo.", "genbankno" ) );//moved to the upperclass:: BaseForm.java
        cloneTypeColl.add(new LabelValueBean("IMAGE Id", "imageId"));
        //cloneTypeColl.add( new LabelValueBean( "BAC Id", "BACId" ) );
        cloneTypeColl.add(new LabelValueBean("Probe Set Id", "probeSetId"));
        arrayPlatformTypeColl.add(new LabelValueBean("all", "all"));
        arrayPlatformTypeColl.add(new LabelValueBean("Oligo (Affymetrix)",
                "Oligo (Affymetrix)"));
        arrayPlatformTypeColl.add(new LabelValueBean("cDNA", "cDNA"));
    }

    /**
     * Method reset. Reset all properties to their default values.
     *
     * @param ActionMapping
     *            mapping used to select this instance.
     * @param HttpServletRequest
     *            request The servlet request we are processing.
     */
    public void reset(ActionMapping mapping, HttpServletRequest request) {
        //geneOption = "";
        pathwayName = new String[0];
        geneList = "";
        goBiologicalProcess = "";
        tumorGrade = "";
        region = "";
        foldChangeValueDown = "2";
        cytobandRegion = "";
        cloneId = "";
        pathways = "";
        tumorType = "";
        arrayPlatform = "";
        cloneListFile = null;
        goCellularComp = "";
        goMolecularFunction = "";
        cloneListSpecify = "";
        goClassification = "";
        basePairEnd = "";
        chrosomeNumber = "";
        regulationStatus = "";
        foldChangeValueUnchangeFrom = "0.8";
        foldChangeValueUnchangeTo = "1.2";
        foldChangeValueUp = "2";
        geneType = "";
        foldChangeValueUDUp = "2";
        resultView = "";
        geneFile = null;
        foldChangeValueUDDown = "2";
        geneGroup = "";
        cloneList = "";
        queryName = "";
        basePairStart = "";
        sampleGroup = "";
        sampleList = "";
        sampleFile = null;
        //Set the Request Object
        this.thisRequest = request;
        // NOTE(review): every domain map EXCEPT sampleDomainMap is re-created
        // here — sample entries therefore survive across resets; confirm
        // whether that is intentional.
        diseaseDomainMap = new HashMap();
        geneDomainMap = new HashMap();
        foldUpDomainMap = new HashMap();
        foldDownDomainMap = new HashMap();
        regionDomainMap = new HashMap();
        cloneDomainMap = new HashMap();
        geneOntologyDomainMap = new HashMap();
        pathwayDomainMap = new HashMap();
        arrayPlatformDomainMap = new HashMap();
        diseaseOrGradeCriteria = new DiseaseOrGradeCriteria();
        geneCriteria = new GeneIDCriteria();
        sampleCriteria = new SampleCriteria();
        foldChangeCriteria = new FoldChangeCriteria();
        regionCriteria = new RegionCriteria();
        cloneOrProbeIDCriteria = new CloneOrProbeIDCriteria();
        geneOntologyCriteria = new GeneOntologyCriteria();
        pathwayCriteria = new PathwayCriteria();
        arrayPlatformCriteria = new ArrayPlatformCriteria();
        allGenesCriteria = new AllGenesCriteria(isAllGenes);
        //arrayPlatformCriteria = new ArrayPlatformCriteria();
    }

    /**
     * Returns the geneList.
     *
     * @return String
     */
    public String getGeneList() {
        return geneList;
    }

    /**
     * Set the geneList. When the geneGroup request parameter is "Specify",
     * also splits the comma-separated list and records each entry in
     * geneDomainMap against the GeneIdentifierDE subclass selected by the
     * geneType request parameter.
     *
     * @param geneList
     *            The geneList to set
     */
    public void setGeneList(String geneList) {
        this.geneList = geneList;
        if (thisRequest != null) {
            String thisGeneType = this.thisRequest.getParameter("geneType");
            String thisGeneGroup = this.thisRequest.getParameter("geneGroup");
            if ((thisGeneGroup != null)
                    && thisGeneGroup.equalsIgnoreCase("Specify")
                    && (thisGeneType.length() > 0)
                    && (this.geneList.length() > 0)) {
                // \x2C is the comma character
                String[] splitValue = this.geneList.split("\\x2C");
                for (int i = 0; i < splitValue.length; i++) {
                    if (thisGeneType.equalsIgnoreCase("genesymbol")) {
                        geneDomainMap.put(splitValue[i].trim(),
                                GeneIdentifierDE.GeneSymbol.class.getName());
                    } else if (thisGeneType.equalsIgnoreCase("genelocus")) {
                        geneDomainMap.put(splitValue[i].trim(),
                                GeneIdentifierDE.LocusLink.class.getName());
                    } else if (thisGeneType.equalsIgnoreCase("genbankno")) {
                        geneDomainMap.put(splitValue[i].trim(),
                                GeneIdentifierDE.GenBankAccessionNumber.class
                                        .getName());
                    } else if (thisGeneType.equalsIgnoreCase("allgenes")) {
                        geneDomainMap.put(splitValue[i].trim(),
                                GeneIdentifierDE.GeneSymbol.class.getName());
                    }
                }
            }
            // Set for all genes
            /*
             * if (thisGeneGroup != null &&
             * thisGeneGroup.equalsIgnoreCase("Specify") &&
             * (thisGeneType.equalsIgnoreCase("allgenes"))){
             * geneDomainMap.put("allgenes",
             * GeneIdentifierDE.GeneSymbol.class.getName()); }
             */
        }
    }

    /**
     * Sets the geneOption; additionally flips isAllGenes when the geneOption
     * request parameter is "allgenes".
     *
     * @param geneOption
     *            The geneOption to set
     */
    public void setGeneOption(String geneOption) {
        this.geneOption = geneOption;
        if (thisRequest != null) {
            String thisGeneOption = this.thisRequest.getParameter("geneOption");
            if (thisGeneOption != null
                    && thisGeneOption.equalsIgnoreCase("allgenes")) {
                isAllGenes = true;
            }
        }
    }

    /**
     * Returns the geneOption.
     *
     * @return String
     */
    public String getGeneOption() {
        return geneOption;
    }

    /**
     * Returns the sampleList.
     *
     * @return String
     */
    public String getSampleList() {
        return sampleList;
    }

    /**
     * Set the sampleList. When the sampleGroup request parameter is
     * "Specify", also records each comma-separated entry in sampleDomainMap.
     *
     * @param sampleList
     *            The sampleList to set
     */
    public void setSampleList(String sampleList) {
        this.sampleList = sampleList;
        if (thisRequest != null) {
            String thisSampleGroup = this.thisRequest
                    .getParameter("sampleGroup");
            if ((thisSampleGroup != null)
                    && thisSampleGroup.equalsIgnoreCase("Specify")
                    && (this.sampleList.length() > 0)) {
                String[] splitSampleValue = this.sampleList.split("\\x2C");
                for (int i = 0; i < splitSampleValue.length; i++) {
                    sampleDomainMap.put(splitSampleValue[i].trim(),
                            SampleIDDE.class.getName());
                }
            }
        }
    }

    /**
     * Returns the geneFile.
     *
     * @return String
     */
    public FormFile getGeneFile() {
        return geneFile;
    }

    /**
     * Returns the sampleFile.
     *
     * @return String
     */
    public FormFile getSampleFile() {
        return sampleFile;
    }

    /**
     * Set the geneFile. When the geneGroup request parameter is "Upload" and
     * the file is a text/plain .txt, reads up to
     * NautilusConstants.MAX_FILEFORM_COUNT ASCII lines and records each in
     * geneDomainMap against the GeneIdentifierDE subclass selected by the
     * geneType request parameter.
     *
     * @param geneFile
     *            The geneFile to set
     */
    public void setGeneFile(FormFile geneFile) {
        this.geneFile = geneFile;
        if (thisRequest != null) {
            String thisGeneType = this.thisRequest.getParameter("geneType");
            String thisGeneGroup = this.thisRequest.getParameter("geneGroup");
            // retrieve the file name & size
            // NOTE(review): fileName and fileSize are never used below.
            String fileName = geneFile.getFileName();
            int fileSize = geneFile.getFileSize();
            if ((thisGeneGroup != null)
                    && thisGeneGroup.equalsIgnoreCase("Upload")
                    && (thisGeneType.length() > 0)
                    && (this.geneFile != null)
                    && (this.geneFile.getFileName().endsWith(".txt"))
                    && (this.geneFile.getContentType().equals("text/plain"))) {
                try {
                    InputStream stream = geneFile.getInputStream();
                    String inputLine = null;
                    BufferedReader inFile = new BufferedReader(
                            new InputStreamReader(stream));
                    int count = 0;
                    while ((inputLine = inFile.readLine()) != null
                            && count < NautilusConstants.MAX_FILEFORM_COUNT) {
                        if (UIFormValidator.isAscii(inputLine)) {
                            //make sure all data is ASCII
                            count++;
                            if (thisGeneType.equalsIgnoreCase("genesymbol")) {
                                geneDomainMap.put(inputLine,
                                        GeneIdentifierDE.GeneSymbol.class
                                                .getName());
                            } else if (thisGeneType.equalsIgnoreCase("genelocus")) {
                                geneDomainMap.put(inputLine,
                                        GeneIdentifierDE.LocusLink.class.getName());
                            } else
if (thisGeneType.equalsIgnoreCase("genbankno")) { geneDomainMap.put( inputLine, GeneIdentifierDE.GenBankAccessionNumber.class .getName()); } else if (thisGeneType.equalsIgnoreCase("allgenes")) { geneDomainMap.put(inputLine, GeneIdentifierDE.GeneSymbol.class .getName()); } } }// end of while inFile.close(); } catch (IOException ex) { logger.error("Errors when uploading gene file:" + ex.getMessage()); } } } } /** * Set the sampleFile. * * @param sampleFile * The sampleFile to set */ public void setSampleFile(FormFile sampleFile) { this.sampleFile = sampleFile; if(thisRequest!=null){ String thisSampleGroup = this.thisRequest.getParameter("sampleGroup"); // retrieve the file name & size String fileName= sampleFile.getFileName(); int fileSize = sampleFile.getFileSize(); if ((thisSampleGroup != null) && thisSampleGroup.equalsIgnoreCase("Upload") && (this.sampleFile != null) && (this.sampleFile.getFileName().endsWith(".txt")) && (this.sampleFile.getContentType().equals("text/plain"))) { try { InputStream stream = sampleFile.getInputStream(); String inputLine = null; BufferedReader inFile = new BufferedReader( new InputStreamReader(stream)); int count = 0; while ((inputLine = inFile.readLine()) != null && count < NautilusConstants.MAX_FILEFORM_COUNT) { if(UIFormValidator.isAscii(inputLine)){ //make sure all data is ASCII count++; sampleDomainMap.put(inputLine,SampleIDDE.class.getName()); } }// end of while inFile.close(); } catch (IOException ex) { logger.error("Errors when uploading sample file:" + ex.getMessage()); } } } } public GeneIDCriteria getGeneIDCriteria() { return this.geneCriteria; } public AllGenesCriteria getAllGenesCriteria(){ return this.allGenesCriteria; } public SampleCriteria getSampleCriteria(){ return this.sampleCriteria; } public FoldChangeCriteria getFoldChangeCriteria() { return this.foldChangeCriteria; } public RegionCriteria getRegionCriteria() { return this.regionCriteria; } public DiseaseOrGradeCriteria getDiseaseOrGradeCriteria() { return 
this.diseaseOrGradeCriteria; } public CloneOrProbeIDCriteria getCloneOrProbeIDCriteria() { return this.cloneOrProbeIDCriteria; } public GeneOntologyCriteria getGeneOntologyCriteria() { return this.geneOntologyCriteria; } public PathwayCriteria getPathwayCriteria() { return this.pathwayCriteria; } public ArrayPlatformCriteria getArrayPlatformCriteria() { return this.arrayPlatformCriteria; } /** * Returns the goBiologicalProcess. * * @return String */ public String getGoBiologicalProcess() { return goBiologicalProcess; } /** * Set the goBiologicalProcess. * * @param goBiologicalProcess * The goBiologicalProcess to set */ public void setGoBiologicalProcess(String goBiologicalProcess) { this.goBiologicalProcess = goBiologicalProcess; } /** * Returns the tumorGrade. * * @return String */ public String getTumorGrade() { return tumorGrade; } /** * Set the tumorGrade. * * @param tumorGrade * The tumorGrade to set */ public void setTumorGrade(String tumorGrade) { this.tumorGrade = tumorGrade; } /** * Returns the region. * * @return String */ public String getRegion() { return region; } /** * Set the region. * * @param region * The region to set */ public void setRegion(String region) { this.region = region; } /** * Returns the foldChangeValueDown. * * @return String */ public String getFoldChangeValueDown() { return foldChangeValueDown; } /** * Set the foldChangeValueDown. * * @param foldChangeValueDown * The foldChangeValueDown to set */ public void setFoldChangeValueDown(String foldChangeValueDown) { this.foldChangeValueDown = foldChangeValueDown; if(thisRequest!=null){ String thisRegulationStatus = this.thisRequest .getParameter("regulationStatus"); if (thisRegulationStatus != null && thisRegulationStatus.equalsIgnoreCase("down") && (this.foldChangeValueDown.length() > 0)) foldDownDomainMap.put(this.foldChangeValueDown, ExprFoldChangeDE.DownRegulation.class.getName()); } } /** * Returns the cytobandRegion. 
* * @return String */ public String getCytobandRegion() { return cytobandRegion; } /** * Set the cytobandRegion. * * @param cytobandRegion * The cytobandRegion to set */ public void setCytobandRegion(String cytobandRegion) { this.cytobandRegion = cytobandRegion; if(thisRequest!=null){ String thisRegion = this.thisRequest.getParameter("region"); String thisChrNumber = this.thisRequest.getParameter("chrosomeNumber"); if (thisChrNumber != null && thisChrNumber.trim().length() > 0) { if (thisRegion != null && thisRegion.equalsIgnoreCase("cytoband") && this.cytobandRegion.trim().length() > 0) { regionDomainMap.put(this.cytobandRegion, CytobandDE.class .getName()); } } } } /** * Returns the cloneId. * * @return String */ public String getCloneId() { return cloneId; } /** * Set the cloneId. * * @param cloneId * The cloneId to set */ public void setCloneId(String cloneId) { this.cloneId = cloneId; } /** * Returns the pathways. * * @return String */ public String getPathways() { return pathways; } /** * Set the pathways. * * @param pathways * The pathways to set */ public void setPathways(String pathways) { this.pathways = pathways.trim(); if (this.pathways != null && this.pathways.length() > 0) { String[] splitValue = this.pathways.split("\\r\\n"); for (int i = 0; i < splitValue.length; i++) { pathwayDomainMap.put(splitValue[i], PathwayDE.class.getName()); } } } /** * Returns the tumorType. * * @return String */ public String getTumorType() { return tumorType; } /** * Set the tumorType. * * @param tumorType * The tumorType to set */ public void setTumorType(String tumorType) { this.tumorType = tumorType; if (this.tumorType.equalsIgnoreCase("ALL")) { ArrayList allDiseases = this.getDiseaseType(); for (Iterator diseaseIter = allDiseases.iterator(); diseaseIter .hasNext();) { LabelValueBean thisLabelBean = (LabelValueBean) diseaseIter .next(); String thisDiseaseType = thisLabelBean.getValue(); // stuff this in our DomainMap for later use !! 
if (!thisDiseaseType.equalsIgnoreCase("ALL")&&diseaseDomainMap!=null) { diseaseDomainMap.put(thisDiseaseType, DiseaseNameDE.class .getName()); } } } else { diseaseDomainMap.put(this.tumorType, DiseaseNameDE.class.getName()); } } /** * Returns the arrayPlatform. * * @return String */ public String getArrayPlatform() { return arrayPlatform; } /** * Set the arrayPlatform. * * @param arrayPlatform * The arrayPlatform to set */ public void setArrayPlatform(String arrayPlatform) { this.arrayPlatform = arrayPlatform; if(arrayPlatformDomainMap!=null){ arrayPlatformDomainMap.put(this.arrayPlatform, ArrayPlatformDE.class .getName()); } } /** * Returns the goCellularComp. * * @return String */ public String getGoCellularComp() { return goCellularComp; } /** * Set the goCellularComp. * * @param goCellularComp * The goCellularComp to set */ public void setGoCellularComp(String goCellularComp) { this.goCellularComp = goCellularComp; } /** * Returns the goMolecularFunction. * * @return String */ public String getGoMolecularFunction() { return goMolecularFunction; } /** * Set the goMolecularFunction. * * @param goMolecularFunction * The goMolecularFunction to set */ public void setGoMolecularFunction(String goMolecularFunction) { this.goMolecularFunction = goMolecularFunction; } /** * Returns the cloneListSpecify. * * @return String */ public String getCloneListSpecify() { return cloneListSpecify; } /** * Set the cloneListSpecify. 
* * @param cloneListSpecify * The cloneListSpecify to set */ public void setCloneListSpecify(String cloneListSpecify) { this.cloneListSpecify = cloneListSpecify; if(thisRequest!=null){ // this is to check if the radio button is selected for the clone // category String thisCloneId = (String) thisRequest.getParameter("cloneId"); // this is to check the type of the clone String thisCloneList = (String) thisRequest.getParameter("cloneList"); if (thisCloneId != null && thisCloneList != null && !thisCloneList.equals("")) { if (this.cloneListSpecify != null && !cloneListSpecify.equals("")) { String[] cloneStr = cloneListSpecify.split("\\x2C"); for (int i = 0; i < cloneStr.length; i++) { if (thisCloneList.equalsIgnoreCase("imageId")) { cloneDomainMap.put(cloneStr[i].trim(), CloneIdentifierDE.IMAGEClone.class.getName()); } else if (thisCloneList.equalsIgnoreCase("BACId")) { cloneDomainMap.put(cloneStr[i].trim(), CloneIdentifierDE.BACClone.class.getName()); } else if (thisCloneList.equalsIgnoreCase("probeSetId")) { cloneDomainMap.put(cloneStr[i].trim(), CloneIdentifierDE.ProbesetID.class.getName()); } } // end of for loop }// end of if(this.cloneListSpecify != null && // !cloneListSpecify.equals("")){ } } } /** * Returns the cloneListFile. * * @return String */ public FormFile getCloneListFile() { return cloneListFile; } /** * Set the cloneListFile. 
* * @param cloneListFile * The cloneListFile to set */ public void setCloneListFile(FormFile cloneListFile) { this.cloneListFile = cloneListFile; if(thisRequest!=null){ // this is to check if the radio button is selected for the clone // category String thisCloneId = (String) thisRequest.getParameter("cloneId"); // this is to check the type of the clone String thisCloneList = (String) thisRequest.getParameter("cloneList"); //retrieve the file name & size String fileName= cloneListFile.getFileName(); int fileSize = cloneListFile.getFileSize(); if ((thisCloneId != null) && thisCloneId.equalsIgnoreCase("Upload") && (thisCloneList.length() > 0) && (this.cloneListFile != null) && (this.cloneListFile.getFileName().endsWith(".txt")) && (this.getCloneListFile().getContentType().equals("text/plain"))) { try { InputStream stream = cloneListFile.getInputStream(); String inputLine = null; BufferedReader inFile = new BufferedReader( new InputStreamReader(stream)); int count = 0; while ((inputLine = inFile.readLine()) != null && count < NautilusConstants.MAX_FILEFORM_COUNT) { if(UIFormValidator.isAscii(inputLine)){ //make sure all data is ASCII count ++; //increment if (thisCloneList.equalsIgnoreCase("IMAGEId")) { cloneDomainMap.put(inputLine, CloneIdentifierDE.IMAGEClone.class .getName()); } else if (thisCloneList.equalsIgnoreCase("probeSetId")) { cloneDomainMap.put(inputLine, CloneIdentifierDE.ProbesetID.class .getName()); } } }// end of while inFile.close(); } catch (IOException ex) { logger.error("Errors when uploading clone/probeset file:" + ex.getMessage()); } } } } /** * Returns the goClassification. * * @return String */ public String getGoClassification() { return goClassification; } /** * Set the goClassification. 
* * @param goClassification * The goClassification to set */ public void setGoClassification(String goClassification) { this.goClassification = goClassification; String goSelect = null; if(thisRequest!=null){ goSelect = (String) thisRequest.getParameter("goClassification"); } if (goSelect != null && !goSelect.equals("")) { geneOntologyDomainMap.put(this.goClassification, GeneOntologyDE.class.getName()); } } /** * Returns the basePairEnd. * * @return String */ public String getBasePairEnd() { return basePairEnd; } /** * Set the basePairEnd. * * @param basePairEnd * The basePairEnd to set */ public void setBasePairEnd(String basePairEnd) { this.basePairEnd = basePairEnd; if(thisRequest!=null){ String thisRegion = this.thisRequest.getParameter("region"); String thisChrNumber = this.thisRequest.getParameter("chrosomeNumber"); String thisBasePairStart = this.thisRequest .getParameter("basePairStart"); if (thisChrNumber != null && thisChrNumber.trim().length() > 0) { if (thisRegion != null && thisBasePairStart != null && this.basePairEnd != null) { if ((thisRegion.equalsIgnoreCase("basePairPosition")) && (thisBasePairStart.trim().length() > 0) && (this.basePairEnd.trim().length() > 0)) { regionDomainMap.put(this.basePairEnd, BasePairPositionDE.EndPosition.class.getName()); } } } } } /** * Returns the chrosomeNumber. * * @return String */ public String getChrosomeNumber() { return chrosomeNumber; } /** * Set the chrosomeNumber. * * @param chrosomeNumber * The chrosomeNumber to set */ public void setChrosomeNumber(String chrosomeNumber) { this.chrosomeNumber = chrosomeNumber; if(regionDomainMap!=null){ if (chrosomeNumber != null && chrosomeNumber.length() > 0) { regionDomainMap.put(this.chrosomeNumber, ChromosomeNumberDE.class .getName()); } } } /** * Returns the regulationStatus. * * @return String */ public String getRegulationStatus() { return regulationStatus; } /** * Set the regulationStatus. 
* * @param regulationStatus * The regulationStatus to set */ public void setRegulationStatus(String regulationStatus) { this.regulationStatus = regulationStatus; } /** * Returns the foldChangeValueUnchange. * * @return String */ public String getFoldChangeValueUnchangeFrom() { return foldChangeValueUnchangeFrom; } /** * Set the foldChangeValueUnchange. * * @param foldChangeValueUnchange * The foldChangeValueUnchange to set */ public void setFoldChangeValueUnchangeFrom( String foldChangeValueUnchangeFrom) { this.foldChangeValueUnchangeFrom = foldChangeValueUnchangeFrom; if(thisRequest!=null){ String thisRegulationStatus = this.thisRequest .getParameter("regulationStatus"); if (thisRegulationStatus != null && thisRegulationStatus.equalsIgnoreCase("unchange") && (this.foldChangeValueUnchangeFrom.length() > 0)) foldDownDomainMap.put(this.foldChangeValueUnchangeFrom, ExprFoldChangeDE.UnChangedRegulationDownLimit.class .getName()); } } /** * Returns the foldChangeValueUp. * * @return String */ /** * Returns the foldChangeValueUnchange. * * @return String */ public String getFoldChangeValueUnchangeTo() { return foldChangeValueUnchangeTo; } /** * Set the foldChangeValueUnchange. * * @param foldChangeValueUnchange * The foldChangeValueUnchange to set */ public void setFoldChangeValueUnchangeTo(String foldChangeValueUnchangeTo) { this.foldChangeValueUnchangeTo = foldChangeValueUnchangeTo; if(thisRequest!=null){ String thisRegulationStatus = this.thisRequest .getParameter("regulationStatus"); if (thisRegulationStatus != null && thisRegulationStatus.equalsIgnoreCase("unchange") && (this.foldChangeValueUnchangeTo.length() > 0)) { foldUpDomainMap.put(this.foldChangeValueUnchangeTo, ExprFoldChangeDE.UnChangedRegulationUpperLimit.class .getName()); } } } /** * Returns the foldChangeValueUp. * * @return String */ public String getFoldChangeValueUp() { return foldChangeValueUp; } /** * Set the foldChangeValueUp. 
* * @param foldChangeValueUp * The foldChangeValueUp to set */ public void setFoldChangeValueUp(String foldChangeValueUp) { this.foldChangeValueUp = foldChangeValueUp; logger.debug("I am in the setFoldChangeValueUp() method"); if(thisRequest!=null){ String thisRegulationStatus = this.thisRequest .getParameter("regulationStatus"); if (thisRegulationStatus != null && thisRegulationStatus.equalsIgnoreCase("up") && (this.foldChangeValueUp.length() > 0)) foldUpDomainMap.put(this.foldChangeValueUp, ExprFoldChangeDE.UpRegulation.class.getName()); } } /** * Returns the geneType. * * @return String */ public String getGeneType() { return geneType; } /** * Set the geneType. * * @param geneType * The geneType to set */ public void setGeneType(String geneType) { this.geneType = geneType; } /** * Returns the foldChangeValueUDUp. * * @return String */ public String getFoldChangeValueUDUp() { return foldChangeValueUDUp; } /** * Set the foldChangeValueUDUp. * * @param foldChangeValueUDUp * The foldChangeValueUDUp to set */ public void setFoldChangeValueUDUp(String foldChangeValueUDUp) { this.foldChangeValueUDUp = foldChangeValueUDUp; if(thisRequest!=null){ String thisRegulationStatus = this.thisRequest .getParameter("regulationStatus"); logger.debug("I am in the setFoldChangeValueUDUp() thisRegulationStatus:" + thisRegulationStatus); if (thisRegulationStatus != null && thisRegulationStatus.equalsIgnoreCase("updown") && (this.foldChangeValueUDUp.length() > 0)) { foldUpDomainMap.put(this.foldChangeValueUDUp, ExprFoldChangeDE.UpRegulation.class.getName()); logger.debug("foldDomainMap size in the setFoldChangeValueUDUp() method:" + foldUpDomainMap.size()); } } } /** * Returns the foldChangeValueUDDown. * * @return String */ public String getFoldChangeValueUDDown() { return foldChangeValueUDDown; } /** * Set the foldChangeValueUDDown. 
* * @param foldChangeValueUDDown * The foldChangeValueUDDown to set */ public void setFoldChangeValueUDDown(String foldChangeValueUDDown) { this.foldChangeValueUDDown = foldChangeValueUDDown; if(thisRequest!=null){ String thisRegulationStatus = this.thisRequest .getParameter("regulationStatus"); logger.debug("I am in the setFoldChangeValueUDDown() methid: " + thisRegulationStatus); if (thisRegulationStatus != null && thisRegulationStatus.equalsIgnoreCase("updown") && (this.foldChangeValueUDDown.length() > 0)) foldDownDomainMap.put(this.foldChangeValueUDDown, ExprFoldChangeDE.DownRegulation.class.getName()); logger.debug("foldDomainMap size in the setFoldChangeValueUDDown() method:" + foldDownDomainMap.size()); } } /** * Returns the resultView. * * @return String */ public String getResultView() { return resultView; } /** * Set the resultView. * * @param resultView * The resultView to set */ public void setResultView(String resultView) { this.resultView = resultView; } /** * Returns the geneGroup. * * @return String */ public String getGeneGroup() { return geneGroup; } /** * Set the geneGroup. * * @param geneGroup * The geneGroup to set */ public void setGeneGroup(String geneGroup) { this.geneGroup = geneGroup; } /** * Returns the geneGroup. * * @return String */ public String getSampleGroup() { return sampleGroup; } /** * Set the sampleGroup. * * @param sampleGroup * The sampleGroup to set */ public void setSampleGroup(String sampleGroup) { this.sampleGroup = sampleGroup; } /** * Returns the cloneList. * * @return String */ public String getCloneList() { return cloneList; } /** * Set the cloneList. * * @param cloneList * The cloneList to set */ public void setCloneList(String cloneList) { this.cloneList = cloneList; } /** * Returns the queryName. * * @return String */ public String getQueryName() { return queryName; } /** * Set the queryName. 
* * @param queryName * The queryName to set */ public void setQueryName(String queryName) { this.queryName = queryName; } /** * Returns the basePairStart. * * @return String */ public String getBasePairStart() { return basePairStart; } /** * Set the basePairStart. * * @param basePairStart * The basePairStart to set */ public void setBasePairStart(String basePairStart) { this.basePairStart = basePairStart; if(thisRequest!=null){ String thisRegion = this.thisRequest.getParameter("region"); String thisChrNumber = this.thisRequest.getParameter("chrosomeNumber"); String thisBasePairEnd = this.thisRequest.getParameter("basePairEnd"); if (thisChrNumber != null && thisChrNumber.trim().length() > 0) { if (thisRegion != null && this.basePairStart != null && thisBasePairEnd != null) { if ((thisRegion.equalsIgnoreCase("basePairPosition")) && (thisBasePairEnd.trim().length() > 0) && (this.basePairStart.trim().length() > 0)) { regionDomainMap.put(this.basePairStart, BasePairPositionDE.StartPosition.class.getName()); } } } } } public ArrayList getCloneTypeColl() { return cloneTypeColl; } public void setQueryCollection(SessionQueryBag queryCollection) { this.queryCollection = queryCollection; } public SessionQueryBag getQueryCollection() { return this.queryCollection; } public String[] getPathwayName() { return pathwayName; } public void setPathwayName(String[] pathwayName) { this.pathwayName = pathwayName; } public GeneExpressionForm cloneMe() { GeneExpressionForm form = new GeneExpressionForm(); form.setPathwayName(pathwayName); form.setGeneList(geneList); form.setSampleList(sampleList); form.setGoClassification(goClassification); form.setGoCellularComp(goCellularComp); form.setGoMolecularFunction(goMolecularFunction); form.setGoCellularComp(goBiologicalProcess); form.setTumorGrade(tumorGrade); form.setRegion(region); form.setFoldChangeValueDown(foldChangeValueDown); form.setCytobandRegion(cytobandRegion); form.setCloneId(cloneId); form.setPathways(pathways); 
form.setTumorType(tumorType); form.setArrayPlatform(arrayPlatform); form.setCloneListFile(cloneListFile); form.setCloneListSpecify(cloneListSpecify); form.setBasePairEnd(basePairEnd); form.setChrosomeNumber(chrosomeNumber); form.setRegulationStatus(regulationStatus); form.setFoldChangeValueUnchangeFrom(foldChangeValueUnchangeFrom); form.setFoldChangeValueUnchangeTo(foldChangeValueUnchangeTo); form.setFoldChangeValueUp(foldChangeValueUp); form.setGeneType(geneType); form.setFoldChangeValueUDUp(foldChangeValueUDUp); form.setResultView(resultView); form.setGeneFile(geneFile); form.setSampleFile(sampleFile); form.setFoldChangeValueUDDown(foldChangeValueUDDown); form.setGeneGroup(geneGroup); form.setSampleGroup(sampleGroup); form.setCloneList(cloneList); form.setQueryName(queryName); form.setBasePairStart(basePairStart); form.setQueryCollection(queryCollection); /* * form.setCloneTypeColl(cloneTypeColl); * form.setArrayPlatformTypeColl(arrayPlatformTypeColl); * form.setDiseaseOrGradeCriteria(diseaseOrGradeCriteria); * form.setGeneCriteria(geneCriteria); * form.setFoldChangeCriteria(foldChangeCriteria); * form.setRegionCriteria(regionCriteria); * form.setCloneOrProbeIDCriteria(cloneOrProbeIDCriteria); * form.setGeneOntologyCriteria(geneOntologyCriteria); * form.setPathwayCriteria(pathwayCriteria); * form.setArrayPlatform(arrayPlatformCriteria); * form.setUntranslatedRegionCriteria(untranslatedRegionCriteria); * form.setDiseaseDomainMap(diseaseDomainMap); * form.setGeneDomainMap(geneDomainMap); * form.setFoldUpDomainMap(foldUpDomainMap); * form.setFoldDownDomainMap(foldDownDomainMap); * form.setRegionDomainMap(regionDomainMap); * form.setCloneDomainMap(cloneDomainMap); * form.setGeneOntologyDomainMap(geneOntologyDomainMap); * form.setPathwayDomainMap(pathwayDomainMap); * form.setArrayPlatformDomainMap(arrayPlatformDomainMap); */ return form; } }
package com.rmn.testrail.service; import com.rmn.testrail.entity.*; import com.rmn.testrail.entity.Error; import com.rmn.testrail.parameters.ApiFilterValue; import com.rmn.testrail.parameters.ApiParameter; import com.rmn.testrail.parameters.ApiParameters; import com.rmn.testrail.parameters.GetResultsFilter; import com.rmn.testrail.util.HTTPUtils; import com.rmn.testrail.util.JSONUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.codehaus.jackson.annotate.JsonProperty; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.annotate.JsonSerialize; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.Serializable; import java.net.HttpURLConnection; import java.net.URL; import java.util.HashMap; import java.util.List; import java.util.Map; /** * @author mmerrell */ public class TestRailService implements Serializable { private Logger log = LoggerFactory.getLogger(getClass()); /** * This might not last forever--we'll need to make "v2" a variable at some point--but this works for the moment */ private static final String ENDPOINT_SUFFIX = "index.php?/api/v2/%s%s"; /** * Used this way, the default implementation will assume that the TestRail instance is hosted by TestRail on their server. As such, you pass in * your "client ID", and it will get put into the correct place. 
If you're hosting a local instance, you'll have to use the (URL, String, String) * constructor in order to pass the full URL for your instance */ private String apiEndpoint = "http://testrail.com/"; private String username; private String password; private HTTPUtils utils = new HTTPUtils(); public TestRailService() {} /** * Construct a new TestRailService with the necessary information to start communication immediately * @param clientId The clientID--usually the "<id>.testrail.com" you are assigned when you first open an account * @param username The username you will use to communicate with the API. It is recommended to create an account with minimal privileges, specifically for API use * @param password The password to use with this account */ public TestRailService(String clientId, String username, String password) { this.apiEndpoint = String.format(apiEndpoint, clientId) + ENDPOINT_SUFFIX; this.username = username; this.password = password; } public TestRailService(URL apiEndpoint, String username, String password) { this.apiEndpoint = apiEndpoint.toString(); this.username = username; this.password = password; } public void setApiEndpoint(URL apiEndpoint) { this.apiEndpoint = apiEndpoint.toString() + ENDPOINT_SUFFIX; } public void setClientId(String clientId) { this.apiEndpoint = String.format(apiEndpoint, clientId) + ENDPOINT_SUFFIX; } /** * The user name for the API-enabled user * @param username Your Username */ public void setUsername(String username) { this.username = username; } /** * The user name for the API-enabled user * @param password Your Password */ public void setPassword(String password) { this.password = password; } /** * Sets the HTTPUtils object (this is not static because we need to stub these methods for unit testing) * @param utils The HTTPUtils object */ public void setHttpUtils(HTTPUtils utils) { this.utils = utils; } protected <T extends BaseEntity> List<T> getEntityList(Class<T> clazz, String apiCall, String param) { HttpURLConnection 
connection = getRESTRequest(apiCall, param); log.debug(""); String contents = utils.getContentsFromConnection(connection); List<T> entities = JSONUtils.getMappedJsonObjectList(clazz, contents); for (T suite: entities) { suite.setTestRailService(this); } return entities; } protected <T extends BaseEntity> T getEntitySingle(Class<T> clazz, String apiCall, String param) { HttpURLConnection connection = getRESTRequest(apiCall, param); String contents = utils.getContentsFromConnection(connection); T entity = JSONUtils.getMappedJsonObject(clazz, contents); entity.setTestRailService(this); return entity; } /** * Pings the API, mainly to ensure that your credentials are correct * @return Whether or not it was able to establish a successful connection */ public boolean verifyCredentials() throws IOException { //At the moment this just grabs a list of projects and makes sure the response code is valid. The API does not have // a "version" or "ping" end-point, so this seemed like the only way to talk to it without knowing some data first HttpURLConnection connection = getRESTRequest(TestRailCommand.GET_PROJECTS.getCommand(), ""); return connection.getResponseCode() == 200; } //BEGIN API HELPER METHODS /** * Returns the TestCase with the given id * @param testCaseId The TestCase ID (in TestRails, this will be something like 'C7', but just provide the 7) * @return The TestCase associated with this id */ public TestCase getTestCase(int testCaseId) { return getEntitySingle(TestCase.class, TestRailCommand.GET_CASE.getCommand(), Integer.toString(testCaseId)); } /** * Get the complete list of all test cases in this Project (if the project is operating in single suite mode) * @param projectId The ID of the project * @param apiFilters one or more request filters built on GetCasesFilter enums * @return the List of TestCase entities associated with this TestSuite */ public List<TestCase> getTestCasesSingleSuiteMode(int projectId, ApiFilterValue... 
apiFilters) { return getTestCases(projectId, -1, -1, apiFilters); } /** * Get the complete list of all test cases in this Project (if the project is operating in single suite mode) * @param projectId The ID of the project * @param sectionId The Section ID * @param apiFilters one or more request filters built on GetCasesFilter enums * @return the List of TestCase entities associated with this TestSuite */ public List<TestCase> getTestCasesSingleSuiteMode(int projectId, int sectionId, ApiFilterValue... apiFilters) { return getTestCases(projectId, -1, sectionId, apiFilters); } /** * Get the complete list of all test cases in this TestSuite * @param projectId The ID of the project * @param suiteId The Suite ID (in TestRails, this will be something like 'S7', but just provide the 7) * @param apiFilters one or more request filters built on GetCasesFilter enums * @return the List of TestCase entities associated with this TestSuite */ public List<TestCase> getTestCases(int projectId, int suiteId, ApiFilterValue... apiFilters) { return getTestCases(projectId, suiteId, -1, apiFilters); } /** * Get the list of test cases in this TestSuite for the Section indicated * @param projectId The ID of the project * @param suiteId The Suite ID (in TestRails, this will be something like 'S7', but just provide the 7) * @param sectionId The Section ID * @param apiFilters one or more request filters built on GetCasesFilter enums * @return A List of the TestCases in this Suite */ public List<TestCase> getTestCases(int projectId, int suiteId, int sectionId, ApiFilterValue... 
apiFilters) { String params = Integer.toString(projectId); if (suiteId > 0) { params += ApiParameters.append(ApiParameter.SUITE_ID, suiteId); } if (sectionId > 0) { params += ApiParameters.append(ApiParameter.SECTION_ID, sectionId); } for (ApiFilterValue apiFilter : apiFilters) { params += apiFilter.append(); } return getEntityList(TestCase.class, TestRailCommand.GET_CASES.getCommand(), params); } /** * Creates a new test case. * @param testCase the new test case * @param sectionId The ID of the section the test case should be added to */ public TestCase addTestCase(TestCase testCase, int sectionId) { return postRESTBodyReturn(TestRailCommand.ADD_CASE.getCommand(), Integer.toString(sectionId), testCase, TestCase.class); } /** * Updates an existing test case (partial updates are supported, i.e. you can submit and update specific fields only). * @param testCase a TestCase object with fields to be updated set. (i.e. TestCase updateCase = new TestCase(); updateCase.setPriorityId(2); ) * @param caseId The ID of the test case * @return */ public TestCase updateTestCase(TestCase testCase, int caseId) { return postRESTBodyReturn(TestRailCommand.UPDATE_CASE.getCommand(), Integer.toString(caseId), testCase, TestCase.class); } /** * WARNING: Permanently an existing test case. Please note: Deleting a test case cannot be undone and also permanently deletes all test results in active test runs (i.e. test runs that haven't been closed (archived) yet). * @param caseId The ID of the test case */ public void deleteTestCase(int caseId) { postRESTBody(TestRailCommand.DELETE_CASE.getCommand(), Integer.toString(caseId), null); } /** * Returns a list of available test case custom fields. * @return String with JSON response, you must parse the string yourself */ public String getCaseFields() { HttpURLConnection connection = getRESTRequest(TestRailCommand.GET_CASE_FIELDS.getCommand(), null); return utils.getContentsFromConnection(connection); } /** * Returns a list of available case types. 
* @return String with JSON response, you must parse the string yourself */ public String getCaseTypes() { HttpURLConnection connection = getRESTRequest(TestRailCommand.GET_CASE_TYPES.getCommand(), null); return utils.getContentsFromConnection(connection); } /** * Returns a list of available configurations, grouped by configuration groups (requires TestRail 3.1 or later). * @param projectId The ID of the project * @return String with JSON response, you must parse the string yourself */ public String getConfigurations(int projectId) { HttpURLConnection connection = getRESTRequest(TestRailCommand.GET_CONFIGS.getCommand(), Integer.toString(projectId)); return utils.getContentsFromConnection(connection); } /** * Creates a new configuration group. * @param name The name of the configuration group * @param projectId The ID of the project the configuration group should be added to */ public void addConfigGroup(final String name, int projectId) { postRESTBody(TestRailCommand.ADD_CONFIG_GROUP.getCommand(), null, new BaseEntity() { @JsonProperty("name") private String nameString = name; }); } /** * Creates a new configuration group. * @param name The name of the configuration * @param configGroupId The ID of the configuration group the configuration should be added to */ public void addConfig(final String name, int configGroupId) { postRESTBody(TestRailCommand.ADD_CONFIG.getCommand(), Integer.toString(configGroupId), new BaseEntity() { @JsonProperty("name") private String nameString = name; }); } /** * Updates an existing configuration group. * @param name The new name of the configuration group * @param configGroupId The ID of the configuration group */ public void updateConfigGroup(final String name, int configGroupId) { postRESTBody(TestRailCommand.UPDATE_CONFIG_GROUP.getCommand(), Integer.toString(configGroupId), new BaseEntity() { @JsonProperty("name") private String nameString = name; }); } /** * Updates an existing configuration. 
 * @param name The new name of the configuration
 * @param configId The ID of the configuration
 */
public void updateConfig(final String name, int configId) {
    postRESTBody(TestRailCommand.UPDATE_CONFIG.getCommand(), Integer.toString(configId), new BaseEntity() {
        @JsonProperty("name")
        private String nameString = name; // anonymous body serializes as {"name": ...}
    });
}

/**
 * Deletes an existing configuration group.
 * Please note: Deleting a configuration group cannot be undone and also permanently deletes all
 * configurations in this group. It does not, however, affect closed test plans/runs, or active
 * test plans/runs unless they are updated.
 * @param configGroupId The ID of the configuration group
 */
public void deleteConfigGroup(int configGroupId) {
    postRESTBody(TestRailCommand.DELETE_CONFIG_GROUP.getCommand(), Integer.toString(configGroupId), null);
}

/**
 * Deletes an existing configuration.
 * Please note: Deleting a configuration cannot be undone. It does not, however, affect closed
 * test plans/runs, or active test plans/runs unless they are updated.
 * @param configId The ID of the configuration
 */
public void deleteConfig(int configId) {
    postRESTBody(TestRailCommand.DELETE_CONFIG.getCommand(), Integer.toString(configId), null);
}

/**
 * Returns the Milestone object with the given ID
 * @param milestoneId the ID of the Milestone you're interested in
 * @return The Milestone object
 */
public Milestone getMilestone(int milestoneId) {
    return getEntitySingle(Milestone.class, TestRailCommand.GET_MILESTONE.getCommand(), Integer.toString(milestoneId));
}

/**
 * Returns a list of all the Milestones in the given project ID
 * @param projectId the ID of project you want the Milestones from
 * @param isCompleted only accepts ApiFilter.IS_COMPLETED (numerical boolean {0|1})
 * @return the list of all the Milestones in the project
 */
public List<Milestone> getMilestones(int projectId, ApiFilterValue...
isCompleted) {
    // BUG FIX: the ternary previously fell back to null, which string-concatenation
    // turned into the literal text "null" in the request URL. Fall back to "" instead.
    return getEntityList(Milestone.class, TestRailCommand.GET_MILESTONES.getCommand(),
            Integer.toString(projectId) + (isCompleted.length > 0 ? isCompleted[0].append() : ""));
}

/**
 * Creates a new milestone.
 * @param milestone The EmptyMilestone object with parameters for creating a new milestone
 * @param projectId The ID of the project the milestone should be added to
 * @return Returns the new milestone that was created
 */
public Milestone addMilestone(EmptyMilestone milestone, int projectId) {
    return postRESTBodyReturn(TestRailCommand.ADD_MILESTONE.getCommand(), Integer.toString(projectId), milestone, Milestone.class);
}

/**
 * Marks an existing milestone as completed (or not completed).
 * @param milestoneId The ID of the milestone
 * @param isCompleted whether the milestone should be flagged completed
 * @return the updated milestone
 */
public Milestone updateMilestone(int milestoneId, final boolean isCompleted) {
    return postRESTBodyReturn(TestRailCommand.UPDATE_MILESTONE.getCommand(), Integer.toString(milestoneId), new BaseEntity() {
        @JsonProperty("is_completed")
        private String isCompletedBoolean = isCompleted ? "1":"0"; // API expects a numerical boolean
    }, Milestone.class);
}

/**
 * The TestPlan assocated with the indicated id
 * @param planId The id of the TestPlan you're interested in
 * @return The TestPlan entity indicated by the id
 */
public TestPlan getTestPlan(int planId) {
    return getEntitySingle(TestPlan.class, TestRailCommand.GET_PLAN.getCommand(), Integer.toString(planId));
}

/**
 * The List of TestPlan entities the indicated Project contains
 * @param projectId The id of the project you're interested in
 * @param apiFilters one or more request filters built on GetPlansFilter enums
 * @return A List of TestPlan entities for the indicated Project
 */
public List<TestPlan> getTestPlans(int projectId, ApiFilterValue...
apiFilters) {
    // Project id first, then any caller-provided filters appended in order.
    String params = Integer.toString(projectId);
    for (ApiFilterValue apiFilter : apiFilters) {
        params += apiFilter.append();
    }
    return getEntityList(TestPlan.class, TestRailCommand.GET_PLANS.getCommand(), params);
}

/**
 * Adds a Test Plan in TestRails
 * @param projectId the ID of the project to add the Test Plan to
 * @param testPlan the skeleton Test Plan object the TestRails Test Plan will be based off of
 * @return the completed Test Plan created in TestRails
 */
public TestPlan addTestPlan(int projectId, TestPlanCreator testPlan) {
    return postRESTBodyReturn(TestRailCommand.ADD_PLAN.getCommand(), Integer.toString(projectId), testPlan, TestPlan.class);
}

/**
 * Adds a Test Plan Entry in TestRails
 * @param planId the ID of the Test Plan to add the Test Plan Entry to
 * @param planEntry the skeleton Plan Entry object the TestRails Plan Entry (Test Run) will be based off of
 * @return the completed Plan Entry created in TestRails
 */
public PlanEntry addTestPlanEntry(int planId, PlanEntry planEntry) {
    return postRESTBodyReturn(TestRailCommand.ADD_PLAN_ENTRY.getCommand(), Integer.toString(planId), planEntry, PlanEntry.class);
}

/**
 * Updates an existing test plan (partial updates are supported, i.e. you can submit and update specific fields only).
 * With the exception of the entries field, this method supports the same POST fields as add_plan.
 * @param planId The ID of the test plan
 * @param testPlan The (partially) updated test plan
 * @return the updated test plan
 */
public TestPlan updateTestPlan(int planId, TestPlanCreator testPlan) {
    return postRESTBodyReturn(TestRailCommand.UPDATE_PLAN.getCommand(), Integer.toString(planId), testPlan, TestPlan.class);
}

/**
 * Updates one or more existing test runs in a plan (partial updates are supported, i.e. you can submit and update specific fields only).
 * @param planId The ID of the test plan
 * @param entryId The ID of the test plan entry (note: not the test run ID)
 * @param updatePlanEntry the (partial) updates to the plan entry
 * @return the updated plan entry
 */
public PlanEntry updateTestPlanEntry(int planId, int entryId, UpdatePlanEntry updatePlanEntry) {
    // End-point shape is update_plan_entry/:plan_id/:entry_id, hence the joined params.
    return postRESTBodyReturn(TestRailCommand.UPDATE_PLAN_ENTRY.getCommand(),
            Integer.toString(planId) + "/" + Integer.toString(entryId), updatePlanEntry, PlanEntry.class);
}

/**
 * Closes an existing test plan and archives its test runs & results.
 * Please note: Closing a test plan cannot be undone.
 * @param planId The ID of the test plan
 * @return the closed test plan
 */
public TestPlan closeTestPlan(int planId) {
    return postRESTBodyReturn(TestRailCommand.CLOSE_PLAN.getCommand(), Integer.toString(planId), null, TestPlan.class);
}

/**
 * Deletes an existing test plan.
 * Please note: Deleting a test plan cannot be undone and also permanently deletes all test runs & results of the test plan.
 * @param planId The ID of the test plan
 */
public void deleteTestPlan(int planId) {
    postRESTBody(TestRailCommand.DELETE_PLAN.getCommand(), Integer.toString(planId), null);
}

/**
 * Deletes one or more existing test runs from a plan.
 * Please note: Deleting a test run from a plan cannot be undone and also permanently deletes all related test results.
 * @param planId The ID of the test plan
 * @param entryId The ID of the test plan entry (note: not the test run ID)
 */
public void deleteTestPlanEntry(int planId, int entryId) {
    postRESTBody(TestRailCommand.DELETE_PLAN_ENTRY.getCommand(),
            Integer.toString(planId) + "/" + Integer.toString(entryId), null);
}

/**
 * Returns a list of available priorities.
 * @return a list of Priority objects
 */
public List<Priority> getPriorities() {
    return getEntityList(Priority.class, TestRailCommand.GET_PRIORITIES.getCommand(), null);
}

/**
 * Returns the Project, specified by id
 * @param projectId The TestRails Project Id
 * @return The Project, or null if it doesn't exist
 */
public Project getProject(int projectId) {
    return getEntitySingle(Project.class, TestRailCommand.GET_PROJECT.getCommand(), Integer.toString(projectId));
}

/**
 * Looks up the Project, specified by Name
 * @param projectName The Name of the Project (including spaces)
 * @return The Project, or null if it doesn't exist
 */
public Project getProjectByName(String projectName) {
    // Linear scan over all visible projects; the API has no lookup-by-name end-point.
    for (Project project: this.getProjects()) {
        if (project.getName().equals(projectName)) {
            return project;
        }
    }
    return null;
}

/**
 * Returns all Project entities related to this account
 * @param isCompleted ApiFilterValue object based off of GetProjectsFilter.IS_COMPLETED enum
 * @return The List of ALL Projects available to this user
 */
public List<Project> getProjects(ApiFilterValue... isCompleted) {
    // Passing null as the whole parameter is fine here: buildRequestURL treats it as "no params".
    return getEntityList(Project.class, TestRailCommand.GET_PROJECTS.getCommand(),
            isCompleted.length > 0 ? isCompleted[0].append() : null);
}

/**
 * Creates a new project (admin status required).
 * @param newProject project information of new project to add
 * @return new project that was created
 */
public Project addProject(ProjectCreator newProject) {
    return postRESTBodyReturn(TestRailCommand.ADD_PROJECT.getCommand(), null, newProject, Project.class);
}

/**
 * Updates an existing project (admin status required; partial updates are supported, i.e. you can submit and update specific fields only).
 * @param projectId The ID of the project
 * @param isCompleted Specifies whether a project is considered completed or not
 * @return the updated project
 */
public Project updateProject(int projectId, final boolean isCompleted) {
    // NOTE(review): this posts to the ADD_PROJECT command even though it is an update —
    // looks like a copy-paste bug. Confirm an UPDATE_PROJECT command exists in
    // TestRailCommand and switch to it (TestRail's end-point is update_project/:project_id).
    return postRESTBodyReturn(TestRailCommand.ADD_PROJECT.getCommand(), Integer.toString(projectId), new BaseEntity() {
        @JsonProperty("is_completed")
        private String isCompletedBoolean = isCompleted ? "1":"0"; // API expects a numerical boolean
    }, Project.class);
}

/**
 * Deletes an existing project (admin status required).
 * Please note: Deleting a project cannot be undone and also permanently deletes all test suites & cases,
 * test runs & results and everything else that is part of the project.
 * @param projectId The ID of the project
 */
public void deleteProject(int projectId) {
    postRESTBody(TestRailCommand.DELETE_PROJECT.getCommand(), Integer.toString(projectId), null);
}

/**
 * Returns the most recent TestResult object for the given TestInstance
 * @param testInstanceId The TestInstance you're interested in (gathered from the specific TestRun)
 * @return The most recent TestResult for the given TestInstance, or null if there is none
 */
public TestResult getTestResult(int testInstanceId) {
    // limit=1 with the API's most-recent-first ordering yields just the latest result.
    List<TestResult> results = getTestResults(testInstanceId, new ApiFilterValue(GetResultsFilter.LIMIT, "1"));
    if (null == results || results.size() == 0) {
        return null;
    }
    return results.get(0);
}

/**
 * Returns a List of the TestResults (up to the 'limit' parameter provided) associated with the indicated TestInstance, most recent first
 * @param testInstanceId The TestInstance id
 * @param apiFilters one or more request filters built on GetResultsFilter enums
 * @return A List of TestResults in descending chronological order (i.e. most recent first)
 */
public List<TestResult> getTestResults(int testInstanceId, ApiFilterValue...
apiFilters) { List<TestResult> results = getEntityList(TestResult.class, TestRailCommand.GET_RESULTS.getCommand(), Integer.toString(testInstanceId) + new ApiFilterValue(GetResultsFilter.LIMIT, "1").append()); if (null == results) { return null; } String params = Integer.toString(testInstanceId); for (ApiFilterValue apiFilter : apiFilters) { params += apiFilter.append(); } return getEntityList(TestResult.class, TestRailCommand.GET_RESULTS.getCommand(), params); } /** * Returns a list of test results for a test run and case combination. * @param runId The ID of the test run * @param caseId The ID of the test case * @param apiFilters one or more request filters built on GetResultsFilter enums * @return A List of TestResults in descending chronological order (i.e. most recent first) */ public List<TestResult> getTestResultsForCase(int runId, int caseId, ApiFilterValue... apiFilters) { String params = Integer.toString(runId) + "/" + Integer.toString(caseId); for (ApiFilterValue apiFilter : apiFilters) { params += apiFilter.append(); } return getEntityList(TestResult.class, TestRailCommand.GET_RESULTS_FOR_CASE.getCommand(), params); } /** * Returns a list of test results for a test run and case combination. * @param runId The ID of the test run * @param apiFilters one or more request filters built on GetResultsFilter enums * @return A List of TestResults in descending chronological order (i.e. most recent first) */ public List<TestResult> getTestResultsForRun(int runId, ApiFilterValue... apiFilters) { String params = Integer.toString(runId); for (ApiFilterValue apiFilter : apiFilters) { params += apiFilter.append(); } return getEntityList(TestResult.class, TestRailCommand.GET_RESULTS_FOR_RUN.getCommand(), params); } /** * (Adds a new test result, comment or assigns a test. It's recommended to use add_results instead if you plan to add results for multiple tests.) 
 * Add a TestResult to a particular TestInstance, given the TestInstance id
 * @param testId The id of the TestInstance to which you would like to add a TestResult entity
 * @param result TestResult entity you wish to add to this TestInstance
 * @return the new test result
 * @throws IOException if reading the HTTP response fails
 */
public TestResult addTestResult(int testId, TestResult result) throws IOException {
    HttpResponse response = postRESTBody(TestRailCommand.ADD_RESULT.getCommand(), Integer.toString(testId), result);
    if (response.getStatusLine().getStatusCode() != 200) {
        throw new RuntimeException(String.format("TestResult was not properly added to TestInstance [%d]: %s",
                testId, response.getStatusLine().getReasonPhrase()));
    }
    return JSONUtils.getMappedJsonObject(TestResult.class, utils.getContentsFromHttpResponse(response));
}

/**
 * Adds a new test result, comment or assigns a test (for a test run and case combination).
 * It's recommended to use add_results_for_cases instead if you plan to add results for multiple test cases.
 * @param runId The ID of the test run
 * @param caseId The ID of the test case
 * @param result TestResult entity you wish to add to this TestInstance
 * @return the new test result
 * @throws IOException if reading the HTTP response fails
 */
public TestResult addTestResultForCase(int runId, int caseId, TestResult result) throws IOException {
    // End-point shape is add_result_for_case/:run_id/:case_id.
    HttpResponse response = postRESTBody(TestRailCommand.ADD_RESULT_FOR_CASE.getCommand(),
            Integer.toString(runId) + "/" + Integer.toString(caseId), result);
    if (response.getStatusLine().getStatusCode() != 200) {
        throw new RuntimeException(String.format("TestResult was not properly added to Run ID: %d | Case ID: %d: %s",
                runId, caseId, response.getStatusLine().getReasonPhrase()));
    }
    return JSONUtils.getMappedJsonObject(TestResult.class, utils.getContentsFromHttpResponse(response));
}

/**
 * (Adds one or more new test results, comments or assigns one or more tests. Ideal for test automation to bulk-add multiple test results in one step.)
 * Add a TestResult to a particular TestInstance, given the TestInstance id
 * @param runId The id of the TestRun to which you would like to add a TestResults entity
 * @param results A TestResults entity (which can include multiple TestResult entities) you wish to add to this TestRun
 * @return a TestResults wrapper around the per-result entities echoed back by the API
 * @throws IOException if reading the HTTP response fails
 */
public TestResults addTestResults(int runId, TestResults results) throws IOException {
    HttpResponse response = postRESTBody(TestRailCommand.ADD_RESULTS.getCommand(), Integer.toString(runId), results);
    if (response.getStatusLine().getStatusCode() != 200) {
        throw new RuntimeException(String.format("TestResults was not properly added to TestRun [%d]: %s",
                runId, response.getStatusLine().getReasonPhrase()));
    }
    // The API answers with a bare JSON array of results; re-wrap it in a TestResults container.
    TestResults returnedResults = new TestResults();
    returnedResults.setResults(JSONUtils.getMappedJsonObjectList(TestResult.class, utils.getContentsFromHttpResponse(response)));
    return returnedResults;
}

/**
 * (Adds one or more new test results, comments or assigns one or more tests. Ideal for test automation to bulk-add multiple test results in one step.)
 * Add a TestResult to a particular TestInstance, given the TestInstance id
 * @param runId The id of the TestRun to which you would like to add a TestResults entity
 * @param results A TestResults entity (which can include multiple TestResult entities) you wish to add to this TestRun
 * @return a TestResults wrapper around the per-result entities echoed back by the API
 * @throws IOException if reading the HTTP response fails
 */
public TestResults addTestResultsForCases(int runId, TestResults results) throws IOException {
    HttpResponse response = postRESTBody(TestRailCommand.ADD_RESULTS_FOR_CASES.getCommand(), Integer.toString(runId), results);
    if (response.getStatusLine().getStatusCode() != 200) {
        throw new RuntimeException(String.format("TestResults was not properly added to TestRun [%d]: %s",
                runId, response.getStatusLine().getReasonPhrase()));
    }
    // The API answers with a bare JSON array of results; re-wrap it in a TestResults container.
    TestResults returnedResults = new TestResults();
    returnedResults.setResults(JSONUtils.getMappedJsonObjectList(TestResult.class, utils.getContentsFromHttpResponse(response)));
    return returnedResults;
}

/**
 * Returns a list of available test result custom fields.
 * @return String with JSON response, you must parse the string yourself
 */
public String getResultFields() {
    HttpURLConnection connection = getRESTRequest(TestRailCommand.GET_RESULT_FIELDS.getCommand(), null);
    return utils.getContentsFromConnection(connection);
}

/**
 * Returns TestRun associated with the specific TestRun ID passed in (assuming you know it)
 * @param testRunId The id of the TestRun requested
 * @return The TestRun active for this TestRun ID
 */
public TestRun getTestRun(int testRunId) {
    return getEntitySingle(TestRun.class, TestRailCommand.GET_RUN.getCommand(), Integer.toString(testRunId));
}

/**
 * Returns all the Active TestRuns associated with the given Project
 * @param projectId The id of the Project
 * @param apiFilters one or more request filters built on GetRunsFilter enums
 * @return The List of TestRuns currently active for this Project
 */
public List<TestRun> getTestRuns(int projectId, ApiFilterValue...
apiFilters) {
    // Project id first, then any caller-provided filters appended in order.
    String params = Integer.toString(projectId);
    for (ApiFilterValue apiFilter : apiFilters) {
        params += apiFilter.append();
    }
    return getEntityList(TestRun.class, TestRailCommand.GET_RUNS.getCommand(), params);
}

/**
 * Add a TestRun via a slimmed down new TestRunCreator entity to get around non-obvious json serialization problems
 * with the TestRun entity
 * @param projectId the id of the project to bind the test run to
 * @param run the skeleton run to create
 * @return The newly created TestRun object
 */
public TestRun addTestRun(int projectId, TestRunCreator run) {
    // The create call echoes back only a skeleton; re-fetch by id to get the fully-populated run.
    TestRun newSkeletonTestRun = postRESTBodyReturn(TestRailCommand.ADD_RUN.getCommand(), Integer.toString(projectId), run, TestRun.class);
    TestRun realNewlyCreatedTestRun = getTestRun(newSkeletonTestRun.getId());
    return realNewlyCreatedTestRun;
}

/**
 * Updates an existing test run (partial updates are supported, i.e. you can submit and update specific fields only).
 * @param runId The ID of the test run
 * @param testRun the (partially) updated test run
 * @return the updated test run
 */
public TestRun updateTestRun(int runId, TestRun testRun) {
    return postRESTBodyReturn(TestRailCommand.UPDATE_RUN.getCommand(), Integer.toString(runId), testRun, TestRun.class);
}

/**
 * Closes an existing test run and archives its tests & results.
 * Please note: Closing a test run cannot be undone.
 * @param run The TestRun you want to close
 * @return the newly closed test run
 * @throws IOException if reading the HTTP response fails
 */
public TestRun closeTestRun(TestRun run) throws IOException{
    HttpResponse response = postRESTBody(TestRailCommand.CLOSE_RUN.getCommand(), Integer.toString(run.getId()), run);
    if (response.getStatusLine().getStatusCode() != 200) {
        throw new RuntimeException(String.format("TestRun was not properly closed, TestRunID [%d]: %s",
                run.getId(), response.getStatusLine().getReasonPhrase()));
    }
    return JSONUtils.getMappedJsonObject(TestRun.class, utils.getContentsFromHttpResponse(response));
}

/**
 * Closes an existing test run and archives its tests & results.
 * Please note: Closing a test run cannot be undone.
 * @param runId The ID of the test run
 * @return the newly closed test run
 * @throws IOException if reading the HTTP response fails
 */
public TestRun closeTestRun(int runId) throws IOException{
    HttpResponse response = postRESTBody(TestRailCommand.CLOSE_RUN.getCommand(), Integer.toString(runId), null);
    if (response.getStatusLine().getStatusCode() != 200) {
        throw new RuntimeException(String.format("TestRun was not properly closed, TestRunID [%d]: %s",
                runId, response.getStatusLine().getReasonPhrase()));
    }
    return JSONUtils.getMappedJsonObject(TestRun.class, utils.getContentsFromHttpResponse(response));
}

/**
 * Deletes an existing test run.
 * Please note: Deleting a test run cannot be undone and also permanently deletes all tests & results of the test run.
 * @param runId The ID of the test run
 */
public void deleteTestRun(int runId) {
    postRESTBody(TestRailCommand.DELETE_RUN.getCommand(), Integer.toString(runId), null);
}

/**
 * Returns an existing section.
 * @param sectionId The ID of the section
 * @return a Section object
 */
public Section getSection(int sectionId) {
    return getEntitySingle(Section.class, TestRailCommand.GET_SECTION.getCommand(), Integer.toString(sectionId));
}

/**
 * Returns the List of Section entities the indicated Project and TestSuite entities contain
 * @param projectId The id of the Project you're interested in
 * @param suiteId The id of the TestSuite you're interested in
 * @return A List of Section entities for the indicated Project/TestSuite
 */
public List<Section> getSections( int projectId, int suiteId ) {
    // suite_id is passed as a query-string parameter alongside the project id path segment.
    return getEntityList(Section.class, TestRailCommand.GET_SECTIONS.getCommand(),
            String.format("%d&suite_id=%d", projectId, suiteId));
}

/**
 * Returns a list of available test statuses.
 * @return String with JSON response, you must parse the string yourself
 */
public String getStatuses() {
    HttpURLConnection connection = getRESTRequest(TestRailCommand.GET_STATUSES.getCommand(), null);
    return utils.getContentsFromConnection(connection);
}

/**
 * Returns the TestSuite for the id given
 * @param suiteId The Suite ID (in TestRails, this will be something like 'S7', but just provide the 7)
 * @return A TestSuite
 */
public TestSuite getTestSuite( int suiteId ) {
    return getEntitySingle(TestSuite.class, TestRailCommand.GET_SUITE.getCommand(), Integer.toString(suiteId));
}

/**
 * Returns all the TestSuites for the project id given
 * @param projectId The Project ID (in TestRails, this will be something like 'P7', but just provide the 7)
 * @return A List of Suites
 */
public List<TestSuite> getTestSuites( int projectId ) {
    return getEntityList(TestSuite.class, TestRailCommand.GET_SUITES.getCommand(), Integer.toString(projectId));
}

/**
 * Creates a new test suite.
 * @param projectId The ID of the project the test suite should be added to
 * @param testSuite The information needed to create a new test suite
 * @return the newly created test suite
 */
public TestSuite addTestSuite(int projectId, TestSuiteCreator testSuite) {
    return postRESTBodyReturn(TestRailCommand.ADD_SUITE.getCommand(), Integer.toString(projectId), testSuite, TestSuite.class);
}

/**
 * Updates an existing test suite (partial updates are supported, i.e. you can submit and update specific fields only).
 * @param suiteId The ID of the test suite
 * @param testSuite The (partially) updated test suite
 * @return the newly updated test suite
 */
public TestSuite updateTestSuite(int suiteId, TestSuite testSuite) {
    return postRESTBodyReturn(TestRailCommand.UPDATE_SUITE.getCommand(), Integer.toString(suiteId), testSuite, TestSuite.class);
}

/**
 * Deletes an existing test suite.
 * Please note: Deleting a test suite cannot be undone and also deletes all active test runs & results, i.e.
test runs & results that weren't closed (archived) yet. * @param suiteId The ID of the test suite */ public void deleteTestSuite(int suiteId) { postRESTBody(TestRailCommand.DELETE_SUITE.getCommand(), Integer.toString(suiteId), null); } /** * Returns a list of available templates (requires TestRail 5.2 or later). * @return The ID of the project */ public String getTemplates() { HttpURLConnection connection = getRESTRequest(TestRailCommand.GET_TEMPLATES.getCommand(), null); return utils.getContentsFromConnection(connection); } /** * Returns an existing test. * @param testId The ID of the test * @return TestInstance object */ public TestInstance getTest(int testId) { return getEntitySingle(TestInstance.class, TestRailCommand.GET_TEST.getCommand(), Integer.toString(testId)); } /** * Returns all TestInstances associated with the given TestRun * @param testRunId The id of the TestRun you're interested in * @param isCompleted ApiFilterValue object based off of GetProjectsFilter.IS_COMPLETED enum * @return The List of TestInstances associated with this TestRun */ public List<TestInstance> getTests(int testRunId, ApiFilterValue... statusId) { return getEntityList(TestInstance.class, TestRailCommand.GET_TESTS.getCommand(), Integer.toString(testRunId) + (statusId.length > 0 ? statusId[0].append() : null)); } /** * Get a user by id */ public User getUserById(int id) { return getEntitySingle(User.class, TestRailCommand.GET_USER_BY_ID.getCommand(), "" + id); } /** * Get a user by email address */ public User getUserByEmail(String email) { return getEntitySingle(User.class, TestRailCommand.GET_USER_BY_EMAIL.getCommand(), "&email=" + email); } /** * Get the entire list of users from the API */ public List<User> getUsers() { return getEntityList(User.class, TestRailCommand.GET_USERS.getCommand(), ""); } //END API HELPER METHODS /** * Builds the proper TestRails request URL based on the type and number of parameters. 
It tries to be smart about how to add
 * parameters to calls that require 0, 1, or 2 arguments
 * @param apiCall The end-point you wish to request
 * @param urlParams The full parameters of the request you're making (it's up to you to make it correct)
 * @return The URL you've built
 */
private String buildRequestURL(String apiCall, String urlParams) {
    //Some API calls take 2 parameters, like get_cases/16/1231, so we need to account for both
    String argString = "";
    if (!StringUtils.isEmpty(urlParams)) {
        argString = String.format("/%s", urlParams);
    }
    //Build the complete url
    return String.format(apiEndpoint, apiCall, argString);
}

/**
 * Makes the specified call to the API using either 1 or 2 args. These args will be validated and inserted before making the actual GET request
 * @param apiCall The specific call to make to the API (NOT including the URL)
 * @param urlParams The first parameter
 * @return An active, open connection in a post-response state
 */
private HttpURLConnection getRESTRequest(String apiCall, String urlParams) {
    String completeUrl = buildRequestURL(apiCall, urlParams);
    try {
        //log the complete url
        log.debug("url: {}", completeUrl);
        //Add the application/json header
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Content-Type", "application/json");
        //Log the curl call for easy reproduction
//        log.warn(utils.getCurlCommandStringGet(completeUrl, headers));
        String authentication = HTTPUtils.encodeAuthenticationBase64(username, password);
        return utils.getHTTPRequest(completeUrl, authentication, headers);
    } catch (IOException e) {
        log.error("An IOException was thrown while trying to process a REST Request against URL: {}", completeUrl);
    }
    // Only reached when the request above threw: surface a hard failure to the caller.
    throw new RuntimeException(String.format(
            "Connection is null (probably hit timeout), check parameters for [%s]", completeUrl));
}

/**
 * Posts the given String to the given TestRails end-point
 * @param apiCall The end-point that expects to receive the entities (e.g.
"add_result")
 * @param urlParams The remainder of the URL required for the POST. It is up to you to get this part right
 * @param entity The BaseEntity object to use at the POST body
 * @return The Content of the HTTP Response
 */
private HttpResponse postRESTBody(String apiCall, String urlParams, BaseEntity entity) {
    HttpClient httpClient = new DefaultHttpClient();
    String completeUrl = buildRequestURL( apiCall, urlParams );
    try {
        HttpPost request = new HttpPost( completeUrl );
        String authentication = HTTPUtils.encodeAuthenticationBase64(username, password);
        request.addHeader("Authorization", "Basic " + authentication);
        request.addHeader("Content-Type", "application/json");
        // Serialize the entity to JSON, omitting null fields so partial updates work.
        ObjectMapper mapper = new ObjectMapper();
        mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
        byte[] body = mapper.writeValueAsBytes(entity);
        request.setEntity(new ByteArrayEntity(body));
        HttpResponse response = executeRequestWithRetry(request, 2);
        if (response.getStatusLine().getStatusCode() != 200) {
            Error error = JSONUtils.getMappedJsonObject(Error.class, utils.getContentsFromHttpResponse(response));
            log.error("Response code: {}", response.getStatusLine().getStatusCode());
            log.error("TestRails reported an error message: {}", error.getError());
            // NOTE(review): adding a header after the request has already executed has no
            // effect on the sent request — this line looks misplaced; confirm and remove.
            request.addHeader("Encoding", "UTF-8");
        }
        return response;
    } catch (IOException e) {
        // NOTE(review): the message is pre-formatted via String.format, so the trailing
        // e.toString() argument has no {} placeholder to land in — the cause is lost.
        log.error(String.format("An IOException was thrown while trying to process a REST Request against URL: [%s]", completeUrl), e.toString());
        throw new RuntimeException(String.format("Connection is null, check URL: %s", completeUrl));
    } finally {
        // NOTE(review): this shuts down the local client, but the request actually ran on
        // the separate client created inside executeRequestWithRetry (which leaks).
        httpClient.getConnectionManager().shutdown();
    }
}

/**
 * Posts the given String to the given TestRails end-point
 *
 * @param apiCall The end-point that expects to receive the entities (e.g. "add_result")
 * @param urlParams The remainder of the URL required for the POST.
It is up to you to get this part right
 * @param entity The BaseEntity object to use at the POST body
 * @param returnEntityType The Class of the return type you wish to receive (helps avoid casting from the calling method)
 * @return The Content of the HTTP Response
 */
private <T extends BaseEntity> T postRESTBodyReturn(String apiCall, String urlParams, BaseEntity entity, Class<T> returnEntityType) {
    HttpClient httpClient = new DefaultHttpClient();
    String completeUrl = buildRequestURL( apiCall, urlParams );
    try {
        HttpPost request = new HttpPost( completeUrl );
        String authentication = HTTPUtils.encodeAuthenticationBase64(username, password);
        request.addHeader("Authorization", "Basic " + authentication);
        request.addHeader("Content-Type", "application/json");
        request.addHeader("Encoding", "UTF-8");
        // Serialize the entity to JSON, omitting null fields so partial updates work.
        ObjectMapper mapper = new ObjectMapper();
        mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
        byte[] body = mapper.writeValueAsBytes(entity);
        request.setEntity(new ByteArrayEntity(body));
        HttpResponse response = executeRequestWithRetry(request, 2);
        if (response.getStatusLine().getStatusCode() != 200) {
            // Non-200: log the API's error payload and fall through to the trailing
            // "return null" after this try/finally.
            Error error = JSONUtils.getMappedJsonObject(Error.class, utils.getContentsFromHttpResponse(response));
            log.error("Response code: {}", response.getStatusLine().getStatusCode());
            log.error("TestRails reported an error message: {}", error.getError());
        } else if (response.getStatusLine().getStatusCode() == 200) {
            log.info("Returning a JSON mapped object from calling api intergration point");
            return JSONUtils.getMappedJsonObject(returnEntityType, utils.getContentsFromHttpResponse(response));
        } else {
            // NOTE(review): unreachable — the first two branches cover every status code.
            log.error("Unhandled return code for postRESTBodyReturn");
        }
    } catch (IOException e) {
        log.error(String.format("An IOException was thrown while trying to process a REST Request against URL: [%s]", completeUrl), e);
        throw new RuntimeException(String.format("Connection is null, check URL: %s", completeUrl), e);
    } finally {
        // NOTE(review): this shuts down the local client, but the request actually ran on
        // the separate client created inside executeRequestWithRetry (which leaks).
        httpClient.getConnectionManager().shutdown(); }
return null; }

/**
 * Executes the given POST request, retrying when TestRail answers HTTP 429
 * ("Too Many Requests"). The delay between attempts is taken from the
 * Retry-After response header (seconds), converted to milliseconds.
 *
 * @param request the prepared POST request
 * @param retries maximum number of attempts
 * @return the last HTTP response received (never retried past a non-429 status)
 * @throws IOException if the underlying HTTP call fails
 */
private HttpResponse executeRequestWithRetry(HttpPost request, int retries) throws IOException {
    int retryDelayMs = 0;
    // NOTE(review): this client is never shut down here — closing it before returning
    // would prevent the caller from reading the response entity, so the connection
    // manager is leaked. Consider accepting the caller's HttpClient instead.
    HttpClient httpClient = new DefaultHttpClient();
    HttpResponse response = null;
    // (The former 'connected' flag was removed: it was never set to true, so the
    // loop condition reduced to 'retry < retries'.)
    for (int retry = 0; retry < retries; retry++) {
        if (retry > 0) {
            log.warn("retry " + retry + "/" + retries);
            try {
                log.debug("Sleeping for retry: " + retryDelayMs);
                Thread.sleep(retryDelayMs);
            } catch (InterruptedException e) {
                // Preserve the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
            }
        }
        // try posting request
        response = httpClient.execute(request);
        if (response.getStatusLine().getStatusCode() == 429) {
            log.warn(" **429 for POST**");
            // Retry-After is in seconds; Thread.sleep wants milliseconds.
            retryDelayMs = Integer.parseInt(response.getFirstHeader("Retry-After").getValue()) * 1000;
        } else {
            break; // if not 429, break
        }
    }
    return response;
}
}
package gov.nih.nci.ncicb.cadsr.loader.ui; import gov.nih.nci.ncicb.cadsr.domain.ObjectClassRelationship; import gov.nih.nci.ncicb.cadsr.loader.event.ElementChangeListener; import gov.nih.nci.ncicb.cadsr.loader.event.ReviewListener; import gov.nih.nci.ncicb.cadsr.loader.ui.event.NavigationEvent; import gov.nih.nci.ncicb.cadsr.loader.ui.event.NavigationListener; import gov.nih.nci.ncicb.cadsr.loader.ui.tree.AssociationEndNode; import gov.nih.nci.ncicb.cadsr.loader.ui.tree.UMLNode; import java.beans.PropertyChangeListener; import java.util.Set; import javax.swing.JTabbedPane; /** * The Association viewer * * @author <a href="mailto:chris.ludet@oracle.com">Christophe Ludet</a> */ public class AssociationViewPanel extends JTabbedPane implements NavigationListener { private ObjectClassRelationship ocr; private UMLNode node; private AssociationDetailViewPanel detailViewPanel; private AssociationConceptPanel roleConceptPanel; private AssociationConceptPanel sourceConceptPanel; private AssociationConceptPanel targetConceptPanel; private boolean showingConceptTabs = false; public AssociationViewPanel(UMLNode node) { this.node = node; this.ocr = (ObjectClassRelationship)node.getUserObject(); initUI(); update(node); } public void update(UMLNode node) { this.node = node; this.ocr = (ObjectClassRelationship)node.getUserObject(); detailViewPanel.update(ocr); AssociationEndNode sourceEndNode = null, targetEndNode = null; Set<UMLNode> endNodes = node.getChildren(); for(UMLNode n : endNodes) { AssociationEndNode endNode = (AssociationEndNode)n; if(endNode.getType() == AssociationEndNode.TYPE_SOURCE) sourceEndNode = endNode; else targetEndNode = endNode; } roleConceptPanel.updateNode(node); sourceConceptPanel.updateNode(sourceEndNode); targetConceptPanel.updateNode(targetEndNode); if(ocr.getType().equals(ObjectClassRelationship.TYPE_HAS) && !showingConceptTabs) { addTab("Role", roleConceptPanel); addTab("Source", sourceConceptPanel); addTab("Target", targetConceptPanel); 
showingConceptTabs = true; } else if(ocr.getType().equals(ObjectClassRelationship.TYPE_IS) && showingConceptTabs) { remove(roleConceptPanel); remove(sourceConceptPanel); remove(targetConceptPanel); showingConceptTabs = false; } } private void initUI() { detailViewPanel = new AssociationDetailViewPanel(ocr); addTab("Detail", detailViewPanel); AssociationEndNode sourceEndNode = null, targetEndNode = null; Set<UMLNode> endNodes = node.getChildren(); for(UMLNode n : endNodes) { AssociationEndNode endNode = (AssociationEndNode)n; if(endNode.getType() == AssociationEndNode.TYPE_SOURCE) sourceEndNode = endNode; else targetEndNode = endNode; } roleConceptPanel = new AssociationConceptPanel(node); sourceConceptPanel = new AssociationConceptPanel(sourceEndNode); targetConceptPanel = new AssociationConceptPanel(targetEndNode); if(ocr.getType().equals(ObjectClassRelationship.TYPE_HAS)) { addTab("Role", roleConceptPanel); addTab("Source", sourceConceptPanel); addTab("Target", targetConceptPanel); showingConceptTabs = true; } } public void addCustomPropertyChangeListener(PropertyChangeListener l) { roleConceptPanel.addPropertyChangeListener(l); sourceConceptPanel.addPropertyChangeListener(l); targetConceptPanel.addPropertyChangeListener(l); } public void navigate(NavigationEvent evt) { roleConceptPanel.navigate(evt); sourceConceptPanel.navigate(evt); targetConceptPanel.navigate(evt); } public void addReviewListener(ReviewListener listener) { roleConceptPanel.addReviewListener(listener); sourceConceptPanel.addReviewListener(listener); targetConceptPanel.addReviewListener(listener); } public void addNavigationListener(NavigationListener listener) { roleConceptPanel.addNavigationListener(listener); sourceConceptPanel.addNavigationListener(listener); targetConceptPanel.addNavigationListener(listener); } public void addElementChangeListener(ElementChangeListener listener) { roleConceptPanel.addElementChangeListener(listener); sourceConceptPanel.addElementChangeListener(listener); 
targetConceptPanel.addElementChangeListener(listener); } }
package com.rox.emu.processor.mos6502; import com.rox.emu.env.RoxByte; import com.rox.emu.env.RoxWord; /** * Arithmetic Logic Unit for a {@link Mos6502}.<br/> * <br/> * Operations: * <ul> * <li> {@link #adc} </li> * <li> {@link #sbc} </li> * <li> {@link #or} </li> * <li> {@link #xor} </li> * <li> {@link #and} </li> * <li> {@link #asl} </li> * <li> {@link #rol} </li> * <li> {@link #lsr} </li> * <li> {@link #ror} </li> * </ul> * * XXX think about multi byte addition - using this calculating > 1 byte memory doesn't work, of course. */ public class Mos6502Alu { private final Registers registers; public Mos6502Alu(Registers registers) { this.registers = registers; } /** * <b>Sets the {@link Registers} carry flag to the carry of the operation</b><br/> * <br/> * Return the addition of <code>byteA</code>, <code>byteB</code> and the contents of the {@link Registers} carry * flag.<br/> * <br> * The carry flag is used for multi-byte addition, so it should be cleared at the start of any addition * that doesn't need to take into account a carry from a previous one. * * @return the result of <code>byteA ADD byteB</code> */ public RoxByte adc(final RoxByte byteA, final RoxByte byteB){ int carry = registers.getFlag(Registers.Flag.CARRY) ? 1 : 0; final RoxWord result = RoxWord.literalFrom(byteA.getRawValue() + byteB.getRawValue() + carry); registers.setFlagTo(Registers.Flag.CARRY, result.getHighByte().isBitSet(0)); if (isAdcOverflow(byteA, byteB, result)) registers.setFlag(Registers.Flag.OVERFLOW); return result.getLowByte(); } /** * Is the sign of both inputs is different from the sign of the result i.e. 
bit 7 set on the result of * <code>((a^result) & (b^result))</code> * * @param inputByteA * @param inputByteB * @param result * @return if the result of adc(inputByteA,inputByteB) should cause an overflow bit */ private boolean isAdcOverflow(final RoxByte inputByteA, final RoxByte inputByteB, final RoxWord result) { return and(xor(inputByteA, result.getLowByte()), xor(inputByteB, result.getLowByte())).isBitSet(7); } public RoxByte sbc(RoxByte byteA, RoxByte byteB) { return adc(byteA, byteB.asOnesCompliment()); } /** * @return the result of <code>byteA OR byteB</code><br/> */ public RoxByte or(RoxByte byteA, RoxByte byteB) { return RoxByte.fromLiteral(byteA.getRawValue() | byteB.getRawValue()); } /** * @return the result of <code>byteA AND byteB</code><br/> */ public RoxByte and(RoxByte byteA, RoxByte byteB) { return RoxByte.fromLiteral(byteA.getRawValue() & byteB.getRawValue()); } /** * @return the result of <code>byteA XOR byteB</code><br/> */ public RoxByte xor(RoxByte byteA, RoxByte byteB) { return RoxByte.fromLiteral(byteA.getRawValue() ^ byteB.getRawValue()); } /** * Shift bits left and write a zero into the low order bit, setting the carry to whatever * is shifted out of the high order bit. * * @return the result of <code>ASL byteA</code> */ public RoxByte asl(RoxByte byteA) { final RoxWord result = RoxWord.literalFrom((byteA.getRawValue() << 1)); registers.setFlagTo(Registers.Flag.CARRY, result.getHighByte().isBitSet(0)); return result.getLowByte(); } /** * Shift bits left and write the contents of the carry flag into the low order bit, setting * the carry to whatever is shifted out of the high order bit. * * @return the result of <code>ROL byteA</code> */ public RoxByte rol(RoxByte byteA) { int carry = registers.getFlag(Registers.Flag.CARRY) ? 
1 : 0; final RoxWord result = RoxWord.literalFrom((byteA.getRawValue() << 1) + carry); registers.setFlagTo(Registers.Flag.CARRY, result.getHighByte().isBitSet(0)); return result.getLowByte(); } /** * Shift bits right and write a zero into the high order bit, setting the carry to whatever * is shifted out of the low order bit. * * @return the result of <code>LSR byteA</code> */ public RoxByte lsr(RoxByte byteA) { final RoxByte result = RoxByte.fromLiteral((byteA.getRawValue() >> 1)); registers.setFlagTo(Registers.Flag.CARRY, byteA.isBitSet(0)); return result; } /** * Shift bits right and write the contents of the carry flag into the high order bit, * setting the carry to whatever is shifted out of the low order bit. * * @return the result of <code>ROR byteA</code> */ public RoxByte ror(RoxByte byteA) { int carry = registers.getFlag(Registers.Flag.CARRY) ? 0b10000000 : 0; final RoxByte result = RoxByte.fromLiteral((byteA.getRawValue() >> 1) | carry); registers.setFlagTo(Registers.Flag.CARRY, byteA.isBitSet(0)); return result; } }
package org.zstack.core;

/**
 * Core global properties of the ZStack management server.
 *
 * Each field is annotated with {@code @GlobalProperty}, carrying the external property
 * name and an optional default value. NOTE(review): the fields are presumably populated
 * from system/configuration properties by the framework that processes
 * {@code @GlobalPropertyDefinition} -- the injection mechanism is not visible here.
 */
@GlobalPropertyDefinition
public class CoreGlobalProperty {
    // True when running inside the unit-test harness.
    @GlobalProperty(name = "unitTestOn", defaultValue = "false")
    public static boolean UNIT_TEST_ON;
    // Spring bean reference context configuration file name.
    @GlobalProperty(name = "beanRefContextConf", defaultValue = "beanRefContext.xml")
    public static String BEAN_REF_CONTEXT_CONF;
    // Enables the VM tracer subsystem.
    @GlobalProperty(name = "vmTracerOn", defaultValue = "true")
    public static boolean VM_TRACER_ON;
    // Enables profiling of workflow execution.
    @GlobalProperty(name = "profiler.workflow", defaultValue = "false")
    public static boolean PROFILER_WORKFLOW;
    // Enables profiling of HTTP calls.
    @GlobalProperty(name = "profiler.httpCall", defaultValue = "false")
    public static boolean PROFILER_HTTP_CALL;
    // Exit the JVM if the management server fails to boot.
    @GlobalProperty(name = "exitJVMOnBootFailure", defaultValue = "true")
    public static boolean EXIT_JVM_ON_BOOT_FAILURE;
    @GlobalProperty(name = "checkBoxTypeInInventory", defaultValue = "false")
    public static boolean CHECK_BOX_TYPE_IN_INVENTORY;
    // Path of the PID file; {user.home} is presumably substituted at load time -- confirm.
    @GlobalProperty(name = "pidFilePath", defaultValue = "{user.home}/management-server.pid")
    public static String PID_FILE_PATH;
    @GlobalProperty(name = "consoleProxyOverriddenIp", defaultValue = "0.0.0.0")
    public static String CONSOLE_PROXY_OVERRIDDEN_IP;
    @GlobalProperty(name = "exposeSimulatorType", defaultValue = "false")
    public static boolean EXPOSE_SIMULATOR_TYPE;
    // Exit the JVM when the management server is stopped.
    @GlobalProperty(name = "exitJVMOnStop", defaultValue = "true")
    public static boolean EXIT_JVM_ON_STOP;
    // Locale used for server messages.
    @GlobalProperty(name = "locale", defaultValue = "zh_CN")
    public static String LOCALE;
    // No default: mirrors the JVM's user.home system property.
    @GlobalProperty(name = "user.home")
    public static String USER_HOME;
    // REST facade timeouts, in milliseconds.
    @GlobalProperty(name = "RESTFacade.readTimeout", defaultValue = "300000")
    public static int REST_FACADE_READ_TIMEOUT;
    @GlobalProperty(name = "RESTFacade.connectTimeout", defaultValue = "300000")
    public static int REST_FACADE_CONNECT_TIMEOUT;
    // True when the server is booting as part of an upgrade.
    @GlobalProperty(name = "upgradeStartOn", defaultValue = "false")
    public static boolean IS_UPGRADE_START;
    @GlobalProperty(name = "shadowEntityOn", defaultValue = "false")
    public static boolean SHADOW_ENTITY_ON;
    // TCP port of the console proxy.
    @GlobalProperty(name = "consoleProxyPort", defaultValue = "4900")
    public static int CONSOLE_PROXY_PORT;
}
package com.royalrangers.configuration; import com.royalrangers.model.*; import com.royalrangers.security.repository.AuthorityRepository; import com.royalrangers.security.repository.UserRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.stereotype.Component; import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.stream.IntStream; @Component public class Bootstrap { @Autowired private UserRepository userRepository; @Autowired private AuthorityRepository authorityRepository; @PostConstruct public void init() { initUsers(); } private void initUsers() { List<User> users = new ArrayList<>(); IntStream.range(1, 4).forEach(element -> { PasswordEncoder encoder = new BCryptPasswordEncoder(); User user = new User(); user.setUsername("username" + element); user.setEmail("user" + element + "@mail.test"); user.setPassword(encoder.encode("password" + element)); user.setFirstName("first" + element); user.setLastName("last" + element); user.setEnabled(true); user.setLastPasswordResetDate(new Date()); Authority authority = new Authority(); authority.setUsers(new HashSet<User>() {{ add(user); }}); switch (element) { case 1: authority.setName(AuthorityName.ROLE_USER); user.setCountry(new Country("Ukraine")); user.setCity(new City(user.getCountry(), "Cherkasy")); user.setGroup(new Group(user.getCity(), "group1")); user.setPlatoon(new Platoon(user.getGroup(), "platoon1")); user.setSection(new Section(user.getPlatoon(), "section1")); break; case 2: authority.setName(AuthorityName.ROLE_ADMIN); user.setCountry(new Country("USA")); user.setCity(new City(user.getCountry(), "Miami")); user.setGroup(new Group(user.getCity(), "group2")); user.setPlatoon(new Platoon(user.getGroup(), "platoon2")); 
user.setSection(new Section(user.getPlatoon(), "section2")); break; case 3: authority.setName(AuthorityName.ROLE_SUPER_ADMIN); user.setCountry(new Country("Canada")); user.setCity(new City(user.getCountry(), "Montreal")); user.setGroup(new Group(user.getCity(), "group3")); user.setPlatoon(new Platoon(user.getGroup(), "platoon3")); user.setSection(new Section(user.getPlatoon(), "section3")); break; } authorityRepository.save(authority); user.setAuthorities(new HashSet<Authority>() {{ add(authority); }}); users.add(user); }); userRepository.save(users); } }
package com.sap.wishlist.service; import java.util.ArrayList; import java.util.List; import javax.annotation.ManagedBean; import javax.inject.Inject; import com.sap.cloud.yaas.servicesdk.jerseysupport.pagination.PaginatedCollection; import com.sap.cloud.yaas.servicesdk.jerseysupport.pagination.PaginationRequest; import com.sap.wishlist.api.generated.Wishlist; import com.sap.wishlist.api.generated.WishlistItem; import com.sap.wishlist.api.generated.YaasAwareParameters; import com.sap.wishlist.customer.CustomerClientService; import com.sap.wishlist.document.DocumentClientService; import com.sap.wishlist.email.EmailClientService; import com.sap.wishlist.utility.AuthorizationHelper; @ManagedBean public class WishlistService { private static final String SCOPE_DOCUMENT_VIEW = "hybris.document_view"; private static final String SCOPE_DOCUMENT_MANAGE = "hybris.document_manage"; private static final String SCOPE_CUSTOMER_VIEW = "hybris.customer_read"; private static final String SCOPE_EMAIL_SEND = "hybris.email_send"; private static final String SCOPE_EMAIL_MANAGE = "hybris.email_manage"; @Inject private EmailClientService emailClient; @Inject private CustomerClientService customerClient; @Inject private DocumentClientService documentClient; @Inject private AuthorizationHelper authHelper; public PaginatedCollection<Wishlist> getWishlists(final PaginationRequest paginationRequest, final YaasAwareParameters yaasAware) { return authHelper.wrapWithAuthorization(yaasAware, SCOPE_DOCUMENT_VIEW, token -> documentClient.getWishlists(paginationRequest, yaasAware, token)); } public String createWishlist(final YaasAwareParameters yaasAware, final Wishlist wishlist) { final String email = authHelper.wrapWithAuthorization(yaasAware, SCOPE_CUSTOMER_VIEW, token -> customerClient.getCustomer(yaasAware, wishlist.getOwner(), token)); authHelper.wrapWithAuthorization(yaasAware, SCOPE_DOCUMENT_MANAGE, token -> { documentClient.createWishlist(yaasAware, wishlist, token); return null; }); final 
boolean created = authHelper.wrapWithAuthorization(yaasAware, SCOPE_EMAIL_MANAGE, token -> emailClient.createTemplate(yaasAware, token)); if (created) { authHelper.wrapWithAuthorization(yaasAware, SCOPE_EMAIL_MANAGE, token -> { emailClient.uploadTemplateSubject(yaasAware, token); return null; }); authHelper.wrapWithAuthorization(yaasAware, SCOPE_EMAIL_MANAGE, token -> { emailClient.uploadTemplateBody(yaasAware, token); return null; }); } authHelper.wrapWithAuthorization(yaasAware, SCOPE_EMAIL_SEND, token -> { emailClient.sendMail(yaasAware, wishlist, email, token); return null; }); return wishlist.getId(); } public Wishlist getWishlist(final YaasAwareParameters yaasAware, final String wishlistId) { return authHelper.wrapWithAuthorization(yaasAware, SCOPE_DOCUMENT_VIEW, token -> documentClient.getWishlist(yaasAware, wishlistId, token)); } public void updateWishlist(final YaasAwareParameters yaasAware, final String wishlistId, final Wishlist wishlist) { authHelper.wrapWithAuthorization(yaasAware, SCOPE_DOCUMENT_MANAGE, token -> { documentClient.updateWishlist(yaasAware, wishlistId, wishlist, token); return null; }); } public void deleteWishlist(final YaasAwareParameters yaasAware, final String wishlistId) { authHelper.wrapWithAuthorization(yaasAware, SCOPE_DOCUMENT_MANAGE, token -> { documentClient.deleteWishlist(yaasAware, wishlistId, token); return null; }); } public List<WishlistItem> getWishlistItems(final PaginationRequest paged, final YaasAwareParameters yaasAware, final String wishlistId) { return authHelper.wrapWithAuthorization(yaasAware, SCOPE_DOCUMENT_VIEW, token -> documentClient.getWishlistItems(paged, yaasAware, wishlistId, token)); } public void createWishlistItem(final YaasAwareParameters yaasAware, final String wishlistId, final WishlistItem wishlistItem) { final Wishlist wishlist = authHelper.wrapWithAuthorization(yaasAware, SCOPE_DOCUMENT_VIEW, token -> documentClient.getWishlist(yaasAware, wishlistId, token)); List<WishlistItem> wishlistItems = 
wishlist.getItems(); if (wishlistItems != null) { wishlistItems.add(wishlistItem); } else { wishlistItems = new ArrayList<WishlistItem>(); wishlistItems.add(wishlistItem); } wishlist.setItems(wishlistItems); authHelper.wrapWithAuthorization(yaasAware, SCOPE_DOCUMENT_MANAGE, token -> { documentClient.updateWishlist(yaasAware, wishlistId, wishlist, token); return null; }); } }
package com.soniccandle.view.components; import java.awt.Color; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.RenderingHints; import javax.swing.JLabel; import javax.swing.JPanel; import com.soniccandle.model.MainModel; public class ColorBox extends JPanel { private static final long serialVersionUID = -3870442884382904214L; MainModel m; public ColorBox(MainModel m) { this.m = m; this.add(new JLabel(" ")); updateUI(); } public void updateBox() { updateUI(); } @Override public void paint(Graphics g1) { Graphics2D g = (Graphics2D) g1; Color bgColor = new Color(Integer.parseInt(m.bgColorRed.getText()), Integer.parseInt(m.bgColorGreen.getText()), Integer.parseInt(m.bgColorBlue.getText())); Color barColor = new Color(Integer.parseInt(m.barColorRed.getText()), Integer.parseInt(m.barColorGreen.getText()), Integer.parseInt(m.barColorBlue.getText()), Integer.parseInt(m.barAlpha.getText())); g.setColor(bgColor); g.fillRect(0, 0, this.getWidth(), this.getHeight()); g.setColor(barColor); g.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_PURE); g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); // g.fillRoundRect(10, 10, this.getWidth(), this.getHeight(),15,15); g.fillOval(3, 3, this.getWidth() - 6, this.getHeight() - 6); } }
package cumt.tj.learn.structures.heap; public class BinaryHeap<T extends Comparable<? super T>> implements Heap<T>{ //PECSComparableComparable<? super T> T[] theHeap; private static final int DEFAULT_SIZE=10; int currentSize; public BinaryHeap() { this(DEFAULT_SIZE); } public BinaryHeap(int size) { theHeap=(T[])new Comparable[size]; } public BinaryHeap(T[] array){ theHeap=array; currentSize=array.length-1; } /** * * @param t */ public void insert(T t) { int hole=++currentSize; theHeap[hole]=t; siftUp(hole); } /** * * @return */ public T deleteMin() { if(currentSize<=0) throw new RuntimeException(""); T min=theHeap[1]; //currentSize1 theHeap[1]=theHeap[currentSize siftDown(currentSize); return min; } /** * * bugx[0] * * @return x[1,n]x[0] */ public T[] sort() { for(int i=2;i<=currentSize;i++){ siftUp(i); } T tmp; for(int i=currentSize;i>=2;i tmp=theHeap[1]; theHeap[1]=theHeap[i]; theHeap[i]=tmp; siftDown(i-1); } return theHeap; } /** * * @param hole */ private void siftUp(int hole){ int i=hole; T tmp=theHeap[i]; while (true){ if(i==1) break; int parent=i/2; if(theHeap[parent].compareTo(theHeap[i])<=0) break; theHeap[i]=theHeap[parent];theHeap[parent]=tmp; i=parent; } } /** * theHeap[1,n]theHeap[2,n] * theHeap[1]theHeap[1,n] * @param n */ protected void siftDown(int n){ int hole=1; int child; T tmp=theHeap[hole]; while (true){ child=hole*2; if(hole*2>n) break; if((child+1)<=n&&theHeap[child].compareTo(theHeap[child+1])>0) child++; if(theHeap[child].compareTo(theHeap[hole])>=0) break; theHeap[hole]=theHeap[child];theHeap[child]=tmp; hole=child; } } }
package de.siegmar.logbackgelf; import java.io.Closeable; import java.io.IOException; import java.io.Writer; /** * Simple JSON encoder with very basic functionality that is required by this library. */ class SimpleJsonEncoder implements Closeable { private static final char QUOTE = '"'; /** * Wrapped writer. */ private final Writer writer; /** * Flag to determine if a comma has to be added on next append execution. */ private boolean started; /** * Flag set when JSON object is closed by curly brace. */ private boolean closed; SimpleJsonEncoder(final Writer writer) throws IOException { this.writer = writer; writer.append('{'); } /** * Append field with quotes and escape characters added, if required. * * @return this */ SimpleJsonEncoder appendToJSON(final String key, final Object value) throws IOException { if (closed) { throw new IllegalStateException("Encoder already closed"); } if (value != null) { appendKey(key); if (value instanceof Number) { writer.append(value.toString()); } else { writer.append(QUOTE); escapeString(value.toString()); writer.append(QUOTE); } } return this; } /** * Append field with quotes and escape characters added in the key, if required. * The value is added without quotes and any escape characters. * * @return this */ SimpleJsonEncoder appendToJSONUnquoted(final String key, final Object value) throws IOException { if (closed) { throw new IllegalStateException("Encoder already closed"); } if (value != null) { appendKey(key); writer.append(value.toString()); } return this; } private void appendKey(final String key) throws IOException { if (started) { writer.append(','); } else { started = true; } writer.append(QUOTE); escapeString(key); writer.append(QUOTE).append(':'); } /** * Escape characters in string, if required per RFC-7159 (JSON). * * @param str string to be escaped. 
*/ @SuppressWarnings("checkstyle:cyclomaticcomplexity") private void escapeString(final String str) throws IOException { for (int i = 0; i < str.length(); i++) { final char ch = str.charAt(i); switch (ch) { case QUOTE: case '\\': case '/': writer.append('\\'); writer.append(ch); break; case '\b': writer.append("\\b"); break; case '\f': writer.append("\\f"); break; case '\n': writer.append("\\n"); break; case '\r': writer.append("\\r"); break; case '\t': writer.append("\\t"); break; default: if (ch < ' ') { writer.append(escapeCharacter(ch)); } else { writer.append(ch); } } } } /** * Escapes character to unicode string representation (&#92;uXXXX). * * @param ch character to be escaped. * @return escaped representation of character. */ @SuppressWarnings("checkstyle:magicnumber") private static String escapeCharacter(final char ch) { final String prefix; if (ch < 0x10) { prefix = "000"; } else if (ch < 0x100) { prefix = "00"; } else if (ch < 0x1000) { prefix = "0"; } else { prefix = ""; } return "\\u" + prefix + Integer.toHexString(ch); } @Override public void close() throws IOException { if (!closed) { writer.append('}'); closed = true; } writer.close(); } }
package eu.over9000.skadi.stream; import java.io.IOException; import java.net.URISyntaxException; import java.net.URLEncoder; import java.util.List; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import eu.over9000.skadi.channel.Channel; import eu.over9000.skadi.logging.SkadiLogging; import eu.over9000.skadi.util.HttpUtil; import eu.over9000.skadi.util.M3UParser; import eu.over9000.skadi.util.StringUtil; public class StreamRetriever { private static final JsonParser parser = new JsonParser(); public static StreamDataset getStreams(final Channel channel) { for (int tryCount = 0; tryCount < 5; tryCount++) { try { final String channelname = StringUtil.extractChannelName(channel.getURL()); final String tokenResponse = HttpUtil.getAPIResponse("http://api.twitch.tv/api/channels/" + channelname + "/access_token"); final JsonObject parsedTokenResponse = StreamRetriever.parser.parse(tokenResponse).getAsJsonObject(); final String token = parsedTokenResponse.get("token").getAsString(); final String sig = parsedTokenResponse.get("sig").getAsString(); final String vidURL = "http://usher.twitch.tv/api/channel/hls/" + channelname + ".m3u8?sig=" + sig + "&token=" + URLEncoder.encode(token, "UTF-8") + "&allow_source=true"; final String vidResponse = HttpUtil.getAPIResponse(vidURL); new M3UParser(); final List<StreamQuality> quals = M3UParser.parseString(vidResponse); if ((quals == null) || quals.isEmpty()) { SkadiLogging.log("received empty/invalid quality response"); return null; } return new StreamDataset(channel, quals); } catch (final URISyntaxException | IOException e) { SkadiLogging.log("failed to retrieve stream on try " + tryCount + ", reason: " + e.getMessage()); continue; } } return null; } public static void updateStreamdataDelayed(final Channel channel) { new Thread(new Runnable() { @Override public void run() { final long start = System.currentTimeMillis(); final StreamDataset streamDataset = StreamRetriever.getStreams(channel); 
channel.updateStreamdata(streamDataset); final long duration = System.currentTimeMillis() - start; SkadiLogging.log("retrieved available stream qualities for channel " + channel.getURL() + ", took " + duration + " ms."); } }, "delayed updater").start(); ; } }
package foodtruck.server.resources; import java.util.Collection; import java.util.List; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import com.google.common.base.Predicate; import com.google.common.base.Strings; import com.google.common.collect.Collections2; import com.google.inject.Inject; import com.sun.jersey.api.JResponse; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormatter; import foodtruck.dao.TrackingDeviceDAO; import foodtruck.dao.TruckDAO; import foodtruck.model.TrackingDevice; import foodtruck.model.Truck; import foodtruck.server.security.SecurityChecker; import foodtruck.socialmedia.ProfileSyncService; import foodtruck.util.Clock; import foodtruck.util.DateOnlyFormatter; import static foodtruck.server.resources.Resources.requiresAdmin; /** * @author aviolette@gmail.com * @since 6/13/12 */ @Path("/trucks{view : (\\.[a-z]{3})?}") public class TruckResource { private static final Predicate<Truck> NOT_HIDDEN = new Predicate<Truck>() { @Override public boolean apply(Truck truck) { return !truck.isHidden(); } }; private final TruckDAO truckDAO; private final Clock clock; private final DateTimeZone zone; private final DateTimeFormatter formatter; private final ProfileSyncService profileSyncService; private final DailySpecialResourceFactory dailySpecialResourceFactory; private final TrackingDeviceDAO trackingDeviceDAO; private final SecurityChecker securityChecker; @Inject public TruckResource(TruckDAO truckDAO, Clock clock, DateTimeZone zone, @DateOnlyFormatter DateTimeFormatter formatter, ProfileSyncService profileSyncService, DailySpecialResourceFactory dailySpecialResourceFactory, TrackingDeviceDAO trackingDeviceDAO, SecurityChecker securityChecker) { this.truckDAO = truckDAO; this.clock = 
clock; this.zone = zone; this.formatter = formatter; this.profileSyncService = profileSyncService; this.dailySpecialResourceFactory = dailySpecialResourceFactory; this.trackingDeviceDAO = trackingDeviceDAO; this.securityChecker = securityChecker; } @GET @Produces({"application/json", "text/csv", "text/plain"}) public JResponse<Collection<Truck>> getTrucks(@PathParam("view") String view, @QueryParam("active") final String active, @QueryParam("tag") final String filteredBy) { MediaType mediaType = MediaType.APPLICATION_JSON_TYPE; if (".csv".equals(view)) { mediaType = new MediaType("text", "csv"); } else if (".txt".equals(view)) { mediaType = MediaType.TEXT_PLAIN_TYPE; } Collection<Truck> response; if ("false".equals(active)) { response = truckDAO.findInactiveTrucks(); } else if ("all".equals(active)) { response = truckDAO.findVisibleTrucks(); } else { response = Strings.isNullOrEmpty(filteredBy) ? truckDAO.findActiveTrucks() : truckDAO.findByCategory(filteredBy); } return JResponse.ok(Collections2.filter(response, NOT_HIDDEN), mediaType).build(); } @GET @Produces("application/json") @Path("{truckId}") public JResponse<Truck> getTruck(@PathParam("truckId") String truckId) { Truck t = truckDAO.findById(truckId); return JResponse.ok(t).build(); } @POST @Path("{truckId}/mute") public void muteTruck(@PathParam("truckId") String truckId, @QueryParam("until") String until) { requiresAdmin(); DateTime muteUntil = Strings.isNullOrEmpty(until) ? 
clock.currentDay().toDateTimeAtStartOfDay(zone).plusDays(1) : formatter.parseDateTime(until); Truck t = truckDAO.findById(truckId); t = Truck.builder(t).muteUntil(muteUntil) .build(); truckDAO.save(t); } @POST @Path("{truckId}/unmute") public void unmuteTruck(@PathParam("truckId") String truckId) { requiresAdmin(); Truck t = truckDAO.findById(truckId); t = Truck.builder(t).muteUntil(null) .build(); truckDAO.save(t); } @DELETE @Path("{truckId}") public void delete(@PathParam("truckId") String truckId) { requiresAdmin(); truckDAO.delete(truckId); } @Path("{truckId}/specials") public DailySpecialResource getDailySpecialResource(@PathParam("truckId") String truckId) { return dailySpecialResourceFactory.create(truckDAO.findById(truckId)); } @GET @Path("{truckId}/beacons") public List<TrackingDevice> findBeacons(@PathParam("truckId") String truckId) { securityChecker.requiresLoggedInAs(truckId); return trackingDeviceDAO.findByTruckId(truckId); } @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public JResponse<Truck> createTruck(Truck truck) { Resources.requiresAdmin(); if (truckDAO.findById(truck.getId()) != null) { throw new BadRequestException("POST can only be used , for creating objects"); } return JResponse.ok(profileSyncService.createFromTwitter(truck)).build(); } }
package foodtruck.twitter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URL; import java.nio.channels.Channels; import java.util.logging.Level; import java.util.logging.Logger; import com.google.api.client.util.ByteStreams; import com.google.appengine.tools.cloudstorage.GcsFileOptions; import com.google.appengine.tools.cloudstorage.GcsFilename; import com.google.appengine.tools.cloudstorage.GcsOutputChannel; import com.google.appengine.tools.cloudstorage.GcsService; import com.google.common.base.Strings; import com.google.common.base.Throwables; import com.google.common.collect.Iterables; import com.google.inject.Inject; import foodtruck.dao.ConfigurationDAO; import foodtruck.dao.TruckDAO; import foodtruck.model.Configuration; import foodtruck.model.Truck; import twitter4j.PagableResponseList; import twitter4j.ResponseList; import twitter4j.Twitter; import twitter4j.TwitterException; import twitter4j.User; /** * @author aviolette * @since 12/30/14 */ public class ProfileSyncServiceImpl implements ProfileSyncService { private static final Logger log = Logger.getLogger(ProfileSyncServiceImpl.class.getName()); private final TwitterFactoryWrapper twitterFactory; private final GcsService cloudStorage; private final TruckDAO truckDAO; private final ConfigurationDAO configurationDAO; @Inject public ProfileSyncServiceImpl(TwitterFactoryWrapper twitterFactory, GcsService cloudStorage, TruckDAO truckDAO, ConfigurationDAO configDAO) { this.twitterFactory = twitterFactory; this.cloudStorage = cloudStorage; this.truckDAO = truckDAO; this.configurationDAO = configDAO; } @Override public Truck createFromTwitter(Truck truck) { Twitter twitter = twitterFactory.create(); try { ResponseList<User> lookup = twitter.users().lookupUsers(new String[]{truck.getTwitterHandle()}); User user = Iterables.getFirst(lookup, null); if (user != null) { Configuration configuration = configurationDAO.find(); String url = 
syncToGoogleStorage(user.getScreenName(), user.getProfileImageURL(), configuration.getBaseUrl(), configuration.getTruckIconsBucket()); truck = Truck.builder(truck) .name(user.getName()) .iconUrl(url) .build(); } } catch (TwitterException e) { log.log(Level.WARNING, "Error contacting twitter", e.getMessage()); } truckDAO.save(truck); return truck; } private String syncToGoogleStorage(String twitterHandle, String ogIconUrl, String baseUrl, String bucket) { try { // If the twitter profile exists, then get the icon URL String extension = ogIconUrl.substring(ogIconUrl.lastIndexOf(".")), fileName = twitterHandle + extension; // copy icon to google cloud storage GcsFilename gcsFilename = new GcsFilename(bucket, fileName); GcsOutputChannel channel = cloudStorage.createOrReplace(gcsFilename, new GcsFileOptions.Builder().mimeType(fileName.matches("png") ? "image/png" : "image/jpeg") .build()); URL iconUrl = new URL(ogIconUrl); InputStream in = iconUrl.openStream(); OutputStream out = Channels.newOutputStream(channel); try { ByteStreams.copy(in, out); } finally { in.close(); out.close(); } ogIconUrl = baseUrl + "/images/truckicons/" + fileName; } catch (Exception io) { log.log(Level.WARNING, io.getMessage(), io); } return ogIconUrl; } @Override public void syncFromTwitterList(String primaryTwitterList) { Twitter twitter = twitterFactory.create(); Configuration configuration = configurationDAO.find(); String baseUrl = configurationDAO.find().getBaseUrl(); int twitterListId = Integer.parseInt(Strings.nullToEmpty(configuration.getPrimaryTwitterList())); long cursor = -1; try { PagableResponseList<User> result; do { result = twitter.list().getUserListMembers(twitterListId, cursor); for (User user : result) { String twitterHandle = user.getScreenName().toLowerCase(); String url = syncToGoogleStorage(twitterHandle, user.getProfileImageURL(), baseUrl, configuration.getTruckIconsBucket()); truckDAO.save(Truck.builder() .id(user.getScreenName()) .name(user.getName()) 
.twitterHandle(twitterHandle) .iconUrl(url) .build()); } } while (!result.isEmpty()); } catch (TwitterException e) { throw Throwables.propagate(e); } }}
package fr.lteconsulting.pomexplorer; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import org.apache.maven.model.Model; import org.apache.maven.model.Parent; import org.apache.maven.model.Plugin; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import org.jboss.shrinkwrap.resolver.api.InvalidConfigurationFileException; import org.jboss.shrinkwrap.resolver.api.maven.Maven; import org.jboss.shrinkwrap.resolver.api.maven.MavenResolvedArtifact; import org.jboss.shrinkwrap.resolver.api.maven.MavenResolverSystem; import org.jboss.shrinkwrap.resolver.api.maven.MavenWorkingSession; import org.jboss.shrinkwrap.resolver.api.maven.ScopeType; import org.jboss.shrinkwrap.resolver.api.maven.coordinate.MavenDependency; import org.jboss.shrinkwrap.resolver.api.maven.pom.ParsedPomFile; import org.jboss.shrinkwrap.resolver.impl.maven.MavenWorkingSessionImpl; import org.jboss.shrinkwrap.resolver.impl.maven.task.AddScopedDependenciesTask; import org.jboss.shrinkwrap.resolver.impl.maven.task.ConfigureSettingsFromFileTask; import fr.lteconsulting.pomexplorer.graph.relation.BuildDependencyRelation; import fr.lteconsulting.pomexplorer.graph.relation.DependencyRelation; import fr.lteconsulting.pomexplorer.graph.relation.ParentRelation; public class PomAnalyzer { public void analyze( String directory, WorkingSession session, Client client ) { processFile( new File( directory ), session, client ); } private void processFile( File file, WorkingSession session, Client client ) { if( file == null ) return; if( file.isDirectory() ) { String name = file.getName(); if( "target".equalsIgnoreCase( name ) || "bin".equalsIgnoreCase( name ) || "src".equalsIgnoreCase( name ) ) return; for( File f : file.listFiles() ) processFile( f, session, client ); } else if( file.getName().equalsIgnoreCase( "pom.xml" ) ) { 
processPom( file, session, client ); } } /** * Takes a GAV, download it if needed, analyse its dependencies and add them to the graph * * @param gav gav to be analyzed * @param session working session */ public void registerExternalDependency( WorkingSession session, Client client, StringBuilder log, GAV gav ) { String mavenSettingsFilePath = session.getMavenSettingsFilePath(); MavenResolverSystem resolver; if( mavenSettingsFilePath != null && !mavenSettingsFilePath.isEmpty() ) resolver = Maven.configureResolver().fromFile( mavenSettingsFilePath ); else resolver = Maven.resolver(); MavenResolvedArtifact resolvedArtifact = null; try { resolvedArtifact = resolver.resolve( gav.toString() ).withoutTransitivity().asSingle(MavenResolvedArtifact.class); } catch( Exception e ) { log.append( Tools.errorMessage( "shrinkwrap error : " + e.getMessage() ) ); } if( resolvedArtifact == null ) { log.append(Tools.warningMessage("cannot resolve the artifact " + gav)); return; } log.append( "resolved artifact : " + resolvedArtifact.getCoordinate().toString() + "<br/>" ); // Big hack here ! 
String pomPath = resolvedArtifact.asFile().getAbsolutePath(); int idx = pomPath.lastIndexOf('.'); if( idx <0 ) return; pomPath = pomPath.substring(0, idx+1 ) + "pom"; processPom(new File(pomPath), session, client); } private void processPom( File pomFile, WorkingSession session, Client client ) { MavenProject unresolved = readPomFile( pomFile ); if( unresolved == null ) { client.send( "<span style='color:orange;'>cannot read pom " + pomFile.getAbsolutePath() + "</span><br/>" ); return; } ParsedPomFile resolved = loadPomFile( session, pomFile ); if( resolved == null ) { client.send( "<span style='color:orange;'>cannot load pom " + unresolved.getGroupId() + ":" + unresolved.getArtifactId() + ":" + unresolved.getVersion() + " (<i>" + pomFile.getAbsolutePath() + "</i>)</span><br/>" ); return; } GAV gav = new GAV( resolved.getGroupId(), resolved.getArtifactId(), resolved.getVersion() ); if( session.projects().contains( gav ) ) { client.send( "<span style='color:orange;'>pom already processed '" + pomFile.getAbsolutePath() + "' ! 
Ignoring.</span><br/>" ); return; } session.graph().addGav( gav ); // hierarchy Parent parent = unresolved.getModel().getParent(); if( parent != null ) { GAV parentGav = new GAV( parent.getGroupId(), parent.getArtifactId(), parent.getVersion() ); session.graph().addGav( parentGav ); session.graph().addRelation( gav, parentGav, new ParentRelation() ); } // dependencies for( MavenDependency dependency : resolved.getDependencies() ) { GAV depGav = new GAV( dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion() ); session.graph().addGav( depGav ); session.graph().addRelation( gav, depGav, new DependencyRelation( dependency.getScope().name(), dependency.getClassifier() ) ); } // build dependencies try { Model model = Tools.getParsedPomFileModel( resolved ); for( Plugin plugin : model.getBuild().getPlugins() ) { GAV depGav = new GAV( plugin.getGroupId(), plugin.getArtifactId(), plugin.getVersion() ); session.graph().addGav( depGav ); session.graph().addRelation( gav, depGav, new BuildDependencyRelation() ); } } catch( IllegalArgumentException | SecurityException e ) { e.printStackTrace(); } Project projectInfo = new Project( pomFile, resolved, unresolved ); session.projects().add( projectInfo ); session.repositories().add( projectInfo ); client.send( "processed project " + projectInfo.getGav() ); } private static MavenWorkingSession createMavenWorkingSession( WorkingSession workingSession ) { try { String mavenSettingsFilePath = workingSession.getMavenSettingsFilePath(); MavenWorkingSession session = new MavenWorkingSessionImpl(); if( mavenSettingsFilePath != null ) session = new ConfigureSettingsFromFileTask( mavenSettingsFilePath ).execute( session ); session = new AddScopedDependenciesTask( ScopeType.COMPILE, ScopeType.IMPORT, ScopeType.SYSTEM, ScopeType.RUNTIME ).execute( session ); return session; } catch( InvalidConfigurationFileException e ) { return null; } } private ParsedPomFile loadPomFile( WorkingSession workingSession, File pomFile ) { 
MavenWorkingSession session = createMavenWorkingSession(workingSession); if(session == null) return null; try { session.loadPomFromFile(pomFile); return session.getParsedPomFile(); } catch (Exception e) { return null; } } private MavenProject readPomFile( File pom ) { Model model = null; FileReader reader = null; MavenXpp3Reader mavenreader = new MavenXpp3Reader(); try { reader = new FileReader( pom ); } catch( FileNotFoundException e1 ) { } try { model = mavenreader.read( reader ); model.setPomFile( pom ); } catch( IOException | XmlPullParserException e1 ) { } MavenProject project = new MavenProject( model ); return project; } }
package hudson.plugins.copyartifact; import com.thoughtworks.xstream.converters.UnmarshallingContext; import hudson.DescriptorExtensionList; import hudson.EnvVars; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.Util; import hudson.console.HyperlinkNote; import hudson.diagnosis.OldDataMonitor; import hudson.init.InitMilestone; import hudson.init.Initializer; import hudson.matrix.MatrixBuild; import hudson.matrix.MatrixProject; import hudson.maven.MavenModuleSet; import hudson.maven.MavenModuleSetBuild; import hudson.model.*; import hudson.model.listeners.ItemListener; import hudson.model.listeners.RunListener; import hudson.security.ACL; import hudson.security.SecurityRealm; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.Builder; import hudson.util.DescribableList; import hudson.util.FormValidation; import hudson.util.XStream2; import java.io.IOException; import java.io.PrintStream; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import jenkins.model.Jenkins; import org.acegisecurity.GrantedAuthority; import org.acegisecurity.providers.UsernamePasswordAuthenticationToken; import org.kohsuke.stapler.AncestorInPath; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.Stapler; import org.kohsuke.stapler.StaplerRequest; /** * Build step to copy artifacts from another project. * @author Alan Harder */ public class CopyArtifact extends Builder { // specifies upgradeCopyArtifact is needed to work. 
private static boolean upgradeNeeded = false; private static Logger LOGGER = Logger.getLogger(CopyArtifact.class.getName()); @Deprecated private String projectName; private String project; private String parameters; private final String filter, target; private /*almost final*/ BuildSelector selector; @Deprecated private transient Boolean stable; private final Boolean flatten, optional; @DataBoundConstructor public CopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String target, boolean flatten, boolean optional) { StaplerRequest req = Stapler.getCurrentRequest(); if (req!=null) { ItemGroup context = req.findAncestorObject(ItemGroup.class); if (context == null) context = Jenkins.getInstance(); // Prevents both invalid values and access to artifacts of projects which this user cannot see. // If value is parameterized, it will be checked when build runs. if (projectName.indexOf('$') < 0 && Jenkins.getInstance().getItem(projectName, context, Job.class) == null) projectName = ""; // Ignore/clear bad value to avoid ugly 500 page } this.project = projectName; this.parameters = Util.fixEmptyAndTrim(parameters); this.selector = selector; this.filter = Util.fixNull(filter).trim(); this.target = Util.fixNull(target).trim(); this.flatten = flatten ? Boolean.TRUE : null; this.optional = optional ? Boolean.TRUE : null; } // Upgrade data from old format public static class ConverterImpl extends XStream2.PassthruConverter<CopyArtifact> { public ConverterImpl(XStream2 xstream) { super(xstream); } @Override protected void callback(CopyArtifact obj, UnmarshallingContext context) { if (obj.selector == null) { obj.selector = new StatusBuildSelector(obj.stable != null && obj.stable); OldDataMonitor.report(context, "1.355"); // Core version# when CopyArtifact 1.2 released } if (obj.isUpgradeNeeded()) { // A Copy Artifact to be upgraded. // For information of the containing project is needed, // The upgrade will be performed by upgradeCopyArtifact. 
setUpgradeNeeded(); } } } private static synchronized void setUpgradeNeeded() { if (!upgradeNeeded) { LOGGER.info("Upgrade for Copy Artifact is scheduled."); upgradeNeeded = true; } } // get all CopyArtifacts configured to AbstractProject. This works both for Project and MatrixProject. private static List<CopyArtifact> getCopyArtifactsInProject(AbstractProject<?,?> project) throws IOException { DescribableList<Builder,Descriptor<Builder>> list = project instanceof Project ? ((Project<?,?>)project).getBuildersList() : (project instanceof MatrixProject ? ((MatrixProject)project).getBuildersList() : null); if (list == null) return Collections.emptyList(); return list.getAll(CopyArtifact.class); } @Initializer(after=InitMilestone.JOB_LOADED) public static void upgradeCopyArtifact() { if (!upgradeNeeded) { return; } upgradeNeeded = false; boolean isUpgraded = false; for (AbstractProject<?,?> project: Jenkins.getInstance().getAllItems(AbstractProject.class)) { try { for (CopyArtifact target: getCopyArtifactsInProject(project)) { try { if (target.upgradeIfNecessary(project)) { isUpgraded = true; } } catch(IOException e) { LOGGER.log(Level.SEVERE, String.format("Failed to upgrade CopyArtifact in %s", project.getFullName()), e); } } } catch (IOException e) { LOGGER.log(Level.SEVERE, String.format("Failed to upgrade CopyArtifact in %s", project.getFullName()), e); } } if (!isUpgraded) { // No CopyArtifact is upgraded. 
LOGGER.warning("Update of CopyArtifact is scheduled, but no CopyArtifact to upgrade was found!"); } } public String getProjectName() { return project; } public String getParameters() { return parameters; } public BuildSelector getBuildSelector() { return selector; } public String getFilter() { return filter; } public String getTarget() { return target; } public boolean isFlatten() { return flatten != null && flatten; } public boolean isOptional() { return optional != null && optional; } private boolean upgradeIfNecessary(AbstractProject<?,?> job) throws IOException { if (isUpgradeNeeded()) { int i = projectName.lastIndexOf('/'); if (i != -1 && projectName.indexOf('=', i) != -1 && /* not matrix */Jenkins.getInstance().getItem(projectName, job.getParent(), Job.class) == null) { project = projectName.substring(0, i); parameters = projectName.substring(i + 1); } else { project = projectName; parameters = null; } LOGGER.log(Level.INFO, "Split {0} into {1} with parameters {2}", new Object[] {projectName, project, parameters}); projectName = null; job.save(); return true; } else { return false; } } private boolean isUpgradeNeeded() { return (projectName != null); } @Override public boolean perform(AbstractBuild<?,?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { upgradeIfNecessary(build.getProject()); PrintStream console = listener.getLogger(); String expandedProject = project, expandedFilter = filter; try { EnvVars env = build.getEnvironment(listener); env.overrideAll(build.getBuildVariables()); // Add in matrix axes.. 
expandedProject = env.expand(project); Job<?, ?> job = Jenkins.getInstance().getItem(expandedProject, build.getProject().getParent(), Job.class); if (job != null && !expandedProject.equals(project) && !canReadFrom(job, build)) { job = null; // Disallow access } if (job == null) { console.println(Messages.CopyArtifact_MissingProject(expandedProject)); return false; } Run src = selector.getBuild(job, env, parameters != null ? new ParametersBuildFilter(env.expand(parameters)) : new BuildFilter(), build); if (src == null) { console.println(Messages.CopyArtifact_MissingBuild(expandedProject)); return isOptional(); // Fail build unless copy is optional } FilePath targetDir = build.getWorkspace(), baseTargetDir = targetDir; if (targetDir == null || !targetDir.exists()) { console.println(Messages.CopyArtifact_MissingWorkspace()); // (see JENKINS-3330) return isOptional(); // Fail build unless copy is optional } // Add info about the selected build into the environment EnvAction envData = build.getAction(EnvAction.class); if (envData != null) { envData.add(getItemGroup(build), expandedProject, src.getNumber()); } if (target.length() > 0) targetDir = new FilePath(targetDir, env.expand(target)); expandedFilter = env.expand(filter); if (expandedFilter.trim().length() == 0) expandedFilter = "**"; Copier copier = Jenkins.getInstance().getExtensionList(Copier.class).get(0).clone(); if (src instanceof MavenModuleSetBuild) { // Copy artifacts from the build (ArchiveArtifacts build step) boolean ok = perform(src, build, expandedFilter, targetDir, baseTargetDir, copier, console); // Copy artifacts from all modules of this Maven build (automatic archiving) for (Run r : ((MavenModuleSetBuild)src).getModuleLastBuilds().values()) ok |= perform(r, build, expandedFilter, targetDir, baseTargetDir, copier, console); return ok; } else if (src instanceof MatrixBuild) { boolean ok = false; // Copy artifacts from all configurations of this matrix build // Use MatrixBuild.getExactRuns if 
available for (Run r : ((MatrixBuild) src).getExactRuns()) // Use subdir of targetDir with configuration name (like "jdk=java6u20") ok |= perform(r, build, expandedFilter, targetDir.child(r.getParent().getName()), baseTargetDir, copier, console); return ok; } else { return perform(src, build, expandedFilter, targetDir, baseTargetDir, copier, console); } } catch (IOException ex) { Util.displayIOException(ex, listener); ex.printStackTrace(listener.error( Messages.CopyArtifact_FailedToCopy(expandedProject, expandedFilter))); return false; } } private boolean canReadFrom(Job<?, ?> job, AbstractBuild<?, ?> build) { if (!ACL.SYSTEM.equals(Jenkins.getAuthentication())) { // if the build does not run on SYSTEM authorization, // Jenkins is configured to use QueueItemAuthenticator. // In this case, builds are configured to run with a proper authorization // (for example, builds run with the authorization of the user who triggered the build), // QueueItemAuthenticator is available from Jenkins 1.520. // See also JENKINS-14999, JENKINS-16956, JENKINS-18285. 
return job.getACL().hasPermission(Item.READ); } // for the backward compatibility, return job.getACL().hasPermission( new UsernamePasswordAuthenticationToken("authenticated", "", new GrantedAuthority[]{ SecurityRealm.AUTHENTICATED_AUTHORITY }), Item.READ); } // retrieve the "folder" (jenkins root if no folder used) for this build private ItemGroup getItemGroup(AbstractBuild<?, ?> build) { ItemGroup group = build.getProject().getParent(); if (group instanceof Job) { // MatrixProject, MavenModuleSet, IvyModuleSet or comparable return ((Job) group).getParent(); } return group; } private boolean perform(Run src, AbstractBuild<?,?> dst, String expandedFilter, FilePath targetDir, FilePath baseTargetDir, Copier copier, PrintStream console) throws IOException, InterruptedException { FilePath srcDir = selector.getSourceDirectory(src, console); if (srcDir == null) { return isOptional(); // Fail build unless copy is optional } copier.init(src,dst,srcDir,baseTargetDir); try { int cnt; if (!isFlatten()) cnt = copier.copyAll(srcDir, expandedFilter, targetDir); else { targetDir.mkdirs(); // Create target if needed FilePath[] list = srcDir.list(expandedFilter); for (FilePath file : list) copier.copyOne(file, new FilePath(targetDir, file.getName())); cnt = list.length; } console.println(Messages.CopyArtifact_Copied(cnt, HyperlinkNote.encodeTo('/'+ src.getParent().getUrl(), src.getParent().getFullDisplayName()), HyperlinkNote.encodeTo('/'+src.getUrl(), Integer.toString(src.getNumber())))); // Fail build if 0 files copied unless copy is optional return cnt > 0 || isOptional(); } finally { copier.end(); } } @Extension public static final class DescriptorImpl extends BuildStepDescriptor<Builder> { public FormValidation doCheckProjectName( @AncestorInPath AbstractItem anc, @QueryParameter String value) { if (!anc.hasPermission(Item.CONFIGURE)) return FormValidation.ok(); FormValidation result; Item item = Jenkins.getInstance().getItem(value, anc.getParent()); if (item != null) result = 
item instanceof MavenModuleSet ? FormValidation.warning(Messages.CopyArtifact_MavenProject()) : (item instanceof MatrixProject ? FormValidation.warning(Messages.CopyArtifact_MatrixProject()) : FormValidation.ok()); else if (value.indexOf('$') >= 0) result = FormValidation.warning(Messages.CopyArtifact_ParameterizedName()); else result = FormValidation.error( hudson.tasks.Messages.BuildTrigger_NoSuchProject( value, AbstractProject.findNearest(value).getName())); return result; } public boolean isApplicable(Class<? extends AbstractProject> clazz) { return true; } public String getDisplayName() { return Messages.CopyArtifact_DisplayName(); } public DescriptorExtensionList<BuildSelector,Descriptor<BuildSelector>> getBuildSelectors() { return Hudson.getInstance().<BuildSelector,Descriptor<BuildSelector>>getDescriptorList(BuildSelector.class); } } // Listen for project renames and update property here if needed. @Extension public static final class ListenerImpl extends ItemListener { @Override public void onRenamed(Item item, String oldName, String newName) { String oldFullName = Items.getCanonicalName(item.getParent(), oldName); String newFullName = Items.getCanonicalName(item.getParent(), newName); for (AbstractProject<?,?> project : Hudson.getInstance().getAllItems(AbstractProject.class)) { try { for (CopyArtifact ca : getCopiers(project)) { String projectName = ca.getProjectName(); String suffix = ""; // Support rename for "MatrixJobName/AxisName=value" type of name int i = projectName.indexOf('='); if (i > 0) { int end = projectName.substring(0,i).lastIndexOf('/'); suffix = projectName.substring(end); projectName = projectName.substring(0, end); } ItemGroup context = project.getParent(); String newProjectName = Items.computeRelativeNamesAfterRenaming(oldFullName, newFullName, projectName, context); if (!projectName.equals(newProjectName)) { ca.project = newProjectName + suffix; project.save(); } } } catch (IOException ex) { 
Logger.getLogger(ListenerImpl.class.getName()).log(Level.WARNING, "Failed to resave project " + project.getName() + " for project rename in CopyArtifact build step (" + oldName + " =>" + newName + ")", ex); } } } private static List<CopyArtifact> getCopiers(AbstractProject<?,?> project) throws IOException { List<CopyArtifact> copiers = getCopyArtifactsInProject(project); for (CopyArtifact copier : copiers) { copier.upgradeIfNecessary(project); } return copiers; } } // Listen for new builds and add EnvAction in any that use CopyArtifact build step @Extension public static final class CopyArtifactRunListener extends RunListener<Build> { public CopyArtifactRunListener() { super(Build.class); } @Override public void onStarted(Build r, TaskListener listener) { if (((Build<?,?>)r).getProject().getBuildersList().get(CopyArtifact.class) != null) r.addAction(new EnvAction()); } } private static class EnvAction implements EnvironmentContributingAction { // Decided not to record this data in build.xml, so marked transient: private transient Map<String,String> data = new HashMap<String,String>(); private void add(ItemGroup ctx, String projectName, int buildNumber) { if (data==null) return; Item item = getProject(ctx, projectName); // Use full name if configured with absolute path // and relative otherwise projectName = projectName.startsWith("/") ? item.getFullName() : item.getRelativeNameFrom(ctx); data.put("COPYARTIFACT_BUILD_NUMBER_" + projectName.toUpperCase().replaceAll("[^A-Z]+", "_"), // Only use letters and _ Integer.toString(buildNumber)); } /** * Retrieve root Job identified by this projectPath. For legacy reason, projectPath uses '/' as separator for * job name and parameters or matrix axe, so can't just use {@link Jenkins#getItemByFullName(String)}. * As a workaround, we split the path into parts and retrieve the item(group)s up to a Job. 
*/ private Job getProject(ItemGroup ctx, String projectPath) { String[] parts = projectPath.split("/"); if (projectPath.startsWith("/")) ctx = Jenkins.getInstance(); for (int i =0; i<parts.length; i++) { String part = parts[i]; if (part.length() == 0) continue; if (part.equals("..")) { ctx = ((Item) ctx).getParent(); continue; } Item item = ctx.getItem(part); if (item == null && i == 0) { // not a relative job name, fall back to "classic" interpretation to consider absolute item = Jenkins.getInstance().getItem(part); } if (item instanceof Job) return (Job) item; ctx = (ItemGroup) item; } return null; } public void buildEnvVars(AbstractBuild<?,?> build, EnvVars env) { if (data!=null) env.putAll(data); } public String getIconFileName() { return null; } public String getDisplayName() { return null; } public String getUrlName() { return null; } } }
package icircles.recomposition; import icircles.abstractdescription.AbstractBasicRegion; import icircles.abstractdual.AbstractDualEdge; import icircles.abstractdual.AbstractDualGraph; import icircles.abstractdual.AbstractDualNode; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; /** * @author Almas Baimagambetov (AlmasB) (almaslvl@gmail.com) */ public final class RecomposerFactory { private static final Logger log = LogManager.getLogger(Recomposer.class); public static Recomposer newRecomposer(RecompositionStrategyType type) { switch (type) { case NESTED: return new BasicRecomposer(nested()); case SINGLY_PIERCED: return new BasicRecomposer(singlyPierced()); case DOUBLY_PIERCED: return new BasicRecomposer(doublyPierced()); default: throw new IllegalArgumentException("Unknown strategy type: " + type); } } private static RecompositionStrategy nested() { return zonesToSplit -> zonesToSplit.stream() .map(Cluster::new) .collect(Collectors.toList()); } // Look for pairs of AbstractBasicRegions which differ by just a // single AbstractCurve - these pairs are potential double-clusters private static RecompositionStrategy singlyPierced() { return zonesToSplit -> seekSinglePiercings(new AbstractDualGraph(zonesToSplit)); } private static RecompositionStrategy doublyPierced() { return zonesToSplit -> seekDoublePiercings(zonesToSplit); } private static List<Cluster> seekNestedPiercings(AbstractDualGraph graph) { return graph.getNodes() .stream() .map(node -> { log.trace("Adding nested cluster: " + node.getZone()); return new Cluster(node.getZone()); }) .collect(Collectors.toList()); } private static List<Cluster> seekSinglePiercings(AbstractDualGraph adg) { List<Cluster> result = new ArrayList<>(); for (AbstractDualEdge e = adg.getLowDegreeEdge(); e != null; e = adg.getLowDegreeEdge()) { Cluster c = new Cluster(e.from.getZone(), 
e.to.getZone()); result.add(c); log.trace("Made single-pierced cluster: " + c); log.trace("Graph before trimming for cluster: " + adg); adg.removeNode(e.from); adg.removeNode(e.to); log.trace("Graph after trimming for cluster: " + adg); } if (adg.getNumEdges() != 0) throw new RuntimeException("Non-empty adg edge set"); result.addAll(seekNestedPiercings(adg)); return result; } private static List<Cluster> seekDoublePiercings(List<AbstractBasicRegion> zonesToSplit) { // Look for four-tuples of AbstractBasicRegions which differ by // two AbstractCurves - these four-tuples are potential double-clusters List<Cluster> result = new ArrayList<>(); AbstractDualGraph adg = new AbstractDualGraph(zonesToSplit); log.trace("Zones to split: " + zonesToSplit); for (List<AbstractDualNode> nodes = adg.getFourTuple(); nodes != null; nodes = adg.getFourTuple()) { if (nodes.isEmpty()) { break; } Cluster c = new Cluster(nodes.get(0).getZone(), nodes.get(1).getZone(), nodes.get(2).getZone(), nodes.get(3).getZone()); result.add(c); log.trace("Made cluster: " + c); log.trace("Graph before trimming for cluster: " + adg); adg.removeNode(nodes.get(0)); adg.removeNode(nodes.get(1)); adg.removeNode(nodes.get(2)); adg.removeNode(nodes.get(3)); log.trace("Graph after trimming for cluster: " + adg); } result.addAll(seekSinglePiercings(adg)); return result; } }
package innovimax.mixthem.interfaces;

import java.io.IOException;

/**
 * Contract for character-oriented output destinations: implementations accept
 * single characters or slices of character buffers and release their
 * underlying resources on {@link #close()}.
 *
 * @author Innovimax
 * @version 1.0
 */
public interface IOutputChar {
    /**
     * Writes one character.
     *
     * @param c the character to write, given as an int
     * @throws IOException if an I/O error occurs
     */
    void writeCharacter(int c) throws IOException;

    /**
     * Writes the first {@code len} characters of the given buffer.
     *
     * @param buffer source buffer of characters
     * @param len how many characters of the buffer to write
     * @throws IOException if an I/O error occurs
     */
    void writeCharacters(char[] buffer, int len) throws IOException;

    /**
     * Closes this output, releasing any system resources it holds.
     *
     * @throws IOException if an I/O error occurs
     */
    void close() throws IOException;
}
package it.cvdlab.lar.pipeline.run;

import it.cvdlab.lar.model.CsrMatrix;
import it.cvdlab.lar.model.InputVectorsContainer;
import it.cvdlab.lar.model.OutputVectorsContainer;
import it.cvdlab.lar.model.serialize.CsrMatrixSerializable;
import it.cvdlab.lar.model.serialize.InputVectorsSerialize;
import it.cvdlab.lar.model.serialize.OutputVectorsSerialize;
import it.cvdlab.lar.pipeline.helpers.ArrayUtils;
import it.cvdlab.lar.pipeline.helpers.BinaryTranslator;
import it.cvdlab.lar.pipeline.helpers.ResultTuple;
import it.cvdlab.lar.pipeline.kernelwrap.LARTestBinary;
import it.cvdlab.lar.pipeline.kernelwrap.LARTestBinaryJava;
import it.cvdlab.lar.pipeline.run.utilities.DefaultFileNames;

import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

import com.google.common.collect.Lists;

/**
 * Command-line entry point that multiplies the "bordo3" CSR matrix against a
 * set of binary selector vectors via the {@link LARTestBinary} kernel wrapper
 * and serializes the resulting vectors to a file.
 */
public class RunJobLimited {
    private static final String BORDO3_FILE = DefaultFileNames.BORDO3_FILE;
    private static final String SELETTORI_FILE = DefaultFileNames.SELETTORI_FILE;
    private static final String OUTPUT_FILE = DefaultFileNames.OUTPUT_FILE;

    @SuppressWarnings("unused")
    private static final Integer[] CRAP_INT_VECTOR = {};

    /**
     * Parses the command line (input matrix, input selectors, output file),
     * loads the inputs and delegates to {@link #runJob}.
     *
     * @param args command-line arguments; see {@code -h} for usage
     */
    public static void main(String[] args) {
        Options cmdLineOptions = new Options();
        cmdLineOptions.addOption("b", "bordo", true,
                "input file containing the bordo matrix. Default: " + BORDO3_FILE);
        cmdLineOptions.addOption("s", "selettori", true,
                "input file containing the chains. Default: " + SELETTORI_FILE);
        cmdLineOptions.addOption("o", "output", true,
                "output file. Default: " + OUTPUT_FILE);
        cmdLineOptions.addOption("h", "help", false, "print help");

        // NOTE(review): GnuParser is deprecated in commons-cli >= 1.3 in favor of
        // DefaultParser — confirm the project's commons-cli version before switching.
        CommandLineParser parser = new GnuParser();
        CommandLine cmd;
        try {
            cmd = parser.parse(cmdLineOptions, args);
        } catch (ParseException e) {
            e.printStackTrace();
            return;
        }

        if (cmd.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("RunJob", cmdLineOptions);
            return;
        }

        // Fall back to defaults for any option not supplied.
        String input_bordo = BORDO3_FILE;
        String input_selettori = SELETTORI_FILE;
        String output_vettori = OUTPUT_FILE;

        if (cmd.hasOption("b")) {
            input_bordo = cmd.getOptionValue("b");
        }
        if (cmd.hasOption("s")) {
            input_selettori = cmd.getOptionValue("s");
        }
        if (cmd.hasOption("o")) {
            output_vettori = cmd.getOptionValue("o");
        }

        System.out.println("Bordo3: " + input_bordo);
        System.out.println("Selettori: " + input_selettori);
        System.out.println("Output: " + output_vettori);

        System.out.println("Lettura bordo3");
        CsrMatrix bordo3 = CsrMatrixSerializable.fromFile(input_bordo);
        System.out.println("Lettura q.c.");
        InputVectorsContainer ivc = InputVectorsSerialize.fromFile(input_selettori);

        runJob(bordo3, ivc, output_vettori);
    }

    /**
     * Runs the kernel multiplication and writes the output container to file.
     *
     * @param b3      the CSR border matrix
     * @param ivc     the input selector vectors (bit-packed before the kernel call)
     * @param outFile path of the serialized output
     */
    private static void runJob(CsrMatrix b3, InputVectorsContainer ivc, String outFile) {
        int vectorLength = ivc.getVectorList().get(0).size();
        int vectorsCount = ivc.getVectorList().size();

        System.out.println("Conversione a binario delle q.c.");
        // Pack each selector vector into ints, one bit per element.
        int[] flatResult = ArrayUtils.flatten(BinaryTranslator.fromArrays(ivc.getVectorList()));
        int bitSetLength = (int) Math.ceil((double) vectorLength / (double) Integer.SIZE);

        System.out.println("Chiamata kernel");
        System.out.println("New vLength: " + bitSetLength);
        System.out.println("Old vLength: " + vectorLength);
        // List<ResultTuple> resultTuples = LARTestBinaryJava.clMultiply(b3, flatResult, bitSetLength, vectorLength);
        List<ResultTuple> resultTuples = LARTestBinary.clMultiply(b3, flatResult, bitSetLength, vectorLength);

        OutputVectorsContainer ov = new OutputVectorsContainer();
        ov.setVectorOffset(ivc.getVectorOffset());

        List<List<Byte>> resultsAnnidated = Lists.newArrayListWithCapacity(vectorsCount);
        System.out.println("Conversione risultati");

        List<Byte> result;
        long totalElapsed = 0;
        for (ResultTuple rtCurr : resultTuples) {
            result = rtCurr.getDataOutput();
            totalElapsed += rtCurr.getElapsedTime();

            // Un-flatten: each of the tuple's vectors occupies b3.getRowCount() bytes.
            for (int i = 0; i < rtCurr.getVectorsQty(); i++) {
                List<Byte> currList = Lists.newArrayListWithCapacity(b3.getRowCount());
                for (int j = 0; j < b3.getRowCount(); j++) {
                    currList.add(result.get(i * b3.getRowCount() + j));
                }
                // NOTE(review): add(i, currList) INSERTS at index i. With more than one
                // ResultTuple, a later tuple's vectors are inserted before earlier ones,
                // interleaving the output order. Verify whether a plain append
                // (resultsAnnidated.add(currList)) was intended.
                resultsAnnidated.add(i, currList);
            }
        }

        ov.setVectorList(resultsAnnidated);
        // Long.valueOf instead of the deprecated Long(long) constructor.
        ov.setVectorStats(Lists.newArrayList(Long.valueOf(totalElapsed), Long.valueOf(0)));

        System.out.println("Serializzazione risultati");
        OutputVectorsSerialize.toFile(ov, outFile);
    }

    // Utility class: no instances.
    private RunJobLimited() {}
}
package it.smartcommunitylab.aac.apim;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.springframework.util.CollectionUtils;

import it.smartcommunitylab.aac.model.ClientAppBasic;

/**
 * DTO describing an OAuth client as exchanged with the API manager (APIM).
 *
 * <p>Collection-valued properties ({@code scope}, {@code redirectUris},
 * grant types in {@code parameters}) are flattened into comma-separated
 * strings via {@link #SEPARATOR}.
 */
public class APIMClient {

    // TODO check with apim
    public static final String SEPARATOR = ",";

    private String clientId;
    private String clientSecret;
    private String clientSecretMobile;
    private String name;
    private String displayName;
    private String redirectUris;
    private Collection<String> grantedTypes;
    private boolean nativeAppsAccess;
    private Map<String, Map<String, Object>> providerConfigurations;
    private String mobileAppSchema;
    private Map<String, Boolean> identityProviders;
    private Map<String, Boolean> identityProviderApproval;
    private String userName;
    private String scope;
    private Map<String, Object> parameters;

    public APIMClient() {
        // Default to an immutable empty map so getParameters() never returns null.
        this.parameters = Collections.emptyMap();
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getClientSecret() {
        return clientSecret;
    }

    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    public String getClientSecretMobile() {
        return clientSecretMobile;
    }

    public void setClientSecretMobile(String clientSecretMobile) {
        this.clientSecretMobile = clientSecretMobile;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDisplayName() {
        return displayName;
    }

    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    public String getRedirectUris() {
        return redirectUris;
    }

    public void setRedirectUris(String redirectUris) {
        this.redirectUris = redirectUris;
    }

    public Collection<String> getGrantedTypes() {
        return grantedTypes;
    }

    public void setGrantedTypes(Collection<String> grantedTypes) {
        this.grantedTypes = grantedTypes;
    }

    public boolean isNativeAppsAccess() {
        return nativeAppsAccess;
    }

    public void setNativeAppsAccess(boolean nativeAppsAccess) {
        this.nativeAppsAccess = nativeAppsAccess;
    }

    public Map<String, Map<String, Object>> getProviderConfigurations() {
        return providerConfigurations;
    }

    public void setProviderConfigurations(Map<String, Map<String, Object>> providerConfigurations) {
        this.providerConfigurations = providerConfigurations;
    }

    public String getMobileAppSchema() {
        return mobileAppSchema;
    }

    public void setMobileAppSchema(String mobileAppSchema) {
        this.mobileAppSchema = mobileAppSchema;
    }

    public Map<String, Boolean> getIdentityProviders() {
        return identityProviders;
    }

    public void setIdentityProviders(Map<String, Boolean> identityProviders) {
        this.identityProviders = identityProviders;
    }

    public Map<String, Boolean> getIdentityProviderApproval() {
        return identityProviderApproval;
    }

    public void setIdentityProviderApproval(Map<String, Boolean> identityProviderApproval) {
        this.identityProviderApproval = identityProviderApproval;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getScope() {
        return scope;
    }

    public void setScope(String scope) {
        this.scope = scope;
    }

    public Map<String, Object> getParameters() {
        return parameters;
    }

    public void setParameters(Map<String, Object> parameters) {
        this.parameters = parameters;
    }

    /*
     * Builders
     */

    /**
     * Builds an {@code APIMClient} from an AAC {@link ClientAppBasic}.
     *
     * <p>Collections are flattened to comma-separated strings; a null or empty
     * collection becomes the empty string (this now also applies to the grant
     * types written into {@code parameters}, which previously would have thrown
     * a NullPointerException for a null collection).
     *
     * @param app the source client application, must not be null
     * @return the populated APIM client representation
     */
    public static APIMClient from(ClientAppBasic app) {
        APIMClient client = new APIMClient();

        // base
        client.name = app.getName();
        client.userName = app.getUserName();
        client.displayName = app.getDisplayName();

        // oauth
        client.clientId = app.getClientId();
        client.clientSecret = app.getClientSecret();
        client.grantedTypes = app.getGrantedTypes();
        client.scope = CollectionUtils.isEmpty(app.getScope())
                ? ""
                : String.join(SEPARATOR, app.getScope());
        client.redirectUris = CollectionUtils.isEmpty(app.getRedirectUris())
                ? ""
                : String.join(SEPARATOR, app.getRedirectUris());

        // deprecated
        client.clientSecretMobile = "";
        client.nativeAppsAccess = false;
        client.mobileAppSchema = app.getMobileAppSchema();

        // apim expects grantTypes in parameters as a string;
        // guard against null/empty like the other collection fields above.
        Map<String, Object> parameters = new HashMap<>();
        String grantTypes = CollectionUtils.isEmpty(app.getGrantedTypes())
                ? ""
                : String.join(SEPARATOR, app.getGrantedTypes());
        parameters.put("grant_types", grantTypes);
        client.parameters = parameters;

        return client;
    }
}
package me.sedlar.asm.visitor.flow;

import me.sedlar.asm.util.Assembly;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.JumpInsnNode;
import org.objectweb.asm.tree.LabelNode;

import java.util.*;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;

/**
 * A tree of {@link ExecutionNode}s built from a {@link ControlFlowGraph},
 * queryable via {@link FlowQuery} predicates.
 *
 * @author Tyler Sedlar
 * @since 2/12/2016
 */
public class ExecutionPath {

    // Matches nodes whose backing instruction is a label / a jump, respectively.
    private static final Predicate<ExecutionNode> LABEL_PRED =
            (e) -> e.source.instruction instanceof LabelNode;
    private static final Predicate<ExecutionNode> JUMP_PRED =
            (e) -> e.source.instruction instanceof JumpInsnNode;

    // Top-level execution nodes, in instruction order.
    private final List<ExecutionNode> nodes = new ArrayList<>();
    // Node lookup by the id assigned by the ControlFlowGraph.
    protected final Map<String, ExecutionNode> idMap = new HashMap<>();

    /**
     * Collects nodes matching the predicate into {@code list}, starting at
     * {@code parent}. When {@code recursive} is false only the direct path
     * members of {@code parent} are tested; when true the search descends
     * into every sub-path.
     */
    private void findAll(ExecutionNode parent, Predicate<ExecutionNode> predicate,
                         List<ExecutionNode> list, boolean recursive) {
        if (predicate.test(parent)) {
            list.add(parent);
        }
        for (List<ExecutionNode> path : parent.paths()) {
            for (ExecutionNode node : path) {
                if (recursive) {
                    findAll(node, predicate, list, true);
                } else {
                    if (predicate.test(node)) {
                        list.add(node);
                    }
                }
            }
        }
    }

    /**
     * Finds all nodes matching the given predicate.
     *
     * @param predicate The predicate to match against.
     * @return A list of all nodes matching the given predicate.
     */
    public List<ExecutionNode> findAll(Predicate<ExecutionNode> predicate) {
        List<ExecutionNode> result = new ArrayList<>();
        for (ExecutionNode node : nodes) {
            findAll(node, predicate, result, true);
        }
        return result;
    }

    /**
     * Finds a list of results matching the given query.
     *
     * @param query The query to match.
     * @return A list of results matching the given query.
     */
    public List<FlowQueryResult> query(FlowQuery query) {
        List<FlowQueryResult> results = new ArrayList<>();
        List<Predicate<ExecutionNode>> predicates = query.predicates();
        if (predicates.isEmpty()) {
            return results;
        }
        // lastMatch holds the nodes that satisfied the previous predicate;
        // previousExecutor links record the chain used to build the result hierarchy.
        List<ExecutionNode> lastMatch = null;
        boolean branching = false;
        FlowQuery.BranchType branchType = null;
        List<ExecutionNode> endings = new ArrayList<>();
        for (int i = 0; i < predicates.size(); i++) {
            Predicate<ExecutionNode> predicate = predicates.get(i);
            List<ExecutionNode> matching;
            if (branching) {
                // The previous step was a branch: gather candidates from the
                // requested branch path(s) of every previously matched node.
                List<ExecutionNode> branchInstructions = new ArrayList<>();
                for (ExecutionNode node : lastMatch) {
                    Consumer<List<ExecutionNode>> consumer = (nodeList -> {
                        branchInstructions.addAll(nodeList);
                        nodeList.forEach(eNode -> eNode.previousExecutor = node);
                    });
                    if (branchType == FlowQuery.BranchType.TRUE) {
                        node.truePath().ifPresent(consumer);
                    } else if (branchType == FlowQuery.BranchType.FALSE) {
                        node.falsePath().ifPresent(consumer);
                    } else {
                        node.paths().forEach(consumer);
                    }
                }
                matching = branchInstructions.stream().filter(predicate::test).collect(Collectors.toList());
            } else {
                if (i == 0) {
                    // First predicate: search the whole tree.
                    matching = findAll(predicate);
                } else {
                    // Subsequent predicate: walk forward from each prior match
                    // within the allowed distance.
                    matching = new ArrayList<>();
                    for (ExecutionNode node : lastMatch) {
                        ExecutionNode result = findNext(node, predicate, query.distAt(i));
                        if (result != null) {
                            boolean loops = query.loopsAt(i);
                            boolean doesNotLoop = query.doesNotLoopAt(i);
                            if (loops || doesNotLoop) {
                                // Find the nearest enclosing label, then check
                                // whether any jump in the parent targets it
                                // (i.e. whether this region loops back).
                                ExecutionNode parent = node.parent;
                                ExecutionNode parentLabel = null;
                                while (parent != null && (parentLabel = findPrevious(parent, LABEL_PRED, 5)) == null) {
                                    parent = parent.parent;
                                }
                                if (parentLabel == null) {
                                    continue;
                                }
                                boolean valid = false;
                                List<ExecutionNode> jumps = new ArrayList<>();
                                findAll(node.parent, JUMP_PRED, jumps, false);
                                if (!jumps.isEmpty()) {
                                    for (ExecutionNode jump : jumps) {
                                        LabelNode label = ((JumpInsnNode) jump.source.instruction).label;
                                        if (label == parentLabel.source.instruction) {
                                            valid = true;
                                        }
                                    }
                                }
                                if (!valid) {
                                    // No back-jump found: accept only if the
                                    // query asked for a non-looping match.
                                    if (doesNotLoop) {
                                        result.previousExecutor = node;
                                        matching.add(result);
                                    }
                                    continue;
                                }
                            }
                            result.previousExecutor = node;
                            matching.add(result);
                        }
                    }
                }
            }
            if (matching.isEmpty()) {
                // A predicate with no matches means the whole query fails.
                return results;
            }
            if (i == (predicates.size() - 1)) {
                endings.addAll(matching);
            }
            lastMatch = matching;
            branching = query.branchesAt(i);
            branchType = query.branchTypeAt(i);
        }
        // Rebuild each match chain from its ending node via previousExecutor links.
        for (ExecutionNode node : endings) {
            List<ExecutionNode> hierarchy = new ArrayList<>();
            hierarchy.add(node);
            while (node.previousExecutor != null) {
                hierarchy.add(node.previousExecutor);
                node = node.previousExecutor;
            }
            Collections.reverse(hierarchy);
            results.add(new FlowQueryResult(query, hierarchy));
        }
        return results;
    }

    /**
     * Walks forward from {@code start} looking for a node matching the
     * predicate within {@code maxDist} steps; returns null if none is found.
     *
     * NOTE(review): {@code jump} is initialized to 0, so the {@code jump == -1}
     * arm can never fire; presumably -1 was meant as an "unlimited distance"
     * sentinel for maxDist — verify against callers.
     */
    private ExecutionNode findNext(ExecutionNode start, Predicate<ExecutionNode> predicate, int maxDist) {
        ExecutionNode node = start;
        int jump = 0;
        while ((node = node.next()) != null && (jump == -1 || jump++ < maxDist)) {
            if (predicate.test(node)) {
                return node;
            }
        }
        return null;
    }

    /**
     * Walks backward from {@code start} looking for a node matching the
     * predicate within {@code maxDist} steps; returns null if none is found.
     * Same NOTE(review) as {@link #findNext} regarding {@code jump == -1}.
     */
    private ExecutionNode findPrevious(ExecutionNode start, Predicate<ExecutionNode> predicate, int maxDist) {
        ExecutionNode node = start;
        int jump = 0;
        while ((node = node.previous()) != null && (jump == -1 || jump++ < maxDist)) {
            if (predicate.test(node)) {
                return node;
            }
        }
        return null;
    }

    // Registers a top-level node and indexes it by its cfg id.
    private void add(ControlFlowGraph cfg, ExecutionNode eNode) {
        nodes.add(eNode);
        idMap.put(cfg.idFor(eNode.source), eNode);
    }

    // Looks up a node by the id assigned by the ControlFlowGraph.
    protected ExecutionNode findById(String id) {
        return idMap.get(id);
    }

    // Recursively prints one node (and its sub-paths) with brace nesting.
    private void print(String prefix, ExecutionNode node) {
        // if (!(node.source.instruction instanceof LabelNode || node.source.instruction instanceof FrameNode)) {
        String label = (prefix + Assembly.toString(node.source.instruction));
        boolean layered = !node.paths().isEmpty();
        if (layered) {
            label += " {";
        }
        System.out.println(label);
        for (List<ExecutionNode> path : node.paths()) {
            System.out.println(prefix + " {");
            for (ExecutionNode subNode : path) {
                print(prefix + " ", subNode);
            }
            System.out.println(prefix + " }");
        }
        if (layered) {
            System.out.println(prefix + "}");
        }
    }

    /** Prints the whole execution tree to stdout (debugging aid). */
    public void printTree() {
        for (ExecutionNode node : nodes) {
            print("", node);
        }
    }

    /**
     * Recursively attaches the successors of {@code eNode} beneath
     * {@code parent}, skipping backward edges and nodes already added.
     * A successor with multiple successors of its own becomes the parent
     * for its subtree (a new branch level).
     */
    private static void addSuccessors(ControlFlowGraph cfg, ExecutionPath path, ExecutionNode eNode,
                                      ExecutionNode parent, List<String> added, List<ExecutionNode> successors) {
        if (eNode.source.backwards) {
            return;
        }
        String nodeId = cfg.idFor(eNode.source);
        if (!added.contains(nodeId)) {
            added.add(nodeId);
            path.idMap.put(nodeId, eNode);
            eNode.source.successors.forEach(subNode -> {
                ExecutionNode eSubNode = new ExecutionNode(path, parent, subNode);
                parent.add(cfg, eSubNode);
                successors.add(eSubNode);
                addSuccessors(cfg, path, eSubNode,
                        (subNode.successors.size() > 1 ? eSubNode : parent), added, successors);
            });
            parent.branch();
        }
    }

    /**
     * Links sibling nodes with next/previous pointers, recursing into every
     * sub-path.
     */
    private static void setNextNodes(List<ExecutionNode> nodes) {
        ExecutionNode previous = null;
        for (ExecutionNode node : nodes) {
            if (previous != null) {
                previous.nextNode = node;
                node.previousNode = previous;
            }
            node.paths().forEach(ExecutionPath::setNextNodes);
            previous = node;
        }
    }

    /**
     * Builds an {@link ExecutionPath} for the given control flow graph by
     * walking the method's instructions in order and expanding branch
     * successors into sub-paths.
     *
     * @param cfg the control flow graph to convert
     * @return the constructed execution path
     */
    public static ExecutionPath build(ControlFlowGraph cfg) {
        ExecutionPath path = new ExecutionPath();
        AbstractInsnNode insn = cfg.method.instructions().getFirst();
        List<String> added = new ArrayList<>();
        while (insn != null) {
            ControlFlowNode node = cfg.nodeFor(insn, false);
            if (node != null) {
                String nodeId = cfg.idFor(node);
                if (path.findById(nodeId) == null) {
                    ExecutionNode eNode = new ExecutionNode(path, null, node);
                    if (eNode.source.successors.size() > 1) {
                        List<ExecutionNode> successors = new ArrayList<>();
                        addSuccessors(cfg, path, eNode, eNode, added, successors);
                    }
                    path.add(cfg, eNode);
                }
            }
            insn = insn.getNext();
        }
        setNextNodes(path.nodes);
        return path;
    }
}
package net.finmath.optimizer;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.Vector;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.logging.Level;
import java.util.logging.Logger;

import net.finmath.functions.LinearAlgebra;

/**
 * This class implements a parallel Levenberg-Marquardt non-linear least-squares fit
 * algorithm.
 *
 * <p>
 * The solver minimizes \( || f ||_{L_{2}} \) for a function \( f:\mathbb{R}^n \rightarrow \mathbb{R}^m \).
 * The solver requires the calculation of a Jacobi-matrix \( J = \frac{\mathrm{d}f}{\mathrm{d}x} \). The iteration steps
 * are then defined by
 * \[
 * 	\Delta x = H_{\lambda}^{-1} J^T f
 * \]
 * where \( H_{\lambda} \) is a regularized approximation of the Hessian matrix.
 * The solver supports two different regularizations. For <code>RegularizationMethod.LEVENBERG</code> the solver uses
 * \( H_{\lambda} = J^T J + \lambda I \). For <code>RegularizationMethod.LEVENBERG_MARQUARDT</code> the solver uses
 * \( H_{\lambda} = J^T J + \lambda \text{diag}(J^T J) \).
 * </p>
 *
 * <p>
 * The design avoids the need to define the objective function as a
 * separate class. The objective function is defined by overriding a class
 * method, see the sample code below.
 * </p>
 *
 * <p>
 * The Levenberg-Marquardt solver is implemented using multi-threading.
 * The calculation of the derivatives (in case a specific implementation of
 * {@code setDerivatives(double[] parameters, double[][] derivatives)} is not
 * provided) may be performed in parallel by setting the parameter <code>numberOfThreads</code>.
 * </p>
 *
 * <p>
 * To use the solver inherit from it and implement the objective function as
 * {@code setValues(double[] parameters, double[] values)} where values has
 * to be set to the value of the objective functions for the given parameters.
 * <br>
 * You may also provide a derivative for your objective function by
 * additionally overriding the function {@code setDerivatives(double[] parameters, double[][] derivatives)},
 * otherwise the solver will calculate the derivative via finite differences.
 * </p>
 *
 * <p>
 * To reject a point, it is allowed to set an element of <code>values</code> to {@link java.lang.Double#NaN}
 * in the implementation of {@code setValues(double[] parameters, double[] values)}.
 * Put differently: The solver handles NaN values in <code>values</code> as an error larger than
 * the current one (regardless of the current error) and rejects the point.
 * <br>
 * Note, however, that it is an error if the initial parameter guess results in an NaN value.
 * That is, the solver should be initialized with an initial parameter in an admissible region.
 * </p>
 *
 * The following simple example finds a solution for the equation <br>
 * <table>
 * <caption>Sample linear system of equations.</caption>
 * <tr><td>
 * 0.0 * x<sub>1</sub> + 1.0 * x<sub>2</sub> = 5.0
 * </td></tr>
 * <tr><td>
 * 2.0 * x<sub>1</sub> + 1.0 * x<sub>2</sub> = 10.0
 * </td></tr>
 * </table>
 *
 * <pre>
 * <code>
 * 	LevenbergMarquardt optimizer = new LevenbergMarquardt() {
 * 		// Override your objective function here
 * 		public void setValues(double[] parameters, double[] values) {
 * 			values[0] = parameters[0] * 0.0 + parameters[1];
 * 			values[1] = parameters[0] * 2.0 + parameters[1];
 * 		}
 * 	};
 *
 * 	// Set solver parameters
 * 	optimizer.setInitialParameters(new double[] { 0, 0 });
 * 	optimizer.setWeights(new double[] { 1, 1 });
 * 	optimizer.setMaxIteration(100);
 * 	optimizer.setTargetValues(new double[] { 5, 10 });
 *
 * 	optimizer.run();
 *
 * 	double[] bestParameters = optimizer.getBestFitParameters();
 * </code>
 * </pre>
 *
 * See the example in the main method below.
 *
 * <p>
 * The class can be initialized to use a multi-threaded valuation. If initialized
 * this way the implementation of <code>setValues</code> must be thread-safe.
 * The solver will evaluate the gradient of the value vector in parallel, i.e.,
 * use as many threads as the number of parameters.
 * </p>
 *
 * Note: Iteration steps will be logged (java.util.logging) with LogLevel.FINE
 *
 * @author Christian Fries
 * @version 1.6
 */
public abstract class LevenbergMarquardt implements Serializable, Cloneable, Optimizer {

	private static final long serialVersionUID = 4560864869394838155L;

	/**
	 * The regularization method used to invert the approximation of the
	 * Hessian matrix.
	 *
	 * @author Christian Fries
	 */
	public enum RegularizationMethod {
		/**
		 * The Hessian approximated and regularized as
		 * \( H_{\lambda} = J^T J + \lambda I \).
		 */
		LEVENBERG,

		/**
		 * The Hessian approximated and regularized as
		 * \( H_{\lambda} = J^T J + \lambda \text{diag}(J^T J) \).
		 */
		LEVENBERG_MARQUARDT
	}

	private final RegularizationMethod regularizationMethod;

	// Solver configuration, set via the fluent setters before run().
	private double[] initialParameters = null;
	private double[] parameterSteps = null;
	private double[] targetValues = null;
	private double[] weights = null;

	private int maxIteration = 100;

	// Damping parameter lambda and the factors by which it is adapted.
	private double lambda = 0.001;
	private double lambdaDivisor = 3.0;
	private double lambdaMultiplicator = 2.0;

	private double errorRootMeanSquaredTolerance = 0.0; // by default we solve up to machine precision

	private int iteration = 0;

	// Per-iteration candidate state.
	private double[] parameterTest = null;
	private double[] parameterIncrement = null;
	private double[] valueTest = null;

	// Best-so-far state.
	private double[] parameterCurrent = null;
	private double[] valueCurrent = null;
	private double[][] derivativeCurrent = null;

	private double errorMeanSquaredCurrent = Double.POSITIVE_INFINITY;
	private double errorRootMeanSquaredChange = Double.POSITIVE_INFINITY;

	private boolean isParameterCurrentDerivativeValid = false;

	// These members will be updated in each iteration. These are members to prevent repeated memory allocation.
	private double[][] hessianMatrix = null;
	private double[] beta = null;

	/*
	 * Used for multi-threaded calculation of the derivative.
	 * The user may provide its own executor. If not and numberOfThreads > 1
	 * we will temporarily create an executor with the specified number of threads.
	 * Note: If an executor was provided upon construction, it will not receive a shutdown when done.
	 */
	private int numberOfThreads = 1;
	private ExecutorService executor = null;
	private boolean executorShutdownWhenDone = true;

	private final Logger logger = Logger.getLogger("net.finmath");

	// A simple test
	public static void main(final String[] args) throws SolverException, CloneNotSupportedException {

		final LevenbergMarquardt optimizer = new LevenbergMarquardt() {
			private static final long serialVersionUID = -282626938650139518L;

			// Override your objective function here
			@Override
			public void setValues(final double[] parameters, final double[] values) {
				values[0] = parameters[0] * 0.0 + parameters[1];
				values[1] = parameters[0] * 2.0 + parameters[1];
			}
		};

		// Set solver parameters
		optimizer.setInitialParameters(new double[] { 0, 0 });
		optimizer.setWeights(new double[] { 1, 1 });
		optimizer.setMaxIteration(100);
		optimizer.setTargetValues(new double[] { 5, 10 });

		optimizer.run();

		final double[] bestParameters = optimizer.getBestFitParameters();
		System.out.println("The solver for problem 1 required " + optimizer.getIterations() + " iterations. The best fit parameters are:");
		for (int i = 0; i < bestParameters.length; i++) {
			System.out.println("\tparameter[" + i + "]: " + bestParameters[i]);
		}

		/*
		 * Creating a clone, continuing the search with new target values.
		 * Note that we do not re-define the setValues method.
		 */
		final Optimizer optimizer2 = optimizer.getCloneWithModifiedTargetValues(new double[] { 5.1, 10.2 }, new double[] { 1, 1 }, true);
		optimizer2.run();

		final double[] bestParameters2 = optimizer2.getBestFitParameters();
		System.out.println("The solver for problem 2 required " + optimizer2.getIterations() + " iterations. The best fit parameters are:");
		for (int i = 0; i < bestParameters2.length; i++) {
			System.out.println("\tparameter[" + i + "]: " + bestParameters2[i]);
		}
	}

	/**
	 * Create a Levenberg-Marquardt solver.
	 *
	 * @param regularizationMethod The regularization method to use. See {@link RegularizationMethod}.
	 * @param initialParameters Initial value for the parameters where the solver starts its search.
	 * @param targetValues Target values to achieve.
	 * @param maxIteration Maximum number of iterations.
	 * @param executorService Executor to be used for concurrent valuation of the derivatives. This is only performed if setDerivative is not overwritten. <i>Warning</i>: The implementation of setValues has to be thread safe!
	 */
	public LevenbergMarquardt(final RegularizationMethod regularizationMethod, final double[] initialParameters, final double[] targetValues, final int maxIteration, final ExecutorService executorService) {
		super();
		this.regularizationMethod = regularizationMethod;
		this.initialParameters	= initialParameters;
		this.targetValues		= targetValues;
		this.maxIteration		= maxIteration;

		// Default weights: all ones.
		weights	= new double[targetValues.length];
		java.util.Arrays.fill(weights, 1.0);

		executor = executorService;
		// Only shut down executors we create ourselves (i.e. when none was provided).
		executorShutdownWhenDone = (executorService == null);
		numberOfThreads = 1;
	}

	/**
	 * Create a Levenberg-Marquardt solver.
	 *
	 * @param initialParameters Initial value for the parameters where the solver starts its search.
	 * @param targetValues Target values to achieve.
	 * @param maxIteration Maximum number of iterations.
	 * @param executorService Executor to be used for concurrent valuation of the derivatives. This is only performed if setDerivative is not overwritten. <i>Warning</i>: The implementation of setValues has to be thread safe!
	 */
	public LevenbergMarquardt(final double[] initialParameters, final double[] targetValues, final int maxIteration, final ExecutorService executorService) {
		this(RegularizationMethod.LEVENBERG_MARQUARDT, initialParameters, targetValues, maxIteration, executorService);
	}

	/**
	 * Create a Levenberg-Marquardt solver.
	 *
	 * @param regularizationMethod The regularization method to use. See {@link RegularizationMethod}.
	 * @param initialParameters Initial value for the parameters where the solver starts its search.
	 * @param targetValues Target values to achieve.
	 * @param maxIteration Maximum number of iterations.
	 * @param numberOfThreads Maximum number of threads. <i>Warning</i>: If this number is larger than one, the implementation of setValues has to be thread safe!
	 */
	public LevenbergMarquardt(final RegularizationMethod regularizationMethod, final double[] initialParameters, final double[] targetValues, final int maxIteration, final int numberOfThreads) {
		this(regularizationMethod, initialParameters, targetValues, maxIteration, null);

		this.numberOfThreads = numberOfThreads;
	}

	/**
	 * Create a Levenberg-Marquardt solver.
	 *
	 * @param initialParameters Initial value for the parameters where the solver starts its search.
	 * @param targetValues Target values to achieve.
	 * @param maxIteration Maximum number of iterations.
	 * @param numberOfThreads Maximum number of threads. <i>Warning</i>: If this number is larger than one, the implementation of setValues has to be thread safe!
	 */
	public LevenbergMarquardt(final double[] initialParameters, final double[] targetValues, final int maxIteration, final int numberOfThreads) {
		this(RegularizationMethod.LEVENBERG_MARQUARDT, initialParameters, targetValues, maxIteration, numberOfThreads);
	}

	/**
	 * Create a Levenberg-Marquardt solver.
	 *
	 * @param initialParameters List of initial values for the parameters where the solver starts its search.
	 * @param targetValues List of target values to achieve.
	 * @param maxIteration Maximum number of iterations.
	 * @param executorService Executor to be used for concurrent valuation of the derivatives. This is only performed if setDerivative is not overwritten. <i>Warning</i>: The implementation of setValues has to be thread safe!
	 */
	public LevenbergMarquardt(final List<Number> initialParameters, final List<Number> targetValues, final int maxIteration, final ExecutorService executorService) {
		this(numberListToDoubleArray(initialParameters), numberListToDoubleArray(targetValues), maxIteration, executorService);
	}

	/**
	 * Create a Levenberg-Marquardt solver.
	 *
	 * @param initialParameters Initial value for the parameters where the solver starts its search.
	 * @param targetValues Target values to achieve.
	 * @param maxIteration Maximum number of iterations.
	 * @param numberOfThreads Maximum number of threads. <i>Warning</i>: If this number is larger than one, the implementation of setValues has to be thread safe!
	 */
	public LevenbergMarquardt(final List<Number> initialParameters, final List<Number> targetValues, final int maxIteration, final int numberOfThreads) {
		this(initialParameters, targetValues, maxIteration, null);

		this.numberOfThreads = numberOfThreads;
	}

	/**
	 * Create a Levenberg-Marquardt solver.
	 */
	public LevenbergMarquardt() {
		super();
		regularizationMethod = RegularizationMethod.LEVENBERG_MARQUARDT;
	}

	/**
	 * Convert a list of numbers to an array of doubles.
	 *
	 * @param listOfNumbers A list of numbers.
	 * @return A corresponding array of doubles executing <code>doubleValue()</code> on each element.
	 */
	private static double[] numberListToDoubleArray(final List<Number> listOfNumbers) {
		final double[] arrayOfDoubles	= new double[listOfNumbers.size()];
		for(int i=0; i<arrayOfDoubles.length; i++) {
			arrayOfDoubles[i] = listOfNumbers.get(i).doubleValue();
		}

		return arrayOfDoubles;
	}

	/**
	 * Create a Levenberg-Marquardt solver.
	 *
	 * @param numberOfThreads Maximum number of threads. <i>Warning</i>: If this number is larger than one, the implementation of setValues has to be thread safe!
	 */
	public LevenbergMarquardt(final int numberOfThreads) {
		super();
		regularizationMethod = RegularizationMethod.LEVENBERG_MARQUARDT;
		this.numberOfThreads = numberOfThreads;
	}

	/**
	 * Set the initial parameters for the solver.
	 *
	 * @param initialParameters The initial parameters.
	 * @return A self reference.
	 */
	public LevenbergMarquardt setInitialParameters(final double[] initialParameters) {
		if(done()) {
			throw new UnsupportedOperationException("Solver cannot be modified after it has run.");
		}
		this.initialParameters = initialParameters;
		return this;
	}

	/**
	 * Set the parameter step for the solver.
	 * The parameter step is used to evaluate the derivatives via
	 * finite differences, if analytic derivatives are not provided.
	 *
	 * @param parameterSteps The parameter step.
	 * @return A self reference.
	 */
	public LevenbergMarquardt setParameterSteps(final double[] parameterSteps) {
		if(done()) {
			throw new UnsupportedOperationException("Solver cannot be modified after it has run.");
		}
		this.parameterSteps = parameterSteps;
		return this;
	}

	/**
	 * Set the target values for the solver. The solver will solve the
	 * equation weights * objectiveFunction = targetValues.
	 *
	 * @param targetValues The target values.
	 * @return A self reference.
	 */
	public LevenbergMarquardt setTargetValues(final double[] targetValues) {
		if(done()) {
			throw new UnsupportedOperationException("Solver cannot be modified after it has run.");
		}
		this.targetValues = targetValues;
		return this;
	}

	/**
	 * Set the maximum number of iterations to be performed until the solver
	 * gives up.
	 *
	 * @param maxIteration The maximum number of iterations.
	 * @return A self reference.
	 */
	public LevenbergMarquardt setMaxIteration(final int maxIteration) {
		if(done()) {
			throw new UnsupportedOperationException("Solver cannot be modified after it has run.");
		}
		this.maxIteration = maxIteration;
		return this;
	}

	/**
	 * Set the weight for the objective function.
	 *
	 * @param weights The weights for the objective function.
	 * @return A self reference.
	 */
	public LevenbergMarquardt setWeights(final double[] weights) {
		if(done()) {
			throw new UnsupportedOperationException("Solver cannot be modified after it has run.");
		}
		this.weights = weights;
		return this;
	}

	/**
	 * Set the error tolerance. The solver considers the solution "found"
	 * if the error is not improving by this given error tolerance.
	 *
	 * @param errorTolerance The error tolerance.
	 * @return A self reference.
	 */
	public LevenbergMarquardt setErrorTolerance(final double errorTolerance) {
		if(done()) {
			throw new UnsupportedOperationException("Solver cannot be modified after it has run.");
		}
		errorRootMeanSquaredTolerance = errorTolerance;
		return this;
	}

	/**
	 * Get the parameter &lambda; used in the Tikhonov-like regularization of the Hessian matrix,
	 * that is the \( \lambda \) in \( H + \lambda \text{diag} H \).
	 *
	 * @return the parameter \( \lambda \).
	 */
	public double getLambda() {
		return lambda;
	}

	/**
	 * Set the parameter &lambda; used in the Tikhonov-like regularization of the Hessian matrix,
	 * that is the \( \lambda \) in \( H + \lambda \text{diag} H \).
	 *
	 * @param lambda the lambda to set
	 * @return Self reference to this optimizer.
	 */
	public LevenbergMarquardt setLambda(final double lambda) {
		this.lambda = lambda;
		return this;
	}

	/**
	 * Get the multiplicator applied to lambda if the inversion of regularized
	 * Hessian fails, that is, if \( H + \lambda \text{diag} H \) is not invertible.
	 *
	 * @return the lambdaMultiplicator
	 */
	public double getLambdaMultiplicator() {
		return lambdaMultiplicator;
	}

	/**
	 * Set the multiplicator applied to lambda if the inversion of regularized
	 * Hessian fails, that is, if \( H + \lambda \text{diag} H \) is not invertible.
	 *
	 * This will make lambda larger, hence let the stepping move slower.
	 *
	 * @param lambdaMultiplicator the lambdaMultiplicator to set. Should be &gt; 1.
	 */
	public void setLambdaMultiplicator(final double lambdaMultiplicator) {
		if(lambdaMultiplicator <= 1.0) {
			throw new IllegalArgumentException("Parameter lambdaMultiplicator is required to be > 1.");
		}
		this.lambdaMultiplicator = lambdaMultiplicator;
	}

	/**
	 * Get the divisor applied to lambda (for the next iteration) if the inversion of regularized
	 * Hessian succeeds, that is, if \( H + \lambda \text{diag} H \) is invertible.
	 *
	 * @return the lambdaDivisor
	 */
	public double getLambdaDivisor() {
		return lambdaDivisor;
	}

	/**
	 * Set the divisor applied to lambda (for the next iteration) if the inversion of regularized
	 * Hessian succeeds, that is, if \( H + \lambda \text{diag} H \) is invertible.
	 *
	 * This will make lambda smaller, hence let the stepping move faster.
	 *
	 * @param lambdaDivisor the lambdaDivisor to set. Should be &gt; 1.
	 */
	public void setLambdaDivisor(final double lambdaDivisor) {
		if(lambdaDivisor <= 1.0) {
			throw new IllegalArgumentException("Parameter lambdaDivisor is required to be > 1.");
		}
		this.lambdaDivisor = lambdaDivisor;
	}

	@Override
	public double[] getBestFitParameters() {
		return parameterCurrent;
	}

	@Override
	public double getRootMeanSquaredError() {
		return Math.sqrt(errorMeanSquaredCurrent);
	}

	@Override
	public int getIterations() {
		return iteration;
	}

	/**
	 * The objective function. Override this method to implement your custom
	 * function.
	 *
	 * @param parameters Input value. The parameter vector.
	 * @param values Output value. The vector of values f(i,parameters), i=1,...,n
	 * @throws SolverException Thrown if the valuation fails, specific cause may be available via the <code>cause()</code> method.
	 */
	public abstract void setValues(double[] parameters, double[] values) throws SolverException;

	/**
	 * The derivative of the objective function. You may override this method
	 * if you like to implement your own derivative.
	 *
	 * @param parameters Input value. The parameter vector.
	 * @param derivatives Output value, where derivatives[i][j] is d(value(j)) / d(parameters(i))
	 * @throws SolverException Thrown if the valuation fails, specific cause may be available via the <code>cause()</code> method.
	 */
	public void setDerivatives(final double[] parameters, final double[][] derivatives) throws SolverException {
		// Calculate new derivatives. Note that this method is called only with
		// parameters = parameterCurrent, so we may use valueCurrent.

		final Vector<Future<double[]>> valueFutures = new Vector<>(parameterCurrent.length);
		for (int parameterIndex = 0; parameterIndex < parameterCurrent.length; parameterIndex++) {
			final double[] parametersNew	= parameters.clone();
			final double[] derivative		= derivatives[parameterIndex];

			final int workerParameterIndex = parameterIndex;
			final Callable<double[]> worker = new  Callable<double[]>() {
				@Override
				public double[] call() {
					double parameterFiniteDifference;
					if(parameterSteps != null) {
						parameterFiniteDifference = parameterSteps[workerParameterIndex];
					}
					else {
						/*
						 * Try to adaptively set a parameter shift. Note that in some
						 * applications it may be important to set parameterSteps.
						 * appropriately.
						 */
						parameterFiniteDifference = (Math.abs(parametersNew[workerParameterIndex]) + 1) * 1E-8;
					}

					// Shift parameter value
					parametersNew[workerParameterIndex] += parameterFiniteDifference;

					// Calculate derivative as (valueUpShift - valueCurrent) / parameterFiniteDifference
					try {
						setValues(parametersNew, derivative);
					} catch (final Exception e) {
						// NOTE(review): e.getStackTrace() concatenated into a String logs
						// the array's toString(), not the trace — consider
						// logger.log(Level.SEVERE, msg, e) instead. Message text left
						// unchanged here (it is runtime output).
						logger.severe("Valuation failed with exeption " + e.getMessage() + "\n" + e.getStackTrace());
						// We signal an exception to calculate the derivative as NaN
						Arrays.fill(derivative, Double.NaN);
					}
					for (int valueIndex = 0; valueIndex < valueCurrent.length; valueIndex++) {
						derivative[valueIndex] -= valueCurrent[valueIndex];
						derivative[valueIndex] /= parameterFiniteDifference;
						// NaN derivative components are zeroed, effectively freezing
						// that direction for the affected value.
						if(Double.isNaN(derivative[valueIndex])) {
							derivative[valueIndex] = 0.0;
						}
					}

					return derivative;
				}
			};
			if(executor != null) {
				final Future<double[]> valueFuture = executor.submit(worker);
				valueFutures.add(parameterIndex, valueFuture);
			}
			else {
				// No executor: evaluate synchronously on the calling thread.
				final FutureTask<double[]> valueFutureTask = new FutureTask<>(worker);
				valueFutureTask.run();
				valueFutures.add(parameterIndex, valueFutureTask);
			}
		}
		for (int parameterIndex = 0; parameterIndex < parameterCurrent.length; parameterIndex++) {
			try {
				derivatives[parameterIndex] = valueFutures.get(parameterIndex).get();
			} catch (final
InterruptedException | ExecutionException e) { throw new SolverException(e); } } } /** * You may override this method to implement a custom stop condition. * * @return Stop condition. */ boolean done() { // The solver terminates if... return // Maximum number of iterations is reached (iteration > maxIteration) || // Error does not improve by more that the given error tolerance (errorRootMeanSquaredChange <= errorRootMeanSquaredTolerance) || /* * Lambda is infinite, i.e., no new point is acceptable. * For example, this may happen if setValue repeatedly give contains invalid (NaN) values. */ Double.isInfinite(lambda); } @Override public void run() throws SolverException { // Create an executor for concurrent evaluation of derivatives if(numberOfThreads > 1) { if(executor == null) { executor = Executors.newFixedThreadPool(numberOfThreads); executorShutdownWhenDone = true; } } try { // Allocate memory final int numberOfParameters = initialParameters.length; final int numberOfValues = targetValues.length; parameterTest = initialParameters.clone(); parameterIncrement = new double[numberOfParameters]; parameterCurrent = new double[numberOfParameters]; valueTest = new double[numberOfValues]; valueCurrent = new double[numberOfValues]; derivativeCurrent = new double[parameterCurrent.length][valueCurrent.length]; hessianMatrix = new double[parameterCurrent.length][parameterCurrent.length]; beta = new double[parameterCurrent.length]; iteration = 0; while(true) { // Count iterations iteration++; // Calculate values for test parameters setValues(parameterTest, valueTest); // Calculate error final double errorMeanSquaredTest = getMeanSquaredError(valueTest); /* * Note: The following test will be false if errorMeanSquaredTest is NaN. * That is: NaN is consider as a rejected point. 
*/ if(errorMeanSquaredTest < errorMeanSquaredCurrent) { errorRootMeanSquaredChange = Math.sqrt(errorMeanSquaredCurrent) - Math.sqrt(errorMeanSquaredTest); // Accept point System.arraycopy(parameterTest, 0, parameterCurrent, 0, parameterCurrent.length); System.arraycopy(valueTest, 0, valueCurrent, 0, valueCurrent.length); errorMeanSquaredCurrent = errorMeanSquaredTest; // Derivative has to be recalculated isParameterCurrentDerivativeValid = false; // Decrease lambda (move faster) lambda /= lambdaDivisor; } else { errorRootMeanSquaredChange = Math.sqrt(errorMeanSquaredTest) - Math.sqrt(errorMeanSquaredCurrent); // Reject point, increase lambda (move slower) lambda *= lambdaMultiplicator; } // Update a new parameter trial, if we are not done if (!done()) { updateParameterTest(); } else { break; } // Log iteration if (logger.isLoggable(Level.FINE)) { String logString = "Iteration: " + iteration + "\tLambda=" + lambda + "\tError Current (RMS):" + Math.sqrt(errorMeanSquaredCurrent) + "\tError Change:" + errorRootMeanSquaredChange + "\t"; for (int i = 0; i < parameterCurrent.length; i++) { logString += "[" + i + "] = " + parameterCurrent[i] + "\t"; } logger.fine(logString); } } } finally { // Shutdown executor if present. if(executor != null && executorShutdownWhenDone) { executor.shutdown(); executor = null; } } } public double getMeanSquaredError(final double[] value) { double error = 0.0; for (int valueIndex = 0; valueIndex < value.length; valueIndex++) { final double deviation = value[valueIndex] - targetValues[valueIndex]; error += weights[valueIndex] * deviation * deviation; } return error/value.length; } /** * Calculate a new parameter guess. * * @throws SolverException Thrown if the valuation fails, specific cause may be available via the <code>cause()</code> method. 
 */
private void updateParameterTest() throws SolverException {
	// Lazily (re-)compute the Jacobian at parameterCurrent; it is invalidated
	// whenever a new point is accepted in run().
	if (!isParameterCurrentDerivativeValid) {
		this.setDerivatives(parameterCurrent, derivativeCurrent);
		isParameterCurrentDerivativeValid = true;
	}

	boolean hessianInvalid = true;
	while (hessianInvalid) {
		hessianInvalid = false;

		// Build matrix H (Hessian approximation J^T W J, with regularized diagonal)
		for (int i = 0; i < parameterCurrent.length; i++) {
			for (int j = i; j < parameterCurrent.length; j++) {
				double alphaElement = 0.0;
				for (int valueIndex = 0; valueIndex < valueCurrent.length; valueIndex++) {
					alphaElement += weights[valueIndex] * derivativeCurrent[i][valueIndex] * derivativeCurrent[j][valueIndex];
				}
				if (i == j) {
					if(regularizationMethod == RegularizationMethod.LEVENBERG) {
						// RegularizationMethod.LEVENBERG - Regularization with a constant lambda
						alphaElement += lambda;
					}
					else {
						// RegularizationMethod.LEVENBERG_MARQUARDT - Regularization with lambda times the diagonal of JTJ
						if (alphaElement == 0.0) {
							// Zero diagonal would make H singular; substitute lambda itself.
							alphaElement = lambda;
						}
						else {
							alphaElement *= 1 + lambda;
						}
					}
				}

				// H is symmetric; fill both triangles.
				hessianMatrix[i][j] = alphaElement;
				hessianMatrix[j][i] = alphaElement;
			}
		}

		// Build beta (right-hand side of the Newton step: J^T W (target - value))
		for (int i = 0; i < parameterCurrent.length; i++) {
			double betaElement = 0.0;
			final double[] derivativeCurrentSingleParam = derivativeCurrent[i];
			for (int k = 0; k < valueCurrent.length; k++) {
				betaElement += weights[k] * (targetValues[k] - valueCurrent[k]) * derivativeCurrentSingleParam[k];
			}
			beta[i] = betaElement;
		}

		try {
			// Calculate new increment
			// parameterIncrement = LinearAlgebra.solveLinearEquationLeastSquare(hessianMatrix, beta);
			parameterIncrement = LinearAlgebra.solveLinearEquationSymmetric(hessianMatrix, beta);
		} catch (final Exception e) {
			// Inversion failed: re-regularize with a (hard-coded) larger lambda and retry.
			// NOTE(review): if the solve keeps failing, lambda grows by *16 each pass and this
			// loop only ends once lambda overflows — confirm this termination path is intended.
			hessianInvalid = true;
			lambda *= 16;
		}
	}

	// Calculate new parameter: test point = current point + increment
	for (int i = 0; i < parameterCurrent.length; i++) {
		parameterTest[i] = parameterCurrent[i] + parameterIncrement[i];
	}
}

/**
 * Create a clone of this LevenbergMarquardt optimizer.
 *
 * The clone will use the same objective function as this implementation,
 * i.e., the implementation of {@link #setValues(double[], double[])} and
 * that of {@link #setDerivatives(double[], double[][])} is reused.
 */
@Override
public LevenbergMarquardt clone() throws CloneNotSupportedException {
	final LevenbergMarquardt clonedOptimizer = (LevenbergMarquardt)super.clone();
	// Reset the solver state so that the clone can be configured and run from scratch.
	clonedOptimizer.isParameterCurrentDerivativeValid = false;
	clonedOptimizer.iteration = 0;
	clonedOptimizer.errorMeanSquaredCurrent = Double.POSITIVE_INFINITY;
	clonedOptimizer.errorRootMeanSquaredChange = Double.POSITIVE_INFINITY;
	return clonedOptimizer;
}

/**
 * Create a clone of this LevenbergMarquardt optimizer with a new vector for the
 * target values and weights.
 *
 * The clone will use the same objective function as this implementation,
 * i.e., the implementation of {@link #setValues(double[], double[])} and
 * that of {@link #setDerivatives(double[], double[][])} is reused.
 *
 * The initial values of the cloned optimizer will either be the original
 * initial values of this object or the best parameters obtained by this
 * optimizer; the latter is used only if this optimizer signals {@link #done()}.
 *
 * @param newTargetVaues New array of target values. (NOTE(review): parameter name is misspelled in the code; kept for interface stability.)
 * @param newWeights New array of weights.
 * @param isUseBestParametersAsInitialParameters If true and this optimizer is done(), then the clone will use this.{@link #getBestFitParameters()} as initial parameters.
 * @return A new LevenbergMarquardt optimizer, cloning this one except modified target values and weights.
 * @throws CloneNotSupportedException Thrown if this optimizer cannot be cloned.
 */
public LevenbergMarquardt getCloneWithModifiedTargetValues(final double[] newTargetVaues, final double[] newWeights, final boolean isUseBestParametersAsInitialParameters) throws CloneNotSupportedException {
	final LevenbergMarquardt clonedOptimizer = clone();
	clonedOptimizer.targetValues = newTargetVaues.clone();		// Defensive copy
	clonedOptimizer.weights = newWeights.clone();				// Defensive copy

	if(isUseBestParametersAsInitialParameters && this.done()) {
		clonedOptimizer.initialParameters = this.getBestFitParameters();
	}

	return clonedOptimizer;
}

/**
 * Create a clone of this LevenbergMarquardt optimizer with a new vector for the
 * target values and weights.
 *
 * The clone will use the same objective function as this implementation,
 * i.e., the implementation of {@link #setValues(double[], double[])} and
 * that of {@link #setDerivatives(double[], double[][])} is reused.
 *
 * The initial values of the cloned optimizer will either be the original
 * initial values of this object or the best parameters obtained by this
 * optimizer; the latter is used only if this optimizer signals {@link #done()}.
 *
 * @param newTargetVaues New list of target values.
 * @param newWeights New list of weights.
 * @param isUseBestParametersAsInitialParameters If true and this optimizer is done(), then the clone will use this.{@link #getBestFitParameters()} as initial parameters.
 * @return A new LevenbergMarquardt optimizer, cloning this one except modified target values and weights.
 * @throws CloneNotSupportedException Thrown if this optimizer cannot be cloned.
 */
public LevenbergMarquardt getCloneWithModifiedTargetValues(final List<Number> newTargetVaues, final List<Number> newWeights, final boolean isUseBestParametersAsInitialParameters) throws CloneNotSupportedException {
	final LevenbergMarquardt clonedOptimizer = clone();
	clonedOptimizer.targetValues = numberListToDoubleArray(newTargetVaues);
	clonedOptimizer.weights = numberListToDoubleArray(newWeights);

	if(isUseBestParametersAsInitialParameters && this.done()) {
		clonedOptimizer.initialParameters = this.getBestFitParameters();
	}

	return clonedOptimizer;
}
}
package net.openhft.chronicle.network;

import net.openhft.chronicle.bytes.RandomDataInput;
import net.openhft.chronicle.core.Jvm;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;

/**
 * Debug-only helper that logs socket traffic. All output is suppressed unless
 * the JVM runs in debug mode AND the logger has DEBUG enabled.
 */
public class NetworkLog {
    private static final Logger LOG = LoggerFactory.getLogger(NetworkLog.class.getName());

    // "<op> <localPort> <remotePort>" — prefixes every log line.
    @NotNull
    private final String desc;
    // Timestamp of the last idle message; throttles idle() to one line per ~2s.
    private long lastOut = System.currentTimeMillis();

    public NetworkLog(@NotNull SocketChannel channel, String op) throws IOException {
        final int localPort = ((InetSocketAddress) channel.getLocalAddress()).getPort();
        final int remotePort = ((InetSocketAddress) channel.getRemoteAddress()).getPort();
        this.desc = op + " " + localPort + " " + remotePort;
    }

    /** Emits a throttled "idle" line (at most once every 2 seconds). */
    public void idle() {
        if (!Jvm.isDebug() || !LOG.isDebugEnabled())
            return;

        final long now = System.currentTimeMillis();
        if (now - lastOut <= 2000)
            return;

        lastOut = now;
        LOG.debug(desc + " idle");
    }

    /** Logs the byte range [start, end) of the buffer, if debug logging is active. */
    public void log(@NotNull ByteBuffer bytes, int start, int end) {
        if (!Jvm.isDebug() || !LOG.isDebugEnabled())
            return;

        // avoid inlining this.
        log0(bytes, start, end);
    }

    public void log0(@NotNull ByteBuffer bytes, int start, int end) {
        final StringBuilder message = new StringBuilder(desc);
        message.append(" len: ").append(end - start)
                .append(" - ");

        if (end - start > 128) {
            // Long payloads: show the first and last 64 bytes with an ellipsis between.
            appendRange(bytes, message, start, start + 64);
            message.append(" ... ");
            appendRange(bytes, message, end - 64, end);
        } else {
            appendRange(bytes, message, start, end);
        }

        LOG.debug(message.toString());
    }

    /** Appends a printable representation of bytes [from, to) to the builder. */
    private void appendRange(@NotNull ByteBuffer bytes, @NotNull StringBuilder sb, int from, int to) {
        for (int i = from; i < to; i++)
            sb.append(RandomDataInput.charToString[bytes.get(i) & 0xFF]);
    }
}
package net.sourceforge.schemaspy.model;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.sourceforge.schemaspy.Config;
import net.sourceforge.schemaspy.model.xml.SchemaMeta;
import net.sourceforge.schemaspy.model.xml.TableMeta;
import net.sourceforge.schemaspy.util.CaseInsensitiveMap;

/**
 * In-memory model of the database being analyzed: its tables, views and
 * remote (cross-schema) tables, discovered via JDBC metadata and/or
 * database-specific SQL from the .properties configuration.
 */
public class Database {
    private final String databaseName;
    private final String schema;
    private String description;
    // Case-insensitive maps: identifier case conventions differ between databases.
    private final Map<String, Table> tables = new CaseInsensitiveMap<Table>();
    private final Map<String, View> views = new CaseInsensitiveMap<View>();
    private final Map<String, Table> remoteTables = new CaseInsensitiveMap<Table>(); // key: schema.tableName  value: RemoteTable
    private final DatabaseMetaData meta;
    private final Connection connection;
    // Human-readable timestamp of when this analysis run connected.
    private final String connectTime = new SimpleDateFormat("EEE MMM dd HH:mm z yyyy").format(new Date());
    private Set<String> sqlKeywords;
    private Pattern invalidIdentifierPattern;
    private final Logger logger = Logger.getLogger(getClass().getName());
    // Cached once at construction: whether FINE-level logging is enabled.
    private final boolean fineEnabled = logger.isLoggable(Level.FINE);

    /**
     * Builds the complete model: reads tables/views, check constraints, ids and
     * comments, connects foreign keys and finally applies XML-provided metadata.
     */
    public Database(Config config, Connection connection, DatabaseMetaData meta, String name, String schema, Properties properties, SchemaMeta schemaMeta) throws SQLException, MissingResourceException {
        this.connection = connection;
        this.meta = meta;
        databaseName = name;
        this.schema = schema;
        description = config.getDescription();

        initTables(meta, properties, config);
        if (config.isViewsEnabled())
            initViews(meta, properties, config);

        initCheckConstraints(properties);
        initTableIds(properties);
        initIndexIds(properties);
        initTableComments(properties);
        initTableColumnComments(properties);
        initViewComments(properties);
        initViewColumnComments(properties);
        connectTables();
        updateFromXmlMetadata(schemaMeta);
    }

    public String getName() {
        return databaseName;
    }

    public String getSchema() {
        return schema;
    }

    /**
     * Details of the database type that's running under the covers.
     *
     * @return null if a description wasn't specified.
     */
    public String getDescription() {
        return description;
    }

    public Collection<Table> getTables() {
        return tables.values();
    }

    /**
     * Return a {@link Map} of all {@link Table}s keyed by their name.
     *
     * @return tables keyed by (case-insensitive) name
     */
    public Map<String, Table> getTablesByName() {
        return tables;
    }

    public Collection<View> getViews() {
        return views.values();
    }

    public Collection<Table> getRemoteTables() {
        return remoteTables.values();
    }

    public Connection getConnection() {
        return connection;
    }

    public DatabaseMetaData getMetaData() {
        return meta;
    }

    public String getConnectTime() {
        return connectTime;
    }

    public String getDatabaseProduct() {
        try {
            return meta.getDatabaseProductName() + " - " + meta.getDatabaseProductVersion();
        } catch (SQLException exc) {
            // Deliberately best-effort: product info is cosmetic, so fall back to empty.
            return "";
        }
    }

    /**
     * "macro" to validate that a table is somewhat valid
     */
    class NameValidator {
        private final String clazz;
        private final Pattern include;
        private final Pattern exclude;
        private final Set<String> validTypes;

        /**
         * @param clazz table or view
         * @param include inclusion pattern a name must match
         * @param exclude exclusion pattern that vetoes a name
         * @param validTypes metadata TABLE_TYPE values considered valid
         */
        NameValidator(String clazz, Pattern include, Pattern exclude, String[] validTypes) {
            this.clazz = clazz;
            this.include = include;
            this.exclude = exclude;
            this.validTypes = new HashSet<String>();
            for (String type : validTypes) {
                // Compare types case-insensitively by normalizing to upper case.
                this.validTypes.add(type.toUpperCase());
            }
        }

        /**
         * Returns <code>true</code> if the table/view name is deemed "valid"
         *
         * @param name name of the table or view
         * @param type type as returned by metadata.getTables():TABLE_TYPE
         * @return whether the name passes the type, recycle-bin and pattern checks
         */
        boolean isValid(String name, String type) {
            // some databases (MySQL) return more than we wanted
            if (!validTypes.contains(type.toUpperCase()))
                return false;

            // Oracle 10g introduced problematic flashback tables
            // Naming Convention "BIN$"${globalUID}${version}
            // http://docs.oracle.com/cd/B19306_01/backup.102/b14192/flashptr004.htm#i1016977
            if (name.indexOf("BIN$") == 0) {
                if (fineEnabled) {
                    logger.fine("Excluding " + clazz + " " + name + ": \"BIN$\" prefix implies a (deleted) table in the Oracle Recycle Bin ");
                }
                return false;
            }

            if (exclude.matcher(name).matches()) {
                if (fineEnabled) {
                    logger.fine("Excluding " + clazz + " " + name + ": matches exclusion pattern \"" + exclude + '"');
                }
                return false;
            }

            boolean valid = include.matcher(name).matches();
            if (fineEnabled) {
                if (valid) {
                    logger.fine("Including " + clazz + " " + name + ": matches inclusion pattern \"" + include + '"');
                } else {
                    logger.fine("Excluding " + clazz + " " + name + ": doesn't match inclusion pattern \"" + include + '"');
                }
            }
            return valid;
        }
    }

    /**
     * Create/initialize any tables in the schema.
* @param metadata
     * @param properties
     * @param config
     * @throws SQLException
     */
    private void initTables(final DatabaseMetaData metadata, final Properties properties, final Config config) throws SQLException {
        final Pattern include = config.getTableInclusions();
        final Pattern exclude = config.getTableExclusions();
        final int maxThreads = config.getMaxDbThreads();

        String[] types = getTypes("tableTypes", "TABLE", properties);
        NameValidator validator = new NameValidator("table", include, exclude, types);
        List<BasicTableMeta> entries = getBasicTableMeta(metadata, true, properties, types);

        TableCreator creator;
        if (maxThreads == 1) {
            creator = new TableCreator();
        } else {
            // creating tables takes a LONG time (based on JProbe analysis),
            // so attempt to speed it up by doing several in parallel.
            // note that it's actually DatabaseMetaData.getIndexInfo() that's expensive
            creator = new ThreadedTableCreator(maxThreads);

            // "prime the pump" so if there's a database problem we'll probably see it now
            // and not in a secondary thread
            while (!entries.isEmpty()) {
                BasicTableMeta entry = entries.remove(0);
                if (validator.isValid(entry.name, entry.type)) {
                    // Create the first valid table synchronously, then stop priming.
                    new TableCreator().create(entry, properties);
                    break;
                }
            }
        }

        // kick off the secondary threads to do the creation in parallel
        for (BasicTableMeta entry : entries) {
            if (validator.isValid(entry.name, entry.type)) {
                creator.create(entry, properties);
            }
        }

        // wait for everyone to finish
        creator.join();
    }

    /**
     * Create/initialize any views in the schema.
     *
     * @param metadata
     * @param properties
     * @param config
     * @throws SQLException
     */
    private void initViews(DatabaseMetaData metadata, Properties properties, Config config) throws SQLException {
        Pattern includeTables = config.getTableInclusions();
        Pattern excludeTables = config.getTableExclusions();
        Pattern excludeColumns = config.getColumnExclusions();
        Pattern excludeIndirectColumns = config.getIndirectColumnExclusions();
        String[] types = getTypes("viewTypes", "VIEW", properties);
        NameValidator validator = new NameValidator("view", includeTables, excludeTables, types);

        for (BasicTableMeta entry : getBasicTableMeta(metadata, false, properties, types)) {
            if (validator.isValid(entry.name, entry.type)) {
                View view = new View(this, entry.schema, entry.name, entry.remarks, entry.viewSql, properties, excludeIndirectColumns, excludeColumns);
                views.put(view.getName(), view);
                if (logger.isLoggable(Level.FINE)) {
                    logger.fine("Found details of view " + view.getName());
                } else {
                    // Progress indicator for non-verbose runs.
                    System.out.print('.');
                }
            }
        }
    }

    /**
     * Collection of fundamental table/view metadata
     */
    private class BasicTableMeta {
        @SuppressWarnings("hiding")
        final String schema;
        final String name;
        final String type;
        final String remarks;
        final String viewSql;
        final int numRows; // -1 if not determined

        /**
         * @param schema
         * @param name
         * @param type typically "TABLE" or "VIEW"
         * @param remarks
         * @param text optional textual SQL used to create the view
         * @param numRows number of rows, or -1 if not determined
         */
        BasicTableMeta(String schema, String name, String type, String remarks, String text, int numRows) {
            this.schema = schema;
            this.name = name;
            this.type = type;
            this.remarks = remarks;
            viewSql = text;
            this.numRows = numRows;
        }
    }

    /**
     * Return a list of basic details of the tables in the schema.
*
     * @param metadata
     * @param forTables true if we're getting table data, false if getting view data
     * @param properties
     * @return basic metadata of each matching table or view
     * @throws SQLException
     */
    private List<BasicTableMeta> getBasicTableMeta(DatabaseMetaData metadata, boolean forTables, Properties properties, String... types) throws SQLException {
        String queryName = forTables ? "selectTablesSql" : "selectViewsSql";
        String sql = properties.getProperty(queryName);
        List<BasicTableMeta> basics = new ArrayList<BasicTableMeta>();
        ResultSet rs = null;

        // First attempt: database-specific custom SQL, if configured.
        if (sql != null) {
            String clazz = forTables ? "table" : "view";
            PreparedStatement stmt = null;

            try {
                stmt = prepareStatement(sql, null);
                rs = stmt.executeQuery();

                while (rs.next()) {
                    String name = rs.getString(clazz + "_name");
                    String sch = getOptionalString(rs, clazz + "_schema");
                    if (sch == null)
                        sch = schema;
                    String remarks = getOptionalString(rs, clazz + "_comment");
                    String text = forTables ? null : getOptionalString(rs, "view_definition");
                    String rows = forTables ? getOptionalString(rs, "table_rows") : null;
                    int numRows = rows == null ? -1 : Integer.parseInt(rows);

                    basics.add(new BasicTableMeta(sch, name, clazz, remarks, text, numRows));
                }
            } catch (SQLException sqlException) {
                // don't die just because this failed
                System.out.flush();
                System.err.println();
                System.err.println("Failed to retrieve " + clazz + " names with custom SQL: " + sqlException);
                System.err.println(sql);
            } finally {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            }
        }

        // Fallback: standard JDBC metadata (also used when the custom SQL matched nothing).
        if (basics.isEmpty()) {
            rs = metadata.getTables(null, schema, "%", types);

            try {
                while (rs.next()) {
                    String name = rs.getString("TABLE_NAME");
                    String type = rs.getString("TABLE_TYPE");
                    String schem = rs.getString("TABLE_SCHEM");
                    String remarks = getOptionalString(rs, "REMARKS");

                    basics.add(new BasicTableMeta(schem, name, type, remarks, null, -1));
                }
            } catch (SQLException exc) {
                // Table failures are fatal; view failures are reported and skipped.
                if (forTables)
                    throw exc;

                System.out.flush();
                System.err.println();
                System.err.println("Ignoring view " + rs.getString("TABLE_NAME") + " due to exception:");
                exc.printStackTrace();
                System.err.println("Continuing analysis.");
            } finally {
                if (rs != null)
                    rs.close();
            }
        }

        return basics;
    }

    /**
     * Return a database-specific array of types from the .properties file
     * with the specified property name.
     *
     * @param propName
     * @param defaultValue
     * @param props
     * @return the comma-separated property value split into trimmed, non-empty entries
     */
    private String[] getTypes(String propName, String defaultValue, Properties props) {
        String value = props.getProperty(propName, defaultValue);
        List<String> types = new ArrayList<String>();
        for (String type : value.split(",")) {
            type = type.trim();
            if (type.length() > 0)
                types.add(type);
        }

        return types.toArray(new String[types.size()]);
    }

    /**
     * Some databases don't play nice with their metadata.
     * E.g. Oracle doesn't have a REMARKS column at all.
     * This method ignores those types of failures, replacing them with null.
*/
    public String getOptionalString(ResultSet rs, String columnName) {
        try {
            return rs.getString(columnName);
        } catch (SQLException ignore) {
            // Missing column (or driver quirk) is expected here; treat as "no value".
            return null;
        }
    }

    private void initCheckConstraints(Properties properties) throws SQLException {
        String sql = properties.getProperty("selectCheckConstraintsSql");
        if (sql != null) {
            PreparedStatement stmt = null;
            ResultSet rs = null;

            try {
                stmt = prepareStatement(sql, null);
                rs = stmt.executeQuery();

                while (rs.next()) {
                    String tableName = rs.getString("table_name");
                    Table table = tables.get(tableName);
                    if (table != null)
                        table.addCheckConstraint(rs.getString("constraint_name"), rs.getString("text"));
                }
            } catch (SQLException sqlException) {
                // don't die just because this failed
                System.err.println();
                System.err.println("Failed to retrieve check constraints: " + sqlException);
                System.err.println(sql);
            } finally {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            }
        }
    }

    private void initTableIds(Properties properties) throws SQLException {
        String sql = properties.getProperty("selectTableIdsSql");
        if (sql != null) {
            PreparedStatement stmt = null;
            ResultSet rs = null;

            try {
                stmt = prepareStatement(sql, null);
                rs = stmt.executeQuery();

                while (rs.next()) {
                    String tableName = rs.getString("table_name");
                    Table table = tables.get(tableName);
                    if (table != null)
                        table.setId(rs.getObject("table_id"));
                }
            } catch (SQLException sqlException) {
                // Unlike the comment-related init methods, a failure here is rethrown:
                // a bad selectTableIdsSql likely indicates misconfiguration.
                System.err.println();
                System.err.println(sql);
                throw sqlException;
            } finally {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            }
        }
    }

    private void initIndexIds(Properties properties) throws SQLException {
        String sql = properties.getProperty("selectIndexIdsSql");
        if (sql != null) {
            PreparedStatement stmt = null;
            ResultSet rs = null;

            try {
                stmt = prepareStatement(sql, null);
                rs = stmt.executeQuery();

                while (rs.next()) {
                    String tableName = rs.getString("table_name");
                    Table table = tables.get(tableName);
                    if (table != null) {
                        TableIndex index = table.getIndex(rs.getString("index_name"));
                        if (index != null)
                            index.setId(rs.getObject("index_id"));
                    }
                }
            } catch (SQLException sqlException) {
                // Same policy as initTableIds: report the failing SQL, then rethrow.
                System.err.println();
                System.err.println(sql);
                throw sqlException;
            } finally {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            }
        }
    }

    /**
     * Initializes table comments.
     * If the SQL also returns view comments then they're plugged into the
     * appropriate views.
     *
     * @param properties
     * @throws SQLException
     */
    private void initTableComments(Properties properties) throws SQLException {
        String sql = properties.getProperty("selectTableCommentsSql");
        if (sql != null) {
            PreparedStatement stmt = null;
            ResultSet rs = null;

            try {
                stmt = prepareStatement(sql, null);
                rs = stmt.executeQuery();

                while (rs.next()) {
                    String tableName = rs.getString("table_name");
                    Table table = tables.get(tableName);
                    if (table == null)
                        table = views.get(tableName);

                    if (table != null)
                        table.setComments(rs.getString("comments"));
                }
            } catch (SQLException sqlException) {
                // don't die just because this failed
                System.err.println();
                System.err.println("Failed to retrieve table/view comments: " + sqlException);
                System.err.println(sql);
            } finally {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            }
        }
    }

    /**
     * Initializes view comments.
*
     * @param properties
     * @throws SQLException
     */
    private void initViewComments(Properties properties) throws SQLException {
        String sql = properties.getProperty("selectViewCommentsSql");
        if (sql != null) {
            PreparedStatement stmt = null;
            ResultSet rs = null;

            try {
                stmt = prepareStatement(sql, null);
                rs = stmt.executeQuery();

                while (rs.next()) {
                    // Some configurations name the column view_name, others table_name.
                    String viewName = rs.getString("view_name");
                    if (viewName == null)
                        viewName = rs.getString("table_name");
                    Table view = views.get(viewName);

                    if (view != null)
                        view.setComments(rs.getString("comments"));
                }
            } catch (SQLException sqlException) {
                // don't die just because this failed
                System.err.println();
                System.err.println("Failed to retrieve table/view comments: " + sqlException);
                System.err.println(sql);
            } finally {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            }
        }
    }

    /**
     * Initializes table column comments.
     * If the SQL also returns view column comments then they're plugged into the
     * appropriate views.
     *
     * @param properties
     * @throws SQLException
     */
    private void initTableColumnComments(Properties properties) throws SQLException {
        String sql = properties.getProperty("selectColumnCommentsSql");
        if (sql != null) {
            PreparedStatement stmt = null;
            ResultSet rs = null;

            try {
                stmt = prepareStatement(sql, null);
                rs = stmt.executeQuery();

                while (rs.next()) {
                    String tableName = rs.getString("table_name");
                    Table table = tables.get(tableName);
                    if (table == null)
                        table = views.get(tableName);

                    if (table != null) {
                        TableColumn column = table.getColumn(rs.getString("column_name"));
                        if (column != null)
                            column.setComments(rs.getString("comments"));
                    }
                }
            } catch (SQLException sqlException) {
                // don't die just because this failed
                System.err.println();
                System.err.println("Failed to retrieve column comments: " + sqlException);
                System.err.println(sql);
            } finally {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            }
        }
    }

    /**
     * Initializes view column comments.
     *
     * @param properties
     * @throws SQLException
     */
    private void initViewColumnComments(Properties properties) throws SQLException {
        String sql = properties.getProperty("selectViewColumnCommentsSql");
        if (sql != null) {
            PreparedStatement stmt = null;
            ResultSet rs = null;

            try {
                stmt = prepareStatement(sql, null);
                rs = stmt.executeQuery();

                while (rs.next()) {
                    // Some configurations name the column view_name, others table_name.
                    String viewName = rs.getString("view_name");
                    if (viewName == null)
                        viewName = rs.getString("table_name");
                    Table view = views.get(viewName);

                    if (view != null) {
                        TableColumn column = view.getColumn(rs.getString("column_name"));
                        if (column != null)
                            column.setComments(rs.getString("comments"));
                    }
                }
            } catch (SQLException sqlException) {
                // don't die just because this failed
                System.err.println();
                System.err.println("Failed to retrieve view column comments: " + sqlException);
                System.err.println(sql);
            } finally {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            }
        }
    }

    /**
     * Create a <code>PreparedStatement</code> from the specified SQL.
     * The SQL can contain these named parameters (but <b>not</b> question marks).
     * <ol>
     * <li>:schema - replaced with the name of the schema
     * <li>:owner - alias for :schema
     * <li>:table - replaced with the name of the table
     * </ol>
     * @param sql String - SQL without question marks
     * @param tableName String - <code>null</code> if the statement doesn't deal with <code>Table</code>-level details.
* @throws SQLException * @return PreparedStatement */ public PreparedStatement prepareStatement(String sql, String tableName) throws SQLException { StringBuilder sqlBuf = new StringBuilder(sql); List<String> sqlParams = getSqlParams(sqlBuf, tableName); // modifies sqlBuf PreparedStatement stmt = getConnection().prepareStatement(sqlBuf.toString()); try { for (int i = 0; i < sqlParams.size(); ++i) { stmt.setString(i + 1, sqlParams.get(i).toString()); } } catch (SQLException exc) { stmt.close(); throw exc; } return stmt; } public Table addRemoteTable(String remoteSchema, String remoteTableName, String baseSchema, Properties properties, Pattern excludeIndirectColumns, Pattern excludeColumns) throws SQLException { String fullName = remoteSchema + "." + remoteTableName; Table remoteTable = remoteTables.get(fullName); if (remoteTable == null) { if (properties != null) remoteTable = new RemoteTable(this, remoteSchema, remoteTableName, baseSchema, properties, excludeIndirectColumns, excludeColumns); else remoteTable = new ExplicitRemoteTable(this, remoteSchema, remoteTableName, baseSchema); logger.fine("Adding remote table " + fullName); remoteTable.connectForeignKeys(tables, excludeIndirectColumns, excludeColumns); remoteTables.put(fullName, remoteTable); } return remoteTable; } /** * Return an uppercased <code>Set</code> of all SQL keywords used by a database * * @return * @throws SQLException */ public Set<String> getSqlKeywords() throws SQLException { if (sqlKeywords == null) { String[] sql92Keywords = ("ADA" + "| C | CATALOG_NAME | CHARACTER_SET_CATALOG | CHARACTER_SET_NAME" + "| CHARACTER_SET_SCHEMA | CLASS_ORIGIN | COBOL | COLLATION_CATALOG" + "| COLLATION_NAME | COLLATION_SCHEMA | COLUMN_NAME | COMMAND_FUNCTION | COMMITTED" + "| CONDITION_NUMBER | CONNECTION_NAME | CONSTRAINT_CATALOG | CONSTRAINT_NAME" + "| CONSTRAINT_SCHEMA | CURSOR_NAME" + "| DATA | DATETIME_INTERVAL_CODE | DATETIME_INTERVAL_PRECISION | DYNAMIC_FUNCTION" + "| FORTRAN" + "| LENGTH" + "| 
MESSAGE_LENGTH | MESSAGE_OCTET_LENGTH | MESSAGE_TEXT | MORE | MUMPS" + "| NAME | NULLABLE | NUMBER" + "| PASCAL | PLI" + "| REPEATABLE | RETURNED_LENGTH | RETURNED_OCTET_LENGTH | RETURNED_SQLSTATE" + "| ROW_COUNT" + "| SCALE | SCHEMA_NAME | SERIALIZABLE | SERVER_NAME | SUBCLASS_ORIGIN" + "| TABLE_NAME | TYPE" + "| UNCOMMITTED | UNNAMED" + "| ABSOLUTE | ACTION | ADD | ALL | ALLOCATE | ALTER | AND" + "| ANY | ARE | AS | ASC" + "| ASSERTION | AT | AUTHORIZATION | AVG" + "| BEGIN | BETWEEN | BIT | BIT_LENGTH | BOTH | BY" + "| CASCADE | CASCADED | CASE | CAST | CATALOG | CHAR | CHARACTER | CHAR_LENGTH" + "| CHARACTER_LENGTH | CHECK | CLOSE | COALESCE | COLLATE | COLLATION" + "| COLUMN | COMMIT | CONNECT | CONNECTION | CONSTRAINT" + "| CONSTRAINTS | CONTINUE" + "| CONVERT | CORRESPONDING | COUNT | CREATE | CROSS | CURRENT" + "| CURRENT_DATE | CURRENT_TIME | CURRENT_TIMESTAMP | CURRENT_USER | CURSOR" + "| DATE | DAY | DEALLOCATE | DEC | DECIMAL | DECLARE | DEFAULT | DEFERRABLE" + "| DEFERRED | DELETE | DESC | DESCRIBE | DESCRIPTOR | DIAGNOSTICS" + "| DISCONNECT | DISTINCT | DOMAIN | DOUBLE | DROP" + "| ELSE | END | END-EXEC | ESCAPE | EXCEPT | EXCEPTION" + "| EXEC | EXECUTE | EXISTS" + "| EXTERNAL | EXTRACT" + "| FALSE | FETCH | FIRST | FLOAT | FOR | FOREIGN | FOUND | FROM | FULL" + "| GET | GLOBAL | GO | GOTO | GRANT | GROUP" + "| HAVING | HOUR" + "| IDENTITY | IMMEDIATE | IN | INDICATOR | INITIALLY | INNER | INPUT" + "| INSENSITIVE | INSERT | INT | INTEGER | INTERSECT | INTERVAL | INTO | IS" + "| ISOLATION" + "| JOIN" + "| KEY" + "| LANGUAGE | LAST | LEADING | LEFT | LEVEL | LIKE | LOCAL | LOWER" + "| MATCH | MAX | MIN | MINUTE | MODULE | MONTH" + "| NAMES | NATIONAL | NATURAL | NCHAR | NEXT | NO | NOT | NULL" + "| NULLIF | NUMERIC" + "| OCTET_LENGTH | OF | ON | ONLY | OPEN | OPTION | OR" + "| ORDER | OUTER" + "| OUTPUT | OVERLAPS" + "| PAD | PARTIAL | POSITION | PRECISION | PREPARE | PRESERVE | PRIMARY" + "| PRIOR | PRIVILEGES | PROCEDURE | PUBLIC" + "| READ | REAL | 
REFERENCES | RELATIVE | RESTRICT | REVOKE | RIGHT" + "| ROLLBACK | ROWS" + "| SCHEMA | SCROLL | SECOND | SECTION | SELECT | SESSION | SESSION_USER | SET" + "| SIZE | SMALLINT | SOME | SPACE | SQL | SQLCODE | SQLERROR | SQLSTATE" + "| SUBSTRING | SUM | SYSTEM_USER" + "| TABLE | TEMPORARY | THEN | TIME | TIMESTAMP | TIMEZONE_HOUR | TIMEZONE_MINUTE" + "| TO | TRAILING | TRANSACTION | TRANSLATE | TRANSLATION | TRIM | TRUE" + "| UNION | UNIQUE | UNKNOWN | UPDATE | UPPER | USAGE | USER | USING" + "| VALUE | VALUES | VARCHAR | VARYING | VIEW" + "| WHEN | WHENEVER | WHERE | WITH | WORK | WRITE" + "| YEAR" + "| ZONE").split("|,\\s*"); String[] nonSql92Keywords = getMetaData().getSQLKeywords().toUpperCase().split(",\\s*"); sqlKeywords = new HashSet<String>(); sqlKeywords.addAll(Arrays.asList(sql92Keywords)); sqlKeywords.addAll(Arrays.asList(nonSql92Keywords)); } return sqlKeywords; } /** * Return <code>id</code> quoted if required, otherwise return <code>id</code> * * @param id * @return * @throws SQLException */ public String getQuotedIdentifier(String id) throws SQLException { // look for any character that isn't valid (then matcher.find() returns true) Matcher matcher = getInvalidIdentifierPattern().matcher(id); boolean quotesRequired = matcher.find() || getSqlKeywords().contains(id.toUpperCase()); if (quotesRequired) { // name contains something that must be quoted String quote = getMetaData().getIdentifierQuoteString().trim(); return quote + id + quote; } // no quoting necessary return id; } /** * Return a <code>Pattern</code> whose matcher will return <code>true</code> * when run against an identifier that contains a character that is not * acceptable by the database without being quoted. 
 */
private Pattern getInvalidIdentifierPattern() throws SQLException {
    // lazily built and cached; matches any single character NOT in the valid set
    if (invalidIdentifierPattern == null) {
        String validChars = "a-zA-Z0-9_";
        // characters that have special meaning inside a regex character class
        String reservedRegexChars = "-&^";
        // driver-reported extra characters that are legal in unquoted identifiers
        String extraValidChars = getMetaData().getExtraNameCharacters();
        for (int i = 0; i < extraValidChars.length(); ++i) {
            char ch = extraValidChars.charAt(i);
            if (reservedRegexChars.indexOf(ch) >= 0)
                validChars += "\\"; // escape regex metacharacters before adding them
            validChars += ch;
        }

        invalidIdentifierPattern = Pattern.compile("[^" + validChars + "]");
    }

    return invalidIdentifierPattern;
}

/**
 * Replaces named parameters in <code>sql</code> with question marks and
 * returns appropriate matching values in the returned <code>List</code> of <code>String</code>s.
 *
 * @param sql StringBuilder input SQL with named parameters, output named params are replaced with ?'s.
 * @param tableName String
 * @return List of Strings - the values to bind, in the order the named parameters appeared
 *
 * @see #prepareStatement(String, String)
 */
private List<String> getSqlParams(StringBuilder sql, String tableName) {
    // map of recognized named parameters to their substitution values
    Map<String, String> namedParams = new HashMap<String, String>();
    @SuppressWarnings("hiding")
    String schema = getSchema();
    if (schema == null)
        schema = getName(); // some 'schema-less' db's treat the db name like a schema (unusual case)
    namedParams.put(":schema", schema);
    namedParams.put(":owner", schema); // alias for :schema
    if (tableName != null) {
        namedParams.put(":table", tableName);
        namedParams.put(":view", tableName); // alias for :table
    }

    List<String> sqlParams = new ArrayList<String>();
    int nextColon = sql.indexOf(":");
    while (nextColon != -1) {
        // the parameter name runs from the colon to the next delimiter
        String paramName = new StringTokenizer(sql.substring(nextColon), " ,\"')").nextToken();
        String paramValue = namedParams.get(paramName);
        if (paramValue == null)
            throw new InvalidConfigurationException("Unexpected named parameter '" + paramName + "' found in SQL '" + sql + "'");
        sqlParams.add(paramValue);
        sql.replace(nextColon, nextColon + paramName.length(), "?"); // replace with a ?
        // search resumes at the '?' just inserted, so the same position isn't rescanned
        nextColon = sql.indexOf(":", nextColon);
    }

    return sqlParams;
}

/**
 * Take the supplied XML-based metadata and update our model of the schema with it
 *
 * @param schemaMeta
 * @throws SQLException
 */
private void updateFromXmlMetadata(SchemaMeta schemaMeta) throws SQLException {
    if (schemaMeta != null) {
        // exclusion pattern that matches nothing, and empty properties,
        // used for tables created purely from XML metadata
        final Pattern excludeNone = Pattern.compile("[^.]");
        final Properties noProps = new Properties();

        description = schemaMeta.getComments();

        // done in three passes:
        // 1: create any new tables
        // 2: add/mod columns
        // 3: connect

        // add the newly defined tables and columns first
        for (TableMeta tableMeta : schemaMeta.getTables()) {
            Table table;

            if (tableMeta.getRemoteSchema() != null) {
                // table lives in another schema; look it up (or create it) by qualified name
                table = remoteTables.get(tableMeta.getRemoteSchema() + '.' + tableMeta.getName());
                if (table == null) {
                    table = addRemoteTable(tableMeta.getRemoteSchema(), tableMeta.getName(), getSchema(), null, excludeNone, excludeNone);
                }
            } else {
                // local: try tables first, then views, else create a new table
                table = tables.get(tableMeta.getName());
                if (table == null)
                    table = views.get(tableMeta.getName());
                if (table == null) {
                    table = new Table(Database.this, getSchema(), tableMeta.getName(), null, noProps, excludeNone, excludeNone);
                    tables.put(table.getName(), table);
                }
            }

            table.update(tableMeta);
        }

        // then tie the tables together
        for (TableMeta tableMeta : schemaMeta.getTables()) {
            Table table;

            if (tableMeta.getRemoteSchema() != null) {
                table = remoteTables.get(tableMeta.getRemoteSchema() + '.' + tableMeta.getName());
            } else {
                table = tables.get(tableMeta.getName());
                if (table == null)
                    table = views.get(tableMeta.getName());
            }

            table.connect(tableMeta, tables, remoteTables);
        }
    }
}

/**
 * Connect the foreign keys of every known table to their referenced tables.
 *
 * @throws SQLException
 */
private void connectTables() throws SQLException {
    Pattern excludeColumns = Config.getInstance().getColumnExclusions();
    Pattern excludeIndirectColumns = Config.getInstance().getIndirectColumnExclusions();

    for (Table table : tables.values()) {
        table.connectForeignKeys(tables, excludeIndirectColumns, excludeColumns);
    }
}

/**
 * Single-threaded implementation of a class that creates tables
 */
private class TableCreator {
    private final Pattern excludeColumns = Config.getInstance().getColumnExclusions();
    private final Pattern excludeIndirectColumns = Config.getInstance().getIndirectColumnExclusions();

    /**
     * Create a table and put it into <code>tables</code>
     */
    void create(BasicTableMeta tableMeta, Properties properties) throws SQLException {
        createImpl(tableMeta, properties);
    }

    protected void createImpl(BasicTableMeta tableMeta, Properties properties) throws SQLException {
        Table table = new Table(Database.this, tableMeta.schema, tableMeta.name, tableMeta.remarks, properties, excludeIndirectColumns, excludeColumns);
        if (tableMeta.numRows != -1) {
            table.setNumRows(tableMeta.numRows);
        }

        // synchronized because the threaded subclass calls this from multiple threads
        synchronized (tables) {
            tables.put(table.getName(), table);
        }

        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Found details of table " + table.getName());
        } else {
            System.out.print('.'); // progress indicator when not logging
        }
    }

    /**
     * Wait for all of the tables to be created.
     * By default this does nothing since this implementation isn't threaded.
     */
    void join() {
    }
}

/**
 * Multi-threaded implementation of a class that creates tables
 */
private class ThreadedTableCreator extends TableCreator {
    // currently running creator threads; guarded by its own monitor
    private final Set<Thread> threads = new HashSet<Thread>();
    private final int maxThreads;

    ThreadedTableCreator(int maxThreads) {
        this.maxThreads = maxThreads;
    }

    @Override
    void create(final BasicTableMeta tableMeta, final Properties properties) throws SQLException {
        Thread runner = new Thread() {
            @Override
            public void run() {
                try {
                    createImpl(tableMeta, properties);
                } catch (SQLException exc) {
                    exc.printStackTrace(); // nobody above us in call stack...dump it here
                } finally {
                    // unregister ourselves and wake any thread waiting for 'room'
                    synchronized (threads) {
                        threads.remove(this);
                        threads.notify();
                    }
                }
            }
        };

        synchronized (threads) {
            // wait for enough 'room'
            while (threads.size() >= maxThreads) {
                try {
                    threads.wait();
                } catch (InterruptedException interrupted) {
                    // deliberately ignored: keep waiting for a free slot
                }
            }

            threads.add(runner);
        }

        runner.start();
    }

    /**
     * Wait for all of the started threads to complete
     */
    @Override
    public void join() {
        while (true) {
            Thread thread;

            synchronized (threads) {
                // grab any remaining thread; done when the set is empty
                Iterator<Thread> iter = threads.iterator();
                if (!iter.hasNext())
                    break;
                thread = iter.next();
            }

            try {
                thread.join();
            } catch (InterruptedException exc) {
                // deliberately ignored: retry the join on the next loop iteration
            }
        }
    }
}
}
package nl.mpi.kinnate.ui;

import nl.mpi.kinnate.svg.GraphPanel;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.io.File;
import javax.swing.JPanel;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import nl.mpi.kinnate.kintypestrings.KinTypeStringConverter;
import nl.mpi.kinnate.SavePanel;

/**
 * Test panel that renders a kin diagram from kin type strings typed by the
 * user. The text area at the top holds one kin type string per line; every
 * key release re-parses the text and redraws the graph below it.
 */
public class KinTypeStringTestPanel extends JPanel implements SavePanel {

    private JTextArea kinTypeStringInput;
    private GraphPanel graphPanel;
    private KinTermPanel kinTermPanel;
    private String defaultString = "This test panel should provide a kin diagram of the kintype strings entered here.\nEnter one string per line.\nEach new line (enter/return key) will update the graph.";

    public KinTypeStringTestPanel() {
        setLayout(new BorderLayout());

        // the kin term panel is fed from the graph panel, so build the graph first
        graphPanel = new GraphPanel(null);
        kinTermPanel = new KinTermPanel(this, graphPanel.getkinTerms());

        kinTypeStringInput = new JTextArea(defaultString);
        kinTypeStringInput.setBorder(javax.swing.BorderFactory.createTitledBorder("Kin Type Strings"));

        JSplitPane diagramSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
        diagramSplitPane.setLeftComponent(graphPanel);
        diagramSplitPane.setRightComponent(kinTermPanel);

        add(kinTypeStringInput, BorderLayout.PAGE_START);
        add(diagramSplitPane, BorderLayout.CENTER);

        // placeholder handling: clear the hint text on focus, restore it when
        // the field is left empty
        kinTypeStringInput.addFocusListener(new FocusListener() {

            public void focusGained(FocusEvent focusEvent) {
                if (kinTypeStringInput.getText().equals(defaultString)) {
                    kinTypeStringInput.setText("");
                    kinTypeStringInput.setForeground(Color.BLACK);
                }
            }

            public void focusLost(FocusEvent focusEvent) {
                if (kinTypeStringInput.getText().length() == 0) {
                    kinTypeStringInput.setText(defaultString);
                    kinTypeStringInput.setForeground(Color.lightGray);
                }
            }
        });

        // redraw the diagram after every key release
        kinTypeStringInput.addKeyListener(new KeyListener() {

            public void keyTyped(KeyEvent keyEvent) {
                // intentionally unused
            }

            public void keyPressed(KeyEvent keyEvent) {
                // intentionally unused
            }

            public void keyReleased(KeyEvent keyEvent) {
                updateGraph();
            }
        });
    }

    public boolean hasSaveFileName() {
        return graphPanel.hasSaveFileName();
    }

    public boolean requiresSave() {
        return graphPanel.requiresSave();
    }

    public void saveToFile() {
        graphPanel.saveToFile();
    }

    public void saveToFile(File saveFile) {
        graphPanel.saveToFile(saveFile);
    }

    /**
     * Re-parse the contents of the text area (one kin type string per line)
     * and redraw the graph panel.
     */
    public void updateGraph() {
        KinTypeStringConverter converter = new KinTypeStringConverter();
        converter.readKinTypes(kinTypeStringInput.getText().split("\n"), graphPanel.getkinTerms());
        graphPanel.drawNodes(converter);
        doLayout();
    }
}
package org.amc.game.chessserver; import org.amc.game.chess.ChessBoard; import org.amc.game.chess.ChessGame; import org.amc.game.chess.Colour; import org.amc.game.chess.InvalidMoveException; import org.amc.game.chess.Move; import org.amc.game.chess.ObservableChessGame; import org.amc.game.chess.Player; import org.amc.game.chess.SetupChessBoard; import org.apache.log4j.Logger; /** * Represents a ChessGame Application resident in a Spring container * @author Adrian Mclaughlin * */ public class ServerChessGame { public enum status{ IN_PROGRESS, AWAITING_PLAYER, FINISHED } private static final Logger logger = Logger.getLogger(ServerChessGame.class); private ChessGame chessGame=null; private status currentStatus; private Player player; private Player opponent; public ServerChessGame(Player player) { this.player=player; this.player.setColour(Colour.WHITE); this.currentStatus=status.AWAITING_PLAYER; } /** * Adds player to the black side of the chess game * Only valid if ServerChessGame is in AWAITING_PLAYER state, no exception is thrown if not in that state * @param player Player */ public void addOpponent(Player player){ if(!this.currentStatus.equals(status.FINISHED)){ player.setColour(Colour.BLACK); this.opponent=player; ChessBoard board=new ChessBoard(); SetupChessBoard.setUpChessBoardToDefault(board); chessGame=new ObservableChessGame(board,this.player,player); this.currentStatus=status.IN_PROGRESS; } } /** * Get current status of the ServerChessGame * @return status enum */ public final status getCurrentStatus() { return currentStatus; } /** * Get the player who created the game * @return Player */ public final Player getPlayer() { return player; } /** * Set the ServerGame's status * @param currentStatus */ public final void setCurrentStatus(status currentStatus) { this.currentStatus = currentStatus; } /** * * @return the ChessGame object */ public final ChessGame getChessGame() { return chessGame; } /** * @return Player opposing player */ public final Player 
getOpponent(){ return opponent; } public final void move(Player player,Move move) throws InvalidMoveException{ if(chessGame!=null){ synchronized(chessGame){ chessGame.move(player, move); chessGame.changePlayer(); } checkGameStatus(); } } /** * Checks to see if the game is finished and sets it's status accordingly * No check for chessGame being null */ private void checkGameStatus(){ switch (chessGame.getGameState()) { case STALEMATE: logger.info("Game has ended in a stalemate"); case BLACK_CHECKMATE: logger.info(opponent.getName() + " has won!"); case WHITE_CHECKMATE: logger.info(player.getName() + " has won!"); setCurrentStatus(status.FINISHED); break; default: } } }
package org.animotron.expression;

import org.animotron.exception.AnimoException;
import org.animotron.graph.builder.FastGraphBuilder;
import org.animotron.graph.builder.GraphBuilder;
import org.animotron.statement.Prefix;
import org.animotron.statement.Statement;
import org.animotron.statement.Statements;
import org.animotron.statement.instruction.Instruction;
import org.animotron.statement.link.AbstractLink;
import org.animotron.statement.link.LINK;
import org.animotron.statement.ml.MLOperator;
import org.animotron.statement.ml.QNAME;
import org.animotron.statement.operator.*;
import org.animotron.statement.value.AbstractValue;
import org.neo4j.graphdb.Relationship;

import java.io.*;
import java.util.Stack;

/**
 * Parses textual Animo expressions into the graph via a {@link GraphBuilder}.
 * The parser is a hand-written, character-at-a-time state machine driven by
 * {@link #build()}; tokens are delimited by whitespace, commas, parentheses
 * and double quotes (quoted text is treated as a literal value).
 *
 * @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
 * @author <a href="mailto:gazdovsky@gmail.com">Evgeny Gazdovsky</a>
 *
 */
public class AnimoExpression extends AbstractExpression {

    /** Builds one expression per input string and returns the resulting relationships. */
    public final static Relationship[] __(String... e) {
        Relationship[] a = new Relationship[e.length];
        for (int i = 0; i < e.length; i++) {
            a[i] = __(new AnimoExpression(e[i]));
        }
        return a;
    }

    // source of the expression text
    private Reader reader;
    // true when the previous token was followed by ','
    private boolean comma = false;

    public AnimoExpression(InputStream stream) {
        this(new InputStreamReader(stream));
    }

    public AnimoExpression(GraphBuilder builder, InputStream stream) {
        this(builder, new InputStreamReader(stream));
    }

    public AnimoExpression(String str) {
        this(new StringReader(str));
    }

    public AnimoExpression(GraphBuilder builder, String str) {
        this(builder, new StringReader(str));
    }

    public AnimoExpression(Reader reader) {
        this(new FastGraphBuilder(), reader);
    }

    public AnimoExpression(GraphBuilder builder, Reader reader) {
        super(builder);
        this.reader = reader;
    }

    // accumulator for the token currently being read
    private StringBuilder s = new StringBuilder();
    // saved nesting depths for each open parenthesis (see startList/endList)
    private Stack<Integer> stack = new Stack<Integer>();
    // the statement most recently started/recognized
    private Statement op = null;
    // number of builder scopes opened at the current list level
    private int level = 0;
    // true right after a Prefix statement was recognized
    private boolean prefix = false;
    // tracks whether the current list already has a head (see startList)
    boolean link = false;
    // true while the current token could still be a number ('.' allowed inside)
    boolean number = true;
    // true while inside a double-quoted literal
    boolean text = false;

    /**
     * Reads the whole input and feeds tokens/scopes to the builder.
     * Handles quoting ('"' with '\' escapes), token delimiters
     * (space, tab, newline, comma, '.') and parenthesized sub-lists.
     */
    @Override
    public void build() throws IOException, AnimoException {
        int len;
        char[] buff = new char[4 * 1024];
        char prev = '\0';
        startList();
        while ((len = reader.read(buff)) > 0) {
            for (int i = 0; i < len; i++) {
                char ch = buff[i];
                if (ch == '\"') {
                    // quote toggles text mode unless escaped by a backslash
                    if (!text) {
                        newToken();
                        text = true;
                    } else if (prev == '\\') {
                        s.append(ch);
                    } else {
                        newToken();
                        text = false;
                    }
                } else {
                    if (text) {
                        // inside a quoted literal: a lone '\' is consumed as an escape
                        if (prev == '\\' || ch != '\\') {
                            s.append(ch);
                        }
                    } else {
                        // '.' is kept inside a token only while it still looks numeric
                        if ((ch != ' ' && ch != '\t' && ch != '\n' && ch != ',' && ch != '(' && ch != ')') || (ch == '.' && number)) {
                            if (ch != '.') {
                                if (prev == '.') {
                                    s.append('.');
                                }
                                s.append(ch);
                                processPrefix();
                            }
                        } else {
                            number = false;
                            switch (ch) {
                            case ' ' :
                            case '.' : //workaround
                            case '\t' :
                            case '\n' :
                                newToken();
                                break;
                            case ',' :
                                newToken();
                                comma = true;
                                break;
                            case '(' :
                                newToken();
                                startList();
                                break;
                            case ')' :
                                newToken();
                                endList();
                                break;
                            default :
                                s.append(ch);
                                processPrefix();
                            }
                        }
                    }
                }
                // a consumed escape resets prev so "\\\\" doesn't escape the next char
                prev = prev == '\\' ? '\0' : ch;
            }
            // NOTE(review): lastToken() runs after EVERY buffer, not only at EOF,
            // and it does not reset the accumulator — a token straddling the 4KB
            // buffer boundary looks like it would be emitted twice. Confirm intent.
            lastToken();
        }
        endList();
    }

    /**
     * If the accumulated token names a {@link Prefix} statement, open it
     * immediately and restart token accumulation.
     */
    private void processPrefix() throws AnimoException, IOException {
        Statement st = Statements.name(s.toString());
        if (st instanceof Prefix) {
            builder.start(st);
            level++;
            op = st;
            link = false;
            prefix = true;
            s = new StringBuilder();
            number = true;
        }
    }

    /** Emit the accumulated token (if any) and reset the accumulator. */
    private void newToken() throws AnimoException, IOException {
        if (s.length() > 0) {
            token();
            s = new StringBuilder();
        } else {
            prefix = false;
        }
        number = true;
    }

    /** Like newToken() but leaves the accumulator untouched after emitting. */
    private void lastToken() throws AnimoException, IOException {
        if (s.length() > 0) {
            token();
        } else {
            prefix = false;
        }
        number = true;
    }

    /**
     * Dispatch one completed token to the builder: quoted text becomes a value,
     * otherwise the token is resolved against known statements (THE, POSSESSIVE,
     * operators, ML operators/links, instructions) and falls back to a REF/value.
     */
    private void token() throws AnimoException, IOException {
        String token = s.toString();
        if (token.length() == 1 && ".".equals(token))
            return; //XXX:start new graph
        if (text) {
            // quoted literal: pass through as a raw value
            builder._(token);
        } else {
            if (prefix) {
                // token immediately following a Prefix becomes its qualified name
                builder._(QNAME._, token);
                op = null;
                prefix = false;
            } else if (op instanceof THE) {
                builder.start(op, token);
                op = null;
                level++;
            } else if (token.endsWith(POSSESSIVE._.name())) {
                // strip the possessive suffix and open a POSSESSIVE scope around a REF
                token = token.substring(0, token.length() - POSSESSIVE._.name().length());
                builder.start(POSSESSIVE._);
                builder._(REF._, token);
                op = null;
                level++;
            } else {
                Statement s = Statements.name(token);
                if (s instanceof Operator) {
                    builder.start(s);
                    level++;
                } else if (s instanceof MLOperator || s instanceof AbstractLink) {
                    builder.start(s);
                    s = null;
                    level++;
                } else if (s instanceof Instruction) {
                    builder.start(AN._);
                    builder._(REF._, token);
                    level++;
                } else if (s == null) {
                    // unknown token: treat as a value; strings may implicitly open an AN scope
                    Object o = AbstractValue.value(token);
                    if (o instanceof String) {
                        if (op instanceof REF && !comma || !(op instanceof Operator || op instanceof REF) && !(op instanceof POSSESSIVE)) {
                            builder.start(AN._);
                            level++;
                        }
                        s = REF._;
                        builder._(s, token);
                        comma = false;
                    } else {
                        builder._(o);
                    }
                }
                op = s;
            }
        }
        link = false;
    }

    /**
     * Open a parenthesized sub-list. The first list at a level is implicit;
     * subsequent ones open an explicit LINK scope. The current nesting depth
     * is pushed so endList() can close exactly the scopes opened inside.
     */
    private void startList() throws AnimoException, IOException {
        if (link) {
            builder.start(LINK._);
            op = null;
            level++;
        } else {
            link = true;
        }
        stack.push(level);
        level = 0;
    }

    /** Close every scope opened inside the current list and restore the outer depth. */
    private void endList() throws AnimoException, IOException {
        for (int i = 0; i < level; i++) {
            builder.end();
        }
        level = stack.pop();
    }
}
package org.ccnx.ccn.io.content;

import java.io.IOException;
import java.io.InvalidObjectException;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.logging.Level;

import org.ccnx.ccn.CCNHandle;
import org.ccnx.ccn.CCNInterestListener;
import org.ccnx.ccn.ContentVerifier;
import org.ccnx.ccn.config.ConfigurationException;
import org.ccnx.ccn.config.SystemConfiguration;
import org.ccnx.ccn.impl.CCNFlowControl;
import org.ccnx.ccn.impl.CCNFlowControl.SaveType;
import org.ccnx.ccn.impl.CCNFlowControl.Shape;
import org.ccnx.ccn.impl.repo.RepositoryFlowControl;
import org.ccnx.ccn.impl.security.crypto.ContentKeys;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.impl.support.Tuple;
import org.ccnx.ccn.io.CCNInputStream;
import org.ccnx.ccn.io.CCNVersionedInputStream;
import org.ccnx.ccn.io.CCNVersionedOutputStream;
import org.ccnx.ccn.io.ErrorStateException;
import org.ccnx.ccn.io.LinkCycleException;
import org.ccnx.ccn.io.NoMatchingContentFoundException;
import org.ccnx.ccn.io.CCNAbstractInputStream.FlagTypes;
import org.ccnx.ccn.io.content.Link.LinkObject;
import org.ccnx.ccn.profiles.SegmentationProfile;
import org.ccnx.ccn.profiles.VersioningProfile;
import org.ccnx.ccn.profiles.versioning.VersionNumber;
import org.ccnx.ccn.protocol.CCNTime;
import org.ccnx.ccn.protocol.ContentName;
import org.ccnx.ccn.protocol.ContentObject;
import org.ccnx.ccn.protocol.Interest;
import org.ccnx.ccn.protocol.KeyLocator;
import org.ccnx.ccn.protocol.PublisherPublicKeyDigest;
import org.ccnx.ccn.protocol.SignedInfo.ContentType;

/**
 * Extends a NetworkObject to add specifics for using a CCN-based backing store. Each time
 * the object is saved, a new CCN version is created. Readers can open a specific version or
 * not specify a version, in which case the latest available version is read. Defaults
 * allow for saving data to a repository or directly to the network.
 *
 * Need to support four use models:
 * dimension 1: synchronous - ask for and block, the latest version or a specific version
 * dimension 2: asynchronous - ask for and get in the background, the latest version or a specific
 *   version
 * When possible, keep track of the latest version known so that the latest version queries
 * can attempt to do better than that. Start by using only in the background load case, as until
 * something comes back we can keep using the old one and the propensity for blocking is high.
 *
 * Support for subclasses or users specifying different flow controllers with
 * different behavior. Build in support for either the simplest standard flow
 * controller, or a standard repository-backed flow controller.
 *
 * These objects attempt to maintain a CCN copy of the current state of their data. In descriptions
 * below, an object that is "dirty" is one whose data has been modified locally, but not yet
 * saved to the network.
 *
 * While CCNNetworkObject could be used directly, it almost never is; it is usually
 * more effective to define a subclass specialized to save/retrieve a specific object
 * type.
 *
 * Updates, 12/09: Move to creating a flow controller in the write constructor if
 * one isn't passed in. Read constructors still lazily create flow controllers on
 * first write (tradeoff); preemptive construction (and registering for interests)
 * can be achieved by calling the setupSave() method which creates a flow controller
 * if one hasn't been created already. Move to a strong default of saving
 * to a repository, unless overridden by the subclass itself. Change of repository/raw
 * nature can be made with the setRawSave() and setRepositorySave() methods.
 *
 * TODO: Note that the CCNNetworkObject class hierarchy currently has a plethora of constructors.
 * It is also missing some important functionality -- encryption, the ability to specify
 * freshness, and so on. Expect new constructors to deal with the latter deficiencies, and
 * a cleanup of the constructor architecture overall in the near term.
 */
public abstract class CCNNetworkObject<E> extends NetworkObject<E> implements CCNInterestListener {

	// payload used to mark a version as deleted ("gone")
	protected static final byte [] GONE_OUTPUT = "GONE".getBytes();

	/**
	 * Unversioned "base" name.
	 */
	protected ContentName _baseName;

	/**
	 * The most recent version we have read/written.
	 */
	protected byte [] _currentVersionComponent;

	/**
	 * Cached versioned name.
	 */
	protected ContentName _currentVersionName;

	/**
	 * Flag to indicate whether content has been explicitly marked as GONE
	 * in the latest version we know about. Use an explicit flag to separate from
	 * the option for valid null content, or content that has not yet been updated.
	 */
	protected boolean _isGone = false;

	/**
	 * The first segment for the stored data
	 */
	protected ContentObject _firstSegment = null;

	/**
	 * If the name we started with was actually a link, detect that, store the link,
	 * and dereference it to get the content. Call updateLink() to update the link
	 * itself, and if updated, to update the dereferenced value.
	 *
	 * If the initial link is a link, recursion should push that into the link of
	 * this LinkObject, and read its data. If that is a link, it should push again --
	 * this should chain through links till we reach an object of the desired type,
	 * or blow up. (It won't handle encrypted links, though; we may need to distinguish
	 * between ENCR and ENCRL. Having encrypted links would be handy, to send people
	 * off in random directions. But it matters a lot to be able to tell if the decryption
	 * is a LINK or not.)
	 *
	 * Writing linked objects is better done by separately writing the object and
	 * the link, as it gives you more control over what is happening. If you attempt
	 * to save this object, it may break the link (as the link may link to the particular
	 * version retrieved). You can use this inner link object to manually update the link
	 * to the target; but there are no good defaults about how to update the data. So
	 * you need to specify the new link value yourself. For now we don't prevent users
	 * from getting their data and their links desynchronized.
	 */
	protected LinkObject _dereferencedLink;

	// provenance of the version we most recently read
	protected PublisherPublicKeyDigest _currentPublisher;
	protected KeyLocator _currentPublisherKeyLocator;
	protected CCNHandle _handle;
	protected CCNFlowControl _flowControl;
	protected boolean _disableFlowControlRequest = false;
	protected PublisherPublicKeyDigest _publisher; // publisher we write under, if null, use handle defaults
	protected KeyLocator _keyLocator; // locator to find publisher key
	protected SaveType _saveType = null; // what kind of flow controller to make if we don't have one
	protected Integer _freshnessSeconds = null; // if we want to set short freshness
	protected ContentKeys _keys;
	protected ContentVerifier _verifier;

	/**
	 * Controls ongoing update.
	 */
	Interest _currentInterest = null;
	boolean _continuousUpdates = false;
	HashSet<UpdateListener> _updateListeners = null;

	/**
	 * Basic write constructor. This will set the object's internal data but it will not save it
	 * until save() is called. Unless overridden by the subclass, will default to save to
	 * a repository. Can be changed to save directly to the network using setRawSave().
	 * If a subclass sets the default behavior to raw saves, this can be overridden on a
	 * specific instance using setRepositorySave().
	 * @param type Wrapped class type.
	 * @param contentIsMutable is the wrapped class type mutable or not
	 * @param name Name under which to save object.
	 * @param data Data to save.
	 * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open().
	 * @throws IOException If there is an error setting up network backing store.
*/ public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, E data, SaveType saveType, CCNHandle handle) throws IOException { this(type, contentIsMutable, name, data, saveType, null, null, handle); } /** * Basic write constructor. This will set the object's internal data but it will not save it * until save() is called. Unless overridden by the subclass, will default to save to * a repository. Can be changed to save directly to the network using setRawSave(). * If a subclass sets the default behavior to raw saves, this can be overridden on a * specific instance using setRepositorySave(). * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param name Name under which to save object. * @param data Data to save. * @param raw If true, saves to network by default, if false, saves to repository by default. * @param publisher The key to use to sign this data, or our default if null. * @param locator The key locator to use to let others know where to get our key. * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open(). * @throws IOException If there is an error setting up network backing store. */ public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, E data, SaveType saveType, PublisherPublicKeyDigest publisher, KeyLocator locator, CCNHandle handle) throws IOException { super(type, contentIsMutable, data); if (null == handle) { try { handle = CCNHandle.open(); } catch (ConfigurationException e) { throw new IllegalArgumentException("handle null, and cannot create one: " + e.getMessage(), e); } } _handle = handle; _verifier = handle.defaultVerifier(); _baseName = name; _publisher = publisher; _keyLocator = locator; _saveType = saveType; // Make our flow controller and register interests for our base name, if we have one. // Otherwise, create flow controller when we need one. 
if (null != name) { createFlowController(); } } /** * Specialized constructor, allowing subclasses to override default flow controller * (and hence backing store) behavior. * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param name Name under which to save object. * @param data Data to save. * @param publisher The key to use to sign this data, or our default if null. * @param locator The key locator to use to let others know where to get our key. * @param flowControl Flow controller to use. A single flow controller object * is used for all this instance's writes, we use underlying streams to call * CCNFlowControl#startWrite(ContentName, Shape) on each save. Calls to * setRawSave() and setRepositorySave() will replace this flow controller * with a raw or repository flow controller, and should not be used with * this type of object (which obviously cares about what flow controller to use). * @throws IOException If there is an error setting up network backing store. */ protected CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, E data, PublisherPublicKeyDigest publisher, KeyLocator locator, CCNFlowControl flowControl) throws IOException { super(type, contentIsMutable, data); _baseName = name; _publisher = publisher; _keyLocator = locator; if (null == flowControl) { throw new IOException("FlowControl cannot be null!"); } _flowControl = flowControl; _handle = _flowControl.getHandle(); _saveType = _flowControl.saveType(); _verifier = _handle.defaultVerifier(); // Register interests for our base name, if we have one. if (null != name) { flowControl.addNameSpace(name); } } /** * Read constructor. Will try to pull latest version of this object, or a specific * named version if specified in the name. If read times out, will leave object in * its uninitialized state. * @param type Wrapped class type. 
 * @param contentIsMutable is the wrapped class type mutable or not
 * @param name Name from which to read the object. If versioned, will read that specific
 * version. If unversioned, will attempt to read the latest version available.
 * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open().
 * @throws ContentDecodingException if there is a problem decoding the object.
 * @throws IOException if there is an error setting up network backing store.
 */
public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name,
		CCNHandle handle) throws ContentDecodingException, IOException {
	// Accept content from any publisher.
	this(type, contentIsMutable, name, (PublisherPublicKeyDigest)null, handle);
}

/**
 * Read constructor. Will try to pull latest version of this object, or a specific
 * named version if specified in the name. If read times out, will leave object in
 * its uninitialized state.
 *
 * @param type Wrapped class type.
 * @param contentIsMutable is the wrapped class type mutable or not
 * @param name Name from which to read the object. If versioned, will read that specific
 * version. If unversioned, will attempt to read the latest version available.
 * @param publisher Particular publisher we require to have signed the content, or null for any publisher.
 * @param flowControl Flow controller to use. A single flow controller object
 * is used for all this instance's writes, we use underlying streams to call
 * CCNFlowControl#startWrite(ContentName, Shape) on each save.
 * @throws ContentDecodingException if there is a problem decoding the object.
 * @throws IOException if there is an error setting up network backing store.
 */
protected CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name,
		PublisherPublicKeyDigest publisher, CCNFlowControl flowControl)
		throws ContentDecodingException, IOException {
	super(type, contentIsMutable);
	if (null == flowControl) {
		throw new IOException("FlowControl cannot be null!");
	}
	_flowControl = flowControl;
	// Handle, save type and verifier are all derived from the caller-supplied flow controller.
	_handle = _flowControl.getHandle();
	_saveType = _flowControl.saveType();
	_verifier = _handle.defaultVerifier();
	// Attempt the initial read; on timeout the object stays uninitialized.
	update(name, publisher);
}

/**
 * Read constructor. Will try to pull latest version of this object, or a specific
 * named version if specified in the name. If read times out, will leave object in
 * its uninitialized state.
 *
 * @param type Wrapped class type.
 * @param contentIsMutable is the wrapped class type mutable or not
 * @param name Name from which to read the object. If versioned, will read that specific
 * version. If unversioned, will attempt to read the latest version available.
 * @param publisher Particular publisher we require to have signed the content, or null for any publisher.
 * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open().
 * @throws ContentDecodingException if there is a problem decoding the object.
 * @throws IOException if there is an error setting up network backing store.
 */
public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name,
		PublisherPublicKeyDigest publisher, CCNHandle handle)
		throws ContentDecodingException, IOException {
	super(type, contentIsMutable);
	if (null == handle) {
		try {
			handle = CCNHandle.open();
		} catch (ConfigurationException e) {
			throw new IllegalArgumentException("handle null, and cannot create one: " + e.getMessage(), e);
		}
	}
	_handle = handle;
	_verifier = handle.defaultVerifier();
	_baseName = name;
	// Attempt the initial read; on timeout the object stays uninitialized.
	update(name, publisher);
}

/**
 * Read constructor if you already have a segment of the object. Used by streams.
 * @param type Wrapped class type.
 * @param contentIsMutable is the wrapped class type mutable or not
 * @param firstSegment First segment of the object, retrieved by other means.
 * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open().
 * @throws ContentDecodingException if there is a problem decoding the object.
 * @throws IOException if there is an error setting up network backing store.
 */
public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentObject firstSegment,
		CCNHandle handle) throws ContentDecodingException, IOException {
	super(type, contentIsMutable);
	if (null == handle) {
		try {
			handle = CCNHandle.open();
		} catch (ConfigurationException e) {
			throw new IllegalArgumentException("handle null, and cannot create one: " + e.getMessage(), e);
		}
	}
	_handle = handle;
	_verifier = handle.defaultVerifier();
	// Read the rest of the object starting from the segment we already have.
	update(firstSegment);
}

/**
 * Read constructor if you already have a segment of the object. Used by streams.
 * @param type Wrapped class type.
 * @param contentIsMutable is the wrapped class type mutable or not
 * @param firstSegment First segment of the object, retrieved by other means.
 * @param flowControl Flow controller to use. A single flow controller object
 * is used for all this instance's writes, we use underlying streams to call
 * CCNFlowControl#startWrite(ContentName, Shape) on each save.
 * @throws ContentDecodingException if there is a problem decoding the object.
 * @throws IOException if there is an error setting up network backing store.
 */
protected CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentObject firstSegment,
		CCNFlowControl flowControl) throws ContentDecodingException, IOException {
	super(type, contentIsMutable);
	if (null == flowControl)
		throw new IllegalArgumentException("flowControl cannot be null!");
	_flowControl = flowControl;
	// Handle, save type and verifier are all derived from the caller-supplied flow controller.
	_handle = _flowControl.getHandle();
	_saveType = _flowControl.saveType();
	_verifier = _handle.defaultVerifier();
	// Read the rest of the object starting from the segment we already have.
	update(firstSegment);
}

/**
 * Copy constructor. Handle it piece by piece, though it means
 * updating this whenever the structure changes (rare).
 */
protected CCNNetworkObject(Class<E> type, CCNNetworkObject<? extends E> other) {
	super(type, other);
	_baseName = other._baseName;
	_currentVersionComponent = other._currentVersionComponent;
	_currentVersionName = other._currentVersionName;
	_isGone = other._isGone;
	_currentPublisher = other._currentPublisher;
	_currentPublisherKeyLocator = other._currentPublisherKeyLocator;
	_handle = other._handle;
	_flowControl = other._flowControl;
	_disableFlowControlRequest = other._disableFlowControlRequest;
	_publisher = other._publisher;
	_keyLocator = other._keyLocator;
	_saveType = other._saveType;
	// Deep-copy the key set so the two objects don't share mutable key state.
	_keys = (null != other._keys) ? other._keys.clone() : null;
	_firstSegment = other._firstSegment;
	_verifier = other._verifier;
	// Do not copy update behavior. Even if other one is updating, we won't
	// pick that up. Have to kick off manually.
}

/**
 * Maximize laziness of flow controller creation, to make it easiest for client code to
 * decide how to store this object.
 * When we create the flow controller, we add the base name namespace, so it will respond
 * to requests for latest version. Create them immediately in write constructors,
 * when we have a strong expectation that we will save data, if we have a namespace
 * to start listening on. Otherwise wait till we are going to write.
 * @throws IOException if the configured save type is unknown
 */
protected synchronized void createFlowController() throws IOException {
	if (null == _flowControl) {
		// Without a save type we cannot pick a flow controller implementation; stay lazy.
		if (null == _saveType) {
			Log.finer("Not creating flow controller yet, no saveType set.");
			return;
		}
		switch (_saveType) {
		case RAW:
			_flowControl = new CCNFlowControl(_handle);
			break;
		case REPOSITORY:
			_flowControl = new RepositoryFlowControl(_handle);
			break;
		case LOCALREPOSITORY:
			_flowControl = new RepositoryFlowControl(_handle, true);
			break;
		default:
			throw new IOException("Unknown save type: " + _saveType);
		}
		// Honor a disable request made before the controller existed.
		if (_disableFlowControlRequest)
			_flowControl.disable();
		// Have to register the version root. If we just register this specific version, we won't
		// see any shorter interests -- i.e. for get latest version.
		_flowControl.addNameSpace(_baseName);
		if (Log.isLoggable(Level.INFO))
			Log.info("Created " + _saveType + " flow controller, for prefix {0}, save type " + _flowControl.saveType(), _baseName);
	}
}

/**
 * Get the flow controller associated with this object
 * @return the flow controller or null if not assigned
 */
public CCNFlowControl getFlowControl() {
	return _flowControl;
}

/**
 * Get timeout associated with this object
 * @return the flow controller's timeout
 */
public long getTimeout() {
	// NOTE(review): throws NullPointerException if no flow controller has been created
	// yet (lazy creation) -- confirm callers only use this after setupSave()/save().
	return _flowControl.getTimeout();
}

/**
 * Start listening to interests on our base name, if we aren't already,
 * first setting the save type to use.
 * @throws IOException if the save type conflicts with an existing flow controller
 */
public synchronized void setupSave(SaveType saveType) throws IOException {
	setSaveType(saveType);
	setupSave();
}

/**
 * Start listening to interests on our base name, if we aren't already,
 * creating the flow controller if necessary.
 * @throws IOException if there is an error setting up the flow controller
 */
public synchronized void setupSave() throws IOException {
	if (null != _flowControl) {
		// Already have a controller; just (re-)register our namespace.
		if (null != _baseName) {
			_flowControl.addNameSpace(_baseName);
		}
		return;
	}
	createFlowController();
}

/**
 * Finalizer. Somewhat dangerous, but currently best way to close
 * lingering open registrations. Can't close the handle, till we ref count.
 */
@Override
protected void finalize() throws Throwable {
	try {
		close(); // close the object, canceling interests and listeners.
	} finally {
		super.finalize();
	}
}

/**
 * Close flow controller, remove listeners. Have to call setupSave to save with this object again,
 * re-add listeners.
 */
public synchronized void close() {
	cancelInterest();
	clearListeners();
	if (null != _flowControl) {
		_flowControl.close();
	}
}

/**
 * @return the save type this object will use (RAW, REPOSITORY, ...), or null if unset.
 */
public SaveType saveType() { return _saveType; }

/**
 * Used by subclasses to specify a mandatory save type in
 * read constructors. Only works on objects whose flow
 * controller has not yet been set, to not override
 * manually-set FC's.
 * @throws IOException if a flow controller with a different save type already exists
 */
protected void setSaveType(SaveType saveType) throws IOException {
	if (null == _flowControl) {
		_saveType = saveType;
	} else if (saveType != _saveType){
		throw new IOException("Cannot change save type, flow controller already set!");
	}
}

/**
 * If you want to set the lifetime of objects saved with this instance.
 * @param freshnessSeconds If null, will unset any freshness seconds (will
 * write objects that stay in cache till forced out); if a value will constrain
 * how long objects will stay in cache.
 */
public void setFreshnessSeconds(Integer freshnessSeconds) {
	_freshnessSeconds = freshnessSeconds;
}

/**
 * Override point where subclasses can modify each input stream before
 * it is read. Subclasses should at least set the flags using getInputStreamFlags,
 * or call super.setInputStreamProperties.
 */
protected void setInputStreamProperties(CCNInputStream inputStream) {
	// default -- just set any flags
	inputStream.setFlags(getInputStreamFlags());
}

/**
 * Override point where subclasses can specify set of flags on input stream
 * at point it is read or where necessary created.
 * @return the flags to apply to input streams, or null for none
 */
protected EnumSet<FlagTypes> getInputStreamFlags() {
	return null;
}

/**
 * Allow verifier to be specified. Could put this in the constructors; though they
 * are already complicated enough. If not set, the default verifier for the key manager
 * used by the object's handle is used.
 * @param verifier the verifier to use. Cannot be null.
*/ public void setVerifier(ContentVerifier verifier) { if (null != verifier) _verifier = verifier; } /** * Attempts to find a version after the latest one we have, or times out. If * it times out, it simply leaves the object unchanged. * @return returns true if it found an update, false if not * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public boolean update(long timeout) throws ContentDecodingException, IOException { if (null == _baseName) { throw new IllegalStateException("Cannot retrieve an object without giving a name!"); } // Look for first segment of version after ours, or first version if we have none. ContentObject firstSegment = VersioningProfile.getFirstBlockOfLatestVersion(getVersionedName(), null, null, timeout, _handle.defaultVerifier(), _handle); if (null != firstSegment) { return update(firstSegment); } return false; } /** * The regular update does a call to do multi-hop get latest version -- i.e. it will try * multiple times to find the latest version of a piece of content, even if interposed caches * have something older. While that's great when you really need the latest, sometimes you are * happy with the latest available version available in your local ccnd cache; or you really * know there is only one version available and you don't want to try multiple times (and incur * a timeout) in an attempt to get a later version that does not exist. This call, updateAny, * claims to get "any" version available. In reality, it will do a single-hop latest version; * i.e. if there are two versions say in your local ccnd cache (or repo with nothing in the ccnd * cache), it will pull the later one. But * it won't move beyond those to find a newer version available at a writer, or to find a later * version in the repo than one in the ccnd cache. 
Use this if you know there is only one version * of something, or you want a fast path to the latest version where it really doesn't have to * be the "absolute" latest. * * Like all update methods, it will start from the version you've got -- so it is guaranteed to find * something after the current version this object knows about (if it has already found something), * and to time out and return false if there isn't anything later. */ public boolean updateAny(long timeout) throws ContentDecodingException, IOException { if (null == _baseName) { throw new IllegalStateException("Cannot retrieve an object without giving a name!"); } // Look for first segment of version after ours, or first version if we have none. ContentObject firstSegment = VersioningProfile.getFirstBlockOfAnyLaterVersion(getVersionedName(), null, null, timeout, _verifier, _handle); if (null != firstSegment) { return update(firstSegment); } return false; } public boolean updateAny() throws ContentDecodingException, IOException { return updateAny(SystemConfiguration.getDefaultTimeout()); } /** * Calls update(long) with the default timeout SystemConfiguration.getDefaultTimeout(). * @return see update(long). * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public boolean update() throws ContentDecodingException, IOException { return update(SystemConfiguration.getDefaultTimeout()); } /** * Load data into object. If name is versioned, load that version. If * name is not versioned, look for latest version. * @param name Name of object to read. * @param publisher Desired publisher, or null for any. * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. 
 */
public boolean update(ContentName name, PublisherPublicKeyDigest publisher) throws ContentDecodingException, IOException {
	Log.info("Updating object to {0}.", name);
	// A versioned input stream resolves either the exact version in name, or the latest version.
	CCNVersionedInputStream is = new CCNVersionedInputStream(name, publisher, _handle);
	return update(is);
}

/**
 * Load a stream starting with a specific object.
 * @param object first segment of the object, retrieved by other means
 * @throws ContentDecodingException if there is a problem decoding the object.
 * @throws IOException if there is an error setting up network backing store.
 */
public boolean update(ContentObject object) throws ContentDecodingException, IOException {
	CCNInputStream is = new CCNInputStream(object, getInputStreamFlags(), _handle);
	setInputStreamProperties(is);
	is.seek(0); // in case it wasn't the first segment
	return update(is);
}

/**
 * Updates the object from a CCNInputStream or one of its subclasses. Used predominantly
 * by internal methods, most clients should use update() or update(long). Exposed for
 * special-purpose use and experimentation.
 * @param inputStream Stream to read object from.
 * @return true if an update found, false if not.
 * @throws ContentDecodingException if there is a problem decoding the object.
 * @throws IOException if there is an error setting up network backing store.
 */
public synchronized boolean update(CCNInputStream inputStream) throws ContentDecodingException, IOException {
	// Allow subclasses to modify input stream processing prior to first read.
	setInputStreamProperties(inputStream);
	Tuple<ContentName, byte []> nameAndVersion = null;
	try {
		if (inputStream.isGone()) {
			// Content was published as deleted (GONE): clear data but record
			// version/publisher metadata from the deletion record.
			if (Log.isLoggable(Level.FINE))
				Log.fine("Reading from GONE stream: {0}", inputStream.getBaseName());
			_data = null;
			// This will have a final version and a segment
			nameAndVersion = VersioningProfile.cutTerminalVersion(inputStream.deletionInformation().name());
			_currentPublisher = inputStream.deletionInformation().signedInfo().getPublisherKeyID();
			_currentPublisherKeyLocator = inputStream.deletionInformation().signedInfo().getKeyLocator();
			_available = true;
			_isGone = true;
			_isDirty = false;
			_lastSaved = digestContent();
		} else {
			// Normal content: let superclass decode the wrapped data from the stream.
			super.update(inputStream);
			nameAndVersion = VersioningProfile.cutTerminalVersion(inputStream.getBaseName());
			_currentPublisher = inputStream.publisher();
			_currentPublisherKeyLocator = inputStream.publisherKeyLocator();
			_isGone = false;
		}
		_firstSegment = inputStream.getFirstSegment(); // preserve first segment
	} catch (NoMatchingContentFoundException nme) {
		if (Log.isLoggable(Level.INFO))
			Log.info("NoMatchingContentFoundException in update from input stream {0}, timed out before data was available.", inputStream.getBaseName());
		nameAndVersion = VersioningProfile.cutTerminalVersion(inputStream.getBaseName());
		_baseName = nameAndVersion.first();
		// used to fire off an updateInBackground here, to hopefully get a second
		// chance on scooping up the content. But that seemed likely to confuse
		// people and leave the object in an undetermined state. So allow caller
		// to manage that themselves.
		// not an error state, merely a not ready state.
		return false;
	} catch (LinkCycleException lce) {
		if (Log.isLoggable(Level.INFO))
			Log.info("Link cycle exception: {0}", lce.getMessage());
		setError(lce);
		throw lce;
	}
	_baseName = nameAndVersion.first();
	_currentVersionComponent = nameAndVersion.second();
	_currentVersionName = null; // cached if used
	_dereferencedLink = inputStream.getDereferencedLink(); // gets stack of links used, if any
	clearError();
	// Signal readers.
	newVersionAvailable(false);
	return true;
}

/**
 * Update this object in the background -- asynchronously. This call updates the
 * object a single time, after the first update (the requested version or the
 * latest version), the object will not self-update again unless requested.
 * To use, create an object using a write constructor, setting the data field
 * to null. Then call updateInBackground() to retrieve the object's data asynchronously.
 * To wait on data arrival, call either waitForData() or wait() on the object itself.
 * @throws IOException
 */
public void updateInBackground() throws IOException {
	updateInBackground(false);
}

/**
 * Update this object in the background -- asynchronously.
 * To use, create an object using a write constructor, setting the data field
 * to null. Then call updateInBackground() to retrieve the object's data asynchronously.
 * To wait for an update to arrive, call wait() on this object itself.
 * @param continuousUpdates If true, updates the
 * object continuously to the latest version available, a single time if it is false.
 * @throws IOException
 */
public void updateInBackground(boolean continuousUpdates) throws IOException {
	if (null == _baseName) {
		throw new IllegalStateException("Cannot retrieve an object without giving a name!");
	}
	// Look for latest version.
	updateInBackground(getVersionedName(), continuousUpdates, null);
}

/**
 * As updateInBackground(ContentName, boolean, UpdateListener), with no listener.
 */
public void updateInBackground(ContentName latestVersionKnown, boolean continuousUpdates) throws IOException {
	updateInBackground(latestVersionKnown, continuousUpdates, null);
}

/**
 * As updateInBackground(boolean), notifying the given listener on each update.
 */
public void updateInBackground(boolean continuousUpdates, UpdateListener listener) throws IOException {
	updateInBackground(getVersionedName(), continuousUpdates, listener);
}

/**
 * Update this object in the background -- asynchronously.
 * To use, create an object using a write constructor, setting the data field
 * to null. Then call updateInBackground() to retrieve the object's data asynchronously.
 * To wait for an update to arrive, call wait() on this object itself.
 * @param latestVersionKnown the name of the latest version we know of, or an unversioned
 * name if no version known
 * @param continuousUpdates If true, updates the
 * object continuously to the latest version available, a single time if it is false.
 * @throws IOException
 */
public synchronized void updateInBackground(ContentName latestVersionKnown, boolean continuousUpdates, UpdateListener listener) throws IOException {
	Log.info("updateInBackground: getting latest version after {0} in background.", latestVersionKnown);
	// Only one outstanding background interest at a time: cancel any previous one.
	cancelInterest();
	if (null != listener) {
		addListener(listener);
	}
	_continuousUpdates = continuousUpdates;
	_currentInterest = VersioningProfile.firstBlockLatestVersionInterest(latestVersionKnown, null);
	Log.info("updateInBackground: initial interest: {0}", _currentInterest);
	// This object is the interest handler; results arrive via its callback.
	_handle.expressInterest(_currentInterest, this);
}

/**
 * Cancel an outstanding updateInBackground().
*/ public synchronized void cancelInterest() { _continuousUpdates = false; if (null != _currentInterest) { _handle.cancelInterest(_currentInterest, this); } } public synchronized void addListener(UpdateListener listener) { if (null == _updateListeners) { _updateListeners = new HashSet<UpdateListener>(); } else if (_updateListeners.contains(listener)) { return; // don't re-add } _updateListeners.add(listener); } /** * Does this object already have this listener. Uses Object.equals * for comparison; so will only say yes if it has this *exact* listener * instance already registered. * @param listener * @return */ public synchronized boolean hasListener(UpdateListener listener) { if (null == _updateListeners) { return false; } return (_updateListeners.contains(listener)); } public void removeListener(UpdateListener listener) { if (null == _updateListeners) return; synchronized (this) { _updateListeners.remove(listener); } } public void clearListeners() { if (null == _updateListeners) return; synchronized(_updateListeners) { _updateListeners.clear(); } } /** * Save to existing name, if content is dirty. Update version. * This is the default form of save -- if the object has been told to use * a repository backing store, by either giving it a repository flow controller, * calling saveToRepository() on it for its first save, or specifying false * to a constructor that allows a raw argument, it will save to a repository. * Otherwise will perform a raw save. * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ public boolean save() throws ContentEncodingException, IOException { return saveInternal(null, false, null); } /** * Method for CCNFilterListeners to save an object in response to an Interest * callback. An Interest has already been received, so the object can output * one ContentObject as soon as one is ready. 
Ideally this Interest will have * been received on the CCNHandle the object is using for output. If the object * is not dirty, it will not be saved, and the Interest will not be consumed. * If the Interest does not match this object, the Interest will not be consumed; * it is up to the caller to ensure that the Interest would be matched by writing * this object. (If the Interest doesn't match, no initial block will be output * even if the object is saved; the object will wait for matching Interests prior * to writing its blocks.) */ public boolean save(Interest outstandingInterest) throws ContentEncodingException, IOException { return saveInternal(null, false, outstandingInterest); } /** * Save to existing name, if content is dirty. Saves to specified version. * This is the default form of save -- if the object has been told to use * a repository backing store, by either giving it a repository flow controller, * calling saveToRepository() on it for its first save, or specifying false * to a constructor that allows a raw argument, it will save to a repository. * Otherwise will perform a raw save. * @param version Version to save to. * @return true if object was saved, false if it was not (if it was not dirty). * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ public boolean save(CCNTime version) throws ContentEncodingException, IOException { return saveInternal(version, false, null); } /** * Save to existing name, if content is dirty. Saves to specified version. * Method for CCNFilterListeners to save an object in response to an Interest * callback. An Interest has already been received, so the object can output * one ContentObject as soon as one is ready. Ideally this Interest will have * been received on the CCNHandle the object is using for output. If the object * is not dirty, it will not be saved, and the Interest will not be consumed. 
 * If the Interest does not match this object, the Interest will not be consumed;
 * it is up to the caller to ensure that the Interest would be matched by writing
 * this object. (If the Interest doesn't match, no initial block will be output
 * even if the object is saved; the object will wait for matching Interests prior
 * to writing its blocks.)
 */
public boolean save(CCNTime version, Interest outstandingInterest) throws ContentEncodingException, IOException {
	return saveInternal(version, false, outstandingInterest);
}

/**
 * Save content to specific version. Internal form that performs actual save.
 * @param version If version is non-null, assume that is the desired
 * version. If not, set version based on current time.
 * @param gone Are we saving this content as gone or not.
 * @param outstandingInterest if non-null, an already-received Interest the first
 * block can be emitted in response to.
 * @return return Returns true if it saved data, false if it thought data was not dirty and didn't
 * save.
 * TODO allow freshness specification
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
protected synchronized boolean saveInternal(CCNTime version, boolean gone, Interest outstandingInterest)
		throws ContentEncodingException, IOException {
	if (null == _baseName) {
		throw new IllegalStateException("Cannot save an object without giving it a name!");
	}
	// move object to this name
	// need to make sure we get back the actual name we're using,
	// even if output stream does automatic versioning
	// probably need to refactor save behavior -- right now, internalWriteObject
	// either writes the object or not; we need to only make a new name if we do
	// write the object, and figure out if that's happened. Also need to make
	// parent behavior just write, put the dirty check higher in the state.
	if (!gone && !isDirty()) {
		Log.info("Object not dirty. Not saving.");
		return false;
	}
	if (!gone && (null == _data)) {
		// skip some of the prep steps that have side effects rather than getting this exception later from superclass
		throw new InvalidObjectException("No data to save!");
	}
	// Create the flow controller, if we haven't already.
	createFlowController();
	// This is the point at which we care if we don't have a flow controller
	if (null == _flowControl) {
		throw new IOException("Cannot create flow controller! Specified save type is " + _saveType + "!");
	}
	// Handle versioning ourselves to make name handling easier. VOS should respect it.
	// We might have been handed a _baseName that was versioned. For most general behavior,
	// have to treat it as a normal name and that we are supposed to put our own version
	// underneath it. To save as a specific version, need to use save(version).
	ContentName name = _baseName;
	if (null != version) {
		name = VersioningProfile.addVersion(_baseName, version);
	} else {
		name = VersioningProfile.addVersion(_baseName);
	}
	// DKS if we add the versioned name, we don't handle get latest version.
	// We re-add the baseName here in case an update has changed it.
	// TODO -- perhaps disallow updates for unrelated names.
	_flowControl.addNameSpace(_baseName);
	if (!gone) {
		// CCNVersionedOutputStream will version an unversioned name.
		// If it gets a versioned name, will respect it.
		// This will call startWrite on the flow controller.
		CCNVersionedOutputStream cos = new CCNVersionedOutputStream(name, _keyLocator, _publisher, contentType(), _keys, _flowControl);
		cos.setFreshnessSeconds(_freshnessSeconds);
		if (null != outstandingInterest) {
			cos.addOutstandingInterest(outstandingInterest);
		}
		save(cos); // superclass stream save. calls flush but not close on a wrapping
		// digest stream; want to make sure we end up with a single non-MHT signed
		// segment and no header on small objects
		cos.close();
		// Grab digest and segment number after close because for short objects there may not be
		// a segment generated until the close
		_firstSegment = cos.getFirstSegment();
	} else {
		// saving object as gone, currently this is always one empty segment so we don't use an OutputStream
		ContentName segmentedName = SegmentationProfile.segmentName(name, SegmentationProfile.BASE_SEGMENT );
		byte [] empty = new byte[0];
		byte [] finalBlockID = SegmentationProfile.getSegmentNumberNameComponent(SegmentationProfile.BASE_SEGMENT);
		ContentObject goneObject = ContentObject.buildContentObject(segmentedName, ContentType.GONE, empty, _publisher, _keyLocator, null, finalBlockID);
		// The segmenter in the stream does an addNameSpace of the versioned name. Right now
		// this not only adds the prefix (ignored) but triggers the repo start write.
		_flowControl.addNameSpace(name);
		_flowControl.startWrite(name, Shape.STREAM); // Streams take care of this for the non-gone case.
		_flowControl.put(goneObject);
		_firstSegment = goneObject;
		_flowControl.beforeClose();
		_flowControl.afterClose();
		_lastSaved = GONE_OUTPUT;
	}
	// Record the identity of what we just wrote so readers see consistent metadata.
	_currentPublisher = _firstSegment.signedInfo().getPublisherKeyID();
	_currentPublisherKeyLocator = _firstSegment.signedInfo().getKeyLocator();
	_currentVersionComponent = name.lastComponent();
	_currentVersionName = name;
	setDirty(false);
	_available = true;
	// Signal waiters/listeners that a new (saved) version exists.
	newVersionAvailable(true);
	Log.finest("Saved object {0} publisher {1} key locator {2}", name, _currentPublisher, _currentPublisherKeyLocator);
	return true;
}

/**
 * Convenience method to set the data and save it in a single operation.
 * @param data new data for object, set with setData
 * @return true if the object was saved, false if it was not dirty
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
public boolean save(E data) throws ContentEncodingException, IOException {
	return save(null, data);
}

/**
 * Convenience method to set the data and save it as a particular version in a single operation.
 * @param version the desired version
 * @param data new data for object, set with setData
 * @return true if the object was saved, false if it was not dirty
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
public synchronized boolean save(CCNTime version, E data) throws ContentEncodingException, IOException {
	setData(data);
	return save(version);
}

/**
 * Deprecated; use either object defaults or setRepositorySave() to indicate writes
 * should go to a repository, then call save() to write.
 * If raw=true or DEFAULT_RAW=true specified, this must be the first call to save made
 * for this object to force repository storage (overriding default).
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
@Deprecated
public synchronized boolean saveToRepository(CCNTime version) throws ContentEncodingException, IOException {
	if (null == _baseName) {
		throw new IllegalStateException("Cannot save an object without giving it a name!");
	}
	setSaveType(SaveType.REPOSITORY);
	return save(version);
}

/**
 * Deprecated; use either object defaults or setRepositorySave() to indicate writes
 * should go to a repository, then call save() to write.
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
@Deprecated
public boolean saveToRepository() throws ContentEncodingException, IOException {
	return saveToRepository((CCNTime)null);
}

/**
 * Deprecated; use either object defaults or setRepositorySave() to indicate writes
 * should go to a repository, then call save() to write.
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
@Deprecated
public boolean saveToRepository(E data) throws ContentEncodingException, IOException {
	return saveToRepository(null, data);
}

/**
 * Deprecated; use either object defaults or setRepositorySave() to indicate writes
 * should go to a repository, then call save() to write.
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
@Deprecated
public synchronized boolean saveToRepository(CCNTime version, E data) throws ContentEncodingException, IOException {
	setData(data);
	return saveToRepository(version);
}

/**
 * Save this object as GONE. Intended to mark the latest version, rather
 * than a specific version as GONE. So for now, require that name handed in
 * is *not* already versioned; throw an IOException if it is.
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
public synchronized boolean saveAsGone() throws ContentEncodingException, IOException {
	return saveAsGone(null);
}

/**
 * For use by CCNFilterListeners, saves a GONE object and emits an initial
 * block in response to an already-received Interest.
 * Save this object as GONE. Intended to mark the latest version, rather
 * than a specific version as GONE. So for now, require that name handed in
 * is *not* already versioned; throw an IOException if it is.
 * @throws IOException
 */
public synchronized boolean saveAsGone(Interest outstandingInterest) throws ContentEncodingException, IOException {
	if (null == _baseName) {
		throw new IllegalStateException("Cannot save an object without giving it a name!");
	}
	// Mark deleted locally, then write the GONE marker to the network/repo.
	_data = null;
	_isGone = true;
	setDirty(true);
	return saveInternal(null, true, outstandingInterest);
}

/**
 * Deprecated; use either object defaults or setRepositorySave() to indicate writes
 * should go to a repository, then call save() to write.
 * If raw=true or DEFAULT_RAW=true specified, this must be the first call to save made
 * for this object.
 * @throws ContentEncodingException if there is an error encoding the content
 * @throws IOException if there is an error reading the content from the network
 */
@Deprecated
public synchronized boolean saveToRepositoryAsGone() throws ContentEncodingException, IOException {
	setSaveType(SaveType.REPOSITORY);
	return saveAsGone();
}

/**
 * Turn off flow control for this object. Warning - calling this risks packet drops. It should only
 * be used for tests or other special circumstances in which
 * you "know what you are doing".
 */
public synchronized void disableFlowControl() {
	if (null != _flowControl)
		_flowControl.disable();
	// Remember the request so a lazily-created flow controller is also disabled.
	_disableFlowControlRequest = true;
}

/**
 * Used to signal waiters and listeners that a new version is available.
 * @param wasSave is a new version available because we were saved, or because
 * we found a new version on the network?
*/ protected void newVersionAvailable(boolean wasSave) { if (Log.isLoggable(Level.FINER)) { Log.finer("newVersionAvailable: New version of object available: {0}", getVersionedName()); } // by default signal all waiters this.notifyAll(); // and any registered listeners if (null != _updateListeners) { for (UpdateListener listener : _updateListeners) { listener.newVersionAvailable(this, wasSave); } } } /** * Will return immediately if this object already has data, otherwise * will wait indefinitely for the initial data to appear. */ public void waitForData() { if (available()) return; synchronized (this) { while (!available()) { try { wait(); } catch (InterruptedException e) { } } } } /** * Will wait for data to arrive. Callers should use * available() to determine whether data has arrived or not. * If data already available, will return immediately (in other * words, this is only useful to wait for the first update to * an object, or to ensure that it has data). To wait for later * updates, call wait() on the object itself. * @param timeout In milliseconds. If 0, will wait forever (if data does not arrive). 
*/ public void waitForData(long timeout) { if (available()) return; synchronized (this) { long startTime = System.currentTimeMillis(); boolean keepTrying = true; while (!available() && keepTrying) { // deal with spontaneous returns from wait() try { long waitTime = timeout - (System.currentTimeMillis() - startTime); if (waitTime > 0) wait(waitTime); else keepTrying = false; } catch (InterruptedException ie) {} } } } public boolean isGone() { return _isGone; } @Override protected byte [] digestContent() throws IOException { if (isGone()) { return GONE_OUTPUT; } return super.digestContent(); } @Override protected synchronized E data() throws ContentNotReadyException, ContentGoneException, ErrorStateException { if (isGone()) { throw new ContentGoneException("Content is gone!"); } return super.data(); } @Override public synchronized void setData(E newData) { _isGone = false; // clear gone, even if we're setting to null; only saveAsGone can set as gone super.setData(newData); } public synchronized CCNTime getVersion() throws IOException { if (isSaved()) return VersioningProfile.getVersionComponentAsTimestamp(getVersionComponent()); return null; } public synchronized VersionNumber getVersionNumber() throws IOException { if (isSaved()) return new VersionNumber(getVersionComponent()); return null; } public synchronized ContentName getBaseName() { return _baseName; } public CCNHandle getHandle() { return _handle; } public synchronized byte [] getVersionComponent() throws IOException { if (isSaved()) return _currentVersionComponent; return null; } /** * Returns the first segment number for this object. * @return The index of the first segment of stream data or null if no segments generated yet. 
*/ public Long firstSegmentNumber() { if (null != _firstSegment) { return SegmentationProfile.getSegmentNumber(_firstSegment.name()); } else { return null; } } /** * Returns the digest of the first segment of this object which may be used * to help identify object instance unambiguously. * * @return The digest of the first segment of this object if available, null otherwise */ public byte[] getFirstDigest() { // Do not attempt to force update here to leave control over whether reading // or writing with the object creator. The return value may be null if the // object is not in a state of having a first segment if (null != _firstSegment) { return _firstSegment.digest(); } else { return null; } } /** * Returns the first segment of this object. */ public ContentObject getFirstSegment() { return _firstSegment; } /** * If we traversed a link to get this object, make it available. */ public synchronized LinkObject getDereferencedLink() { return _dereferencedLink; } /** * Use only if you know what you are doing. */ public synchronized void setDereferencedLink(LinkObject dereferencedLink) { _dereferencedLink = dereferencedLink; } /** * Add a LinkObject to the stack we had to dereference to get here. */ public synchronized void pushDereferencedLink(LinkObject dereferencedLink) { if (null == dereferencedLink) { return; } if (null != _dereferencedLink) { if (null != dereferencedLink.getDereferencedLink()) { if (Log.isLoggable(Level.WARNING)) { Log.warning("Merging two link stacks -- {0} already has a dereferenced link from {1}. Behavior unpredictable.", dereferencedLink.getVersionedName(), dereferencedLink.getDereferencedLink().getVersionedName()); } } dereferencedLink.pushDereferencedLink(_dereferencedLink); } setDereferencedLink(dereferencedLink); } /** * If the object has been saved or read from the network, returns the (cached) versioned * name. Otherwise returns the base name. 
* @return */ public synchronized ContentName getVersionedName() { try { if (isSaved()) { if ((null == _currentVersionName) && (null != _currentVersionComponent)) // cache; only read lock necessary _currentVersionName = new ContentName(_baseName, _currentVersionComponent); return _currentVersionName; } return getBaseName(); } catch (IOException e) { if (Log.isLoggable(Level.WARNING)) Log.warning("Invalid state for object {0}, cannot get current version name: {1}", getBaseName(), e); return getBaseName(); } } public synchronized PublisherPublicKeyDigest getContentPublisher() throws IOException { if (isSaved()) return _currentPublisher; return null; } public synchronized KeyLocator getPublisherKeyLocator() throws IOException { if (isSaved()) return _currentPublisherKeyLocator; return null; } /** * Change the publisher information we use when we sign commits to this object. * Takes effect on the next save(). Useful for objects created with a read constructor, * but who want to override default publisher information. * @param signingKey indicates the identity we want to use to sign future writes to this * object. If null, will default to key manager's (user's) default key. * @param locator the key locator (key lookup location) information to attach to future * writes to this object. If null, will be the default value associated with the * chosen signing key. */ public synchronized void setOurPublisherInformation(PublisherPublicKeyDigest publisherIdentity, KeyLocator keyLocator) { _publisher = publisherIdentity; _keyLocator = keyLocator; } public synchronized Interest handleContent(ContentObject co, Interest interest) { try { boolean hasNewVersion = false; byte [][] excludes = null; try { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: " + _currentInterest + " retrieved " + co.name()); if (VersioningProfile.startsWithLaterVersionOf(co.name(), _currentInterest.name())) { // OK, we have something that is a later version of our desired object. 
// We're not sure it's actually the first content segment. hasNewVersion = true; if (VersioningProfile.isVersionedFirstSegment(_currentInterest.name(), co, null)) { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: Background updating of {0}, got first segment: {1}", getVersionedName(), co.name()); // Streams assume caller has verified. So we verify here. // TODO add support for settable verifiers if (!_verifier.verify(co)) { if (Log.isLoggable(Log.FAC_SIGNING, Level.WARNING)) { Log.warning(Log.FAC_SIGNING, "CCNNetworkObject: content object received from background update did not verify! Ignoring object: {0}", co.fullName()); } hasNewVersion = false; // TODO -- exclude this one by digest, otherwise we're going // to get it back! For now, just copy the top-level part of GLV // behavior and exclude this version component. This isn't the right // answer, malicious objects can exclude new versions. But it's not clear // if the right answer is to do full gLV here and let that machinery // handle things, pulling potentially multiple objects in a callback, // or we just have to wait for issue #100011, and the ability to selectively // exclude content digests. excludes = new byte [][]{co.name().component(_currentInterest.name().count())}; if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: got content for {0} that doesn't verify ({1}), excluding bogus version {2} as temporary workaround FIX WHEN POSSIBLE", _currentInterest.name(), co.fullName(), ContentName.componentPrintURI(excludes[0])); } else { update(co); } } else { // Have something that is not the first segment, like a repo write or a later segment. Go back // for first segment. ContentName latestVersionName = co.name().cut(_currentInterest.name().count() + 1); Log.info("updateInBackground: handleContent (network object): Have version information, now querying first segment of {0}", latestVersionName); // This should verify the first segment when we get it. 
update(latestVersionName, co.signedInfo().getPublisherKeyID()); } } else { excludes = new byte [][]{co.name().component(_currentInterest.name().count() - 1)}; if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: got content for {0} that doesn't match: {1}", _currentInterest.name(), co.name()); } } catch (IOException ex) { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: Exception {0}: {1} attempting to update based on object : {2}", ex.getClass().getName(), ex.getMessage(), co.name()); // alright, that one didn't work, try to go on. } if (hasNewVersion) { if (_continuousUpdates) { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: got a new version, continuous updates, calling updateInBackground recursively then returning null."); updateInBackground(true); } else { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: got a new version, not continuous updates, returning null."); _continuousUpdates = false; } // the updates above call newVersionAvailable return null; // implicit cancel of interest } else { if (null != excludes) { _currentInterest.exclude().add(excludes); } if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: no new version, returning new interest for expression: {0}", _currentInterest); return _currentInterest; } } catch (IOException ex) { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: Exception {0}: {1} attempting to request further updates : {2}", ex.getClass().getName(), ex.getMessage(), _currentInterest); return null; } } /** * Subclasses that need to write an object of a particular type can override. * DKS TODO -- verify type on read, modulo that ENCR overrides everything. * @return */ public ContentType contentType() { return ContentType.DATA; } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = prime * result + ((_baseName == null) ? 
0 : _baseName.hashCode()); result = prime * result + ((_currentPublisher == null) ? 0 : _currentPublisher .hashCode()); result = prime * result + Arrays.hashCode(_currentVersionComponent); return result; } @SuppressWarnings("unchecked") // cast to obj<E> @Override public boolean equals(Object obj) { // should hold read lock? if (this == obj) return true; if (!super.equals(obj)) return false; if (getClass() != obj.getClass()) return false; CCNNetworkObject<E> other = (CCNNetworkObject<E>) obj; if (_baseName == null) { if (other._baseName != null) return false; } else if (!_baseName.equals(other._baseName)) return false; if (_currentPublisher == null) { if (other._currentPublisher != null) return false; } else if (!_currentPublisher.equals(other._currentPublisher)) return false; if (!Arrays.equals(_currentVersionComponent, other._currentVersionComponent)) return false; return true; } @Override public String toString() { try { if (isSaved()) { return getVersionedName() + ": " + (isGone() ? "GONE" : "\nData:" + data()) + "\n Publisher: " + getContentPublisher() + "\n Publisher KeyLocator: " + getPublisherKeyLocator() + "\n"; } else if (available()) { return getBaseName() + " (unsaved): " + data(); } else { return getBaseName() + " (unsaved, no data)"; } } catch (IOException e) { Log.info("Unexpected exception retrieving object information: {0}", e); return getBaseName() + ": unexpected exception " + e; } } }
package org.jpos.iso.packager; import org.jpos.iso.*; import org.jpos.iso.header.BaseHeader; import org.jpos.util.LogEvent; import org.jpos.util.LogSource; import org.jpos.util.Logger; import org.xml.sax.Attributes; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; import org.xml.sax.helpers.XMLReaderFactory; import java.io.*; import java.util.Stack; /** * packs/unpacks ISOMsgs into XML representation * * @author apr@cs.com.uy * @version $Id$ * @see ISOPackager */ public class XMLPackager extends DefaultHandler implements ISOPackager, LogSource { protected Logger logger = null; protected String realm = null; private ByteArrayOutputStream out; private PrintStream p; private XMLReader reader = null; private Stack stk; public static final String ISOMSG_TAG = "isomsg"; public static final String ISOFIELD_TAG = "field"; public static final String ID_ATTR = "id"; public static final String VALUE_ATTR = "value"; public static final String TYPE_ATTR = "type"; public static final String TYPE_BINARY = "binary"; public static final String TYPE_BITMAP = "bitmap"; public static final String HEADER_TAG = "header"; public static final String ENCODING_ATTR = "encoding"; public static final String ASCII_ENCODING= "ascii"; public XMLPackager() throws ISOException { super(); out = new ByteArrayOutputStream(); try { p = new PrintStream(out, false, "utf-8"); } catch (UnsupportedEncodingException ignored) { // utf-8 is a supported encoding } stk = new Stack(); try { reader = createXMLReader(); } catch (Exception e) { throw new ISOException (e.toString()); } } public byte[] pack (ISOComponent c) throws ISOException { LogEvent evt = new LogEvent (this, "pack"); try { if (!(c instanceof ISOMsg)) throw new ISOException ("cannot pack "+c.getClass()); ISOMsg m = (ISOMsg) c; byte[] b; synchronized (this) { m.setDirection(0); // avoid "direction=xxxxxx" in XML msg m.dump (p, ""); b = out.toByteArray(); 
out.reset(); } if (logger != null) evt.addMessage (m); return b; } catch (ISOException e) { evt.addMessage (e); throw e; } finally { Logger.log(evt); } } public synchronized int unpack (ISOComponent c, byte[] b) throws ISOException { LogEvent evt = new LogEvent (this, "unpack"); try { if (!(c instanceof ISOMsg)) throw new ISOException ("Can't call packager on non Composite"); while (!stk.empty()) // purge from possible previous error stk.pop(); InputSource src = new InputSource (new ByteArrayInputStream(b)); reader.parse (src); if (stk.empty()) throw new ISOException ("error parsing"); ISOMsg m = (ISOMsg) c; ISOMsg m1 = (ISOMsg) stk.pop(); m.merge (m1); m.setHeader (m1.getHeader()); if (logger != null) evt.addMessage (m); return b.length; } catch (ISOException e) { evt.addMessage (e); throw e; } catch (IOException e) { evt.addMessage (e); throw new ISOException (e.toString()); } catch (SAXException e) { evt.addMessage (e); throw new ISOException (e.toString()); } finally { Logger.log (evt); } } public synchronized void unpack (ISOComponent c, InputStream in) throws ISOException, IOException { LogEvent evt = new LogEvent (this, "unpack"); try { if (!(c instanceof ISOMsg)) throw new ISOException ("Can't call packager on non Composite"); while (!stk.empty()) // purge from possible previous error stk.pop(); reader.parse (new InputSource (in)); if (stk.empty()) throw new ISOException ("error parsing"); ISOMsg m = (ISOMsg) c; m.merge ((ISOMsg) stk.pop()); if (logger != null) evt.addMessage (m); } catch (ISOException e) { evt.addMessage (e); throw e; } catch (SAXException e) { evt.addMessage (e); throw new ISOException (e.toString()); } finally { Logger.log (evt); } } public void startElement (String ns, String name, String qName, Attributes atts) throws SAXException { int fieldNumber = -1; try { String id = atts.getValue(ID_ATTR); if (id != null) { try { fieldNumber = Integer.parseInt (id); } catch (NumberFormatException ignored) { } } if (name.equals (ISOMSG_TAG)) { if 
(fieldNumber >= 0) { if (stk.empty()) throw new SAXException ("inner without outter"); ISOMsg inner = new ISOMsg(fieldNumber); ((ISOMsg)stk.peek()).set (inner); stk.push (inner); } else { stk.push (new ISOMsg(0)); } } else if (name.equals (ISOFIELD_TAG)) { ISOMsg m = (ISOMsg) stk.peek(); String value = atts.getValue(VALUE_ATTR); String type = atts.getValue(TYPE_ATTR); value = value == null ? "" : value; if (id == null || value == null) throw new SAXException ("invalid field"); ISOComponent ic; if (TYPE_BINARY.equals (type)) { ic = new ISOBinaryField ( fieldNumber, ISOUtil.hex2byte ( value.getBytes(), 0, value.length()/2 ) ); } else { ic = new ISOField (fieldNumber, value); } m.set (ic); stk.push (ic); } else if (HEADER_TAG.equals (name)) { BaseHeader bh = new BaseHeader(); bh.setAsciiEncoding (ASCII_ENCODING.equalsIgnoreCase(atts.getValue(ENCODING_ATTR))); stk.push (bh); } } catch (ISOException e) { throw new SAXException ("ISOException unpacking "+fieldNumber); } } public void characters (char ch[], int start, int length) { Object obj = stk.peek(); if (obj instanceof ISOField) { ISOField f = (ISOField) obj; if (f.getValue().toString().length() == 0) { try { f.setValue (new String (ch, start, length)); } catch (ISOException e) { try { f.setValue (e.getMessage()); } catch (ISOException ignored) { // giving up } } } } else if (obj instanceof BaseHeader) { BaseHeader bh = (BaseHeader) obj; String s = new String(ch,start,length); if (bh.isAsciiEncoding()) { bh.unpack (s.getBytes()); } else { bh.unpack (ISOUtil.hex2byte (s)); } } } public void endElement (String ns, String name, String qname) throws SAXException { if (name.equals (ISOMSG_TAG)) { ISOMsg m = (ISOMsg) stk.pop(); if (stk.empty()) stk.push (m); // push outter message } else if (ISOFIELD_TAG.equals (name)) { stk.pop(); } else if (HEADER_TAG.equals (name)) { BaseHeader h = (BaseHeader) stk.pop(); ISOMsg m = (ISOMsg) stk.peek (); m.setHeader (h); } } public String getFieldDescription(ISOComponent m, int 
fldNumber) { return "<notavailable/>"; } public void setLogger (Logger logger, String realm) { this.logger = logger; this.realm = realm; } public String getRealm () { return realm; } public Logger getLogger() { return logger; } public ISOMsg createISOMsg () { return new ISOMsg(); } public String getDescription () { return getClass().getName(); } private XMLReader createXMLReader () throws SAXException { XMLReader reader; try { reader = XMLReaderFactory.createXMLReader(); } catch (SAXException e) { reader = XMLReaderFactory.createXMLReader ( System.getProperty( "org.xml.sax.driver", "org.apache.crimson.parser.XMLReaderImpl" ) ); } reader.setFeature ("http://xml.org/sax/features/validation",false); reader.setContentHandler(this); reader.setErrorHandler(this); return reader; } }
package org.devdom.tracker.model.dao; import facebook4j.Facebook; import facebook4j.FacebookException; import facebook4j.FacebookFactory; import facebook4j.Post; import facebook4j.Reading; import facebook4j.ResponseList; import facebook4j.conf.ConfigurationBuilder; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import org.devdom.client.facebook.FBConnect; import org.devdom.tracker.model.dto.FacebookPost; import org.devdom.tracker.util.Configuration; public class FacebookDao { private static final long serialVersionUID = 1L; private final EntityManagerFactory emf = Persistence.createEntityManagerFactory("jpa"); private static final ConfigurationBuilder cb = Configuration.getFacebookConfig(); public FacebookDao(){ } public EntityManager getEntityManager(){ return emf.createEntityManager(Configuration.JPAConfig()); } public void syncInformation(String groupId){ EntityManager em = getEntityManager(); Facebook fb = new FacebookFactory(cb.build()).getInstance(); try { em.getTransaction().begin(); Reading reading = new Reading(); reading.limit(Configuration.POST_LIMIT); ResponseList<Post> group = fb.getGroupFeed(groupId,reading); List<Post> posts = group.subList(1,group.size()); List<FacebookPost> list = em.createNamedQuery("FacebookPost.findAll").getResultList(); posts.stream().forEach((post) -> { FacebookPost newPost = new FacebookPost(); newPost.setPostId(post.getId().split("_")[1]); newPost.setFromId(post.getFrom().getId()); newPost.setCreationDate(post.getCreatedTime()); newPost.setLikeCount(post.getLikes().size()); newPost.setMessage(post.getMessage()); boolean exists = false; for(FacebookPost postSaved : list){ if(postSaved.getFromId().equals(newPost.getFromId())){ exists = true; break; } } if(!exists) em.persist(newPost); }); em.getTransaction().commit(); } catch (FacebookException ex) { 
Logger.getLogger(FBConnect.class.getName()).log(Level.SEVERE, null, ex); } } }
package org.elasticsearch.license.core;

import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.joda.time.MutableDateTime;
import org.elasticsearch.common.joda.time.format.DateTimeFormatter;

/**
 * Helpers for converting {@code yyyy-MM-dd} date strings into epoch
 * milliseconds, always interpreting the input in UTC.
 */
public class DateUtils {

    private static final FormatDateTimeFormatter DATE_FORMAT = Joda.forPattern("yyyy-MM-dd");

    private static final DateTimeFormatter UTC_PARSER = DATE_FORMAT.parser().withZoneUTC();

    /**
     * @param date a {@code yyyy-MM-dd} date string
     * @return the millisecond instant at which the given day ends, i.e. the
     *         first instant of the following day (UTC)
     */
    public static long endOfTheDay(String date) {
        MutableDateTime day = UTC_PARSER.parseMutableDateTime(date);
        day.dayOfMonth().roundCeiling();   // round up to the start of the next day
        return day.getMillis();
    }

    /**
     * @param date a {@code yyyy-MM-dd} date string
     * @return the millisecond instant at which the given day begins (UTC)
     */
    public static long beginningOfTheDay(String date) {
        return UTC_PARSER.parseDateTime(date).getMillis();
    }
}
package org.gbif.nameparser; import org.gbif.api.model.checklistbank.ParsedName; import org.gbif.api.vocabulary.Rank; import org.gbif.utils.file.FileUtils; import java.io.IOException; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.annotation.Nullable; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.Lists; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Core parser class of the name parser that tries to take a clean name into its pieces by using regular expressions. * It runs the actual regex matching in another thread that stops whenever the configured timeout is reached. */ public class NormalisedNameParser { private static Logger LOG = LoggerFactory.getLogger(NormalisedNameParser.class); private static final ExecutorService THREAD_POOL = Executors.newCachedThreadPool(); private final long timeout; // max parsing time in milliseconds public NormalisedNameParser(long timeout) { this.timeout = timeout; } // name parsing protected static final String NAME_LETTERS = "A-ZÏËÖÜÄÉÈČÁÀÆŒ"; protected static final String name_letters = "a-zïëöüäåéèčáàæœ"; protected static final String AUTHOR_LETTERS = NAME_LETTERS + "\\p{Lu}"; // upper case unicode letter, not numerical // (\W is alphanum) protected static final String author_letters = name_letters + "\\p{Ll}"; // lower case unicode letter, not numerical // (\W is alphanum) protected static final String AUTHOR_PREFIXES = "(?:[vV](?:an)(?:[ -](?:den|der) )? ?|von[ -](?:den |der |dem )?|(?:del|Des|De|de|di|Di|da|N)[`' _]|(?:de )?(?:la|le) |d'|D'|Mac|Mc|Le|St\\.? 
?|Ou|O')"; protected static final String AUTHOR = "(?:" + // author initials "(?:" + "(?:[" + AUTHOR_LETTERS + "]{1,3}\\.?[ -]?){0,3}" + // or full first name "|[" + AUTHOR_LETTERS + "][" + author_letters + "?]{3,}" + " )?" + // common prefixes AUTHOR_PREFIXES + "?" + // only allow v. in front of Capital Authornames - if included in AUTHOR_PREFIXES parseIgnoreAuthors fails "(?:v\\. )?" + // regular author name "[" + AUTHOR_LETTERS + "]+[" + author_letters + "?]*\\.?" + // potential double names, e.g. Solms-Laub. "(?:(?:[- ](?:de|da|du)?[- ]?)[" + AUTHOR_LETTERS + "]+[" + author_letters + "?]*\\.?)?" + // common name suffices (ms=manuscript, not yet published) "(?: ?(?:f|fil|j|jr|jun|junior|sr|sen|senior|ms)\\.?)?" + // at last there might be 2 well known sanction authors for fungus, see POR-2454 "(?: *: *(?:Pers|Fr)\\.?)?" + ")"; protected static final String AUTHOR_TEAM = AUTHOR + "?(?:(?: ?ex\\.? | & | et | in |, ?|; ?|\\.)(?:" + AUTHOR + "|al\\.?))*"; protected static final Pattern AUTHOR_TEAM_PATTERN = Pattern.compile("^" + AUTHOR_TEAM + "$"); protected static final String YEAR = "[12][0-9][0-9][0-9?][abcdh?]?(?:[/-][0-9]{1,4})?"; // protected static final String YEAR_RANGE = YEAR+"(?: ?-? ?"+YEAR+")?"; protected static final String RANK_MARKER_SPECIES = "(?:notho)?(?:" + StringUtils.join(Rank.RANK_MARKER_MAP_INFRASPECIFIC.keySet(), "|") + "|agg)\\.?"; private static final Function<Rank,String> REMOVE_RANK_MARKER = new Function<Rank, String>() { @Override public String apply(Rank rank) { return rank.getMarker().replaceAll("\\.", "\\\\."); } }; protected static final String RANK_MARKER_MICROBIAL = "(?:bv\\.|ct\\.|f\\. 
?sp\\.|" + StringUtils.join(Lists.transform(Lists.newArrayList(Rank.INFRASUBSPECIFIC_MICROBIAL_RANKS), REMOVE_RANK_MARKER ), "|") + ")"; protected static final String EPHITHET_PREFIXES = "van|novae"; protected static final String GENETIC_EPHITHETS = "bacilliform|coliform|coryneform|cytoform|chemoform|biovar|serovar|genomovar|agamovar|cultivar|genotype|serotype|subtype|ribotype|isolate"; protected static final String __EPHITHET_UNALLOWED_ENDINGS = "\\bex|var|type|form"; protected static final String EPHITHET = "(?:[0-9]+-|[doml]')?" + "(?:(?:" + EPHITHET_PREFIXES + ") [a-z])?" + "[" + name_letters + "+-]{1,}(?<! d)[" + name_letters + "]" // avoid epithets ending with the unallowed endings, e.g. serovar + "(?<!(?:\\bex|"+GENETIC_EPHITHETS+"))(?=\\b)"; protected static final String MONOMIAL = "[" + NAME_LETTERS + "](?:\\.|[" + name_letters + "]+)(?:-[" + NAME_LETTERS + "]?[" + name_letters + "]+)?"; // a pattern matching typical latin word endings. Helps identify name parts from authors private static final Pattern LATIN_ENDINGS; static { try { List<String> endings = FileUtils.streamToList(FileUtils.classpathStream("latin-endings.txt")); LATIN_ENDINGS = Pattern.compile("(" + Joiner.on('|').skipNulls().join(endings) + ")$"); } catch (IOException e) { throw new IllegalStateException("Failed to read latin-endings.txt from classpath resources", e); } } protected static final String INFRAGENERIC = "(?:" + "\\( ?([" + NAME_LETTERS + "][" + name_letters + "-]+) ?\\)" + "|" + "(" + StringUtils .join(Rank.RANK_MARKER_MAP_INFRAGENERIC.keySet(), "|") + ")\\.? ?([" + NAME_LETTERS + "][" + name_letters + "-]+)" + ")"; protected static final String RANK_MARKER_ALL = "(notho)? *(" + StringUtils.join(Rank.RANK_MARKER_MAP.keySet(), "|") + ")\\.?"; private static final Pattern RANK_MARKER_ONLY = Pattern.compile("^" + RANK_MARKER_ALL + "$"); // main name matcher public static final Pattern CANON_NAME_IGNORE_AUTHORS = Pattern.compile("^" + // #1 genus/monomial "(×?" 
+ MONOMIAL + ")" + // #2 or #4 subgenus/section with #3 infrageneric rank marker "(?:(?<!ceae) " + INFRAGENERIC + ")?" + // catch author name prefixes just to ignore them so they dont become wrong epithets "(?: " + AUTHOR_PREFIXES + ")?" + // #5 species "(?: (×?" + EPHITHET + "))?" + // catch author name prefixes just to ignore them so they dont become wrong epithets "(?: " + AUTHOR_PREFIXES + ")?" + "(?:" + // either directly a infraspecific epitheton or a author but then mandate rank marker "(?:" + // anything in between ".*" + // #6 infraspecies rank "( " + RANK_MARKER_SPECIES + "[ .])" + // #7 infraspecies epithet "(×?" + EPHITHET + ")" + ")|" + // #8 infraspecies epithet " (×?" + EPHITHET + ")" + ")?" + "(?: " + // #9 microbial rank "(" + RANK_MARKER_MICROBIAL + ")[ .]" + // #10 microbial infrasubspecific epithet "(\\S+)" + ")?"); public static final Pattern NAME_PATTERN = Pattern.compile("^" + // #1 genus/monomial "(×?" + MONOMIAL + ")" + // #2 or #4 subgenus/section with #3 infrageneric rank marker "(?:(?<!ceae) " + INFRAGENERIC + ")?" + // #5 species "(?: (×?" + EPHITHET + "))?" + "(?:" + "(?:" + // #6 strip out intermediate, irrelevant authors or infraspecific ranks in case of quadrinomials "( .*?)?" + // #7 infraspecies rank "( " + RANK_MARKER_SPECIES + ")" + ")?" + // #8 infraspecies epitheton "(?: (×?\"?" + EPHITHET + "\"?))" + ")?" + "(?: " + // #9 microbial rank "(" + RANK_MARKER_MICROBIAL + ")[ .]" + // #10 microbial infrasubspecific epithet "(\\S+)" + ")?" + // #11 entire authorship incl basionyms and year "(,?" + "(?: ?\\(" + // #12 basionym authors "(" + AUTHOR_TEAM + ")?" + // #13 basionym year ",?( ?" + YEAR + ")?" + "\\))?" + // #14 authors "( " + AUTHOR_TEAM + ")?" + // #15 year with or without brackets "(?: ?\\(?,? ?(" + YEAR + ")\\)?)?" 
// close the optional authorship group and anchor the match at end of input
      + ")" + "$");

  /**
   * Runs the (potentially catastrophic) NAME_PATTERN match so it can be executed on a
   * worker thread with a timeout; see parseNormalisedName below.
   */
  private class MatcherCallable implements Callable<Matcher> {
    private final String scientificName;

    MatcherCallable(String scientificName) {
      this.scientificName = scientificName;
    }

    @Override
    public Matcher call() throws Exception {
      Matcher matcher = NAME_PATTERN.matcher(scientificName);
      // find() is invoked here so the (expensive) match happens on the worker thread;
      // callers then read groups. Reading a group before a successful match throws
      // IllegalStateException, which parseNormalisedName treats as "no match".
      matcher.find();
      return matcher;
    }
  }

  /**
   * Tries to parse a name string with the full regular expression.
   * In very few, extreme cases names with very long authorships might cause the regex to never finish or take hours
   * we run this parsing in a separate thread that can be stopped if it runs too long.
   * @param cn the ParsedName instance populated with the extracted name parts
   * @param scientificName the raw name string to parse
   * @param rank the rank of the name if it is known externally. Helps identifying infrageneric names vs bracket authors
   * @return true if the name could be parsed, false in case of failure
   */
  public boolean parseNormalisedName(ParsedName cn, String scientificName, @Nullable Rank rank) {
    LOG.debug("Parse normed name string: {}", scientificName);
    FutureTask<Matcher> task = new FutureTask<Matcher>(new MatcherCallable(scientificName));
    THREAD_POOL.execute(task);
    try {
      // bounded wait: abandons pathological regex backtracking after `timeout` ms
      Matcher matcher = task.get(timeout, TimeUnit.MILLISECONDS);
      // only accept the parse when the pattern consumed the entire input
      if (matcher.group(0).equals(scientificName)) {
        if (LOG.isDebugEnabled()) {
          logMatcher(matcher);
        }
        cn.setGenusOrAbove(StringUtils.trimToNull(matcher.group(1)));
        boolean bracketSubrankFound = false;
        if (matcher.group(2) != null) {
          // infrageneric given in parentheses, e.g. "Genus (Subgenus)"
          bracketSubrankFound = true;
          cn.setInfraGeneric(StringUtils.trimToNull(matcher.group(2)));
        } else if (matcher.group(4) != null) {
          // infrageneric given with an explicit rank marker; normalise to trailing dot
          String rankMarker = StringUtils.trimToNull(matcher.group(3));
          if (!rankMarker.endsWith(".")) {
            rankMarker = rankMarker + ".";
          }
          cn.setRankMarker(rankMarker);
          cn.setInfraGeneric(StringUtils.trimToNull(matcher.group(4)));
        }
        cn.setSpecificEpithet(StringUtils.trimToNull(matcher.group(5)));
        // #6 is filling authors or ranks in the middle not stored in ParsedName
        if (matcher.group(7) != null && matcher.group(7).length() > 1) {
          cn.setRankMarker(StringUtils.trimToNull(matcher.group(7)));
        }
        cn.setInfraSpecificEpithet(StringUtils.trimToNull(matcher.group(8)));

        // microbial ranks
        if (matcher.group(9) != null) {
          cn.setRankMarker(matcher.group(9));
          cn.setInfraSpecificEpithet(matcher.group(10));
        }

        // #11 is entire authorship, not stored in ParsedName
        cn.setBracketAuthorship(StringUtils.trimToNull(matcher.group(12)));
        if (bracketSubrankFound && infragenericIsAuthor(cn, rank)) {
          // rather an author than a infrageneric rank. Swap
          cn.setBracketAuthorship(cn.getInfraGeneric());
          cn.setInfraGeneric(null);
          LOG.debug("swapped subrank with bracket author: {}", cn.getBracketAuthorship());
        }
        // length > 2 guards against capturing stray punctuation instead of a real year
        if (matcher.group(13) != null && matcher.group(13).length() > 2) {
          String yearAsString = matcher.group(13).trim();
          cn.setBracketYear(yearAsString);
        }
        cn.setAuthorship(StringUtils.trimToNull(matcher.group(14)));
        if (matcher.group(15) != null && matcher.group(15).length() > 2) {
          String yearAsString = matcher.group(15).trim();
          cn.setYear(yearAsString);
        }

        // make sure (infra)specific epithet is not a rank marker!
        lookForIrregularRankMarker(cn);
        // 2 letter epitheta can also be author prefixes - check that programmatically, not in regex
        checkEpithetVsAuthorPrefx(cn);

        // if no rank was parsed but given externally use it!
        if (cn.getRankMarker() == null && rank != null) {
          cn.setRank(rank);
        }
        return true;
      }
    } catch (InterruptedException e) {
      // NOTE(review): the interrupt flag is swallowed here; consider calling
      // Thread.currentThread().interrupt() so callers can observe cancellation.
      LOG.warn("InterruptedException for name: {}", scientificName, e);
    } catch (ExecutionException e) {
      LOG.warn("ExecutionException for name: {}", scientificName, e);
    } catch (IllegalStateException e) {
      // we simply had no match
    } catch (TimeoutException e) {
      // timeout
      LOG.info("Parsing timeout for name: {}", scientificName);
    }
    return false;
  }

  /**
   * Decides whether a parsed infrageneric part is more likely an author name:
   * only considered when no bracket author and no species epithet were found, and
   * either the externally supplied rank contradicts an infrageneric placement, or
   * (with no rank given) the infrageneric lacks a typical latin word ending.
   */
  private static boolean infragenericIsAuthor(ParsedName pn, Rank rank) {
    return pn.getBracketAuthorship() == null && pn.getSpecificEpithet() == null
        && (rank != null && !(rank.isInfrageneric() && !rank.isSpeciesOrBelow())
            //|| pn.getInfraGeneric().contains(" ")
            || rank == null && !LATIN_ENDINGS.matcher(pn.getInfraGeneric()).find());
  }

  /**
   * Parses the canonical name parts with CANON_NAME_IGNORE_AUTHORS, deliberately
   * skipping all authorship information.
   * @param cn the ParsedName instance populated with the extracted name parts
   * @param scientificName the raw name string to parse
   * @param rank the rank of the name if it is known externally. Helps identifying infrageneric names vs bracket authors
   * @return true if the name could be parsed, false in case of failure
   */
  public boolean parseNormalisedNameIgnoreAuthors(ParsedName cn, String scientificName, @Nullable Rank rank) {
    LOG.debug("Parse normed name string ignoring authors: {}", scientificName);

    // match for canonical
    Matcher matcher = CANON_NAME_IGNORE_AUTHORS.matcher(scientificName);
    boolean matchFound = matcher.find();
    if (matchFound) {
      if (LOG.isDebugEnabled()) {
        logMatcher(matcher);
      }
      cn.setGenusOrAbove(StringUtils.trimToNull(matcher.group(1)));
      if (matcher.group(2) != null) {
        // subrank in paranthesis. Not an author?
        cn.setInfraGeneric(StringUtils.trimToNull(matcher.group(2)));
        if (infragenericIsAuthor(cn, rank)) {
          // rather an author...
cn.setInfraGeneric(null);
        }
      } else if (matcher.group(4) != null) {
        // infrageneric with rank indicator given
        String rankMarker = StringUtils.trimToNull(matcher.group(3));
        cn.setRankMarker(rankMarker);
        cn.setInfraGeneric(StringUtils.trimToNull(matcher.group(4)));
      }
      cn.setSpecificEpithet(StringUtils.trimToNull(matcher.group(5)));
      // #6 infraspecies rank marker (length > 1 filters single stray characters)
      if (matcher.group(6) != null && matcher.group(6).length() > 1) {
        cn.setRankMarker(matcher.group(6));
      }
      // #7 is the marked, #8 the unmarked infraspecific epithet; prefer the marked one
      if (matcher.group(7) != null && matcher.group(7).length() >= 2) {
        setCanonicalInfraSpecies(cn, matcher.group(7));
      } else {
        setCanonicalInfraSpecies(cn, matcher.group(8));
      }
      // #9/#10 microbial rank and its infrasubspecific epithet override the above
      if (matcher.group(9) != null) {
        cn.setRankMarker(matcher.group(9));
        cn.setInfraSpecificEpithet(matcher.group(10));
      }

      // make sure (infra)specific epithet is not a rank marker!
      lookForIrregularRankMarker(cn);
      return true;
    }
    return false;
  }

  /**
   * Stores an infraspecific epithet on the parsed name, ignoring null values and the
   * taxonomic qualifiers "sec"/"sensu" which are not epithets.
   */
  private static void setCanonicalInfraSpecies(ParsedName pn, String epi) {
    if (epi == null || epi.equalsIgnoreCase("sec") || epi.equalsIgnoreCase("sensu")) {
      return;
    }
    pn.setInfraSpecificEpithet(StringUtils.trimToNull(epi));
  }

  /**
   * if no rank marker is set yet inspect epitheta for wrongly placed rank markers and modify parsed name accordingly.
   * This is sometimes the case for informal names like: Coccyzus americanus ssp.
   *
   * @param cn the already parsed name
   */
  private void lookForIrregularRankMarker(ParsedName cn) {
    if (cn.getRankMarker() == null) {
      if (cn.getInfraSpecificEpithet() != null) {
        Matcher m = RANK_MARKER_ONLY.matcher(cn.getInfraSpecificEpithet());
        if (m.find()) {
          // we found a rank marker, make it one
          cn.setRankMarker(cn.getInfraSpecificEpithet());
          cn.setInfraSpecificEpithet(null);
        }
      } else if (cn.getSpecificEpithet() != null) {
        Matcher m = RANK_MARKER_ONLY.matcher(cn.getSpecificEpithet());
        if (m.find()) {
          // we found a rank marker, make it one
          cn.setRankMarker(cn.getSpecificEpithet());
          cn.setSpecificEpithet(null);
        }
      }
    }
  }

  /**
   * 2 letter epitheta can also be author prefixes - check that programmatically, not in regex
   */
  // NOTE(review): method name has a typo ("Prefx"); left unchanged here because it is
  // called elsewhere in this class.
  private void checkEpithetVsAuthorPrefx(ParsedName cn) {
    if (cn.getRankMarker() == null) {
      if (cn.getInfraSpecificEpithet() != null) {
        // might be subspecies without rank marker
        // or short authorship prefix in epithet. test
        String extendedAuthor = cn.getInfraSpecificEpithet() + " " + cn.getAuthorship();
        Matcher m = AUTHOR_TEAM_PATTERN.matcher(extendedAuthor);
        if (m.find()) {
          // matches author. Prefer that
          LOG.debug("use infraspecific epithet as author prefix");
          cn.setInfraSpecificEpithet(null);
          cn.setAuthorship(extendedAuthor);
        }
      } else {
        // might be monomial with the author prefix erroneously taken as the species epithet
        // NOTE(review): getSpecificEpithet() may be null here (only the infraspecific
        // branch checked for null), which would prepend the literal "null " — confirm.
        String extendedAuthor = cn.getSpecificEpithet() + " " + cn.getAuthorship();
        Matcher m = AUTHOR_TEAM_PATTERN.matcher(extendedAuthor);
        if (m.find()) {
          // matches author. Prefer that
          LOG.debug("use specific epithet as author prefix");
          cn.setSpecificEpithet(null);
          cn.setAuthorship(extendedAuthor);
        }
      }
    }
  }

  /**
   * Debug helper: logs every capture group (0..groupCount) of a successful match.
   */
  private void logMatcher(Matcher matcher) {
    int i = -1;
    while (i < matcher.groupCount()) {
      i++;
      LOG.debug(" {}: >{}<", i, matcher.group(i));
    }
  }
}
package org.javacs.completion; import com.sun.source.tree.ClassTree; import com.sun.source.tree.CompilationUnitTree; import com.sun.source.tree.MemberReferenceTree; import com.sun.source.tree.MemberSelectTree; import com.sun.source.tree.MethodTree; import com.sun.source.tree.Scope; import com.sun.source.tree.SwitchTree; import com.sun.source.tree.Tree; import com.sun.source.util.TreePath; import com.sun.source.util.Trees; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.function.Predicate; import java.util.logging.Logger; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.ArrayType; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeVariable; import org.javacs.CompileTask; import org.javacs.CompilerProvider; import org.javacs.CompletionData; import org.javacs.FileStore; import org.javacs.JsonHelper; import org.javacs.ParseTask; import org.javacs.SourceFileObject; import org.javacs.StringSearch; import org.javacs.lsp.Command; import org.javacs.lsp.CompletionItem; import org.javacs.lsp.CompletionItemKind; import org.javacs.lsp.CompletionList; import org.javacs.lsp.InsertTextFormat; public class CompletionProvider { private final CompilerProvider compiler; public static final CompletionList NOT_SUPPORTED = new CompletionList(false, List.of()); public static final int MAX_COMPLETION_ITEMS = 50; private static final String[] TOP_LEVEL_KEYWORDS = { "package", "import", "public", "private", "protected", "abstract", "class", "interface", 
"@interface", "extends", "implements", }; private static final String[] CLASS_BODY_KEYWORDS = { "public", "private", "protected", "static", "final", "native", "synchronized", "abstract", "default", "class", "interface", "void", "boolean", "int", "long", "float", "double", }; private static final String[] METHOD_BODY_KEYWORDS = { "new", "assert", "try", "catch", "finally", "throw", "return", "break", "case", "continue", "default", "do", "while", "for", "switch", "if", "else", "instanceof", "var", "final", "class", "void", "boolean", "int", "long", "float", "double", }; public CompletionProvider(CompilerProvider compiler) { this.compiler = compiler; } public CompletionList complete(Path file, int line, int column) { LOG.info("Complete at " + file.getFileName() + "(" + line + "," + column + ")..."); var started = Instant.now(); var task = compiler.parse(file); var cursor = task.root.getLineMap().getPosition(line, column); var contents = new PruneMethodBodies(task.task).scan(task.root, cursor); var endOfLine = endOfLine(contents, (int) cursor); contents.insert(endOfLine, ';'); var list = compileAndComplete(file, contents.toString(), cursor); addTopLevelSnippets(task, list); logCompletionTiming(started, list.items, list.isIncomplete); return list; } private int endOfLine(CharSequence contents, int cursor) { while (cursor < contents.length()) { var c = contents.charAt(cursor); if (c == '\r' || c == '\n') break; cursor++; } return cursor; } private CompletionList compileAndComplete(Path file, String contents, long cursor) { var started = Instant.now(); var source = new SourceFileObject(file, contents, Instant.now()); var partial = partialIdentifier(contents, (int) cursor); var endsWithParen = endsWithParen(contents, (int) cursor); try (var task = compiler.compile(List.of(source))) { LOG.info("...compiled in " + Duration.between(started, Instant.now()).toMillis() + "ms"); var path = new FindCompletionsAt(task.task).scan(task.root(), cursor); switch 
(path.getLeaf().getKind()) { case IDENTIFIER: return completeIdentifier(task, path, partial, endsWithParen); case MEMBER_SELECT: return completeMemberSelect(task, path, partial, endsWithParen); case MEMBER_REFERENCE: return completeMemberReference(task, path, partial); case SWITCH: return completeSwitchConstant(task, path, partial); case IMPORT: return completeImport(qualifiedPartialIdentifier(contents, (int) cursor)); default: var list = new CompletionList(); addKeywords(path, partial, list); return list; } } } private void addTopLevelSnippets(ParseTask task, CompletionList list) { var file = Paths.get(task.root.getSourceFile().toUri()); if (!hasTypeDeclaration(task.root)) { list.items.add(classSnippet(file)); if (task.root.getPackage() == null) { list.items.add(packageSnippet(file)); } } } private boolean hasTypeDeclaration(CompilationUnitTree root) { for (var tree : root.getTypeDecls()) { if (tree.getKind() != Tree.Kind.ERRONEOUS) { return true; } } return false; } private CompletionItem packageSnippet(Path file) { var name = FileStore.suggestedPackageName(file); return snippetItem("package " + name, "package " + name + ";\n\n"); } private CompletionItem classSnippet(Path file) { var name = file.getFileName().toString(); name = name.substring(0, name.length() - ".java".length()); return snippetItem("class " + name, "class " + name + " {\n $0\n}"); } private String partialIdentifier(String contents, int end) { var start = end; while (start > 0 && Character.isJavaIdentifierPart(contents.charAt(start - 1))) { start } return contents.substring(start, end); } private boolean endsWithParen(String contents, int cursor) { for (var i = cursor; i < contents.length(); i++) { if (!Character.isJavaIdentifierPart(contents.charAt(i))) { return contents.charAt(i) == '('; } } return false; } private String qualifiedPartialIdentifier(String contents, int end) { var start = end; while (start > 0 && isQualifiedIdentifierChar(contents.charAt(start - 1))) { start } return 
contents.substring(start, end); } private boolean isQualifiedIdentifierChar(char c) { return c == '.' || Character.isJavaIdentifierPart(c); } private CompletionList completeIdentifier(CompileTask task, TreePath path, String partial, boolean endsWithParen) { LOG.info("...complete identifiers"); var list = new CompletionList(); list.items = completeUsingScope(task, path, partial, endsWithParen); addStaticImports(task, path.getCompilationUnit(), partial, endsWithParen, list); if (!list.isIncomplete && partial.length() > 0 && Character.isUpperCase(partial.charAt(0))) { addClassNames(path.getCompilationUnit(), partial, list); } addKeywords(path, partial, list); return list; } private void addKeywords(TreePath path, String partial, CompletionList list) { var level = findKeywordLevel(path); String[] keywords = {}; if (level instanceof CompilationUnitTree) { keywords = TOP_LEVEL_KEYWORDS; } else if (level instanceof ClassTree) { keywords = CLASS_BODY_KEYWORDS; } else if (level instanceof MethodTree) { keywords = METHOD_BODY_KEYWORDS; } for (var k : keywords) { if (StringSearch.matchesPartialName(k, partial)) { list.items.add(keyword(k)); } } } private Tree findKeywordLevel(TreePath path) { while (path != null) { if (path.getLeaf() instanceof CompilationUnitTree || path.getLeaf() instanceof ClassTree || path.getLeaf() instanceof MethodTree) { return path.getLeaf(); } path = path.getParentPath(); } throw new RuntimeException("empty path"); } private List<CompletionItem> completeUsingScope( CompileTask task, TreePath path, String partial, boolean endsWithParen) { var trees = Trees.instance(task.task); var list = new ArrayList<CompletionItem>(); var methods = new HashMap<String, List<ExecutableElement>>(); var scope = trees.getScope(path); Predicate<CharSequence> filter = name -> StringSearch.matchesPartialName(name, partial); for (var member : ScopeHelper.scopeMembers(task, scope, filter)) { if (member.getKind() == ElementKind.METHOD) { putMethod((ExecutableElement) member, 
methods); } else { list.add(item(task, member)); } } for (var overloads : methods.values()) { list.add(method(task, overloads, !endsWithParen)); } LOG.info("...found " + list.size() + " scope members"); return list; } private void addStaticImports( CompileTask task, CompilationUnitTree root, String partial, boolean endsWithParen, CompletionList list) { var trees = Trees.instance(task.task); var methods = new HashMap<String, List<ExecutableElement>>(); var previousSize = list.items.size(); outer: for (var i : root.getImports()) { if (!i.isStatic()) continue; var id = (MemberSelectTree) i.getQualifiedIdentifier(); if (!importMatchesPartial(id.getIdentifier(), partial)) continue; var path = trees.getPath(root, id.getExpression()); var type = (TypeElement) trees.getElement(path); for (var member : type.getEnclosedElements()) { if (!member.getModifiers().contains(Modifier.STATIC)) continue; if (!memberMatchesImport(id.getIdentifier(), member)) continue; if (!StringSearch.matchesPartialName(member.getSimpleName(), partial)) continue; if (member.getKind() == ElementKind.METHOD) { putMethod((ExecutableElement) member, methods); } else { list.items.add(item(task, member)); } if (list.items.size() + methods.size() > MAX_COMPLETION_ITEMS) { list.isIncomplete = true; break outer; } } } for (var overloads : methods.values()) { list.items.add(method(task, overloads, !endsWithParen)); } LOG.info("...found " + (list.items.size() - previousSize) + " static imports"); } private boolean importMatchesPartial(Name staticImport, String partial) { return staticImport.contentEquals("*") || StringSearch.matchesPartialName(staticImport, partial); } private boolean memberMatchesImport(Name staticImport, Element member) { return staticImport.contentEquals("*") || staticImport.contentEquals(member.getSimpleName()); } private void addClassNames(CompilationUnitTree root, String partial, CompletionList list) { var packageName = Objects.toString(root.getPackageName(), ""); var uniques = new 
HashSet<String>(); var previousSize = list.items.size(); for (var className : compiler.packagePrivateTopLevelTypes(packageName)) { if (!StringSearch.matchesPartialName(className, partial)) continue; list.items.add(classItem(className)); uniques.add(className); } for (var className : compiler.publicTopLevelTypes()) { if (!StringSearch.matchesPartialName(simpleName(className), partial)) continue; if (uniques.contains(className)) continue; if (list.items.size() > MAX_COMPLETION_ITEMS) { list.isIncomplete = true; break; } list.items.add(classItem(className)); uniques.add(className); } LOG.info("...found " + (list.items.size() - previousSize) + " class names"); } private CompletionList completeMemberSelect( CompileTask task, TreePath path, String partial, boolean endsWithParen) { var trees = Trees.instance(task.task); var select = (MemberSelectTree) path.getLeaf(); LOG.info("...complete members of " + select.getExpression()); path = new TreePath(path, select.getExpression()); var isStatic = trees.getElement(path) instanceof TypeElement; var scope = trees.getScope(path); var type = trees.getTypeMirror(path); if (type instanceof ArrayType) { return completeArrayMemberSelect(isStatic); } else if (type instanceof TypeVariable) { return completeTypeVariableMemberSelect(task, scope, (TypeVariable) type, isStatic, partial, endsWithParen); } else if (type instanceof DeclaredType) { return completeDeclaredTypeMemberSelect(task, scope, (DeclaredType) type, isStatic, partial, endsWithParen); } else { return NOT_SUPPORTED; } } private CompletionList completeArrayMemberSelect(boolean isStatic) { if (isStatic) { return EMPTY; } else { var list = new CompletionList(); list.items.add(keyword("length")); return list; } } private CompletionList completeTypeVariableMemberSelect( CompileTask task, Scope scope, TypeVariable type, boolean isStatic, String partial, boolean endsWithParen) { if (type.getUpperBound() instanceof DeclaredType) { return completeDeclaredTypeMemberSelect( task, 
scope, (DeclaredType) type.getUpperBound(), isStatic, partial, endsWithParen); } else if (type.getUpperBound() instanceof TypeVariable) { return completeTypeVariableMemberSelect( task, scope, (TypeVariable) type.getUpperBound(), isStatic, partial, endsWithParen); } else { return NOT_SUPPORTED; } } private CompletionList completeDeclaredTypeMemberSelect( CompileTask task, Scope scope, DeclaredType type, boolean isStatic, String partial, boolean endsWithParen) { var trees = Trees.instance(task.task); var typeElement = (TypeElement) type.asElement(); var list = new ArrayList<CompletionItem>(); var methods = new HashMap<String, List<ExecutableElement>>(); for (var member : task.task.getElements().getAllMembers(typeElement)) { if (member.getKind() == ElementKind.CONSTRUCTOR) continue; if (!StringSearch.matchesPartialName(member.getSimpleName(), partial)) continue; if (!trees.isAccessible(scope, member, type)) continue; if (isStatic != member.getModifiers().contains(Modifier.STATIC)) continue; if (member.getKind() == ElementKind.METHOD) { putMethod((ExecutableElement) member, methods); } else { list.add(item(task, member)); } } for (var overloads : methods.values()) { list.add(method(task, overloads, !endsWithParen)); } if (isStatic) { list.add(keyword("class")); } if (isStatic && isEnclosingClass(type, scope)) { list.add(keyword("this")); list.add(keyword("super")); } return new CompletionList(false, list); } private boolean isEnclosingClass(DeclaredType type, Scope start) { for (var s : ScopeHelper.fastScopes(start)) { // If we reach a static method, stop looking var method = s.getEnclosingMethod(); if (method != null && method.getModifiers().contains(Modifier.STATIC)) { return false; } // If we find the enclosing class var thisElement = s.getEnclosingClass(); if (thisElement != null && thisElement.asType().equals(type)) { return true; } // If the enclosing class is static, stop looking if (thisElement != null && thisElement.getModifiers().contains(Modifier.STATIC)) { 
return false; } } return false; } private CompletionList completeMemberReference(CompileTask task, TreePath path, String partial) { var trees = Trees.instance(task.task); var select = (MemberReferenceTree) path.getLeaf(); LOG.info("...complete methods of " + select.getQualifierExpression()); path = new TreePath(path, select.getQualifierExpression()); var element = trees.getElement(path); var isStatic = element instanceof TypeElement; var scope = trees.getScope(path); var type = trees.getTypeMirror(path); if (type instanceof ArrayType) { return completeArrayMemberReference(isStatic); } else if (type instanceof TypeVariable) { return completeTypeVariableMemberReference(task, scope, (TypeVariable) type, isStatic, partial); } else if (type instanceof DeclaredType) { return completeDeclaredTypeMemberReference(task, scope, (DeclaredType) type, isStatic, partial); } else { return NOT_SUPPORTED; } } private CompletionList completeArrayMemberReference(boolean isStatic) { if (isStatic) { var list = new CompletionList(); list.items.add(keyword("new")); return list; } else { return EMPTY; } } private CompletionList completeTypeVariableMemberReference( CompileTask task, Scope scope, TypeVariable type, boolean isStatic, String partial) { if (type.getUpperBound() instanceof DeclaredType) { return completeDeclaredTypeMemberReference( task, scope, (DeclaredType) type.getUpperBound(), isStatic, partial); } else if (type.getUpperBound() instanceof TypeVariable) { return completeTypeVariableMemberReference( task, scope, (TypeVariable) type.getUpperBound(), isStatic, partial); } else { return NOT_SUPPORTED; } } private CompletionList completeDeclaredTypeMemberReference( CompileTask task, Scope scope, DeclaredType type, boolean isStatic, String partial) { var trees = Trees.instance(task.task); var typeElement = (TypeElement) type.asElement(); var list = new ArrayList<CompletionItem>(); var methods = new HashMap<String, List<ExecutableElement>>(); for (var member : 
task.task.getElements().getAllMembers(typeElement)) { if (!StringSearch.matchesPartialName(member.getSimpleName(), partial)) continue; if (member.getKind() != ElementKind.METHOD) continue; if (!trees.isAccessible(scope, member, type)) continue; if (!isStatic && member.getModifiers().contains(Modifier.STATIC)) continue; if (member.getKind() == ElementKind.METHOD) { putMethod((ExecutableElement) member, methods); } else { list.add(item(task, member)); } } for (var overloads : methods.values()) { list.add(method(task, overloads, false)); } if (isStatic) { list.add(keyword("new")); } return new CompletionList(false, list); } private static final CompletionList EMPTY = new CompletionList(false, List.of()); private void putMethod(ExecutableElement method, Map<String, List<ExecutableElement>> methods) { var name = method.getSimpleName().toString(); if (!methods.containsKey(name)) { methods.put(name, new ArrayList<>()); } methods.get(name).add(method); } private CompletionList completeSwitchConstant(CompileTask task, TreePath path, String partial) { var switchTree = (SwitchTree) path.getLeaf(); path = new TreePath(path, switchTree.getExpression()); var type = Trees.instance(task.task).getTypeMirror(path); LOG.info("...complete constants of type " + type); if (!(type instanceof DeclaredType)) { return NOT_SUPPORTED; } var declared = (DeclaredType) type; var element = (TypeElement) declared.asElement(); var list = new ArrayList<CompletionItem>(); for (var member : task.task.getElements().getAllMembers(element)) { if (member.getKind() != ElementKind.ENUM_CONSTANT) continue; if (!StringSearch.matchesPartialName(member.getSimpleName(), partial)) continue; list.add(item(task, member)); } return new CompletionList(false, list); } private CompletionList completeImport(String path) { LOG.info("...complete import"); var names = new HashSet<String>(); var list = new CompletionList(); for (var className : compiler.publicTopLevelTypes()) { if (className.startsWith(path)) { var start = 
path.lastIndexOf('.'); var end = className.indexOf('.', path.length()); if (end == -1) end = className.length(); var segment = className.substring(start + 1, end); if (names.contains(segment)) continue; names.add(segment); var isClass = end == path.length(); if (isClass) { list.items.add(classItem(className)); } else { list.items.add(packageItem(segment)); } if (list.items.size() > MAX_COMPLETION_ITEMS) { list.isIncomplete = true; return list; } } } return list; } private CompletionItem packageItem(String name) { var i = new CompletionItem(); i.label = name; i.kind = CompletionItemKind.Module; return i; } private CompletionItem classItem(String className) { var i = new CompletionItem(); i.label = simpleName(className).toString(); i.kind = CompletionItemKind.Class; i.detail = className; var data = new CompletionData(); data.className = className; i.data = JsonHelper.GSON.toJsonTree(data); return i; } private CompletionItem snippetItem(String label, String snippet) { var i = new CompletionItem(); i.label = label; i.kind = CompletionItemKind.Snippet; i.insertText = snippet; i.insertTextFormat = InsertTextFormat.Snippet; i.sortText = String.format("%02d%s", Priority.SNIPPET, i.label); return i; } private CompletionItem item(CompileTask task, Element element) { if (element.getKind() == ElementKind.METHOD) throw new RuntimeException("method"); var i = new CompletionItem(); i.label = element.getSimpleName().toString(); i.kind = kind(element); i.detail = element.toString(); i.data = JsonHelper.GSON.toJsonTree(data(task, element, 1)); return i; } private CompletionItem method(CompileTask task, List<ExecutableElement> overloads, boolean addParens) { var first = overloads.get(0); var i = new CompletionItem(); i.label = first.getSimpleName().toString(); i.kind = CompletionItemKind.Method; i.detail = first.getReturnType() + " " + first; var data = data(task, first, overloads.size()); i.data = JsonHelper.GSON.toJsonTree(data); if (addParens) { if (overloads.size() == 1 && 
first.getParameters().isEmpty()) { i.insertText = first.getSimpleName() + "()$0"; } else { i.insertText = first.getSimpleName() + "($0)"; // Activate signatureHelp i.command = new Command(); i.command.command = "editor.action.triggerParameterHints"; i.command.title = "Trigger Parameter Hints"; } i.insertTextFormat = 2; // Snippet } return i; } private CompletionData data(CompileTask task, Element element, int overloads) { var data = new CompletionData(); if (element instanceof TypeElement) { var type = (TypeElement) element; data.className = type.getQualifiedName().toString(); } else if (element.getKind() == ElementKind.FIELD) { var field = (VariableElement) element; var type = (TypeElement) field.getEnclosingElement(); data.className = type.getQualifiedName().toString(); data.memberName = field.getSimpleName().toString(); } else if (element instanceof ExecutableElement) { var types = task.task.getTypes(); var method = (ExecutableElement) element; var type = (TypeElement) method.getEnclosingElement(); data.className = type.getQualifiedName().toString(); data.memberName = method.getSimpleName().toString(); data.erasedParameterTypes = new String[method.getParameters().size()]; for (var i = 0; i < data.erasedParameterTypes.length; i++) { var p = method.getParameters().get(i).asType(); data.erasedParameterTypes[i] = types.erasure(p).toString(); } data.plusOverloads = overloads - 1; } else { return null; } return data; } private Integer kind(Element e) { switch (e.getKind()) { case ANNOTATION_TYPE: return CompletionItemKind.Interface; case CLASS: return CompletionItemKind.Class; case CONSTRUCTOR: return CompletionItemKind.Constructor; case ENUM: return CompletionItemKind.Enum; case ENUM_CONSTANT: return CompletionItemKind.EnumMember; case EXCEPTION_PARAMETER: return CompletionItemKind.Property; case FIELD: return CompletionItemKind.Field; case STATIC_INIT: case INSTANCE_INIT: return CompletionItemKind.Function; case INTERFACE: return CompletionItemKind.Interface; case 
LOCAL_VARIABLE: return CompletionItemKind.Variable; case METHOD: return CompletionItemKind.Method; case PACKAGE: return CompletionItemKind.Module; case PARAMETER: return CompletionItemKind.Property; case RESOURCE_VARIABLE: return CompletionItemKind.Variable; case TYPE_PARAMETER: return CompletionItemKind.TypeParameter; case OTHER: default: return null; } } private CompletionItem keyword(String keyword) { var i = new CompletionItem(); i.label = keyword; i.kind = CompletionItemKind.Keyword; i.detail = "keyword"; i.sortText = String.format("%02d%s", Priority.KEYWORD, i.label); return i; } private static class Priority { static int iota = 0; static final int SNIPPET = iota; static final int LOCAL = iota++; static final int FIELD = iota++; static final int INHERITED_FIELD = iota++; static final int METHOD = iota++; static final int INHERITED_METHOD = iota++; static final int OBJECT_METHOD = iota++; static final int INNER_CLASS = iota++; static final int INHERITED_INNER_CLASS = iota++; static final int IMPORTED_CLASS = iota++; static final int NOT_IMPORTED_CLASS = iota++; static final int KEYWORD = iota++; static final int PACKAGE_MEMBER = iota++; static final int CASE_LABEL = iota++; } private void logCompletionTiming(Instant started, List<?> list, boolean isIncomplete) { var elapsedMs = Duration.between(started, Instant.now()).toMillis(); if (isIncomplete) LOG.info(String.format("Found %d items (incomplete) in %,d ms", list.size(), elapsedMs)); else LOG.info(String.format("...found %d items in %,d ms", list.size(), elapsedMs)); } private CharSequence simpleName(String className) { var dot = className.lastIndexOf('.'); if (dot == -1) return className; return className.subSequence(dot + 1, className.length()); } private static final Logger LOG = Logger.getLogger("main"); }
package org.jenkinsci.plugins.p4.groovy; import com.perforce.p4java.exception.P4JavaException; import com.perforce.p4java.server.IOptionsServer; import hudson.FilePath; import hudson.model.TaskListener; import jenkins.model.Jenkins; import org.jenkinsci.plugins.p4.client.ClientHelper; import org.jenkinsci.plugins.p4.workspace.Workspace; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Map; public class P4Groovy implements Serializable { private static final long serialVersionUID = 1L; private final String credential; private final Workspace workspace; private final FilePath buildWorkspace; private transient TaskListener listener = null; public P4Groovy(String credential, TaskListener listener, Workspace workspace, FilePath buildWorkspace) { this.credential = credential; this.workspace = workspace; this.listener = listener; this.buildWorkspace = buildWorkspace; } public String getClientName() { return workspace.getFullName(); } public String getUserName() throws P4JavaException { IOptionsServer p4 = getConnection(); String user = p4.getUserName(); p4.disconnect(); return user; } @Deprecated public Map<String, Object>[] runString(String cmd, String args) throws P4JavaException, InterruptedException, IOException { List<String> argList = new ArrayList<String>(); for (String arg : args.split(",")) { arg = arg.trim(); argList.add(arg); } String[] array = argList.toArray(new String[0]); return run(cmd, array); } public Map<String, Object>[] run(String cmd, String... 
args) throws P4JavaException, InterruptedException, IOException { P4GroovyTask task = new P4GroovyTask(cmd, args); task.setListener(listener); task.setCredential(credential); task.setWorkspace(workspace); return buildWorkspace.act(task); } public Map<String, Object>[] run(String cmd, List<String> args) throws P4JavaException, InterruptedException, IOException { String[] array = args.toArray(new String[0]); return run(cmd, array); } public Map<String, Object>[] save(String type, Map<String, Object> spec) throws P4JavaException, InterruptedException, IOException { String[] array = { "-i" }; P4GroovyTask task = new P4GroovyTask(type, array, spec); task.setListener(listener); task.setCredential(credential); task.setWorkspace(workspace); return buildWorkspace.act(task); } public Map<String, Object> fetch(String type, String id) throws P4JavaException, InterruptedException, IOException { String[] array = { "-o", id }; Map<String, Object>[] maps = run(type, array); if(maps.length == 0) return null; return maps[0]; } private IOptionsServer getConnection() { String client = workspace.getFullName(); String charset = workspace.getCharset(); ClientHelper p4 = new ClientHelper(Jenkins.getActiveInstance(), credential, listener, client, charset); try { p4.setClient(workspace); } catch (Exception e) { p4.log("Unable to set Client!"); } return p4.getConnection(); } }
package org.lantern; import java.io.IOException; import java.net.InetSocketAddress; import java.net.URI; import java.util.concurrent.Executors; import javax.net.ssl.SSLEngine; import org.apache.commons.lang.StringUtils; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelFutureListener; import org.jboss.netty.channel.ChannelHandler; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.group.DefaultChannelGroup; import org.jboss.netty.channel.socket.ClientSocketChannelFactory; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import org.jboss.netty.handler.codec.http.DefaultHttpResponse; import org.jboss.netty.handler.codec.http.HttpChunk; import org.jboss.netty.handler.codec.http.HttpHeaders; import org.jboss.netty.handler.codec.http.HttpHeaders.Names; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpRequestEncoder; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.codec.http.HttpVersion; import org.jboss.netty.handler.ssl.SslHandler; import org.lantern.httpseverywhere.HttpsEverywhere; import org.littleshoot.proxy.DefaultRelayPipelineFactoryFactory; import org.littleshoot.proxy.HttpConnectRelayingHandler; import org.littleshoot.proxy.HttpFilter; import org.littleshoot.proxy.HttpRequestHandler; import org.littleshoot.proxy.HttpResponseFilters; import org.littleshoot.proxy.KeyStoreManager; import org.littleshoot.proxy.ProxyUtils; import 
org.littleshoot.proxy.RelayPipelineFactoryFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Handler that relays traffic to another proxy, dispatching between * appropriate proxies depending on the type of request. */ public class DispatchingProxyRelayHandler extends SimpleChannelUpstreamHandler { private final Logger log = LoggerFactory.getLogger(getClass()); private volatile long messagesReceived = 0L; /** * Outgoing channel that handles incoming HTTP Connect requests. */ private ChannelFuture httpConnectChannelFuture; private Channel browserToProxyChannel; private static final long REQUEST_SIZE_LIMIT = 1024 * 1024 * 10 - 4096; private static final boolean PROXIES_ACTIVE = true; private static final boolean ANONYMOUS_ACTIVE = true; private static final boolean TRUSTED_ACTIVE = true; private static final boolean LAE_ACTIVE = true; private static final ClientSocketChannelFactory clientSocketChannelFactory = new NioClientSocketChannelFactory( Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); static { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { //clientSocketChannelFactory.releaseExternalResources(); } })); } private final HttpRequestProcessor unproxiedRequestProcessor = new HttpRequestProcessor() { final RelayPipelineFactoryFactory pf = new DefaultRelayPipelineFactoryFactory(null, new HttpResponseFilters() { @Override public HttpFilter getFilter(String arg0) { return null; } }, null, new DefaultChannelGroup("HTTP-Proxy-Server")); private final HttpRequestHandler requestHandler = new HttpRequestHandler(clientSocketChannelFactory, pf); @Override public boolean processRequest(final Channel browserChannel, final ChannelHandlerContext ctx, final MessageEvent me) throws IOException { requestHandler.messageReceived(ctx, me); return true; } @Override public boolean processChunk(final ChannelHandlerContext ctx, final MessageEvent me) throws IOException { 
requestHandler.messageReceived(ctx, me); return true; } @Override public void close() { } }; private final HttpRequestProcessor proxyRequestProcessor; //private final HttpRequestProcessor anonymousPeerRequestProcessor; //private final HttpRequestProcessor trustedPeerRequestProcessor; private final HttpRequestProcessor laeRequestProcessor; private HttpRequestProcessor currentRequestProcessor; private boolean readingChunks; /** * Specifies whether or not we're currently proxying requests. This is * necessary because we don't have all the initial HTTP request data, * such as the referer or the URI, when we're processing HTTP chunks. */ private boolean proxying; private final KeyStoreManager keyStoreManager; /** * Creates a new handler that reads incoming HTTP requests and dispatches * them to proxies as appropriate. * * @param keyStoreManager Keeps track of all trusted keys. */ public DispatchingProxyRelayHandler(final KeyStoreManager keyStoreManager) { //this.proxyProvider = proxyProvider; //this.proxyStatusListener = proxyStatusListener; this.keyStoreManager = keyStoreManager; // This uses the raw p2p client because all traffic sent over these // connections already uses end-to-end encryption. /* this.anonymousPeerRequestProcessor = new PeerHttpConnectRequestProcessor(new Proxy() { @Override public InetSocketAddress getProxy() { throw new UnsupportedOperationException( "Peer proxy required"); } @Override public URI getPeerProxy() { // For CONNECT we can use either an anonymous peer or a // trusted peer. 
final URI lantern = proxyProvider.getAnonymousProxy(); if (lantern == null) { return proxyProvider.getPeerProxy(); } return lantern; } }, proxyStatusListener, encryptingP2pClient); this.trustedPeerRequestProcessor = new PeerHttpRequestProcessor(new Proxy() { @Override public InetSocketAddress getProxy() { throw new UnsupportedOperationException( "Peer proxy required"); } @Override public URI getPeerProxy() { return proxyProvider.getPeerProxy(); } }, proxyStatusListener, encryptingP2pClient, this.keyStoreManager); */ this.proxyRequestProcessor = new DefaultHttpRequestProcessor(LanternHub.getProxyStatusListener(), new HttpRequestTransformer() { @Override public void transform(final HttpRequest request, final InetSocketAddress proxyAddress) { // Does nothing. } }, false, new Proxy() { @Override public URI getPeerProxy() { throw new UnsupportedOperationException( "Peer proxy not supported here."); } @Override public InetSocketAddress getProxy() { return LanternHub.getProxyProvider().getProxy(); } }, this.keyStoreManager); this.laeRequestProcessor = new DefaultHttpRequestProcessor(LanternHub.getProxyStatusListener(), new LaeHttpRequestTransformer(), true, new Proxy() { @Override public URI getPeerProxy() { throw new UnsupportedOperationException( "Peer proxy not supported here."); } @Override public InetSocketAddress getProxy() { return LanternHub.getProxyProvider().getLaeProxy(); } }, null); } @Override public void messageReceived(final ChannelHandlerContext ctx, final MessageEvent me) { messagesReceived++; log.info("Received {} total messages", messagesReceived); if (!readingChunks) { log.info("Reading HTTP request (not a chunk)..."); this.currentRequestProcessor = dispatchRequest(ctx, me); } else { log.info("Reading chunks..."); try { final HttpChunk chunk = (HttpChunk) me.getMessage(); // Remember this will typically be a persistent connection, // so we'll get another request after we're read the last // chunk. 
So we need to reset it back to no longer read in // chunk mode. if (chunk.isLast()) { this.readingChunks = false; } this.currentRequestProcessor.processChunk(ctx, me); } catch (final IOException e) { // Unclear what to do here. If we couldn't connect to a remote // peer, for example, we don't want to close the connection // to the browser. If the other end closed the connection, // it could have been due to connection close rules, or it // could have been because they simply went offline. log.info("Exception processing chunk", e); } } log.info("Done processing HTTP request...."); } private HttpRequestProcessor dispatchRequest( final ChannelHandlerContext ctx, final MessageEvent me) { final HttpRequest request = (HttpRequest)me.getMessage(); final String uri = request.getUri(); log.info("URI is: {}", uri); // We need to set this outside of proxying rules because we first // send incoming messages down chunked versus unchunked paths and // then send them down proxied versus unproxied paths. if (request.isChunked()) { readingChunks = true; } else { readingChunks = false; } this.proxying = shouldProxy(request); if (proxying) { // If it's an HTTP request, see if we can redirect it to HTTPS. final String https = LanternHub.httpsEverywhere().toHttps(uri); if (!https.equals(uri)) { final HttpResponse response = new DefaultHttpResponse(request.getProtocolVersion(), HttpResponseStatus.MOVED_PERMANENTLY); response.setProtocolVersion(HttpVersion.HTTP_1_0); response.setHeader(HttpHeaders.Names.LOCATION, https); response.setHeader(HttpHeaders.Names.CONTENT_LENGTH, "0"); log.info("Sending redirect response!!"); browserToProxyChannel.write(response); ProxyUtils.closeOnFlush(browserToProxyChannel); // Note this redirect should result in a new HTTPS request // coming in on this connection or a new connection -- in fact // this redirect should always result in an HTTP CONNECT // request as a result of the redirect. 
That new request // will not attempt to use the existing processor, so it's // not an issue to return null here. return null; } log.info("Not converting to HTTPS"); LanternHub.statsTracker().incrementProxiedRequests(); return dispatchProxyRequest(ctx, me); } else { log.info("Not proxying!"); LanternHub.statsTracker().incrementDirectRequests(); try { this.unproxiedRequestProcessor.processRequest( browserToProxyChannel, ctx, me); } catch (final IOException e) { // This should not happen because the underlying Netty handler // does not throw an exception. log.warn("Could not handle unproxied request "should never happen", e); } return this.unproxiedRequestProcessor; } } private boolean shouldProxy(final HttpRequest request) { if (LanternHub.settings().isProxyAllSites()) { return true; } return LanternHub.whitelist().isWhitelisted(request); } private HttpRequestProcessor dispatchProxyRequest( final ChannelHandlerContext ctx, final MessageEvent me) { final HttpRequest request = (HttpRequest) me.getMessage(); log.info("Dispatching request"); if (request.getMethod() == HttpMethod.CONNECT) { try { if (ANONYMOUS_ACTIVE && LanternHub.getProxyProvider().getAnonymousPeerProxyManager().processRequest( browserToProxyChannel, ctx, me) != null) { log.info("Processed CONNECT on peer...returning"); return null; } else { // We need to forward the CONNECT request from this proxy to an // external proxy that can handle it. We effectively want to // relay all traffic in this case without doing anything on // our own other than direct the CONNECT request to the correct // proxy. centralConnect(request); return null; } } catch (final IOException e) { log.warn("Could not send CONNECT to anonymous proxy", e); // This will happen whenever the server's giving us bad // anonymous proxies, which could happen quite often. // We should fall back to central. 
centralConnect(request); return null; } } try { if (TRUSTED_ACTIVE) { final HttpRequestProcessor rp = LanternHub.getProxyProvider().getTrustedPeerProxyManager().processRequest( browserToProxyChannel, ctx, me); if (rp != null) { return rp; } } } catch (final IOException e) { log.info("Caught exception processing request", e); } try { if (useLae() && isLae(request) && this.laeRequestProcessor.processRequest(browserToProxyChannel, ctx, me)) { return this.laeRequestProcessor; } } catch (final IOException e) { log.info("Caught exception processing request", e); } try { if (useStandardProxies() && this.proxyRequestProcessor.processRequest( browserToProxyChannel, ctx, me)) { log.info("Used standard proxy"); return this.proxyRequestProcessor; } } catch (final IOException e) { log.info("Caught exception processing request", e); } log.warn("No proxy could process the request {}", me.getMessage()); // Not much we can do if no proxy can handle it. return null; } private boolean useStandardProxies() { return PROXIES_ACTIVE && LanternHub.settings().isUseCloudProxies(); } private boolean useLae() { return LAE_ACTIVE && LanternHub.settings().isUseCloudProxies(); } private void centralConnect(final HttpRequest request) { if (this.httpConnectChannelFuture == null) { log.info("Opening HTTP CONNECT tunnel"); try { this.httpConnectChannelFuture = openOutgoingRelayChannel(request); } catch (final IOException e) { log.error("Could not open CONNECT channel", e); } } else { log.error("Outbound channel already assigned?"); } } private boolean isLae(final HttpRequest request) { final String uri = request.getUri(); if (uri.contains("youtube.com")) { log.info("NOT USING LAE FOR YOUTUBE"); return false; } final HttpMethod method = request.getMethod(); if (method == HttpMethod.GET) { return true; } if (method == HttpMethod.CONNECT) { return false; } if (LanternUtils.isTransferEncodingChunked(request)) { return false; } // send requests larger than 10MB. 
if (method == HttpMethod.POST) { final String contentLength = request.getHeader(Names.CONTENT_LENGTH); if (StringUtils.isBlank(contentLength)) { // If it's a post without a content length, we want to be // cautious. return false; } final long cl = Long.parseLong(contentLength); if (cl > REQUEST_SIZE_LIMIT) { return false; } return true; } return false; } @Override public void channelOpen(final ChannelHandlerContext ctx, final ChannelStateEvent e) { log.info("Got incoming channel"); this.browserToProxyChannel = e.getChannel(); } private ChannelFuture openOutgoingRelayChannel(final HttpRequest request) throws IOException { this.browserToProxyChannel.setReadable(false); // Start the connection attempt. final ClientBootstrap cb = new ClientBootstrap(LanternUtils.clientSocketChannelFactory); final ChannelPipeline pipeline = cb.getPipeline(); // This is slightly odd, as we tunnel SSL inside SSL, but we'd // otherwise just be running an open CONNECT proxy. // It's also necessary to use our own engine here, as we need to trust // the cert from the proxy. 
final LanternClientSslContextFactory sslFactory = new LanternClientSslContextFactory(this.keyStoreManager); final SSLEngine engine = sslFactory.getClientContext().createSSLEngine(); engine.setUseClientMode(true); ChannelHandler stats = new StatsTrackingHandler() { @Override public void addDownBytes(long bytes, Channel channel) { // global bytes proxied statistic //log.info("Recording proxied bytes through HTTP CONNECT: {}", bytes); statsTracker().addBytesProxied(bytes, channel); // contributes to local download rate statsTracker().addDownBytesViaProxies(bytes, channel); } @Override public void addUpBytes(long bytes, Channel channel) { statsTracker().addUpBytesViaProxies(bytes, channel); } }; pipeline.addLast("stats", stats); pipeline.addLast("ssl", new SslHandler(engine)); pipeline.addLast("encoder", new HttpRequestEncoder()); pipeline.addLast("handler", new HttpConnectRelayingHandler(this.browserToProxyChannel, null)); log.info("Connecting to relay proxy"); final InetSocketAddress isa = LanternHub.getProxyProvider().getProxy(); if (isa == null) { log.error("NO PROXY AVAILABLE?"); ProxyUtils.closeOnFlush(browserToProxyChannel); throw new IOException("No proxy to use for CONNECT?"); } final ChannelFuture cf = cb.connect(isa); log.info("Got an outbound channel on: {}", hashCode()); final ChannelPipeline browserPipeline = browserToProxyChannel.getPipeline(); browserPipeline.remove("encoder"); browserPipeline.remove("decoder"); browserPipeline.remove("handler"); browserPipeline.addLast("handler", new HttpConnectRelayingHandler(cf.getChannel(), null)); // This is handy, as set readable to false while the channel is // connecting ensures we won't get any incoming messages until // we're fully connected. 
cf.addListener(new ChannelFutureListener() { @Override public void operationComplete(final ChannelFuture future) throws Exception { if (future.isSuccess()) { cf.getChannel().write(request).addListener( new ChannelFutureListener() { @Override public void operationComplete( final ChannelFuture channelFuture) throws Exception { // we're using HTTP connect here, so we need // to remove the encoder and start reading // from the inbound channel only when we've // used the original encoder to properly encode // the CONNECT request. pipeline.remove("encoder"); // Begin to accept incoming traffic. browserToProxyChannel.setReadable(true); } }); } else { // Close the connection if the connection attempt has failed. browserToProxyChannel.close(); LanternHub.getProxyStatusListener().onCouldNotConnect(isa); } } }); return cf; } @Override public void channelClosed(final ChannelHandlerContext ctx, final ChannelStateEvent e) { log.info("Got inbound channel closed. Closing outbound."); //this.trustedPeerRequestProcessor.close(); //this.anonymousPeerRequestProcessor.close(); if (this.currentRequestProcessor != null) { this.currentRequestProcessor.close(); } this.proxyRequestProcessor.close(); this.laeRequestProcessor.close(); } @Override public void exceptionCaught(final ChannelHandlerContext ctx, final ExceptionEvent e) throws Exception { log.error("Caught exception on INBOUND channel", e.getCause()); ProxyUtils.closeOnFlush(this.browserToProxyChannel); } }
package org.lightmare.deploy.management; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Properties; import org.lightmare.config.Config; import org.lightmare.config.Configuration; import org.lightmare.utils.ObjectUtils; /** * Manages and administrator users * * @author Levan * */ public class Security { private Properties cache; public Security() throws IOException { cacheUsers(); } private void loadUsers(File file) throws IOException { InputStream stream = new FileInputStream(file); cache = new Properties(); try { cache.load(stream); } finally { ObjectUtils.close(stream); } } public void cacheUsers() throws IOException { String path = Configuration.getAdminUsersPath(); if (ObjectUtils.notAvailable(path)) { path = Config.ADMIN_USERS_PATH.getValue(); } File file = new File(path); if (file.exists()) { loadUsers(file); } } public boolean check() { return ObjectUtils.notAvailable(cache); } public boolean authenticate(String user, String pass) { boolean valid; if (ObjectUtils.available(cache)) { String cachedPass = (String) cache.get(user); valid = (ObjectUtils.available(cachedPass) && cachedPass .equals(pass)); } else { valid = Boolean.TRUE; } return valid; } }
package org.lightmare.jpa.datasource; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Properties; import java.util.concurrent.CountDownLatch; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.log4j.Logger; import org.lightmare.deploy.BeanLoader; import org.lightmare.jpa.datasource.Initializer.ConnectionConfig; import org.lightmare.utils.NamingUtils; import org.lightmare.utils.ObjectUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; public class FileParsers { public static final String JBOSS_TAG_NAME = "urn:jboss:domain:datasources:1.0"; private static final String DATA_SURCE_TAG = "datasource"; private static final String USER_TAG = "user-name"; private static final String PASSWORD_TAG = "password"; private static final String DRIVER_TAG = "driver"; private static final String MAX_POOL_TAG = "max-pool-size"; private static final String MIN_POOL_TAG = "min-pool-size"; private static final String INITIAL_POOL_TAG = "prefill"; private static final String JNDI_NAME_TAG = "jndi-name"; private static final String CONNECTION_URL_TAG = "connection-url"; private static final String SECURITY_TAG = "security"; private static final String POOL_TAG = "pool"; private static final Logger LOG = Logger.getLogger(FileParsers.class); public static Document document(File file) throws IOException { return document(file.toURI().toURL()); } public static Document document(URL url) throws IOException { Document document; URLConnection connection = url.openConnection(); InputStream stream = connection.getInputStream(); try { document = parse(stream); } finally { 
ObjectUtils.close(stream); } return document; } /** * Gets item with index 0 from passed {@link NodeList} instance * * @param list * @return {@link Node} */ private static Node getFirst(NodeList list) { return list.item(ObjectUtils.FIRST_INDEX); } /** * To get text from tag depended on jre installation * * @param element * @return {@link String} */ public static String getContext(Element element) { NodeList textList = element.getChildNodes(); Node firstNode = getFirst(textList); String data = firstNode.getNodeValue().trim(); return data; } /** * Parses XML document to initialize {@link javax.sql.DataSource}s * configuration properties * * @param stream * @return {@link Document} * @throws IOException */ public static Document parse(InputStream stream) throws IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder; Document document; try { builder = factory.newDocumentBuilder(); document = builder.parse(stream); } catch (ParserConfigurationException ex) { throw new IOException(ex); } catch (SAXException ex) { throw new IOException(ex); } return document; } public void setDataFromJBossDriver(NodeList nodeList, Properties properties) { Element thisElement = (Element) getFirst(nodeList); String name = getContext(thisElement); String driverName = DriverConfig.getDriverName(name); properties.setProperty(ConnectionConfig.DRIVER_PROPERTY.property, driverName); } /** * Gets security information from {@link javax.sql.DataSource} meta data * * @param nodeList * @param properties */ public void setDataFromJBossSecurity(NodeList nodeList, Properties properties) { for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); NodeList userList = thisElement.getElementsByTagName(USER_TAG); int elementLength = userList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element userElement = (Element) getFirst(userList); String user = getContext(userElement); 
properties.setProperty(ConnectionConfig.USER_PROPERTY.property, user); NodeList passList = thisElement.getElementsByTagName(PASSWORD_TAG); elementLength = passList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element passElement = (Element) getFirst(passList); String password = getContext(passElement); properties.setProperty(ConnectionConfig.PASSWORD_PROPERTY.property, password); } } /** * Gets security information from {@link javax.sql.DataSource} meta data * * @param nodeList * @param properties */ public void setDataFromJBossPool(NodeList nodeList, Properties properties) { for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); NodeList minPoolSizeList = thisElement .getElementsByTagName(MIN_POOL_TAG); int elementLength = minPoolSizeList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element minPoolSizeElement = (Element) getFirst(minPoolSizeList); String minPoolSize = getContext(minPoolSizeElement); properties.setProperty(PoolConfig.Defaults.MIN_POOL_SIZE.key, minPoolSize); NodeList maxPoolSizeList = thisElement .getElementsByTagName(MAX_POOL_TAG); elementLength = maxPoolSizeList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element maxPoolSizeElement = (Element) getFirst(maxPoolSizeList); String maxPoolSize = getContext(maxPoolSizeElement); properties.setProperty(PoolConfig.Defaults.MAX_POOL_SIZE.key, maxPoolSize); NodeList initPoolSizeList = thisElement .getElementsByTagName(INITIAL_POOL_TAG); elementLength = initPoolSizeList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element initPoolSizeElement = (Element) getFirst(initPoolSizeList); String prefill = getContext(initPoolSizeElement); if (Boolean.valueOf(prefill)) { properties.setProperty( PoolConfig.Defaults.INITIAL_POOL_SIZE.key, minPoolSize); } } } /** * Gets {@link javax.sql.DataSource}s configuration properties as * {@link 
List} of {@link Properties} * * @param nodeList * @return */ public List<Properties> getDataFromJBoss(NodeList nodeList) { List<Properties> properties = new ArrayList<Properties>(); String jndiName; String clearName; for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); Properties props = new Properties(); jndiName = thisElement.getAttribute(JNDI_NAME_TAG); clearName = NamingUtils.clearDataSourceName(jndiName); props.setProperty(ConnectionConfig.JNDI_NAME_PROPERTY.property, jndiName); props.setProperty(ConnectionConfig.NAME_PROPERTY.property, clearName); NodeList urlList = thisElement .getElementsByTagName(CONNECTION_URL_TAG); int urlElementLength = urlList.getLength(); if (urlElementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element urlElement = (Element) getFirst(urlList); String url = getContext(urlElement); props.setProperty(ConnectionConfig.URL_PROPERTY.property, url); NodeList securityList = thisElement .getElementsByTagName(SECURITY_TAG); setDataFromJBossSecurity(securityList, props); NodeList poolList = thisElement.getElementsByTagName(POOL_TAG); setDataFromJBossPool(poolList, props); NodeList driverList = thisElement.getElementsByTagName(DRIVER_TAG); setDataFromJBossDriver(driverList, props); properties.add(props); } return properties; } private static NodeList getDataSourceTags(Document document) { NodeList nodeList = document.getElementsByTagName(DATA_SURCE_TAG); return nodeList; } private static NodeList getDataSourceTags(File file) throws IOException { Document document = document(file); NodeList nodeList = getDataSourceTags(document); return nodeList; } private static NodeList getDataSourceTags(String dataSourcePath) throws IOException { File file = new File(dataSourcePath); NodeList nodeList = getDataSourceTags(file); return nodeList; } /** * Retrieves data source JNDI names from passed file * * @param dataSourcePath * @return * @throws IOException */ public static Collection<String> 
dataSourceNames(String dataSourcePath) throws IOException { Collection<String> jndiNames = new HashSet<String>(); NodeList nodeList = getDataSourceTags(dataSourcePath); String jndiName; for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); jndiName = thisElement.getAttribute(JNDI_NAME_TAG); jndiNames.add(jndiName); } return jndiNames; } /** * Parses standalone.xml file and initializes {@link javax.sql.DataSource}s * and binds them to JNDI context * * @param dataSourcePath * @throws IOException */ public void parseStandaloneXml(String dataSourcePath) throws IOException { NodeList nodeList = getDataSourceTags(dataSourcePath); List<Properties> properties = getDataFromJBoss(nodeList); // Blocking semaphore before all data source initialization finished CountDownLatch blocker = new CountDownLatch(properties.size()); BeanLoader.DataSourceParameters parameters; for (Properties props : properties) { try { // Initializes and fills BeanLoader.DataSourceParameters class // to deploy data source parameters = new BeanLoader.DataSourceParameters(); parameters.properties = props; parameters.blocker = blocker; BeanLoader.initializeDatasource(parameters); } catch (IOException ex) { LOG.error(InitMessages.INITIALIZING_ERROR, ex); } } try { blocker.await(); } catch (InterruptedException ex) { throw new IOException(ex); } Initializer.setDsAsInitialized(dataSourcePath); } }
package org.loverde.geographiccoordinate; import java.math.BigDecimal; import org.loverde.geographiccoordinate.compass.CompassDirection; import org.loverde.geographiccoordinate.exception.GeographicCoordinateException; import org.loverde.util.number.bigdecimal.BigDecimalCompare; /** * A class containing an exact bearing and a mapping of the bearing to a general compass direction * * @param <T> An implementation of {@linkplain CompassDirection} */ public class Bearing<T extends CompassDirection> { private T compassDirection; private BigDecimal bearing; public Bearing() {} public Bearing( final T compassDirection, final BigDecimal bearing ) { setCompassDirection( compassDirection ); setBearing( bearing ); } public T getCompassDirection() { return compassDirection; } public void setCompassDirection( final T compassDirection ) { this.compassDirection = compassDirection; } public BigDecimal getBearing() { return bearing; } public void setBearing( final BigDecimal bearing ) { if( bearing == null ) throw new GeographicCoordinateException( GeographicCoordinateException.Messages.BEARING_BEARING_NULL ); if( !BigDecimalCompare.isWithinInclusiveRange(bearing, BigDecimal.ZERO, new BigDecimal(360)) ) throw new GeographicCoordinateException( GeographicCoordinateException.Messages.BEARING_OUT_OF_RANGE ); this.bearing = bearing; } }
package org.lumi.rumpelstiltskin.util;

import java.net.URL;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

/**
 * Holds the location of an OSv repository, the path of its configuration,
 * and the list of images known to it.
 */
public class Repository {

    /* Repository related state */
    private URL osvUrl;
    private Path confPath;
    private List<Image> images;

    /* Constructor */
    public Repository(URL osvUrl, Path confPath) {
        this.osvUrl = osvUrl;
        this.confPath = confPath;
        this.images = new ArrayList<>();
    }

    // NOTE(review): intentionally left empty in the original source — presumably
    // a stub for a future image-import feature; confirm before relying on it.
    public void importImage(String imageName, String file, String version,
            String created, String description, String build) {
    }

    /* Setters & Getters */
    public URL getOsvUrl() {
        return osvUrl;
    }

    public void setOsvUrl(URL osvUrl) {
        this.osvUrl = osvUrl;
    }

    public Path getConfPath() {
        return confPath;
    }

    public void setConfPath(Path confPath) {
        this.confPath = confPath;
    }

    public List<Image> getImages() {
        return images;
    }

    public void setImages(List<Image> images) {
        this.images = images;
    }
}
package org.sanju.ml.plugin;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Properties;

import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.sanju.ml.ApplicationServer;
import org.sanju.ml.ConnectionManager;
import org.sanju.ml.Credential;
import org.sanju.ml.Server;
import org.sanju.ml.deployer.ModuleTypes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.marklogic.client.DatabaseClient;

/**
 * Maven plugin to deploy the MarkLogic modules into MarkLogic module database.
 *
 * @author Sanju Thomas
 * @date 20th Sep, 2016
 *
 */
@Mojo(name = "ml-module-deployer", defaultPhase = LifecyclePhase.INSTALL)
public class MLModuleDeployerMojo extends AbstractMojo {

    private static final Logger logger = LoggerFactory.getLogger(MLModuleDeployerMojo.class);

    /** Location of the MarkLogic server configuration properties file. */
    @Parameter(property = "ml.configuration", defaultValue = "${basedir}/src/main/resources/ml-server-config.properties")
    private String mlConfiguration;

    /**
     * @param mlConfiguration path of the ml-server-config properties file
     */
    public void setMlConfiguration(final String mlConfiguration) {
        this.mlConfiguration = mlConfiguration;
    }

    /**
     * Loads the configuration file, opens a MarkLogic client, and for every
     * {@link ModuleTypes} value reflectively instantiates the configured
     * deployer class and invokes its deploy method.
     */
    @Override
    public void execute() throws MojoExecutionException {
        logger.info("Starting the ml-module-deployer plugin execution. ml.configuration file is {}", this.mlConfiguration);
        DatabaseClient databaseClient = null;
        try {
            final Properties properties = new Properties();
            // try-with-resources: the original leaked this FileInputStream.
            try (FileInputStream configStream = new FileInputStream(new File(this.mlConfiguration))) {
                properties.load(configStream);
            }
            final String host = properties.getProperty(PropertyConstants.ML_HOST);
            final Integer port = Integer.valueOf(properties.getProperty(PropertyConstants.ML_PORT));
            final String username = properties.getProperty(PropertyConstants.ML_USERNAME);
            final String password = properties.getProperty(PropertyConstants.ML_PASSWORD);
            final Credential credential = new Credential(username, password);
            final Server server = new Server(host, credential);
            final ApplicationServer applicationServer = new ApplicationServer(server, port);
            databaseClient = ConnectionManager.getClient(applicationServer);
            for (final ModuleTypes type : ModuleTypes.values()) {
                // Each module type maps to a deployer class name in the properties file;
                // the deployer is constructed with (DatabaseClient, Properties) and its
                // deploy method is invoked reflectively.
                final Constructor<?> constructor = Class.forName(properties.getProperty(type.getDeployerClass()))
                        .getConstructor(DatabaseClient.class, Properties.class);
                final Object instance = constructor.newInstance(databaseClient, properties);
                final Method method = instance.getClass().getMethod(PropertyConstants.ML_MODULE_DEPLOYER_METHOD);
                method.invoke(instance);
            }
        } catch (NoSuchMethodException | SecurityException | ClassNotFoundException | IOException
                | InstantiationException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            // Pass the Throwable itself as the last argument so SLF4J logs the stack trace;
            // the original passed e.getMessage() into a message with no placeholder.
            logger.error("Error occurred while executing MarkLogic Module Deployer Maven Plugin", e);
        } finally {
            ConnectionManager.close(databaseClient);
            logger.info("Ending the ml-module-deployer plugin execution.");
        }
    }
}
package org.spout.physics.body;

import org.spout.physics.collision.shape.CollisionShape;
import org.spout.physics.math.Matrix3x3;
import org.spout.physics.math.Transform;
import org.spout.physics.math.Vector3;

/**
 * Represents an immobile rigid body. Such a body cannot move, but has all of the properties of a
 * normal rigid body, except for velocities. This can be used for scenery, floors, walls, etc.
 */
public class ImmobileRigidBody extends RigidBody {
    // Shared zero vector; copied (never returned directly) so callers cannot mutate it.
    private static final Vector3 ZERO = new Vector3(0, 0, 0);
    protected float mMass;
    // Local-space inertia tensor and its inverse; the inverse is recomputed whenever the tensor is set.
    protected final Matrix3x3 mInertiaTensorLocal = new Matrix3x3();
    protected final Matrix3x3 mInertiaTensorLocalInverse = new Matrix3x3();
    protected final Vector3 mExternalForce = new Vector3();
    protected final Vector3 mExternalTorque = new Vector3();

    /**
     * Constructs a new rigid body from its transform, mass, local inertia tensor, collision shape and
     * ID.
     *
     * @param transform The transform (position and orientation)
     * @param mass The mass
     * @param inertiaTensorLocal The local inertial tensor
     * @param collisionShape The collision shape
     * @param id The ID
     */
    public ImmobileRigidBody(Transform transform, float mass, Matrix3x3 inertiaTensorLocal, CollisionShape collisionShape, int id) {
        super(transform, collisionShape, id);
        mMass = mass;
        mInertiaTensorLocal.set(inertiaTensorLocal);
        mInertiaTensorLocalInverse.set(inertiaTensorLocal.getInverse());
    }

    /**
     * Gets the mass of the body.
     *
     * @return The body's mass
     */
    @Override
    public float getMass() {
        return mMass;
    }

    /**
     * Gets the inverse of the mass of the body.
     * Note: produces Infinity if the mass is zero (float division, no guard).
     *
     * @return The inverse of the mass
     */
    @Override
    public float getMassInverse() {
        return (1/mMass);
    }

    /**
     * Sets the mass of the body.
     * Note: does NOT update the inertia tensors; callers must do that separately if needed.
     *
     * @param mass The mass to set
     */
    public void setMass(float mass) {
        mMass = mass;
    }

    /**
     * Gets the local inertia tensor of the body (in body coordinates).
     *
     * @return The local inertia tensor
     */
    public Matrix3x3 getInertiaTensorLocal() {
        return mInertiaTensorLocal;
    }

    /**
     * Sets the local inertia tensor of the body (in body coordinates).
     *
     * @param inertiaTensorLocal The local inertia tensor to set
     */
    public void setInertiaTensorLocal(Matrix3x3 inertiaTensorLocal) {
        mInertiaTensorLocal.set(inertiaTensorLocal);
        // Keep the cached inverse consistent with the newly-set tensor.
        mInertiaTensorLocalInverse.set(mInertiaTensorLocal.getInverse());
    }

    /**
     * Gets the inertia tensor in world coordinates. The inertia tensor I_w in world coordinates is
     * computed with the local inertia tensor I_b in body coordinates by I_w = R * I_b * R^T, where R
     * is the rotation matrix (and R^T its transpose) of the current orientation quaternion of the
     * body.
     *
     * @return The world inertia tensor
     */
    public Matrix3x3 getInertiaTensorWorld() {
        // mliveTransform is inherited from RigidBody — presumably the body's current
        // world transform; confirm against the superclass.
        final Matrix3x3 orientation = mliveTransform.getOrientation().getMatrix();
        return Matrix3x3.multiply(Matrix3x3.multiply(orientation, mInertiaTensorLocal), orientation.getTranspose());
    }

    // Same R * I * R^T change of basis as getInertiaTensorWorld, applied to the inverse tensor.
    @Override
    public Matrix3x3 getInertiaTensorInverseWorld() {
        final Matrix3x3 orientation = mliveTransform.getOrientation().getMatrix();
        return Matrix3x3.multiply(Matrix3x3.multiply(orientation, mInertiaTensorLocalInverse), orientation.getTranspose());
    }

    @Override
    public Vector3 getExternalForce() {
        return mExternalForce;
    }

    @Override
    public void setExternalForce(Vector3 force) {
        mExternalForce.set(force);
    }

    @Override
    public Vector3 getExternalTorque() {
        return mExternalTorque;
    }

    @Override
    public void setExternalTorque(Vector3 torque) {
        mExternalTorque.set(torque);
    }

    /**
     * Always returns the zero vector.
     *
     * @return The zero vector
     */
    @Override
    public Vector3 getLinearVelocity() {
        return new Vector3(ZERO);
    }

    /**
     * Always returns the zero vector.
     *
     * @return The zero vector
     */
    @Override
    public Vector3 getAngularVelocity() {
        return new Vector3(ZERO);
    }

    /**
     * Always returns false.
     *
     * @return False, always
     */
    @Override
    public boolean isMotionEnabled() {
        return false;
    }
}
package pl.java.scalatech.config;

import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.Profile;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;

import com.zaxxer.hikari.HikariDataSource;

import lombok.extern.slf4j.Slf4j;
import net.sf.log4jdbc.Log4jdbcProxyDataSource;
import net.sf.log4jdbc.tools.Log4JdbcCustomFormatter;
import net.sf.log4jdbc.tools.LoggingType;

/**
 * Spring configuration (profile "logger") that wraps an embedded H2 data source
 * in a log4jdbc proxy so every SQL statement is logged on a single line.
 */
@Configuration
@Slf4j
@Profile("logger")
public class JpaLoggerConfig {

    public JpaLoggerConfig() {
        log.info("++++ JpaLoggerConfig....");
    }

    /**
     * The plain embedded H2 data source that the logging proxy wraps.
     * (Renamed from the typo "dataSourceOrginal"; private, so no external callers.)
     */
    private DataSource dataSourceOriginal() {
        return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2).build();
    }

    /**
     * Primary data source bean: a log4jdbc proxy around the embedded database.
     *
     * @return the SQL-logging proxy data source
     */
    @Bean
    @Primary
    public DataSource dataSource() {
        log.info("+++++ dataSource init ....");
        Log4jdbcProxyDataSource dataSource = new Log4jdbcProxyDataSource(dataSourceOriginal());
        dataSource.setLogFormatter(logFormater());
        return dataSource;
    }

    /**
     * Formatter bean used by the proxy; logs each statement on a single line.
     * (Bean method name kept as-is — it is the bean's public name.)
     */
    @Bean
    public Log4JdbcCustomFormatter logFormater() {
        Log4JdbcCustomFormatter formatter = new Log4JdbcCustomFormatter();
        formatter.setLoggingType(LoggingType.SINGLE_LINE);
        formatter.setSqlPrefix("SQL:\r");
        return formatter;
    }
}
package seedu.address.logic.commands; import seedu.address.commons.core.Messages; import seedu.address.commons.core.UnmodifiableObservableList; import seedu.address.commons.exceptions.IllegalValueException; import seedu.address.model.item.Item; import seedu.address.model.item.Name; import seedu.address.model.item.ReadOnlyItem; import seedu.address.model.item.UniquePersonList; import seedu.address.model.item.UniquePersonList.PersonNotFoundException; /** * Edits an item identified using it's last displayed index from the task manager. */ public class EditCommand extends Command { public static final String COMMAND_WORD = "edit"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Edits the item identified by the index number used in the last item listing.\n" + "Parameters: INDEX (must be a positive integer)" + "n/NAME" + "\n" + "Example: " + COMMAND_WORD + " 1" + "buy milk"; public static final String MESSAGE_EDIT_ITEM_SUCCESS = "Edited Item: %1$s"; public final int targetIndex; public final Name newName; public EditCommand(int targetIndex, String name) throws IllegalValueException { this.targetIndex = targetIndex; this.newName = new Name(name); } @Override public CommandResult execute() { UnmodifiableObservableList<ReadOnlyItem> lastShownList = model.getFilteredPersonList(); if (lastShownList.size() < targetIndex) { indicateAttemptToExecuteIncorrectCommand(); return new CommandResult(Messages.MESSAGE_INVALID_PERSON_DISPLAYED_INDEX); } ReadOnlyItem itemToDelete = lastShownList.get(targetIndex - 1); Item itemToAdd = new Item(itemToDelete); itemToAdd.setName(newName); try { model.deleteItem(itemToDelete); } catch (PersonNotFoundException pnfe) { assert false : "The target item cannot be missing"; } try { model.addItem(itemToAdd); return new CommandResult(String.format(MESSAGE_EDIT_ITEM_SUCCESS, itemToAdd)); } catch (UniquePersonList.DuplicatePersonException e) { return new CommandResult(MESSAGE_DUPLICATE_ITEM); } } }
package seedu.address.logic.commands;

import seedu.address.commons.core.EventsCenter;
import seedu.address.commons.events.ui.ExitAppRequestEvent;

/**
 * Terminates the program.
 */
public class ExitCommand extends Command {

    public static final String COMMAND_WORD = "exit";

    public static final String MESSAGE_EXIT_ACKNOWLEDGEMENT = "Exiting Lifekeeper as requested ...";

    public ExitCommand() {}

    /**
     * Posts an exit request on the application event bus and acknowledges it.
     */
    @Override
    public CommandResult execute() {
        final ExitAppRequestEvent exitRequest = new ExitAppRequestEvent();
        EventsCenter.getInstance().post(exitRequest);
        return new CommandResult(MESSAGE_EXIT_ACKNOWLEDGEMENT);
    }
}
package seedu.emeraldo.logic.commands;

import seedu.emeraldo.commons.exceptions.IllegalValueException;
import seedu.emeraldo.model.tag.Tag;
import seedu.emeraldo.model.tag.UniqueTagList;
import seedu.emeraldo.model.task.*;

import java.util.HashSet;
import java.util.Set;

/**
 * Adds a task to the task manager.
 */
public class AddCommand extends Command {

    public static final String COMMAND_WORD = "add";

    public static final String MESSAGE_USAGE = COMMAND_WORD + ": Adds a task to the task manager. "
            + "Parameters: \"TASK_DESCRIPTION\" [on DATE] [by DEADLINE_TIME] [from START_TIME] [to END_TIME] [#TAGS]...\n"
            + "Example: " + COMMAND_WORD
            + " \"CS2103T Lecture\" on 7 Oct 2016 from 2pm to 4pm #Important";

    public static final String MESSAGE_SUCCESS = "New task added: %1$s";
    public static final String MESSAGE_DUPLICATE_TASK = "This task already exists in the task manager";

    private final Task toAdd;

    /**
     * Builds the task to be added from its raw textual parts.
     *
     * @param description the task description text
     * @param dateTime the raw date/time text
     * @param tags the raw tag names
     * @throws IllegalValueException if any raw value is invalid
     */
    public AddCommand(String description, String dateTime, Set<String> tags)
            throws IllegalValueException {
        final Set<Tag> parsedTags = new HashSet<>();
        for (final String tagName : tags) {
            parsedTags.add(new Tag(tagName));
        }
        final Description taskDescription = new Description(description);
        final DateTime taskDateTime = new DateTime(dateTime);
        this.toAdd = new Task(taskDescription, taskDateTime, new UniqueTagList(parsedTags));
    }

    /**
     * Adds the prepared task to the model, reporting duplicates.
     */
    @Override
    public CommandResult execute() {
        assert model != null;
        try {
            model.addTask(toAdd);
            return new CommandResult(String.format(MESSAGE_SUCCESS, toAdd));
        } catch (UniqueTaskList.DuplicateTaskException e) {
            return new CommandResult(MESSAGE_DUPLICATE_TASK);
        }
    }
}
/** @@author A0142130A **/
package seedu.taskell.logic.commands;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.logging.Logger;

import seedu.taskell.commons.core.EventsCenter;
import seedu.taskell.commons.core.LogsCenter;
import seedu.taskell.commons.events.model.DisplayListChangedEvent;
import seedu.taskell.model.CommandHistory;
import seedu.taskell.model.Model;
import seedu.taskell.model.task.Task;
import seedu.taskell.model.task.UniqueTaskList.DuplicateTaskException;
import seedu.taskell.model.task.UniqueTaskList.TaskNotFoundException;

/**
 * Undo previously executed command (add or delete only for now)
 * Note: only for current session only (meaning after app is closed, history will be cleared)
 */
public class UndoCommand extends Command {

    private static final Logger logger = LogsCenter.getLogger(UndoCommand.class.getName());

    public static final String COMMAND_WORD = "undo";
    public static final String EDIT = "edit";

    public static final String MESSAGE_USAGE = COMMAND_WORD + ": Undo a previously executed command.\n"
            + "Parameters: INDEX (must be a positive integer)\n" + "Example: " + COMMAND_WORD + " 3";

    private static final String MESSAGE_DELETE_TASK_SUCCESS = "Deleted Task: %1$s";
    private static final String MESSAGE_ADD_TASK_SUCCESS = "Task added back: %1$s";
    private static final String MESSAGE_EDIT_TASK_SUCCESS = "Task edited back to old version: %1$s";
    private static final String MESSAGE_DUPLICATE_TASK = "This task already exists in the task manager";
    private static final String MESSAGE_NO_TASK_TO_UNDO = "No add or delete commands available to undo.";
    private static final String MESSAGE_COMMAND_HISTORY_EMPTY = "No command history available for undo.";
    private static final String MESSAGE_INVALID_INDEX = "Index is invalid";

    // Session-scoped history shared by every UndoCommand instance.
    private static ArrayList<CommandHistory> commandHistoryList;
    private static UndoCommand self;

    private int index;
    private CommandHistory commandHistory;

    public UndoCommand(int index) {
        logger.info("Creating UndoCommand with index: " + index);
        this.index = index;
    }

    /** Lazily-created shared instance used only to reach the model from static context. */
    public static UndoCommand getInstance() {
        if (self == null) {
            self = new UndoCommand(0);
        }
        return self;
    }

    /** Returns the display text of every command still eligible for undo. */
    public static ArrayList<String> getListOfCommandHistoryText() {
        assert commandHistoryList != null;
        UndoCommand.getInstance().updateCommandList();
        ArrayList<String> list = new ArrayList<>();
        for (CommandHistory history: commandHistoryList) {
            list.add(history.getCommandText());
        }
        return list;
    }

    //removes commandHistory with tasks not present in system
    private void updateCommandList() {
        if (model == null) {
            logger.severe("model is null");
        }
        // Use an explicit iterator: the previous for-each loop removed elements
        // from commandHistoryList while iterating it, which throws
        // ConcurrentModificationException on ArrayList.
        Iterator<CommandHistory> iterator = commandHistoryList.iterator();
        while (iterator.hasNext()) {
            CommandHistory commandHistory = iterator.next();
            if (isCommandTypeAddOrEdit(commandHistory)
                    && !model.isTaskPresent(commandHistory.getTask())) {
                iterator.remove();
            } else if (isUndoEditCommand(commandHistory)
                    && !model.isTaskPresent(commandHistory.getOldTask())) {
                iterator.remove();
            }
        }
    }

    // True for original (not-yet-undone) add/edit entries.
    private boolean isCommandTypeAddOrEdit(CommandHistory commandHistory) {
        return (commandHistory.getCommandType().contains(AddCommand.COMMAND_WORD)
                || commandHistory.getCommandType().contains(EDIT))
                && !commandHistory.isRedoTrue();
    }

    // True for edit entries that have already been undone (candidates for redo).
    private boolean isUndoEditCommand(CommandHistory commandHistory) {
        return commandHistory.isRedoTrue() && commandHistory.getCommandType().contains(EDIT);
    }

    /**
     * Dispatches the undo to the handler matching the recorded command type.
     * Guards against an empty history and an out-of-range index (including
     * index < 1, which the previous implementation let fall through to an
     * IndexOutOfBoundsException).
     */
    @Override
    public CommandResult execute() {
        if (commandHistoryList.isEmpty()) {
            return new CommandResult(String.format(MESSAGE_COMMAND_HISTORY_EMPTY));
        } else if (index < 1 || index > commandHistoryList.size()) {
            return new CommandResult(String.format(MESSAGE_INVALID_INDEX));
        }

        commandHistory = commandHistoryList.get(getOffset(index));
        if (commandHistory.isRedoTrue()) {
            return redoUndo();
        }

        switch (commandHistory.getCommandType()) {
        case AddCommand.COMMAND_WORD:
            return undoAdd();
        case DeleteCommand.COMMAND_WORD:
            return undoDelete();
        case EditStartDateCommand.COMMAND_WORD:
            return undoEdit();
        case EditEndDateCommand.COMMAND_WORD:
            return undoEdit();
        case EditDescriptionCommand.COMMAND_WORD_1:
            return undoEdit();
        case EditDescriptionCommand.COMMAND_WORD_2:
            return undoEdit();
        case EditStartTimeCommand.COMMAND_WORD:
            return undoEdit();
        case EditEndTimeCommand.COMMAND_WORD:
            return undoEdit();
        case EditPriorityCommand.COMMAND_WORD:
            return undoEdit();
        default:
            logger.severe("CommandHistory is invalid");
            return new CommandResult(String.format(MESSAGE_NO_TASK_TO_UNDO));
        }
    }

    /** Re-applies a command that was itself previously undone. */
    private CommandResult redoUndo() {
        switch (commandHistory.getCommandType()) {
        case AddCommand.COMMAND_WORD:
            return undoDelete();
        case DeleteCommand.COMMAND_WORD:
            return undoAdd();
        case EditStartDateCommand.COMMAND_WORD:
            return redoEdit();
        case EditEndDateCommand.COMMAND_WORD:
            return redoEdit();
        case EditDescriptionCommand.COMMAND_WORD_1:
            return redoEdit();
        case EditDescriptionCommand.COMMAND_WORD_2:
            return redoEdit();
        case EditStartTimeCommand.COMMAND_WORD:
            return redoEdit();
        case EditEndTimeCommand.COMMAND_WORD:
            return redoEdit();
        case EditPriorityCommand.COMMAND_WORD:
            return redoEdit();
        default:
            logger.severe("CommandHistory is invalid");
            return new CommandResult(String.format(MESSAGE_NO_TASK_TO_UNDO));
        }
    }

    /** Reverts an edit by swapping the task back to its old version. */
    private CommandResult undoEdit() {
        try {
            model.editTask(commandHistory.getTask(), commandHistory.getOldTask());
            deleteCommandHistory();
            addUndoCommand(commandHistory);
            indicateDisplayListChanged();
            return new CommandResult(String.format(MESSAGE_EDIT_TASK_SUCCESS, commandHistory.getOldTask()));
        } catch (DuplicateTaskException e) {
            return new CommandResult(MESSAGE_DUPLICATE_TASK);
        } catch (TaskNotFoundException e) {
            assert false : "The target task cannot be missing";
        }
        assert false: "Undo edit should return a command result";
        return null;
    }

    /** Re-applies an edit that was previously undone. */
    private CommandResult redoEdit() {
        try {
            model.editTask(commandHistory.getOldTask(), commandHistory.getTask());
            deleteCommandHistory();
            indicateDisplayListChanged();
            return new CommandResult(String.format(MESSAGE_EDIT_TASK_SUCCESS, commandHistory.getTask()));
        } catch (DuplicateTaskException e) {
            return new CommandResult(MESSAGE_DUPLICATE_TASK);
        } catch (TaskNotFoundException e) {
            assert false : "The target task cannot be missing";
        }
        assert false: "Redo edit should return a command result";
        return null;
    }

    /** Reverts a delete by adding the task back. */
    private CommandResult undoDelete() {
        try {
            model.addTask(commandHistory.getTask());
            deleteCommandHistory();
            addUndoCommand(commandHistory);
            indicateDisplayListChanged();
            return new CommandResult(String.format(MESSAGE_ADD_TASK_SUCCESS, commandHistory.getTask()));
        } catch (DuplicateTaskException e) {
            return new CommandResult(MESSAGE_DUPLICATE_TASK);
        }
    }

    /** Reverts an add by deleting the task. */
    private CommandResult undoAdd() {
        try {
            model.deleteTask(commandHistory.getTask());
            deleteCommandHistory();
            addUndoCommand(commandHistory);
            indicateDisplayListChanged();
        } catch (TaskNotFoundException e) {
            assert false : "The target task cannot be missing";
        }
        return new CommandResult(String.format(MESSAGE_DELETE_TASK_SUCCESS, commandHistory.getTask()));
    }

    private void deleteCommandHistory() {
        commandHistoryList.remove(commandHistory);
    }

    // Records the just-performed undo so it can itself be redone; no-op if
    // the entry already represents a redo.
    private void addUndoCommand(CommandHistory commandHistory) {
        if (commandHistory.isRedoTrue()) {
            return;
        }
        commandHistory.setCommandText("undo " + commandHistory.getCommandText());
        commandHistory.setToRedoToTrue();
        commandHistoryList.add(commandHistory);
    }

    public static void initializeCommandHistory() {
        if (commandHistoryList == null) {
            commandHistoryList = new ArrayList<>();
        }
    }

    public static void clearCommandHistory() {
        commandHistoryList.clear();
    }

    // Converts a 1-based user index to a 0-based list index.
    private static int getOffset(int index) {
        return index - 1;
    }

    public static void addCommandToHistory(String commandText, String commandType) {
        assert commandHistoryList != null;
        commandHistoryList.add(new CommandHistory(commandText, commandType));
    }

    /** Attaches the given task to the most recent history entry. */
    public static void addTaskToCommandHistory(Task task) {
        logger.info("Adding task to history");
        if (commandHistoryList.isEmpty()) {
            logger.warning("No command history to add task to");
            return;
        }
        commandHistoryList.get(getOffset(commandHistoryList.size())).setTask(task);
    }

    /** Attaches the pre-edit version of a task to the most recent history entry. */
    public static void addOldTaskToCommandHistory(Task task) {
        logger.info("Adding old task to history");
        if (commandHistoryList.isEmpty()) {
            logger.warning("No command history to add task to");
            return;
        }
        commandHistoryList.get(getOffset(commandHistoryList.size())).setOldTask(task);
    }

    /** Drops the most recent history entry after a failed command execution. */
    public static void deletePreviousCommand() {
        logger.info("Command unsuccessfully executed. Deleting command history.");
        if (commandHistoryList.isEmpty()) {
            logger.warning("No command history to delete");
            return;
        }
        commandHistoryList.remove(getOffset(commandHistoryList.size()));
    }

    public void indicateDisplayListChanged() {
        EventsCenter.getInstance().post(
                new DisplayListChangedEvent(getListOfCommandHistoryText()));
    }
}
package seedu.taskmanager.logic.parser;

import java.util.regex.Pattern;

import seedu.taskmanager.logic.parser.ArgumentTokenizer.Prefix;

/**
 * Contains Command Line Interface (CLI) syntax definitions common to multiple commands
 */
public class CliSyntax {

    /* Prefix definitions */
    public static final Prefix PREFIX_STARTDATE = new Prefix("s/");
    public static final Prefix PREFIX_ENDDATE = new Prefix("e/");
    public static final Prefix PREFIX_DESCRIPTION = new Prefix("d/");
    // NOTE(review): the source literal here was truncated/unterminated; "t/" is
    // reconstructed from the file's s/, e/, d/ convention — confirm against the
    // command parsers that use PREFIX_TAG.
    public static final Prefix PREFIX_TAG = new Prefix("t/");

    /* Alternative prefix definitions for natural variations of user input*/
    // public static final Prefix ALTERNATIVE_PREFIX_STARTDATE = new Prefix("start on ");
    // public static final Prefix ALTERNATIVE_PREFIX_ENDDATE = new Prefix("end on ");
    // public static final Prefix ALTERNATIVE_PREFIX_DESCRIPTION = new Prefix("with description ");

    /* Patterns definitions */
    // one or more keywords separated by whitespace
    public static final Pattern KEYWORDS_ARGS_FORMAT =
            Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)");

    // File Path allows numbers, "/.-", space, lowercase and uppercase letters
    public static final Pattern FILEPATH_ARGS_FORMAT = Pattern.compile("([ 0-9a-zA-Z/_.-])+");
}