answer
stringlengths
17
10.2M
package net.fortuna.ical4j.model;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;

import junit.framework.TestCase;
import junit.framework.TestSuite;

import net.fortuna.ical4j.util.CompatibilityHints;
import net.fortuna.ical4j.util.TimeZones;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Unit tests for {@link DateTime} parsing, formatting and UTC handling.
 * JUnit 3 style: {@link #suite()} builds a parameterised set of
 * {@code testToString} cases plus a few named test methods.
 *
 * @author Ben Fortuna
 */
public class DateTimeTest extends TestCase {

    private static Log log = LogFactory.getLog(DateTimeTest.class);

    // Shared registry used to resolve named time zones throughout the tests.
    private static TimeZoneRegistry registry = TimeZoneRegistryFactory.getInstance().createRegistry();

    // Fixture for the parameterised testToString() case (set by the
    // two-argument constructor, null for named-method instances).
    private DateTime dateTime;

    private String expectedToString;

    /**
     * Creates a test instance that runs a single named test method.
     *
     * @param testMethod the name of the test method to run
     */
    public DateTimeTest(String testMethod) {
        super(testMethod);
    }

    /**
     * Creates a parameterised instance that runs {@code testToString()}
     * against the given DateTime and its expected string form.
     */
    public DateTimeTest(DateTime dateTime, String expectedToString) {
        super("testToString");
        this.dateTime = dateTime;
        this.expectedToString = expectedToString;
    }

    /* (non-Javadoc)
     * @see junit.framework.TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
        // ensure relaxing parsing is disabled for these tests..
        CompatibilityHints.setHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING, false);
    }

    /**
     * Asserts that the fixture DateTime renders exactly as expected.
     */
    public void testToString() {
        assertEquals("Incorrect string representation", expectedToString, dateTime.toString());
    }

    /*
     * Class under test for void DateTime(String)
     */
    public void testDateTimeString() throws Exception {
        // A DATE value (no time component) must be rejected by DateTime.
        try {
            new DateTime("20050630");
            fail("Should throw ParseException");
        } catch (ParseException pe) {
            log.info("Exception occurred: " + pe.getMessage());
        }

        // 2000-04-02 02:00 falls in the US DST "spring forward" gap, so it
        // is not a valid local time in America/Los_Angeles — presumably the
        // reason a ParseException is expected here (TODO confirm).
        try {
            new DateTime("20000402T020000", registry.getTimeZone("America/Los_Angeles"));
            fail("Should throw ParseException");
        } catch (ParseException pe) {
            log.info("Exception occurred: " + pe.getMessage());
        }
    }

    /**
     * Test equality of DateTime instances created using different constructors.
     * Note the calendar is given 1ms — the assertions imply DateTime equality
     * ignores sub-second precision here (TODO confirm against DateTime impl).
     * @throws ParseException
     */
    public void testDateTimeEquals() throws ParseException {
        DateTime date1 = new DateTime("20050101T093000");

        Calendar calendar = Calendar.getInstance(); //TimeZone.getTimeZone("Etc/UTC"));
        calendar.clear();
        calendar.set(2005, 0, 1, 9, 30, 00);
        calendar.set(Calendar.MILLISECOND, 1);
        DateTime date2 = new DateTime(calendar.getTime());

        assertEquals(date1.toString(), date2.toString());
        assertEquals(date1, date2);
    }

    /**
     * Test UTC date-times.
     */
    public void testUtc() throws ParseException {
        // ordinary date..
        DateTime date1 = new DateTime("20050101T093000");
        assertFalse(date1.isUtc());

        // UTC date..
        DateTime date2 = new DateTime(true);
        assertTrue(date2.isUtc());

        TimeZone utcTz = registry.getTimeZone(TimeZones.UTC_ID);
        utcTz.setID(TimeZones.UTC_ID);

        // UTC timezone, but not UTC..
        DateTime date3 = new DateTime("20050101T093000", utcTz);
        // date3.setUtc(false);
        assertFalse(date3.isUtc());

        DateTime date4 = new DateTime();
        date4.setUtc(true);
        assertTrue(date4.isUtc());
        date4.setUtc(false);
        assertFalse(date4.isUtc());

        // Setting a time zone (even the UTC zone) clears the UTC flag.
        DateTime date5 = new DateTime(false);
        date5.setTimeZone(utcTz);
        assertFalse(date5.isUtc());
    }

    /**
     * Appends the expected string to the test name so parameterised cases
     * are distinguishable in test reports.
     */
    public String getName() {
        if (StringUtils.isNotEmpty(expectedToString)) {
            return super.getName() + " [" + expectedToString + "]";
        }
        return super.getName();
    }

    /**
     * Builds the full suite: parameterised toString cases plus named tests.
     * @return
     */
    public static TestSuite suite() throws ParseException {
        TestSuite suite = new TestSuite();

        // test DateTime(long)..
        DateTime dt = new DateTime(0);
        dt.setUtc(true);
        // dt.setTimeZone(TimeZoneRegistryFactory.getInstance().createRegistry().getTimeZone(TimeZones.GMT_ID));
        // assertEquals("19700101T000000", dt.toString());
        suite.addTest(new DateTimeTest(dt, "19700101T000000Z"));

        // test DateTime(Date)..
        Calendar cal = Calendar.getInstance(); //TimeZone.getTimeZone("GMT"));
        cal.set(Calendar.YEAR, 1984);
        // months are zero-based..
        cal.set(Calendar.MONTH, 3);
        cal.set(Calendar.DAY_OF_MONTH, 17);
        cal.set(Calendar.HOUR_OF_DAY, 3);
        cal.set(Calendar.MINUTE, 15);
        cal.set(Calendar.SECOND, 34);
        suite.addTest(new DateTimeTest(new DateTime(cal.getTime()), "19840417T031534"));

        // test DateTime(String)..
        suite.addTest(new DateTimeTest(new DateTime("20000827T020000"), "20000827T020000"));
        suite.addTest(new DateTimeTest(new DateTime("20070101T080000"), "20070101T080000"));
        suite.addTest(new DateTimeTest(new DateTime("20050630T093000"), "20050630T093000"));
        suite.addTest(new DateTimeTest(new DateTime("20050630T093000Z"), "20050630T093000Z"));
        suite.addTest(new DateTimeTest(new DateTime("20000402T020000",
                registry.getTimeZone("Australia/Melbourne")), "20000402T020000"));
        suite.addTest(new DateTimeTest(new DateTime("20000402T020000"), "20000402T020000"));

        // Round-trip every day of the year 2000 through parse + toString.
        DateFormat df = new SimpleDateFormat("yyyyMMdd'T'HHmmss");
        // Calendar cal = Calendar.getInstance(); //java.util.TimeZone.getTimeZone("America/Los_Angeles"));
        cal.clear();
        cal.set(2000, 0, 1, 2, 0, 0);
        for (int i = 0; i < 365; i++) {
            String dateString = df.format(cal.getTime());
            suite.addTest(new DateTimeTest(new DateTime(dateString), dateString));
            cal.add(Calendar.DAY_OF_YEAR, 1);
        }

        // other tests..
        suite.addTest(new DateTimeTest("testDateTimeString"));
        suite.addTest(new DateTimeTest("testDateTimeEquals"));
        suite.addTest(new DateTimeTest("testUtc"));
        return suite;
    }
}
package dr.inference.model;

import dr.util.NumberFormatter;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.*;

/**
 * A likelihood function which is simply the product of a set of likelihood functions.
 *
 * @author Alexei Drummond
 * @author Andrew Rambaut
 * @version $Id: CompoundLikelihood.java,v 1.19 2005/05/25 09:14:36 rambaut Exp $
 */
public class CompoundLikelihood implements Likelihood {

    /**
     * @param threads     number of evaluation threads; 0 for single-threaded,
     *                    negative for an automatic pool sized to the number of
     *                    likelihoods (only when there is more than one)
     * @param likelihoods the component likelihoods to multiply together
     */
    public CompoundLikelihood(int threads, Collection<Likelihood> likelihoods) {

        if (threads < 0 && likelihoods.size() > 1) {
            // asking for an automatic threadpool size
            threadCount = likelihoods.size();
        } else {
            threadCount = threads;
        }

        if (threadCount > 0) {
            pool = Executors.newFixedThreadPool(threadCount);
        } else {
            pool = null;
        }

        for (Likelihood l : likelihoods) {
            addLikelihood(l);
        }
    }

    /**
     * Registers a component likelihood (ignoring duplicates), wires its model
     * into the compound model and prepares a callable for parallel evaluation.
     */
    private void addLikelihood(Likelihood likelihood) {
        if (!likelihoods.contains(likelihood)) {
            likelihoods.add(likelihood);
            if (likelihood.getModel() != null) {
                compoundModel.addModel(likelihood.getModel());
            }
            likelihoodCallers.add(new LikelihoodCaller(likelihood));
        }
    }

    public int getLikelihoodCount() {
        return likelihoods.size();
    }

    public final Likelihood getLikelihood(int i) {
        return likelihoods.get(i);
    }

    // **************************************************************
    // Likelihood IMPLEMENTATION
    // **************************************************************

    public Model getModel() {
        return compoundModel;
    }

    // todo: remove in release
    static int DEBUG = 0;

    /**
     * Sums the component log-likelihoods (i.e. multiplies the likelihoods),
     * either sequentially or via the thread pool.
     *
     * @return the total log likelihood, or -Inf as soon as any component is -Inf
     */
    public double getLogLikelihood() {
        double logLikelihood = 0.0;

        if (pool == null) {
            // Single threaded
            for (Likelihood likelihood : likelihoods) {
                final double l = likelihood.getLogLikelihood();
                // if the likelihood is zero then short cut the rest of the likelihoods
                // This means that expensive likelihoods such as TreeLikelihoods should
                // be put after cheap ones such as BooleanLikelihoods
                if (l == Double.NEGATIVE_INFINITY) {
                    return Double.NEGATIVE_INFINITY;
                }
                logLikelihood += l;
            }
        } else {
            try {
                // Note: no -Inf short-circuit in the parallel path; all
                // callers are evaluated before the results are summed.
                List<Future<Double>> results = pool.invokeAll(likelihoodCallers);
                for (Future<Double> result : results) {
                    logLikelihood += result.get();
                }
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers up the stack can
                // still observe the interruption (previously swallowed).
                Thread.currentThread().interrupt();
                e.printStackTrace();
            } catch (ExecutionException e) {
                e.printStackTrace();
            }
        }

        if (DEBUG > 0) {
            int t = DEBUG;
            DEBUG = 0; // avoid recursive diagnosis output
            System.err.println(getId() + ": " + getDiagnosis(0) + " = " + logLikelihood);
            DEBUG = t;
        }

        return logLikelihood;
    }

    public void makeDirty() {
        for (Likelihood likelihood : likelihoods) {
            likelihood.makeDirty();
        }
    }

    public String getDiagnosis() {
        return getDiagnosis(0);
    }

    /**
     * Builds a human-readable per-component breakdown of the likelihood.
     *
     * @param indent indentation depth; negative suppresses newlines/indentation
     * @return comma-separated "name=value" entries, recursing into nested
     *         compound likelihoods
     */
    public String getDiagnosis(int indent) {
        // StringBuilder instead of repeated String concatenation (was O(n^2)).
        final StringBuilder message = new StringBuilder();
        boolean first = true;

        final NumberFormatter nf = new NumberFormatter(6);

        for (Likelihood lik : likelihoods) {

            if (!first) {
                message.append(", ");
            } else {
                first = false;
            }

            if (indent >= 0) {
                message.append('\n');
                for (int i = 0; i < indent; i++) {
                    message.append(' ');
                }
            }
            message.append(lik.prettyName()).append('=');

            if (lik instanceof CompoundLikelihood) {
                final String d = ((CompoundLikelihood) lik).getDiagnosis(indent < 0 ? -1 : indent + 2);
                if (d != null && d.length() > 0) {
                    message.append('(').append(d);
                    if (indent >= 0) {
                        message.append('\n');
                        for (int i = 0; i < indent; i++) {
                            message.append(' ');
                        }
                    }
                    message.append(')');
                }
            } else {
                final double logLikelihood = lik.getLogLikelihood();
                if (logLikelihood == Double.NEGATIVE_INFINITY) {
                    message.append("-Inf");
                } else if (Double.isNaN(logLikelihood)) {
                    message.append("NaN");
                } else {
                    message.append(nf.formatDecimal(logLikelihood, 4));
                }
            }
        }

        return message.toString();
    }

    public String toString() {
        return getId();
        // really bad for debugging
        //return Double.toString(getLogLikelihood());
    }

    public String prettyName() {
        return Abstract.getPrettyName(this);
    }

    public boolean isUsed() {
        return used;
    }

    public void setUsed() {
        used = true;
        for (Likelihood l : likelihoods) {
            l.setUsed();
        }
    }

    public int getThreadCount() {
        return threadCount;
    }

    // **************************************************************
    // Loggable IMPLEMENTATION
    // **************************************************************

    /**
     * @return the log columns.
     */
    public dr.inference.loggers.LogColumn[] getColumns() {
        return new dr.inference.loggers.LogColumn[]{
                new LikelihoodColumn(getId())
        };
    }

    private class LikelihoodColumn extends dr.inference.loggers.NumberColumn {
        public LikelihoodColumn(String label) {
            super(label);
        }

        public double getDoubleValue() {
            return getLogLikelihood();
        }
    }

    // **************************************************************
    // Identifiable IMPLEMENTATION
    // **************************************************************

    private String id = null;

    public void setId(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    private boolean used = false;

    private final int threadCount;

    private final ExecutorService pool;

    private final ArrayList<Likelihood> likelihoods = new ArrayList<Likelihood>();
    private final CompoundModel compoundModel = new CompoundModel("compoundModel");

    private final List<Callable<Double>> likelihoodCallers = new ArrayList<Callable<Double>>();

    /** Wraps a likelihood so the pool can evaluate it as a task. */
    class LikelihoodCaller implements Callable<Double> {

        public LikelihoodCaller(Likelihood likelihood) {
            this.likelihood = likelihood;
        }

        public Double call() throws Exception {
            return likelihood.getLogLikelihood();
        }

        private final Likelihood likelihood;
    }
}
package org.ethereum.db;

import org.ethereum.core.BlockHeaderWrapper;
import org.ethereum.db.index.ArrayListIndex;
import org.ethereum.db.index.Index;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * In-memory {@link HeaderStore}: header numbers are kept in an ordered
 * {@link Index} and the headers themselves in a number-keyed map.
 * All compound operations are guarded by a single private mutex.
 *
 * @author Mikhail Kalinin
 * @since 16.09.2015
 */
public class HeaderStoreMem implements HeaderStore {

    private static final Logger logger = LoggerFactory.getLogger("blockqueue");

    private Map<Long, BlockHeaderWrapper> headers =
            Collections.synchronizedMap(new HashMap<Long, BlockHeaderWrapper>());
    private final Index index = new ArrayListIndex(Collections.<Long>emptySet());

    // Single lock for every compound read-modify-write on headers + index.
    private final Object mutex = new Object();

    @Override
    public void open() {
        logger.info("Header store opened");
    }

    @Override
    public void close() {
    }

    @Override
    public void add(BlockHeaderWrapper header) {
        synchronized (mutex) {
            if (index.contains(header.getNumber())) {
                return;
            }
            headers.put(header.getNumber(), header);
            index.add(header.getNumber());
        }
    }

    @Override
    public void addBatch(Collection<BlockHeaderWrapper> headers) {
        synchronized (mutex) {
            List<Long> numbers = new ArrayList<>(headers.size());
            for (BlockHeaderWrapper b : headers) {
                // skip numbers already stored or duplicated within the batch
                if (!index.contains(b.getNumber()) && !numbers.contains(b.getNumber())) {
                    this.headers.put(b.getNumber(), b);
                    numbers.add(b.getNumber());
                }
            }
            index.addAll(numbers);
        }
    }

    @Override
    public BlockHeaderWrapper peek() {
        synchronized (mutex) {
            if (index.isEmpty()) {
                return null;
            }
            Long idx = index.peek();
            return headers.get(idx);
        }
    }

    @Override
    public BlockHeaderWrapper poll() {
        synchronized (mutex) {
            return pollInner();
        }
    }

    @Override
    public List<BlockHeaderWrapper> pollBatch(int qty) {
        synchronized (mutex) {
            // Emptiness must be checked under the lock (was checked outside).
            if (index.isEmpty()) {
                return Collections.emptyList();
            }

            // Capacity is at most the number of headers we can actually
            // return (original used max(qty, size), over-allocating).
            List<BlockHeaderWrapper> headers = new ArrayList<>(Math.min(qty, size()));
            while (headers.size() < qty) {
                BlockHeaderWrapper header = pollInner();
                if (header == null) {
                    break;
                }
                headers.add(header);
            }
            return headers;
        }
    }

    @Override
    public boolean isEmpty() {
        return index.isEmpty();
    }

    @Override
    public int size() {
        return index.size();
    }

    @Override
    public void clear() {
        // Guard the two-step clear so readers never see a cleared map with a
        // populated index (original cleared without locking).
        synchronized (mutex) {
            headers.clear();
            index.clear();
        }
    }

    @Override
    public void drop(byte[] nodeId) {

        List<Long> removed = new ArrayList<>();

        // Synchronize on the shared mutex like every other method; the
        // original locked on `index`, which did not exclude add/poll callers.
        synchronized (mutex) {

            // Headers are dropped from the first one sent by the given node
            // to the end of the index.
            boolean hasSent = false;
            for (Long idx : index) {
                BlockHeaderWrapper h = headers.get(idx);
                if (!hasSent) {
                    hasSent = h.sentBy(nodeId);
                }
                if (hasSent) {
                    removed.add(idx);
                }
            }

            headers.keySet().removeAll(removed);
            index.removeAll(removed);
        }

        if (logger.isDebugEnabled()) {
            if (removed.isEmpty()) {
                logger.debug("0 headers are dropped out");
            } else {
                logger.debug("{} headers [{}..{}] are dropped out", removed.size(),
                        removed.get(0), removed.get(removed.size() - 1));
            }
        }
    }

    /**
     * Removes and returns the header with the lowest indexed number.
     * Must be called while holding {@code mutex}.
     */
    private BlockHeaderWrapper pollInner() {
        if (index.isEmpty()) {
            return null;
        }

        Long idx = index.poll();
        BlockHeaderWrapper header = headers.get(idx);
        headers.remove(idx);

        if (header == null) {
            logger.error("Header for index {} is null", idx);
        }

        return header;
    }
}
package edu.cmu.cs.diamond.opendiamond;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * A Diamond filter. Use with {@link SearchFactory} to perform Diamond searches.
 * Instances are immutable: collection parameters and the blob argument are
 * defensively copied on construction.
 */
public class Filter {
    final private FilterCode code;

    final private List<String> dependencies;

    final private List<String> arguments;

    final private String name;

    final private double minScore;

    final private double maxScore;

    final private byte blob[];

    final private Signature blobSig;

    /**
     * Constructs a new filter with the given parameters (including blob
     * and maxScore).
     *
     * @param name
     *            the name of this filter
     * @param code
     *            the binary code that implements the Filter
     * @param minScore
     *            the filter score below which an object will be dropped
     * @param maxScore
     *            the filter score above which an object will be dropped
     * @param dependencies
     *            a list of other filter names that this filter depends on
     * @param arguments
     *            a list of arguments to the filter
     * @param blob
     *            a binary argument to this filter
     */
    public Filter(String name, FilterCode code, double minScore,
            double maxScore, Collection<String> dependencies,
            List<String> arguments, byte blob[]) {
        this.name = name.trim();
        this.code = code;
        this.minScore = minScore;
        this.maxScore = maxScore;
        this.dependencies = new ArrayList<String>(dependencies);
        this.arguments = new ArrayList<String>(arguments);
        // Defensive copy: the caller keeps its array, so later mutation by
        // the caller cannot silently change this filter (or desync blobSig).
        this.blob = blob.clone();
        blobSig = new Signature(this.blob);
    }

    /**
     * Constructs a new Filter with the given parameters (including
     * maxScore).
     *
     * @param name
     *            the name of the new filter
     * @param code
     *            the binary code that implements the filter
     * @param minScore
     *            the filter score below which an object will be dropped
     * @param maxScore
     *            the filter score above which an object will be dropped
     * @param dependencies
     *            a list of other filter names that this filter depends on
     * @param arguments
     *            a list of arguments to the filter
     */
    public Filter(String name, FilterCode code, double minScore,
            double maxScore, Collection<String> dependencies,
            List<String> arguments) {
        this(name, code, minScore, maxScore, dependencies, arguments,
                new byte[0]);
    }

    /**
     * Constructs a new filter with the given parameters (including blob).
     *
     * @param name
     *            the name of this filter
     * @param code
     *            the binary code that implements the Filter
     * @param minScore
     *            the filter score below which an object will be dropped
     * @param dependencies
     *            a list of other filter names that this filter depends on
     * @param arguments
     *            a list of arguments to the filter
     * @param blob
     *            a binary argument to this filter
     */
    public Filter(String name, FilterCode code, double minScore,
            Collection<String> dependencies, List<String> arguments,
            byte blob[]) {
        this(name, code, minScore, Double.POSITIVE_INFINITY, dependencies,
                arguments, blob);
    }

    /**
     * Constructs a new Filter with the given parameters.
     *
     * @param name
     *            the name of the new filter
     * @param code
     *            the binary code that implements the filter
     * @param minScore
     *            the filter score below which an object will be dropped
     * @param dependencies
     *            a list of other filter names that this filter depends on
     * @param arguments
     *            a list of arguments to the filter
     */
    public Filter(String name, FilterCode code, double minScore,
            Collection<String> dependencies, List<String> arguments) {
        this(name, code, minScore, Double.POSITIVE_INFINITY, dependencies,
                arguments, new byte[0]);
    }

    @Override
    public String toString() {
        return getName() + ", bloblen: " + blob.length;
    }

    /**
     * Gets the name of this filter.
     *
     * @return the name of this filter
     */
    public String getName() {
        return name;
    }

    public List<String> getDependencies() {
        return dependencies;
    }

    FilterCode getFilterCode() {
        return code;
    }

    Signature getBlobSig() {
        return blobSig;
    }

    List<String> getArguments() {
        return arguments;
    }

    double getMinScore() {
        return minScore;
    }

    double getMaxScore() {
        return maxScore;
    }

    byte[] getBlob() {
        // Defensive copy on the way out: callers cannot mutate our state.
        return blob.clone();
    }
}
package com.gooddata.util;

import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import au.com.bytecode.opencsv.CSVReader;

import com.ibm.icu.text.Transliterator;

/**
 * GoodData string utilities: identifier/title normalisation, date-format
 * translation, joining and CSV line parsing.
 *
 * @author zd &lt;zd@gooddata.com&gt;
 * @version 1.0
 */
public class StringUtil {

    // Java date-format token -> MySQL date-format token. Order matters:
    // e.g. "MM" must be replaced before "mm" would otherwise match it.
    private static final String[][] DATE_FORMAT_CONVERSION =
            {{"MM","%m"},{"yyyy","%Y"},{"yy","%y"},{"dd","%d"},{"hh","%h"},
             {"HH","%H"},{"mm","%i"},{"ss","%s"}};

    /**
     * Formats a string as identifier
     * Currently only converts to the lowercase and replace spaces
     * @param s the string to convert to identifier
     * @return converted string
     */
    public static String toIdentifier(String s) {
        return convertToIdentifier(s);
    }

    /**
     * Formats a string as title: transliterates to Latin, strips
     * diacritics and double quotes, and trims whitespace.
     * @param s the string to convert to a title (may be null)
     * @return converted string, or null if the input was null
     */
    public static String toTitle(String s) {
        if (s == null) {
            return s;
        }
        Transliterator t = Transliterator.getInstance(
                "Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC");
        s = t.transliterate(s);
        s = s.replaceAll("\"", "");
        return s.trim();
    }

    /**
     * Transliterates to Latin, drops everything but [a-zA-Z0-9_], strips
     * leading digits/underscores and lowercases the result.
     */
    private static String convertToIdentifier(String s) {
        Transliterator t = Transliterator.getInstance(
                "Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC");
        s = t.transliterate(s);
        s = s.replaceAll("[^a-zA-Z0-9_]", "");
        s = s.replaceAll("^[0-9_]*", "");
        //s = s.replaceAll("[_]*$", "");
        //s = s.replaceAll("[_]+", "_");
        return s.toLowerCase().trim();
    }

    /**
     * Converts the Java date format string to the MySQL format
     * @param dateFormat Java date format
     * @return MySQL date format
     */
    public static String convertJavaDateFormatToMySql(String dateFormat) {
        for (int i = 0; i < DATE_FORMAT_CONVERSION.length; i++) {
            dateFormat = dateFormat.replace(DATE_FORMAT_CONVERSION[i][0],
                    DATE_FORMAT_CONVERSION[i][1]);
        }
        return dateFormat;
    }

    /**
     * Converts a {@link Collection} to a <tt>separator<tt> separated string
     *
     * @param separator
     * @param list
     * @return <tt>separator<tt> separated string version of the given list
     */
    public static String join(String separator, Collection<String> list) {
        return join(separator, list, null);
    }

    /**
     * Converts a {@link Collection} to a <tt>separator<tt> separated string.
     * If the <tt>replacement</tt> parameter is not null, it is used to populate
     * the result string instead of list elements.
     *
     * @param separator
     * @param list
     * @param replacement
     * @return <tt>separator<tt> separated string version of the given list
     */
    public static String join(String separator, Collection<String> list, String replacement) {
        // StringBuilder: no need for StringBuffer's synchronization here.
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        for (final String s : list) {
            if (first) {
                first = false;
            } else {
                sb.append(separator);
            }
            sb.append(replacement == null ? s : replacement);
        }
        return sb.toString();
    }

    /**
     * Parse a single CSV line.
     * @param elements CSV line (may be null or empty)
     * @return the line's fields; an empty list when the input is null or
     *         yields no record
     * @throws java.io.IOException on a CSV read error
     */
    public static List<String> parseLine(String elements) throws java.io.IOException {
        if (elements == null) {
            return new ArrayList<String>();
        }
        // try/finally: the reader was previously never closed (leak).
        CSVReader cr = new CSVReader(new StringReader(elements));
        try {
            String[] parsed = cr.readNext();
            if (parsed == null) {
                // readNext() returns null at end of input (e.g. empty string);
                // previously this fell into Arrays.asList(null) and NPE'd.
                return new ArrayList<String>();
            }
            return Arrays.asList(parsed);
        } finally {
            cr.close();
        }
    }
}
package net.minecraft.src;

import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.awt.image.ImageObserver;

import cpw.mods.fml.client.FMLClientHandler;
import cpw.mods.fml.client.FMLTextureFX;

/**
 * A texture override for animations, it takes a vertical image of
 * texture frames and constantly rotates them in the texture.
 *
 * NOTE(review): this class references GL11 and RenderEngine with no import
 * visible in this file — presumably resolved elsewhere in the build; verify.
 * Obfuscated (MCP) member names below are documented as assumptions.
 */
public class ModTextureAnimation extends FMLTextureFX {
    // Number of ticks between frame advances.
    private final int tickRate;
    // One RGBA byte array per animation frame, filled in setup().
    private byte[][] images;
    // Index of the frame currently shown.
    private int index = 0;
    // Tick counter; starts at tickRate so the first frame swaps immediately.
    private int ticks = 0;

    // Path of the texture sheet this animation targets.
    private String targetTex = null;
    // Source image: a vertical strip of square frames.
    private BufferedImage imgData = null;

    public ModTextureAnimation(int icon, int target, BufferedImage image, int tickCount) {
        this(icon, 1, target, image, tickCount);
    }

    // target selects the sheet: 0 -> terrain, anything else -> items.
    public ModTextureAnimation(int icon, int size, int target, BufferedImage image, int tickCount) {
        this(icon, size, (target == 0 ? "/terrain.png" : "/gui/items.png"), image, tickCount);
    }

    public ModTextureAnimation(int icon, int size, String target, BufferedImage image, int tickCount) {
        super(icon);
        RenderEngine re = FMLClientHandler.instance().getClient().field_71446_o;

        targetTex = target;
        // field_76849_e: presumably the tile size multiplier — TODO confirm
        field_76849_e = size;
        // field_76847_f: presumably the GL texture id for the target sheet
        field_76847_f = re.func_78341_b(target);

        tickRate = tickCount;
        // Pre-load the counter so the first func_76846_a() call shows frame 0.
        ticks = tickCount;

        imgData = image;
    }

    @Override
    public void setup() {
        super.setup();

        int sWidth = imgData.getWidth();
        int sHeight = imgData.getHeight();
        int tWidth = tileSizeBase;
        int tHeight = tileSizeBase;

        // Frame count assumes square frames stacked vertically.
        // NOTE(review): sHeight / sWidth is integer division already, so the
        // floor/double cast is redundant (harmless for positive ints).
        int frames = (int)Math.floor((double)(sHeight / sWidth));

        if (frames < 1) {
            throw new IllegalArgumentException(String.format("Attempted to create a TextureAnimation with no complete frames: %dx%d", sWidth, sHeight));
        } else {
            images = new byte[frames][];

            BufferedImage image = imgData;
            // Rescale the whole strip to the tile size if it doesn't match.
            // NOTE(review): 6 is presumably BufferedImage.TYPE_4BYTE_ABGR — confirm.
            if (sWidth != tWidth) {
                BufferedImage b = new BufferedImage(tWidth, tHeight * frames, 6);
                Graphics2D g = b.createGraphics();
                g.drawImage(imgData, 0, 0, tWidth, tHeight * frames, 0, 0, sWidth, sHeight, (ImageObserver)null);
                g.dispose();
                image = b;
            }

            // Convert each frame from packed ARGB ints to an RGBA byte array.
            for (int frame = 0; frame < frames; frame++) {
                int[] pixels = new int[tileSizeSquare];
                image.getRGB(0, tHeight * frame, tWidth, tHeight, pixels, 0, tWidth);
                images[frame] = new byte[tileSizeSquare << 2];
                for (int i = 0; i < pixels.length; i++) {
                    int i4 = i * 4;
                    images[frame][i4 + 0] = (byte)(pixels[i] >> 16 & 255); // red
                    images[frame][i4 + 1] = (byte)(pixels[i] >> 8 & 255);  // green
                    images[frame][i4 + 2] = (byte)(pixels[i] >> 0 & 255);  // blue
                    images[frame][i4 + 3] = (byte)(pixels[i] >> 24 & 255); // alpha
                }
            }
        }
    }

    // Tick update: advance to the next frame (wrapping) every tickRate ticks.
    // field_76852_a is presumably the output pixel buffer uploaded by FML.
    public void func_76846_a() {
        if (++ticks >= tickRate) {
            if (++index >= images.length) {
                index = 0;
            }

            field_76852_a = images[index];
            ticks = 0;
        }
    }

    // Bind the target texture sheet before the frame data is uploaded.
    public void func_76845_a(RenderEngine renderEngine) {
        GL11.glBindTexture(GL11.GL_TEXTURE_2D, field_76847_f);
    }
}
package nl.rubensten.pp2lal2pp.parser; import nl.rubensten.pp2lal2pp.ParseException; import nl.rubensten.pp2lal2pp.api.APIFunction; import nl.rubensten.pp2lal2pp.lang.*; import nl.rubensten.pp2lal2pp.lang.Number; import nl.rubensten.pp2lal2pp.util.Regex; import java.util.*; /** * Takes one big code string and parses it to a {@link Program}. * <p> * This class does not handle inclusions and definitions. * * @author Ruben Schellekens */ public class Parser { /** * The complete string of code to parse. */ String input; /** * The program that gets parsed. */ private Program program; /** * The name of the function that is currently being parsed. */ private String currentFunction; /** * The last comment covered. */ private Comment lastComment; Parser(String input) { this.input = input; } /** * Parses the complete input. */ public Program parse() throws ParseException { program = new Program(); program.setHeader(parseHeaderComment(input)); Function function; LineTokeniser lines = new LineTokeniser(input); List<String> pp2doc = new ArrayList<>(); for (Iterator<String> it = lines.iterator(); it.hasNext(); ) { String line = it.next(); Tokeniser tokens = new Tokeniser(line); // Parse global variables. if (parseGlobal(program, tokens)) { pp2doc.clear(); continue; } // Comments if (line.startsWith(" String comment = Regex.replaceAll("^#;?\\s*", line, ""); lastComment = new Comment(comment); pp2doc.add(comment); continue; } // Define else if (line.startsWith("define")) { program.addDefinition(parseDefine(new Tokeniser(line))); continue; } boolean isInterrupt = line.startsWith("interrupt"); if (!line.startsWith("function") && !isInterrupt) { continue; } // Parse function. 
currentFunction = tokens.getToken(1); List<Variable> arguments = new ArrayList<>(); if (!tokens.equals(2, "(")) { throw new ParseException("Function " + currentFunction + " must be followed by " + "parentheses."); } for (int i = 3; i <= tokens.size(); i++) { if (isInterrupt) { break; } if (tokens.equals(i, ")")) { break; } if (tokens.equals(i, ",")) { continue; } String name = tokens.getToken(i); if (arguments.parallelStream().anyMatch(v -> v.getName().equals(name))) { throw new ParseException("Function " + currentFunction + " can't have two of " + "the same argument names (" + name + ")."); } arguments.add(new Variable(name)); } if (isInterrupt) { function = new Interrupt(currentFunction, new ArrayList<>(pp2doc)); } else { function = new Function(currentFunction, new ArrayList<>(pp2doc), arguments); } pp2doc.clear(); Block block = parseFunction(it); function.setContents(block); program.addFunction(function); } return program; } /** * Parses the contents of a function block. * * @param lines * The iterator of the LineTokeniser. * @return The block of code for the given function. */ private Block parseFunction(Iterator<String> lines) { List<Element> body = new ArrayList<>(); while (lines.hasNext()) { Tokeniser line = new Tokeniser(lines.next()); StringBuilder comment = new StringBuilder(); boolean parsed = false; // Function is done. if (line.isFirst("}")) { break; } // Variable declarations. if (line.isFirst("var")) { body.add(parseVariable(line)); parsed = true; } // Loops else if (line.isFirst("loop")) { body.add(parseLoop(lines, line)); parsed = true; } // If-Else statements. else if (line.isFirst("if")) { body.add(parseIfElse(lines, line)); parsed = true; } // Function calls. else if (isFunctionCall(line)) { body.add(parseFunctionCall(line)); parsed = true; } // Continue else if (line.isFirst("continue")) { body.add(new Continue()); parsed = true; } // Return else if (line.isFirst("return")) { body.add(parseReturn(line)); parsed = true; } // Full line comment. 
else if (line.equals(0, " parsed = true; } // Inject raw assembly. else if (line.isFirst("inject")) { body.add(parseInject(lines, line)); parsed = true; } // Anything else. if (!parsed && !line.isFirstIgnore("else", "}")) { Operation op = parseOperation(line.iterator(), line); if (op != null) { body.add(op); } } // Comments for (String token : line) { // Comments if (token.equals(" comment.append(" "); continue; } if (comment.length() > 0) { comment.append(token).append(" "); } } if (comment.length() > 0) { body.add(new Comment(comment.toString().trim())); } } return new Block(body); } /** * @return The parsed program if the {@link Parser#parse()} method has been called. * @throws ParseException * if the parse method has not been called before. */ public Program getProgram() throws ParseException { if (program == null) { throw new ParseException("parse() has not been called."); } return program; } /** * Parses a definition. * * @param line * The line the definition is on. * @return The Definition-object. */ private Definition parseDefine(Tokeniser line) { if (line.size() != 3) { throw new ParseException("illegal definition at line '" + line.getOriginal() + "'"); } String comment = (lastComment == null ? "" : lastComment.getContents()); String name = line.getToken(1); Value value = Value.parse(line.getToken(2), program); if (!(value instanceof Number)) { throw new ParseException("wrong number format at definition '" + line.getOriginal() + ""); } return new Definition(name, value, comment); } /** * Parses a return statement. * * @param line * The line the return statement is on. * @return The return object. */ private Return parseReturn(Tokeniser line) { if (line.size() == 1) { return new Return(); } if (line.size() >= 3) { if (line.equals(2, "(") && !line.equals(1, "'")) { // Function call. 
String name = line.getToken(1); List<Variable> variables = new ArrayList<>(); if (!line.equals(3, ")")) { for (int i = 3; i < line.sizeNoComments(); i += 2) { Value val = Value.parse(line.getToken(i), program); if (val instanceof Number) { variables.add(new Variable("num", val).setJustNumber(true)); } else { variables.add(new Variable(val.stringRepresentation())); } } } FunctionCall call = new FunctionCall(name, variables); program.registerAPIFunction(name); return new ElementReturn(call); } else if (line.equals(1, "'") && line.equals(3, "'")) { String character = line.join(1, 3, ""); Value value = Value.parse(character, program); return new Return(value); } } Value value = Value.parse(line.getToken(1), program); if (value instanceof Number) { return new Return(value); } return new ElementReturn(new Variable(value.stringRepresentation(), value)); } /** * Parses the header comment lines. * * @return A list of all lines in the header comment WITHOUT #s. */ private List<String> parseHeaderComment(String contents) { List<String> header = new ArrayList<>(); for (String string : contents.split("\n")) { String line = string.trim(); if (!line.startsWith(" return header; } header.add(Regex.replaceAll("^#\\s*;\\s*", line, "")); } return header; } /** * Parses the line to a FunctionCall. * * @param line * The line to parse. * @return The parsed FunctionCall object. 
*/
    private FunctionCall parseFunctionCall(Tokeniser line) {
        try {
            List<Variable> args = new ArrayList<>();
            // "name ( )" — token 2 being ")" means the call has no arguments.
            if (!line.equals(2, ")")) {
                // Arguments occupy every second token starting at index 2; the tokens in
                // between are presumably the "," separators — TODO confirm against Tokeniser.
                for (int i = 2; i < line.sizeNoComments(); i += 2) {
                    Value value = Value.parse(line.getToken(i), program);
                    if (value instanceof Number) {
                        // Numeric literals get a synthetic variable flagged as a bare number.
                        args.add(new Variable("number" + i, value).setJustNumber(true));
                    } else {
                        args.add(new Variable(line.getToken(i)));
                    }
                }
            }
            String name = line.getToken(0);
            // NOTE(review): registered unconditionally; presumably a no-op for non-API
            // names — confirm in Program.registerAPIFunction.
            program.registerAPIFunction(name);
            return new FunctionCall(name, args);
        } catch (IndexOutOfBoundsException exception) {
            // Running off the end of the token list means the call is malformed.
            throw new ParseException("Invalid definition for function call '"
                    + line.getOriginal() + "'.");
        }
    }

    /**
     * Checks if the given line is a function call.
     *
     * A function call has the shape "name ( args... )": at least three tokens,
     * "(" at index 1 and ")" as the last non-comment token.
     *
     * @param line
     *            The line to check for.
     * @return <code>true</code> if the line is a function call, <code>false</code> otherwise.
     */
    private boolean isFunctionCall(Tokeniser line) {
        String contents = line.getOriginal(); // NOTE(review): unused local.
        if (line.sizeNoComments() < 3) {
            return false;
        }
        if (!line.equals(1, "(")) {
            return false;
        }
        return line.equals(line.sizeNoComments() - 1, ")");
    }

    /**
     * Parses the contents of a block.
     *
     * Consumes lines from the iterator until a closing "}" is seen, dispatching
     * each line to the matching parse method by its leading keyword.
     *
     * @param lines
     *            The iterator that iterates over the lines.
     * @param line
     *            The line where the iterator is at currently.
     * @return The parsed contents of the block.
     */
    private Block parseBlock(Iterator<String> lines, Tokeniser line) {
        List<Element> body = new ArrayList<>();
        if (!lines.hasNext()) {
            return new Block(body);
        }
        boolean skipped = false;
        // If the opener "{" ends the current line, advance to the first body line now
        // and remember not to advance again at the top of the loop.
        if (line.last().equals("{")) {
            line = new Tokeniser(lines.next());
            skipped = true;
        }
        while (true) {
            if (!lines.hasNext()) {
                break;
            }
            if (!skipped) {
                line = new Tokeniser(lines.next());
            }
            skipped = false;
            // A line that starts with "}" closes this block.
            if (line.isFirst("}")) {
                break;
            }
            StringBuilder comment = new StringBuilder();
            boolean parsed = false;
            // Variable declarations.
            if (line.isFirst("var")) {
                body.add(parseVariable(line));
                parsed = true;
            }
            // Loops
            else if (line.isFirst("loop")) {
                body.add(parseLoop(lines, line));
                parsed = true;
            }
            // If-Else statements.
            else if (line.isFirst("if")) {
                body.add(parseIfElse(lines, line));
                parsed = true;
            }
            // Function calls.
            else if (isFunctionCall(line)) {
                body.add(parseFunctionCall(line));
                parsed = true;
            }
            // Return
            else if (line.isFirst("return")) {
                body.add(parseReturn(line));
                parsed = true;
            }
            // Continue
            else if (line.isFirst("continue")) {
                body.add(new Continue());
                parsed = true;
            }
            // Full line comment.
            // NOTE(review): the next line appears damaged in this copy of the source (the
            // string literal is truncated — it presumably tested for the language's
            // comment-marker token). Restore the original from version control.
            else if (line.equals(0, " parsed = true; }
            // Inject raw assembly.
            else if (line.isFirst("inject")) {
                body.add(parseInject(lines, line));
                parsed = true;
            }
            // Anything else.
            if (!parsed && !line.isFirstIgnore("else", "}")) {
                Operation op = parseOperation(line.iterator(), line);
                if (op != null) {
                    body.add(op);
                }
            }
            // Comments
            for (String token : line) {
                // Comments
                // NOTE(review): the next line also appears damaged (truncated string
                // literal, likely the same lost comment-marker token) — restore from
                // version control.
                if (token.equals(" comment.append(" "); continue; }
                if (comment.length() > 0) {
                    comment.append(token).append(" ");
                }
            }
            if (comment.length() > 0) {
                body.add(new Comment(comment.toString().trim()));
            }
            // A trailing "}" on this line also terminates the block.
            if (line.sizeNoComments() > 0) {
                if (line.equals(line.sizeNoComments() - 1, "}")) {
                    break;
                }
            }
        }
        return new Block(body);
    }

    /**
     * Parses the operation that is present on the given line.
     *
     * Handles assignments ("=", ":=", "=:"), unary "-" / "~", parenthesised
     * sub-expressions, embedded function calls and binary operators. Returns
     * {@code null} when the token stream ends before a full operation is read.
     *
     * @param it
     *            The iterator of the line to continue from.
     * @param line
     *            The line containing the expression.
     * @return The parsed operation.
     */
    private Operation parseOperation(Iterator<String> it, Tokeniser line) {
        Element first;
        Operator op = null;
        Element second;
        // NOTE(review): assumes the supplied Iterator is always a ListIterator — TODO confirm.
        ListIterator<String> li = (ListIterator<String>)it;
        // isInputOn API function shizz.
        // Special case: "! isInputOn ( n )" folds into one negated API call.
        if (line.isFirst("!") && line.equals(1, "isInputOn")) {
            String name = "isInputOn";
            Value value = Value.parse(line.getToken(3), program);
            FunctionCall call = new FunctionCall(name, new ArrayList<Variable>() {{
                add(new Variable("num", value).setJustNumber(true));
            }});
            program.registerAPIFunction(name);
            return new Operation(call, Operator.BOOLEAN_NEGATION, null);
        }
        // Check for assignments
        String or = line.getOriginal();
        Tokeniser orTokens = new Tokeniser(or);
        if (orTokens.sizeNoComments() > 3) {
            // "x = <expr>" or "<expr> := x": target variable, rest parsed recursively.
            if (orTokens.equals(1, "=") || orTokens.equals(orTokens.sizeNoComments() - 2, ":=")) {
                first = new Variable(orTokens.getToken(0));
                op = Operator.ASSIGN;
                Tokeniser newLine = new Tokeniser(orTokens.join(2, orTokens.sizeNoComments() - 2, " "));
                second = parseOperation(newLine.iterator(), newLine);
                return new Operation(first, op, second);
            }
            // "<expr> =: x": reversed assignment; the target is the last token.
            else if (orTokens.equals(orTokens.sizeNoComments() - 2, "=:")) {
                Tokeniser newLine = new Tokeniser(orTokens.join(0, orTokens.sizeNoComments() - 2, " "));
                second = parseOperation(newLine.iterator(), newLine);
                op = Operator.ASSIGN;
                first = new Variable(orTokens.getToken(orTokens.sizeNoComments() - 1));
                return new Operation(first, op, second);
            }
        }
        String token = it.next();
        // Skip a stray ")" left over from a recursive call.
        if (token.equals(")")) {
            token = it.next();
        }
        // First element
        if (token.equals("(")) {
            // Parenthesised sub-expression.
            first = parseOperation(it, line);
        }
        // Unary number negation.
        else if (token.equals("-")) {
            try {
                token = it.next();
                first = new Number(Integer.parseInt("-" + token));
            } catch (NumberFormatException nfe) {
                // "-x" on a variable is rewritten as x * -1.
                return new Operation(new Variable(token), Operator.MULTIPLICATION, Number.MINUS_ONE);
            }
        }
        // Unary NOT
        else if (token.equals("~")) {
            try {
                token = it.next();
                first = new Number(~Integer.parseInt(token));
            } catch (NumberFormatException nfe) {
                // "~x" on a variable is rewritten as x XOR all-ones.
                return new Operation(new Variable(token), Operator.BITWISE_XOR, Number.ALL_1S);
            }
        }
        else {
            Value val = Value.parse(token, program);
            if (val.getObject() instanceof String) {
                // NOTE(review): both branches of this inner if are identical — the
                // nested condition is redundant, likely a leftover from an earlier edit.
                if (val.getObject() instanceof String) {
                    first = new Variable(token);
                } else {
                    first = new Variable(token);
                }
            } else {
                first = val;
            }
        }
        if (!it.hasNext()) {
            return null;
        }
        token = it.next();
        if (token.equals(")")) {
            if (!it.hasNext()) {
                return null;
            }
            token = it.next();
        }
        // Operator
        // Function call.
        if (token.equals("(")) {
            // The token before "(" is the callee name.
            String prevToken = line.getToken(li.previousIndex() - 1);
            boolean apiFunction = APIFunction.isAPIFunction(prevToken);
            if ((program.getFunction(prevToken).isPresent() || currentFunction.equals(prevToken)) || apiFunction) {
                if (apiFunction) {
                    program.registerAPIFunction(prevToken);
                }
                List<Variable> arguments = new ArrayList<>();
                String elt = "";
                // Collect arguments up to the closing bracket, skipping "," separators.
                while (it.hasNext()) {
                    elt = it.next();
                    if (elt.equals(")")) {
                        break;
                    }
                    if (elt.equals(",")) {
                        continue;
                    }
                    Value value = Value.parse(elt, program);
                    if (value instanceof Number) {
                        arguments.add(new Variable(elt, value).setJustNumber(true));
                    } else {
                        arguments.add(new Variable(elt));
                    }
                }
                first = new FunctionCall(prevToken, arguments);
                program.registerAPIFunction(prevToken);
                if (it.hasNext()) {
                    token = it.next();
                } else {
                    return new Operation(first, null, null);
                }
            } else {
                throw new ParseException("Wrong function call at line '" + or + "'.");
            }
        }
        // Regular operator.
        Optional<Operator> operator = Operator.getBySign(token);
        if (!operator.isPresent()) {
            // Negative number
            if (first instanceof Variable) {
                Variable var = (Variable)first;
                if (var.getName().equals("-")) {
                    first = Value.parse("-" + token, program);
                    return new Operation(first, null, null);
                }
            }
            throw new ParseException("Could not find operator '" + token + "' for line '" + or + "'.");
        } else {
            op = operator.get();
        }
        if (!it.hasNext()) {
            return new Operation(first, null, null);
        }
        token = it.next();
        // Second element
        if (token.equals("(")) {
            second = parseOperation(it, line);
        } else if (token.equals("-")) {
            second = Value.parse("-" + it.next(), program);
        } else {
            Value val = Value.parse(token, program);
            if (val.getObject() instanceof String) {
                second = new Variable(token);
            } else {
                second = val;
            }
        }
        // NOTE(review): if the iterator is exhausted here, `token` keeps its previous
        // value and could still equal "(" below — verify this cannot misfire.
        if (it.hasNext()) {
            token = it.next();
        }
        // Second function call.
        if (token.equals("(")) {
            String prevToken = line.getToken(li.previousIndex() - 1);
            if ((program.getFunction(prevToken).isPresent() || currentFunction.equals(prevToken))) {
                List<Variable> arguments = new ArrayList<>();
                String elt;
                while (!(elt = it.next()).equals(")")) {
                    if (elt.equals(",")) {
                        continue;
                    }
                    arguments.add(new Variable(elt));
                }
                second = new FunctionCall(prevToken, arguments);
                program.registerAPIFunction(prevToken);
            } else {
                throw new ParseException("Wrong function call at line '" + or + "'.");
            }
        }
        return new Operation(first, op, second);
    }

    /**
     * Parses if-else statements.
     *
     * @param lines
     *            The iterator of the LineTokeniser.
     * @param line
     *            The line with the footprint of the if-statement.
     * @return The parsed if-object.
     */
    private IfElse parseIfElse(Iterator<String> lines, Tokeniser line) {
        Block ifBlock;
        Block elseBlock = Block.EMPTY;
        String or = line.getOriginal();
        try {
            if (!line.equals(1, "(") && !line.equals(0, "else")) {
                throw new ParseException("IfElse '" + or + "' has an improper opening bracket");
            }
            // Parse expression.
            // Strip the "if (" / "else if (" prefix and the ") {" suffix, leaving the condition.
            String lineNoIfElse = line.getOriginal().replaceAll("(( *else *)?if *\\()|\\) *\\{", "");
            Tokeniser lineExp = new Tokeniser(lineNoIfElse);
            Operation expression = parseOperation(lineExp.iterator(), lineExp);
            // Parse block.
            if (line.equals(line.sizeNoComments() - 1, "{")) {
                ifBlock = parseBlock(lines, line);
            } else {
                ifBlock = parseBlock(lines, new Tokeniser(lines.next()));
            }
            // If there is no else.
            if (!line.isFirstIgnore("else", "}")) {
                line = new Tokeniser(lines.next());
            }
            if (!line.isFirstIgnore("else", "}")) {
                // Not an else — push the line back so the caller sees it again.
                // NOTE(review): assumes `lines` is always a ListIterator — TODO confirm.
                ListIterator<String> listIterator = (ListIterator<String>)lines;
                listIterator.previous();
                return new IfElse(expression, ifBlock, Block.EMPTY);
            }
            // Parse else.
            if (line.isFirstTwoIgnore("else", "if", "}")) {
                // "else if": recurse with the leading "else" stripped off.
                Tokeniser newLine = new Tokeniser(line.join(1, line.sizeNoComments() - 1, " "));
                Element elt = parseIfElse(lines, newLine);
                elseBlock = new Block(new ArrayList<Element>() {{
                    add(elt);
                }});
                return new IfElse(expression, ifBlock, elseBlock);
            }
            if (line.isFirstIgnore("else", "}")) {
                if (line.equals(line.sizeNoComments() - 1, "{")) {
                    elseBlock = parseBlock(lines, line);
                } else {
                    elseBlock = parseBlock(lines, new Tokeniser(lines.next()));
                }
            }
            return new IfElse(expression, ifBlock, elseBlock);
        } catch (IndexOutOfBoundsException exception) {
            throw new ParseException("IfElse is not correctly defined: '" + or + "'.");
        }
    }

    /**
     * Parses loop statements.
     *
     * Expected shape: loop ( var from int to int [step value] ) { ... }
     *
     * @param line
     *            The line with the footprint of the loop.
     * @param lines
     *            The iterator of the LineTokeniser.
     * @return The parsed loop-object.
     */
    private Loop parseLoop(Iterator<String> lines, Tokeniser line) {
        Loop loop = null;   // NOTE(review): unused local.
        Block block = null; // NOTE(review): unused local.
        try {
            // Check syntax.
            if (!line.equals(1, "(")) {
                throw new ParseException("Loop '" + line.getOriginal() + "' has an improper "
                        + "opening bracket.");
            }
            if (!line.equals(3, "from")) {
                throw new ParseException("Loop '" + line.getOriginal() + "' lacks a from-keyword.");
            }
            if (!line.equals(5, "to")) {
                throw new ParseException("Loop '" + line.getOriginal() + "' lacks a to-keyword.");
            }
            // Parse loop-statement.
            Variable var = new Variable(line.getToken(2));
            Number from;
            try {
                from = new Number(Integer.parseInt(line.getToken(4)));
            } catch (NumberFormatException exception) {
                throw new ParseException("Loop '" + line.getOriginal() + "' has an incorrect "
                        + "from-value.");
            }
            Number to;
            try {
                to = new Number(Integer.parseInt(line.getToken(6)));
            } catch (NumberFormatException exception) {
                throw new ParseException("Loop '" + line.getOriginal() + "' has an incorrect "
                        + "to-value.");
            }
            if (line.equals(7, ")")) {
                // No explicit step: default to +1 or -1 depending on loop direction.
                Number step = from.getIntValue() <= to.getIntValue() ? Number.ONE : Number.MINUS_ONE;
                return new Loop(parseBlock(lines, line), var, from, to, step);
            } else {
                if (!line.equals(9, ")")) {
                    throw new ParseException("Loop '" + line.getOriginal() + "' has an improper "
                            + "closing bracket.");
                }
                if (!line.equals(7, "step")) {
                    throw new ParseException("Loop '" + line.getOriginal() + "' lacks a "
                            + "step-keyword.");
                }
                Value step = Value.parse(line.getToken(8), program);
                // A step that parses to a plain string is not a usable numeric step.
                if (step.getObject() instanceof String) {
                    throw new ParseException("Loop '" + line.getOriginal() + "' lacks a correct "
                            + "step-value");
                }
                return new Loop(parseBlock(lines, line), var, from, to, step);
            }
        } catch (IndexOutOfBoundsException exception) {
            throw new ParseException("Loop is not correctly defined: '" + line.getOriginal() + "'"
                    + ".");
        }
    }

    /**
     * Turns a line into a variable.
     *
     * Handles "var x" (bare declaration) and "var x = value" where the value is
     * either a literal/variable or a function call "var x = f ( args... )".
     *
     * @param line
     *            The line where the variable declaration occurs.
     * @return The variable-object representing the declared variable.
     */
    private Declaration parseVariable(Tokeniser line) throws ParseException {
        // "var x" — declaration without an initial value.
        if (line.sizeNoComments() == 2) {
            String varName = line.getToken(1);
            return new Declaration(new Variable(varName), Declaration.DeclarationScope.LOCAL);
        } else if (line.sizeNoComments() >= 4) {
            // "var x = <value>" — token 2 must be the assignment sign.
            if (!line.equals(2, "=")) {
                throw new ParseException("Wrong declaration for variable '" + line.getOriginal()
                        + "'.");
            }
            String varName = line.getToken(1);
            Value value = null;
            // Function call
            if (line.equals(4, "(")) {
                String funcName = line.getToken(3);
                List<Variable> variables = new ArrayList<>();
                if (!line.equals(5, ")")) {
                    // Arguments occupy every second token starting at index 5.
                    for (int i = 5; i < line.sizeNoComments(); i += 2) {
                        Value val = Value.parse(line.getToken(i), program);
                        if (val instanceof Number) {
                            variables.add(new Variable("num", val).setJustNumber(true));
                        } else {
                            variables.add(new Variable(val.stringRepresentation()));
                        }
                    }
                }
                value = new FunctionCall(funcName, variables);
                program.registerAPIFunction(funcName);
            }
            // Variable declaration
            else {
                value = Value.parse(line.getToken(3), program);
            }
            return new Declaration(new Variable(varName, value), value,
                    Declaration.DeclarationScope.LOCAL);
        } else {
            throw new ParseException("Wrong declaration for variable '" + line.getOriginal() + "'.");
        }
    }

    /**
     * Checks the given line for global statements and handles them.
     *
     * @return <code>true</code> if the line was a global statement, <code>false</code> otherwise.
     */
    private boolean parseGlobal(Program program, Tokeniser line) throws ParseException {
        if (!line.isFirst("global")) {
            return false;
        }
        String firstToken = line.getToken(1);
        GlobalVariable var;
        // If there is no value specified
        if (line.sizeNoComments() == 2) {
            var = new GlobalVariable(firstToken, lastComment);
        } else {
            // "global x = <value>" — the value sits at token 3.
            var = new GlobalVariable(firstToken, Value.parse(line.getToken(3), program), lastComment);
        }
        program.addGlobalVariable(var);
        return true;
    }

    /**
     * Parses inject statements.
     *
     * Copies raw lines verbatim into an {@link Inject} until the closing brace.
     *
     * @param line
     *            The line with the footprint of the loop.
     * @param lines
     *            The iterator of the LineTokeniser.
     * @return The parsed inject-object.
     */
    private Inject parseInject(Iterator<String> lines, Tokeniser line) {
        Inject inject;
        try {
            if (!line.equals(1, "{")) {
                throw new ParseException("Incorrect syntax for inject statement: missing opening brace in "
                        + line.getOriginal());
            }
            StringBuilder contents = new StringBuilder();
            while (lines.hasNext()) {
                line = new Tokeniser(lines.next());
                if (line.isFirst("}")) {
                    break;
                }
                if (line.getOriginal().contains("}")) {
                    // The closing brace shares a line with content: keep only the part before it.
                    int untilIndex = line.getOriginal().indexOf("}");
                    contents.append(line.getOriginal().subSequence(0, untilIndex).toString().trim());
                    break;
                } else {
                    contents.append(line.getOriginal().trim());
                    contents.append("\n");
                }
            }
            inject = new Inject(contents.toString());
        } catch (IndexOutOfBoundsException exception) {
            throw new ParseException("Inject is not correctly defined: '" + line.getOriginal() + "'"
                    + ".");
        }
        return inject;
    }
}
package fault.java.circuit;

/**
 * A {@link CircuitBreaker} that never trips.
 *
 * The breaker always reports itself as closed, discards all result
 * notifications, and ignores any configuration it is given. Useful as a
 * pass-through when circuit-breaking is disabled.
 */
public class NoOpCircuitBreaker implements CircuitBreaker {

    /** Always closed: calls are never blocked by this breaker. */
    @Override
    public boolean isOpen() {
        return false;
    }

    /** Intentionally empty — a no-op breaker keeps no success/failure state. */
    @Override
    public void informBreakerOfResult(boolean successful) {
    }

    /** Intentionally empty — configuration has no effect on a no-op breaker. */
    @Override
    public void setBreakerConfig(BreakerConfig breakerConfig) {
    }
}
package org.voltcore.utils;

import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import jsr166y.LinkedTransferQueue;

import org.voltcore.logging.VoltLogger;

import vanilla.java.affinity.impl.PosixJNAAffinity;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

/**
 * Assorted thread/executor factories and HSId (host-site id) helpers shared
 * across voltcore.
 */
public class CoreUtils {
    private static final VoltLogger hostLog = new VoltLogger("HOST");

    /** Stack size (bytes) given to threads created by the factories in this class. */
    public static final int SMALL_STACK_SIZE = 1024 * 128;

    /**
     * Get a single thread executor that caches it's thread meaning that the thread will terminate
     * after keepAlive milliseconds. A new thread will be created the next time a task arrives and that will be kept
     * around for keepAlive milliseconds. On creation no thread is allocated, the first task creates a thread.
     *
     * Uses LinkedTransferQueue to accept tasks and has a small stack.
     */
    public static ListeningExecutorService getCachedSingleThreadExecutor(String name, long keepAlive) {
        return MoreExecutors.listeningDecorator(new ThreadPoolExecutor(
                0,
                1,
                keepAlive,
                TimeUnit.MILLISECONDS,
                new LinkedTransferQueue<Runnable>(),
                CoreUtils.getThreadFactory(null, name, SMALL_STACK_SIZE, false, null)));
    }

    /**
     * Create an unbounded single threaded executor
     */
    public static ListeningExecutorService getSingleThreadExecutor(String name) {
        ExecutorService ste =
                Executors.newSingleThreadExecutor(CoreUtils.getThreadFactory(null, name, SMALL_STACK_SIZE, false, null));
        return MoreExecutors.listeningDecorator(ste);
    }

    /**
     * Create a bounded single threaded executor that rejects requests if more than capacity
     * requests are outstanding.
     */
    public static ListeningExecutorService getBoundedSingleThreadExecutor(String name, int capacity) {
        LinkedBlockingQueue<Runnable> lbq = new LinkedBlockingQueue<Runnable>(capacity);
        ThreadPoolExecutor tpe =
                new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS, lbq, CoreUtils.getThreadFactory(name));
        return MoreExecutors.listeningDecorator(tpe);
    }

    /*
     * Have shutdown actually means shutdown. Tasks that need to complete should use
     * futures.
     */
    public static ScheduledThreadPoolExecutor getScheduledThreadPoolExecutor(String name, int poolSize, int stackSize) {
        ScheduledThreadPoolExecutor ses =
                new ScheduledThreadPoolExecutor(poolSize, getThreadFactory(null, name, stackSize, poolSize > 1, null));
        // Pending tasks are dropped on shutdown rather than drained.
        ses.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
        ses.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
        return ses;
    }

    /** Fixed-size pool with an unbounded transfer queue and no CPU pinning. */
    public static ListeningExecutorService getListeningExecutorService(
            final String name,
            final int threads) {
        return getListeningExecutorService(name, threads, new LinkedTransferQueue<Runnable>(), null);
    }

    /** Pool with separate core/max thread counts, unbounded queue, no CPU pinning. */
    public static ListeningExecutorService getListeningExecutorService(
            final String name,
            final int coreThreads,
            final int threads) {
        return getListeningExecutorService(name, coreThreads, threads, new LinkedTransferQueue<Runnable>(), null);
    }

    /** Fixed-size pool whose threads are pinned to the cores named in {@code coreList}. */
    public static ListeningExecutorService getListeningExecutorService(
            final String name,
            final int threads,
            Queue<String> coreList) {
        return getListeningExecutorService(name, threads, new LinkedTransferQueue<Runnable>(), coreList);
    }

    /**
     * Fixed-size pool over the given queue. When a non-empty {@code coreList}
     * is supplied the pool size is forced to the number of listed cores.
     */
    public static ListeningExecutorService getListeningExecutorService(
            final String name,
            int threadsTemp,
            final BlockingQueue<Runnable> queue,
            final Queue<String> coreList) {
        if (coreList != null && !coreList.isEmpty()) {
            threadsTemp = coreList.size();
        }
        final int threads = threadsTemp;
        if (threads < 1) {
            throw new IllegalArgumentException("Must specify > 0 threads");
        }
        if (name == null) {
            throw new IllegalArgumentException("Name cannot be null");
        }
        // Thread names are numbered only when there is more than one thread.
        return MoreExecutors.listeningDecorator(
                new ThreadPoolExecutor(threads, threads,
                        0L, TimeUnit.MILLISECONDS,
                        queue,
                        getThreadFactory(null, name, SMALL_STACK_SIZE, threads > 1 ? true : false, coreList)));
    }

    /**
     * Core/max pool over the given queue; idle non-core threads die after one
     * minute. When a non-empty {@code coreList} is supplied the max pool size is
     * forced to the number of listed cores (core count clamped to match).
     */
    public static ListeningExecutorService getListeningExecutorService(
            final String name,
            int coreThreadsTemp,
            int threadsTemp,
            final BlockingQueue<Runnable> queue,
            final Queue<String> coreList) {
        if (coreThreadsTemp < 0) {
            throw new IllegalArgumentException("Must specify >= 0 core threads");
        }
        if (coreThreadsTemp > threadsTemp) {
            throw new IllegalArgumentException("Core threads must be <= threads");
        }
        if (coreList != null && !coreList.isEmpty()) {
            threadsTemp = coreList.size();
            if (coreThreadsTemp > threadsTemp) {
                coreThreadsTemp = threadsTemp;
            }
        }
        final int coreThreads = coreThreadsTemp;
        final int threads = threadsTemp;
        if (threads < 1) {
            throw new IllegalArgumentException("Must specify > 0 threads");
        }
        if (name == null) {
            throw new IllegalArgumentException("Name cannot be null");
        }
        return MoreExecutors.listeningDecorator(
                new ThreadPoolExecutor(coreThreads, threads,
                        1L, TimeUnit.MINUTES,
                        queue,
                        getThreadFactory(null, name, SMALL_STACK_SIZE, threads > 1 ? true : false, coreList)));
    }

    /** Factory with the small default stack size and no thread group. */
    public static ThreadFactory getThreadFactory(String name) {
        return getThreadFactory(name, SMALL_STACK_SIZE);
    }

    /** Factory creating numbered threads inside the named group. */
    public static ThreadFactory getThreadFactory(String groupName, String name) {
        return getThreadFactory(groupName, name, SMALL_STACK_SIZE, true, null);
    }

    /** Factory creating numbered threads with the given stack size. */
    public static ThreadFactory getThreadFactory(String name, int stackSize) {
        return getThreadFactory(null, name, stackSize, true, null);
    }

    /**
     * Creates a thread factory that creates threads within a thread group if
     * the group name is given. The threads created will catch any unhandled
     * exceptions and log them to the HOST logger.
     *
     * @param groupName name of the thread group, or null for no group
     * @param name base name for created threads
     * @param stackSize stack size in bytes for each created thread
     * @param incrementThreadNames whether to append " - N" counters to thread names
     * @param coreList cores to pin successive threads to, or null for no pinning
     * @return the configured factory; all threads it creates are daemons
     */
    public static ThreadFactory getThreadFactory(
            final String groupName,
            final String name,
            final int stackSize,
            final boolean incrementThreadNames,
            final Queue<String> coreList) {
        ThreadGroup group = null;
        if (groupName != null) {
            group = new ThreadGroup(Thread.currentThread().getThreadGroup(), groupName);
        }
        final ThreadGroup finalGroup = group;

        return new ThreadFactory() {
            private final AtomicLong m_createdThreadCount = new AtomicLong(0);
            private final ThreadGroup m_group = finalGroup;
            @Override
            public synchronized Thread newThread(final Runnable r) {
                final String threadName = name +
                        (incrementThreadNames ? " - " + m_createdThreadCount.getAndIncrement() : "");
                // Each new thread consumes the next core from the list, if any remain.
                String coreTemp = null;
                if (coreList != null && !coreList.isEmpty()) {
                    coreTemp = coreList.poll();
                }
                final String core = coreTemp;
                Runnable runnable = new Runnable() {
                    @Override
                    public void run() {
                        if (core != null) {
                            // Pin this thread to the assigned core before running the task.
                            PosixJNAAffinity.INSTANCE.setAffinity(core);
                        }
                        try {
                            r.run();
                        } catch (Throwable t) {
                            hostLog.error("Exception thrown in thread " + threadName, t);
                        }
                    }
                };
                Thread t = new Thread(m_group, runnable, threadName, stackSize);
                t.setDaemon(true);
                return t;
            }
        };
    }

    /**
     * Return the local hostname, if it's resolvable.  If not,
     * return the IPv4 address on the first interface we find, if it exists.
     * If not, returns whatever address exists on the first interface.
     * @return the String representation of some valid host or IP address,
     *         if we can find one; the empty string otherwise
     */
    public static String getHostnameOrAddress() {
        try {
            final InetAddress addr = InetAddress.getLocalHost();
            return addr.getHostName();
        } catch (UnknownHostException e) {
            try {
                // XXX-izzy Won't we randomly pull localhost here sometimes?
                // NOTE(review): only the first interface is ever inspected.
                Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
                if (interfaces == null) {
                    return "";
                }
                NetworkInterface intf = interfaces.nextElement();
                Enumeration<InetAddress> addresses = intf.getInetAddresses();
                while (addresses.hasMoreElements()) {
                    InetAddress address = addresses.nextElement();
                    if (address instanceof Inet4Address) {
                        return address.getHostAddress();
                    }
                }
                // No IPv4 address found; fall back to whatever the interface has.
                addresses = intf.getInetAddresses();
                if (addresses.hasMoreElements()) {
                    return addresses.nextElement().getHostAddress();
                }
                return "";
            } catch (SocketException e1) {
                return "";
            }
        }
    }

    /**
     * Return the local IP address, if it's resolvable.  If not,
     * return the IPv4 address on the first interface we find, if it exists.
     * If not, returns whatever address exists on the first interface.
     * @return the String representation of some valid host or IP address,
     *         if we can find one; the empty string otherwise
     */
    public static InetAddress getLocalAddress() {
        try {
            final InetAddress addr = InetAddress.getLocalHost();
            return addr;
        } catch (UnknownHostException e) {
            try {
                // NOTE(review): mirrors getHostnameOrAddress — only the first interface
                // is inspected.
                Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
                if (interfaces == null) {
                    return null;
                }
                NetworkInterface intf = interfaces.nextElement();
                Enumeration<InetAddress> addresses = intf.getInetAddresses();
                while (addresses.hasMoreElements()) {
                    InetAddress address = addresses.nextElement();
                    if (address instanceof Inet4Address) {
                        return address;
                    }
                }
                addresses = intf.getInetAddresses();
                if (addresses.hasMoreElements()) {
                    return addresses.nextElement();
                }
                return null;
            } catch (SocketException e1) {
                return null;
            }
        }
    }

    /** Packs a host id (low 32 bits) and a site id (high 32 bits) into one HSId. */
    public static long getHSIdFromHostAndSite(int host, int site) {
        long HSId = site;
        HSId = (HSId << 32) + host;
        return HSId;
    }

    /** Extracts the host id (low 32 bits) from an HSId. */
    public static int getHostIdFromHSId(long HSId) {
        // NOTE(review): 0xffffffff is an int literal (-1) that widens to an all-ones
        // long mask, so the AND is a no-op; the (int) cast does the truncation.
        return (int) (HSId & 0xffffffff);
    }

    /** Renders an HSId as "host:site". */
    public static String hsIdToString(long hsId) {
        return Integer.toString((int)hsId) + ":" + Integer.toString((int)(hsId >> 32));
    }

    /** Renders a collection of HSIds as a comma-separated, lexically sorted list. */
    public static String hsIdCollectionToString(Collection<Long> ids) {
        List<String> idstrings = new ArrayList<String>();
        for (Long id : ids) {
            idstrings.add(hsIdToString(id));
        }
        // Easy hack, sort hsIds lexically.
        Collections.sort(idstrings);
        StringBuilder sb = new StringBuilder();
        boolean first = false;
        for (String id : idstrings) {
            if (!first) {
                first = true;
            } else {
                sb.append(", ");
            }
            sb.append(id);
        }
        return sb.toString();
    }

    /** Extracts the site id (high 32 bits) from an HSId. */
    public static int getSiteIdFromHSId(long siteId) {
        return (int)(siteId>>32);
    }

    /** Deep-copies a map of lists into an immutable map of immutable lists. */
    public static <K,V> ImmutableMap<K, ImmutableList<V>> unmodifiableMapCopy(Map<K, List<V>> m) {
        ImmutableMap.Builder<K, ImmutableList<V>> builder = ImmutableMap.builder();
        for (Map.Entry<K, List<V>> e : m.entrySet()) {
            builder.put(e.getKey(), ImmutableList.<V>builder().addAll(e.getValue()).build());
        }
        return builder.build();
    }

    /**
     * Reads the full contents of a URL — or, if the string is not a valid URL,
     * a local file path — into a byte array.
     *
     * @param url URL or file path to read; may be null
     * @return the contents, or null when {@code url} is null
     * @throws RuntimeException wrapping any I/O failure
     */
    public static byte[] urlToBytes(String url) {
        if (url == null) {
            return null;
        }
        try {
            // get the URL/path for the deployment and prep an InputStream
            InputStream input = null;
            try {
                URL inputURL = new URL(url);
                input = inputURL.openStream();
            } catch (MalformedURLException ex) {
                // Invalid URL. Try as a file.
                try {
                    input = new FileInputStream(url);
                } catch (FileNotFoundException e) {
                    throw new RuntimeException(e);
                }
            } catch (IOException ioex) {
                throw new RuntimeException(ioex);
            }
            try {
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                byte readBytes[] = new byte[1024 * 8];
                while (true) {
                    int read = input.read(readBytes);
                    if (read == -1) {
                        break;
                    }
                    baos.write(readBytes, 0, read);
                }
                return baos.toByteArray();
            } finally {
                // Previously the stream was leaked; always release it, even on
                // a read failure. A failed close is not worth masking the result.
                try {
                    input.close();
                } catch (IOException ignored) {
                    // Intentionally ignored: the data (or the real error) matters more.
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Renders a throwable's stack trace as a string. */
    public static String throwableToString(Throwable t) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        t.printStackTrace(pw);
        pw.flush();
        return sw.toString();
    }

    /** Renders a map keyed by HSIds as "{host:site value, ...}". */
    public static String hsIdKeyMapToString(Map<Long, ?> m) {
        StringBuilder sb = new StringBuilder();
        sb.append('{');
        boolean first = true;
        for (Map.Entry<Long, ?> entry : m.entrySet()) {
            if (!first) sb.append(", ");
            first = false;
            sb.append(CoreUtils.hsIdToString(entry.getKey()));
            sb.append(entry.getValue());
        }
        sb.append('}');
        return sb.toString();
    }

    /** Available processor count, never less than 1. */
    public static int availableProcessors() {
        return Math.max(1, Runtime.getRuntime().availableProcessors());
    }
}
package org.azavea.otm.rest;

import java.io.IOException;
import java.io.UnsupportedEncodingException;

import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.azavea.otm.App;
import org.azavea.otm.data.Model;
import org.azavea.otm.data.Plot;

import android.content.Context;

import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpVersion;
import org.apache.http.message.BasicHeader;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.params.HttpParams;

import android.util.Base64;
import android.util.Log;

import com.loopj.android.http.*;

// This class is designed to take care of the base-url
// and otm api-key for REST requests
public class RestClient {
    private String baseUrl;

    private String apiKey;

    private AsyncHttpClient client;

    public RestClient() {
        baseUrl = getBaseUrl();
        apiKey = getApiKey();
        client = new AsyncHttpClient();
    }

    // Dependency injection to support mocking
    // in unit-tests
    public void setAsyncClient(AsyncHttpClient client) {
        this.client = client;
    }

    /** Executes a GET against {@code baseUrl + url}, appending the api key. */
    public void get(String url, RequestParams params, AsyncHttpResponseHandler responseHandler) {
        RequestParams reqParams = prepareParams(params);
        Log.d("rc", "Sending get request...");
        client.get(getAbsoluteUrl(url), reqParams, responseHandler);
    }

    /** POSTs the model's JSON body to {@code url + id}, with the api key on the query string. */
    public void post(Context context, String url, int id, Model model,
            AsyncHttpResponseHandler response) throws UnsupportedEncodingException {
        String completeUrl = getAbsoluteUrl(url);
        completeUrl += id + "?apikey=" + getApiKey();
        client.post(context, completeUrl, new StringEntity(model.getData().toString()),
                "application/json", response);
    }

    /** POSTs form params (plus the api key) to {@code url}. */
    public void post(String url, RequestParams params, AsyncHttpResponseHandler responseHandler) {
        RequestParams reqParams = prepareParams(params);
        client.post(getAbsoluteUrl(url), reqParams, responseHandler);
    }

    /** PUTs the model's JSON body to {@code url + id}, with the api key on the query string. */
    public void put(Context context, String url, int id, Model model,
            AsyncHttpResponseHandler response) throws UnsupportedEncodingException {
        String completeUrl = getAbsoluteUrl(url);
        completeUrl += id + "?apikey=" + getApiKey();
        client.put(context, completeUrl, new StringEntity(model.getData().toString()),
                "application/json", response);
    }

    /**
     * Executes a get request and adds basic authentication headers to the request.
     */
    public void getWithAuthentication(Context context, String url, String username,
            String password, RequestParams params, AsyncHttpResponseHandler responseHandler) {
        RequestParams reqParams = prepareParams(params);
        Header[] headers = {createBasicAuthenticationHeader(username, password)};
        Log.d("rc", "Sending get request...");
        client.get(context, getAbsoluteUrl(url), headers, reqParams, responseHandler);
    }

    /**
     * Executes a put request and adds basic authentication headers to the request.
     */
    public void putWithAuthentication(Context context, String url, String username,
            String password, int id, Model model, AsyncHttpResponseHandler response)
            throws UnsupportedEncodingException {
        String completeUrl = getAbsoluteUrl(url);
        completeUrl += id + "?apikey=" + getApiKey();
        Header[] headers = {createBasicAuthenticationHeader(username, password)};
        StringEntity modelEntity = new StringEntity(model.getData().toString());
        client.put(context, completeUrl, headers, modelEntity, "application/json", response);
    }

    /**
     * Executes a post request and adds basic authentication headers to the request.
     */
    public void postWithAuthentication(Context context, String url, String username,
            String password, Model model, AsyncHttpResponseHandler responseHandler)
            throws UnsupportedEncodingException {
        String completeUrl = getAbsoluteUrl(url);
        completeUrl += "?apikey=" + getApiKey();
        Header[] headers = {createBasicAuthenticationHeader(username, password)};
        StringEntity modelEntity = new StringEntity(model.getData().toString());
        // Previously a second, identical StringEntity was constructed here and
        // modelEntity was left unused; reuse the one we just built.
        client.post(context, completeUrl, headers, modelEntity, "application/json",
                responseHandler);
    }

    public void delete(String url, AsyncHttpResponseHandler responseHandler) {
        // FIXME(security): hard-coded credentials, and setBasicAuth mutates the
        // shared client so every subsequent request carries them. Replace with
        // deleteWithAuthentication(...) and real credentials.
        client.setBasicAuth("administrator", "123456");
        client.delete(getAbsoluteUrl(url), responseHandler);
    }

    /**
     * Executes a delete request and adds basic authentication headers to the request.
     */
    public void deleteWithAuthentication(Context context, String url, String username,
            String password, AsyncHttpResponseHandler responseHandler) {
        Header[] headers = {createBasicAuthenticationHeader(username, password)};
        client.delete(context, getAbsoluteUrl(url), headers, responseHandler);
    }

    private RequestParams prepareParams(RequestParams params) {
        // We'll always need a RequestParams object since we'll always
        // be sending an apikey
        RequestParams reqParams;
        if (params == null) {
            reqParams = new RequestParams();
        } else {
            reqParams = params;
        }

        reqParams.put("apikey", apiKey);

        return reqParams;
    }

    private String getBaseUrl() {
        // TODO: Expand once configuration management has been implemented
        return "http://10.0.2.2:9100/gr/api/v0.1";
    }

    private String getApiKey() {
        // TODO: Expand once authentication management has been implemented
        return "APIKEY";
    }

    private String getAbsoluteUrl(String relativeUrl) {
        Log.d(App.LOG_TAG, baseUrl + relativeUrl);
        return baseUrl + relativeUrl;
    }

    /** Builds an RFC 2617 "Authorization: Basic <base64(user:pass)>" header. */
    private Header createBasicAuthenticationHeader(String username, String password) {
        String credentials = String.format("%s:%s", username, password);
        String encoded = Base64.encodeToString(credentials.getBytes(), Base64.NO_WRAP);
        return new BasicHeader("Authorization", String.format("%s %s", "Basic", encoded));
    }
}
package uk.gov.dvla.domain.mib; import com.fasterxml.jackson.annotation.JsonProperty; import uk.gov.dvla.domain.*; import java.util.Date; import java.util.List; public class MibResponse { private MibDriver driver; private List<Message> messages; private String ruleApplied; public MibResponse(){}; public MibResponse(Driver driver) { this.driver = new MibDriver(driver); } public class MibDriver { private MibLicence licence; private List<Integer> stopMarker; private List<Integer> restrictionKey; private List<String> caseType; private List<String> errorCode; private String statusCode = null; public MibDriver(){}; public MibDriver(Driver driver) { licence = new MibLicence(driver.getLicence().get(0)); stopMarker = driver.getStopMarker(); restrictionKey = driver.getRestrictionKey(); caseType = driver.getCaseType(); errorCode = driver.getErrorCode(); } public MibLicence getLicence() { return this.licence; } public List<Integer> getStopMarker() { return stopMarker; } public void setStopMarker(List<Integer> markers) { this.stopMarker = markers; } public List<Integer> getRestrictionKey() { return restrictionKey; } public void setRestrictionKey(List<Integer> keys) { this.restrictionKey = keys; } public List<String> getCaseType() { return this.caseType; } public void setCaseType(List<String> caseTypes) { this.caseType = caseTypes; } public List<String> getErrorCode() { return this.errorCode; } public void setErrorCode(List<String> errorCodes) { this.errorCode = errorCodes; } public String getStatusCode() { return statusCode; } public void setStatusCode(String statusCode) { this.statusCode = statusCode; } } public class MibLicence { public Date validFrom; public Date validTo; public String status; public Integer directiveStatus; private List<Entitlement> entitlements; private List<Endorsement> endorsements; public MibLicence(){}; public MibLicence(Licence licence) { // this.driver = new MibDriver(driver); } public void setDirectiveStatus(Integer directiveStatus) { 
this.directiveStatus = directiveStatus; } public Integer getDirectiveStatus() { return directiveStatus; } public Date getValidFrom() { return validFrom; } public void setValidFrom(Date validFrom) { this.validFrom = validFrom; } public Date getValidTo() { return validTo; } public void setValidTo(Date validTo) { this.validTo = validTo; } public String getStatus() { return status; } public void setStatus(String status) { this.status = status; } public List<Entitlement> getEntitlements() { return entitlements; } public void setEntitlements(List<Entitlement> entitlements) { this.entitlements = entitlements; } public List<Endorsement> getEndorsements() { return endorsements; } public void setEndorsements(List<Endorsement> endorsements) { this.endorsements = endorsements; } } public class MibEntitlement { private String code; private Date validFrom; private Date validTo; private Boolean isProvisional = null; private List<EntitlementRestriction> restrictions; private TestPassStatus testPassStatus; private Date datePassed; public String getCode() { return code; } public void setCode(String code) { this.code = code; } public Date getValidFrom() { return validFrom; } public void setValidFrom(Date validFrom) { this.validFrom = validFrom; } public Date getValidTo() { return validTo; } public void setValidTo(Date validTo) { this.validTo = validTo; } public Boolean getIsProvisional() { return isProvisional; } public void setIsProvisional(Boolean provisional) { this.isProvisional = provisional; } public List<EntitlementRestriction> getRestrictions() { return restrictions; } public void setRestrictions(List<EntitlementRestriction> restrictions) { this.restrictions = restrictions; } // Calculated field used to simplify data sent to the MIB @JsonProperty("entitlementType") public EntitlementType getEntitlementType() { EntitlementType entitlementType = EntitlementType.Full; if (isProvisional) { if (testPassStatus == TestPassStatus.Unclaimed) { entitlementType = 
EntitlementType.UnclaimedTestPass; }else { entitlementType = EntitlementType.Provisional; } } return entitlementType; } } public class MibEndorsement { public Boolean isDisqualification; public String offenceCode; public Date offenceDate; //Disqualification Only public Date convictionDate; public Date sentencingDate; public String period; public double fine; //Penalty Points Only public Integer numberOfPoints; public Boolean getDisqualification() { return isDisqualification; } public void setDisqualification(Boolean disqualification) { isDisqualification = disqualification; } public String getOffenceCode() { return offenceCode; } public void setOffenceCode(String offenceCode) { this.offenceCode = offenceCode; } public Date getOffenceDate() { return offenceDate; } public void setOffenceDate(Date offenceDate) { this.offenceDate = offenceDate; } public Date getConvictionDate() { return convictionDate; } public void setConvictionDate(Date convictionDate) { this.convictionDate = convictionDate; } public Date getSentencingDate() { return sentencingDate; } public void setSentencingDate(Date sentencingDate) { this.sentencingDate = sentencingDate; } public String getPeriod() { return period; } public void setPeriod(String period) { this.period = period; } public Number getFine() { return fine; } public void setFine(double fine) { this.fine = fine; } public Integer getNumberOfPoints() { return numberOfPoints; } public void setNumberOfPoints(Integer numberOfPoints) { this.numberOfPoints = numberOfPoints; } } }
package br.net.mirante.singular.form; import java.util.*; import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.google.common.collect.Lists.newArrayList; public class SIList<E extends SInstance> extends SInstance implements Iterable<E>, ICompositeInstance { private List<E> values; private SType<E> elementsType; public SIList() { } @SuppressWarnings("unchecked") static <I extends SInstance> SIList<I> of(SType<I> elementsType) { // MILista<I> lista = new MILista<>(); SIList<I> lista = (SIList<I>) elementsType.getDictionary().getType(STypeList.class).newInstance(); lista.setType(elementsType.getDictionary().getType(STypeList.class)); lista.elementsType = elementsType; return lista; } @Override public STypeList<?, ?> getType() { return (STypeList<?, ?>) super.getType(); } @SuppressWarnings("unchecked") public SType<E> getElementsType() { if (elementsType == null) { elementsType = (SType<E>) getType().getElementsType(); } return elementsType; } @Override public List<Object> getValue() { if (values == null) { return Collections.emptyList(); } return values.stream().map(SInstance::getValue).collect(Collectors.toList()); } @Override public void clearInstance() { if (values != null) { int size = values.size(); for (int i = 0; i < size; i++) { remove(0); } } } @Override public final <T> T getValue(String fieldPath, Class<T> resultClass) { return getValue(new PathReader(fieldPath), resultClass); } @Override public boolean isEmptyOfData() { return isEmpty() || values.stream().allMatch(SInstance::isEmptyOfData); } public E addNew() { E instance = getElementsType().newInstance(getDocument()); return addInternal(instance, true, -1); } public E addNew(Consumer<E> consumer) { E instance = addNew(); consumer.accept(instance); return instance; } @SuppressWarnings("unchecked") public E addElement(E e) { E instance = e; instance.setDocument(getDocument()); return addInternal(instance, true, -1); } public E 
addElementAt(int index, E e) { E instance = e; instance.setDocument(getDocument()); return addInternal(instance, false, index); } public E addNewAt(int index) { E instance = getElementsType().newInstance(getDocument()); return addInternal(instance, false, index); } public E addValue(Object value) { E instance = addNew(); try { instance.setValue(value); } catch (RuntimeException e) { values.remove(values.size() - 1); throw e; } return instance; } public SIList<E> addValues(Collection<?> values) { for (Object valor : values) addValue(valor); return this; } private E addInternal(E instance, boolean atEnd, int index) { if (values == null) { values = new ArrayList<>(); } if (atEnd) { values.add(instance); } else { values.add(index, instance); } instance.setParent(this); instance.init(); return instance; } public SInstance get(int index) { return getChecking(index, null); } @Override public SInstance getField(String path) { return getField(new PathReader(path)); } @Override public Optional<SInstance> getFieldOpt(String path) { return getFieldOpt(new PathReader(path)); } @Override final SInstance getFieldLocal(PathReader pathReader) { SInstance instance = getChecking(pathReader); if (instance == null) { SFormUtil.resolveFieldType(getType(), pathReader); } return instance; } @Override Optional<SInstance> getFieldLocalOpt(PathReader pathReader) { int index = resolveIndex(pathReader); if (values != null && index < values.size()) { return Optional.ofNullable(values.get(index)); } return Optional.empty(); } @Override final SInstance getFieldLocalWithoutCreating(PathReader pathReader) { return getChecking(pathReader); } private E getChecking(PathReader pathReader) { return getChecking(resolveIndex(pathReader), pathReader); } private E getChecking(int index, PathReader pathReader) { if (index < 0 || index + 1 > size()) { String msg = "índice inválido: " + index + ((index < 0) ? 
" < 0" : " > que a lista (size= " + size() + ")"); if (pathReader == null) { throw new SingularFormException(msg, this); } throw new SingularFormException(pathReader.getErroMsg(this, msg)); } return values.get(index); } private int resolveIndex(PathReader pathReader) { if (!pathReader.isIndex()) { throw new SingularFormException(pathReader.getErroMsg(this, "Era esperado um indice do elemento (exemplo field[1]), mas em vez disso foi solicitado '" + pathReader.getTrecho() + "'")); } int index = pathReader.getIndex(); if (index < 0) { throw new SingularFormException(pathReader.getErroMsg(this, index + " é um valor inválido de índice")); } return index; } @Override public void setValue(Object obj) { if (obj instanceof SIList<?>) { @SuppressWarnings("unchecked") SIList<E> list = (SIList<E>) obj; clearInstance(); Iterator<E> it = list.iterator(); while (it.hasNext()){ E o = it.next(); it.remove(); addElement(o); } elementsType = list.getElementsType(); list.getValue().clear(); } else if (obj instanceof List) { clearInstance(); for (Object o : (List)obj){ addValue(o); } } else { throw new SingularFormException("SList só suporta valores de mesmo tipo da lista", this); } } @Override public final void setValue(String fieldPath, Object value) { setValue(new PathReader(fieldPath), value); } @Override void setValue(PathReader pathReader, Object value) { SInstance instance = getChecking(pathReader); if (pathReader.isLast()) { instance.setValue(value); } else { instance.setValue(pathReader.next(), value); } } public E remove(int index) { E e = getChecking(index, null); values.remove(index); return internalRemove(e); } private E internalRemove(E e){ e.internalOnRemove(); if (asAtr().getUpdateListener() != null) { asAtr().getUpdateListener().accept(this); } return e; } public Object getValueAt(int index) { return get(index).getValue(); } public int indexOf(SInstance supposedChild) { for (int i = size() - 1; i != -1; i if (values.get(i) == supposedChild) { return i; } } return -1; } 
public int size() { return (values == null) ? 0 : values.size(); } public boolean isEmpty() { return (values == null) || values.isEmpty(); } public List<E> getValues() { return (values == null) ? Collections.emptyList() : values; } @Override public List<E> getChildren() { return getValues(); } @Override public Iterator<E> iterator() { return (values == null) ? Collections.emptyIterator() : new Iterator<E>() { Iterator<E> it = values.iterator(); E current; @Override public boolean hasNext() { return it.hasNext(); } @Override public E next() { return current = it.next(); } @Override public void remove() { it.remove(); SIList.this.internalRemove(current); } }; } @Override public Stream<E> stream() { return getValues().stream(); } public String toDebug() { return stream().map(SInstance::toStringDisplay).collect(Collectors.joining("; ")); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((elementsType == null) ? 0 : elementsType.hashCode()); for (E e : this) result = prime * result + (e == null ? 
0 : e.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; SIList<?> other = (SIList<?>) obj; if (size() != other.size()) { return false; } else if (!getType().equals(other.getType())) { return false; } else if (!Objects.equals(getElementsType(), other.getElementsType())) return false; for (int i = size() - 1; i != -1; i if (!Objects.equals(get(i), other.get(i))) { return false; } } return true; } @Override public String toString() { return String.format("%s(%s)", getClass().getSimpleName(), getAllChildren()); } public E first() { if (hasValues()) return values.get(0); return null; } public boolean hasValues() { return values != null && !values.isEmpty(); } public E last() { if (hasValues()) return values.get(values.size() - 1); return null; } @SuppressWarnings("unchecked") public E remove(E e) { return (E) remove(values.indexOf(e)); } }
package info.ata4.unity.asset; import java.io.File; import java.io.FilenameFilter; import java.util.regex.Pattern; /** * File filter for Unity asset files. * * @author Nico Bergemann <barracuda415 at yahoo.de> */ public class AssetFileFilter implements FilenameFilter { private static final Pattern ASSET_PATTERN = Pattern.compile("^CAB-[0-9a-f]{32}$|\\.(shared)?asset(s)?$", Pattern.CASE_INSENSITIVE); @Override public boolean accept(File dir, String name) { // scene files in asset bundles have no extension if (dir == null && !name.contains(".")) { return true; } return ASSET_PATTERN.matcher(name).find(); } }
package info.tregmine.listeners; //import java.util.Random; import info.tregmine.Tregmine; import info.tregmine.api.TregminePlayer; import info.tregmine.quadtree.Point; import info.tregmine.zones.*; import java.util.*; import org.bukkit.*; import org.bukkit.entity.*; import org.bukkit.event.*; import org.bukkit.event.entity.*; import org.bukkit.event.entity.EntityDamageEvent.DamageCause; import org.bukkit.inventory.*; import org.bukkit.inventory.meta.SkullMeta; //import org.bukkit.inventory.meta.ItemMeta; public class TauntListener implements Listener { private Tregmine plugin; public TauntListener(Tregmine instance) { this.plugin = instance; } @EventHandler public void onEntityDeath(EntityDeathEvent event) { if (!(event instanceof PlayerDeathEvent)) { return; } Player player = (Player) event.getEntity(); PlayerDeathEvent e = (PlayerDeathEvent) event; Random rand = new Random(); int msgIndex = rand.nextInt(plugin.getInsults().size()); String death = ChatColor.DARK_GRAY + "DIED - " + player.getName() + " " + plugin.getInsults().get(msgIndex); EntityDamageEvent damage = player.getLastDamageCause(); DamageCause cause = damage.getCause(); boolean playerCause = false; if (damage.getEntity() instanceof Player) playerCause = true; Location location = player.getLocation(); Point pos = new Point(location.getBlockX(), location.getBlockZ()); ZoneWorld world = plugin.getWorld(player.getWorld()); TregminePlayer player2 = plugin.getPlayer(player); Zone currentZone = player2.getCurrentZone(); if (currentZone == null || !currentZone.contains(pos)) { currentZone = world.findZone(pos); player2.setCurrentZone(currentZone); } if (currentZone == null) { e.setDeathMessage(death); return; } boolean lotPVP = false; Lot potentialLot = world.findLot(location); if (potentialLot != null && potentialLot.hasFlag(Lot.Flags.PVP)) { lotPVP = true; } if (cause == DamageCause.ENTITY_ATTACK && (currentZone.isPvp() || lotPVP) && playerCause == true) { World w = player.getWorld(); Entity a = 
w.spawnEntity(player.getLocation(), EntityType.ZOMBIE); if (!(a instanceof Zombie)) { return; } ItemStack item = new ItemStack(Material.SKULL_ITEM, 1, (byte) 3); SkullMeta meta = (SkullMeta) item.getItemMeta(); meta.setOwner(player.getName()); meta.setDisplayName(ChatColor.GRAY + player.getName()); List<String> lore = new ArrayList<String>(); lore.add(ChatColor.stripColor(e.getDeathMessage())); meta.setLore(lore); item.setItemMeta(meta); Zombie zomb = (Zombie) a; zomb.setCustomName(player.getDisplayName()); zomb.setCustomNameVisible(true); EntityEquipment ee = zomb.getEquipment(); ee.setHelmet(item); ee.setHelmetDropChance(1F); zomb.setCanPickupItems(false); } e.setDeathMessage(death); } }
package it.unibo.deis.lia.ramp.util; import java.io.File; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; import org.apache.commons.io.comparator.NameFileComparator; import com.opencsv.CSVWriter; import it.unibo.deis.lia.ramp.RampEntryPoint; public class Benchmark { private static String DATE_TIME_FORMAT = new String("yyyy-MM-dd HH:mm:ss"); private static String DATE_FORMAT = new String("yyyy_MM_dd"); private static String TIME_FORMAT = new String("HH:mm:ss"); private static String HEAD = new String( "StorageDate#StorageTime#Device#Milliseconds#Type#Packet ID#Sender#Recipient"); // new String("Date#Time#Milliseconds#Type#Packet ID#Sender#Recipient"); private static String BENCH_DIR = "./logs"; private static String FILENAME = null; private static String FILE_EXTENSION = ".csv"; private static String BENCH_PATH = null; private static String ENDS_WITH = "benchmark"; private static CSVWriter CSV_WRITER = null; private static String DEVICE = null; static { try { if (GeneralUtils.isAndroidContext()) { GeneralUtils.prepareAndroidContext(); BENCH_DIR = android.os.Environment.getExternalStorageDirectory() + "/ramp/logs"; DEVICE = "android"; } else { if (Files.notExists(Paths.get(BENCH_DIR), LinkOption.NOFOLLOW_LINKS)) { Files.createDirectories(new File(BENCH_DIR).toPath()); } if (System.getProperty("user.name").equalsIgnoreCase("pi")) { DEVICE = "raspberry"; } else if (System.getProperty("os.name").startsWith("Windows") || System.getProperty("os.name").startsWith("Linux")) { DEVICE = "laptop"; } else { DEVICE = "unknown"; } } String lastFile = getLastFilename(BENCH_DIR, ENDS_WITH, FILE_EXTENSION); if (lastFile == null) { CSV_WRITER = createFile(); } else { CSV_WRITER = new CSVWriter(new FileWriter(lastFile, true), ';'); BENCH_PATH = lastFile; } // 
System.out.println("Benchmark: CSV_WRITER aperto " + lastFile); } catch (Exception e) { e.printStackTrace(); } } public static synchronized CSVWriter createFile() { CSVWriter csvWriter = null; try { int nfiles = getNumberFiles(BENCH_DIR, ENDS_WITH, FILE_EXTENSION); if (nfiles == 0) { nfiles++; } FILENAME = getDate(DATE_FORMAT.toString()) + "-" + String.format("%02d", nfiles) + "-" + ENDS_WITH + FILE_EXTENSION; BENCH_PATH = BENCH_DIR + "/" + FILENAME; if (RampEntryPoint.getAndroidContext() != null) { File benchFile = new File(BENCH_PATH); if (!benchFile.exists()) { try { benchFile.createNewFile(); } catch (IOException e) { e.printStackTrace(); } } } csvWriter = new CSVWriter(new FileWriter(BENCH_PATH, true), ';'); String[] entries = HEAD.split(" csvWriter.writeNext(entries); // FIXME csvWriter.flush(); csvWriter.close(); } catch (Exception e) { e.printStackTrace(); } return csvWriter; } public static synchronized void append(long millis, String type, int packetId, int sender, int recipient) { // String sender, String recipient new Thread(new Runnable() { @Override public void run() { try { // FIXME CSVWriter csvWriter = new CSVWriter(new FileWriter(BENCH_PATH, true), ';'); Date date = new Date(); SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT); SimpleDateFormat timeFormat = new SimpleDateFormat(TIME_FORMAT); String row = dateFormat.format(date.getTime()) + "#" + timeFormat.format(date.getTime()) + "#" + DEVICE + "#" + millis + "#" + type + "#" + packetId + "#" + sender + "#" + recipient; // CSV_WRITER.writeNext(row.split(" // FIXME csvWriter.writeNext(row.split(" csvWriter.flush(); csvWriter.close(); } catch (Exception e) { e.printStackTrace(); } } }).start(); } public static String getLastFilename(String dirname, String endsWith, String fileExtension) { File[] files = null; try { File dir = new File(dirname); files = dir.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return 
name.toLowerCase().endsWith(endsWith + fileExtension); } }); Arrays.sort(files, NameFileComparator.NAME_INSENSITIVE_COMPARATOR); } catch (Exception e) { e.printStackTrace(); } if (files.length != 0) return files[files.length - 1].getPath(); return null; } public static int getNumberFiles(String dirname, String endsWith, String fileExtension) { File dir = new File(dirname); File[] files = dir.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(endsWith + fileExtension); } }); Arrays.sort(files, NameFileComparator.NAME_INSENSITIVE_COMPARATOR); return files.length; } public static String getDate(String format) { // example "HH:mm:ss", "yyyy-MM-dd'T'HH:mm:ss" ecc... SimpleDateFormat dateFormat = new SimpleDateFormat(format); return dateFormat.format(new Date()); } public static void closeCsvWriter() { try { if (CSV_WRITER != null) { // System.out.println("Benchmark, closeCsvWriter(): CSV_WRITER // prima di chiudere"); CSV_WRITER.flush(); CSV_WRITER.close(); // System.out.println("Benchmark, closeCsvWriter(): CSV_WRITER // chiuso"); } } catch (Exception e) { e.printStackTrace(); } } private static String getFileExtension(String filename) { if (filename.lastIndexOf(".") != -1 && filename.lastIndexOf(".") != 0) return filename.substring(filename.lastIndexOf(".") + 1); else return ""; } }
package net.sf.picard.sam;

import net.sf.picard.cmdline.CommandLineParser;
import net.sf.picard.cmdline.Option;
import net.sf.picard.cmdline.StandardOptionDefinitions;
import net.sf.picard.cmdline.Usage;
import net.sf.picard.metrics.MetricsFile;
import net.sf.picard.util.Histogram;
import net.sf.picard.util.Log;
import net.sf.picard.PicardException;
import net.sf.picard.io.IoUtil;
import net.sf.picard.util.ProgressLogger;
import net.sf.samtools.*;
import net.sf.samtools.SAMFileHeader.SortOrder;
import net.sf.samtools.util.CloseableIterator;
import net.sf.samtools.util.SortingCollection;
import net.sf.samtools.util.SortingLongCollection;

import java.io.*;
import java.util.*;

/**
 * A better duplication marking algorithm that handles all cases including clipped
 * and gapped alignments.
 *
 * @author Tim Fennell
 */
public class MarkDuplicates extends AbstractDuplicateFindingAlgorithm {
    private final Log log = Log.getInstance(MarkDuplicates.class);

    /**
     * If more than this many sequences in SAM file, don't spill to disk because there will not
     * be enough file handles.
     */
    // NOTE(review): the javadoc above appears to belong to
    // MAX_SEQUENCES_FOR_DISK_READ_ENDS_MAP further down, not to USAGE.
    @Usage
    public final String USAGE = CommandLineParser.getStandardUsagePreamble(getClass()) +
            "Examines aligned records in the supplied SAM or BAM file to locate duplicate molecules. " +
            "All records are then written to the output file with the duplicate records flagged.";

    @Option(shortName=StandardOptionDefinitions.INPUT_SHORT_NAME,
            doc="One or more input SAM or BAM files to analyze. Must be coordinate sorted.")
    public List<File> INPUT;

    // NOTE(review): "right" in the doc string below is presumably a typo for
    // "write"; left unchanged here because annotation values are user-visible
    // runtime strings.
    @Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME,
            doc="The output file to right marked records to")
    public File OUTPUT;

    @Option(shortName="M",
            doc="File to write duplication metrics to")
    public File METRICS_FILE;

    @Option(shortName=StandardOptionDefinitions.PROGRAM_RECORD_ID_SHORT_NAME,
            doc="The program record ID for the @PG record(s) created by this program. Set to null to disable " +
                    "PG record creation. This string may have a suffix appended to avoid collision with other " +
                    "program record IDs.",
            optional=true)
    public String PROGRAM_RECORD_ID = "MarkDuplicates";

    @Option(shortName="PG_VERSION",
            doc="Value of VN tag of PG record to be created. If not specified, the version will be detected automatically.",
            optional=true)
    public String PROGRAM_GROUP_VERSION;

    @Option(shortName="PG_COMMAND",
            doc="Value of CL tag of PG record to be created. If not supplied the command line will be detected automatically.",
            optional=true)
    public String PROGRAM_GROUP_COMMAND_LINE;

    @Option(shortName="PG_NAME",
            doc="Value of PN tag of PG record to be created.")
    public String PROGRAM_GROUP_NAME = "MarkDuplicates";

    @Option(shortName="CO",
            doc="Comment(s) to include in the output file's header.",
            optional=true)
    public List<String> COMMENT = new ArrayList<String>();

    @Option(doc="If true do not write duplicates to the output file instead of writing them with appropriate flags set.")
    public boolean REMOVE_DUPLICATES = false;

    @Option(shortName=StandardOptionDefinitions.ASSUME_SORTED_SHORT_NAME,
            doc="If true, assume that the input file is coordinate sorted even if the header says otherwise.")
    public boolean ASSUME_SORTED = false;

    @Option(shortName="MAX_SEQS",
            doc="This option is obsolete. ReadEnds will always be spilled to disk.")
    public int MAX_SEQUENCES_FOR_DISK_READ_ENDS_MAP = 50000;

    @Option(shortName="MAX_FILE_HANDLES",
            doc="Maximum number of file handles to keep open when spilling read ends to disk. " +
                    "Set this number a little lower than the per-process maximum number of file that may be open. " +
                    "This number can be found by executing the 'ulimit -n' command on a Unix system.")
    public int MAX_FILE_HANDLES_FOR_READ_ENDS_MAP = 8000;

    @Option(doc="This number, plus the maximum RAM available to the JVM, determine the memory footprint used by " +
            "some of the sorting collections. If you are running out of memory, try reducing this number.")
    public double SORTING_COLLECTION_SIZE_RATIO = 0.25;

    // Disk-backed sorted collections of read-end records for pairs/fragments,
    // built in the first pass over the input.
    private SortingCollection<ReadEnds> pairSort;
    private SortingCollection<ReadEnds> fragSort;
    // Sorted file-record indexes of the reads to be flagged as duplicates.
    private SortingLongCollection duplicateIndexes;
    private int numDuplicateIndices = 0;
    // Maps library name -> compact numeric id used by the optical-dupe histogram.
    final private Map<String,Short> libraryIds = new HashMap<String,Short>();
    private short nextLibraryId = 1;

    // Variables used for optical duplicate detection and tracking
    private final Histogram<Short> opticalDupesByLibraryId = new Histogram<Short>();

    // All PG IDs seen in merged input files in first pass.  These are gather for two reasons:
    // - to know how many different PG records to create to represent this program invocation.
    // - to know what PG IDs are already used to avoid collisions when creating new ones.
    // Note that if there are one or more records that do not have a PG tag, then a null value
    // will be stored in this set.
    private final Set<String> pgIdsSeen = new HashSet<String>();

    /** Stock main method. */
    public static void main(final String[] args) {
        System.exit(new MarkDuplicates().instanceMain(args));
    }

    /**
     * Main work method.  Reads the BAM file once and collects sorted information about
     * the 5' ends of both ends of each read (or just one end in the case of pairs).
     * Then makes a pass through those determining duplicates before re-reading the
     * input file and writing it out with duplication flags set correctly.
*/
protected int doWork() {
    // Validate I/O up front so we fail fast before the expensive passes.
    for (final File f : INPUT) IoUtil.assertFileIsReadable(f);
    IoUtil.assertFileIsWritable(OUTPUT);
    IoUtil.assertFileIsWritable(METRICS_FILE);

    reportMemoryStats("Start of doWork");

    // Pass 1: collect sorted read-end information, then compute which record
    // indexes are duplicates.
    log.info("Reading input file and constructing read end information.");
    buildSortedReadEndLists();
    reportMemoryStats("After buildSortedReadEndLists");
    generateDuplicateIndexes();
    reportMemoryStats("After generateDuplicateIndexes");
    log.info("Marking " + this.numDuplicateIndices + " records as duplicates.");
    log.info("Found " + ((long) this.opticalDupesByLibraryId.getSumOfValues()) + " optical duplicate clusters.");

    // Pass 2: re-read the input, flag duplicates by file index, and write output.
    final Map<String,DuplicationMetrics> metricsByLibrary = new HashMap<String,DuplicationMetrics>();
    final SamHeaderAndIterator headerAndIterator = openInputs();
    final SAMFileHeader header = headerAndIterator.header;

    final SAMFileHeader outputHeader = header.clone();
    outputHeader.setSortOrder(SAMFileHeader.SortOrder.coordinate);
    for (final String comment : COMMENT) outputHeader.addComment(comment);

    // Key: previous PG ID on a SAM Record (or null).  Value: New PG ID to replace it.
    final Map<String, String> chainedPgIds;
    // Generate new PG record(s)
    if (PROGRAM_RECORD_ID != null) {
        final PgIdGenerator pgIdGenerator = new PgIdGenerator(outputHeader);
        if (PROGRAM_GROUP_VERSION == null) {
            PROGRAM_GROUP_VERSION = this.getVersion();
        }
        if (PROGRAM_GROUP_COMMAND_LINE == null) {
            PROGRAM_GROUP_COMMAND_LINE = this.getCommandLine();
        }
        chainedPgIds = new HashMap<String, String>();
        // One new chained PG record per distinct PG ID seen in pass 1.
        for (final String existingId : pgIdsSeen) {
            final String newPgId = pgIdGenerator.getNonCollidingId(PROGRAM_RECORD_ID);
            chainedPgIds.put(existingId, newPgId);
            final SAMProgramRecord programRecord = new SAMProgramRecord(newPgId);
            programRecord.setProgramVersion(PROGRAM_GROUP_VERSION);
            programRecord.setCommandLine(PROGRAM_GROUP_COMMAND_LINE);
            programRecord.setProgramName(PROGRAM_GROUP_NAME);
            programRecord.setPreviousProgramGroupId(existingId);
            outputHeader.addProgramRecord(programRecord);
        }
    } else {
        chainedPgIds = null;
    }

    final SAMFileWriter out = new SAMFileWriterFactory().makeSAMOrBAMWriter(outputHeader,
                                                                            true,
                                                                            OUTPUT);

    // Now copy over the file while marking all the necessary indexes as duplicates
    long recordInFileIndex = 0;
    // duplicateIndexes is consumed in ascending order, matched against the
    // running record index; -1 means "no more duplicates".
    long nextDuplicateIndex = (this.duplicateIndexes.hasNext() ? this.duplicateIndexes.next(): -1);

    // Pre-create a metrics entry per library so libraries with zero reads
    // still appear in the metrics file.
    for(final SAMReadGroupRecord readGroup : header.getReadGroups()) {
        final String library = readGroup.getLibrary();
        DuplicationMetrics metrics = metricsByLibrary.get(library);
        if (metrics == null) {
            metrics = new DuplicationMetrics();
            metrics.LIBRARY = library;
            metricsByLibrary.put(library, metrics);
        }
    }

    final ProgressLogger progress = new ProgressLogger(log, (int) 1e7, "Written");
    final CloseableIterator<SAMRecord> iterator = headerAndIterator.iterator;
    while (iterator.hasNext()) {
        final SAMRecord rec = iterator.next();
        // Secondary/supplementary alignments are passed through untouched and
        // do not contribute to metrics.
        if (!rec.isSecondaryOrSupplementary()) {
            final String library = getLibraryName(header, rec);
            DuplicationMetrics metrics = metricsByLibrary.get(library);
            if (metrics == null) {
                metrics = new DuplicationMetrics();
                metrics.LIBRARY = library;
                metricsByLibrary.put(library, metrics);
            }

            // First bring the simple metrics up to date
            if (rec.getReadUnmappedFlag()) {
                ++metrics.UNMAPPED_READS;
            }
            else if (!rec.getReadPairedFlag() || rec.getMateUnmappedFlag()) {
                ++metrics.UNPAIRED_READS_EXAMINED;
            }
            else {
                ++metrics.READ_PAIRS_EXAMINED; // will need to be divided by 2 at the end
            }

            if (recordInFileIndex == nextDuplicateIndex) {
                rec.setDuplicateReadFlag(true);

                // Update the duplication metrics
                if (!rec.getReadPairedFlag() || rec.getMateUnmappedFlag()) {
                    ++metrics.UNPAIRED_READ_DUPLICATES;
                }
                else {
                    ++metrics.READ_PAIR_DUPLICATES;// will need to be divided by 2 at the end
                }

                // Now try and figure out the next duplicate index
                if (this.duplicateIndexes.hasNext()) {
                    nextDuplicateIndex = this.duplicateIndexes.next();
                } else {
                    // Only happens once we've marked all the duplicates
                    nextDuplicateIndex = -1;
                }
            }
            else {
                rec.setDuplicateReadFlag(false);
            }
        }
        // NOTE: the index counts every record (including secondary/supplementary),
        // mirroring how indexes were assigned in the first pass.
        recordInFileIndex++;

        if (this.REMOVE_DUPLICATES && rec.getDuplicateReadFlag()) {
            // do nothing
        }
        else {
            if (PROGRAM_RECORD_ID != null) {
                rec.setAttribute(SAMTag.PG.name(), chainedPgIds.get(rec.getStringAttribute(SAMTag.PG.name())));
            }
            out.addAlignment(rec);
            progress.record(rec);
        }
    }

    this.duplicateIndexes.cleanup();
    reportMemoryStats("Before output close");
    out.close();
    reportMemoryStats("After output close");

    // Write out the metrics
    final MetricsFile<DuplicationMetrics,Double> file = getMetricsFile();
    for (final Map.Entry<String,DuplicationMetrics> entry : metricsByLibrary.entrySet()) {
        final String libraryName = entry.getKey();
        final DuplicationMetrics metrics = entry.getValue();

        // Pair counters were incremented once per read; halve to count pairs.
        metrics.READ_PAIRS_EXAMINED = metrics.READ_PAIRS_EXAMINED / 2;
        metrics.READ_PAIR_DUPLICATES = metrics.READ_PAIR_DUPLICATES / 2;

        // Add the optical dupes to the metrics
        final Short libraryId = this.libraryIds.get(libraryName);
        if (libraryId != null) {
            final Histogram<Short>.Bin bin = this.opticalDupesByLibraryId.get(libraryId);
            if (bin != null) {
                metrics.READ_PAIR_OPTICAL_DUPLICATES = (long) bin.getValue();
            }
        }
        metrics.calculateDerivedMetrics();
        file.addMetric(metrics);
    }

    // Only a single-library run gets the ROI histogram attached.
    if (metricsByLibrary.size() == 1) {
        file.setHistogram(metricsByLibrary.values().iterator().next().calculateRoiHistogram());
    }

    file.write(METRICS_FILE);

    return 0;
}

/** Little class used to package up a header and an iterable/iterator. */
private static final class SamHeaderAndIterator {
    final SAMFileHeader header;
    final CloseableIterator<SAMRecord> iterator;

    private SamHeaderAndIterator(final SAMFileHeader header, final CloseableIterator<SAMRecord> iterator) {
        this.header = header;
        this.iterator = iterator;
    }
}

/**
 * Since MarkDuplicates reads its inputs more than once this method does all the opening
 * and checking of the inputs.
*/
private SamHeaderAndIterator openInputs() {
    final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>(INPUT.size());
    final List<SAMFileReader> readers = new ArrayList<SAMFileReader>(INPUT.size());

    for (final File f : INPUT) {
        final SAMFileReader reader = new SAMFileReader(f);
        final SAMFileHeader header = reader.getFileHeader();

        // Duplicate marking requires coordinate order unless the caller vouches for it.
        if (!ASSUME_SORTED && header.getSortOrder() != SortOrder.coordinate) {
            throw new PicardException("Input file " + f.getAbsolutePath() + " is not coordinate sorted.");
        }

        headers.add(header);
        readers.add(reader);
    }

    if (headers.size() == 1) {
        // Single input: no merging needed, hand back the header and iterator directly.
        return new SamHeaderAndIterator(headers.get(0), readers.get(0).iterator());
    } else {
        // Multiple inputs: merge the headers and interleave the readers in coordinate order.
        final SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SortOrder.coordinate, headers, false);
        final MergingSamRecordIterator iterator = new MergingSamRecordIterator(headerMerger, readers, ASSUME_SORTED);
        return new SamHeaderAndIterator(headerMerger.getMergedHeader(), iterator);
    }
}

/** Print out some quick JVM memory stats. */
private void reportMemoryStats(final String stage) {
    System.gc(); // best-effort GC so the reported numbers reflect live data
    final Runtime runtime = Runtime.getRuntime();
    log.info(stage + " freeMemory: " + runtime.freeMemory() +
            "; totalMemory: " + runtime.totalMemory() +
            "; maxMemory: " + runtime.maxMemory());
}

/**
 * Goes through all the records in a file and generates a set of ReadEnds objects that
 * hold the necessary information (reference sequence, 5' read coordinate) to do
 * duplication, caching to disk as necessary to sort them.
*/
private void buildSortedReadEndLists() {
    // Cap in-memory entries by a fixed fraction of the heap divided by the per-entry size.
    final int maxInMemory = (int) ((Runtime.getRuntime().maxMemory() * SORTING_COLLECTION_SIZE_RATIO) / ReadEnds.SIZE_OF);
    log.info("Will retain up to " + maxInMemory + " data points before spilling to disk.");

    this.pairSort = SortingCollection.newInstance(ReadEnds.class,
                                                  new ReadEndsCodec(),
                                                  new ReadEndsComparator(),
                                                  maxInMemory,
                                                  TMP_DIR);

    this.fragSort = SortingCollection.newInstance(ReadEnds.class,
                                                  new ReadEndsCodec(),
                                                  new ReadEndsComparator(),
                                                  maxInMemory,
                                                  TMP_DIR);

    final SamHeaderAndIterator headerAndIterator = openInputs();
    final SAMFileHeader header = headerAndIterator.header;
    // Holds the first-seen end of each pair until its mate shows up.
    final ReadEndsMap tmp = new DiskReadEndsMap(MAX_FILE_HANDLES_FOR_READ_ENDS_MAP);
    long index = 0;
    final ProgressLogger progress = new ProgressLogger(log, (int) 1e6, "Read");
    final CloseableIterator<SAMRecord> iterator = headerAndIterator.iterator;

    while (iterator.hasNext()) {
        final SAMRecord rec = iterator.next();

        // This doesn't have anything to do with building sorted ReadEnd lists, but it can be done in the same pass
        // over the input
        if (PROGRAM_RECORD_ID != null) {
            pgIdsSeen.add(rec.getStringAttribute(SAMTag.PG.name()));
        }

        if (rec.getReadUnmappedFlag()) {
            if (rec.getReferenceIndex() == -1) {
                // When we hit the unmapped reads with no coordinate, no reason to continue.
                break;
            }
            // If this read is unmapped but sorted with the mapped reads, just skip it.
        } else if (!rec.isSecondaryOrSupplementary()){
            final ReadEnds fragmentEnd = buildReadEnds(header, index, rec);
            this.fragSort.add(fragmentEnd);

            if (rec.getReadPairedFlag() && !rec.getMateUnmappedFlag()) {
                final String key = rec.getAttribute(ReservedTagConstants.READ_GROUP_ID) + ":" + rec.getReadName();
                ReadEnds pairedEnds = tmp.remove(rec.getReferenceIndex(), key);

                // See if we've already seen the first end or not
                if (pairedEnds == null) {
                    pairedEnds = buildReadEnds(header, index, rec);
                    tmp.put(pairedEnds.read2Sequence, key, pairedEnds);
                } else {
                    final int sequence = fragmentEnd.read1Sequence;
                    final int coordinate = fragmentEnd.read1Coordinate;

                    // If the second read is actually later, just add the second read data, else flip the reads
                    if (sequence > pairedEnds.read1Sequence ||
                            (sequence == pairedEnds.read1Sequence && coordinate >= pairedEnds.read1Coordinate)) {
                        pairedEnds.read2Sequence    = sequence;
                        pairedEnds.read2Coordinate  = coordinate;
                        pairedEnds.read2IndexInFile = index;
                        pairedEnds.orientation = getOrientationByte(pairedEnds.orientation == ReadEnds.R,
                                                                    rec.getReadNegativeStrandFlag());
                    } else {
                        pairedEnds.read2Sequence    = pairedEnds.read1Sequence;
                        pairedEnds.read2Coordinate  = pairedEnds.read1Coordinate;
                        pairedEnds.read2IndexInFile = pairedEnds.read1IndexInFile;
                        pairedEnds.read1Sequence    = sequence;
                        pairedEnds.read1Coordinate  = coordinate;
                        pairedEnds.read1IndexInFile = index;
                        pairedEnds.orientation = getOrientationByte(rec.getReadNegativeStrandFlag(),
                                                                    pairedEnds.orientation == ReadEnds.R);
                    }

                    pairedEnds.score += getScore(rec);
                    this.pairSort.add(pairedEnds);
                }
            }
        }

        // Print out some stats every 1m reads
        ++index;
        if (progress.record(rec)) {
            log.info("Tracking " + tmp.size() + " as yet unmatched pairs. " + tmp.sizeInRam() + " records in RAM.");
        }
    }

    log.info("Read " + index + " records. " + tmp.size() + " pairs never matched.");
    iterator.close();

    // Tell these collections to free up memory if possible.
    this.pairSort.doneAdding();
    this.fragSort.doneAdding();
}

/** Builds a read ends object that represents a single read. */
private ReadEnds buildReadEnds(final SAMFileHeader header, final long index, final SAMRecord rec) {
    final ReadEnds ends = new ReadEnds();
    ends.read1Sequence = rec.getReferenceIndex();
    // Use the unclipped 5' end so soft-clipping does not perturb duplicate detection.
    ends.read1Coordinate = rec.getReadNegativeStrandFlag() ? rec.getUnclippedEnd() : rec.getUnclippedStart();
    ends.orientation = rec.getReadNegativeStrandFlag() ? ReadEnds.R : ReadEnds.F;
    ends.read1IndexInFile = index;
    ends.score = getScore(rec);

    // Doing this lets the ends object know that it's part of a pair
    if (rec.getReadPairedFlag() && !rec.getMateUnmappedFlag()) {
        ends.read2Sequence = rec.getMateReferenceIndex();
    }

    // Fill in the library ID
    ends.libraryId = getLibraryId(header, rec);

    // Fill in the location information for optical duplicates
    if (addLocationInformation(rec.getReadName(), ends)) {
        // calculate the RG number (nth in list)
        ends.readGroup = 0;
        final String rg = (String) rec.getAttribute("RG");
        final List<SAMReadGroupRecord> readGroups = header.getReadGroups();

        if (rg != null && readGroups != null) {
            for (final SAMReadGroupRecord readGroup : readGroups) {
                if (readGroup.getReadGroupId().equals(rg)) break;
                else ends.readGroup++;
            }
        }
    }

    return ends;
}

/** Get the library ID for the given SAM record. */
private short getLibraryId(final SAMFileHeader header, final SAMRecord rec) {
    final String library = getLibraryName(header, rec);
    Short libraryId = this.libraryIds.get(library);

    if (libraryId == null) {
        // First time this library is seen: assign the next sequential id.
        libraryId = this.nextLibraryId++;
        this.libraryIds.put(library, libraryId);
    }

    return libraryId;
}

/**
 * Gets the library name from the header for the record. If the RG tag is not present on
 * the record, or the library isn't denoted on the read group, a constant string is
 * returned.
*/
private String getLibraryName(final SAMFileHeader header, final SAMRecord rec) {
    final String readGroupId = (String) rec.getAttribute("RG");

    if (readGroupId != null) {
        final SAMReadGroupRecord rg = header.getReadGroup(readGroupId);
        if (rg != null) {
            return rg.getLibrary();
        }
    }

    // No RG tag, or no library on the read group: fall back to a shared constant.
    return "Unknown Library";
}

/**
 * Returns a single byte that encodes the orientation of the two reads in a pair.
 */
private byte getOrientationByte(final boolean read1NegativeStrand, final boolean read2NegativeStrand) {
    if (read1NegativeStrand) {
        if (read2NegativeStrand) return ReadEnds.RR;
        else return ReadEnds.RF;
    } else {
        if (read2NegativeStrand) return ReadEnds.FR;
        else return ReadEnds.FF;
    }
}

/** Calculates a score for the read which is the sum of all base qualities that are &gt;= 15. */
private short getScore(final SAMRecord rec) {
    short score = 0;
    for (final byte b : rec.getBaseQualities()) {
        if (b >= 15) score += b;
    }

    return score;
}

/**
 * Goes through the accumulated ReadEnds objects and determines which of them are
 * to be marked as duplicates. The ordered duplicate record indexes are accumulated
 * into {@link #duplicateIndexes} (this method returns nothing).
 */
private void generateDuplicateIndexes() {
    // Keep this number from getting too large even if there is a huge heap.
    final int maxInMemory = (int) Math.min((Runtime.getRuntime().maxMemory() * 0.25) / SortingLongCollection.SIZEOF,
                                           (double)(Integer.MAX_VALUE - 5));
    log.info("Will retain up to " + maxInMemory + " duplicate indices before spilling to disk.");
    this.duplicateIndexes = new SortingLongCollection(maxInMemory, TMP_DIR.toArray(new File[TMP_DIR.size()]));

    ReadEnds firstOfNextChunk = null;
    final List<ReadEnds> nextChunk = new ArrayList<ReadEnds>(200);

    // First just do the pairs
    log.info("Traversing read pair information and detecting duplicates.");
    for (final ReadEnds next : this.pairSort) {
        if (firstOfNextChunk == null) {
            firstOfNextChunk = next;
            nextChunk.add(firstOfNextChunk);
        } else if (areComparableForDuplicates(firstOfNextChunk, next, true)) {
            nextChunk.add(next);
        } else {
            // Position changed: resolve the finished chunk, then start a new one.
            if (nextChunk.size() > 1) {
                markDuplicatePairs(nextChunk);
            }

            nextChunk.clear();
            nextChunk.add(next);
            firstOfNextChunk = next;
        }
    }
    markDuplicatePairs(nextChunk);
    this.pairSort.cleanup();
    this.pairSort = null;

    // Now deal with the fragments
    log.info("Traversing fragment information and detecting duplicates.");
    boolean containsPairs = false;
    boolean containsFrags = false;

    for (final ReadEnds next : this.fragSort) {
        if (firstOfNextChunk != null && areComparableForDuplicates(firstOfNextChunk, next, false)) {
            nextChunk.add(next);
            containsPairs = containsPairs || next.isPaired();
            containsFrags = containsFrags || !next.isPaired();
        } else {
            // Fragments only need resolving when at least one real fragment is present.
            if (nextChunk.size() > 1 && containsFrags) {
                markDuplicateFragments(nextChunk, containsPairs);
            }

            nextChunk.clear();
            nextChunk.add(next);
            firstOfNextChunk = next;
            containsPairs = next.isPaired();
            containsFrags = !next.isPaired();
        }
    }
    markDuplicateFragments(nextChunk, containsPairs);
    this.fragSort.cleanup();
    this.fragSort = null;

    log.info("Sorting list of duplicate records.");
    this.duplicateIndexes.doneAddingStartIteration();
}

/** True when both ends share library, read1 position and orientation (and, optionally, read2 position). */
private boolean areComparableForDuplicates(final ReadEnds lhs, final ReadEnds rhs, final boolean compareRead2) {
    boolean retval = lhs.libraryId == rhs.libraryId &&
                     lhs.read1Sequence == rhs.read1Sequence &&
                     lhs.read1Coordinate == rhs.read1Coordinate &&
                     lhs.orientation == rhs.orientation;

    if (retval && compareRead2) {
        retval = lhs.read2Sequence == rhs.read2Sequence &&
                 lhs.read2Coordinate == rhs.read2Coordinate;
    }

    return retval;
}

/** Records one BAM record index as a duplicate and bumps the running count. */
private void addIndexAsDuplicate(final long bamIndex) {
    this.duplicateIndexes.add(bamIndex);
    ++this.numDuplicateIndices;
}

/**
 * Takes a list of ReadEnds objects for the same position and marks as duplicates
 * every entry except the single highest-scoring one; both ends of each duplicate
 * pair are recorded.
 *
 * @param list
 */
private void markDuplicatePairs(final List<ReadEnds> list) {
    short maxScore = 0;
    ReadEnds best = null;

    for (final ReadEnds end : list) {
        if (end.score > maxScore || best == null) {
            maxScore = end.score;
            best = end;
        }
    }

    for (final ReadEnds end : list) {
        if (end != best) {
            addIndexAsDuplicate(end.read1IndexInFile);
            addIndexAsDuplicate(end.read2IndexInFile);
        }
    }

    trackOpticalDuplicates(list);
}

/**
 * Looks through the set of reads and identifies how many of the duplicates are
 * in fact optical duplicates, and stores the data in the instance level histogram.
 */
private void trackOpticalDuplicates(final List<ReadEnds> list) {
    final boolean[] opticalDuplicateFlags = findOpticalDuplicates(list, OPTICAL_DUPLICATE_PIXEL_DISTANCE);

    int opticalDuplicates = 0;
    for (final boolean b: opticalDuplicateFlags) if (b) ++opticalDuplicates;
    if (opticalDuplicates > 0) {
        // All entries in a chunk share the same library, so index 0 is representative.
        this.opticalDupesByLibraryId.increment(list.get(0).libraryId, opticalDuplicates);
    }
}

/**
 * Takes a list of ReadEnds objects and removes from it all objects that should
 * not be marked as duplicates.
*
 * @param list
 */
private void markDuplicateFragments(final List<ReadEnds> list, final boolean containsPairs) {
    if (containsPairs) {
        // A proper pair at this position always wins: every unpaired fragment is a duplicate.
        for (final ReadEnds end : list) {
            if (!end.isPaired()) addIndexAsDuplicate(end.read1IndexInFile);
        }
    } else {
        // Fragments only: keep the single highest-scoring one.
        short maxScore = 0;
        ReadEnds best = null;
        for (final ReadEnds end : list) {
            if (end.score > maxScore || best == null) {
                maxScore = end.score;
                best = end;
            }
        }

        for (final ReadEnds end : list) {
            if (end != best) {
                addIndexAsDuplicate(end.read1IndexInFile);
            }
        }
    }
}

/** Comparator for ReadEnds that orders by read1 position then pair orientation then read2 position. */
static class ReadEndsComparator implements Comparator<ReadEnds> {
    public int compare(final ReadEnds lhs, final ReadEnds rhs) {
        // NOTE(review): these int subtractions assume operands are small enough not to
        // overflow (library ids, sequence indexes, genomic coordinates) -- confirm.
        int retval = lhs.libraryId - rhs.libraryId;
        if (retval == 0) retval = lhs.read1Sequence - rhs.read1Sequence;
        if (retval == 0) retval = lhs.read1Coordinate - rhs.read1Coordinate;
        if (retval == 0) retval = lhs.orientation - rhs.orientation;
        if (retval == 0) retval = lhs.read2Sequence - rhs.read2Sequence;
        if (retval == 0) retval = lhs.read2Coordinate - rhs.read2Coordinate;
        if (retval == 0) retval = (int) (lhs.read1IndexInFile - rhs.read1IndexInFile);
        if (retval == 0) retval = (int) (lhs.read2IndexInFile - rhs.read2IndexInFile);

        return retval;
    }
}

/** Hands out program-group ids that do not collide with any id already present in the header. */
static class PgIdGenerator {
    private int recordCounter;

    private final Set<String> idsThatAreAlreadyTaken = new HashSet<String>();

    PgIdGenerator(final SAMFileHeader header) {
        for (final SAMProgramRecord pgRecord : header.getProgramRecords()) {
            idsThatAreAlreadyTaken.add(pgRecord.getProgramGroupId());
        }
        recordCounter = idsThatAreAlreadyTaken.size();
    }

    String getNonCollidingId(final String recordId) {
        if(!idsThatAreAlreadyTaken.contains(recordId)) {
            // don't remap 1st record. If there are more records
            // with this id, they will be remapped in the 'else'.
            idsThatAreAlreadyTaken.add(recordId);
            ++recordCounter;
            return recordId;
        } else {
            String newId;
            // Below we tack on one of roughly 1.7 million possible 4 digit base36 at random. We do this because
            // our old process of just counting from 0 upward and adding that to the previous id led to 1000s of
            // calls idsThatAreAlreadyTaken.contains() just to resolve 1 collision when merging 1000s of similarly
            // processed bams.
            while(idsThatAreAlreadyTaken.contains(newId = recordId + "." + SamFileHeaderMerger.positiveFourDigitBase36Str(recordCounter++)));

            idsThatAreAlreadyTaken.add( newId );
            return newId;
        }
    }
}
}
package org.jdesktop.swingx;

import org.jdesktop.swingx.calendar.DateSpan;
import org.jdesktop.swingx.calendar.JXMonthView;

import javax.swing.*;
import javax.swing.JFormattedTextField.AbstractFormatter;
import javax.swing.JFormattedTextField.AbstractFormatterFactory;
import javax.swing.border.Border;
import javax.swing.border.LineBorder;
import javax.swing.text.DefaultFormatterFactory;
import java.awt.*;
import java.awt.event.*;
import java.text.DateFormat;
import java.text.MessageFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * A component that combines a button, an editable field and a JXMonthView
 * component. The user can select a date from the calendar component, which
 * appears when the button is pressed. The selection from the calendar
 * component will be displayed in editable field. Values may also be modified
 * manually by entering a date into the editable field using one of the
 * supported date formats.
 *
 * @author Joshua Outwater
 */
public class JXDatePicker extends JComponent {
    /** The editable date field that displays the date */
    private JFormattedTextField _dateField;
    /**
     * Popup that displays the month view with controls for
     * traversing/selecting dates.
     */
    private JXDatePickerPopup _popup;
    // Panel shown at the bottom of the popup (defaults to a TodayPanel link).
    private JPanel _linkPanel;
    // Date (millis) the link panel points at, and the MessageFormat used to render it.
    private long _linkDate;
    private MessageFormat _linkFormat;
    // Button that toggles the month-view popup; fixed width used by doLayout().
    private JButton _popupButton;
    private int _popupButtonWidth = 20;
    private JXMonthView _monthView;
    // Shared mouse handler that forwards button drag/release events into the month view.
    private Handler _handler;
    private String _actionCommand = "selectionChanged";

    /**
     * Create a new date picker using the current date as the initial
     * selection and the default abstract formatter
     * <code>JXDatePickerFormatter</code>.
     */
    public JXDatePicker() {
        this(System.currentTimeMillis());
    }

    /**
     * Create a new date picker using the specified time as the initial
     * selection and the default abstract formatter
     * <code>JXDatePickerFormatter</code>.
*
 * @param millis initial time in milliseconds
 */
public JXDatePicker(long millis) {
    _monthView = new JXMonthView();
    _monthView.setTraversable(true);

    _dateField = createEditor();
    _dateField.setName("dateField");
    _dateField.setBorder(null);

    _handler = new Handler();
    _popupButton = new JButton();
    _popupButton.setName("popupButton");
    _popupButton.setRolloverEnabled(false);
    _popupButton.addMouseListener(_handler);
    _popupButton.addMouseMotionListener(_handler);

    // this is a trick to get hold of the client prop which
    // prevents closing of the popup
    JComboBox box = new JComboBox();
    Object preventHide = box.getClientProperty("doNotCancelPopup");
    _popupButton.putClientProperty("doNotCancelPopup", preventHide);

    // ENTER in the editor commits the edit; SPACE on the button toggles the popup.
    KeyStroke enterKey = KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0, false);
    InputMap inputMap = _dateField.getInputMap(JComponent.WHEN_FOCUSED);
    inputMap.put(enterKey, "COMMIT_EDIT");
    ActionMap actionMap = _dateField.getActionMap();
    actionMap.put("COMMIT_EDIT", new CommitEditAction());

    KeyStroke spaceKey = KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0, false);
    inputMap = _popupButton.getInputMap(JComponent.WHEN_FOCUSED);
    inputMap.put(spaceKey, "TOGGLE_POPUP");
    actionMap = _popupButton.getActionMap();
    actionMap.put("TOGGLE_POPUP", new TogglePopupAction());

    add(_dateField);
    add(_popupButton);
    updateUI();

    _linkDate = System.currentTimeMillis();
    _linkPanel = new TodayPanel();

    _dateField.setValue(new Date(millis));
}

/**
 * Resets the UI property to a value from the current look and feel.
*/ public void updateUI() { int cols = UIManager.getInt("JXDatePicker.numColumns"); if (cols == -1) { cols = 10; } _dateField.setColumns(cols); String str = UIManager.getString("JXDatePicker.arrowDown.tooltip"); if (str == null) { str = "Show Calendar"; } _popupButton.setToolTipText(str); Icon icon = UIManager.getIcon("JXDatePicker.arrowDown.image"); if (icon == null) { icon = (Icon)UIManager.get("Tree.expandedIcon"); } _popupButton.setIcon(icon); Border border = UIManager.getBorder("JXDatePicker.border"); if (border == null) { border = BorderFactory.createCompoundBorder( LineBorder.createGrayLineBorder(), BorderFactory.createEmptyBorder(3, 3, 3, 3)); } _dateField.setBorder(border); String formatString = UIManager.getString("JXDatePicker.linkFormat"); if (formatString == null) { formatString = "Today is {0,date, dd MMMM yyyy}"; } _linkFormat = new MessageFormat(formatString); } /** * Replaces the currently installed formatter and factory used by the * editor. These string formats are defined by the * <code>java.text.SimpleDateFormat</code> class. * * @param formats The string formats to use. * @see java.text.SimpleDateFormat */ public void setFormats(String[] formats) { DateFormat[] dateFormats = new DateFormat[formats.length]; for (int counter = formats.length - 1; counter >= 0; counter dateFormats[counter] = new SimpleDateFormat(formats[counter]); } setFormats(dateFormats); } /** * Replaces the currently installed formatter and factory used by the * editor. * * @param formats The date formats to use. */ public void setFormats(DateFormat[] formats) { _dateField.setFormatterFactory(new DefaultFormatterFactory( new JXDatePickerFormatter(formats))); } /** * Returns an array of the formats used by the installed formatter * if it is a subclass of <code>JXDatePickerFormatter<code>. * <code>javax.swing.JFormattedTextField.AbstractFormatter</code> * and <code>javax.swing.text.DefaultFormatter</code> do not have * support for accessing the formats used. 
*
 * @return array of formats or null if unavailable.
 */
public DateFormat[] getFormats() {
    // Dig this out from the factory, if possible, otherwise return null.
    AbstractFormatterFactory factory = _dateField.getFormatterFactory();
    if (factory != null) {
        AbstractFormatter formatter = factory.getFormatter(_dateField);
        if (formatter instanceof JXDatePickerFormatter) {
            return ((JXDatePickerFormatter)formatter).getFormats();
        }
    }
    return null;
}

/**
 * Set the currently selected date.
 *
 * @param date date
 */
public void setDate(Date date) {
    _dateField.setValue(date);
}

/**
 * Set the currently selected date.
 *
 * @param millis milliseconds
 */
public void setDateInMillis(long millis) {
    _dateField.setValue(new Date(millis));
}

/**
 * Returns the currently selected date.
 *
 * @return Date
 */
public Date getDate() {
    return (Date)_dateField.getValue();
}

/**
 * Returns the currently selected date in milliseconds.
 * NOTE(review): throws NullPointerException when the editor holds no value --
 * confirm callers always set a date first.
 *
 * @return the date in milliseconds
 */
public long getDateInMillis() {
    return ((Date)_dateField.getValue()).getTime();
}

/**
 * Return the <code>JXMonthView</code> used in the popup to
 * select dates from.
 *
 * @return the month view component
 */
public JXMonthView getMonthView() {
    return _monthView;
}

/** Replaces the month view; the popup is discarded and lazily rebuilt around the new view. */
public void setMonthView(JXMonthView monthView) {
    _monthView = monthView;
    _popup = null;
}

/**
 * Set the date the link will use and the string defining a MessageFormat
 * to format the link. If no valid date is in the editor when the popup
 * is displayed the popup will focus on the month the linkDate is in. Calling
 * this method will replace the currently installed linkPanel and install
 * a new one with the requested date and format.
*
 * @param linkDate Date in milliseconds
 * @param linkFormatString String used to format the link
 * @see java.text.MessageFormat
 */
public void setLinkDate(long linkDate, String linkFormatString) {
    _linkDate = linkDate;
    _linkFormat = new MessageFormat(linkFormatString);
    setLinkPanel(new TodayPanel());
}

/**
 * Return the panel that is used at the bottom of the popup. The default
 * implementation shows a link that displays the current month.
 *
 * @return The currently installed link panel
 */
public JPanel getLinkPanel() {
    return _linkPanel;
}

/**
 * Set the panel that will be used at the bottom of the popup.
 *
 * @param linkPanel The new panel to install in the popup
 */
public void setLinkPanel(JPanel linkPanel) {
    // If the popup is null we haven't shown it yet.
    if (_popup != null) {
        _popup.remove(_linkPanel);
        _popup.add(linkPanel, BorderLayout.SOUTH);
    }
    _linkPanel = linkPanel;
}

/**
 * Returns the formatted text field used to edit the date selection.
 *
 * @return the formatted text field
 */
public JFormattedTextField getEditor() {
    return _dateField;
}

/**
 * Creates the editor used to edit the date selection. Subclasses should
 * override this method if they want to substitute in their own editor.
 *
 * @return an instance of a JFormattedTextField
 */
protected JFormattedTextField createEditor() {
    return new JFormattedTextField(new JXDatePickerFormatter());
}

/**
 * Returns true if the current value being edited is valid.
 *
 * @return true if the current value being edited is valid.
 */
public boolean isEditValid() {
    return _dateField.isEditValid();
}

/**
 * Forces the current value to be taken from the AbstractFormatter and
 * set as the current value. This has no effect if there is no current
 * AbstractFormatter installed.
 *
 * @throws ParseException if the current text cannot be parsed by the formatter
 */
public void commitEdit() throws ParseException {
    _dateField.commitEdit();
}

/**
 * Enables or disables the date picker and all its subcomponents.
*
 * @param value true to enable, false to disable
 */
public void setEnabled(boolean value) {
    // No-op when the state is unchanged, avoiding redundant repaints/events.
    if (isEnabled() == value) {
        return;
    }

    super.setEnabled(value);
    _dateField.setEnabled(value);
    _popupButton.setEnabled(value);
}

/**
 * Returns the string currently used to identify fired ActionEvents.
 *
 * @return String The string used for identifying ActionEvents.
 */
public String getActionCommand() {
    return _actionCommand;
}

/**
 * Sets the string used to identify fired ActionEvents.
 *
 * @param actionCommand The string used for identifying ActionEvents.
 */
public void setActionCommand(String actionCommand) {
    _actionCommand = actionCommand;
}

/**
 * Adds an ActionListener.
 * <p>
 * The ActionListener will receive an ActionEvent when a selection has
 * been made.
 *
 * @param l The ActionListener that is to be notified
 */
public void addActionListener(ActionListener l) {
    listenerList.add(ActionListener.class, l);
}

/**
 * Removes an ActionListener.
 *
 * @param l The action listener to remove.
 */
public void removeActionListener(ActionListener l) {
    listenerList.remove(ActionListener.class, l);
}

/**
 * Fires an ActionEvent to all listeners.
*/
protected void fireActionPerformed() {
    Object[] listeners = listenerList.getListenerList();
    ActionEvent e = null;

    // Walk the listener list pairwise (type, listener), last-registered first.
    for (int i = listeners.length - 2; i >= 0; i -=2) {
        if (listeners[i] == ActionListener.class) {
            if (e == null) {
                // Lazily create the event only when at least one listener exists.
                e = new ActionEvent(JXDatePicker.this,
                        ActionEvent.ACTION_PERFORMED,
                        _actionCommand);
            }
            ((ActionListener)listeners[i + 1]).actionPerformed(e);
        }
    }
}

/**
 * {@inheritDoc}
 */
public void doLayout() {
    int width = getWidth();
    int height = getHeight();

    Insets insets = getInsets();
    // NOTE(review): the y coordinate uses insets.bottom rather than insets.top, and
    // the component heights ignore the insets entirely -- looks suspicious, confirm
    // this is the intended layout before changing it.
    _dateField.setBounds(insets.left,
            insets.bottom,
            width - _popupButtonWidth,
            height);
    _popupButton.setBounds(width - _popupButtonWidth + insets.left,
            insets.bottom,
            _popupButtonWidth,
            height);
}

/**
 * {@inheritDoc}
 */
public Dimension getMinimumSize() {
    return getPreferredSize();
}

/**
 * {@inheritDoc}
 */
public Dimension getPreferredSize() {
    Dimension dim = _dateField.getPreferredSize();
    dim.width += _popupButton.getPreferredSize().width;
    Insets insets = getInsets();
    dim.width += insets.left + insets.right;
    dim.height += insets.top + insets.bottom;
    return dim;
}

/**
 * Action used to toggle the visibility of the month-view popup.
 * This action is used by the keyboard bindings.
 */
private class TogglePopupAction extends AbstractAction {
    public TogglePopupAction() {
        super("TogglePopup");
    }

    public void actionPerformed(ActionEvent ev) {
        _handler.toggleShowPopup();
    }
}

/**
 * Action used to commit the current value in the JFormattedTextField.
 * This action is used by the keyboard bindings.
 */
private class CommitEditAction extends AbstractAction {
    public CommitEditAction() {
        super("CommitEditPopup");
    }

    public void actionPerformed(ActionEvent ev) {
        try {
            // Commit the current value.
            _dateField.commitEdit();

            // Reformat the value according to the formatter.
_dateField.setValue(_dateField.getValue());
            fireActionPerformed();
        } catch (java.text.ParseException ex) {
            // NOTE(review): parse failures are silently swallowed; the editor keeps
            // the previous committed value and no event is fired.
        }
    }
}

private class Handler implements MouseListener, MouseMotionListener {
    // True while a press on the popup button is being dragged into the month view.
    private boolean _forwardReleaseEvent = false;

    public void mouseClicked(MouseEvent ev) {
    }

    public void mousePressed(MouseEvent ev) {
        if (!isEnabled()) {
            return;
        }

        // Commit a valid pending edit before toggling the popup.
        if (_dateField.isEditValid()) {
            try {
                _dateField.commitEdit();
            } catch (java.text.ParseException ex) {
                // ignored: the popup still opens with the last committed value
            }
        }
        toggleShowPopup();
    }

    public void mouseReleased(MouseEvent ev) {
        if (!isEnabled()) {
            return;
        }

        // Retarget mouse event to the month view.
        if (_forwardReleaseEvent) {
            ev = SwingUtilities.convertMouseEvent(_popupButton, ev, _monthView);
            _monthView.dispatchEvent(ev);
            _forwardReleaseEvent = false;
        }
    }

    public void mouseEntered(MouseEvent ev) {
    }

    public void mouseExited(MouseEvent ev) {
    }

    public void mouseDragged(MouseEvent ev) {
        if (!isEnabled()) {
            return;
        }

        _forwardReleaseEvent = true;

        if (!_popup.isShowing()) {
            return;
        }

        // Retarget mouse event to the month view.
        ev = SwingUtilities.convertMouseEvent(_popupButton, ev, _monthView);
        _monthView.dispatchEvent(ev);
    }

    public void mouseMoved(MouseEvent ev) {
    }

    /** Lazily creates the popup on first use, then shows or hides it. */
    public void toggleShowPopup() {
        if (_popup == null) {
            _popup = new JXDatePickerPopup();
        }
        if (!_popup.isVisible()) {
            if (_dateField.getValue() == null) {
                // No date in the editor: seed it with the link date.
                _dateField.setValue(new Date(_linkDate));
            }
            DateSpan span =
                    new DateSpan((java.util.Date)_dateField.getValue(),
                            (java.util.Date)_dateField.getValue());
            _monthView.setSelectedDateSpan(span);
            _monthView.ensureDateVisible(
                    ((Date)_dateField.getValue()).getTime());
            _popup.show(JXDatePicker.this,
                    0, JXDatePicker.this.getHeight());
        } else {
            _popup.setVisible(false);
        }
    }
}

/**
 * Popup component that shows a JXMonthView component along with controlling
 * buttons to allow traversal of the months. Upon selection of a date the
 * popup will automatically hide itself and enter the selection into the
 * editable field of the JXDatePicker.
*/
protected class JXDatePickerPopup extends JPopupMenu implements ActionListener {
    public JXDatePickerPopup() {
        // Route month-view selections back through actionPerformed below.
        _monthView.setActionCommand("MONTH_VIEW");
        _monthView.addActionListener(this);

        setLayout(new BorderLayout());
        add(_monthView, BorderLayout.CENTER);
        if (_linkPanel != null) {
            add(_linkPanel, BorderLayout.SOUTH);
        }
    }

    public void actionPerformed(ActionEvent ev) {
        String command = ev.getActionCommand();
        if ("MONTH_VIEW".equals(command)) {
            // Push the selected date into the editor, hide the popup, notify listeners.
            DateSpan span = _monthView.getSelectedDateSpan();
            _dateField.setValue(span.getStartAsDate());
            _popup.setVisible(false);
            fireActionPerformed();
        }
    }
}

/** Default link panel: a gradient strip with a "today" hyperlink that scrolls the month view. */
private final class TodayPanel extends JXPanel {
    TodayPanel() {
        super(new FlowLayout());
        setDrawGradient(true);
        setGradientPaint(new GradientPaint(0, 0, new Color(238, 238, 238), 0, 1,
                Color.WHITE));
        JXHyperlink todayLink = new JXHyperlink(new TodayAction());
        Color textColor = new Color(16, 66, 104);
        todayLink.setUnclickedColor(textColor);
        todayLink.setClickedColor(textColor);
        add(todayLink);
    }

    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        // Two-pixel separator line along the top edge of the panel.
        g.setColor(new Color(187, 187, 187));
        g.drawLine(0, 0, getWidth(), 0);
        g.setColor(new Color(221, 221, 221));
        g.drawLine(0, 1, getWidth(), 1);
    }

    private final class TodayAction extends AbstractAction {
        TodayAction() {
            super(_linkFormat.format(new Object[] { new Date(_linkDate) }));
        }

        public void actionPerformed(ActionEvent ae) {
            // Scroll the month view to the link date; does not change the selection.
            DateSpan span = new DateSpan(_linkDate, _linkDate);
            _monthView.ensureDateVisible(span.getStart());
        }
    }
}

/**
 * Default formatter for the JXDatePicker component. This factory
 * creates and returns a formatter that can handle a variety of date
 * formats.
*/
static class JXDatePickerFormatter extends
        JFormattedTextField.AbstractFormatter {
    // Formats tried in order when parsing; _formats[0] is also used for display.
    private DateFormat _formats[] = null;

    public JXDatePickerFormatter() {
        _formats = new DateFormat[3];
        String format = UIManager.getString("JXDatePicker.longFormat");
        if (format == null) {
            format = "EEE MM/dd/yyyy";
        }
        _formats[0] = new SimpleDateFormat(format);

        format = UIManager.getString("JXDatePicker.mediumFormat");
        if (format == null) {
            format = "MM/dd/yyyy";
        }
        _formats[1] = new SimpleDateFormat(format);

        format = UIManager.getString("JXDatePicker.shortFormat");
        if (format == null) {
            format = "MM/dd";
        }
        _formats[2] = new SimpleDateFormat(format);
    }

    public JXDatePickerFormatter(DateFormat formats[]) {
        _formats = formats;
    }

    public DateFormat[] getFormats() {
        return _formats;
    }

    /**
     * {@inheritDoc}
     */
    public Object stringToValue(String text) throws ParseException {
        Object result = null;
        ParseException pex = null;

        if (text == null || text.trim().length() == 0) {
            return null;
        }

        // Try each installed format in turn until one parses; keep the last
        // failure so it can be rethrown when none of them succeed.
        for (DateFormat _format : _formats) {
            try {
                result = (_format).parse(text);
                pex = null;
                break;
            } catch (ParseException ex) {
                pex = ex;
            }
        }

        if (pex != null) {
            throw pex;
        }

        return result;
    }

    /**
     * {@inheritDoc}
     */
    public String valueToString(Object value) throws ParseException {
        if (value != null) {
            return _formats[0].format(value);
        }
        return null;
    }
}
}
package org.priha.util;

import java.io.*;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileChannel.MapMode;
import java.util.Map;
import java.util.Properties;

/**
 * Provides a very fast way of saving and loading Property files. The format
 * is almost the same as with regular ones, with few key differences:
 * <ul>
 * <li>The file encoding is UTF-8
 * <li>Unicode entities (\\u) are not recognized
 * <li>Line continuations are not supported (line ends with a backslash)
 * <li>Always uses \\n for ending the line, on all architectures.
 * </ul>
 * NOTE(review): keys and values are written verbatim — a key containing '='
 * or a value containing '\n' cannot round-trip; confirm callers never store such data.
 */
public class FastPropertyStore
{
    /**
     * Writes the given properties to the stream, one {@code key=value} pair
     * per line, UTF-8 encoded.  The stream is flushed but not closed.
     *
     * @param out   stream to write to
     * @param props properties to serialize
     * @throws IOException if writing fails
     */
    public static void store(OutputStream out, Properties props) throws IOException
    {
        BufferedWriter o = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));

        for (Map.Entry<Object, Object> e : props.entrySet())
        {
            String key = (String) e.getKey();
            String value = (String) e.getValue();

            o.write(key);
            o.write("=");
            o.write(value);
            o.write("\n");
        }

        o.flush();
    }

    /**
     * Loads properties previously written by {@link #store(OutputStream, Properties)}.
     * Blank lines and lines starting with '#' are skipped.  The stream is not closed.
     *
     * @param in stream positioned at the start of the property data
     * @return the parsed properties
     * @throws IOException if reading fails or a non-comment line lacks an '='
     */
    public static Properties load(FileInputStream in) throws IOException
    {
        Properties props = new Properties();

        FileChannel fc = in.getChannel();

        ByteBuffer ba = ByteBuffer.allocate((int) fc.size());

        // BUG FIX: a single FileChannel.read() is not guaranteed to fill the
        // buffer; loop until the buffer is full or EOF is reached.
        while (ba.hasRemaining() && fc.read(ba) >= 0)
        {
            // keep reading
        }

        // Decode only the bytes actually read, in case the channel hit EOF early.
        String c = new String(ba.array(), 0, ba.position(), "UTF-8");

        BufferedStringReader i = new BufferedStringReader(c);

        String line;

        while ((line = i.readLine()) != null)
        {
            line = line.trim();

            // Skip empty lines and comments.
            if (line.length() == 0 || line.charAt(0) == '#') continue;

            int eqSign = line.indexOf('=');

            if (eqSign == -1) throw new IOException("Illegal format in property file");

            String key = line.substring(0, eqSign);
            String val = line.substring(eqSign + 1);

            props.put(key, val);
        }

        return props;
    }

    /**
     * This is a very fast String reader which implements the readLine()
     * method by returning substrings of the given string. EOL is
     * determined by the \\n character.
     */
    private static class BufferedStringReader
    {
        String m_string;
        int    m_pos;     // index of the first character of the next line

        public BufferedStringReader(String s)
        {
            m_string = s;
        }

        /**
         * Returns the next line (without its trailing '\n'), or null once
         * the end of the string has been reached.
         */
        public String readLine()
        {
            if (m_pos >= m_string.length()) return null;

            String result;
            int newline = m_string.indexOf('\n', m_pos);

            if (newline >= 0)
            {
                result = m_string.substring(m_pos, newline);
                m_pos = newline + 1;
            }
            else
            {
                // Last line has no terminator; move past the end so the next
                // call returns null.
                result = m_string.substring(m_pos);
                m_pos = m_string.length() + 1;
            }

            return result;
        }
    }
}
import com.cubethree.GPIOLib.*;

import java.util.Scanner;

/**
 * Interactive console harness for exercising a motor driver wired to PWM pin 17
 * and direction pins 22 (A) and 21 (B).  Commands read from stdin:
 * "pwm &lt;duty&gt;", "a on|off", "b on|off", "quit".  GPIO resources are
 * released in the finally block on any exit path.
 */
class MotorTest {
    public static void main( String args[] ){
        try{
            PWMPin pwm = GPIOLib.allocatePWM( 17 );
            GPIOPin a = GPIOLib.allocateGPIO( 22 );
            GPIOPin b = GPIOLib.allocateGPIO( 21 );
            boolean aValue = false;
            boolean bValue = false;
            double pwmValue = 0;

            // Start from a known-safe state: no drive on either direction pin, zero duty.
            pwm.setDuty( 0 );
            a.setDirection( GPIOPinDirection.OUT );
            b.setDirection( GPIOPinDirection.OUT );
            a.setValue( false );
            b.setValue( false );

            System.out.format( "Current Value: A=%b B=%b PWM=%f\n", aValue, bValue, pwmValue );

            Scanner s = new Scanner( System.in );
            // BUG FIX: Scanner.nextLine() never returns null — it throws
            // NoSuchElementException at end of input, so the old
            // `while((line = s.nextLine()) != null)` loop ended every normal
            // session with a stack trace.  Guard with hasNextLine() instead.
            while( s.hasNextLine() ){
                String line = s.nextLine();
                String parts[] = line.split(" ");
                if( parts[0].equals( "quit" ) ){
                    return; // finally block still runs GPIOLib.cleanup()
                }
                else if( parts[0].equals( "pwm" ) ){
                    pwm.setDuty( (pwmValue = Double.parseDouble( parts[1] )) );
                }
                else if( parts[0].equals( "a" ) ){
                    if( parts[1].equals( "on" ) ){
                        a.setValue( true );
                        aValue = true;
                    }
                    else{
                        a.setValue( false );
                        aValue = false;
                    }
                }else if( parts[0].equals( "b" ) ){
                    if( parts[1].equals( "on" ) ){
                        b.setValue( true );
                        bValue = true;
                    }
                    else{
                        b.setValue( false );
                        bValue = false;
                    }
                }

                System.out.format( "Current Value: A=%b B=%b PWM=%f\n", aValue, bValue, pwmValue );
            }
        }catch( Exception e ){
            e.printStackTrace();
        }finally{
            GPIOLib.cleanup();
        }
    }
}
package org.verapdf.gui;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;

import javax.swing.JOptionPane;
import javax.swing.SwingWorker;
import javax.xml.bind.JAXBException;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.stream.XMLStreamException;
import javax.xml.transform.TransformerException;

import org.apache.log4j.Logger;
import org.verapdf.core.ValidationException;
import org.verapdf.features.pb.PBFeatureParser;
import org.verapdf.features.tools.FeaturesCollection;
import org.verapdf.gui.config.Config;
import org.verapdf.gui.tools.GUIConstants;
import org.verapdf.metadata.fixer.MetadataFixer;
import org.verapdf.metadata.fixer.MetadataFixerResultImpl;
import org.verapdf.metadata.fixer.impl.pb.FixerConfigImpl;
import org.verapdf.metadata.fixer.utils.FileGenerator;
import org.verapdf.metadata.fixer.utils.FixerConfig;
import org.verapdf.model.ModelLoader;
import org.verapdf.pdfa.MetadataFixerResult;
import org.verapdf.pdfa.results.ValidationResult;
import org.verapdf.pdfa.results.ValidationResults;
import org.verapdf.pdfa.validation.Profiles;
import org.verapdf.pdfa.validation.ValidationProfile;
import org.verapdf.pdfa.validation.Validator;
import org.verapdf.report.HTMLReport;
import org.verapdf.report.XMLReport;
import org.xml.sax.SAXException;

/**
 * Validates PDF in a new thread.
* * @author Maksim Bezrukov */ class ValidateWorker extends SwingWorker<ValidationResult, Integer> { private static final Logger LOGGER = Logger.getLogger(ValidateWorker.class); private File pdf; private ValidationProfile profile; private CheckerPanel parent; private Config settings; private File xmlReport = null; private File htmlReport = null; private int flag; private boolean isFixMetadata; private long startTimeOfValidation; private long endTimeOfValidation; ValidateWorker(CheckerPanel parent, File pdf, ValidationProfile profile, Config settings, int flag, boolean isFixMetadata) { if (pdf == null || !pdf.isFile() || !pdf.canRead()) { throw new IllegalArgumentException( "PDF file doesn't exist or it can not be read"); } if (profile == null) { throw new IllegalArgumentException( "Profile doesn't exist or it can not be read"); } this.parent = parent; this.pdf = pdf; this.profile = profile; this.settings = settings; this.flag = flag; this.isFixMetadata = isFixMetadata; } @Override protected ValidationResult doInBackground() { xmlReport = null; htmlReport = null; ValidationResult info = null; FeaturesCollection collection = null; startTimeOfValidation = System.currentTimeMillis(); try (ModelLoader loader = new ModelLoader(new FileInputStream( this.pdf.getPath()))) { if ((flag & 1) == 1) { info = runValidator(loader.getRoot()); if (this.isFixMetadata) { this.fixMetadata(info, loader); } } if ((flag & (1 << 1)) == (1 << 1)) { try { collection = PBFeatureParser.getFeaturesCollection(loader .getPDDocument()); } catch (Exception e) { JOptionPane.showMessageDialog(this.parent, "Some error in creating features collection.", GUIConstants.ERROR, JOptionPane.ERROR_MESSAGE); LOGGER.error("Exception in creating features collection: ", e); } } endTimeOfValidation = System.currentTimeMillis(); writeReports(info, collection); } catch (IOException e) { this.parent .errorInValidatingOccur(GUIConstants.ERROR_IN_PARSING, e); } return info; } private void fixMetadata(ValidationResult 
info, ModelLoader loader) throws IOException { FixerConfig fixerConfig = FixerConfigImpl.getFixerConfig( loader.getPDDocument(), info); Path path = settings.getFixMetadataPathFolder(); File tempFile = File.createTempFile("fixedTempFile", ".pdf"); tempFile.deleteOnExit(); OutputStream tempOutput = new BufferedOutputStream( new FileOutputStream(tempFile)); MetadataFixerResult fixerResult = MetadataFixer.fixMetadata(tempOutput, fixerConfig); if (fixerResult.getRepairStatus().equals( MetadataFixerResult.RepairStatus.SUCCESS) || fixerResult.getRepairStatus().equals( MetadataFixerResult.RepairStatus.ID_REMOVED)) { File resFile; boolean flag = true; while (flag) { if (!path.toString().trim().isEmpty()) { resFile = FileGenerator.createOutputFile(settings .getFixMetadataPathFolder().toFile(), this.pdf .getName(), settings.getMetadataFixerPrefix()); } else { resFile = FileGenerator.createOutputFile(this.pdf, settings.getMetadataFixerPrefix()); } try { Files.copy(tempFile.toPath(), resFile.toPath()); flag = false; } catch (FileAlreadyExistsException e) { LOGGER.error(e); } } } } private ValidationResult runValidator( org.verapdf.model.baselayer.Object root) { try { return Validator.validate(this.profile, root, false); } catch (ValidationException e) { this.parent.errorInValidatingOccur( GUIConstants.ERROR_IN_VALIDATING, e); } return null; } @Override protected void done() { this.parent.validationEnded(this.xmlReport, this.htmlReport); } private void writeReports(ValidationResult result, FeaturesCollection collection) { try { xmlReport = File.createTempFile("veraPDF-tempXMLReport", ".xml"); xmlReport.deleteOnExit(); ValidationResults.toXml(result, new FileOutputStream(htmlReport), Boolean.TRUE); try { htmlReport = File.createTempFile("veraPDF-tempHTMLReport", ".html"); htmlReport.deleteOnExit(); HTMLReport.writeHTMLReport(result, new FileOutputStream(htmlReport)); } catch (IOException | TransformerException e) { JOptionPane.showMessageDialog(this.parent, 
GUIConstants.ERROR_IN_SAVING_HTML_REPORT, GUIConstants.ERROR, JOptionPane.ERROR_MESSAGE); LOGGER.error("Exception saving the HTML report", e); htmlReport = null; } } catch (IOException | JAXBException e) { JOptionPane.showMessageDialog(this.parent, GUIConstants.ERROR_IN_SAVING_XML_REPORT, GUIConstants.ERROR, JOptionPane.ERROR_MESSAGE); LOGGER.error("Exception saving the XML report", e); xmlReport = null; } } }
package water.fvec;

import org.junit.BeforeClass;
import org.junit.Test;
import water.TestUtil;
import water.util.ArrayUtils;
import water.util.Log;
import water.util.PrettyPrint;

/**
 * Micro-benchmark comparing element access through a raw double[][] against
 * access through compressed Chunk.atd().  Results (sums, byte sizes, timings)
 * are written to the H2O log; nothing is asserted, so this "test" is purely
 * informational.  Kept as plain loops on purpose — restructuring them would
 * change what is being measured.
 */
public class ChunkSpeedTest extends TestUtil {
  @BeforeClass() public static void setup() { stall_till_cloudsize(1); }

  @Test public void run() {
    final int cols = 100;     // number of columns / chunks
    final int rows = 100000;  // values per column
    final int rep = 10;       // benchmark repetitions (first 10% treated as warm-up)
    double[][] raw = new double[cols][rows];
    // Fill columns with patterns chosen to compress into different Chunk types.
    for (int j=0; j<cols; ++j) {
      for (int i = 0; i < rows; ++i) {
//        switch (j%1) { //just do 1 byte chunks
//        switch (j%2) { //just do 1/2 byte chunks
        switch (j%3) { // do all 3 chunk types
          case 0:
            raw[j][i] = i % 200; //C1NChunk - 1 byte integer
            break;
          case 1:
            raw[j][i] = i % 500; //C2Chunk - 2 byte integer
            break;
          case 2:
            raw[j][i] = i == 17 ? 1 : 0; //CX0Chunk - sparse
            break;
        }
      }
    }
    // Compress each column and log which Chunk implementation was chosen.
    Chunk[] chunks = new Chunk[cols];
    for (int j=0; j<cols; ++j) {
      chunks[j] = new NewChunk(raw[j]).compress();
      Log.info("Column " + j + " compressed into: " + chunks[j].getClass().toString());
    }

    // raw data: baseline — sum every element straight from the double[][].
    {
      long start = 0;
      double sum = 0;
      for (int r = 0; r < rep; ++r) {
        // Restart the clock after the warm-up repetition (r == rep/10).
        if (r==rep/10) start = System.currentTimeMillis();
        for (int j=0; j<cols; ++j) {
          for (int i = 0; i < rows; ++i) {
            sum += raw[j][i];
          }
        }
      }
      long done = System.currentTimeMillis();
      // Sum is logged so the JIT cannot eliminate the loop as dead code.
      Log.info("Sum: " + sum);
      Log.info("Data size: " + PrettyPrint.bytes(rows * cols * 8));
      Log.info("Time to access raw double[]: " + PrettyPrint.msecs(done - start, true));
      Log.info("");
    }

    // chunks: same sweep through the compressed representation via atd().
    {
      long start = 0;
      double sum = 0;
      for (int r = 0; r < rep; ++r) {
        if (r==rep/10) start = System.currentTimeMillis();
        for (int j=0; j<cols; ++j) {
          for (int i = 0; i < rows; ++i) {
            sum += chunks[j].atd(i);
          }
        }
      }
      long done = System.currentTimeMillis();
      Log.info("Sum: " + sum);
      // Report the actual compressed footprint for comparison with the raw 8 bytes/value.
      long siz = 0;
      for (int j=0; j<cols; ++j) {
        siz += chunks[j].byteSize();
      }
      Log.info("Data size: " + PrettyPrint.bytes(siz));
      Log.info("Time to access via atd(): " + PrettyPrint.msecs(done - start, true));
      Log.info("");
    }
  }
}
package com.cordovaplugincamerapreview;

import android.app.Activity;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.Manifest;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.TypedValue;
import android.view.ViewGroup;
import android.widget.FrameLayout;

import org.apache.cordova.PermissionHelper;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;

/**
 * Cordova plugin exposing a native camera preview rendered in a FrameLayout
 * layered relative to the Cordova WebView.  JavaScript actions are dispatched
 * through {@link #execute}; the preview itself lives in a {@code CameraActivity}
 * fragment (project class, not visible in this chunk).
 */
public class CameraPreview extends CordovaPlugin implements CameraActivity.CameraPreviewListener {

  private final String TAG = "CameraPreview";
  // Action names as invoked from the JavaScript side.
  private final String setOnPictureTakenHandlerAction = "setOnPictureTakenHandler";
  private final String setColorEffectAction = "setColorEffect";
  private final String startCameraAction = "startCamera";
  private final String stopCameraAction = "stopCamera";
  private final String switchCameraAction = "switchCamera";
  private final String setFlashModeAction = "setFlashMode";
  private final String takePictureAction = "takePicture";
  private final String showCameraAction = "showCamera";
  private final String hideCameraAction = "hideCamera";

  private final String permission = Manifest.permission.CAMERA;
  private final int permissionsReqId = 0;
  // Saved action context while waiting on the runtime CAMERA permission dialog.
  private CallbackContext execCallback;
  private JSONArray execArgs;

  private CameraActivity fragment;
  private CallbackContext takePictureCallbackContext;
  private FrameLayout containerView;

  public CameraPreview(){
    super();
    Log.d(TAG, "Constructing");
  }

  /**
   * Dispatches a JavaScript action to the matching private handler.
   * startCamera is special-cased: if CAMERA permission is missing the request
   * is deferred until {@link #onRequestPermissionResult}.
   *
   * @return true if the action was recognized and handled
   */
  @Override
  public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
    if (setOnPictureTakenHandlerAction.equals(action)){
      return setOnPictureTakenHandler(args, callbackContext);
    } else if (startCameraAction.equals(action)){
      if (PermissionHelper.hasPermission(permission)) {
        return startCamera(args, callbackContext);
      } else {
        // Stash the call and ask for permission; resumed in onRequestPermissionResult.
        execCallback = callbackContext;
        execArgs = args;
        PermissionHelper.requestPermission(this, permissionsReqId, permission);
      }
    } else if (takePictureAction.equals(action)){
      return takePicture(args, callbackContext);
    } else if (setColorEffectAction.equals(action)){
      return setColorEffect(args, callbackContext);
    } else if (stopCameraAction.equals(action)){
      return stopCamera(args, callbackContext);
    } else if (hideCameraAction.equals(action)){
      return hideCamera(args, callbackContext);
    } else if (showCameraAction.equals(action)){
      return showCamera(args, callbackContext);
    } else if (switchCameraAction.equals(action)){
      return switchCamera(args, callbackContext);
    } else if (setFlashModeAction.equals(action)){
      return setFlashMode(args, callbackContext);
    }
    return false;
  }

  /**
   * Creates the camera fragment and, on the UI thread, builds/attaches the
   * container view and positions the preview.
   * Expected args: [x, y, width, height, defaultCamera, tapToTakePicture,
   * dragEnabled, toBack, alpha] — assumed from usage; confirm against the JS wrapper.
   * Returns false if a preview is already running.
   */
  private boolean startCamera(final JSONArray args, CallbackContext callbackContext) {
    if(fragment != null){
      return false;
    }
    fragment = new CameraActivity();
    fragment.setEventListener(this);

    cordova.getActivity().runOnUiThread(new Runnable() {
      @Override
      public void run() {
        try {
          // Incoming coordinates are in density-independent pixels; convert to raw px.
          DisplayMetrics metrics = cordova.getActivity().getResources().getDisplayMetrics();
          int x = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, args.getInt(0), metrics);
          int y = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, args.getInt(1), metrics);
          int width = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, args.getInt(2), metrics);
          int height = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, args.getInt(3), metrics);
          String defaultCamera = args.getString(4);
          Boolean tapToTakePicture = args.getBoolean(5);
          Boolean dragEnabled = args.getBoolean(6);
          Boolean toBack = args.getBoolean(7);
          fragment.defaultCamera = defaultCamera;
          fragment.tapToTakePicture = tapToTakePicture;
          fragment.dragEnabled = dragEnabled;
          fragment.setRect(x, y, width, height);

          //create or update the layout params for the container view
          Activity activity = cordova.getActivity();
          if(containerView == null){
            containerView = new FrameLayout(activity.getApplicationContext());
            // Look up a view id we inject to ensure there are no conflicts
            int cameraViewId = activity.getResources().getIdentifier(activity.getClass().getPackage().getName() + ":id/camera_container", null, null);
            containerView.setId(cameraViewId);
          }
          // Re-parent the container next to the WebView if it is not there already.
          if (containerView.getParent() != webView.getParent()) {
            if (containerView.getParent() != null) {
              ((ViewGroup) containerView.getParent()).removeView(containerView);
            }
            FrameLayout.LayoutParams containerLayoutParams = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
            ((ViewGroup) webView.getParent()).addView(containerView, containerLayoutParams);
          }
          // display camera below the webview (transparent WebView over the preview)
          if(toBack){
            webView.setBackgroundColor(0x00000000);
            ((ViewGroup)webView).bringToFront();
          }
          else{
            //set camera back to front
            containerView.setAlpha(Float.parseFloat(args.getString(8)));
            containerView.bringToFront();
          }

          //add the fragment to the container
          FragmentManager fragmentManager = cordova.getActivity().getFragmentManager();
          FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
          fragmentTransaction.add(containerView.getId(), fragment);
          fragmentTransaction.commit();
        } catch(Exception e){
          e.printStackTrace();
        }
      }
    });
    return true;
  }

  /**
   * Triggers a capture on the fragment; an immediate keep-alive OK result is
   * sent, and the picture itself arrives later via {@link #onPictureTaken}.
   * Expected args: [maxWidth, maxHeight].
   */
  private boolean takePicture(final JSONArray args, CallbackContext callbackContext) {
    if(fragment == null){
      return false;
    }
    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
    pluginResult.setKeepCallback(true);
    callbackContext.sendPluginResult(pluginResult);
    try {
      double maxWidth = args.getDouble(0);
      double maxHeight = args.getDouble(1);
      fragment.takePicture(maxWidth, maxHeight);
    } catch(Exception e){
      e.printStackTrace();
      return false;
    }
    return true;
  }

  /**
   * CameraPreviewListener callback: forwards the captured file path to the
   * JavaScript handler registered via setOnPictureTakenHandler.
   * NOTE(review): assumes a handler was registered first — takePictureCallbackContext
   * is null otherwise; confirm the JS API enforces that ordering.
   */
  public void onPictureTaken(String originalPicturePath){
    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, originalPicturePath);
    // Keep the callback alive so subsequent captures reuse the same handler.
    pluginResult.setKeepCallback(true);
    takePictureCallbackContext.sendPluginResult(pluginResult);
  }

  /**
   * Applies a named color effect (args[0]) to the camera parameters.
   * Unrecognized names leave the parameters unchanged but still return true.
   */
  private boolean setColorEffect(final JSONArray args, CallbackContext callbackContext) {
    if(fragment == null){
      return false;
    }
    Camera camera = fragment.getCamera();
    if (camera == null){
      return true;
    }

    Camera.Parameters params = camera.getParameters();

    try {
      String effect = args.getString(0);

      if (effect.equals("aqua")) {
        params.setColorEffect(Camera.Parameters.EFFECT_AQUA);
      } else if (effect.equals("blackboard")) {
        params.setColorEffect(Camera.Parameters.EFFECT_BLACKBOARD);
      } else if (effect.equals("mono")) {
        params.setColorEffect(Camera.Parameters.EFFECT_MONO);
      } else if (effect.equals("negative")) {
        params.setColorEffect(Camera.Parameters.EFFECT_NEGATIVE);
      } else if (effect.equals("none")) {
        params.setColorEffect(Camera.Parameters.EFFECT_NONE);
      } else if (effect.equals("posterize")) {
        params.setColorEffect(Camera.Parameters.EFFECT_POSTERIZE);
      } else if (effect.equals("sepia")) {
        params.setColorEffect(Camera.Parameters.EFFECT_SEPIA);
      } else if (effect.equals("solarize")) {
        params.setColorEffect(Camera.Parameters.EFFECT_SOLARIZE);
      } else if (effect.equals("whiteboard")) {
        params.setColorEffect(Camera.Parameters.EFFECT_WHITEBOARD);
      }

      fragment.setCameraParameters(params);
      return true;
    } catch(Exception e) {
      e.printStackTrace();
      return false;
    }
  }

  /** Detaches and discards the camera fragment; false if none is running. */
  private boolean stopCamera(final JSONArray args, CallbackContext callbackContext) {
    if(fragment == null){
      return false;
    }

    FragmentManager fragmentManager = cordova.getActivity().getFragmentManager();
    FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
    fragmentTransaction.remove(fragment);
    fragmentTransaction.commit();
    fragment = null;

    return true;
  }

  /** Makes a previously hidden preview fragment visible again. */
  private boolean showCamera(final JSONArray args, CallbackContext callbackContext) {
    if(fragment == null){
      return false;
    }

    FragmentManager fragmentManager = cordova.getActivity().getFragmentManager();
    FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
    fragmentTransaction.show(fragment);
    fragmentTransaction.commit();

    return true;
  }

  /** Hides the preview fragment without tearing it down. */
  private boolean hideCamera(final JSONArray args, CallbackContext callbackContext) {
    if(fragment == null) {
      return false;
    }

    FragmentManager fragmentManager = cordova.getActivity().getFragmentManager();
    FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
    fragmentTransaction.hide(fragment);
    fragmentTransaction.commit();

    return true;
  }

  /** Toggles between front and back cameras via the fragment. */
  private boolean switchCamera(final JSONArray args, CallbackContext callbackContext) {
    if(fragment == null){
      return false;
    }
    fragment.switchCamera();
    return true;
  }

  /** Sets the flash mode from args[0]; mode codes are defined by CameraActivity. */
  private boolean setFlashMode(final JSONArray args, CallbackContext callbackContext) {
    if(fragment == null){
      return false;
    }
    try {
      fragment.setFlashMode(args.getInt(0));
    } catch(Exception e){
      e.printStackTrace();
      return false;
    }
    return true;
  }

  /** Registers the long-lived JavaScript callback that receives picture paths. */
  private boolean setOnPictureTakenHandler(JSONArray args, CallbackContext callbackContext) {
    Log.d(TAG, "setOnPictureTakenHandler");
    takePictureCallbackContext = callbackContext;
    return true;
  }

  /**
   * Resumes the deferred startCamera call once CAMERA permission is granted;
   * reports ILLEGAL_ACCESS_EXCEPTION to JavaScript if any grant was denied.
   */
  @Override
  public void onRequestPermissionResult(int requestCode, String[] permissions, int[] grantResults) throws JSONException {
    for(int r:grantResults) {
      if(r == PackageManager.PERMISSION_DENIED) {
        execCallback.sendPluginResult(new PluginResult(PluginResult.Status.ILLEGAL_ACCESS_EXCEPTION));
        return;
      }
    }
    if (requestCode == permissionsReqId) {
      startCamera(execArgs, execCallback);
    }
  }
}
package play.test;

import com.fasterxml.jackson.databind.JsonNode;
import play.api.libs.json.JsValue;
import play.api.mvc.AnyContent;
import play.api.mvc.AnyContentAsJson;
import play.api.mvc.AnyContentAsRaw;
import play.api.mvc.AnyContentAsText;
import play.api.mvc.AnyContentAsXml;
import play.api.mvc.RawBuffer;
import play.libs.*;
import play.mvc.*;

import java.util.*;

import org.xml.sax.InputSource;

import scala.collection.Seq;

/**
 * Fake HTTP request implementation.
 * Java-friendly wrapper around Scala's {@code play.api.test.FakeRequest};
 * each {@code with*} method mutates the wrapped request and returns
 * {@code this} so calls can be chained.
 */
public class FakeRequest {

    // The wrapped Scala fake request; rebuilt/replaced by each with* method.
    @SuppressWarnings("rawtypes")
    play.api.test.FakeRequest fake;

    /**
     * Constructs a new GET / fake request.
     */
    public FakeRequest() {
        this.fake = play.api.test.FakeRequest.apply();
    }

    /**
     * Constructs a new request with the given HTTP method and path.
     */
    public FakeRequest(String method, String path) {
        this.fake = play.api.test.FakeRequest.apply(method, path);
    }

    /**
     * Change the remote-address for this request.
     * @param remoteAddress the remote address, e.g. "127.0.0.1"
     * @return the Fake Request
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withRemoteAddress(String remoteAddress) {
        // Rebuild the immutable Scala request, copying every field but the address.
        fake = new play.api.test.FakeRequest(fake.method(), fake.uri(), fake.headers(), fake.body(), remoteAddress, fake.version(), fake.id(), fake.tags(), fake.secure());
        return this;
    }

    /**
     * Adds an additional header to this request.
     * @param name the header name
     * @param value the header value
     * @return the Fake Request
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withHeader(String name, String value) {
        fake = fake.withHeaders(Scala.varargs(Scala.Tuple(name, value)));
        return this;
    }

    /**
     * Sets an {@code AnyContent} body on this request, replacing the
     * <tt>Content-Type</tt> header and HTTP method at the same time.
     * @param content the AnyContent
     * @param contentType Content-Type header value
     * @param method The method to be set
     * @return the Fake Request
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withAnyContent(AnyContent content, String contentType, String method) {
        // Copy existing headers into a mutable Java map before overriding Content-Type.
        Map<String, Seq<String>> map = new HashMap<String, Seq<String>>(Scala.asJava(fake.headers().toMap()));
        map.put("Content-Type", Scala.toSeq(new String[] {contentType}));
        fake = new play.api.test.FakeRequest(method, fake.uri(), new play.api.test.FakeHeaders(Scala.asScala(map).toSeq()), content, fake.remoteAddress(), fake.version(), fake.id(), fake.tags(), fake.secure());
        return this;
    }

    /**
     * Set a Json Body to this request.
     * The <tt>Content-Type</tt> header of the request is set to <tt>application/json</tt>.
     * @param node the Json Node
     * @return the Fake Request
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withJsonBody(JsonNode node) {
        return withJsonBody(play.api.libs.json.JacksonJson$.MODULE$.jsonNodeToJsValue(node));
    }

    /**
     * Set a Json Body to this request.
     * The <tt>Content-Type</tt> header of the request is set to <tt>application/json</tt>.
     * @param json the JsValue
     * @return the Fake Request
     */
    public FakeRequest withJsonBody(JsValue json) {
        return withAnyContent(new AnyContentAsJson(json), "application/json", this.fake.getMethod());
    }

    /**
     * Set a Json Body to this request.
     * The <tt>Content-Type</tt> header of the request is set to <tt>application/json</tt>.
     * @param node the Json Node
     * @param method the HTTP method. <tt>POST</tt> if set to <code>null</code>
     * @return the Fake Request
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withJsonBody(JsonNode node, String method) {
        if (method == null) {
            method = Helpers.POST;
        }
        Map<String, Seq<String>> map = new HashMap<String, Seq<String>>(Scala.asJava(fake.headers().toMap()));
        map.put("Content-Type", Scala.toSeq(new String[] {"application/json"}));
        AnyContentAsJson content = new AnyContentAsJson(play.api.libs.json.JacksonJson$.MODULE$.jsonNodeToJsValue(node));
        fake = new play.api.test.FakeRequest(method, fake.uri(), new play.api.test.FakeHeaders(Scala.asScala(map).toSeq()), content, fake.remoteAddress(), fake.version(), fake.id(), fake.tags(), fake.secure());
        return this;
    }

    /**
     * Set a Json Body to this request.
     * The <tt>Content-Type</tt> header of the request is set to <tt>application/json</tt>.
     * @param json the JsValue
     * @param method the HTTP method. <tt>POST</tt> if set to <code>null</code>
     * @return the Fake Request
     */
    public FakeRequest withJsonBody(JsValue json, String method) {
        return withAnyContent(new AnyContentAsJson(json), "application/json", method);
    }

    /**
     * Adds a flash attribute to this request.
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withFlash(String name, String value) {
        fake = fake.withFlash(Scala.varargs(Scala.Tuple(name, value)));
        return this;
    }

    /**
     * Adds an additional session entry to this request.
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withSession(String name, String value) {
        fake = fake.withSession(Scala.varargs(Scala.Tuple(name, value)));
        return this;
    }

    /**
     * Adds cookies to this request.
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withCookies(Http.Cookie... cookies) {
        // Convert each Java-side cookie to its Scala counterpart.
        List <play.api.mvc.Cookie> scalacookies = new ArrayList<play.api.mvc.Cookie>();
        for (Http.Cookie c : cookies) {
            scalacookies.add(new play.api.mvc.Cookie(c.name(), c.value(), Scala.<Object>Option(c.maxAge()), c.path(), Scala.Option(c.domain()), c.secure(), c.httpOnly()) );
        }
        fake = fake.withCookies(Scala.varargs(scalacookies.toArray()));
        return this;
    }

    /**
     * Sets a form-url-encoded body on this request from the given key/value map.
     */
    @SuppressWarnings(value = "unchecked")
    public FakeRequest withFormUrlEncodedBody(java.util.Map<String,String> data) {
        List<scala.Tuple2<String,String>> args = new ArrayList<scala.Tuple2<String,String>>();
        for(String key: data.keySet()) {
            scala.Tuple2<String,String> pair = Scala.Tuple(key, data.get(key));
            args.add(pair);
        }
        fake = fake.withFormUrlEncodedBody(Scala.toSeq(args));
        return this;
    }

    /**
     * Returns the underlying request with its AnyContent body adapted into
     * the Java-side {@code Http.RequestBody} representation.
     */
    @SuppressWarnings(value = "unchecked")
    public play.api.mvc.Request<play.mvc.Http.RequestBody> getWrappedRequest() {
        return ((play.api.test.FakeRequest<play.api.mvc.AnyContent>)fake).map(new scala.runtime.AbstractFunction1<play.api.mvc.AnyContent, play.mvc.Http.RequestBody>() {
            public play.mvc.Http.RequestBody apply(play.api.mvc.AnyContent anyContent) {
                // Expose every possible body shape; unsupported ones come back null.
                return new play.core.j.JavaParsers.DefaultRequestBody(
                    anyContent.asFormUrlEncoded(),
                    anyContent.asRaw(),
                    anyContent.asText(),
                    anyContent.asJson(),
                    anyContent.asXml(),
                    anyContent.asMultipartFormData()
                );
            }
        });
    }

    /**
     * Set a Binary Data to this request.
     * The <tt>Content-Type</tt> header of the request is set to <tt>application/octet-stream</tt>.
     * @param data the Binary Data
     * @return the Fake Request
     */
    public FakeRequest withRawBody(byte[] data) {
        return withAnyContent(new AnyContentAsRaw(new RawBuffer(data.length, data)), "application/octet-stream", this.fake.getMethod());
    }

    /**
     * Set a XML to this request.
     * The <tt>Content-Type</tt> header of the request is set to <tt>application/xml</tt>.
     * @param xml the XML
     * @return the Fake Request
     */
    public FakeRequest withXmlBody(InputSource xml) {
        return withAnyContent(new AnyContentAsXml(scala.xml.XML.load(xml)), "application/xml", this.fake.getMethod());
    }

    /**
     * Set a Text to this request.
     * The <tt>Content-Type</tt> header of the request is set to <tt>text/plain</tt>.
     * @param text the text
     * @return the Fake Request
     */
    public FakeRequest withTextBody(String text) {
        return withAnyContent(new AnyContentAsText(text), "text/plain", this.fake.getMethod());
    }

    /**
     * Set any body to this request.
     * @param body the Body
     * @return the Fake Request
     */
    @SuppressWarnings(value = "unchecked")
    public <T> FakeRequest withBody(T body) {
        this.fake = this.fake.withBody(body);
        return this;
    }
}
package water.util;

import org.junit.Assert;
import org.junit.Test;

import java.util.Arrays;
import java.util.Random;
import java.util.stream.IntStream;

import static org.junit.Assert.*;
import static water.util.ArrayUtils.*;

/**
 * Tests for the {@link ArrayUtils} helper methods (append/remove/sort/encode etc.).
 */
public class ArrayUtilsTest {

  @Test public void testAppendBytes() {
    byte[] sut = {1, 2, 3};
    byte[] sut2 = {3, 4};
    byte[] expected = {1, 2, 3, 3, 4};
    byte[] empty = {};

    assertArrayEquals(null, append((byte[]) null, null));
    assertArrayEquals(sut, append(null, sut));
    assertArrayEquals(sut, append(sut, null));
    assertArrayEquals(empty, append(null, empty));
    assertArrayEquals(empty, append(empty, null));
    assertArrayEquals(sut, append(empty, sut));
    assertArrayEquals(sut, append(sut, empty));
    assertArrayEquals(expected, append(sut, sut2));
  }

  @Test public void testAppendInts() {
    int[] sut = {1, 2, 3};
    int[] sut2 = {3, 4};
    int[] expected = {1, 2, 3, 3, 4};
    int[] empty = {};

    assertArrayEquals(null, append((int[]) null, null));
    assertArrayEquals(sut, append(null, sut));
    assertArrayEquals(sut, append(sut, null));
    assertArrayEquals(empty, append(null, empty));
    assertArrayEquals(empty, append(empty, null));
    assertArrayEquals(sut, append(empty, sut));
    assertArrayEquals(sut, append(sut, empty));
    assertArrayEquals(expected, append(sut, sut2));
  }

  @Test public void testAppendDouble() {
    double[] sut = {1.0, 2.0, 3.0};
    double[] expected = {1.0, 2.0, 3.0, 3.0};
    double[] empty = {};

    assertArrayEquals(expected, append(sut, 3.0), 0.0);
    assertArrayEquals(new double[]{3.0}, append(empty, 3.0), 0.0);
    assertArrayEquals(new double[]{3.0}, append(null, 3.0), 0.0);
  }

  @Test public void testAppendLongs() {
    long[] sut = {1, 2, 3};
    long[] sut2 = {3, 4};
    long[] expected = {1, 2, 3, 3, 4};
    long[] empty = {};

    // FIX: was `append((int[]) null, null)` — a copy-paste from testAppendInts that
    // exercised the int[] overload instead of the long[] overload under test.
    assertArrayEquals(null, append((long[]) null, null));
    assertArrayEquals(sut, append(null, sut));
    assertArrayEquals(sut, append(sut, null));
    assertArrayEquals(empty, append(null, empty));
    assertArrayEquals(empty, append(empty, null));
    assertArrayEquals(sut, append(empty, sut));
    assertArrayEquals(sut, append(sut, empty));
    assertArrayEquals(expected, append(sut, sut2));
  }

  @Test public void testRemoveOneObject() {
    Integer[] sut = {1, 2, 3};
    Integer[] sutWithout1 = {2, 3};
    Integer[] sutWithout2 = {1, 3};
    Integer[] sutWithout3 = {1, 2};

    // Out-of-range indices must leave the array untouched.
    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MIN_VALUE));
    assertArrayEquals("Should not have deleted ", sut, remove(sut, -1));
    assertArrayEquals("Should have deleted first", sutWithout1, remove(sut, 0));
    assertArrayEquals("Should have deleted second", sutWithout2, remove(sut, 1));
    assertArrayEquals("Should have deleted third", sutWithout3, remove(sut, 2));
    assertArrayEquals("Should have not deleted", sut, remove(sut, 3));
    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MAX_VALUE));
  }

  @Test public void testRemoveOneObjectFromSingleton() {
    Integer[] sut = {1};
    Integer[] sutWithout1 = {};

    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MIN_VALUE));
    assertArrayEquals("Should not have deleted ", sut, remove(sut, -1));
    assertArrayEquals("Should have deleted first", sutWithout1, remove(sut, 0));
    assertArrayEquals("Should have not deleted", sut, remove(sut, 1));
    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MAX_VALUE));
  }

  @Test public void testRemoveOneObjectFromEmpty() {
    Integer[] sut = {};
    assertArrayEquals("Nothing to remove", sut, remove(sut, -1));
    assertArrayEquals("Nothing to remove", sut, remove(sut, 0));
    assertArrayEquals("Nothing to remove", sut, remove(sut, 1));
  }

  @Test public void testRemoveOneByte() {
    byte[] sut = {1, 2, 3};
    byte[] sutWithout1 = {2, 3};
    byte[] sutWithout2 = {1, 3};
    byte[] sutWithout3 = {1, 2};

    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MIN_VALUE));
    assertArrayEquals("Should not have deleted ", sut, remove(sut, -1));
    assertArrayEquals("Should have deleted first", sutWithout1, remove(sut, 0));
    assertArrayEquals("Should have deleted second", sutWithout2, remove(sut, 1));
    assertArrayEquals("Should have deleted third", sutWithout3, remove(sut, 2));
    assertArrayEquals("Should have not deleted", sut, remove(sut, 3));
    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MAX_VALUE));
  }

  @Test public void testRemoveOneByteFromSingleton() {
    byte[] sut = {1};
    byte[] sutWithout1 = {};

    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MIN_VALUE));
    assertArrayEquals("Should not have deleted ", sut, remove(sut, -1));
    assertArrayEquals("Should have deleted first", sutWithout1, remove(sut, 0));
    assertArrayEquals("Should have not deleted", sut, remove(sut, 1));
    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MAX_VALUE));
  }

  @Test public void testRemoveOneByteFromEmpty() {
    byte[] sut = {};
    assertArrayEquals("Nothing to remove", sut, remove(sut, -1));
    assertArrayEquals("Nothing to remove", sut, remove(sut, 0));
    assertArrayEquals("Nothing to remove", sut, remove(sut, 1));
  }

  @Test public void testRemoveOneInt() {
    int[] sut = {1, 2, 3};
    int[] sutWithout1 = {2, 3};
    int[] sutWithout2 = {1, 3};
    int[] sutWithout3 = {1, 2};

    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MIN_VALUE));
    assertArrayEquals("Should not have deleted ", sut, remove(sut, -1));
    assertArrayEquals("Should have deleted first", sutWithout1, remove(sut, 0));
    assertArrayEquals("Should have deleted second", sutWithout2, remove(sut, 1));
    assertArrayEquals("Should have deleted third", sutWithout3, remove(sut, 2));
    assertArrayEquals("Should have not deleted", sut, remove(sut, 3));
    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MAX_VALUE));
  }

  @Test public void testRemoveOneIntFromSingleton() {
    int[] sut = {1};
    int[] sutWithout1 = {};

    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MIN_VALUE));
    assertArrayEquals("Should not have deleted ", sut, remove(sut, -1));
    assertArrayEquals("Should have deleted first", sutWithout1, remove(sut, 0));
    assertArrayEquals("Should have not deleted", sut, remove(sut, 1));
    assertArrayEquals("Should have not deleted", sut, remove(sut, Integer.MAX_VALUE));
  }

  @Test public void testRemoveOneIntFromEmpty() {
    int[] sut = {};
    assertArrayEquals("Nothing to remove", sut, remove(sut, -1));
    assertArrayEquals("Nothing to remove", sut, remove(sut, 0));
    assertArrayEquals("Nothing to remove", sut, remove(sut, 1));
  }

  @Test public void testCountNonZeroes() {
    double[] empty = {};
    assertEquals(0, countNonzeros(empty));
    double[] singlenz = {1.0};
    assertEquals(1, countNonzeros(singlenz));
    double[] threeZeroes = {0.0, 0.0, 0.0};
    assertEquals(0, countNonzeros(threeZeroes));
    double[] somenz = {-1.0, Double.MIN_VALUE, 0.0, Double.MAX_VALUE, 0.001, 0.0, 42.0};
    assertEquals(5, countNonzeros(somenz));
  }

  @Test public void testSortIndicesCutoffBranch() {
    int arrayLen = 10;
    int[] indices = ArrayUtils.range(0, arrayLen - 1);
    double[] values = new double[]{-12, -5, 1, 255, 1.25, -1, 0, 1, -26, 16};
    double[] valuesInput = Arrays.copyOf(values, values.length);
    // cutoff=500 > array length, so the insertion-sort style branch is taken.
    sort(indices, valuesInput, 500, 1);
    assertArrayEquals("Not correctly sorted", new int[]{8, 0, 1, 5, 6, 2, 7, 4, 9, 3}, indices);
    assertArrayEquals("Values array is changed", values, valuesInput, 0);
    for (int index = 1; index < arrayLen; index++)
      Assert.assertTrue(values[indices[index-1]]+" should be <= "+values[indices[index]],
          values[indices[index-1]] <= values[indices[index]]);

    sort(indices, valuesInput, 500, -1);
    assertArrayEquals("Not correctly sorted", new int[]{3, 9, 4, 2, 7, 6, 5, 1, 0, 8}, indices);
    assertArrayEquals("Values array is changed", values, valuesInput, 0);
    for (int index = 1; index < arrayLen; index++)
      Assert.assertTrue(values[indices[index-1]]+" should be >= "+values[indices[index]],
          values[indices[index-1]] >= values[indices[index]]);
  }

  @Test public void testSortIndicesJavaSortBranch() {
    int arrayLen = 10;
    int[] indices = ArrayUtils.range(0, arrayLen - 1);
    double[] values = new double[]{-12, -5, 1, 255, 1.25, -1, 0, 1, -26, 16};
    double[] valuesInput = Arrays.copyOf(values, values.length);
    // cutoff=-1 forces the java.util.Arrays.sort based branch.
    sort(indices, valuesInput, -1, 1);
    assertArrayEquals("Not correctly sorted", new int[]{8, 0, 1, 5, 6, 2, 7, 4, 9, 3}, indices);
    assertArrayEquals("Values array is changed", values, valuesInput, 0);
    for (int index = 1; index < arrayLen; index++)
      Assert.assertTrue(values[indices[index-1]]+" should be <= "+values[indices[index]],
          values[indices[index-1]] <= values[indices[index]]);

    sort(indices, valuesInput, -1, -1);
    assertArrayEquals("Not correctly sorted", new int[]{3, 9, 4, 2, 7, 6, 5, 1, 0, 8}, indices);
    assertArrayEquals("Values array is changed", values, valuesInput, 0);
    for (int index = 1; index < arrayLen; index++)
      Assert.assertTrue(values[indices[index-1]]+" should be >= "+values[indices[index]],
          values[indices[index-1]] >= values[indices[index]]);
  }

  @Test public void testSortIndicesRandomAttackJavaSortBranch() {
    Random randObj = new Random(12345);
    int arrayLen = 100;
    int[] indices = new int[arrayLen];
    double[] values = new double[arrayLen];
    for (int index = 0; index < arrayLen; index++) {  // generate data array
      values[index] = randObj.nextDouble();
      indices[index] = index;
    }

    sort(indices, values, -1, 1);  // sorting in ascending order
    for (int index = 1; index < arrayLen; index++)  // check correct sorting in ascending order
      Assert.assertTrue(values[indices[index-1]]+" should be <= "+values[indices[index]],
          values[indices[index-1]] <= values[indices[index]]);

    sort(indices, values, -1, -1);  // sorting in descending order
    for (int index = 1; index < arrayLen; index++)  // check correct sorting in descending order
      Assert.assertTrue(values[indices[index-1]]+" should be >= "+values[indices[index]],
          values[indices[index-1]] >= values[indices[index]]);
  }

  @Test public void testSortIndicesRandomAttackCutoffBranch() {
    Random randObj = new Random(12345);
    int arrayLen = 100;
    int[] indices = new int[arrayLen];
    double[] values = new double[arrayLen];
    for (int index = 0; index < arrayLen; index++) {  // generate data array
      values[index] = randObj.nextDouble();
      indices[index] = index;
    }

    sort(indices, values, 500, 1);  // sorting in ascending order
    for (int index = 1; index < arrayLen; index++)  // check correct sorting in ascending order
      Assert.assertTrue(values[indices[index-1]]+" should be <= "+values[indices[index]],
          values[indices[index-1]] <= values[indices[index]]);

    sort(indices, values, 500, -1);  // sorting in descending order
    for (int index = 1; index < arrayLen; index++)  // check correct sorting in descending order
      Assert.assertTrue(values[indices[index-1]]+" should be >= "+values[indices[index]],
          values[indices[index-1]] >= values[indices[index]]);
  }

  @Test public void testAddWithCoefficients() {
    float[] a = {1.0f, 2.0f, 3.0f};
    float[] b = {100.0f, 200.0f, 300.0f};

    float[] result = ArrayUtils.add(10.0f, a, 2.0f, b);
    // add() updates `a` in place and returns it.
    assertTrue(result == a);
    assertArrayEquals(new float[]{210.0f, 420.0f, 630.0f}, a, 0.001f);
  }

  @Test public void test_encodeAsInt() {
    byte[]bs = new byte[]{0,0,0,0,1};
    assertEquals(0, encodeAsInt(bs, 0));
    assertEquals(0x1000000, encodeAsInt(bs, 1));
    try {
      encodeAsInt(bs, 2);
      fail("Should not work");
    } catch (Throwable ignore) {}
    bs[0] = (byte)0xfe;
    assertEquals(0xfe, encodeAsInt(bs, 0));
    bs[1] = (byte)0xca;
    assertEquals(0xcafe, encodeAsInt(bs, 0));
    bs[2] = (byte)0xde;
    assertEquals(0xdecafe, encodeAsInt(bs, 0));
    bs[3] = (byte)0x0a;
    assertEquals(0xadecafe, encodeAsInt(bs, 0));
    assertEquals(0x10adeca, encodeAsInt(bs, 1));
  }

  @Test public void test_decodeAsInt() {
    byte[]bs = new byte[]{1,2,3,4,5};
    assertArrayEquals(new byte[]{0,0,0,0,5}, decodeAsInt(0, bs, 0));
    try {
      decodeAsInt(1, bs, 3);
      fail("Should not work");
    } catch (Throwable ignore) {}
    try {
      decodeAsInt(256, bs, 4);
      fail("Should not work");
    } catch (Throwable ignore) {}
    assertArrayEquals(new byte[]{(byte)0xfe,0,0,0,5}, decodeAsInt(0xfe, bs, 0));
    assertArrayEquals(new byte[]{(byte)0xfe,(byte)0xca,0,0,5}, decodeAsInt(0xcafe, bs, 0));
    assertArrayEquals(new byte[]{(byte)0xfe,(byte)0xca,(byte)0xde,0,5}, decodeAsInt(0xdecafe, bs, 0));
    assertArrayEquals(new byte[]{(byte)0xfe,(byte)0xca,(byte)0xde,(byte)0x80,5}, decodeAsInt(0x80decafe, bs, 0));
  }

  @Test public void testFloatsToDouble() {
    assertNull(toDouble((float[]) null));
    assertArrayEquals(new double[]{1.0, 2.2}, toDouble(new float[]{1.0f, 2.2f}), 1e-7);
  }

  @Test public void testIntsToDouble() {
    assertNull(toDouble((int[]) null));
    assertArrayEquals(new double[]{1.0, 42.0}, toDouble(new int[]{1, 42}), 0);
  }

  @Test public void testOccurrenceCount() {
    byte[] arr = new byte[]{ 1, 2, 1, 1, 3, 4 };
    assertEquals("Occurrence count mismatch.", 3, ArrayUtils.occurrenceCount(arr, (byte) 1));
    assertEquals("Occurrence count mismatch.", 0, ArrayUtils.occurrenceCount(arr, (byte) 0));
  }

  @Test public void testOccurrenceCountEmptyArray() {
    byte[] arr = new byte[]{};
    assertEquals("Occurrence count mismatch.", 0, ArrayUtils.occurrenceCount(arr, (byte) 1));
  }

  @Test public void testByteArraySelect() {
    byte[] arr = new byte[]{ 1, 2, 3, 4, 5, 6 };
    int[] idxs = new int[]{ 3, 1, 5 };
    byte[] expectedSelectedElements = new byte[]{ 4, 2, 6 };
    assertArrayEquals("Selected array elements mismatch.", expectedSelectedElements, ArrayUtils.select(arr, idxs));
  }

  @Test public void testByteArrayEmptySelect() {
    byte[] arr = new byte[]{ 1, 2, 3, 4, 5, 6 };
    int[] idxs = new int[]{};
    byte[] expectedSelectedElements = new byte[]{};
    assertArrayEquals("Selected array elements mismatch.", expectedSelectedElements, ArrayUtils.select(arr, idxs));
  }

  @Test public void testToStringQuotedElements(){
    final Object[] names = new String[]{"", "T16384"};
    final String outputString = toStringQuotedElements(names);
    assertEquals("[\"\", \"T16384\"]", outputString);
  }

  @Test public void testToStringQuotedElementsNullInput(){
    final String outputString = toStringQuotedElements(null);
    assertEquals("null", outputString);
  }

  @Test public void testToStringQuotedElementsEmptyInput(){
    final Object[] emptyNames = new String[0];
    final String outputString = toStringQuotedElements(emptyNames);
    assertEquals("[]", outputString);
  }

  @Test public void testToStringQuotedElements_with_max_items() {
    final Object[] names = IntStream.range(1, 10).mapToObj(Integer::toString).toArray();
    final String outputString = toStringQuotedElements(names, 5);
    assertEquals("[\"1\", \"2\", \"3\", ...4 not listed..., \"8\", \"9\"]", outputString);
  }

  @Test public void testToStringQuotedElements_with_max_items_corner_cases() {
    final Object[] names = IntStream.range(1, 4).mapToObj(Integer::toString).toArray();
    assertEquals("[\"1\", \"2\", \"3\"]", toStringQuotedElements(names, -1));
    assertEquals("[\"1\", \"2\", \"3\"]", toStringQuotedElements(names, 0));
    assertEquals("[\"1\", ...2 not listed...]", toStringQuotedElements(names, 1));
    assertEquals("[\"1\", ...1 not listed..., \"3\"]", toStringQuotedElements(names, 2));
    assertEquals("[\"1\", \"2\", \"3\"]", toStringQuotedElements(names, 3));
    assertEquals("[\"1\", \"2\", \"3\"]", toStringQuotedElements(names, 4));
  }
}
/*
 * Open Source Software published under the Apache Licence, Version 2.0.
 */

package io.github.vocabhunter.gui.model;

import io.github.vocabhunter.analysis.filter.WordFilter;
import io.github.vocabhunter.analysis.session.SessionState;
import io.github.vocabhunter.gui.i18n.SupportedLocale;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.property.SimpleStringProperty;

import java.nio.file.Path;
import java.util.Optional;
import javax.inject.Singleton;

import static javafx.beans.binding.Bindings.isNotEmpty;

/**
 * Top-level GUI model: holds the current session (state + model), the session file,
 * and the observable properties the main view binds to.  Session-specific bindings
 * are established in {@link #replaceSessionModel} and torn down again when the next
 * session replaces them.
 */
@Singleton
public class MainModel {
    private final SimpleStringProperty title = new SimpleStringProperty();

    private SessionState sessionState;

    private SessionModel sessionModel;

    private final SimpleBooleanProperty sessionOpen = new SimpleBooleanProperty(false);

    private final SimpleBooleanProperty selectionAvailable = new SimpleBooleanProperty(false);

    private final SimpleBooleanProperty editMode = new SimpleBooleanProperty(true);

    private final SimpleObjectProperty<Path> sessionFile = new SimpleObjectProperty<>(null);

    private final SimpleStringProperty documentName = new SimpleStringProperty();

    private final SimpleBooleanProperty changesSaved = new SimpleBooleanProperty(true);

    private final SimpleObjectProperty<FilterSettings> filterSettings = new SimpleObjectProperty<>();

    private final SimpleBooleanProperty enableFilters = new SimpleBooleanProperty(true);

    private final SimpleObjectProperty<SupportedLocale> locale = new SimpleObjectProperty<>(SupportedLocale.DEFAULT_LOCALE);

    private WordFilter filter;

    /**
     * Install a new session, unbinding any properties still tied to the previous one
     * and binding this model's properties to the new session model.
     *
     * @param sessionState the raw session state
     * @param sessionModel the observable session model
     * @param sessionFile the file the session was loaded from (may be null for a new session)
     */
    public void replaceSessionModel(final SessionState sessionState, final SessionModel sessionModel, final Path sessionFile) {
        unbindOldSession();
        this.sessionState = sessionState;
        this.sessionModel = sessionModel;
        this.sessionFile.set(sessionFile);
        sessionOpen.set(true);
        selectionAvailable.bind(isNotEmpty(sessionModel.getSelectedWords()));
        editMode.bindBidirectional(sessionModel.editableProperty());
        sessionModel.filterSettingsProperty().bindBidirectional(filterSettings);
        sessionModel.enableFiltersProperty().bindBidirectional(enableFilters);
        documentName.bind(sessionModel.documentNameProperty());
        changesSaved.bindBidirectional(sessionModel.changesSavedProperty());
    }

    // Remove all bindings to the previous session model so it can be garbage collected
    // and so the new session's bindings do not conflict with the old ones.
    private void unbindOldSession() {
        selectionAvailable.unbind();
        documentNameProperty().unbind();
        // FIX: guard on sessionModel (the object actually dereferenced below) rather
        // than sessionState — avoids a potential NPE if only the state was ever set.
        if (sessionModel != null) {
            editMode.unbindBidirectional(sessionModel.editableProperty());
            changesSaved.unbindBidirectional(sessionModel.changesSavedProperty());
            sessionModel.filterSettingsProperty().unbindBidirectional(filterSettings);
            sessionModel.enableFiltersProperty().unbindBidirectional(enableFilters);
        }
    }

    public SimpleObjectProperty<Path> sessionFileProperty() {
        return sessionFile;
    }

    public SimpleStringProperty titleProperty() {
        return title;
    }

    public void setTitle(final String title) {
        this.title.set(title);
    }

    public SimpleStringProperty documentNameProperty() {
        return documentName;
    }

    public SimpleBooleanProperty changesSavedProperty() {
        return changesSaved;
    }

    public void setChangesSaved(final boolean changesSaved) {
        this.changesSaved.set(changesSaved);
    }

    public boolean isChangesSaved() {
        return changesSaved.get();
    }

    public SimpleBooleanProperty sessionOpenProperty() {
        return sessionOpen;
    }

    public SimpleBooleanProperty selectionAvailableProperty() {
        return selectionAvailable;
    }

    public SimpleBooleanProperty editModeProperty() {
        return editMode;
    }

    public SimpleObjectProperty<FilterSettings> filterSettingsProperty() {
        return filterSettings;
    }

    public void setFilterSettings(final FilterSettings filterSettings) {
        this.filterSettings.set(filterSettings);
    }

    public FilterSettings getFilterSettings() {
        return filterSettings.get();
    }

    public void setFilter(final WordFilter filter) {
        this.filter = filter;
    }

    public WordFilter getFilter() {
        return filter;
    }

    public SimpleBooleanProperty enableFiltersProperty() {
        return enableFilters;
    }

    public void setEnableFilters(final boolean enableFilters) {
        this.enableFilters.set(enableFilters);
    }

    public void setSessionFile(final Path sessionFile) {
        this.sessionFile.set(sessionFile);
    }

    public boolean hasSessionFile() {
        return sessionFile.getValue() != null;
    }

    public Path getSessionFile() {
        return sessionFile.get();
    }

    public Optional<SessionState> getSessionState() {
        return Optional.ofNullable(sessionState);
    }

    public Optional<SessionModel> getSessionModel() {
        return Optional.ofNullable(sessionModel);
    }

    public void setLocale(final SupportedLocale locale) {
        this.locale.set(locale);
    }

    public SimpleObjectProperty<SupportedLocale> localeProperty() {
        return locale;
    }
}
package org.hamcrest; import org.hamcrest.core.IsEqual; import org.junit.Test; import static org.hamcrest.AbstractMatcherTest.*; import static org.junit.Assert.assertEquals; public final class FeatureMatcherTest { private final FeatureMatcher<Thingy, String> resultMatcher = resultMatcher(); @Test public void matchesPartOfAnObject() { assertMatches("feature", resultMatcher, new Thingy("bar")); assertDescription("Thingy with result \"bar\"", resultMatcher); } @Test public void mismatchesPartOfAnObject() { assertMismatchDescription("result mismatch-description", resultMatcher, new Thingy("foo")); } @Test public void doesNotThrowNullPointerException() { assertMismatchDescription("was null", resultMatcher, null); } @Test public void doesNotThrowClassCastException() { resultMatcher.matches(new ShouldNotMatch()); StringDescription mismatchDescription = new StringDescription(); resultMatcher.describeMismatch(new ShouldNotMatch(), mismatchDescription); assertEquals("was ShouldNotMatch <ShouldNotMatch>", mismatchDescription.toString()); } public static class Match extends IsEqual<String> { public Match(String equalArg) { super(equalArg); } @Override public void describeMismatch(Object item, Description description) { description.appendText("mismatch-description"); } } public static class Thingy { private final String result; public Thingy(String result) { this.result = result; } public String getResult() { return result; } } public static class ShouldNotMatch { @Override public String toString() { return "ShouldNotMatch"; } } private static FeatureMatcher<Thingy, String> resultMatcher() { return new FeatureMatcher<Thingy, String>(new Match("bar"), "Thingy with result", "result") { @Override public String featureValueOf(Thingy actual) { return actual.getResult(); } }; } }
package com.linkedin.helix.tools;

import java.io.FileWriter;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;

import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanServerConnection;
import javax.management.MBeanServerDelegate;
import javax.management.MBeanServerNotification;
import javax.management.Notification;
import javax.management.NotificationListener;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import javax.management.relation.MBeanServerNotificationFilter;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.log4j.Logger;

/**
 * Command-line tool that connects to a remote JMX server, periodically samples a set
 * of attributes from all MBeans matching a name pattern and class name, optionally
 * invokes no-arg operations on them, and writes one line per bean per sample to an
 * output file.  It also listens for MBean (un)registration notifications so the
 * sampled set tracks beans that appear or disappear while running.
 */
public class JmxDumper implements NotificationListener
{
  // Command-line option names.
  public static final String help = "help";
  public static final String domain = "domain";
  public static final String fields = "fields";
  public static final String pattern = "pattern";
  public static final String operations = "operations";
  public static final String period = "period";
  public static final String className = "className";
  public static final String outputFile = "outputFile";
  public static final String jmxUrl = "jmxUrl";
  public static final String sampleCount = "sampleCount";

  private static final Logger _logger = Logger.getLogger(JmxDumper.class);

  String _domain;                      // JMX domain whose (un)registrations we track
  MBeanServerConnection _mbeanServer;  // connection to the remote MBean server
  String _beanClassName;               // only beans of this class are sampled
  String _namePattern;                 // ObjectName pattern used for the initial query
  int _samplePeriod;                   // sampling period in milliseconds
  // Used as a concurrent set (key == value): beans currently being sampled.
  // The timer thread iterates it while the notification thread adds/removes.
  Map<ObjectName,ObjectName> _mbeanNames = new ConcurrentHashMap<ObjectName,ObjectName>();
  Timer _timer;
  String _outputFileName;
  List<String> _outputFields = new ArrayList<String>();  // attribute names to dump
  Set<String> _operations = new HashSet<String>();       // no-arg operations to invoke each sample
  PrintWriter _outputFile;
  int _samples = 0;            // samples taken so far (timer thread only)
  int _targetSamples = -1;     // stop after this many samples; -1 means run forever
  String _jmxUrl;

  /**
   * Connect to the JMX server, register for MBean lifecycle notifications, find the
   * initially matching beans, open the output file and start the sampling timer.
   *
   * @param jmxService host:port of the JMX RMI connector
   * @param domain JMX domain to watch for bean (un)registrations
   * @param beanClassName class name a bean must have to be sampled
   * @param namePattern ObjectName pattern for the initial bean query
   * @param samplePeriod sampling period in milliseconds
   * @param fields attribute names to dump per sample
   * @param operations no-arg operation names to invoke per sample
   * @param outputfile path of the file to append sample lines to
   * @param sampleCount number of samples to take before notifying waiters (-1 = unlimited)
   * @throws Exception if the JMX connection or output file cannot be set up
   */
  public JmxDumper(String jmxService, String domain, String beanClassName, String namePattern, int samplePeriod, List<String> fields, List<String> operations, String outputfile, int sampleCount ) throws Exception
  {
    _jmxUrl = jmxService;
    _domain = domain;
    _beanClassName = beanClassName;
    _samplePeriod = samplePeriod;
    _outputFields.addAll(fields);
    _operations.addAll(operations);
    _outputFileName = outputfile;
    _namePattern = namePattern;
    _targetSamples = sampleCount;

    JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://" + _jmxUrl + "/jmxrmi");
    JMXConnector jmxc = JMXConnectorFactory.connect(url, null);
    _mbeanServer = jmxc.getMBeanServerConnection();
    // Subscribe to registration/unregistration events for every ObjectName so the
    // sampled bean set stays current while the dumper runs.
    MBeanServerNotificationFilter filter = new MBeanServerNotificationFilter();
    filter.enableAllObjectNames();
    _mbeanServer.addNotificationListener(MBeanServerDelegate.DELEGATE_NAME, this, filter, null);
    init();
    // Daemon timer: sampling stops automatically when the JVM exits.
    _timer = new Timer(true);
    _timer.scheduleAtFixedRate(new SampleTask(), _samplePeriod, _samplePeriod);
  }

  /**
   * Periodic task: for each tracked bean, dump the configured attributes as one
   * space-separated line and invoke the configured operations.  Beans whose info
   * can no longer be fetched are dropped from the tracked set.
   */
  class SampleTask extends TimerTask
  {
    @Override
    public void run()
    {
      List<ObjectName> errorMBeans = new ArrayList<ObjectName>();
      _logger.info("Sampling " + _mbeanNames.size() + " beans");
      for(ObjectName beanName : _mbeanNames.keySet())
      {
        MBeanInfo info;
        try
        {
          info = _mbeanServer.getMBeanInfo(beanName);
        }
        catch (Exception e)
        {
          // Bean disappeared (or connection hiccup) — schedule it for removal.
          _logger.error( e.getMessage()+" removing it");
          errorMBeans.add(beanName);
          continue;
        }
        if(!info.getClassName().equals(_beanClassName))
        {
          _logger.warn("Skip: className "+info.getClassName() + " expected : "+ _beanClassName);
          continue;
        }
        // Each sample line: timestamp, bean name, then one value per output field.
        StringBuffer line = new StringBuffer();
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-hh:mm:ss:SSS");
        String date = dateFormat.format(new Date());
        line.append(date + " ");
        line.append(beanName.toString() + " ");

        MBeanAttributeInfo[] infos = info.getAttributes();
        Map<String, MBeanAttributeInfo> infoMap = new HashMap<String, MBeanAttributeInfo>();
        for(MBeanAttributeInfo infoItem : infos)
        {
          infoMap.put(infoItem.getName(), infoItem);
        }
        for(String outputField : _outputFields)
        {
          try
          {
            if(infoMap.containsKey(outputField))
            {
              Object mbeanAttributeValue = _mbeanServer.getAttribute(beanName, outputField);
              line.append(mbeanAttributeValue.toString() + " ");
            }
            else
            {
              // Missing attribute: emit a placeholder so columns stay aligned.
              _logger.warn(outputField + " not found");
              line.append("null ");
            }
          }
          catch (Exception e)
          {
            _logger.error("Error:", e);
            line.append("null ");
            continue;
          }
        }
        // Invoke each configured no-arg operation that this bean exposes
        // (e.g. a counter reset between samples).
        MBeanOperationInfo[] operations = info.getOperations();
        Map<String, MBeanOperationInfo> opeMap = new HashMap<String, MBeanOperationInfo>();
        for(MBeanOperationInfo opeItem : operations)
        {
          opeMap.put(opeItem.getName(), opeItem);
        }
        for(String ope : _operations)
        {
          if(opeMap.containsKey(ope))
          {
            try
            {
              _mbeanServer.invoke(beanName, ope, new Object[0], new String[0]);
              //System.out.println(ope+" invoked");
            }
            catch(Exception e)
            {
              _logger.error("Error:", e);
              continue;
            }
          }
        }
        _outputFile.println(line.toString());
        //System.out.println(line);
      }
      // Remove dead beans outside the iteration over the key set.
      for(ObjectName deadBean : errorMBeans)
      {
        _mbeanNames.remove(deadBean);
      }
      _samples ++;
      //System.out.println("samples:"+_samples);
      if(_samples == _targetSamples)
      {
        // Target reached: wake the thread blocked in processCommandLineArgs().
        synchronized(JmxDumper.this)
        {
          _logger.info(_samples + " samples done, exiting...");
          JmxDumper.this.notifyAll();
        }
      }
    }
  }

  /**
   * Query the beans already registered that match the name pattern and class name,
   * record them for sampling, and open the output file.
   *
   * @throws Exception if the query fails or the output file cannot be opened
   */
  void init() throws Exception
  {
    try
    {
      Set<ObjectInstance> existingInstances = _mbeanServer.queryMBeans(new ObjectName(_namePattern), null);
      _logger.info("Total " + existingInstances.size() + " mbeans matched " + _namePattern);
      for(ObjectInstance instance : existingInstances)
      {
        if(instance.getClassName().equals(_beanClassName))
        {
          _mbeanNames.put(instance.getObjectName(), instance.getObjectName());
          _logger.info("Sampling " + instance.getObjectName());
        }
      }
      FileWriter fos = new FileWriter(_outputFileName);
      System.out.println(_outputFileName);
      _outputFile = new PrintWriter(fos);
    }
    catch (Exception e)
    {
      _logger.error("fail to get all existing mbeans in " + _domain, e);
      throw e;
    }
  }

  /**
   * Track MBeans registered/unregistered in the watched domain while running.
   * Called on the JMX notification thread; the backing map is concurrent.
   */
  @Override
  public void handleNotification(Notification notification, Object handback)
  {
    MBeanServerNotification mbs = (MBeanServerNotification) notification;
    if(MBeanServerNotification.REGISTRATION_NOTIFICATION.equals(mbs.getType()))
    {
      //System.out.println("Adding mbean " + mbs.getMBeanName());
      _logger.info("Adding mbean " + mbs.getMBeanName());
      if(mbs.getMBeanName().getDomain().equalsIgnoreCase(_domain))
      {
        addMBean( mbs.getMBeanName());
      }
    }
    else if(MBeanServerNotification.UNREGISTRATION_NOTIFICATION.equals(mbs.getType()))
    {
      //System.out.println("Removing mbean " + mbs.getMBeanName());
      _logger.info("Removing mbean " + mbs.getMBeanName());
      if(mbs.getMBeanName().getDomain().equalsIgnoreCase(_domain))
      {
        removeMBean(mbs.getMBeanName());
      }
    }
  }

  // Add a bean to the sampled set (map used as a set: key == value).
  private void addMBean(ObjectName beanName)
  {
    _mbeanNames.put(beanName, beanName);
  }

  // Remove a bean from the sampled set.
  private void removeMBean(ObjectName beanName)
  {
    _mbeanNames.remove(beanName);
  }

  /**
   * Parse the command line, start a dumper, and block until the requested number of
   * samples has been taken (the SampleTask notifies this thread).  Always flushes
   * and closes the output file before returning.
   *
   * @param cliArgs raw command-line arguments
   * @return process exit code (0 on success)
   * @throws Exception on connection or I/O failure
   */
  public static int processCommandLineArgs(String[] cliArgs) throws Exception
  {
    CommandLineParser cliParser = new GnuParser();
    Options cliOptions = constructCommandLineOptions();
    CommandLine cmd = null;
    try
    {
      cmd = cliParser.parse(cliOptions, cliArgs);
    }
    catch (ParseException pe)
    {
      System.err.println("CommandLineClient: failed to parse command-line options: " + pe.toString());
      printUsage(cliOptions);
      System.exit(1);
    }
    boolean ret = checkOptionArgsNumber(cmd.getOptions());
    if (ret == false)
    {
      printUsage(cliOptions);
      System.exit(1);
    }

    String portStr = cmd.getOptionValue(jmxUrl);
    //int portVal = Integer.parseInt(portStr);
    String periodStr = cmd.getOptionValue(period);
    int periodVal = Integer.parseInt(periodStr);
    String domainStr = cmd.getOptionValue(domain);
    String classNameStr = cmd.getOptionValue(className);
    String patternStr = cmd.getOptionValue(pattern);
    String fieldsStr = cmd.getOptionValue(fields);
    String operationsStr = cmd.getOptionValue(operations);
    String resultFile = cmd.getOptionValue(outputFile);
    // NOTE(review): period/fields/outputFile options are declared optional below but
    // are dereferenced here without defaults — missing values would NPE. Confirm
    // whether they are meant to be required.
    String sampleCountStr = cmd.getOptionValue(sampleCount, "-1");
    int sampleCount = Integer.parseInt(sampleCountStr);

    List<String> fields = Arrays.asList(fieldsStr.split(","));
    List<String> operations = Arrays.asList(operationsStr.split(","));
    JmxDumper dumper = null;
    try
    {
      dumper = new JmxDumper(portStr, domainStr, classNameStr, patternStr, periodVal, fields, operations, resultFile, sampleCount);
      // Block here until SampleTask hits the target sample count and notifies us.
      synchronized(dumper)
      {
        dumper.wait();
      }
    }
    finally
    {
      if(dumper != null)
      {
        dumper.flushFile();
      }
    }
    return 0;
  }

  // Flush and close the output file (safe to call when it was never opened).
  private void flushFile()
  {
    if(_outputFile != null)
    {
      _outputFile.flush();
      _outputFile.close();
    }
  }

  /**
   * Verify that each parsed option carries exactly the number of arguments it
   * declares; prints an error and returns false on the first mismatch.
   */
  private static boolean checkOptionArgsNumber(Option[] options)
  {
    for (Option option : options)
    {
      int argNb = option.getArgs();
      String[] args = option.getValues();
      if (argNb == 0)
      {
        if (args != null && args.length > 0)
        {
          System.err.println(option.getArgName() + " shall have " + argNb + " arguments (was " + Arrays.toString(args) + ")");
          return false;
        }
      }
      else
      {
        if (args == null || args.length != argNb)
        {
          System.err.println(option.getArgName() + " shall have " + argNb + " arguments (was " + Arrays.toString(args) + ")");
          return false;
        }
      }
    }
    return true;
  }

  // Build the commons-cli option set for this tool.
  @SuppressWarnings("static-access")
  private static Options constructCommandLineOptions()
  {
    Option helpOption = OptionBuilder.withLongOpt(help)
        .withDescription("Prints command-line options info")
        .create();

    Option domainOption = OptionBuilder.withLongOpt(domain)
        .withDescription("Domain of the JMX bean")
        .create();
    domainOption.setArgs(1);
    domainOption.setRequired(true);

    Option fieldsOption = OptionBuilder.withLongOpt(fields)
        .withDescription("Fields of the JMX bean to sample")
        .create();
    fieldsOption.setArgs(1);
    fieldsOption.setRequired(false);

    Option operationOption = OptionBuilder.withLongOpt(operations)
        .withDescription("Operation to invoke")
        .create();
    operationOption.setArgs(1);
    operationOption.setRequired(true);

    Option periodOption = OptionBuilder.withLongOpt(period)
        .withDescription("Sampling period in MS")
        .create();
    periodOption.setArgs(1);
    periodOption.setRequired(false);

    Option classOption = OptionBuilder.withLongOpt(className)
        .withDescription("Classname of the MBean")
        .create();
    classOption.setArgs(1);
    classOption.setRequired(true);

    Option patternOption = OptionBuilder.withLongOpt(pattern)
        .withDescription("pattern of the MBean")
        .create();
    patternOption.setArgs(1);
    patternOption.setRequired(true);

    Option outputFileOption = OptionBuilder.withLongOpt(outputFile)
        .withDescription("outputFileName")
        .create();
    outputFileOption.setArgs(1);
    outputFileOption.setRequired(false);

    Option jmxUrlOption = OptionBuilder.withLongOpt(jmxUrl)
        .withDescription("jmx port to connect to")
        .create();
    jmxUrlOption.setArgs(1);
    jmxUrlOption.setRequired(true);

    Option sampleCountOption = OptionBuilder.withLongOpt(sampleCount)
        .withDescription("# of samples to take")
        .create();
    sampleCountOption.setArgs(1);
    sampleCountOption.setRequired(false);

    Options options = new Options();
    options.addOption(helpOption);
    options.addOption(domainOption);
    options.addOption(fieldsOption);
    options.addOption(operationOption);
    options.addOption(classOption);
    options.addOption(outputFileOption);
    options.addOption(jmxUrlOption);
    options.addOption(patternOption);
    options.addOption(periodOption);
    options.addOption(sampleCountOption);
    return options;
  }

  // Print the commons-cli generated usage message.
  public static void printUsage(Options cliOptions)
  {
    HelpFormatter helpFormatter = new HelpFormatter();
    helpFormatter.printHelp("java " + JmxDumper.class.getName(), cliOptions);
  }

  public static void main(String[] args) throws Exception
  {
    /*
    List<String> fields = Arrays.asList(new String("AvgLatency,MaxLatency,MinLatency,PacketsReceived,PacketsSent").split(","));
    List<String> operations = Arrays.asList(new String("resetCounters").split(","));
    JmxDumper dumper = new JmxDumper(27961, "org.apache.zooKeeperService", "org.apache.zookeeper.server.ConnectionBean", "org.apache.ZooKeeperService:name0=*,name1=Connections,name2=*,name3=*", 1000, fields, operations, "/tmp/1.csv");
    Thread.currentThread().join();
    */
    int ret = processCommandLineArgs(args);
    System.exit(ret);
  }
}
package edu.cmu.neuron2;

import java.io.*;
import java.net.*;
import java.util.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.logging.*;
import java.util.logging.Formatter;

import org.apache.mina.common.ByteBuffer;
import org.apache.mina.common.IoHandlerAdapter;
import org.apache.mina.common.IoServiceConfig;
import org.apache.mina.common.IoSession;
import org.apache.mina.transport.socket.nio.DatagramAcceptor;
import org.apache.mina.transport.socket.nio.DatagramAcceptorConfig;

import edu.cmu.neuron2.RonTest.RunMode;

/**
 * java.util.logging Filter that drops records whose logger-name "label"
 * (the segment after the first '.') is in a configured suppression set.
 * The special label "all" suppresses every record.
 */
class LabelFilter implements Filter {
    private final HashSet<String> suppressedLabels;
    // Precomputed: true when the set contains "all".
    private final boolean suppressAll;
    public LabelFilter(HashSet<String> suppressedLabels) {
        this.suppressedLabels = suppressedLabels;
        this.suppressAll = suppressedLabels.contains("all");
    }
    public boolean isLoggable(LogRecord record) {
        if (suppressAll) return false;
        // Logger names look like "<node>.<label>"; a name with no '.' is
        // never suppressed.
        String[] parts = record.getLoggerName().split("\\.", 2);
        return parts.length == 1 || !suppressedLabels.contains(parts[1]);
    }
}

/**
 * A single node in the NeuRon overlay. Node id 0 acts as the coordinator
 * (membership server); all other nodes join through it, probe each other,
 * and exchange routing recommendations. NOTE(review): NodeInfo, GridNode,
 * Msg and the other message types are project-declared elsewhere — their
 * exact contracts are assumed, not visible here.
 */
public class NeuRonNode extends Thread {
    private final ExecutorService executor;
    private final ScheduledExecutorService scheduler;
    // This node's id; may be reassigned by the coordinator's Init message.
    public short myNid;
    private final boolean isCoordinator;
    private final String coordinatorHost;
    private final int basePort;
    private final AtomicBoolean doQuit = new AtomicBoolean();
    private Logger logger;
    // Current membership view, keyed by node id.
    private final Hashtable<Short, NodeInfo> nodes = new Hashtable<Short, NodeInfo>();
    // probeTable[i] = node members[i]'s probe table. value
    // at index j in row i is the link latency between nodes members[i]->members[j].
    short[][] probeTable;
    // The routing grid: members laid out row-major, sqrt(n) x ~sqrt(n).
    private GridNode[][] grid;
    private short numCols, numRows;
    private final HashSet<GridNode> overflowNeighbors = new HashSet<GridNode>();
    // dst node id -> chosen next hop id.
    private Hashtable<Short, Short> nextHopTable = new Hashtable<Short, Short>();
    // dst node id -> set of all viable next hops.
    private Hashtable<Short, HashSet<Short>> nextHopOptions = new Hashtable<Short, HashSet<Short>>();
    private final IoServiceConfig cfg = new DatagramAcceptorConfig();
    // Reverse lookup used by the coordinator to re-admit known addresses.
    private final Hashtable<InetAddress, Short> addr2id = new Hashtable<InetAddress, Short>();
    private short currentStateVersion;
    public final int neighborBroadcastPeriod;
    public final int probePeriod;
    private final NodeInfo coordNode;
    private final DatagramSocket sendSocket;
    private final RunMode mode;
    private final short numNodesHint;
    private final Semaphore semAllJoined;
    private final InetAddress myCachedAddr;
    private ArrayList<Short> cachedMemberNids = new ArrayList<Short>(); // sorted list of members
    private short cachedMemberNidsVersion;
    private final boolean blockJoins;
    private final boolean capJoins;
    private final int joinTimeLimit; // seconds
    private final int dumpPeriod;
    private final FileHandler fh;
    // Original id as assigned at construction; survives coordinator renumbering.
    private final short origNid;
    private final short sessionId;
    private final int failoverTimeout;
    private final int membershipBroadcastPeriod;
    // Default set of log labels to suppress (see LabelFilter).
    private static final String defaultLabelSet = "send.Ping recv.Ping stale.Ping send.Pong recv.Pong stale.Pong send.Measurement send.RoutingRecs";
    private final Hashtable<Short,Long> lastSentMbr = new Hashtable<Short,Long>();
    // EWMA weight applied to new RTT samples (see Pong handling).
    private final double smoothingFactor;

    /**
     * Installs a LabelFilter on the handler, reading the space-separated
     * label list from the named property (falling back to defaultLabelSet).
     */
    private void createLabelFilter(Properties props, String labelSet, Handler handler) {
        String[] labels = props.getProperty(labelSet, defaultLabelSet).split(" ");
        final HashSet<String> suppressedLabels = new HashSet<String>(Arrays.asList(labels));
        handler.setFilter(new LabelFilter(suppressedLabels));
    }

    /**
     * Builds a node. All tunables come from {@code props}; the constructor
     * continues below with socket/logging setup.
     *
     * @param id       initial node id (0 = coordinator)
     * @param numNodes expected cluster size hint
     * @param semJoined released once all expected nodes have joined (may be null)
     */
    public NeuRonNode(short id, ExecutorService executor, ScheduledExecutorService scheduler,
                        Properties props, short numNodes, Semaphore semJoined,
                        InetAddress myAddr, String coordinatorHost, NodeInfo coordNode) {
        if ((coordNode == null) || (coordNode.addr == null)){
            throw new RuntimeException("coordNode is null!");
        }
        dumpPeriod = Integer.parseInt(props.getProperty("dumpPeriod", "60"));
        myNid = id;
        origNid = id;
        currentStateVersion = (short)0;
        cachedMemberNidsVersion = (short)-1;
        joinTimeLimit = Integer.parseInt(props.getProperty("joinTimeLimit", "10")); // wait up to 10 secs by default for coord to be available
        membershipBroadcastPeriod = Integer.parseInt(props.getProperty("membershipBroadcastPeriod", "0"));
        // NOTE note that you'll probably want to set this, always!
sessionId = Short.parseShort(props.getProperty("sessionId", "0")); blockJoins = Boolean.valueOf(props.getProperty("blockJoins", "true")); capJoins = Boolean.valueOf(props.getProperty("capJoins", "true")); this.coordinatorHost = coordinatorHost; this.coordNode = coordNode; basePort = Integer.parseInt(props.getProperty("basePort", "9000")); mode = RunMode.valueOf(props.getProperty("mode", "sim").toUpperCase()); neighborBroadcastPeriod = Integer.parseInt(props.getProperty("neighborBroadcastPeriod", "60")); // for simulations we can safely reduce the probing frequency, or even turn it off probePeriod = Integer.parseInt(props.getProperty("probePeriod", "15")); timeout = Integer.parseInt(props.getProperty("timeout", "" + probePeriod * 3)); failoverTimeout = Integer.parseInt(props.getProperty("failoverTimeout", "" + timeout)); scheme = RoutingScheme.valueOf(props.getProperty("scheme", "SIMPLE").toUpperCase()); smoothingFactor = Double.parseDouble(props.getProperty("smoothingFactor", "0.9")); Formatter fmt = new Formatter() { public String format(LogRecord record) { StringBuilder buf = new StringBuilder(); buf.append(record.getMillis()).append(' ').append(new Date(record.getMillis())).append(" ").append( record.getLevel()).append(" ").append( record.getLoggerName()).append(": ").append( record.getMessage()).append("\n"); return buf.toString(); } }; Logger rootLogger = Logger.getLogger(""); rootLogger.getHandlers()[0].setFormatter(fmt); logger = Logger.getLogger("node" + myNid); createLabelFilter(props, "consoleLogFilter", rootLogger.getHandlers()[0]); try { String logFileBase = props.getProperty("logFileBase", "%t/scaleron-log-"); fh = new FileHandler(logFileBase + myNid, true); fh.setFormatter(fmt); createLabelFilter(props, "fileLogFilter", fh); logger.addHandler(fh); sendSocket = new DatagramSocket(); } catch (IOException ex) { throw new RuntimeException(ex); } this.executor = executor; this.scheduler = scheduler; probeTable = null; grid = null; numCols = numRows = 0; 
        isCoordinator = myNid == 0;
        numNodesHint = Short.parseShort(props.getProperty("numNodesHint", "" + numNodes));
        semAllJoined = semJoined;
        if (myAddr == null) {
            try {
                myCachedAddr = InetAddress.getLocalHost();
            } catch (UnknownHostException ex) {
                throw new RuntimeException(ex);
            }
        } else {
            myCachedAddr = myAddr;
        }
        // Each node listens on basePort + its id.
        myPort = basePort + myNid;
    }

    private final int myPort;

    /**
     * Applies the coordinator's Init message: adopts the assigned node id,
     * re-binds the logger, and installs the initial membership view.
     * An id of -1 means the network is full and this node must abort.
     */
    private void handleInit(Init im) {
        if (im.id == -1) {
            throw new PlannedException("network is full; aborting");
        }
        System.out.println("Had nodeId = " + myNid + ". New nodeId = " + im.id);
        myNid = im.id;
        logger = Logger.getLogger("node_" + myNid);
        logger.addHandler(fh);
        currentStateVersion = im.version;
        log("got from coord => Init " + im.id);
        updateMembers(im.members);
    }

    // Debug helper: renders a byte array as "[ b0 b1 ... ]".
    private String bytes2string(byte[] buf) {
        String s = "[ ";
        for (byte b : buf) {
            s += b + " ";
        }
        s += "]";
        return s;
    }

    // Thin wrappers over java.util.logging at info/warning/severe levels.
    private void log(String msg) { logger.info(msg); }
    private void warn(String msg) { logger.warning(msg); }
    private void err(String msg) { logger.severe(msg); }

    // Logs a full stack trace at severe level.
    private void err(Exception ex) {
        StringWriter s = new StringWriter();
        PrintWriter p = new PrintWriter(s);
        ex.printStackTrace(p);
        err(s.toString());
    }

    /**
     * Used for logging data, such as neighbor lists.
     *
     * @param name - the name of the data, e.g.: "neighbors", "info"
     * @param value
     */
    private void log(String name, Object value) {
        // Sub-logger "<node>.<name>" so LabelFilter can suppress by label.
        Logger.getLogger(logger.getName() + "." + name).info(value.toString());
    }

    /** Thrown for expected, deliberate shutdown paths (full network, join timeout). */
    public static final class PlannedException extends RuntimeException {
        public PlannedException(String msg) { super(msg); }
    }

    // Records the exception (planned or not) that terminated run().
    public final AtomicReference<Exception> failure = new AtomicReference<Exception>();

    /**
     * Thread entry point: delegates to run2() and captures any failure,
     * always releasing semAllJoined so a waiting test harness is not stuck.
     */
    public void run() {
        try {
            run2();
        } catch (PlannedException ex) {
            log(ex.getMessage());
            failure.set(ex);
            if (semAllJoined != null) semAllJoined.release();
        } catch (Exception ex) {
            err(ex);
            failure.set(ex);
            if (semAllJoined != null) semAllJoined.release();
        }
    }

    // Next id the coordinator will hand out (id 0 is the coordinator itself).
    private short nextNodeId = 1;

    //private Runnable makeSafeRunnable(Runnable r) {
    //    return new Runnable() {
    //        public void run() {
    //            try {
    //                run();
    //            } catch (Exception ex) {
    //                err(ex);

    /**
     * Main protocol loop. Coordinator: accept joins over TCP, assign ids,
     * broadcast membership. Member: join via the coordinator, then start
     * probing and broadcasting (continued below).
     */
    public void run2() {
        if (isCoordinator) {
            try {
                // Periodic status dump of the member list and grid.
                scheduler.scheduleAtFixedRate(new Runnable() {
                    public void run() {
                        try {
                            synchronized (NeuRonNode.this) {
                                log("checkpoint: " + nodes.size() + " nodes");
                                printMembers();
                                printGrid();
                            }
                        } catch (Exception ex) {
                            err(ex);
                        }
                    }
                }, dumpPeriod, dumpPeriod, TimeUnit.SECONDS);
                // Batched membership broadcast: only fires when membersChanged was set.
                if (membershipBroadcastPeriod > 0) {
                    scheduler.scheduleAtFixedRate(new Runnable() {
                        public void run() {
                            synchronized (NeuRonNode.this) {
                                try {
                                    if (membersChanged.get()) {
                                        broadcastMembershipChange((short) 0);
                                    }
                                } catch (Exception ex) {
                                    // failure-oblivious: swallow any exceptions and
                                    // just try resuming
                                    err(ex);
                                }
                            }
                        }
                    }, 1, membershipBroadcastPeriod, TimeUnit.SECONDS);
                }
                // do not remove this for now
                Thread.sleep(2000);
                // UDP endpoint for pings/measurements from members.
                new DatagramAcceptor().bind(new InetSocketAddress(InetAddress
                        .getLocalHost(), basePort), new CoordReceiver(), cfg);
                System.out.println("allllll");
                // TCP endpoint for the blocking join handshake.
                ServerSocket ss = new ServerSocket(basePort);
                try {
                    // TODO the coord should also be kept aware of who's alive
                    // and who's not. this means we need to ping the coord, and
                    // the coord needs to maintain timeouts like everyone else.
                    ss.setReuseAddress(true);
                    // Short accept timeout so doQuit is polled regularly.
                    ss.setSoTimeout(1000);
                    log("Beep!");
                    final Hashtable<Short, Socket> incomingSocks = new Hashtable<Short, Socket>();
                    while (!doQuit.get()) {
                        final Socket incoming;
                        try {
                            incoming = ss.accept();
                        } catch (SocketTimeoutException ex) {
                            continue;
                        }
                        final short nodeId;
                        // this is OK since nid orderings are irrelevant
                        synchronized (NeuRonNode.this) {
                            nodeId = nextNodeId++;
                        }
                        // Handle each join on the executor so accept() keeps running.
                        executor.submit(new Runnable() {
                            public void run() {
                                try {
                                    Join msg = (Join) new Serialization().deserialize(new DataInputStream(incoming.getInputStream()));
                                    synchronized (NeuRonNode.this) {
                                        System.out.println("delta");
                                        incomingSocks.put(nodeId, incoming);
                                        if (!capJoins || nodes.size() < numNodesHint) {
                                            addMember(nodeId, msg.addr, msg.port, msg.src);
                                            if (nodes.size() == numNodesHint) {
                                                semAllJoined.release();
                                            }
                                            if (blockJoins) {
                                                // Hold every join socket open until the whole
                                                // cluster has joined, then Init everyone at once.
                                                if (nodes.size() >= numNodesHint) {
                                                    // time to broadcast ims to everyone
                                                    ArrayList<NodeInfo> memberList = getMemberInfos();
                                                    for (NodeInfo m : memberList) {
                                                        try {
                                                            doit(incomingSocks, memberList, m.id);
                                                        } finally {
                                                            incomingSocks.get(m.id).close();
                                                        }
                                                    }
                                                }
                                            } else {
                                                // Non-blocking mode: Init this node now and
                                                // notify the rest of the change.
                                                doit(incomingSocks, getMemberInfos(), nodeId);
                                                broadcastMembershipChange(nodeId);
                                            }
                                        } else if (capJoins && nodes.size() == numNodesHint) {
                                            // Network full: reply with id -1 so the joiner aborts.
                                            Init im = new Init();
                                            im.src = myNid;
                                            im.id = -1;
                                            im.members = new ArrayList<NodeInfo>();
                                            sendit(incoming, im);
                                        }
                                    }
                                } catch (Exception ex) {
                                    System.out.println("epsilon");
                                    err(ex);
                                } finally {
                                    try {
                                        if (!blockJoins) incoming.close();
                                    } catch (IOException ex) {
                                        err(ex);
                                    }
                                }
                            }
                            // Sends an Init (id + current members) down a held join socket.
                            private void doit(
                                    final Hashtable<Short, Socket> incomingSocks,
                                    ArrayList<NodeInfo> memberList, short nid)
                                    throws IOException {
                                Init im = new Init();
                                im.id = nid;
                                im.src = myNid;
                                im.version = currentStateVersion;
                                im.members = memberList;
                                sendit(incomingSocks.get(nid), im);
                            }
                            // Serializes an Init onto the socket's output stream.
                            private void sendit(
                                    Socket socket, Init im) throws IOException {
                                DataOutputStream dos = new DataOutputStream(socket.getOutputStream());
                                new Serialization().serialize(im, dos);
                                dos.flush();
                            }
                        });
                    }
                } finally {
                    System.out.println("gamma");
                    ss.close();
                    log("coord done");
                }
            } catch (Exception ex) {
                System.out.println("beta");
                throw new RuntimeException(ex);
            }
        } else {
            // Member path: join through the coordinator over TCP.
            try {
                Socket s = null;
                long startTime = System.currentTimeMillis();
                int count = 0;
                // Retry connecting once per second, up to joinTimeLimit attempts.
                while (true) {
                    if (count++ > joinTimeLimit) {
                        throw new PlannedException("exceeded join try limit; aborting");
                    }
                    // if ((System.currentTimeMillis() - startTime) / 1000 > joinTimeLimit) {
                    //     throw new PlannedException("exceeded join time limit; aborting");
                    // Connect to the co-ordinator
                    try {
                        s = new Socket(coordinatorHost, basePort);
                        //if (count > 0) throw new Exception();
                        break;
                    } catch (Exception ex) {
                        log("couldn't connect to coord, retrying in 1 sec: " + ex.getMessage());
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException ie) {
                        }
                    }
                }
                try {
                    // talk to coordinator
                    log("sending join to coordinator at " + coordinatorHost + ":" + basePort);
                    Join msg = new Join();
                    msg.addr = myCachedAddr;
                    msg.src = myNid; // informs coord of orig id
                    msg.port = myPort;
                    DataOutputStream dos = new DataOutputStream(s.getOutputStream());
                    new Serialization().serialize(msg, dos);
                    dos.flush();
                    log("waiting for InitMsg");
                    // Read the whole reply (until EOF) before deserializing.
                    ByteArrayOutputStream minibaos = new ByteArrayOutputStream();
                    byte[] minibuf = new byte[8192];
                    int amt;
                    while ((amt = s.getInputStream().read(minibuf)) > 0) {
                        minibaos.write(minibuf, 0, amt);
                    }
                    byte[] buf = minibaos.toByteArray();
                    try {
                        Init im = (Init) new Serialization().deserialize(new DataInputStream(new ByteArrayInputStream(buf)));
                        handleInit(im);
                    } catch (Exception ex) {
                        // Dump the raw bytes to aid debugging a bad handshake.
                        err("got buffer: " + bytes2string(buf));
                        throw ex;
                    }
                } finally {
                    try {
                        s.close();
                    } catch (Exception ex) {
                        throw new RuntimeException(ex);
                    }
                }
                // wait for coordinator to announce my existence to others
                Thread.sleep(membershipBroadcastPeriod * 1000);
            } catch (PlannedException ex) {
                throw ex;
            } catch (SocketException ex) {
                log(ex.getMessage());
                return;
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
            // now start accepting pings and other msgs,
            // also start sending probes and sending out other msgs
            try {
                new DatagramAcceptor().bind(new InetSocketAddress(myCachedAddr, myPort),
                        new Receiver(), cfg);
                log("server started on " + myCachedAddr + ":" + (basePort + myNid));
                // Periodic liveness probes to every member and the coordinator.
                scheduler.scheduleAtFixedRate(new Runnable() {
                    public void run() {
                        synchronized (NeuRonNode.this) {
                            try {
                                pingAll();
                            } catch (Exception ex) {
                                // failure-oblivious: swallow any exceptions and
                                // just try resuming
                                err(ex);
                            }
                        }
                    }
                }, 1, probePeriod, TimeUnit.SECONDS);
                // Periodic measurement + routing-recommendation broadcasts.
                scheduler.scheduleAtFixedRate(new Runnable() {
                    public void run() {
                        synchronized (NeuRonNode.this) {
                            try {
                                broadcastMeasurements();
                                if (scheme != RoutingScheme.SIMPLE) {
                                    if (scheme == RoutingScheme.SQRT_SPECIAL) {
                                        broadcastRecommendations2();
                                    } else {
                                        broadcastRecommendations();
                                    }
                                }
                            } catch (Exception ex) {
                                // failure-oblivious: swallow any exceptions and
                                // just try resuming
                                err(ex);
                            }
                        }
                    }
                }, 1, neighborBroadcastPeriod, TimeUnit.SECONDS);
                if (semAllJoined != null) semAllJoined.release();
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
        }
    }

    // Node ids whose traffic we deliberately ignore (fault injection).
    private final HashSet<Short> ignored = new HashSet<Short>();

    public synchronized void ignore(short nid) {
        log("ignoring " + nid);
        ignored.add(nid);
        /* ArrayList<Short> sorted_nids = memberNids();
        probeTable[sorted_nids.indexOf(myNid)][sorted_nids.indexOf(nid)] = Short.MAX_VALUE;
        Short nextHop = nextHopTable.get(nid);
        if ((nextHop != null) && (nextHop == myNid)) {
            nextHopTable.remove(nid);
        }
        HashSet<Short> nhSet = nextHopOptions.get(nid);
        if (nhSet != null) {
            for (Iterator<Short> it = nhSet.iterator(); it.hasNext();) {
                if (it.next() == myNid) {
                    it.remove();
                }
            }
        } */
    }

    public synchronized void unignore(short nid) {
        log("unignoring " + nid);
        ignored.remove(nid);
    }

    /** Sends a Ping (carrying our original id and address) to every other member and the coordinator. */
    private void pingAll() {
        log("pinging");
        Ping ping = new Ping();
        ping.time = System.currentTimeMillis();
        NodeInfo tmp = nodes.get(myNid);
        ping.info = new NodeInfo();
        ping.info.id = origNid; // note that the ping info uses the original id
        ping.info.addr = tmp.addr;
        ping.info.port = tmp.port;
        for (short nid : nodes.keySet())
            if (nid != myNid)
                sendObject(ping, nid);
        /* send ping to the membership server (co-ord) - this might not be
         * required if everyone makes their own local decision. i.e. each node
         * notices that no other node can reach a node (say X), then each node
         * sends the co-ord a msg saying that "i think X is dead". The sending
         * of this msg can be staggered in time so that the co-ord is not
         * flooded with msgs. The co-ordinator can then make a decision on
         * keeping or removing node Y from the membership. On seeing a
         * subsequent msg from the co-ord that X has been removed from the
         * overlay, if a node Y has not sent its "i think X is dead" msg, it
         * can cancel this event. */
        sendObject(ping, (short)0);
    }

    /** Decodes a MINA ByteBuffer into a Msg, or null on any decode failure. */
    private Msg deserialize(Object o) {
        ByteBuffer buf = (ByteBuffer) o;
        byte[] bytes = new byte[buf.limit()];
        buf.get(bytes);
        try {
            return (Msg) new Serialization().deserialize(new DataInputStream(new ByteArrayInputStream(bytes)));
        } catch (Exception ex) {
            err("deserialization exception: " + ex.getMessage());
            return null;
        }
    }

    // Coordinator bookkeeping: assigned id -> original id / hostname.
    private Hashtable<Short,Short> id2id = new Hashtable<Short,Short>();
    private Hashtable<Short,String> id2name = new Hashtable<Short,String>();

    /**
     * coordinator's msg handling loop
     */
    public final class CoordReceiver extends IoHandlerAdapter {
        @Override
        public void messageReceived(IoSession session, Object obj) throws Exception {
            try {
                Msg msg = deserialize(obj);
                if (msg == null) return;
                synchronized (NeuRonNode.this) {
                    if (msg.session == sessionId) {
                        if (nodes.containsKey(msg.src)) {
                            log("recv." + msg.getClass().getSimpleName(),
                                "from " + msg.src + " (oid " + id2id.get(msg.src) + ", " + id2name.get(msg.src) + ")");
                            // Any message from a live member refreshes its liveness timer.
                            resetTimeoutAtCoord(msg.src);
                            if (msg.version < currentStateVersion) {
                                log("updating stale membership");
                                sendMembership(msg.src);
                            }
                            if (msg instanceof Ping) {
                                // ignore the ping
                            } else {
                                throw new Exception("can't handle that message type");
                            }
                        } else {
                            // Unknown sender: a Ping from a previously-seen address
                            // re-admits the node; a brand-new address gets a fresh id.
                            if ((!capJoins || nodes.size() < numNodesHint) &&
                                    msg instanceof Ping) {
                                Ping ping = (Ping) msg;
                                log("dead." + ping.getClass().getSimpleName(),
                                    "from '" + ping.src + "' " + ping.info.addr.getHostName());
                                Short mappedId = addr2id.get(ping.info.addr);
                                short nid;
                                if (mappedId == null) {
                                    nid = nextNodeId++;
                                    addMember(nid, ping.info.addr, ping.info.port, ping.info.id);
                                    broadcastMembershipChange(nid);
                                } else {
                                    nid = mappedId;
                                }
                                Init im = new Init();
                                im.id = nid;
                                im.src = myNid;
                                im.version = currentStateVersion;
                                im.members = getMemberInfos();
                                sendObject(im, nid);
                            } else {
                                log("dead." + msg.getClass().getSimpleName(),
                                    "from '" + msg.src + "'");
                            }
                        }
                    } else {
                        // log("recv." + msg.getClass().getSimpleName(), "ignored from " + msg.src + " session " + msg.session);
                    }
                }
            } catch (Exception ex) {
                err(ex);
            }
        }
    }

    /**
     * receiver's msg handling loop
     */
    public final class Receiver extends IoHandlerAdapter {
        @Override
        public void messageReceived(IoSession session, Object obj) throws Exception {
            try {
                Msg msg = deserialize(obj);
                if (msg == null) return;
                synchronized (NeuRonNode.this) {
                    // Accept messages from the coordinator (src 0) or known members
                    // in our session; Pings are always accepted for RTT collection.
                    if ((msg.src == 0 || nodes.containsKey(msg.src)) &&
                            msg.session == sessionId || msg instanceof Ping) {
                        //if (ignored.contains(msg.src)) return;
                        log("recv." + msg.getClass().getSimpleName(), "from " + msg.src);
                        // always act on pings/pong (for rtt collection)
                        if (msg instanceof Ping) {
                            Ping ping = ((Ping) msg);
                            Pong pong = new Pong();
                            pong.time = ping.time;
                            sendObject(pong, ping.info);
                        } else if (msg instanceof Pong) {
                            Pong pong = (Pong) msg;
                            short rtt = (short) (System.currentTimeMillis() - pong.time);
                            log("latency", "one way latency to " + pong.src + " = " + rtt/2);
                        }
                        // for other messages, make sure their state version is
                        // the same as ours
                        if (msg.version > currentStateVersion) {
                            if (msg instanceof Membership) {
                                currentStateVersion = msg.version;
                                Membership m = (Membership) msg;
                                myNid = m.yourId;
                                updateMembers(m.members);
                            } else {
                                // i am out of date - request latest membership
                                // sendObject(new MemberPoll(), 0);
                                // commented out - membership updates now
                                // implicitly handled via pings
                            }
                        } else if (msg.version == currentStateVersion) {
                            if (msg instanceof Membership) {
                                Membership m = (Membership) msg;
                                myNid = m.yourId;
                                updateMembers(m.members);
                            } else if (msg instanceof Measurements) {
                                log(((Measurements) msg).toString());
                                updateNetworkState((Measurements) msg);
                            } else if (msg instanceof RoutingRecs) {
                                log(((RoutingRecs) msg).toString());
                                handleRecommendation(((RoutingRecs) msg).recs);
                                log(toStringNextHopTable());
                            } else if (msg instanceof Ping) {
                                // nothing to do, already handled above
                            } else if (msg instanceof Pong) {
                                Pong pong = (Pong) msg;
                                resetTimeoutAtNode(pong.src);
                                short rtt = (short) (System.currentTimeMillis() - pong.time);
                                ArrayList<Short> sortedNids = memberNids();
                                int i = sortedNids.indexOf(myNid),
                                    j = sortedNids.indexOf(pong.src);
                                // EWMA-smooth the one-way latency estimate (rtt/2).
                                probeTable[i][j] = (short) ( smoothingFactor * (rtt / 2) +
                                        (1 - smoothingFactor) * probeTable[i][j]);
                            } else if (msg instanceof PeeringRequest) {
                                // Another node chose us as a failover neighbor.
                                PeeringRequest pr = (PeeringRequest) msg;
                                GridNode newNeighbor = new GridNode();
                                newNeighbor.id = pr.src;
                                newNeighbor.isAlive = true;
                                overflowNeighbors.add(newNeighbor);
                            } else if (msg instanceof Init) {
                                handleInit((Init) msg);
                            }
                            else {
                                throw new Exception("can't handle that message type");
                            }
                        } else {
                            log("stale." + msg.getClass().getSimpleName(),
                                "from " + msg.src + " version " + msg.version);
                        }
                    } else {
                        // log("ignored." + msg.getClass().getSimpleName(), "ignored from " + msg.src + " session " + msg.session);
                    }
                }
            } catch (Exception ex) {
                err(ex);
            }
        }
    }

    /**
     * If we don't hear from a node for this number of seconds, then consider
     * them dead.
     */
    private int timeout;

    // Pending liveness-expiry tasks, one per node id.
    private Hashtable<Short, ScheduledFuture<?>> timeouts = new Hashtable<Short, ScheduledFuture<?>>();

    /**
     * a coord-only method
     *
     * @param nid
     */
    private void resetTimeoutAtCoord(final short nid) {
        if (nodes.containsKey(nid)) {
            // Replace any pending expiry for this node with a fresh one.
            ScheduledFuture<?> oldFuture = timeouts.get(nid);
            if (oldFuture != null) {
                oldFuture.cancel(false);
            }
            ScheduledFuture<?> future = scheduler.schedule(new Runnable() {
                public void run() {
                    try {
                        synchronized (NeuRonNode.this) {
                            // No traffic within `timeout` seconds: evict the node.
                            removeMember(nid);
                        }
                    } catch (Exception ex) {
                        err(ex);
                    }
                }
            }, timeout, TimeUnit.SECONDS);
            timeouts.put(nid, future);
        }
    }

    /**
     * Member-side liveness: marks the node alive in the grid now, and
     * schedules it to be marked dead (with its probe latency maxed out)
     * after failoverTimeout seconds of silence.
     */
    private void resetTimeoutAtNode(final short nid) {
        if (nodes.containsKey(nid)) {
            ScheduledFuture<?> oldFuture = timeouts.get(nid);
            if (oldFuture != null) {
                oldFuture.cancel(false);
            }
            for (short i = 0; i < numRows; i++) {
                for (short j = 0; j < numCols; j++) {
                    if (grid[i][j].id == nid) {
                        grid[i][j].isAlive = true;
                    }
                }
            }
            ScheduledFuture<?> future = scheduler.schedule(new Runnable() {
                public void run() {
                    try {
                        synchronized (NeuRonNode.this) {
                            for (short i = 0; i < numRows; i++) {
                                for (short j = 0; j < numCols; j++) {
                                    if (grid[i][j].id == nid) {
                                        grid[i][j].isAlive = false;
                                    }
                                }
                            }
                            ArrayList<Short> sorted_nids = memberNids();
                            // Treat the link to a dead node as unreachable.
                            probeTable[sorted_nids.indexOf(myNid)][sorted_nids.indexOf(nid)] = Short.MAX_VALUE;
                        }
                    } catch (Exception ex) {
                        err(ex);
                    }
                }
            }, failoverTimeout, TimeUnit.SECONDS);
            timeouts.put(nid, future);
        }
    }

    /**
     * a coordinator-only method
     */
    private NodeInfo addMember(short newNid, InetAddress addr, int port, short origId) {
        NodeInfo info = new NodeInfo();
        info.id = newNid;
        info.addr = addr;
        info.port = port;
        nodes.put(newNid, info);
        id2id.put(newNid, origId);
        id2name.put(newNid, addr.getHostName());
        addr2id.put(addr, newNid);
        log("adding new node: " + newNid + " oid " + origId + " name " + id2name.get(newNid));
        // Membership changed: bump the version so members resync.
        currentStateVersion++;
        resetTimeoutAtCoord(newNid);
        return info;
    }

    /** Sorted member ids, cached per membership version. */
    private ArrayList<Short> memberNids() {
        if ((cachedMemberNidsVersion < currentStateVersion) ||
                (cachedMemberNids == null) ) {
            //log("NEW cachedMemberNids (" + cachedMemberNidsVersion + ", " + currentStateVersion);
            cachedMemberNidsVersion = currentStateVersion;
            cachedMemberNids = new ArrayList<Short>(nodes.keySet());
            Collections.sort(cachedMemberNids);
            //log("Size = " + cachedMemberNids.size());
        }
        return cachedMemberNids;
    }

    // Uncached variant; used while the view is being replaced.
    private ArrayList<Short> getUncachedmemberNids() {
        ArrayList<Short> nids = new ArrayList<Short>(nodes.keySet());
        Collections.sort(nids);
        return nids;
    }

    private final AtomicBoolean membersChanged = new AtomicBoolean();

    /**
     * a coordinator-only method
     *
     * @param exceptNid - if this is 0, then we must have been called by the
     * periodic membership-broadcast daemon thread, so actually send stuff;
     * otherwise, we should just signal to the daemon thread a pending change
     */
    private void broadcastMembershipChange(short exceptNid) {
        if (exceptNid == 0 || membershipBroadcastPeriod == 0) {
            for (short nid : nodes.keySet()) {
                if (nid != exceptNid) {
                    sendMembership(nid);
                }
            }
        }
    }

    ArrayList<NodeInfo> getMemberInfos() {
        return new ArrayList<NodeInfo>(nodes.values());
    }

    /**
     * a coordinator-only method
     *
     * throttles these messages so they're sent at most once per second
     */
    private void sendMembership(short nid) {
        Membership msg = new Membership();
        msg.yourId = nid;
        //Long last = lastSentMbr.get(nid);
        //if (last == null || System.currentTimeMillis() - last.longValue() > 1000) {
        //    scheduler.schedule();
        //} else {
        //    scheduler.schedule();
        //lastSentMbr.put(msg.src, msg.id);
        msg.members = getMemberInfos();
        sendObject(msg, nid);
    }

    /**
     * a coordinator-only method
     *
     * @param nid
     */
    private void removeMember(short nid) {
        log("removing dead node " + nid + " oid " + id2id.get(nid) + " " + id2name.get(nid));
        NodeInfo info = nodes.remove(nid);
        Short mid = addr2id.remove(info.addr);
        assert mid != null;
        currentStateVersion++;
        broadcastMembershipChange(nid);
    }

    /**
     * Installs a new membership view: rebuilds the next-hop tables (dropping
     * entries whose next hop is no longer a member), then rebuilds the grid
     * and probe table from the old view.
     */
    private void updateMembers(List<NodeInfo> newNodes) {
        List<Short> oldNids = getUncachedmemberNids();
        nodes.clear();
        for (NodeInfo node : newNodes) {
            nodes.put(node.id, node);
        }
        Hashtable<Short, Short> newNextHopTable = new Hashtable<Short, Short>(nodes.size());
        Hashtable<Short, HashSet<Short>> newNextHopOptions = new Hashtable<Short, HashSet<Short>>(nodes.size());
        for (NodeInfo node : newNodes) {
            if (node.id != myNid) {
                Short nextHop = nextHopTable.get(node.id);
                if (nextHop == null) {
                    // new node !
                    /* newNextHopTable.put(node.id, myNid);
                    HashSet<Short> nextHops = new HashSet<Short>();
                    nextHops.add(myNid);
                    newNextHopOptions.put(node.id, nextHops); */
                } else {
                    // check if this old next hop is in the new membership list
                    if (nodes.get(nextHop) != null) {
                        // we have some next hop that is alive - leave it as is
                        newNextHopTable.put(node.id, nextHop);
                    } else {
                        // the next hop vanished. i am next hop to this node now
                        /* newNextHopTable.put(node.id, myNid); */
                    }
                    // of all the possible next hop options to the node,
                    // remove those that are dead.
HashSet<Short> nextHops = nextHopOptions.get(node.id); if (nextHops != null) { for (Iterator<Short> it = nextHops.iterator (); it.hasNext (); ) { Short someNextHop = it.next(); if (nodes.get(someNextHop) == null) { it.remove (); } } newNextHopOptions.put(node.id, nextHops); } else { /* HashSet<Short> nh = new HashSet<Short>(); nextHops.add(myNid); newNextHopOptions.put(node.id, nh); */ } } } else { //newNextHopTable.put(myNid, myNid); } } nextHopTable = newNextHopTable; // forget about the old one nextHopOptions = newNextHopOptions; repopulateGrid(oldNids); repopulateProbeTable(oldNids); // printGrid(); log("new state version: " + currentStateVersion); log(toStringNeighborList()); } private void repopulateGrid(List<Short> oldNids) { int oldNumCols = numCols; int oldNumRows = numRows; numCols = (short) Math.ceil(Math.sqrt(nodes.size())); numRows = (short) Math.ceil((double) nodes.size() / (double) numCols); Hashtable<Short, GridNode> oldNidsTable = new Hashtable<Short, GridNode>(oldNids.size()); for (short i = 0; i < oldNumRows; i++) { for (short j = 0; j < oldNumCols; j++) { oldNidsTable.put(grid[i][j].id, grid[i][j]); } } grid = new GridNode[numRows][numCols]; List<Short> nids = memberNids(); short m = 0; for (short i = 0; i < numRows; i++) { for (short j = 0; j < numCols; j++) { if (m >= nids.size()) { m = 0; } GridNode gn = oldNidsTable.get(nids.get(m)); if (gn == null) { gn = new GridNode(); gn.id = nids.get(m); gn.isAlive = true; } grid[i][j] = gn; m++; } } overflowNeighbors.clear(); // repopulateNeighborList(); } public static enum RoutingScheme { SIMPLE, SQRT, SQRT_NOFAILOVER, SQRT_RC_FAILOVER, SQRT_SPECIAL }; private final RoutingScheme scheme; private HashSet<GridNode> getNeighborList() { HashSet<GridNode> neighborSet = new HashSet<GridNode>(); // iterate over all grid positions, looking for self for (short r = 0; r < numRows; r++) { for (short c = 0; c < numCols; c++) { // this can happen at most twice if (scheme == RoutingScheme.SIMPLE) { 
neighborSet.add(grid[r][c]); } else if (grid[r][c].id == myNid) { // all the nodes in row i, and all the nodes in column j are // belong to us :) // O(N^1.5) :( // for each node in this row that's not me for (short x = 0; x < numCols; x++) { if (grid[r][x].id != myNid) { GridNode neighbor = grid[r][x]; // if they're alive, then add them a neighbor and move on if (neighbor.isAlive) { neighborSet.add(neighbor); } else if (scheme != RoutingScheme.SQRT_NOFAILOVER) { // for each node in this col that's not me, check for another failed node for (short i = 0; i < numRows; i++) { if ( (i != r) && (grid[i][c].isAlive == false) ) { /* (r, x) and (i, c) can't be reached * (i, x) needs a failover R node */ log("R node failover!"); boolean bFoundReplacement = false; // search for a failover in row i for (short j = 0; j < numCols; j++) { if ( (grid[i][j].id != myNid) && (grid[i][j].isAlive == true) ) { // request them as a failover and add them as a neighbor PeeringRequest pr = new PeeringRequest(); sendObject(pr, grid[i][j].id); neighborSet.add(grid[i][j]); log("Failing over (Row) to node " + grid[i][j] + " as R node for node " + grid[i][x]); bFoundReplacement = true; // TODO :: maybe maintain a table with failovers, or something similar to that. 
break; } } // if no failover found if ((bFoundReplacement == false) && ((scheme == RoutingScheme.SQRT_RC_FAILOVER) || (scheme == RoutingScheme.SQRT_SPECIAL))) { // search for a failover in column x for (short j = 0; j < numRows; j++) { if ( (grid[j][x].id != myNid) && (grid[j][x].isAlive == true) ) { // request them as a failover and add them as a neighbor PeeringRequest pr = new PeeringRequest(); sendObject(pr, grid[j][x].id); neighborSet.add(grid[j][x]); log("Failing over (Column) to node " + grid[j][x] + " as R node for node " + grid[i][x]); bFoundReplacement = true; break; } } } } } } } } for (short x = 0; x < numRows; x++) { if (grid[x][c].id != myNid) { neighborSet.add(grid[x][c]); } } } } } neighborSet.addAll(overflowNeighbors); return neighborSet; } private HashSet<GridNode> getOtherMembers() { HashSet<GridNode> memberSet = new HashSet<GridNode>(); for (short r = 0; r < numRows; r++) { for (short c = 0; c < numCols; c++) { if (grid[r][c].id != myNid) { memberSet.add(grid[r][c]); } } } return memberSet; } /** * expands the probes table to reflect changes in the new membership view. * assumes that "nodes" has been updated with the new membership. copies * over probe info from previous table for the nodes that are common across * the two membership views. */ private void repopulateProbeTable(List<Short> oldNids) { short newProbeTable[][] = new short[nodes.size()][nodes.size()]; int nodeIndex = memberNids().indexOf(myNid); for (int i = 0; i < memberNids().size(); i++) { if (i == nodeIndex) { newProbeTable[i][i] = 0; } else { newProbeTable[nodeIndex][i] = Short.MAX_VALUE; } } // copy over old probe data. 
        for (int i = 0; i < oldNids.size(); i++) {
            // Map old row/column indices to positions in the new sorted view;
            // -1 means that node is gone.
            int node_index = memberNids().indexOf(oldNids.get(i));
            if (node_index != -1) {
                for (int j = 0; j < oldNids.size(); j++) {
                    int node_index_2 = memberNids().indexOf(oldNids.get(j));
                    if (node_index_2 != -1)
                        newProbeTable[node_index][node_index_2] = probeTable[i][j];
                }
            }
        }
        probeTable = newProbeTable; // forget about the old one.
        /* // for testing
        if (nodeIndex == 0) {
            for (int i = 1; i < memberNids().size(); i++) {
                probeTable[nodeIndex][i] = 1;
            }
        } else {
            probeTable[nodeIndex][0] = 1;
        } */
    }

    // NOTE(review): method name has a typo ("Sting" for "String"); kept as-is
    // since renaming would break any callers outside this view.
    private String toStingMembership() {
        String s = new String("Membership for Node " + myNid + ". Membership = [");
        for (Short memberId : memberNids()) {
            s += memberId + ", ";
        }
        s += "]";
        return s;
    }

    private String toStringNeighborList() {
        String s = new String("Neighbors for Node " + myNid + ". Neighbors = [");
        HashSet<GridNode> neighbors = getNeighborList();
        for (GridNode neighbor : neighbors) {
            s += neighbor.id + ", ";
        }
        s += "]";
        return s;
    }

    private String toStringNextHopTable() {
        String s = new String("Next-hop table for " + myNid + " = [");
        for (Short node : nextHopTable.keySet()) {
            s += node + " -> " + nextHopTable.get(node) + "; ";
        }
        s += "]";
        return s;
    }

    private void printMembers() {
        String s = "members:";
        for (NodeInfo node : nodes.values()) {
            s += "\n  " + node.id + " oid " + id2id.get(node.id) + " " + id2name.get(node.id) + " " + node.port;
        }
        log(s);
    }

    // PERF
    private void printGrid() {
        String s = "grid:";
        if (grid != null) {
            for (int i = 0; i < numRows; i++) {
                s += "\n  ";
                for (int j = 0; j < numCols; j++) {
                    s += "\t" + grid[i][j];
                }
            }
        }
        log(s);
    }

    /** Logs this node's own row of the probe (latency) table. */
    private void printProbeTable() {
        ArrayList<Short> sorted_nids = memberNids();
        int myIndex = sorted_nids.indexOf(myNid);
        String s = new String("Adj table for " + myNid + " = [");
        for (int i = 0; i < probeTable[myIndex].length; i++) {
            s += sorted_nids.get(i) + ":" + probeTable[myIndex][i] + "; ";
        }
        s += "]";
        log(s);
    }

    /** Logs an arbitrary row of the probe table, identified by its offset. */
    private void printProbeTable(int probeTableOffset) {
        ArrayList<Short> sorted_nids = memberNids();
        String s = new String("Adj table for " + sorted_nids.get(probeTableOffset) + " = [");
        for (int i = 0; i < probeTable[probeTableOffset].length; i++) {
            s += sorted_nids.get(i) + ":" + probeTable[probeTableOffset][i] + "; ";
        }
        s += "]";
        log(s);
    }

    /**
     * for each neighbor, find for him the min-cost hops to all other neighbors,
     * and send this info to him (the intermediate node may be one of the
     * endpoints, meaning a direct route is cheapest)
     */
    private void broadcastRecommendations() {
        HashSet<GridNode> nl = getNeighborList();
        overflowNeighbors.clear();
        ArrayList<Short> sortedNids = memberNids();
        int totalSize = 0;
        for (GridNode src : nl) {
            int srcOffset = sortedNids.indexOf(src.id);
            ArrayList<Rec> recs = new ArrayList<Rec>();
            long min = Long.MAX_VALUE;
            int mini = -1;
            for (GridNode dst : nl) {
                int dstOffset = sortedNids.indexOf(dst.id);
                if (src.id != dst.id) {
                    for (int i = 0; i < probeTable[srcOffset].length; i++) {
                        // we assume bi-directional links for the time being
                        // i.e. link from a-> b is the same as b -> a
                        long cur = probeTable[srcOffset][i] + probeTable[dstOffset][i];
                        if (cur < min) {
                            min = cur;
                            mini = i;
                        }
                    }
                    Rec rec = new Rec();
                    rec.dst = dst.id;
                    rec.via = sortedNids.get(mini);
                    recs.add(rec);
                }
            }
            RoutingRecs msg = new RoutingRecs();
            msg.recs = recs;
            totalSize += sendObject(msg, src.id);
        }
        log("Sending recommendations to neighbors, total " + totalSize + " bytes. " + toStringNeighborList());
    }

    /**
     * for each neighbor, find for him the min-cost hops to *all other nodes* (as opposed to neighbors),
     * and send this info to him (the intermediate node may be one of the
     * endpoints, meaning a direct route is cheapest)
     */
    private void broadcastRecommendations2() {
        HashSet<GridNode> nl = getNeighborList();
        overflowNeighbors.clear();
        ArrayList<Short> sortedNids = memberNids();
        HashSet<GridNode> others = getOtherMembers();
        others.removeAll(nl);
        int totalSize = 0;
        for (GridNode src : nl) {
            int srcOffset = sortedNids.indexOf(src.id);
            ArrayList<Rec> recs = new ArrayList<Rec>();
            // src = neighbor, dst = neighbor
            for (GridNode dst : nl) {
                int dstOffset = sortedNids.indexOf(dst.id);
                long min = Long.MAX_VALUE;
                int mini = -1;
                if (src.id != dst.id) {
                    for (int i = 0; i < probeTable[srcOffset].length; i++) {
                        // we assume bi-directional links for the time being
                        // i.e. link from a-> b is the same as b -> a
                        long cur = probeTable[srcOffset][i] + probeTable[dstOffset][i];
                        if (cur < min) {
                            min = cur;
                            mini = i;
                        }
                    }
                    Rec rec = new Rec();
                    rec.dst = dst.id;
                    rec.via = sortedNids.get(mini);
                    recs.add(rec);
                }
            }
            // src = neighbor, dst != neighbor: only consider routes through
            // our neighbors (or the direct link) to non-neighbor destinations.
            for (GridNode dst : others) {
                int dstOffset = sortedNids.indexOf(dst.id);
                long min = probeTable[srcOffset][dstOffset];
                int mini = srcOffset;
                if (src.id != dst.id) {
                    for (GridNode neighborHop : nl) {
                        int neighborHopOffset = sortedNids.indexOf(neighborHop.id);
                        long curMin = probeTable[srcOffset][neighborHopOffset] +
                                probeTable[neighborHopOffset][dstOffset];
                        if (curMin < min) {
                            min = curMin;
                            mini = neighborHop.id;
                        }
                    }
                    Rec rec = new Rec();
                    rec.dst = dst.id;
                    rec.via = (short) sortedNids.get(mini);
                    recs.add(rec);
                }
            }
            RoutingRecs msg = new RoutingRecs();
            msg.recs = recs;
            totalSize += sendObject(msg, src.id);
        }
        log("Sending recommendations to neighbors, total " + totalSize + " bytes. 
" + toStringNeighborList()); } // /** // * caches names // */ // private String id2nm(short nid, String addr) { // if (nid >= 0) { // String name = id2name.get(nid); // if (name == null) { // NodeInfo node = nodes.get(nid); // if (node != null) { // name = node.getHostName(); // id2name.put(nid, name); // } else { // id2name.put(nid, node); // return name; // } else { // return addr; private Serialization senderSer = new Serialization(); private int sendObject(final Msg o, InetAddress addr, int port, short nid) { o.src = myNid; o.version = currentStateVersion; o.session = sessionId; try { /* * note that it's unsafe to re-use these output streams - at * least, i don't know how (reset() is insufficient) */ ByteArrayOutputStream baos = new ByteArrayOutputStream(); senderSer.serialize(o, new DataOutputStream(baos)); byte[] buf = baos.toByteArray(); String who = nid >= 0 ? "" + nid : (addr + ":" + port); log("send." + o.getClass().getSimpleName(), "to " + who + " len " + buf.length); if (!ignored.contains(nid)) { sendSocket.send(new DatagramPacket(buf, buf.length, addr, port)); } else { log("droppng packet sent to " + who); } return buf.length; } catch (Exception ex) { throw new RuntimeException(ex); } } private int sendObject(final Msg o, NodeInfo info, short nid) { return sendObject(o, info.addr, info.port, nid); } private int sendObject(final Msg o, NodeInfo info) { return sendObject(o, info, (short)-1); } private int sendObject(final Msg o, short nid) { return nid != myNid && !ignored.contains(nid) ? sendObject(o, nid == 0 ? 
coordNode : nodes.get(nid), nid) : 0; } private void broadcastMeasurements() { Measurements rm = new Measurements(); //rm.membershipList = memberNids(); ArrayList<Short> sorted_nids = memberNids(); rm.probeTable = probeTable[sorted_nids.indexOf(myNid)].clone(); rm.inflation = new byte[rm.probeTable.length]; HashSet<GridNode> nl = getNeighborList(); int totalSize = 0; for (GridNode neighbor : nl) { totalSize += sendObject(rm, neighbor.id); } log("Sending measurements to neighbors, total " + totalSize + " bytes. " + toStringNeighborList()); //printProbeTable(); } private void updateNetworkState(Measurements m) { int offset = memberNids().indexOf(m.src); // Make sure that we have the exact same world-views before proceeding, // as otherwise the neighbor sets may be completely different. Steps can // be taken to tolerate differences and to give best-recommendations // based on incomplete info, but it may be better to take a step back // and re-evaluate our approach to consistency as a whole first. For // now, this simple central-coordinator approach will at least work. //if (offset != -1 && m.membershipList.equals(memberNids())) { if (offset != -1) { for (int i = 0; i < m.probeTable.length; i++) { probeTable[offset][i] = m.probeTable[i]; } } //printProbeTable(offset); } private void handleRecommendation(ArrayList<Rec> recs) { if (recs != null) { for (Rec r : recs) { // For the algorithm where the R-points only send recos about their neighbors: // For each dst - only 2 nodes can tell us about the best hop to dst. // They are our R-points. Trust them and update your entry blindly. 
// For the algorithm where the R-points send recos about // everyone else this logic will have to be more complex // (like check if the reco was better) if ( isReachable(r.via) || ((r.via == myNid) && isReachable(r.dst)) ) { nextHopTable.put(r.dst, r.via); HashSet<Short> nextHops = nextHopOptions.get(r.dst); if (nextHops == null) { nextHops = new HashSet<Short>(); nextHopOptions.put(r.dst, nextHops); } nextHops.add(r.via); } } } countReachableNodes(); } private boolean isReachable(short nid) { ArrayList<Short> sortedNids = memberNids(); int i = sortedNids.indexOf(myNid); int j = sortedNids.indexOf(nid); if ((j != -1) && (probeTable[i][j] != Short.MAX_VALUE)) { return true; } return false; } private boolean isReachable(short neighbor_src, int nid_dst) { ArrayList<Short> sortedNids = memberNids(); int i = sortedNids.indexOf(neighbor_src); int j = sortedNids.indexOf(nid_dst); if ( (i != -1) && (j != -1) && (probeTable[i][j] != Short.MAX_VALUE)) { return true; } return false; } private void countReachableNodes() { // for each node in the system // reachability // - either it is in the nexthop table // or we can reach it directly // or we can reach it using our neighbors (in their adj table) // # of one-hop paths available // - is this needed? log(toStringNextHopTable()); ArrayList<Short> sortedNids = memberNids(); HashSet<GridNode> nl = getNeighborList(); log("yo " + sortedNids.size()); int numReachable = 0; for (Short nid : sortedNids) { Short nextHop = nextHopTable.get(nid); boolean canReach = false; if (nextHop != null) { if (nextHop != myNid) { if (isReachable(nextHop)) { numReachable++; canReach = true; } } else if (isReachable(nid)) { numReachable++; canReach = true; } else { // TODO :: what do we do here? // right now it falls through and searches in the other cases // as canReach is false // but is this correct? } } if (!canReach) { // there was no next hop to take you there // can you reach nid directly? 
if (isReachable(nid)) { numReachable++; canReach = true; } else { // look through the adj tables of your neighbors and // see if they can reach nid - even though nid might not be their neighbor // cycle through the probe table // and fine if neighbor is reachable from me && nid is reachable from neighbor for (GridNode neighbor : nl) { if (isReachable(neighbor.id) && isReachable(neighbor.id, nid)) { numReachable++; canReach = true; break; } } } } } // // remove log call later !!! // log(toStringNextHopTable()); // int reachable = 0; // int oneHopPathsAvailable = 0; // for (short nid : nextHopTable.keySet()) { // int nextHop = nextHopTable.get(nid); // if (nextHop != myNid) { // if (isReachable(nextHop)) { // reachable++; // } else if (isReachable(nid)) { // reachable++; // HashSet<Short> nhSet = nextHopOptions.get(nid); // if (nhSet != null) { // for (Iterator<Short> it = nhSet.iterator(); it.hasNext();) { // Short someNextHop = it.next(); // if (isReachable(someNextHop)) { // oneHopPathsAvailable++; // int j = sortedNids.indexOf(nid); // // TODO :: // // cycle through the probe table - if pt[me][someNeighbor] + pt[someNeighbor][nid] is not infinity // // then add someNeighbor to the set of oneHops available if it is not already in the nhSet // /* // // this crap is needed because if eveyone but one member (X) in your row is unreachable // // then there is no one to tell you about X // Set<Short> knownMembers = new HashSet<Short>(nodes.keySet()); // Set<Short> recoBasedReachableMembers = nextHopTable.keySet(); // // knownMembers.removeAll(recoBasedReachableMembers); // for (short nid : knownMembers) { // if (!ignored.contains(nid) && (nid != myNid)) { // reachable++; // oneHopPathsAvailable++; // } // } // */ log("Reachability Count = " + numReachable + " of " + (nodes.size() - 1) + " nodes in 1 or less hops."); //int avgOneHopsAvailable = oneHopPathsAvailable / reachable; //log("Avg # of one hop or direct paths available = " + avgOneHopsAvailable); } public void 
quit() { this.doQuit.set(true); } } class GridNode { public short id; public boolean isAlive; public String toString() { return id + (isAlive ? "(up)" : "(DOWN)"); } public int hashCode() { return new Integer(id).hashCode(); } public boolean equals(Object other) { if (other != null && getClass() == other.getClass()) { GridNode otherItem = (GridNode) other; return (otherItem.id == this.id) && (otherItem.isAlive == this.isAlive); } else return false; } } // welcome to my // DEATH MACHINE, // interloper!!!!!!!11 class NodeInfo { short id; int port; InetAddress addr; } class Rec { short dst; short via; } class Msg { short src; short version; short session; } class Join extends Msg { InetAddress addr; int port; } class Init extends Msg { short id; ArrayList<NodeInfo> members; } class Membership extends Msg { ArrayList<NodeInfo> members; short numNodes; short yourId; } class RoutingRecs extends Msg { ArrayList<Rec> recs; } class Ping extends Msg { long time; NodeInfo info; } class Pong extends Msg { long time; } class Measurements extends Msg { short[] probeTable; byte[] inflation; } class MemberPoll extends Msg { } class PeeringRequest extends Msg { } class Serialization { public void serialize(Object obj, DataOutputStream out) throws IOException { if (false) {} else if (obj.getClass() == NodeInfo.class) { NodeInfo casted = (NodeInfo) obj; out.writeInt(0); out.writeShort(casted.id); out.writeInt(casted.port); byte[] buf = casted.addr.getAddress();out.writeInt(buf.length);out.write(buf); } else if (obj.getClass() == Rec.class) { Rec casted = (Rec) obj; out.writeInt(1); out.writeShort(casted.dst); out.writeShort(casted.via); } else if (obj.getClass() == Msg.class) { Msg casted = (Msg) obj; out.writeInt(2); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Join.class) { Join casted = (Join) obj; out.writeInt(3); byte[] buf = casted.addr.getAddress();out.writeInt(buf.length);out.write(buf); 
out.writeInt(casted.port); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Init.class) { Init casted = (Init) obj; out.writeInt(4); out.writeShort(casted.id); out.writeInt(casted.members.size()); for (int i = 0; i < casted.members.size(); i++) { out.writeShort(casted.members.get(i).id); out.writeInt(casted.members.get(i).port); byte[] buf = casted.members.get(i).addr.getAddress();out.writeInt(buf.length);out.write(buf); } out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Membership.class) { Membership casted = (Membership) obj; out.writeInt(5); out.writeInt(casted.members.size()); for (int i = 0; i < casted.members.size(); i++) { out.writeShort(casted.members.get(i).id); out.writeInt(casted.members.get(i).port); byte[] buf = casted.members.get(i).addr.getAddress();out.writeInt(buf.length);out.write(buf); } out.writeShort(casted.numNodes); out.writeShort(casted.yourId); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == RoutingRecs.class) { RoutingRecs casted = (RoutingRecs) obj; out.writeInt(6); out.writeInt(casted.recs.size()); for (int i = 0; i < casted.recs.size(); i++) { out.writeShort(casted.recs.get(i).dst); out.writeShort(casted.recs.get(i).via); } out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Ping.class) { Ping casted = (Ping) obj; out.writeInt(7); out.writeLong(casted.time); out.writeShort(casted.info.id); out.writeInt(casted.info.port); byte[] buf = casted.info.addr.getAddress();out.writeInt(buf.length);out.write(buf); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Pong.class) { Pong casted = (Pong) obj; out.writeInt(8); out.writeLong(casted.time); out.writeShort(casted.src); 
out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Measurements.class) { Measurements casted = (Measurements) obj; out.writeInt(9); out.writeInt(casted.probeTable.length); for (int i = 0; i < casted.probeTable.length; i++) { out.writeShort(casted.probeTable[i]); } out.write(casted.inflation); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == MemberPoll.class) { MemberPoll casted = (MemberPoll) obj; out.writeInt(10); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == PeeringRequest.class) { PeeringRequest casted = (PeeringRequest) obj; out.writeInt(11); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } } public Object deserialize(DataInputStream in) throws IOException { switch (readInt(in)) { case 0: { // NodeInfo NodeInfo obj; { obj = new NodeInfo(); { obj.id = in.readShort(); } { obj.port = readInt(in); } { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } obj.addr = InetAddress.getByAddress(buf); } } return obj;} case 1: { // Rec Rec obj; { obj = new Rec(); { obj.dst = in.readShort(); } { obj.via = in.readShort(); } } return obj;} case 2: { // Msg Msg obj; { obj = new Msg(); { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } return obj;} case 3: { // Join Join obj; { obj = new Join(); { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } obj.addr = InetAddress.getByAddress(buf); } { obj.port = readInt(in); } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} case 4: { // Init Init obj; { obj = new Init(); { obj.id = in.readShort(); } { obj.members = new ArrayList<NodeInfo>(); for (int i = 0, len = readInt(in); i < len; i++) { NodeInfo x; { x = new NodeInfo(); { x.id = in.readShort(); } { x.port = readInt(in); } 
{ byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } x.addr = InetAddress.getByAddress(buf); } } obj.members.add(x); } } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} case 5: { // Membership Membership obj; { obj = new Membership(); { obj.members = new ArrayList<NodeInfo>(); for (int i = 0, len = readInt(in); i < len; i++) { NodeInfo x; { x = new NodeInfo(); { x.id = in.readShort(); } { x.port = readInt(in); } { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } x.addr = InetAddress.getByAddress(buf); } } obj.members.add(x); } } { obj.numNodes = in.readShort(); } { obj.yourId = in.readShort(); } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} case 6: { // RoutingRecs RoutingRecs obj; { obj = new RoutingRecs(); { obj.recs = new ArrayList<Rec>(); for (int i = 0, len = readInt(in); i < len; i++) { Rec x; { x = new Rec(); { x.dst = in.readShort(); } { x.via = in.readShort(); } } obj.recs.add(x); } } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} case 7: { // Ping Ping obj; { obj = new Ping(); { obj.time = in.readLong(); } { obj.info = new NodeInfo(); { obj.info.id = in.readShort(); } { obj.info.port = readInt(in); } { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } obj.info.addr = InetAddress.getByAddress(buf); } } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} case 8: { // Pong Pong obj; { obj = new Pong(); { obj.time = in.readLong(); } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} case 9: { // Measurements Measurements obj; { obj = new Measurements(); { obj.probeTable = new short[readInt(in)]; for (int i = 0; i < obj.probeTable.length; i++) { { obj.probeTable[i] = in.readShort(); } } } { 
obj.inflation = new byte[readInt(in)]; in.read(obj.inflation); } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} case 10: { // MemberPoll MemberPoll obj; { obj = new MemberPoll(); { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} case 11: { // PeeringRequest PeeringRequest obj; { obj = new PeeringRequest(); { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj;} default:throw new RuntimeException("unknown obj type");}} private byte[] readBuffer = new byte[4]; public int readInt(DataInputStream dis) throws IOException { dis.readFully(readBuffer, 0, 4); return ( ((int)(readBuffer[0] & 255) << 24) + ((readBuffer[1] & 255) << 16) + ((readBuffer[2] & 255) << 8) + ((readBuffer[3] & 255) << 0)); } /* public static void main(String[] args) throws IOException { { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream out = new DataOutputStream(baos); Pong pong = new Pong(); pong.src = 2; pong.version = 3; pong.time = 4; serialize(pong, out); byte[] buf = baos.toByteArray(); System.out.println(buf.length); Object obj = deserialize(new DataInputStream(new ByteArrayInputStream(buf))); System.out.println(obj); } { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream out = new DataOutputStream(baos); Measurements m = new Measurements(); m.src = 2; m.version = 3; m.membershipList = new ArrayList<Integer>(); m.membershipList.add(4); m.membershipList.add(5); m.membershipList.add(6); m.ProbeTable = new long[5]; m.ProbeTable[1] = 7; m.ProbeTable[2] = 8; m.ProbeTable[3] = 9; serialize(m, out); byte[] buf = baos.toByteArray(); System.out.println(buf.length); Object obj = deserialize(new DataInputStream(new ByteArrayInputStream(buf))); System.out.println(obj); } { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream out = new 
DataOutputStream(baos); Membership m = new Membership(); m.src = 2; m.version = 3; m.members = new ArrayList<NodeInfo>(); NodeInfo n1 = new NodeInfo(); n1.addr = InetAddress.getLocalHost(); n1.port = 4; n1.id = 5; m.members.add(n1); NodeInfo n2 = new NodeInfo(); n2.addr = InetAddress.getByName("google.com"); n2.port = 6; n2.id = 7; m.members.add(n2); m.numNodes = 8; serialize(m, out); byte[] buf = baos.toByteArray(); System.out.println(buf.length); Object obj = deserialize(new DataInputStream( new ByteArrayInputStream(buf))); System.out.println(obj); } }*/ }
package edu.cmu.neuron2; import java.io.*; import java.net.*; import java.util.*; import java.lang.annotation.*; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketTimeoutException; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Properties; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.logging.*; import java.util.logging.Formatter; import org.apache.mina.common.ByteBuffer; import org.apache.mina.common.IoHandlerAdapter; import org.apache.mina.common.IoServiceConfig; import org.apache.mina.common.IoSession; import org.apache.mina.transport.socket.nio.DatagramAcceptor; import org.apache.mina.transport.socket.nio.DatagramAcceptorConfig; import edu.cmu.neuron2.RonTest.RunMode; class LabelFilter implements Filter { private final HashSet<String> suppressedLabels; private final boolean suppressAll; public LabelFilter(HashSet<String> suppressedLabels) { this.suppressedLabels = suppressedLabels; this.suppressAll = suppressedLabels.contains("all"); } public boolean isLoggable(LogRecord record) { if 
(suppressAll) return false; String[] parts = record.getLoggerName().split("\\.", 2); return parts.length == 1 || !suppressedLabels.contains(parts[1]); } } public class NeuRonNode extends Thread { private final ExecutorService executor; private final ScheduledExecutorService scheduler; public short myNid; private final boolean isCoordinator; private final String coordinatorHost; private final int basePort; private final AtomicBoolean doQuit = new AtomicBoolean(); private Logger logger; /** * maps node id's to nodestates. this is the primary container. */ private final Hashtable<Short, NodeState> nodes = new Hashtable<Short, NodeState>(); /** * neighbors = rendesvousServers union rendezvousClients. we send our * routes to all servers in this set. */ /** * maps nid to {the set of rendezvous servers to that nid} */ private final Hashtable<Short, HashSet<NodeState>> rendezvousServers = new Hashtable<Short, HashSet<NodeState>>(); /** * the set of nodes that are relying us to get to someone. * * this is needed during route computation. i need to know who to calculate * routes among, and we want to include rendezvousclients in this set. 
*/ private final SortedSet<NodeState> rendezvousClients = new TreeSet<NodeState>(); private NodeState[][] grid; private short numCols, numRows; private final Hashtable<InetAddress, Short> addr2id = new Hashtable<InetAddress, Short>(); private final Hashtable<Short, HashSet<NodeState>> defaultRendezvousServers = new Hashtable<Short, HashSet<NodeState>>(); private short currentStateVersion; public final int neighborBroadcastPeriod; public final int probePeriod; private final NodeInfo coordNode; private final DatagramSocket sendSocket; private final RunMode mode; private final short numNodesHint; private final Semaphore semAllJoined; private final Random rand = new Random(); private final InetAddress myCachedAddr; private ArrayList<Short> cachedMemberNids = new ArrayList<Short>(); // sorted list of members private short cachedMemberNidsVersion; private final boolean blockJoins; private final boolean capJoins; private final int joinRetries; // seconds private final int dumpPeriod; private final FileHandler fh; private final short origNid; private final short sessionId; private final int linkTimeout; private final int membershipBroadcastPeriod; private static final String defaultLabelSet = "send.Ping recv.Ping stale.Ping send.Pong recv.Pong stale.Pong send.Measurements send.RoutingRecs"; private final Hashtable<Short,Long> lastSentMbr = new Hashtable<Short,Long>(); private final double smoothingFactor; private final short resetLatency = Short.MAX_VALUE; private final Hashtable<Short, NodeInfo> coordNodes = new Hashtable<Short, NodeInfo>(); private final ArrayList<Short> memberNids = new ArrayList<Short>(); private final ArrayList<NodeState> otherNodes = new ArrayList<NodeState>(); private final ArrayList<NodeState> lastRendezvousServers = new ArrayList<NodeState>(); // TODO discard private final HashSet<NodeState> allDefaultServers = new HashSet<NodeState>(); private Runnable safeRun(final Runnable r) { return new Runnable() { public void run() { try { synchronized 
(NeuRonNode.this) { r.run(); } } catch (Exception ex) { err(ex); } } }; } private void createLabelFilter(Properties props, String labelSet, Handler handler) { String[] labels = props.getProperty(labelSet, defaultLabelSet).split(" "); final HashSet<String> suppressedLabels = new HashSet<String>(Arrays.asList(labels)); handler.setFilter(new LabelFilter(suppressedLabels)); } private final int joinDelay; public NeuRonNode(short id, ExecutorService executor, ScheduledExecutorService scheduler, Properties props, short numNodes, Semaphore semJoined, InetAddress myAddr, String coordinatorHost, NodeInfo coordNode) { joinDelay = rand.nextInt(Integer.parseInt(props.getProperty("joinDelayRange", "1"))); if ((coordNode == null) || (coordNode.addr == null)){ throw new RuntimeException("coordNode is null!"); } dumpPeriod = Integer.parseInt(props.getProperty("dumpPeriod", "60")); myNid = id; origNid = id; currentStateVersion = (short)0; cachedMemberNidsVersion = (short)-1; joinRetries = Integer.parseInt(props.getProperty("joinTimeLimit", "10")); // wait up to 10 secs by default for coord to be available membershipBroadcastPeriod = Integer.parseInt(props.getProperty("membershipBroadcastPeriod", "0")); // NOTE note that you'll probably want to set this, always! 
sessionId = Short.parseShort(props.getProperty("sessionId", "0")); blockJoins = Boolean.valueOf(props.getProperty("blockJoins", "true")); capJoins = Boolean.valueOf(props.getProperty("capJoins", "true")); this.coordinatorHost = coordinatorHost; this.coordNode = coordNode; basePort = Integer.parseInt(props.getProperty("basePort", "9000")); mode = RunMode.valueOf(props.getProperty("mode", "sim").toUpperCase()); neighborBroadcastPeriod = Integer.parseInt(props.getProperty("neighborBroadcastPeriod", "60")); // for simulations we can safely reduce the probing frequency, or even turn it off if (mode == RunMode.SIM) { probePeriod = Integer.parseInt(props.getProperty("probePeriod", "60")); } else { probePeriod = Integer.parseInt(props.getProperty("probePeriod", "10")); } membershipTimeout = Integer.parseInt(props.getProperty("timeout", "" + probePeriod * 3)); linkTimeout = Integer.parseInt(props.getProperty("failoverTimeout", "" + membershipTimeout)); scheme = RoutingScheme.valueOf(props.getProperty("scheme", "SIMPLE").toUpperCase()); smoothingFactor = Double.parseDouble(props.getProperty("smoothingFactor", "0.9")); Formatter fmt = new Formatter() { public String format(LogRecord record) { StringBuilder buf = new StringBuilder(); buf.append(record.getMillis()).append(' ').append(new Date(record.getMillis())).append(" ").append( record.getLevel()).append(" ").append( record.getLoggerName()).append(": ").append( record.getMessage()).append("\n"); return buf.toString(); } }; Logger rootLogger = Logger.getLogger(""); rootLogger.getHandlers()[0].setFormatter(fmt); logger = Logger.getLogger("node" + myNid); createLabelFilter(props, "consoleLogFilter", rootLogger.getHandlers()[0]); try { String logFileBase = props.getProperty("logFileBase", "%t/scaleron-log-"); fh = new FileHandler(logFileBase + myNid, true); fh.setFormatter(fmt); createLabelFilter(props, "fileLogFilter", fh); logger.addHandler(fh); sendSocket = new DatagramSocket(); } catch (IOException ex) { throw new 
RuntimeException(ex); } this.executor = executor; this.scheduler = scheduler; grid = null; numCols = numRows = 0; isCoordinator = myNid == 0; numNodesHint = Short.parseShort(props.getProperty("numNodesHint", "" + numNodes)); semAllJoined = semJoined; if (myAddr == null) { try { myCachedAddr = InetAddress.getLocalHost(); } catch (UnknownHostException ex) { throw new RuntimeException(ex); } } else { myCachedAddr = myAddr; } myPort = basePort + myNid; clientTimeout = Integer.parseInt(props.getProperty("clientTimeout", "" + 3 * neighborBroadcastPeriod)); } private final int myPort; private void handleInit(Init im) { if (im.id == -1) { throw new PlannedException("network is full; aborting"); } System.out.println("Had nodeId = " + myNid + ". New nodeId = " + im.id); myNid = im.id; logger = Logger.getLogger("node_" + myNid); logger.addHandler(fh); currentStateVersion = im.version; log("got from coord => Init " + im.id); updateMembers(im.members); } private String bytes2string(byte[] buf) { String s = "[ "; for (byte b : buf) { s += b + " "; } s += "]"; return s; } private void log(String msg) { logger.info(msg); } private void warn(String msg) { logger.warning(msg); } private void err(String msg) { logger.severe(msg); } private void err(Exception ex) { StringWriter s = new StringWriter(); PrintWriter p = new PrintWriter(s); ex.printStackTrace(p); err(s.toString()); } /** * Used for logging data, such as neighbor lists. * * @param name - the name of the data, e.g.: "neighbors", "info" * @param value */ private void log(String name, Object value) { Logger.getLogger(logger.getName() + "." 
+ name).info(value.toString()); } public static final class PlannedException extends RuntimeException { public PlannedException(String msg) { super(msg); } } public final AtomicReference<Exception> failure = new AtomicReference<Exception>(); public void run() { try { run2(); } catch (PlannedException ex) { warn(ex.getMessage()); failure.set(ex); if (semAllJoined != null) semAllJoined.release(); } catch (Exception ex) { err(ex); failure.set(ex); if (semAllJoined != null) semAllJoined.release(); } } private short nextNodeId = 1; public void run2() { if (isCoordinator) { try { scheduler.scheduleAtFixedRate(safeRun(new Runnable() { public void run() { log("checkpoint: " + coordNodes.size() + " nodes"); printMembers(); //printGrid(); } }), dumpPeriod, dumpPeriod, TimeUnit.SECONDS); if (membershipBroadcastPeriod > 0) { scheduler.scheduleAtFixedRate(safeRun(new Runnable() { public void run() { if (membersChanged.get()) { broadcastMembershipChange((short) 0); } } }), 1, membershipBroadcastPeriod, TimeUnit.SECONDS); } // do not remove this for now Thread.sleep(2000); new DatagramAcceptor().bind(new InetSocketAddress(InetAddress .getLocalHost(), basePort), new CoordReceiver()); ServerSocket ss = new ServerSocket(basePort); try { ss.setReuseAddress(true); ss.setSoTimeout(1000); log("Beep!"); final Hashtable<Short, Socket> incomingSocks = new Hashtable<Short, Socket>(); while (!doQuit.get()) { final Socket incoming; try { incoming = ss.accept(); } catch (SocketTimeoutException ex) { continue; } final short nodeId; // this is OK since nid orderings are irrelevant synchronized (NeuRonNode.this) { nodeId = nextNodeId++; } executor.submit(new Runnable() { public void run() { try { Join msg = (Join) new Serialization().deserialize(new DataInputStream(incoming.getInputStream())); synchronized (NeuRonNode.this) { incomingSocks.put(nodeId, incoming); if (!capJoins || coordNodes.size() < numNodesHint) { addMember(nodeId, msg.addr, msg.port, msg.src); if (coordNodes.size() == 
numNodesHint) {
                                                semAllJoined.release();
                                            }
                                            if (blockJoins) {
                                                if (coordNodes.size() >= numNodesHint) {
                                                    // time to broadcast ims to everyone
                                                    ArrayList<NodeInfo> memberList = getMemberInfos();
                                                    for (NodeInfo m : memberList) {
                                                        try {
                                                            doit(incomingSocks, memberList, m.id);
                                                        } finally {
                                                            incomingSocks.get(m.id).close();
                                                        }
                                                    }
                                                }
                                            } else {
                                                doit(incomingSocks, getMemberInfos(), nodeId);
                                                broadcastMembershipChange(nodeId);
                                            }
                                        } else if (capJoins && coordNodes.size() == numNodesHint) {
                                            // network is full: reject the join with id -1
                                            Init im = new Init();
                                            im.src = myNid;
                                            im.id = -1;
                                            im.members = new ArrayList<NodeInfo>();
                                            sendit(incoming, im);
                                        }
                                    }
                                } catch (Exception ex) {
                                    err(ex);
                                } finally {
                                    try {
                                        // when blockJoins, sockets are closed later in the broadcast loop
                                        if (!blockJoins)
                                            incoming.close();
                                    } catch (IOException ex) {
                                        err(ex);
                                    }
                                }
                            }

                            // Sends an Init (assigned id + current member list) to the joiner's socket.
                            private void doit(
                                    final Hashtable<Short, Socket> incomingSocks,
                                    ArrayList<NodeInfo> memberList, short nid)
                                    throws IOException {
                                Init im = new Init();
                                im.id = nid;
                                im.src = myNid;
                                im.version = currentStateVersion;
                                im.members = memberList;
                                sendit(incomingSocks.get(nid), im);
                            }

                            // Serializes an Init onto the socket and flushes it.
                            private void sendit(
                                    Socket socket, Init im) throws IOException {
                                DataOutputStream dos = new DataOutputStream(socket.getOutputStream());
                                new Serialization().serialize(im, dos);
                                dos.flush();
                            }
                        });
                    }
                } finally {
                    ss.close();
                    log("coord done");
                }
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        } else {
            // non-coordinator: join via the coordinator, with bounded retries
            int count = 0;
            try {
                Thread.sleep(1000 * joinDelay);
            } catch (InterruptedException ex) {
                throw new RuntimeException(ex);
            }
            while (true) {
                Socket s = null;
                try {
                    if (count++ > joinRetries) {
                        throw new PlannedException("exceeded join try limit; aborting");
                    }
                    // connect to the coordinator
                    try {
                        s = new Socket(coordinatorHost, basePort);
                    } catch (Exception ex) {
                        log("couldn't connect to coord, retrying in 1 sec: " + ex.getMessage());
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException ie) {
                        }
                    }
                    if (s != null) {
                        try {
                            // talk to coordinator
                            log("sending join to coordinator at " + coordinatorHost + ":" + basePort);
                            Join msg = new Join();
                            msg.addr = myCachedAddr;
                            msg.src = myNid; // informs coord of orig id
                            msg.port = myPort;
                            DataOutputStream dos = new DataOutputStream(s.getOutputStream());
                            new Serialization().serialize(msg, dos);
                            dos.flush();
                            log("waiting for InitMsg");
                            // slurp the entire response until EOF before deserializing
                            ByteArrayOutputStream minibaos = new ByteArrayOutputStream();
                            byte[] minibuf = new byte[8192];
                            int amt;
                            while ((amt = s.getInputStream().read(minibuf)) > 0) {
                                minibaos.write(minibuf, 0, amt);
                            }
                            byte[] buf = minibaos.toByteArray();
                            try {
                                Init im = (Init) new Serialization().deserialize(new DataInputStream(new ByteArrayInputStream(buf)));
                                handleInit(im);
                            } catch (Exception ex) {
                                // dump the raw bytes to aid debugging of malformed replies
                                err("got buffer: " + bytes2string(buf));
                                throw ex;
                            }
                            break;
                        } finally {
                            try {
                                s.close();
                            } catch (Exception ex) {
                                throw new RuntimeException(ex);
                            }
                        }
                    }
                } catch (PlannedException ex) {
                    throw ex;
                } catch (SocketException ex) {
                    warn(ex.getMessage());
                    return;
                } catch (Exception ex) {
                    throw new RuntimeException(ex);
                }
            }
            // wait for coordinator to announce my existence to others
            try {
                Thread.sleep(membershipBroadcastPeriod * 1000);
            } catch (InterruptedException ex) {
                throw new RuntimeException(ex);
            }
            // now start accepting pings and other msgs,
            // also start sending probes and sending out other msgs
            try {
                new DatagramAcceptor().bind(new InetSocketAddress(myCachedAddr, myPort), new Receiver());
                log("server started on " + myCachedAddr + ":" + (basePort + myNid));
                scheduler.scheduleAtFixedRate(safeRun(new Runnable() {
                    public void run() {
                        pingAll();
                    }
                }), 1, probePeriod, TimeUnit.SECONDS);
                scheduler.scheduleAtFixedRate(safeRun(new Runnable() {
                    public void run() {
                        /*
                         * path-finding and rendezvous finding is
                         * interdependent. the fact that we do the path-finding
                         * first before the rendezvous servers is arbitrary.
                         */
                        Pair<Integer, Integer> p = findPathsForAllNodes();
                        log(p.first + " live nodes, " + p.second + " avg paths");
                        ArrayList<NodeState> measRecips = scheme == RoutingScheme.SIMPLE ?
                                otherNodes : getAllRendezvousServers();
                        broadcastMeasurements(measRecips);
                        if (scheme != RoutingScheme.SIMPLE) {
                            broadcastRecommendations();
                        }
                    }
                }), 1, neighborBroadcastPeriod, TimeUnit.SECONDS);
                if (semAllJoined != null)
                    semAllJoined.release();
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
        }
    }

    // Nodes we deliberately drop outbound packets to (for experiments).
    private final HashSet<Short> ignored = new HashSet<Short>();

    /** Starts dropping outbound packets to nid (no-op for ourselves). */
    public synchronized void ignore(short nid) {
        if (nid != myNid) {
            log("ignoring " + nid);
            ignored.add(nid);
        }
    }

    /** Stops dropping outbound packets to nid. */
    public synchronized void unignore(short nid) {
        if (nid != myNid) {
            log("unignoring " + nid);
            ignored.remove(nid);
        }
    }

    /**
     * Sends a Ping (carrying our original id and contact info) to every other
     * member, and finally to the coordinator.
     */
    private void pingAll() {
        log("pinging");
        Ping ping = new Ping();
        ping.time = System.currentTimeMillis();
        NodeInfo tmp = nodes.get(myNid).info;
        ping.info = new NodeInfo();
        ping.info.id = origNid; // note that the ping info uses the original id
        ping.info.addr = tmp.addr;
        ping.info.port = tmp.port;
        for (short nid : nodes.keySet())
            if (nid != myNid)
                sendObject(ping, nid);
        /*
         send ping to the membership server (co-ord) -
         this might not be required if everone makes their own local decision.
         i.e. each node notices that no other node can reach a node (say X),
         then each node sends the co-ord a msg saying that "i think X is dead".
         The sending of this msg can be staggered in time so that the co-ord is not flooded with mesgs.
         The co-ordinator can then make a decision on keeping or removing node Y from the
         membership. On seeing a subsequent msg from the co-ord that X has been removed from
         the overlay, if a node Y has not sent its "i think X is dead" msg, it can cancel
         this event.
         */
        sendObject(ping, (short)0);
    }

    // Deserializes a received MINA buffer into a Msg; returns null on failure.
    private Msg deserialize(Object o) {
        ByteBuffer buf = (ByteBuffer) o;
        byte[] bytes = new byte[buf.limit()];
        buf.get(bytes);
        try {
            return (Msg) new Serialization().deserialize(new DataInputStream(new ByteArrayInputStream(bytes)));
        } catch (Exception ex) {
            err("deserialization exception: " + ex.getMessage());
            return null;
        }
    }

    // Coordinator-side mappings: assigned nid -> original nid / hostname (for logs).
    private Hashtable<Short,Short> id2id = new Hashtable<Short,Short>();
    private Hashtable<Short,String> id2name = new Hashtable<Short,String>();

    /**
     * coordinator's msg handling loop
     */
    public final class CoordReceiver extends IoHandlerAdapter {
        @Override
        public void messageReceived(IoSession session, Object obj) throws Exception {
            try {
                Msg msg = deserialize(obj);
                if (msg == null) return;
                synchronized (NeuRonNode.this) {
                    if (msg.session == sessionId) {
                        if (coordNodes.containsKey(msg.src)) {
                            log("recv." + msg.getClass().getSimpleName(), "from " + msg.src + " (oid " + id2id.get(msg.src) + ", " + id2name.get(msg.src) + ")");
                            // any traffic from a member proves it is alive
                            resetTimeoutAtCoord(msg.src);
                            if (msg.version < currentStateVersion) {
                                log("updating stale membership");
                                sendMembership(msg.src);
                            }
                            if (msg instanceof Ping) {
                                // ignore the ping
                            } else {
                                throw new Exception("can't handle that message type");
                            }
                        } else {
                            // unknown sender: if there is room, treat its ping as a (re-)join
                            if ((!capJoins || coordNodes.size() < numNodesHint) &&
                                    msg instanceof Ping) {
                                Ping ping = (Ping) msg;
                                log("dead." + ping.getClass().getSimpleName(), "from '" + ping.src + "' " + ping.info.addr.getHostName());
                                Short mappedId = addr2id.get(ping.info.addr);
                                short nid;
                                if (mappedId == null) {
                                    nid = nextNodeId++;
                                    addMember(nid, ping.info.addr, ping.info.port, ping.info.id);
                                    broadcastMembershipChange(nid);
                                } else {
                                    nid = mappedId;
                                }
                                Init im = new Init();
                                im.id = nid;
                                im.src = myNid;
                                im.version = currentStateVersion;
                                im.members = getMemberInfos();
                                sendObject(im, nid);
                            } else {
                                log("dead."
                                        + msg.getClass().getSimpleName(), "from '" + msg.src + "'");
                            }
                        }
                    }
                }
            } catch (Exception ex) {
                err(ex);
            }
        }
    }

    /**
     * receiver's msg handling loop
     */
    public final class Receiver extends IoHandlerAdapter {
        @Override
        public void messageReceived(IoSession session, Object obj) throws Exception {
            try {
                Msg msg = deserialize(obj);
                if (msg == null) return;
                synchronized (NeuRonNode.this) {
                    // accept from known members in our session; pings always accepted
                    if ((msg.src == 0 || nodes.containsKey(msg.src)) && msg.session == sessionId || msg instanceof Ping) {
                        NodeState state = nodes.get(msg.src);
                        log("recv." + msg.getClass().getSimpleName(), "from " + msg.src);
                        // always reply to pings and log pongs
                        if (msg instanceof Ping) {
                            Ping ping = ((Ping) msg);
                            Pong pong = new Pong();
                            pong.time = ping.time;
                            sendObject(pong, ping.info);
                        } else if (msg instanceof Pong) {
                            Pong pong = (Pong) msg;
                            short rtt = (short) (System.currentTimeMillis() - pong.time);
                            if (state != null) {
                                resetTimeoutAtNode(pong.src);
                                NodeState self = nodes.get(myNid);
                                short oldLatency = self.latencies.get(pong.src);
                                // EWMA over the one-way latency estimate (rtt/2)
                                short ewma = (short) (smoothingFactor * (rtt / 2) + (1 - smoothingFactor) * oldLatency);
                                log("latency", pong.src + " = " + rtt/2 + ", ewma " + ewma);
                                self.latencies.put(pong.src, ewma);
                            } else {
                                log("latency", "some " + pong.src + " = " + rtt/2);
                            }
                        }
                        // for other messages, make sure their state version is
                        // the same as ours
                        if (msg.version > currentStateVersion) {
                            if (msg instanceof Membership) {
                                currentStateVersion = msg.version;
                                Membership m = (Membership) msg;
                                myNid = m.yourId;
                                updateMembers(m.members);
                            } else {
                                // i am out of date - request latest membership
                                // sendObject(new MemberPoll(), 0);
                                // commented out - membership updates now
                                // implicitly handled via pings
                            }
                        } else if (msg.version == currentStateVersion) {
                            // from coordinator
                            if (msg instanceof Membership) {
                                Membership m = (Membership) msg;
                                myNid = m.yourId;
                                updateMembers(m.members);
                            } else if (msg instanceof Measurements) {
                                resetTimeoutOnRendezvousClient(msg.src);
                                updateMeasurements((Measurements) msg);
                            }
                            else if (msg instanceof RoutingRecs) {
                                RoutingRecs recs = (RoutingRecs) msg;
                                handleRecommendations(recs);
                                log("got recs " + routesToString(recs.recs));
                            } else if (msg instanceof Ping) {
                                // nothing to do, already handled above
                            } else if (msg instanceof Pong) {
                                // nothing to do, already handled above
                            } else if (msg instanceof Init) {
                                handleInit((Init) msg);
                            } else {
                                throw new Exception("can't handle that message type");
                            }
                        } else {
                            log("stale." + msg.getClass().getSimpleName(), "from " + msg.src + " version " + msg.version + " current " + currentStateVersion);
                        }
                    } else {
                        // log("ignored." + msg.getClass().getSimpleName(), "ignored from " + msg.src + " session " + msg.session);
                    }
                }
            } catch (Exception ex) {
                err(ex);
            }
        }
    }

    /**
     * If we don't hear from a node for this number of seconds, then consider
     * them dead.
     */
    private int membershipTimeout;

    // Pending per-node death timers, keyed by nid.
    private Hashtable<Short, ScheduledFuture<?>> timeouts = new Hashtable<Short, ScheduledFuture<?>>();

    /**
     * a coord-only method
     *
     * (re)arms nid's death timer; if it expires, the member is removed.
     *
     * @param nid
     */
    private void resetTimeoutAtCoord(final short nid) {
        if (coordNodes.containsKey(nid)) {
            ScheduledFuture<?> oldFuture = timeouts.get(nid);
            if (oldFuture != null) {
                oldFuture.cancel(false);
            }
            ScheduledFuture<?> future = scheduler.schedule(safeRun(new Runnable() {
                public void run() {
                    removeMember(nid);
                }
            }), membershipTimeout, TimeUnit.SECONDS);
            timeouts.put(nid, future);
        }
    }

    private final int clientTimeout;

    private final Hashtable<Short, ScheduledFuture<?>> rendezvousClientTimeouts = new Hashtable<Short, ScheduledFuture<?>>();

    // (Re)arms expiry of nid's rendezvous-client status; adds it on first contact.
    private void resetTimeoutOnRendezvousClient(final short nid) {
        final NodeState node = nodes.get(nid);
        if (!node.isReachable) return;
        ScheduledFuture<?> oldFuture = rendezvousClientTimeouts.get(nid);
        if (oldFuture != null) {
            oldFuture.cancel(false);
        }
        if (rendezvousClients.add(node)) {
            log("rendezvous client " + node + " added");
            ///XXX System.out.println("rendezvous client " + node + " added");
        }
        ScheduledFuture<?> future = scheduler.schedule(safeRun(new Runnable() {
public void run() {
                if (rendezvousClients.remove(node)) {
                    log("rendezvous client " + node + " removed");
                    ///XXX System.out.println("rendezvous client " + node + " removed");
                }
            }
        }), clientTimeout, TimeUnit.SECONDS);
        rendezvousClientTimeouts.put(nid, future);
    }

    // (Re)arms nid's link-death timer and marks it reachable; on expiry the
    // node is marked unreachable and purged from latency/rendezvous state.
    private void resetTimeoutAtNode(final short nid) {
        if (nodes.containsKey(nid)) {
            ScheduledFuture<?> oldFuture = timeouts.get(nid);
            if (oldFuture != null) {
                oldFuture.cancel(false);
            }
            final NodeState node = nodes.get(nid);
            if (!node.isReachable)
                log(nid + " reachable");
            node.isReachable = true;
            ScheduledFuture<?> future = scheduler.schedule(safeRun(new Runnable() {
                public void run() {
                    if (nodes.containsKey(nid)) {
                        log(nid + " unreachable");
                        node.isReachable = false;
                        nodes.get(myNid).latencies.remove(nid);
                        rendezvousClients.remove(node);
                        // XXX remove: findPaths(node);
                    }
                }
            }), linkTimeout, TimeUnit.SECONDS);
            timeouts.put(nid, future);
        }
    }

    /**
     * a coordinator-only method
     *
     * registers a new member under its assigned id, records the id/name
     * mappings, bumps the membership version, and starts its death timer.
     */
    private NodeInfo addMember(short newNid, InetAddress addr, int port, short origId) {
        NodeInfo info = new NodeInfo();
        info.id = newNid;
        info.addr = addr;
        info.port = port;
        coordNodes.put(newNid, info);
        id2id.put(newNid, origId);
        id2name.put(newNid, addr.getHostName());
        addr2id.put(addr, newNid);
        log("adding new node: " + newNid + " oid " + origId + " name " + id2name.get(newNid));
        currentStateVersion++;
        resetTimeoutAtCoord(newNid);
        return info;
    }

    // Set when membership changed but the periodic broadcast hasn't run yet.
    private final AtomicBoolean membersChanged = new AtomicBoolean();

    /**
     * a coordinator-only method
     *
     * @param exceptNid - if this is 0, then we must have been called by the
     * periodic membership-broadcast daemon thread, so actually send stuff;
     * otherwise, we should just signal to the daemon thread a pending change
     */
    private void broadcastMembershipChange(short exceptNid) {
        if (exceptNid == 0 || membershipBroadcastPeriod == 0) {
            for (short nid : coordNodes.keySet()) {
                if (nid != exceptNid) {
                    sendMembership(nid);
                }
            }
        }
    }

    // Snapshot of the coordinator's current member list.
    ArrayList<NodeInfo> getMemberInfos() {
        return new
                ArrayList<NodeInfo>(coordNodes.values());
    }

    /**
     * a coordinator-only method
     *
     * throttles these messages so they're sent at most once per second
     */
    private void sendMembership(short nid) {
        Membership msg = new Membership();
        msg.yourId = nid;
        msg.members = getMemberInfos();
        sendObject(msg, coordNodes.get(nid));
    }

    /**
     * a coordinator-only method
     *
     * drops nid from all coordinator tables, bumps the version, and
     * broadcasts the change to everyone else.
     *
     * @param nid
     */
    private void removeMember(short nid) {
        log("removing dead node " + nid + " oid " + id2id.get(nid) + " " + id2name.get(nid));
        NodeInfo info = coordNodes.remove(nid);
        Short mid = addr2id.remove(info.addr);
        assert mid != null;
        currentStateVersion++;
        broadcastMembershipChange(nid);
    }

    /**
     * updates our member state. modifies data structures as necessary to
     * maintain invariants.
     *
     * @param newNodes
     */
    private void updateMembers(List<NodeInfo> newNodes) {
        // add new nodes
        for (NodeInfo node : newNodes)
            if (!nodes.containsKey(node.id)) {
                nodes.put(node.id, new NodeState(node));
                if (node.id != myNid)
                    resetTimeoutAtNode(node.id);
            }
        // remove nodes
        HashSet<Short> newNids = new HashSet<Short>();
        for (NodeInfo node : newNodes)
            newNids.add(node.id);
        HashSet<Short> toRemove = new HashSet<Short>();
        for (Short nid : nodes.keySet())
            if (!newNids.contains(nid))
                toRemove.add(nid);
        for (Short nid : toRemove)
            nodes.remove(nid);
        // consistency cleanups: check that all nid references are still valid nid's
        for (NodeState state : nodes.values()) {
            if (!newNids.contains(state.hop))
                state.hop = state.info.id;
            for (Iterator<Short> i = state.hopOptions.iterator(); i.hasNext();)
                if (!newNids.contains(i.next()))
                    i.remove();
            HashSet<Short> garbage = new HashSet<Short>();
            for (short nid : state.latencies.keySet())
                if (!newNids.contains(nid))
                    garbage.add(nid);
            for (short nid : garbage)
                state.latencies.remove(nid);
        }
        // regenerate alternative views of this data
        NodeState self = nodes.get(myNid);
        memberNids.clear();
        memberNids.addAll(newNids);
        Collections.sort(memberNids);
        otherNodes.clear();
        otherNodes.addAll(nodes.values());
        otherNodes.remove(self);
        // lay members out row-major into a near-square grid (cells may repeat
        // when the member count is not a perfect rectangle)
        numCols = (short) Math.ceil(Math.sqrt(nodes.size()));
        numRows = (short) Math.ceil((double) nodes.size() / (double) numCols);
        grid = new NodeState[numRows][numCols];
        List<Short> nids = memberNids;
        for (short i = 0, r = 0; r < numRows; r++)
            for (short c = 0; c < numCols; c++)
                grid[r][c] = nodes.get(nids.get(i++ % nids.size()));
        /*
         * simply forget about all our neighbors. thus, this forgets all our
         * failover clients and servers. since the grid is different. if this
         * somehow disrupts route computation, so be it - it'll only last for a
         * period.
         *
         * one worry is that others who miss this member update will continue to
         * broadcast to us. this is a non-issue because we ignore stale
         * messages, and when they do become updated, they'll forget about us
         * too.
         */
        rendezvousClients.clear();
        defaultRendezvousServers.clear();
        for (int rz = 0; rz < numRows; rz++) {
            for (int cz = 0; cz < numCols; cz++) {
                if (grid[rz][cz] == self) {
                    HashSet<NodeState> rendezvousClientRow = new HashSet<NodeState>();
                    HashSet<NodeState> rendezvousClientCol = new HashSet<NodeState>();
                    // add this column and row as clients
                    for (int r1 = 0; r1 < numRows; r1++) {
                        NodeState cli = grid[r1][cz];
                        if (cli.isReachable && cli != self)
                            rendezvousClientCol.add(cli);
                    }
                    for (int c1 = 0; c1 < numCols; c1++) {
                        NodeState cli = grid[rz][c1];
                        if (cli.isReachable && cli != self)
                            rendezvousClientRow.add(cli);
                    }
                    rendezvousClients.addAll(rendezvousClientRow);
                    rendezvousClients.addAll(rendezvousClientCol);
                    // add the rendezvous servers to all nodes
                    for (int r0 = 0; r0 < numRows; r0++) {
                        for (int c0 = 0; c0 < numCols; c0++) {
                            NodeState dst = grid[r0][c0];
                            HashSet<NodeState> rs = defaultRendezvousServers.get(dst.info.id);
                            if (rs == null) {
                                rs = new HashSet<NodeState>();
                                defaultRendezvousServers.put(dst.info.id, rs);
                            }
                            if (r0 != rz && c0 != cz) {
                                // normally, add the pairs
                                if (self != grid[rz][c0])
                                    rs.add(grid[rz][c0]);
                                if (self != grid[r0][cz])
                                    rs.add(grid[r0][cz]);
                            } else if (c0 == cz) {
                                /*
                                 * if this is in our col (a neighbor), everyone
                                 * else in that col is in essence a rendezvous
                                 * server between us two
                                 */
                                rs.addAll(rendezvousClientCol);
                            } else if (r0 == rz) {
                                /*
                                 * ditto for rows
                                 */
                                rs.addAll(rendezvousClientRow);
                            }
                        }
                    }
                }
            }
        }
        rendezvousServers.clear();
        for (Entry<Short, HashSet<NodeState>> entry : defaultRendezvousServers.entrySet()) {
            rendezvousServers.put(entry.getKey(), new HashSet<NodeState>());
        }
        lastRendezvousServers.clear();
        allDefaultServers.clear();
        allDefaultServers.addAll(rendezvousClients);
        for (int r0 = 0; r0 < numRows; r0++) {
            for (int c0 = 0; c0 < numCols; c0++) {
                NodeState n = grid[r0][c0];
                for (int r1 = 0; r1 < numRows; r1++)
                    n.defaultClients.add(grid[r1][c0]);
                for (int c1 = 0; c1 < numCols; c1++)
                    n.defaultClients.add(grid[r0][c1]);
                n.defaultClients.remove(self);
            }
        }
        log("state " + currentStateVersion + ", mbrs " + nids);
    }

    /**
     * @param n - candidate rendezvous node
     * @param remote - the destination n would relay to
     * @return true if n is unusable as a rendezvous toward remote: either we
     * cannot reach n (proximal failure) or n reported it cannot reach remote
     * (remote failure).
     */
    private boolean isFailedRendezvous(NodeState n, NodeState remote) {
        ///XXX if (myNid == 1 && n.info.id == 7 && remote.info.id == 8) System.out.println("old remote failures " + n.remoteFailures + ", failed = " + (!n.isReachable || n.remoteFailures.contains(remote)));
        return !n.isReachable || n.remoteFailures.contains(remote);
    }

    /**
     * @return failoverClients `union` nodes in my row and col (wherever i occur)
     */
    private ArrayList<NodeState> getAllRendezvousClients() {
        ArrayList<NodeState> list = new ArrayList<NodeState>(rendezvousClients);
        Collections.sort(list);
        return list;
    }

    /**
     * makes one pass over the metaset of all rendezvous servers, removing any
     * failed rendezvous from the individual sets.
     *
     * for the simple routing scheme, this is the full set of nodes. as a
     * result, measurements are broadcast to everyone, as intended. (note that
     * there are no routing recommendation messages in this scheme.)
     *
     * @return the union of all the sets of non-failed rendezvous servers.
     */
    private ArrayList<NodeState> getAllRendezvousServers() {
        HashSet<NodeState> servers = new HashSet<NodeState>();
        NodeState self = nodes.get(myNid);
        for (int r0 = 0; r0 < numRows; r0++) {
            for (int c0 = 0; c0 < numCols; c0++) {
                NodeState dst = grid[r0][c0];
                // if dst is not us and we believe that the node is not down
                if (dst != self && dst.hop != 0) {
                    HashSet<NodeState> rs = rendezvousServers.get(dst.info.id);
                    // check if any of our default rendezvous servers are once
                    // more available; if so, add them back
                    HashSet<NodeState> defaults = defaultRendezvousServers.get(dst.info.id);
                    for (NodeState r : defaults)
                        if (r.isReachable)
                            servers.add(r);
                    boolean hasDefaults = false;
                    for (NodeState r : defaults) {
                        hasDefaults = rs.contains(r);
                        break;
                    }
                    HashSet<NodeState> old = new HashSet<NodeState>(rs);
                    if (hasDefaults) {
                        if (!defaults.equals(rs))
                            for (NodeState r : defaults)
                                if (!isFailedRendezvous(r, dst))
                                    rs.add(r);
                    } else {
                        // currently running on failovers; revert to defaults
                        // the moment any working default reappears
                        boolean cleared = false;
                        for (NodeState r : defaults) {
                            if (!isFailedRendezvous(r, dst)) {
                                if (!cleared) {
                                    rs.clear();
                                    cleared = true;
                                }
                                rs.add(r);
                            }
                        }
                    }
                    if (!old.equals(rs)) {
                        ///XXX System.out.println("restored rendezvous for " + dst + " from " + old + " to " + rs);
                        log("restored rendezvous for " + dst + " from " + old + " to " + rs);
                    }
                    if (rs.isEmpty() && scheme != RoutingScheme.SQRT_NOFAILOVER) {
                        // look for failovers
                        // get candidates from col
                        ArrayList<NodeState> cands = new ArrayList<NodeState>();
                        for (int r1 = 0; r1 < numRows; r1++) {
                            NodeState cand = grid[r1][c0];
                            if (cand != self && cand.isReachable)
                                cands.add(cand);
                        }
                        // get candidates from row
                        for (int c1 = 0; c1 < numCols; c1++) {
                            NodeState cand = grid[r0][c1];
                            if (cand != self && cand.isReachable)
                                cands.add(cand);
                        }
                        // choose candidate uniformly at random
                        NodeState failover = cands.get(rand.nextInt(cands.size()));
                        log("new failover for " + dst + ": " + failover + ", prev rs = " + rs);
                        rs.add(failover);
                        servers.add(failover);
                        ///XXX System.out.println("FAILOVER " + failover);
                    } else {
                        /*
                         * when we remove nodes now, don't immediately look
                         * for failovers. the next period, we will have
                         * received link states from our neighbors, from
                         * which we can determine whether dst is just down.
                         */
                        for (Iterator<NodeState> i = rs.iterator(); i.hasNext();) {
                            NodeState r = i.next();
                            if (isFailedRendezvous(r, dst)) {
                                i.remove();
                            } else {
                                servers.add(r);
                            }
                        }
                        ///XXX if (myNid == 1 && dst.info.id == 8) System.out.println(rs);
                        if (rs.isEmpty()) {
                            log("all rs to " + dst + " failed");
                            System.out.println("ALL FAILED!");
                        }
                    }
                }
            }
        }
        ArrayList<NodeState> list = new ArrayList<NodeState>(servers);
        Collections.sort(list);
        return list;
    }

    public static enum RoutingScheme {
        SIMPLE, SQRT, SQRT_NOFAILOVER, SQRT_RC_FAILOVER, SQRT_SPECIAL
    };

    private final RoutingScheme scheme;

    // Logs the coordinator's member table (assigned id, orig id, host, port).
    private void printMembers() {
        String s = "members:";
        for (NodeInfo info : coordNodes.values()) {
            s += "\n " + info.id + " oid " + id2id.get(info.id) + " " + id2name.get(info.id) + " " + info.port;
        }
        log(s);
    }

    // PERF
    private void printGrid() {
        String s = "grid:";
        if (grid != null) {
            for (int i = 0; i < numRows; i++) {
                s += "\n ";
                for (int j = 0; j < numCols; j++) {
                    s += "\t" + grid[i][j];
                }
            }
        }
        log(s);
    }

    /**
     * in the sqrt routing scheme: for each neighbor, find for him the min-cost
     * hops to all other neighbors, and send this info to him (the intermediate
     * node may be one of the endpoints, meaning a direct route is cheapest).
     *
     * in the sqrt_special routing scheme, we instead find for each neighbor the
     * best intermediate other neighbor through which to route to every
     * destination. this still needs work, see various todos.
     *
     * a failed rendezvous wrt some node n is one which we cannot reach
     * (proximal failure) or which cannot reach n (remote failure). when all
     * current rendezvous to some node n fail, then we find a failover from node
     * n's row and col, and include it in our neighbor set. by befault, this
     * situation occurs when a row-col rendezvous pair fail. it can also occur
     * with any of our current failovers.
     */
    private void broadcastRecommendations() {
        ArrayList<NodeState> clients = getAllRendezvousClients();
        ArrayList<NodeState> dsts = new ArrayList<NodeState>(clients);
        dsts.add(nodes.get(myNid));
        Collections.sort(dsts);
        int totalSize = 0;
        for (NodeState src : clients) {
            ArrayList<Rec> recs = new ArrayList<Rec>();
            // dst <- nbrs, hop <- any
            findHops(dsts, memberNids, src, recs);
            ///XXX if (myNid == 7 && src.info.id == 1) System.out.println(routesToString(recs));
            /*
             * TODO: need to additionally send back info about *how good* the
             * best hop is, so that the receiver can decide which of the many
             * recommendations to accept
             */
            if (scheme == RoutingScheme.SQRT_SPECIAL) {
                // dst <- any, hop <- nbrs
                findHopsAlt(memberNids, dsts, src, recs);
            }
            RoutingRecs msg = new RoutingRecs();
            msg.recs = recs;
            totalSize += sendObject(msg, src.info.id);
        }
        log("sent recs, " + totalSize + " bytes, to " + clients);
    }

    // For each dst (excluding src) pick the hop minimizing src->hop + dst->hop.
    private void findHops(ArrayList<NodeState> dsts, ArrayList<Short> hops, NodeState src, ArrayList<Rec> recs) {
        for (NodeState dst : dsts) {
            if (src != dst) {
                short min = resetLatency;
                short minhop = -1;
                for (short hop : hops) {
                    if (hop != src.info.id) {
                        short src2hop = src.latencies.get(hop);
                        short dst2hop = dst.latencies.get(hop);
                        short latency = (short) (src2hop + dst2hop);
                        if (latency < min) {
                            min = latency;
                            minhop = hop;
                        }
                    }
                }
                assert minhop != -1;
                Rec rec = new Rec();
                rec.dst = dst.info.id;
                rec.via = minhop;
                recs.add(rec);
            }
        }
    }

    // Mirror of findHops with dsts as raw ids and hops as NodeStates (sqrt_special).
    private void findHopsAlt(ArrayList<Short> dsts, ArrayList<NodeState> hops, NodeState src, ArrayList<Rec> recs) {
        for (short dst : dsts) {
            if (src.info.id != dst && nodes.get(dst).isReachable) {
                short min = resetLatency;
                short minhop = -1;
                for (NodeState hop : hops) {
                    if (hop != src) {
                        short src2hop = src.latencies.get(hop.info.id);
                        short dst2hop = hop.latencies.get(dst);
                        short latency = (short) (src2hop + dst2hop);
                        if (latency < min) {
                            min = latency;
                            minhop = hop.info.id;
                        }
                    }
                }
                assert minhop != -1;
                Rec rec = new Rec();
                rec.dst = dst;
                rec.via = minhop;
                recs.add(rec);
            }
        }
    }

    // "via->dst" pairs, space-separated, for logging.
    private String routesToString(ArrayList<Rec> recs) {
        String s = "";
        for (Rec rec : recs)
            s += rec.via + "->" + rec.dst + " ";
        return s;
    }

    private Serialization senderSer = new Serialization();

    /**
     * Serializes o and sends it over UDP to addr:port, stamping src/version/
     * session first. nid is used only for logging and the ignore list
     * (-1 = unknown/none).
     *
     * @return the serialized message size in bytes
     */
    private int sendObject(final Msg o, InetAddress addr, int port, short nid) {
        o.src = myNid;
        o.version = currentStateVersion;
        o.session = sessionId;
        try {
            /*
             * note that it's unsafe to re-use these output streams - at
             * least, i don't know how (reset() is insufficient)
             */
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            senderSer.serialize(o, new DataOutputStream(baos));
            byte[] buf = baos.toByteArray();
            String who = nid >= 0 ? "" + nid : (addr + ":" + port);
            log("send." + o.getClass().getSimpleName(), "to " + who + " len " + buf.length);
            if (!ignored.contains(nid)) {
                sendSocket.send(new DatagramPacket(buf, buf.length, addr, port));
            } else {
                // NOTE(review): "droppng" typo is part of the emitted log string; left as-is
                log("droppng packet sent to " + who);
            }
            return buf.length;
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private int sendObject(final Msg o, NodeInfo info, short nid) {
        return sendObject(o, info.addr, info.port, nid);
    }

    private int sendObject(final Msg o, NodeInfo info) {
        return sendObject(o, info, (short)-1);
    }

    // nid 0 addresses the coordinator; sending to ourselves is a no-op (0 bytes).
    private int sendObject(final Msg o, short nid) {
        return nid != myNid ?
            sendObject(o, nid == 0 ?
coordNode : nodes.get(nid).info, nid) : 0;
    }

    // Sends our latency row (indexed in memberNids order) to each server.
    private void broadcastMeasurements(ArrayList<NodeState> servers) {
        ShortShortMap latencies = nodes.get(myNid).latencies;
        Measurements rm = new Measurements();
        rm.probeTable = new short[memberNids.size()];
        for (int i = 0; i < rm.probeTable.length; i++)
            rm.probeTable[i] = latencies.get(memberNids.get(i));
        rm.inflation = new byte[rm.probeTable.length];
        int totalSize = 0;
        for (NodeState nbr : servers) {
            totalSize += sendObject(rm, nbr.info.id);
        }
        log("sent measurements, " + totalSize + " bytes, to " + servers);
    }

    // Installs the sender's reported latency row into its NodeState.
    private void updateMeasurements(Measurements m) {
        NodeState myState = nodes.get(m.src);
        for (int i = 0; i < m.probeTable.length; i++)
            myState.latencies.put(memberNids.get(i), m.probeTable[i]);
    }

    /**
     * Applies a rendezvous node's routing recommendations: adopts suggested
     * hops (when the via node is reachable) and infers remote failures from
     * default clients the rendezvous did NOT cover.
     */
    private void handleRecommendations(RoutingRecs msg) {
        ArrayList<Rec> recs = msg.recs;
        NodeState r = nodes.get(msg.src);
        r.dstsPresent.clear();
        r.remoteFailures.clear();
        for (Rec rec : recs) {
            r.dstsPresent.add(rec.dst);
            if (nodes.get(rec.via).isReachable) {
                if (scheme == RoutingScheme.SQRT_SPECIAL) {
                    /*
                     * TODO: add in support for processing sqrt_special
                     * recommendations. first we need to add in the actual cost of
                     * the route to these recommendations (see
                     * broadcastRecommndations), then we need to compare all of
                     * these and see which ones were better. a complication is that
                     * routing recommendation broadcasts are not synchronized, so
                     * while older messages may appear to have better routes, there
                     * must be some threshold in time past which we disregard old
                     * latencies. must keep some history
                     */
                    nodes.get(rec.dst).hopOptions.add(rec.via);
                    nodes.get(rec.dst).hop = rec.via;
                } else {
                    // blindly trust the recommendations
                    nodes.get(rec.dst).hop = rec.via;
                }
            }
        }
        if (scheme != RoutingScheme.SQRT_SPECIAL) {
            /*
             * get the full set of dsts that we depend on this node for. note
             * that the set of nodes it's actually serving may be different.
             */
            for (NodeState dst : r.defaultClients) {
                if (!r.dstsPresent.contains(dst.info.id)) {
                    /*
                     * there was a comm failure between this rendezvous and the
                     * dst for which this rendezvous did not provide a
                     * recommendation. consider this a rendezvous failure, so that if
                     * necessary during the next phase, we will find failovers.
                     */
                    r.remoteFailures.add(dst);
                }
            }
            ///XXX if (r.info.id == 7 && myNid == 1) System.out.println("new remote failures " + r.remoteFailures);
        }
    }

    /**
     * counts the number of nodes that we can reach - either directly, through a
     * hop, or through any rendezvous client.
     *
     * @return
     */
    private int countReachableNodes() {
        /*
         * TODO need to fix up hopOptions so that it actually gets updated
         * correctly, since currently things are *never* removed from it (they
         * need to expire)
         */
        NodeState myState = nodes.get(myNid); // NOTE(review): unused local
        int count = 0;
        for (NodeState node : otherNodes) {
            count += node.hop != 0 ? 1 : 0;
        }
        return count;
    }

    /**
     * counts the number of paths to a particular node
     */
    private int findPaths(NodeState node) {
        ArrayList<NodeState> clients = getAllRendezvousClients();
        ArrayList<NodeState> servers = lastRendezvousServers;
        HashSet<NodeState> options = new HashSet<NodeState>();
        short nid = node.info.id;
        node.hop = node.isReachable ? node.info.id : 0;
        if (node.isReachable)
            options.add(node);
        // find best rendezvous client. note that this includes node itself.
        short min = resetLatency;
        for (NodeState client : clients) {
            short val = client.latencies.get(nid);
            if (val != resetLatency) {
                options.add(client);
                if (val < min) {
                    node.hop = client.info.id;
                    min = val;
                }
            }
        }
        // see if a rendezvous server can serve as the hop. (can't just iterate
        // through hopOptions, because that doesn't tell us which server to go
        // through.)
        for (NodeState server : servers) {
            // NOTE(review): 'min' holds a latency, but dstsPresent holds node
            // ids — this membership test looks suspicious; confirm intent
            // (should this be 'nid'?)
            if (server.dstsPresent.contains(min)) {
                options.add(server);
                if (node.hop == 0)
                    node.hop = server.info.id;
            }
        }
        if (node.hop == 0)
            log("node " + node + " down");
        return options.size();
    }

    /**
     * counts the avg number of one-hop or direct paths available to nodes
     * @return (number of reachable nodes, avg path count among them)
     */
    private Pair<Integer, Integer> findPathsForAllNodes() {
        NodeState myState = nodes.get(myNid); // NOTE(review): unused local
        int count = 0;
        int numNodesReachable = 0;
        for (NodeState node : otherNodes) {
            int d = findPaths(node);
            count += d;
            numNodesReachable += d > 0 ? 1 : 0;
        }
        if (numNodesReachable > 0)
            count /= numNodesReachable;
        return Pair.of(numNodesReachable, count);
    }

    // Signals the accept/receive loops to exit.
    public void quit() {
        doQuit.set(true);
    }

    // Per-member mutable routing state tracked by this node.
    private class NodeState implements Comparable<NodeState> {
        public String toString() {
            return "" + info.id;
        }

        /**
         * not null
         */
        public final NodeInfo info;

        /**
         * updated in resetTimeoutAtNode(). if hop == 0, this must be false; if
         * hop == the nid, this must be true.
         *
         * this should also be made to correspond with the appropriate latencies in myNid
         */
        public boolean isReachable = true;

        /**
         * the last known latencies to all other nodes. missing entry implies
         * resetLatency. this is populated/valid for rendezvous clients.
         *
         * invariants:
         *  - keyset is a subset of current members (memberNids); enforced in
         *    updateMembers()
         *  - keyset contains only live nodes; enforced in resetTimeoutAtNode()
         *  - values are not resetLatency
         *  - undefined if not a rendezvous client
         */
        public final ShortShortMap latencies = new ShortShortMap(resetLatency);

        /**
         * the recommended intermediate hop for us to get to this node, or 0 if
         * no way we know of to get to that node, and thus believe the node is
         * gone.
         *
         * invariants:
         *  - always refers to a member or 0; enforced in
         *    updateMembers()
         *  - never refers to dead node; enforced in resetTimeoutAtNode()
         *  - may refer to the node of this nodestate (may be dst)
         *  - never refers to the owning neuronnode (never is src)
         *  - cannot be the nid if !isReachable
         */
        public short hop;

        /**
         * remote failures. applies only if this nodestate is of a rendezvous
         * node. contains nids of all nodes for which this rendezvous cannot
         * recommend routes.
         *
         * invariants:
         *  - undefined if this is not a rendezvous node
         *  - empty
         */
        public final HashSet<NodeState> remoteFailures = new HashSet<NodeState>();

        /**
         * dstsPresent, the complement of remoteFailures (in defaultClients).
         */
        public final HashSet<Short> dstsPresent = new HashSet<Short>();

        /**
         * basically, his row/col. (all the nodes that he's responsible for).
         */
        public final HashSet<NodeState> defaultClients = new HashSet<NodeState>();

        /**
         * this is unused at the moment. still need to re-design.
         */
        public final HashSet<Short> hopOptions = new HashSet<Short>();

        public NodeState(NodeInfo info) {
            this.info = info;
            // latency to self is always zero
            latencies.put(info.id, (short) 0);
        }

        public int compareTo(NodeState o) {
            return new Short(info.id).compareTo(o.info.id);
        }
    }
}

// Sparse short->short map with a default: absent keys read as defaultValue,
// and storing defaultValue removes the key.
class ShortShortMap {
    private final Hashtable<Short,Short> table = new Hashtable<Short, Short>();
    private final short defaultValue;
    public ShortShortMap(short defaultValue) {
        this.defaultValue = defaultValue;
    }
    public Set<Short> keySet() {
        return table.keySet();
    }
    public boolean containsKey(short key) {
        return table.containsKey(key);
    }
    public void remove(short key) {
        table.remove(key);
    }
    public short get(short key) {
        Short value = table.get(key);
        return value != null ?
value : defaultValue;
    }

    public void put(short key, short value) {
        // keep the table sparse: storing the default equals absence
        if (value == defaultValue)
            table.remove(key);
        else
            table.put(key, value);
    }
}

// Plain-old-data wire messages. Fields are public and written/read field by
// field by Serialization below; field order is part of the wire format.

class NodeInfo {
    short id;
    int port;
    InetAddress addr;
}

class Rec {
    short dst;
    short via;
}

class Msg {
    short src;
    short version;
    short session;
}

class Join extends Msg {
    InetAddress addr;
    int port;
}

class Init extends Msg {
    short id;
    ArrayList<NodeInfo> members;
}

class Membership extends Msg {
    ArrayList<NodeInfo> members;
    short numNodes;
    short yourId;
}

class RoutingRecs extends Msg {
    ArrayList<Rec> recs;
}

class Ping extends Msg {
    long time;
    NodeInfo info;
}

class Pong extends Msg {
    long time;
}

class Measurements extends Msg {
    short[] probeTable;
    byte[] inflation;
}

class MemberPoll extends Msg {
}

class PeeringRequest extends Msg {
}

/**
 * Hand-rolled, reflection-free (de)serializer for the message types above.
 *
 * Wire format: a 4-byte big-endian type tag (0-11), then the payload fields
 * in declaration order. Lists are 4-byte-count prefixed; InetAddresses are
 * written as length-prefixed raw address bytes; the common Msg fields
 * (src, version, session) always trail the payload.
 *
 * Not thread-safe: readInt() reuses a per-instance scratch buffer.
 */
class Serialization {

    public void serialize(Object obj, DataOutputStream out) throws IOException {
        // exact-class dispatch (not instanceof), so subclasses don't shadow
        // their parents; unknown types are silently ignored, as before
        Class<?> c = obj.getClass();
        if (c == NodeInfo.class) {
            out.writeInt(0);
            writeNodeInfo(out, (NodeInfo) obj);
        } else if (c == Rec.class) {
            Rec casted = (Rec) obj;
            out.writeInt(1);
            out.writeShort(casted.dst);
            out.writeShort(casted.via);
        } else if (c == Msg.class) {
            out.writeInt(2);
            writeMsgTrailer(out, (Msg) obj);
        } else if (c == Join.class) {
            Join casted = (Join) obj;
            out.writeInt(3);
            writeAddr(out, casted.addr);
            out.writeInt(casted.port);
            writeMsgTrailer(out, casted);
        } else if (c == Init.class) {
            Init casted = (Init) obj;
            out.writeInt(4);
            out.writeShort(casted.id);
            writeMembers(out, casted.members);
            writeMsgTrailer(out, casted);
        } else if (c == Membership.class) {
            Membership casted = (Membership) obj;
            out.writeInt(5);
            writeMembers(out, casted.members);
            out.writeShort(casted.numNodes);
            out.writeShort(casted.yourId);
            writeMsgTrailer(out, casted);
        } else if (c == RoutingRecs.class) {
            RoutingRecs casted = (RoutingRecs) obj;
            out.writeInt(6);
            out.writeInt(casted.recs.size());
            for (Rec r : casted.recs) {
                out.writeShort(r.dst);
                out.writeShort(r.via);
            }
            writeMsgTrailer(out, casted);
        } else if (c == Ping.class) {
            Ping casted = (Ping) obj;
            out.writeInt(7);
            out.writeLong(casted.time);
            writeNodeInfo(out, casted.info);
            writeMsgTrailer(out, casted);
        } else if (c == Pong.class) {
            Pong casted = (Pong) obj;
            out.writeInt(8);
            out.writeLong(casted.time);
            writeMsgTrailer(out, casted);
        } else if (c == Measurements.class) {
            Measurements casted = (Measurements) obj;
            out.writeInt(9);
            out.writeInt(casted.probeTable.length);
            for (short s : casted.probeTable)
                out.writeShort(s);
            out.writeInt(casted.inflation.length);
            out.write(casted.inflation);
            writeMsgTrailer(out, casted);
        } else if (c == MemberPoll.class) {
            out.writeInt(10);
            writeMsgTrailer(out, (Msg) obj);
        } else if (c == PeeringRequest.class) {
            out.writeInt(11);
            writeMsgTrailer(out, (Msg) obj);
        }
    }

    public Object deserialize(DataInputStream in) throws IOException {
        switch (readInt(in)) {
        case 0: // NodeInfo
            return readNodeInfo(in);
        case 1: { // Rec
            Rec obj = new Rec();
            obj.dst = in.readShort();
            obj.via = in.readShort();
            return obj;
        }
        case 2: // Msg
            return readMsgTrailer(in, new Msg());
        case 3: { // Join
            Join obj = new Join();
            obj.addr = readAddr(in);
            obj.port = readInt(in);
            return readMsgTrailer(in, obj);
        }
        case 4: { // Init
            Init obj = new Init();
            obj.id = in.readShort();
            obj.members = readMembers(in);
            return readMsgTrailer(in, obj);
        }
        case 5: { // Membership
            Membership obj = new Membership();
            obj.members = readMembers(in);
            obj.numNodes = in.readShort();
            obj.yourId = in.readShort();
            return readMsgTrailer(in, obj);
        }
        case 6: { // RoutingRecs
            RoutingRecs obj = new RoutingRecs();
            int len = readInt(in);
            obj.recs = new ArrayList<Rec>(len);
            for (int i = 0; i < len; i++) {
                Rec r = new Rec();
                r.dst = in.readShort();
                r.via = in.readShort();
                obj.recs.add(r);
            }
            return readMsgTrailer(in, obj);
        }
        case 7: { // Ping
            Ping obj = new Ping();
            obj.time = in.readLong();
            obj.info = readNodeInfo(in);
            return readMsgTrailer(in, obj);
        }
        case 8: { // Pong
            Pong obj = new Pong();
            obj.time = in.readLong();
            return readMsgTrailer(in, obj);
        }
        case 9: { // Measurements
            Measurements obj = new Measurements();
            obj.probeTable = new short[readInt(in)];
            for (int i = 0; i < obj.probeTable.length; i++)
                obj.probeTable[i] = in.readShort();
            obj.inflation = new byte[readInt(in)];
            // BUGFIX: was in.read(..), which may return after filling only
            // part of the array (short read), silently corrupting the data
            in.readFully(obj.inflation);
            return readMsgTrailer(in, obj);
        }
        case 10: // MemberPoll
            return readMsgTrailer(in, new MemberPoll());
        case 11: // PeeringRequest
            return readMsgTrailer(in, new PeeringRequest());
        default:
            throw new RuntimeException("unknown obj type");
        }
    }

    /** Writes the common Msg fields; these always trail the payload. */
    private static void writeMsgTrailer(DataOutputStream out, Msg m) throws IOException {
        out.writeShort(m.src);
        out.writeShort(m.version);
        out.writeShort(m.session);
    }

    /** Reads the common Msg fields into m and returns m. */
    private static <T extends Msg> T readMsgTrailer(DataInputStream in, T m) throws IOException {
        m.src = in.readShort();
        m.version = in.readShort();
        m.session = in.readShort();
        return m;
    }

    /** Writes an address as a 4-byte length followed by the raw bytes. */
    private static void writeAddr(DataOutputStream out, InetAddress addr) throws IOException {
        byte[] buf = addr.getAddress();
        out.writeInt(buf.length);
        out.write(buf);
    }

    private InetAddress readAddr(DataInputStream in) throws IOException {
        byte[] buf = new byte[readInt(in)];
        // BUGFIX: was in.read(buf) — a short read would corrupt the address
        in.readFully(buf);
        return InetAddress.getByAddress(buf);
    }

    private static void writeNodeInfo(DataOutputStream out, NodeInfo info) throws IOException {
        out.writeShort(info.id);
        out.writeInt(info.port);
        writeAddr(out, info.addr);
    }

    private NodeInfo readNodeInfo(DataInputStream in) throws IOException {
        NodeInfo info = new NodeInfo();
        info.id = in.readShort();
        info.port = readInt(in);
        info.addr = readAddr(in);
        return info;
    }

    private static void writeMembers(DataOutputStream out, ArrayList<NodeInfo> members) throws IOException {
        out.writeInt(members.size());
        for (NodeInfo n : members)
            writeNodeInfo(out, n);
    }

    private ArrayList<NodeInfo> readMembers(DataInputStream in) throws IOException {
        int len = readInt(in);
        ArrayList<NodeInfo> members = new ArrayList<NodeInfo>(len);
        for (int i = 0; i < len; i++)
            members.add(readNodeInfo(in));
        return members;
    }

    // scratch buffer for readInt; makes instances non-thread-safe (as before)
    private byte[] readBuffer = new byte[4];

    /** Reads a 4-byte big-endian int (same encoding as DataOutputStream.writeInt). */
    public int readInt(DataInputStream dis) throws IOException {
        dis.readFully(readBuffer, 0, 4);
        return ((readBuffer[0] & 255) << 24) + ((readBuffer[1] & 255) << 16)
                + ((readBuffer[2] & 255) << 8) + (readBuffer[3] & 255);
    }
}
package edu.cmu.neuron2; import java.io.*; import java.net.*; import java.util.*; import java.lang.annotation.*; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketTimeoutException; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Properties; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.logging.*; import java.util.logging.Formatter; import java.nio.ByteBuffer; //import org.apache.mina.common.ByteBuffer; import org.apache.mina.common.IoHandlerAdapter; import org.apache.mina.common.IoServiceConfig; import org.apache.mina.common.IoSession; import org.apache.mina.transport.socket.nio.DatagramAcceptor; import org.apache.mina.transport.socket.nio.DatagramAcceptorConfig; import edu.cmu.neuron2.RonTest.RunMode; class LabelFilter implements Filter { private final HashSet<String> suppressedLabels; private final boolean suppressAll; public LabelFilter(HashSet<String> suppressedLabels) { this.suppressedLabels = 
suppressedLabels;
        this.suppressAll = suppressedLabels.contains("all");
    }

    public boolean isLoggable(LogRecord record) {
        if (suppressAll)
            return false;
        // logger names have the form "<node>.<label>"; the part after the
        // first '.' is the label matched against the suppressed set
        String[] parts = record.getLoggerName().split("\\.", 2);
        return parts.length == 1 || !suppressedLabels.contains(parts[1]);
    }
}

public class NeuRonNode extends Thread {
    private final Reactor scheduler;
    public short myNid;
    private final boolean isCoordinator;
    private final String coordinatorHost;
    private final int basePort;
    private final AtomicBoolean doQuit = new AtomicBoolean();
    private Logger logger;

    /**
     * maps node id's to nodestates. this is the primary container.
     */
    private final Hashtable<Short, NodeState> nodes = new Hashtable<Short, NodeState>();

    /**
     * neighbors = rendesvousServers union rendezvousClients. we send our
     * routes to all servers in this set.
     */
    /**
     * maps nid to {the set of rendezvous servers to that nid}
     */
    private final Hashtable<Short, HashSet<NodeState>> rendezvousServers = new Hashtable<Short, HashSet<NodeState>>();

    /**
     * the set of nodes that are relying us to get to someone.
     *
     * this is needed during route computation. i need to know who to calculate
     * routes among, and we want to include rendezvousclients in this set.
     */
    private final SortedSet<NodeState> rendezvousClients = new TreeSet<NodeState>();

    // These variables used internally to updateMembers (keep this way, for
    // abstraction purposes), and are only exposed for printGrid().
    private NodeState[][] grid;
    private short numCols, numRows;

    // Coord-only: maps addresses to nids
    private final Hashtable<InetAddress, Short> addr2id = new Hashtable<InetAddress, Short>();

    // Specific to this node. Lookup from destination to default rs's to it
    private final Hashtable<Short, HashSet<NodeState>> defaultRendezvousServers = new Hashtable<Short, HashSet<NodeState>>();

    // Lookup from node to a set of its rendezvous servers
    private final Hashtable<NodeState, HashSet<NodeState>> nodeDefaultRSs = new Hashtable<NodeState, HashSet<NodeState>>();

    private short currentStateVersion;

    // periods are in seconds unless noted otherwise
    public final int neighborBroadcastPeriod;
    public final int probePeriod;
    public final int gcPeriod;

    private final NodeInfo coordNode;
    private final RunMode mode;
    private final short numNodesHint;
    private final Runnable semAllJoined;
    private final Random rand = new Random();
    private final InetAddress myCachedAddr;
    private ArrayList<Short> cachedMemberNids = new ArrayList<Short>(); // sorted list of members
    private short cachedMemberNidsVersion;
    private final boolean blockJoins;
    private final boolean capJoins;
    private final int joinRetries; // seconds
    private final boolean enableSubpings;
    private final int subpingPeriod; // seconds

    // number of intervals which we'll split the probing time into
    private int numProbeIntervals;
    // NOTE(review): raw Set[]; elements are cast to NodeState in subping() —
    // confirm before tightening the type
    private Set[] pingTable;
    private final Hashtable<NodeState, Integer> pingId = new Hashtable<NodeState, Integer>();

    private final int dumpPeriod;
    private final FileHandler fh;
    private final short origNid;
    private final short sessionId;
    private final int linkTimeout;
    private final int membershipBroadcastPeriod;

    // labels suppressed by default in log output (see createLabelFilter)
    private static final String defaultLabelSet = "subprobe send.Ping recv.Ping stale.Ping send.Pong recv.Pong send.Subprobe recv.Subprobe stale.Pong send.Measurements send.RoutingRecs subprobe";

    private final Hashtable<Short,Long> lastSentMbr = new Hashtable<Short,Long>();
    private final double smoothingFactor;
    // sentinel meaning "no measurement"; also the ShortShortMap default
    private final short resetLatency = Short.MAX_VALUE;
    private final DatagramAcceptor acceptor;
    private final Hashtable<Short, NodeInfo> coordNodes = new Hashtable<Short, NodeInfo>();
    private final ArrayList<Short> memberNids = new ArrayList<Short>();
    private final
ArrayList<NodeState> otherNodes = new ArrayList<NodeState>(); private final ArrayList<NodeState> lastRendezvousServers = new ArrayList<NodeState>(); private final double directBonus, hysteresisBonus; private final long startTime = System.currentTimeMillis(); private final int pingDumpPeriod, pingDumpInitialDelay; private final Reactor reactor; private Runnable safeRun(final Runnable r) { return new Runnable() { public void run() { try { r.run(); } catch (Throwable ex) { err(ex); } } }; } private void createLabelFilter(Properties props, String labelSet, Handler handler) { String[] labels = props.getProperty(labelSet, defaultLabelSet).split(" "); final HashSet<String> suppressedLabels = new HashSet<String>(Arrays.asList(labels)); handler.setFilter(new LabelFilter(suppressedLabels)); } private final int joinDelay; public NeuRonNode(short id, Properties props, short numNodes, Runnable semJoined, InetAddress myAddr, String coordinatorHost, NodeInfo coordNode, DatagramAcceptor acceptor, Reactor reactor) { this.reactor = reactor; joinDelay = rand.nextInt(Integer.parseInt(props.getProperty("joinDelayRange", "1"))); if ((coordNode == null) || (coordNode.addr == null)){ throw new RuntimeException("coordNode is null!"); } dumpPeriod = Integer.parseInt(props.getProperty("dumpPeriod", "60")); myNid = id; origNid = id; cachedMemberNidsVersion = (short)-1; joinRetries = Integer.parseInt(props.getProperty("joinRetries", "10")); // wait up to 10 secs by default for coord to be available membershipBroadcastPeriod = Integer.parseInt(props.getProperty("membershipBroadcastPeriod", "0")); // NOTE note that you'll probably want to set this, always! 
sessionId = Short.parseShort(props.getProperty("sessionId", "0")); blockJoins = Boolean.valueOf(props.getProperty("blockJoins", "true")); capJoins = Boolean.valueOf(props.getProperty("capJoins", "true")); this.coordinatorHost = coordinatorHost; this.coordNode = coordNode; mode = RunMode.valueOf(props.getProperty("mode", "sim").toUpperCase()); basePort = coordNode.port; scheme = RoutingScheme.valueOf(props.getProperty("scheme", "SQRT").toUpperCase()); if (scheme == RoutingScheme.SQRT) { neighborBroadcastPeriod = Integer.parseInt(props.getProperty("neighborBroadcastPeriod", "15")); } else { neighborBroadcastPeriod = Integer.parseInt(props.getProperty("neighborBroadcastPeriod", "30")); } gcPeriod = Integer.parseInt(props.getProperty("gcPeriod", neighborBroadcastPeriod + "")); enableSubpings = Boolean.valueOf(props.getProperty("enableSubpings", "true")); this.acceptor = acceptor; // for simulations we can safely reduce the probing frequency, or even turn it off //if (mode == RunMode.SIM) { //probePeriod = Integer.parseInt(props.getProperty("probePeriod", "60")); //} else { probePeriod = Integer.parseInt(props.getProperty("probePeriod", "30")); subpingPeriod = Integer.parseInt(props.getProperty("subpingPeriod", "" + probePeriod)); membershipTimeout = Integer.parseInt(props.getProperty("timeout", "" + 30*60)); linkTimeout = Integer.parseInt(props.getProperty("failoverTimeout", "" + membershipTimeout)); pingDumpInitialDelay = Integer.parseInt(props.getProperty("pingDumpInitialDelay", "60")); pingDumpPeriod = Integer.parseInt(props.getProperty("pingDumpPeriod", "60")); // Events are when simulated latencies change; these are substituted in // for real measured latencies, and can be useful in simulation. These // events must be specified in time order! To remove any sim latency // for a dst, set it to resetLatency. 
String simEventsSpec = props.getProperty("simEvents", ""); if (!simEventsSpec.equals("")) { String[] events = simEventsSpec.split(";"); for (String e : events) { String[] parts = e.split(" "); int secs = Integer.parseInt(parts[0]); short oid = Short.parseShort(parts[1]); if (oid == myNid) { short dst = Short.parseShort(parts[2]); short lat = Short.parseShort(parts[3]); simEvents.addLast(new SimEvent(secs, oid, dst, lat)); } } } smoothingFactor = Double.parseDouble(props.getProperty("smoothingFactor", "0.1")); directBonus = Double.parseDouble(props.getProperty("directBonus", "1.05")); hysteresisBonus = Double.parseDouble(props.getProperty("hysteresisBonus", "1.05")); Formatter minfmt = new Formatter() { public String format(LogRecord record) { StringBuilder buf = new StringBuilder(); buf.append(record.getMillis()).append(' ')/*.append(new Date(record.getMillis())).append(" ").append( record.getLevel()).append(" ")*/.append( record.getLoggerName()).append(": ").append( record.getMessage()).append("\n"); return buf.toString(); } }; Formatter fmt = new Formatter() { public String format(LogRecord record) { StringBuilder buf = new StringBuilder(); buf.append(record.getMillis()).append(' ').append(new Date(record.getMillis())).append(" ").append( record.getLevel()).append(" ").append( record.getLoggerName()).append(": ").append( record.getMessage()).append("\n"); return buf.toString(); } }; Logger rootLogger = Logger.getLogger(""); rootLogger.getHandlers()[0].setFormatter(fmt); logger = Logger.getLogger("node" + myNid); createLabelFilter(props, "consoleLogFilter", rootLogger.getHandlers()[0]); try { String logFileBase = props.getProperty("logFileBase", "%t/scaleron-log-"); fh = new FileHandler(logFileBase + myNid, true); fh.setFormatter(fmt); createLabelFilter(props, "fileLogFilter", fh); logger.addHandler(fh); } catch (IOException ex) { throw new RuntimeException(ex); } this.scheduler = reactor; grid = null; numCols = numRows = 0; isCoordinator = myNid == 0; 
currentStateVersion = (short) (isCoordinator ? 0 : -1); numNodesHint = Short.parseShort(props.getProperty("numNodesHint", "" + numNodes)); semAllJoined = semJoined; if (myAddr == null) { try { myCachedAddr = InetAddress.getLocalHost(); } catch (UnknownHostException ex) { throw new RuntimeException(ex); } } else { myCachedAddr = myAddr; } myPort = basePort + myNid; this.myAddr = new InetSocketAddress(myCachedAddr, myPort); clientTimeout = Integer.parseInt(props.getProperty("clientTimeout", "" + 3 * neighborBroadcastPeriod)); } private final int myPort; private final InetSocketAddress myAddr; private void handleInit(Init im) { if (im.id == -1) { throw new PlannedException("network is full; aborting"); } myNid = im.id; logger = Logger.getLogger("node_" + myNid); if (logger.getHandlers().length == 0) { logger.addHandler(fh); } currentStateVersion = im.version; log("got from coord => Init version " + im.version); updateMembers(im.members); } private String bytes2string(byte[] buf) { String s = "[ "; for (byte b : buf) { s += b + " "; } s += "]"; return s; } private void log(String msg) { logger.info(msg); } private void warn(String msg) { logger.warning(msg); } private void err(String msg) { logger.severe(msg); } public void err(Throwable ex) { StringWriter s = new StringWriter(); PrintWriter p = new PrintWriter(s); ex.printStackTrace(p); err(s.toString()); } /** * Used for logging data, such as neighbor lists. * * @param name - the name of the data, e.g.: "neighbors", "info" * @param value */ private void log(String name, Object value) { Logger.getLogger(logger.getName() + "." 
+ name).info(value.toString());
    }

    /**
     * A scripted latency change for simulation: at {@code secs} after start,
     * node {@code oid}'s latency to {@code dst} becomes {@code lat}.
     */
    public static final class SimEvent {
        public int secs;
        public short oid, dst, lat;

        public SimEvent(int secs, short src, short dst, short lat) {
            this.secs = secs;
            // BUGFIX: was `this.oid = oid;` — a self-assignment that left
            // oid always 0; the originating node id arrives in `src`.
            this.oid = src;
            this.dst = dst;
            this.lat = lat;
        }
    }

    public final ArrayDeque<SimEvent> simEvents = new ArrayDeque<SimEvent>();
    public final ShortShortMap simLatencies = new ShortShortMap(resetLatency);

    /** Thrown for expected, scripted shutdowns (e.g. full network). */
    public static final class PlannedException extends RuntimeException {
        public PlannedException(String msg) {
            super(msg);
        }
    }

    public final AtomicReference<Exception> failure = new AtomicReference<Exception>();

    public void run() {
        try {
            run3();
        } catch (PlannedException ex) {
            // expected termination: record it and release any joiner
            warn(ex.getMessage());
            failure.set(ex);
            if (semAllJoined != null) semAllJoined.run();
        } catch (Exception ex) {
            err(ex);
            failure.set(ex);
            if (semAllJoined != null) semAllJoined.run();
        }
    }

    /**
     * Similar to fixed-rate scheduling, but doesn't try to make up multiple
     * overdue items, but rather allows us to skip over them. This should deal
     * better with PLab's overloaded hosts.
     *
     * @param r The runnable task.
     * @param initialDelay The initial delay in seconds.
     * @param period The period in seconds.
*/ private ScheduledFuture<?> safeSchedule(final Runnable r, long initialDelay, final long period) { final long bufferTime = 1000; // TODO parameterize return scheduler.schedule(new Runnable() { private long scheduledTime = -1; public void run() { if (scheduledTime < 0) scheduledTime = System.currentTimeMillis(); r.run(); long now = System.currentTimeMillis(); scheduledTime = Math.max(scheduledTime + period * 1000, now + bufferTime); scheduler.schedule(this, scheduledTime - now, TimeUnit.MILLISECONDS); } }, initialDelay, TimeUnit.SECONDS); } private ScheduledFuture<?> safeScheduleMs(final Callable<Integer> r, final int maxPoints, long initialDelay, final long period) { return scheduler.schedule(new Runnable() { private long scheduledTime = -1; public void run() { if (scheduledTime < 0) scheduledTime = System.currentTimeMillis(); int points = 0; while (true) { try { points += r.call(); } catch (Exception ex) { err(ex); } long now = System.currentTimeMillis(); scheduledTime = Math.max(scheduledTime + period, now); if (scheduledTime > now) { scheduler.schedule(this, scheduledTime - now, TimeUnit.MILLISECONDS); break; } if (points > maxPoints) { scheduler.schedule(this, now + period, TimeUnit.MILLISECONDS); break; } } } }, initialDelay, TimeUnit.MILLISECONDS); } private boolean hasJoined = false; private Session session = null; public void run3() { if (isCoordinator) { try { safeSchedule(safeRun(new Runnable() { public void run() { log("checkpoint: " + coordNodes.size() + " nodes"); printMembers(); //printGrid(); } }), dumpPeriod, dumpPeriod); if (false) { acceptor.bind(new InetSocketAddress(InetAddress .getLocalHost(), basePort), new CoordReceiver()); } else { final CoordHandler handler = new CoordHandler(); session = reactor.register(null, myAddr, handler); } } catch (Exception ex) { throw new RuntimeException(ex); } } else { try { if (false) { final Receiver receiver = new Receiver(); acceptor.bind(new InetSocketAddress(myCachedAddr, myPort), receiver); } else { 
final NodeHandler handler = new NodeHandler(); session = reactor.register(null, myAddr, handler); } log("server started on " + myCachedAddr + ":" + (basePort + myNid)); // Split up the probing period into many small intervals. In each // interval we will ping a small fraction of the nodes. numProbeIntervals = numNodesHint / 3; pingTable = new HashSet[numProbeIntervals]; for(int i=0; i<numProbeIntervals; i++) pingTable[i] = new HashSet(); int probeSubPeriod = (1000 * probePeriod) / numProbeIntervals; safeScheduleMs(new Callable<Integer>() { int pingIter = 0; public Integer call() { int points = 0; if (hasJoined) { points += pingAll(pingIter); pingIter = (pingIter + 1) % numProbeIntervals; } return 1; } }, 5, 1234, probeSubPeriod); safeSchedule(safeRun(new Runnable() { public void run() { if (hasJoined) { /* * path-finding and rendezvous finding is * interdependent. the fact that we do the path-finding * first before the rendezvous servers is arbitrary. */ // TODO the below can be decoupled. Actually, with the current bug, node.hop isn't // set and findPathsForAllNodes() doesn't actually do anything. Pair<Integer, Integer> p = findPathsForAllNodes(); log(p.first + " live nodes, " + p.second + " avg paths, " + nodes.get(myNid).latencies.keySet() .size() + " direct paths"); // TODO this can also be decoupled. However, we don't want too much time to pass // between calculating rendezvous servers and actually sending to them, since in // the mean time we will have received recommendations which hint at remote failures // etc. Also our notion of isReachable will have changed. For SIMPLE this is easy: // we can remove from the set any nodes that are no longer reachable. An ad-hoc // solution would be to run it just once and then check if the dst is reachable before // broadcasting. This might take longer in certain failure scenarios. // We can, before sending, check whether link is down or we received a more recent // measurement packing showing remote failure. 
If remote failure, we'd like to wait // anyway since dst might be down. Might be useless, but can still send measurements. // If link is down, just don't send anything. We'll try again in the next iteration. // SUMMARY: after constructing measRecips, put each destination onto the queue, // and if when popped !dst.isReachable, just don't send. ArrayList<NodeState> measRecips = scheme == RoutingScheme.SIMPLE ? getAllReachableNodes() : getAllRendezvousServers(); // TODO this can also be decoupled, and also split up // into intervals. We should keep the probeTable[] // in memory and always up to date. Further optimization // is to keep array of bytes, so no serialization. broadcastMeasurements(measRecips); // TODO this can also be decoupled. Don't use // getAllRendezvousClients(), just work directly with // the list. Order doesn't matter (confirm). // Also split calls to findHops into intervals. if (scheme != RoutingScheme.SIMPLE) { broadcastRecommendations(); } } } }), 7, neighborBroadcastPeriod); if (enableSubpings) { int subpingSubPeriod = (1000 * subpingPeriod) / numProbeIntervals; safeScheduleMs(new Callable<Integer>() { int pingIter = 0; public Integer call() { if(hasJoined) { subping(pingIter); pingIter = (pingIter + 1) % numProbeIntervals; } return 1; } }, 5, 5521, subpingSubPeriod); // TODO should these initial offsets be constants? 
} scheduler.scheduleWithFixedDelay(safeRun(new Runnable() { public void run() { log("received/sent " + pingpongCount + " pings/pongs " + pingpongBytes + " bytes"); log("received/sent " + subprobeCount + " subprobes " + subprobeBytes + " bytes"); } }), pingDumpInitialDelay, pingDumpPeriod, TimeUnit.SECONDS); final InetAddress coordAddr = InetAddress.getByName(coordinatorHost); scheduler.schedule(safeRun(new Runnable() { private int count = 0; public void run() { if (count > joinRetries) { warn("exceeded max tries!"); System.exit(0); } else if (!hasJoined) { log("sending join to coordinator at " + coordinatorHost + ":" + basePort + " (try " + count++ + ")"); Join msg = new Join(); msg.addr = myCachedAddr; msg.src = myNid; // informs coord of orig id msg.port = myPort; sendObject(msg, coordAddr, basePort, (short)-1); log("waiting for InitMsg"); scheduler.schedule(this, 10, TimeUnit.SECONDS); } } }), joinDelay, TimeUnit.SECONDS); if (semAllJoined != null) semAllJoined.run(); } catch (IOException ex) { throw new RuntimeException(ex); } } } private final HashSet<Short> ignored = new HashSet<Short>(); public synchronized void ignore(short nid) { if (nid != myNid) { log("ignoring " + nid); ignored.add(nid); } } public synchronized void unignore(short nid) { if (nid != myNid) { log("unignoring " + nid); ignored.remove(nid); } } private ArrayList<NodeState> getAllReachableNodes() { ArrayList<NodeState> nbrs = new ArrayList<NodeState>(); for (NodeState n : otherNodes) if (n.isReachable) nbrs.add(n); return nbrs; } private static final byte SUBPING = 0, SUBPING_FWD = 1, SUBPONG = 2, SUBPONG_FWD = 3; private Subprobe subprobe(InetSocketAddress nod, long time, byte type) { Subprobe p = new Subprobe(); p.src = myAddr; p.nod = nod; p.time = time; p.type = type; return p; } private int subping(int pingIter) { // We will only subping a fraction of the nodes at this iteration // Note: this synch. 
statement is redundant until we remove global lock List<Short> nids = new ArrayList<Short>(); int bytes = 0, initCount = subprobeCount; for (Object obj : pingTable[pingIter]) { NodeState dst = (NodeState) obj; // TODO: dst.hop almost always != 0 (except when dst is new node) if (dst.info.id != myNid && dst.hop != 0) { NodeState hop = nodes.get(dst.hop); long time = System.currentTimeMillis(); InetSocketAddress nod = new InetSocketAddress(dst.info.addr, dst.info.port); InetSocketAddress hopAddr = new InetSocketAddress( hop.info.addr, hop.info.port); bytes += sendObj(subprobe(nod, time, SUBPING), hopAddr); nids.add(dst.info.id); subprobeCount++; } } if (bytes > 0) { log("sent subpings " + bytes + " bytes, to " + nids); subprobeBytes += bytes; } return subprobeCount - initCount; } private final Serialization probeSer = new Serialization(); private int sendObj(Object o, InetSocketAddress dst) { try { // TODO investigate directly writing to ByteBuffer ByteArrayOutputStream baos = new ByteArrayOutputStream(); probeSer.serialize(o, new DataOutputStream(baos)); byte[] buf = baos.toByteArray(); session.send(ByteBuffer.wrap(buf), dst); return buf.length; } catch (Exception ex) { err(ex); return 0; } } private void handleSubping(Subprobe p) { if (myAddr.equals(p.nod)) { sendObj(subprobe(p.nod, p.time, SUBPONG_FWD), p.src); log("subprobe", "direct subpong from/to " + p.src); } else { // we also checked for p.src because eventually we'll need to // forward the subpong back too; if we don't know him, no point in // sending a subping sendObj(subprobe(p.src, p.time, SUBPING_FWD), p.nod); // log("subprobe", "subping fwd from " + p.src + " to " + p.nod); } } private void handleSubpingFwd(Subprobe p) { sendObj(subprobe(p.nod, p.time, SUBPONG), p.src); // log("subprobe", "subpong to " + p.nod + " via " + p.src); } private void handleSubpong(Subprobe p) { sendObj(subprobe(p.src, p.time, SUBPONG_FWD), p.nod); // log("subprobe", "subpong fwd from " + p.src + " to " + p.nod); } private 
final Hashtable<InetSocketAddress, NodeState> addr2node = new Hashtable<InetSocketAddress, NodeState>();

// NOTE(review): throws NPE if the address is unknown to addr2node — callers
// must only pass addresses of current members; confirm against call sites.
private int addr2nid(InetSocketAddress a) {
    return addr2node.get(a).info.id;
}

/** Logs the one-way latency (half the round trip) measured by a completed subprobe. */
private void handleSubpongFwd(Subprobe p, long receiveTime) {
    long latency = (receiveTime - p.time) / 2;
    log("subpong from " + addr2nid(p.nod) + " via " + addr2nid(p.src) + ": "
            + latency + ", time " + p.time);
}

/**
 * Sends a PeerPing to every node assigned to the given ping sub-interval
 * (excluding ourselves), and — once per full ping interval, on sub-interval
 * 0 — a Ping to the coordinator carrying our original id.
 *
 * @param pingIter index of the current ping sub-interval
 * @return the number of pings sent during this call
 */
private int pingAll(int pingIter) {
    log("pinging");
    // We will only ping a fraction of the nodes at this iteration
    // Note: this synch. statement is redundant until we remove global lock
    int initCount = pingpongCount;
    PeerPing ping = new PeerPing();
    ping.time = System.currentTimeMillis();
    ping.src = myAddr;
    for (Object node : pingTable[pingIter]) {
        NodeInfo info = ((NodeState) node).info;
        if (info.id != myNid) {
            pingpongCount++;
            pingpongBytes += sendObj(ping, new InetSocketAddress(info.addr, info.port));
        }
    }
    /*
     * send ping to the membership server (co-ord) - this might not be
     * required if everone makes their own local decision. i.e. each node
     * notices that no other node can reach a node (say X), then each node
     * sends the co-ord a msg saying that "i think X is dead". The sending
     * of this msg can be staggered in time so that the co-ord is not
     * flooded with mesgs. The co-ordinator can then make a decision on
     * keeping or removing node Y from the membership. On seeing a
     * subsequent msg from the co-ord that X has been removed from the
     * overlay, if a node Y has not sent its "i think X is dead" msg, it can
     * cancel this event.
     */
    // Only ping the coordinator once per ping interval (not per
    // subinterval)
    if (pingIter == 0) {
        Ping p = new Ping();
        p.time = System.currentTimeMillis();
        NodeInfo tmp = nodes.get(myNid).info;
        p.info = new NodeInfo();
        p.info.id = origNid; // note that the ping info uses the
                             // original id
        p.info.addr = tmp.addr;
        p.info.port = tmp.port;
        pingpongCount++;
        pingpongBytes += sendObject(p, (short) 0);
    }
    // log("sent pings, " + totalSize + " bytes");
    return pingpongCount - initCount;
}

/**
 * Deserializes the full contents of buf, returning null (after logging)
 * on any failure rather than throwing.
 */
private Object deserialize(ByteBuffer buf) {
    byte[] bytes = new byte[buf.limit()];
    buf.get(bytes);
    try {
        return new Serialization().deserialize(new DataInputStream(new ByteArrayInputStream(bytes)));
    } catch (Exception ex) {
        err(ex);
        return null;
    }
}

// Coordinator-side maps: assigned id -> the node's original id, and
// assigned id -> hostname (used only for logging).
private Hashtable<Short,Short> id2oid = new Hashtable<Short,Short>();
private Hashtable<Short,String> id2name = new Hashtable<Short,String>();

/**
 * coordinator's msg handling loop
 */
public final class CoordHandler implements ReactorHandler {

    /**
     * Generates non-repeating random sequence of short IDs, and keeps
     * track of how many are emitted.
     */
    public final class IdGenerator {
        private final Iterator<Short> iter;
        private short counter; // how many ids have been handed out so far

        public IdGenerator() {
            // Shuffle 1..Short.MAX_VALUE-1 so ids come out in random order
            // but never repeat.
            List<Short> list = new ArrayList<Short>();
            for (short s = 1; s < Short.MAX_VALUE; s++) {
                list.add(s);
            }
            Collections.shuffle(list);
            iter = list.iterator();
        }

        /** @return the next unused random id. */
        public short next() {
            counter++;
            return iter.next();
        }

        /** @return how many ids have been emitted. */
        public short count() {
            return counter;
        }
    }

    private IdGenerator nidGen = new IdGenerator();

    /** Sends an Init (assigned id + current member list) back to a joiner. */
    private void sendInit(short nid, Join join) {
        Init im = new Init();
        im.id = nid;
        im.members = getMemberInfos();
        sendObject(im, join.addr, join.port, (short)-1);
    }

    @Override
    public void handle(Session session, InetSocketAddress src,
            java.nio.ByteBuffer buf) {
        try {
            Msg msg = (Msg) deserialize(buf);
            if (msg == null) return;
            if (msg.session == sessionId) {
                if (msg instanceof Join) {
                    final Join join = (Join) msg ;
                    if (id2oid.values().contains(msg.src)) {
                        // we already added this guy; just resend him the init msg
                        sendInit(addr2id.get(join.addr), join);
                    } else {
                        // need to add this guy and send him the init msg (if there's space)
                        if (!capJoins || coordNodes.size() < numNodesHint) {
                            short newNid = nidGen.next();
                            addMember(newNid, join.addr, join.port, join.src);
                            if (blockJoins) {
                                // Hold all Inits back until the network is full,
                                // then release them in one batch.
                                if (coordNodes.size() >= numNodesHint) {
                                    // time to broadcast ims to everyone
                                    ArrayList<NodeInfo> memberList = getMemberInfos();
                                    for (NodeInfo m : memberList) {
                                        Init im = new Init();
                                        im.id = m.id;
                                        im.members = memberList;
                                        sendObject(im, m);
                                    }
                                }
                            } else {
                                sendInit(newNid, join);
                                broadcastMembershipChange(newNid);
                            }
                            if (coordNodes.size() == numNodesHint) {
                                semAllJoined.run();
                            }
                        } else if (capJoins && coordNodes.size() == numNodesHint) {
                            // Network is full: reply with id -1 so the joiner
                            // knows it was rejected.
                            Init im = new Init();
                            im.id = -1;
                            im.members = new ArrayList<NodeInfo>();
                            sendObject(im, join.addr, join.port, (short)-1);
                        }
                    }
                } else if (coordNodes.containsKey(msg.src)) {
                    // Message from a known member: refresh its liveness timeout
                    // and resync it if its view of the membership is stale.
                    log("recv."
                            + msg.getClass().getSimpleName(), "from " + msg.src
                            + " (oid " + id2oid.get(msg.src) + ", "
                            + id2name.get(msg.src) + ")");
                    resetTimeoutAtCoord(msg.src);
                    if (msg.version < currentStateVersion) {
                        // this includes joins
                        log("updating stale membership");
                        sendMembership(msg.src);
                    } else if (msg instanceof Ping) {
                        // ignore the ping
                    } else {
                        throw new Exception("can't handle message type here: "
                                + msg.getClass().getName());
                    }
                } else {
                    // Message from a node we don't know (e.g. one we expired):
                    // re-admit it if it pings us and there is room.
                    if ((!capJoins || coordNodes.size() < numNodesHint)
                            && msg instanceof Ping) {
                        Ping ping = (Ping) msg;
                        log("dead." + ping.getClass().getSimpleName(),
                                "from '" + ping.src + "' "
                                + ping.info.addr.getHostName());
                        Short mappedId = addr2id.get(ping.info.addr);
                        short nid;
                        if (mappedId == null) {
                            nid = nidGen.next();
                            addMember(nid, ping.info.addr, ping.info.port, ping.info.id);
                            broadcastMembershipChange(nid);
                        } else {
                            nid = mappedId;
                        }
                        Init im = new Init();
                        im.id = nid;
                        im.src = myNid;
                        im.version = currentStateVersion;
                        im.members = getMemberInfos();
                        sendObject(im, nid);
                    } else {
                        log("dead."
+ msg.getClass().getSimpleName(), "from '" + msg.src + "'");
                    }
                }
            }
        } catch (Exception ex) {
            err(ex);
        }
    }
}

/**
 * coordinator's msg handling loop
 */
public final class CoordReceiver extends IoHandlerAdapter {
    // Mina path is unused; all traffic goes through the Reactor handlers.
    @Override
    public void messageReceived(IoSession session, Object obj) throws Exception {
        assert false;
    }
}

// Traffic counters dumped periodically by the stats task.
private int pingpongCount, pingpongBytes, subprobeCount, subprobeBytes;

public final class NodeHandler implements ReactorHandler {

    /**
     * Sim-mode only: applies any SimEvents whose time has arrived, then
     * returns the (possibly overridden) latency for nid.
     */
    public short getSimLatency(short nid) {
        long time = System.currentTimeMillis();
        for (SimEvent e : simEvents) {
            if (time - startTime >= e.secs * 1000) {
                // make this event happen
                simLatencies.put(e.dst, e.lat);
            }
        }
        return simLatencies.get(nid);
    }

    @Override
    public void handle(Session session, InetSocketAddress src, ByteBuffer buf) {
        try {
            // TODO check SimpleMsg.session
            Object obj = deserialize(buf);
            if (obj == null) return;
            long receiveTime;
            if (obj instanceof Subprobe) {
                // Dispatch on which leg of the subprobe round trip this is.
                Subprobe p = (Subprobe) obj;
                subprobeBytes += buf.limit();
                subprobeCount += 2;
                switch (p.type) {
                    case SUBPING:
                        handleSubping(p);
                        break;
                    case SUBPING_FWD:
                        handleSubpingFwd(p);
                        break;
                    case SUBPONG:
                        handleSubpong(p);
                        break;
                    case SUBPONG_FWD:
                        // TODO move into the new worker thread when it's here
                        receiveTime = System.currentTimeMillis();
                        handleSubpongFwd(p, receiveTime);
                        break;
                    default:
                        assert false;
                }
                return; // TODO early exit is unclean
            } else if (obj instanceof PeerPing) {
                // Echo the ping back as a pong carrying the original timestamp.
                PeerPing ping = ((PeerPing) obj);
                PeerPong pong = new PeerPong();
                pong.time = ping.time;
                pong.src = myAddr;
                pingpongBytes += sendObj(pong, ping.src) + buf.limit();
                pingpongCount += 2;
                return; // TODO early exit is unclean
            } else if (obj instanceof PeerPong) {
                PeerPong pong = (PeerPong) obj;
                long rawRtt = System.currentTimeMillis() - pong.time;
                if (mode == RunMode.SIM) {
                    short l = getSimLatency(addr2node.get(pong.src).info.id);
                    if (l < resetLatency) {
                        rawRtt = 2 * l;
                    }
                }
                NodeState state = addr2node.get(pong.src);
                // if the rtt was astronomical, just treat it as a dropped packet
                if (rawRtt / 2 < Short.MAX_VALUE) {
                    // we define "latency" as rtt/2; this should be
                    // a bigger point near the top of this file
                    short latency = (short) (rawRtt / 2);
                    // BUG(review): state is dereferenced here BEFORE the
                    // null check below — if addr2node has no entry for
                    // pong.src this throws NPE and the else branch is
                    // unreachable. The read should move inside the
                    // null-checked branch.
                    short nid = state.info.id;
                    if (state != null) {
                        resetTimeoutAtNode(nid);
                        NodeState self = nodes.get(myNid);
                        short oldLatency = self.latencies.get(nid);
                        // Exponentially weighted moving average; a reset
                        // (resetLatency) sentinel means "no prior sample".
                        final short ewma;
                        if (oldLatency == resetLatency) {
                            ewma = latency;
                        } else {
                            ewma = (short) (smoothingFactor * latency
                                    + (1 - smoothingFactor) * oldLatency);
                        }
                        log("latency", state + " = " + latency + ", ewma "
                                + ewma + ", time " + pong.time);
                        self.latencies.put(nid, ewma);
                    } else {
                        log("latency", "some " + nid + " = " + latency);
                    }
                    pingpongCount++;
                    pingpongBytes += buf.limit();
                }
                return; // TODO early exit is unclean
            }
            Msg msg = (Msg) obj;
            // Accept only messages from the coordinator (src 0), known
            // members, or Pings, and only for our current session.
            if ((msg.src == 0 || nodes.containsKey(msg.src)
                    || msg instanceof Ping) && msg.session == sessionId) {
                NodeState state = nodes.get(msg.src);
                log("recv." + msg.getClass().getSimpleName(), "from "
                        + msg.src + " len " + ((ByteBuffer) buf).limit());
                // for other messages, make sure their state version is
                // the same as ours
                if (msg.version > currentStateVersion) {
                    // Sender is ahead of us.
                    if (msg instanceof Membership && hasJoined) {
                        currentStateVersion = msg.version;
                        Membership m = (Membership) msg;
                        assert myNid == m.yourId;
                        updateMembers(m.members);
                    } else if (msg instanceof Init) {
                        hasJoined = true;
                        if (semAllJoined != null) semAllJoined.run();
                        // id == -1 means the coordinator rejected our join.
                        if (((Init) msg).id == -1) session.close();
                        handleInit((Init) msg);
                    } else {
                        // i am out of date - wait until i am updated
                    }
                } else if (msg.version == currentStateVersion) {
                    // from coordinator
                    if (msg instanceof Membership) {
                        Membership m = (Membership) msg;
                        assert myNid == m.yourId;
                        updateMembers(m.members);
                    } else if (msg instanceof Measurements) {
                        resetTimeoutOnRendezvousClient(msg.src);
                        updateMeasurements((Measurements) msg);
                    } else if (msg instanceof RoutingRecs) {
                        RoutingRecs recs = (RoutingRecs) msg;
                        handleRecommendations(recs);
                        log("got recs " + routesToString(recs.recs));
                    } else if (msg instanceof Ping || msg instanceof Pong
                            || msg instanceof Init) {
                        // nothing to do, already handled above
                    } else {
                        throw new Exception("can't handle that message type");
                    }
                } else {
                    // Sender is behind us; drop.
                    log("stale." + msg.getClass().getSimpleName(), "from "
                            + msg.src + " version " + msg.version
                            + " current " + currentStateVersion);
                }
            } else {
                // log("ignored." + msg.getClass().getSimpleName(), "ignored from " + msg.src + " session " + msg.session);
            }
        } catch (Exception ex) {
            err(ex);
        }
    }
}

/**
 * receiver's msg handling loop
 */
public final class Receiver extends IoHandlerAdapter {
    // Mina path is unused; all traffic goes through the Reactor handlers.
    @Override
    public void messageReceived(IoSession session, Object buf) throws Exception {
        assert false;
    }
}

/**
 * If we don't hear from a node for this number of seconds, then consider
 * them dead.
 */
private int membershipTimeout;

// Pending "declare dead" tasks, keyed by node id; rescheduled on every
// message from that node.
private Hashtable<Short, ScheduledFuture<?>> timeouts =
        new Hashtable<Short, ScheduledFuture<?>>();

/**
 * a coord-only method
 *
 * Re-arms the death timer for nid: cancels any pending removal and
 * schedules a fresh one membershipTimeout seconds out.
 *
 * @param nid
 */
private void resetTimeoutAtCoord(final short nid) {
    if (coordNodes.containsKey(nid)) {
        ScheduledFuture<?> oldFuture = timeouts.get(nid);
        if (oldFuture != null) {
            oldFuture.cancel(false);
        }
        ScheduledFuture<?> future = scheduler.schedule(safeRun(new Runnable() {
            public void run() {
                removeMember(nid);
            }
        }), membershipTimeout, TimeUnit.SECONDS);
        timeouts.put(nid, future);
    }
}

// Seconds of silence after which a rendezvous client is dropped.
private final int clientTimeout;

private final Hashtable<Short, ScheduledFuture<?>> rendezvousClientTimeouts =
        new Hashtable<Short, ScheduledFuture<?>>();

/**
 * Marks nid as an active rendezvous client (a node that sends us its
 * measurements) and re-arms the timer that evicts it after clientTimeout
 * seconds of silence.
 */
private void resetTimeoutOnRendezvousClient(final short nid) {
    final NodeState node = nodes.get(nid);
    // TODO: wrong semantics for isReachable
    if (!node.isReachable) return;
    ScheduledFuture<?> oldFuture = rendezvousClientTimeouts.get(nid);
    if (oldFuture != null) {
        oldFuture.cancel(false);
    }
    if (rendezvousClients.add(node)) {
        log("rendezvous client " + node + " added");
    }
    ScheduledFuture<?> future = scheduler.schedule(safeRun(new Runnable() {
        public void run() {
            if (rendezvousClients.remove(node)) {
                log("rendezvous client " + node + " removed");
            }
        }
    }), clientTimeout, TimeUnit.SECONDS);
    rendezvousClientTimeouts.put(nid, future);
}

/**
 * Marks nid reachable (restoring a direct hop if it had none) and re-arms
 * the timer that marks it unreachable again after linkTimeout seconds of
 * silence.
 */
private void resetTimeoutAtNode(final short nid) {
    if (nodes.containsKey(nid)) {
        ScheduledFuture<?> oldFuture = timeouts.get(nid);
        if (oldFuture != null) {
            oldFuture.cancel(false);
        }
        final NodeState node = nodes.get(nid);
        if (!node.isReachable) {
            log(nid + " reachable");
            // Node came back up: restore the direct route as its hop.
            if (node.hop == 0) {
                node.isHopRecommended = false;
                node.cameUp = true;
                node.hop = nid;
            }
        }
        node.isReachable = true;
        node.isDead = false;
        ScheduledFuture<?> future = scheduler.schedule(safeRun(new Runnable() {
            public void run() {
                if (nodes.containsKey(nid)) {
                    log(nid + " unreachable");
                    node.isReachable = false;
                    nodes.get(myNid).latencies.remove(nid);
                    // TODO: do we really want this?
                    rendezvousClients.remove(node);
                    findPaths(node, false);
                }
            }
        }), linkTimeout, TimeUnit.SECONDS);
        timeouts.put(nid, future);
    }
}

/**
 * a coordinator-only method
 *
 * Registers a new member under its freshly assigned id, bumps the state
 * version, and starts its death timer.
 */
private NodeInfo addMember(short newNid, InetAddress addr, int port,
        short origId) {
    NodeInfo info = new NodeInfo();
    info.id = newNid;
    info.addr = addr;
    info.port = port;
    coordNodes.put(newNid, info);
    id2oid.put(newNid, origId);
    id2name.put(newNid, addr.getHostName());
    addr2id.put(addr, newNid);
    log("adding new node: " + newNid + " oid " + origId + " name "
            + id2name.get(newNid));
    currentStateVersion++;
    resetTimeoutAtCoord(newNid);
    return info;
}

// NOTE(review): declared but never read in the visible code — presumably
// intended for the "signal the daemon thread" path described below; verify
// before removing.
private final AtomicBoolean membersChanged = new AtomicBoolean();

/**
 * a coordinator-only method
 *
 * @param exceptNid - if this is 0, then we must have been called by the
 * periodic membership-broadcast daemon thread, so actually send stuff;
 * otherwise, we should just signal to the daemon thread a pending change
 */
private void broadcastMembershipChange(short exceptNid) {
    if (exceptNid == 0 || membershipBroadcastPeriod == 0) {
        for (short nid : coordNodes.keySet()) {
            if (nid != exceptNid) {
                sendMembership(nid);
            }
        }
    }
}

/** @return a snapshot of the current member list. */
ArrayList<NodeInfo> getMemberInfos() {
    return new ArrayList<NodeInfo>(coordNodes.values());
}

/**
 * a coordinator-only method
 *
 * throttles these messages so they're sent at most once per second
 *
 * NOTE(review): no throttling is visible in this method body — the claim
 * above appears stale; confirm against the callers.
 */
private void sendMembership(short nid) {
    Membership msg = new Membership();
    msg.yourId = nid;
    msg.members = getMemberInfos();
    sendObject(msg, coordNodes.get(nid));
}

/**
 * a coordinator-only method
 *
 * NOTE: there is a hack workaround here for sim mode, because addr2id is
 * not storing unique host:port combos, only unique hosts.
 *
 * @param nid
 */
private void removeMember(short nid) {
    log("removing dead node " + nid + " oid " + id2oid.get(nid) + " "
            + id2name.get(nid));
    id2oid.remove(nid);
    NodeInfo info = coordNodes.remove(nid);
    Short mid = addr2id.remove(info.addr);
    if (mode != RunMode.SIM) assert mid != null;
    currentStateVersion++;
    broadcastMembershipChange(nid);
}

/**
 * updates our member state. modifies data structures as necessary to
 * maintain invariants.
 *
 * @param newNodes
 */
private void updateMembers(List<NodeInfo> newNodes) {
    HashSet<Short> newNids = new HashSet<Short>();
    for (NodeInfo node : newNodes)
        newNids.add(node.id);
    // add new nodes
    for (NodeInfo node : newNodes)
        if (!nodes.containsKey(node.id)) {
            NodeState newNode = new NodeState(node);
            // Choose a subinterval for this node during which we will ping it
            int loc = rand.nextInt(numProbeIntervals);
            pingTable[loc].add(newNode);
            pingId.put(newNode, loc);
            nodes.put(node.id, newNode);
            addr2node.put(new InetSocketAddress(node.addr, node.port), newNode);
            if (node.id != myNid) resetTimeoutAtNode(node.id);
        }
    // Remove nodes. We need toRemove to avoid
    // ConcurrentModificationException on the table that we'd be looping
    // through.
    // NOTE(review): newNids was already populated with exactly these ids at
    // the top of this method — this loop is redundant.
    for (NodeInfo node : newNodes)
        newNids.add(node.id);
    HashSet<Pair<Short, NodeState>> toRemove = new HashSet<Pair<Short,NodeState>>();
    for (Map.Entry<Short, NodeState> entry : nodes.entrySet())
        if (!newNids.contains(entry.getKey()))
            toRemove.add(Pair.of(entry.getKey(), entry.getValue()));
    for (Pair<Short, NodeState> pair : toRemove) {
        short nid = pair.first;
        NodeState node = pair.second;
        // Remove the node from the subinterval during which it
        // was pinged.
        int index = pingId.remove(node);
        pingTable[index].remove(node);
        addr2node.remove(new InetSocketAddress(node.info.addr, node.info.port));
        NodeState n = nodes.remove(nid);
        assert n != null;
    }
    // consistency cleanups: check that all nid references are still valid nid's
    for (NodeState state : nodes.values()) {
        if (state.hop != 0 && !newNids.contains(state.hop)) {
            // Hop node left the overlay; fall back to the direct route.
            state.hop = state.info.id;
            state.isHopRecommended = false;
        }
        for (Iterator<Short> i = state.hopOptions.iterator(); i.hasNext();)
            if (!newNids.contains(i.next())) i.remove();
        HashSet<Short> garbage = new HashSet<Short>();
        for (short nid : state.latencies.keySet())
            if (!newNids.contains(nid)) garbage.add(nid);
        for (short nid : garbage)
            state.latencies.remove(nid);
        // Clear the remote failures hash, since this node will now have a different
        // set of default rendezvous nodes.
        state.remoteFailures.clear();
    }

    // regenerate alternative views of this data
    NodeState self = nodes.get(myNid);
    assert self != null;
    memberNids.clear();
    memberNids.addAll(newNids);
    Collections.sort(memberNids);
    otherNodes.clear();
    otherNodes.addAll(nodes.values());
    otherNodes.remove(self);

    // ABOVE IS INDEPENDENT OF GRID

    // Lay members out row-major into a near-square grid; the last row may be
    // only partially used (tracked by lastColUsed).
    // numRows needs to be >= numCols
    numRows = (short) Math.ceil(Math.sqrt(nodes.size()));
    numCols = (short) Math.ceil((double) nodes.size() / (double) numRows);
    grid = new NodeState[numRows][numCols];
    // These are used temporarily for setting up the defaults
    Hashtable<NodeState, Short> gridRow = new Hashtable<NodeState, Short>();
    Hashtable<NodeState, Short> gridColumn = new Hashtable<NodeState, Short>();
    List<Short> nids = memberNids;
    short i = 0; // node counter
    short numberOfNodes = (short) memberNids.size();
    short lastColUsed = (short) (numCols - 1); // default is for the full grid
    gridLoop:
    for (short r = 0; r < numRows; r++) {
        for (short c = 0; c < numCols; c++) {
            // Are there any more nodes to put into the grid?
            if(i > numberOfNodes - 1) {
                // Assert: We shouldn't create a grid with an empty last row.
                assert (r == numRows - 1) && (c > 0);
                lastColUsed = (short) (c - 1);
                break gridLoop;
            }
            grid[r][c] = nodes.get(nids.get(i++));
            gridRow.put(grid[r][c], r);
            gridColumn.put(grid[r][c], c);
        }
    }

    // Algorithm described in model_choices.tex
    // Set up hash of each node's default rendezvous servers
    // Note: a node's default rendezvous servers will include itself.
    nodeDefaultRSs.clear();
    for(NodeState node : nodes.values()) {
        int rn = gridRow.get(node);
        int cn = gridColumn.get(node);
        // We know the number of elements. Should be [1/(default load factor)]*size
        // BUG(review): the cast binds tighter than '*', so "(int) 1.4" is 1
        // and the intended 1.4x sizing is lost — should be
        // (int) (1.4 * (numRows + numCols - 1)). Harmless (HashSet grows),
        // but the capacity hint is wrong.
        HashSet<NodeState> nodeDefaults =
                new HashSet<NodeState>((int) 1.4*(numRows + numCols - 1));
        // If this is not the last row
        if(rn < numRows - 1) {
            // Add the whole row
            for (int c1 = 0; c1 < numCols; c1++)
                nodeDefaults.add(grid[rn][c1]);
            // If this is before the last col used (on last row)
            if(cn <= lastColUsed) {
                // Add whole column
                for (int r1 = 0; r1 < numRows; r1++)
                    nodeDefaults.add(grid[r1][cn]);
            } else {
                // Add column up until last row
                for (int r1 = 0; r1 < numRows-1; r1++)
                    nodeDefaults.add(grid[r1][cn]);
                // Add corresponding node from the last row (column rn);
                // only for the first lastColUsed rows.
                if(rn <= lastColUsed) {
                    nodeDefaults.add(grid[numRows-1][rn]);
                }
            }
        } else {
            // This is the last row
            // Add whole column
            for (int r1 = 0; r1 < numRows; r1++)
                nodeDefaults.add(grid[r1][cn]);
            // Add whole last row up till lastColUsed
            for (int c1 = 0; c1 <= lastColUsed; c1++)
                nodeDefaults.add(grid[rn][c1]);
            // Add row cn for columns > lastColUsed
            for (int c1 = lastColUsed+1; c1 < numCols; c1++)
                nodeDefaults.add(grid[cn][c1]);
        }
        // Could also make an array of nodeDefaults, for less memory usage/faster
        nodeDefaultRSs.put(node, nodeDefaults);
    }

    // BELOW IS INDEPENDENT OF GRID

    /*
     * simply forget about all our neighbors. thus, this forgets all our
     * failover clients and servers. since the grid is different. if this
     * somehow disrupts route computation, so be it - it'll only last for a
     * period.
     *
     * one worry is that others who miss this member update will continue to
     * broadcast to us. this is a non-issue because we ignore stale
     * messages, and when they do become updated, they'll forget about us
     * too.
     */
    // Set up rendezvous clients
    rendezvousClients.clear();
    for (NodeState cli : nodeDefaultRSs.get(self)) {
        // TODO: wrong semantics for isReachable
        if (cli.isReachable && cli != self)
            rendezvousClients.add(cli);
    }

    // Put timeouts for all new rendezvous clients. If they can never
    // reach us, we should stop sending them recommendations.
    for (final NodeState clientNode : rendezvousClients) {
        ScheduledFuture<?> oldFuture =
                rendezvousClientTimeouts.get(clientNode.info.id);
        if (oldFuture != null) {
            oldFuture.cancel(false);
        }
        ScheduledFuture<?> future = scheduler.schedule(safeRun(new Runnable() {
            public void run() {
                if (rendezvousClients.remove(clientNode)) {
                    log("rendezvous client " + clientNode + " removed");
                }
            }
        }), clientTimeout, TimeUnit.SECONDS);
        rendezvousClientTimeouts.put(clientNode.info.id, future);
    }

    // Set up default rendezvous servers to all destinations
    // Note: In an earlier version of the code, for a destination in
    // our row/col, we did not add rendezvous nodes which are not
    // reachable. We no longer do this (but it shouldn't matter).
    defaultRendezvousServers.clear();
    for (NodeState dst : nodes.values()) {
        // note: including self
        HashSet<NodeState> rs = new HashSet<NodeState>();
        defaultRendezvousServers.put(dst.info.id, rs);
        // Take intersection of this node's default rendezvous and
        // the dst's default rendezvous servers, excluding self.
        // Running time for outer loop is 2n^{1.5} since we are using
        // a HashSet for quick lookups. Could be optimized further,
        // but code simplicity is preferred.
        HashSet<NodeState> dstDefaults = nodeDefaultRSs.get(dst);
        for (NodeState selfRS : nodeDefaultRSs.get(self)) {
            // Don't add self because we will never receive routing
            // recommendation messages from ourselves.
            if (selfRS != self && dstDefaults.contains(selfRS))
                rs.add(selfRS);
        }
    }

    // Create empty set for default rendezvous servers, will be filled in
    // getAllRendezvousServers()
    rendezvousServers.clear();
    for (Entry<Short, HashSet<NodeState>> entry : defaultRendezvousServers.entrySet()) {
        rendezvousServers.put(entry.getKey(), new HashSet<NodeState>());
    }
    lastRendezvousServers.clear();
    log("state " + currentStateVersion + ", mbrs " + nids);
}

/**
 * A rendezvous n has failed with respect to remote if we cannot reach n
 * (proximal failure) or n has reported it cannot reach remote (remote
 * failure).
 *
 * @param n
 * @param remoteNid
 * @return
 */
private boolean isFailedRendezvous(NodeState n, NodeState remote) {
    // TODO: isReachable semantics should be fixed (but how?)
    // NOTE: We may never receive this node's measurements since
    // it is our rendezvous client, but we don't offer to be
    // its rendezvous server. This is why we check for
    // remote in the recommendations response rather than
    // the measurements.
    // This assumes that the node's reachability is indicative
    // of its ability to send us recommendation messages.
    return !n.isReachable || n.remoteFailures.contains(remote);
}

/**
 * @return failoverClients `union` nodes in my row and col (wherever i occur)
 */
private ArrayList<NodeState> getAllRendezvousClients() {
    ArrayList<NodeState> list = new ArrayList<NodeState>(rendezvousClients);
    Collections.sort(list);
    return list;
}

/** Joins the node ids of ns with the given separator (for log output). */
private String mkString(HashSet<NodeState> ns, String glue) {
    String s = "";
    for (NodeState n : ns) {
        s += n.info.id + glue;
    }
    return s;
}

/**
 * makes one pass over the metaset of all rendezvous servers, removing any
 * failed rendezvous from the individual sets.
 *
 * for the simple routing scheme, this is the full set of nodes. as a
 * result, measurements are broadcast to everyone, as intended. (note that
 * there are no routing recommendation messages in this scheme.)
 *
 * OLD ALGO
 *
 * for dst
 *   if dst is not down
 *     rs = dst's current rendezvous servers
 *     ds = dst's default rendezvous servers
 *     if any of ds are working to dst and rs is not ds
 *       rs = working subset of ds
 *     if rs = []
 *       rs += random node from dst's row/col
 *     else
 *       rs -= any failed rs
 *       note that this may leave rs empty for the coming round
 *       this is what we want bc it will delay failover-finding till the next round
 *
 * NEW ALGO
 *
 * // CHANGES START
 * build a hashtable for all rendezvous nodes currently used
 * call this F
 * // CHANGES END
 * for dst
 *   if dst is not down
 *     rs = dst's current rendezvous servers
 *     ds = dst's default rendezvous servers
 *     if any of ds are working to dst and rs is not ds
 *       rs = working subset of ds
 *     if rs = []
 *       // CHANGES START
 *       for active failover in dst's default rendezvous nodes (according to F)
 *         if failover works to dst, choose it as failover for dst as well
 *       choose rand node from dst's default rendezvous nodes that is not currently in use
 *       rs += whatever we chose; F += whatever we chose
 *       // CHANGES END
 *     else
 *       rs -= any failed rs
 *       note that this may leave rs empty for the coming round
 *       this is what we want bc it will delay failover-finding till the next round
 *
 * @return the union of all the sets of non-failed rendezvous servers.
 */
private ArrayList<NodeState> getAllRendezvousServers() {
    NodeState self = nodes.get(myNid);
    HashSet<NodeState> currentRSs = new HashSet<NodeState>();
    HashSet<NodeState> allDefaults = nodeDefaultRSs.get(self);
    // first, prepare currentRS so that we can share/reuse
    // rendezvous servers
    for (NodeState node : otherNodes) {
        for (NodeState n : rendezvousServers.get(node.info.id)) {
            // if this is an actual failover
            if (!allDefaults.contains(n))
                currentRSs.add(n);
        }
    }
    // these are the rendezvous servers that we want to sent our
    // measurements to
    HashSet<NodeState> servers = new HashSet<NodeState>();
    // iterate over all destination nodes that are not us
    for (NodeState dst : otherNodes) {
        if (!dst.isDead) {
            // this is our current (active) set of rendezvous servers
            HashSet<NodeState> rs = rendezvousServers.get(dst.info.id);
            // check if any of our default rendezvous servers are once
            // more available; if so, add them back
            HashSet<NodeState> defaults = defaultRendezvousServers.get(dst.info.id);
            // we always want to try talking to our default rendezvous
            // servers if we think they're reachable
            for (NodeState r : defaults)
                if (r.isReachable) servers.add(r);
            // rs consists of either default rendezvous servers or
            // non-default rendezvous, but never a mix of both; test
            // which type it is
            boolean hasDefaultsOnly =
                    rs.isEmpty() ? true : defaults.contains(rs.iterator().next());
            // the following code attempts to add default rendezvous
            // servers back into rs
            HashSet<NodeState> old = new HashSet<NodeState>(rs);
            if (hasDefaultsOnly) {
                // if any default rendezvous servers are in use, then
                // don't clear rs; simply add any more default servers
                // that are working
                if (!defaults.equals(rs))
                    for (NodeState r : defaults)
                        if (!isFailedRendezvous(r, dst))
                            rs.add(r);
            } else {
                // if no default rendezvous servers are in use, then
                // try adding any that are working; if any are working,
                // we make sure to first clear rs
                boolean cleared = false;
                for (NodeState r : defaults) {
                    if (!isFailedRendezvous(r, dst)) {
                        if (!cleared) {
                            rs.clear();
                            cleared = true;
                        }
                        rs.add(r);
                    }
                }
            }
            if (!old.equals(rs)) {
                log("restored rendezvous server for " + dst + " from " + old
                        + " to " + rs);
            }
            // Note that rs not being empty means that in previous iterations the nodes in
            // rs were alive, and either we did not ever receive any recommendations from them,
            // or the last recommendation we received from it did include routing information
            // for dst (and so there is no hint of a remote failure). In either case, as of
            // the previous iteration, n.remoteFailures.contains(remote) == false.
            // if { any node in rs has n.remoteFailures.contains(remote) == true, then we know that we
            // did receive a routing recommendation from it since the last round, and it is alive.
            // Remove it from rs and do nothing else this step, as the destination is likely dead.
            // set skipIteration=true. }
            // else {
            // If !n.isReachable for any node n in rs, remove it from rs. We don't expect it to be
            // helpful for routing ever.
            // If rs is now empty, choose a failover rendezvous node (in this iteration)
            // Else, any remaining nodes have n.remoteFailures.contains(remote) == false, which means
            // either that we did not yet receive a routing message from it, or we did and the dst
            // is reachable. In either case, do nothing. If this node is still alive, we will
            // eventually receive a routing recommendation from it. Otherwise, very soon we will find
            // that !n.isReachable. We add a bit of latency for waiting, but should be okay.
            /*
             * If we think a remote failure could have occured, don't immediately look
             * for failovers. The next period, we will have received link states from
             * our neighbors, from which we can determine whether dst is just down.
             *
             * The reason for this is that if a node fails, we don't want the entire network to flood
             * the row/col of that downed node (no need for failovers period).
             */
            boolean skipIteration = false;
            // We use the iterator so that we can safely remove from the set
            for (Iterator<NodeState> i = rs.iterator(); i.hasNext();) {
                NodeState r = i.next();
                if(r.remoteFailures.contains(dst)) {
                    i.remove();
                    skipIteration = true;
                } else if(!r.isReachable) {
                    i.remove();
                }
            }
            if (!skipIteration && rs.isEmpty()
                    && scheme != RoutingScheme.SQRT_NOFAILOVER) {
                // create debug string
                String s = "defaults";
                for (NodeState r : defaults) {
                    s += " " + r.info.id + (
                        !r.isReachable ?
                        " unreachable" :
                        " <-/-> " + mkString(r.remoteFailures, ",")
                    );
                }
                final String report = s;
                // look for failovers
                HashSet<NodeState> cands = new HashSet<NodeState>();
                // first, start by looking at the failovers that are
                // currently in use which are default rs's for this dst, so
                // that we can share when possible. that is, if we know that a
                // failover works for a destination, keep using it.
                HashSet<NodeState> dstDefault = nodeDefaultRSs.get(dst);
                // currentRSs may contain rendezvous nodes which are no longer alive
                // or useful for reaching the destination
                for(NodeState f : currentRSs) {
                    if (dstDefault.contains(f) && !isFailedRendezvous(f, dst)) {
                        cands.add(f);
                    }
                }
                if (cands.isEmpty()) {
                    // only once we have determined that no current
                    // failover works for us do we go ahead and randomly
                    // select a new failover. this is a blind choice;
                    // we don't have these node's routing recommendations,
                    // so we could not hope to do better.
                    // TODO (low priority): one exception is if any of the candidates
                    // are rendezvous clients for us, in which case we
                    // will have received their link state, and we could
                    // smartly decide whether they can reach the destination.
                    // Not obvious if we should (or how) take advantage of this.
                    for(NodeState cand : dstDefault) {
                        if (cand != self && cand.isReachable)
                            cands.add(cand);
                    }
                }
                // if any of the candidates are already selected to be in
                // 'servers', we want to make sure that we only choose from
                // these choices.
                HashSet<NodeState> candsInServers = new HashSet<NodeState>();
                for (NodeState cand : cands)
                    if (servers.contains(cand))
                        candsInServers.add(cand);
                // choose candidate uniformly at random
                ArrayList<NodeState> candsList = new ArrayList<NodeState>(
                        candsInServers.isEmpty() ? cands : candsInServers);
                if (candsList.size() == 0) {
                    log("no failover candidates! giving up; " + report);
                } else {
                    NodeState failover = candsList.get(rand.nextInt(candsList.size()));
                    // TODO (low priority): prev rs = ... is now broken since rs is empty
                    log("new failover for " + dst + ": " + failover
                            + ", prev rs = " + rs + "; " + report);
                    rs.add(failover);
                    // share this failover in this routing iteration too
                    if (!allDefaults.contains(failover)) {
                        currentRSs.add(failover);
                    }
                }
            } else if (rs.isEmpty()) {
                log("all rs to " + dst + " failed");
                System.out.println("ALL FAILED!");
            }
            // Add any nodes that are in rs to the servers set
            for (NodeState r : rs) {
                servers.add(r);
            }
        } // end if dst.hop != 0 (destination is alive)
    } // end while loop over destinations
    ArrayList<NodeState> list = new ArrayList<NodeState>(servers);
    Collections.sort(list);
    return list;
}

public static enum RoutingScheme {
    SIMPLE, SQRT, SQRT_NOFAILOVER, SQRT_RC_FAILOVER, SQRT_SPECIAL
};

private final RoutingScheme scheme;

/** Coordinator-side debug dump of the current member table. */
private void printMembers() {
    String s = "members:";
    for (NodeInfo info : coordNodes.values()) {
        s += "\n  " + info.id + " oid " + id2oid.get(info.id) + " "
                + id2name.get(info.id) + " " + info.port;
    }
    log(s);
}

// PERF
/** Debug dump of the current rendezvous grid layout. */
private void printGrid() {
    String s = "grid:";
    if (grid != null) {
        for (int i = 0; i < numRows; i++) {
            s += "\n  ";
            for (int j = 0; j < numCols; j++) {
                s += "\t" + grid[i][j];
            }
        }
    }
    log(s);
}

/**
 * in the sqrt routing scheme: for each neighbor, find for him the min-cost
 * hops to all other neighbors, and send this info to him (the intermediate
 * node may be one of the endpoints, meaning a direct route is cheapest).
 *
 * in the sqrt_special routing scheme, we instead find for each neighbor the
 * best intermediate other neighbor through which to route to every
 * destination. this still needs work, see various todos.
 *
 * a failed rendezvous wrt some node n is one which we cannot reach
 * (proximal failure) or which cannot reach n (remote failure). when all
 * current rendezvous to some node n fail, then we find a failover from node
 * n's row and col, and include it in our neighbor set. by befault, this
 * situation occurs when a row-col rendezvous pair fail.
it can also occur * with any of our current failovers. */ private void broadcastRecommendations() { ArrayList<NodeState> clients = getAllRendezvousClients(); ArrayList<NodeState> dsts = new ArrayList<NodeState>(clients); dsts.add(nodes.get(myNid)); Collections.sort(dsts); int totalSize = 0; for (NodeState src : clients) { ArrayList<Rec> recs = new ArrayList<Rec>(); // dst <- nbrs, hop <- any findHops(dsts, memberNids, src, recs); /* * TODO: need to additionally send back info about *how good* the * best hop is, so that the receiver can decide which of the many * recommendations to accept */ if (scheme == RoutingScheme.SQRT_SPECIAL) { // dst <- any, hop <- nbrs findHopsAlt(memberNids, dsts, src, recs); } RoutingRecs msg = new RoutingRecs(); msg.recs = recs; totalSize += sendObject(msg, src.info.id); } log("sent recs, " + totalSize + " bytes, to " + clients); } /** * Given the src, find for each dst in dsts the ideal hop from hops, * storing these into recs. This may choose the dst itself as the hop. */ private void findHops(ArrayList<NodeState> dsts, ArrayList<Short> hops, NodeState src, ArrayList<Rec> recs) { for (NodeState dst : dsts) { if (src != dst) { short min = resetLatency; short minhop = -1; for (short hop : hops) { if (hop != src.info.id) { int src2hop = src.latencies.get(hop); int dst2hop = dst.latencies.get(hop); int latency = src2hop + dst2hop; // DEBUG // log(src + "->" + hop + " is " + src2hop + ", " + hop + // "->" + dst + " is " + dst2hop + ", sum " + // latency); if (latency < min) { min = (short) latency; minhop = hop; } } } // it's possible for us to have not found an ideal hop. this // may be counter-intuitive, since even if src<->dst is broken, // the fact that both are our clients means we should be able // to serve as a hop. however it may be that either one of them // was, or we were, recently added as a member to the network, // so they never had a chance to ping us yet (and hence we're // missing from their measurements). 
(TODO also is it possible // that we're missing their measurement entirely? are all // clients necessarily added on demand by measurement packets?) // what we can do is to try finding our own latency to the hop // (perhaps we've had a chance to ping them), and failing that, // estimating the latency (only if one of us was newly added). // however, these errors are transient anyway - by the next // routing period, several pings will have taken place that // would guarantee (or there was a failure, and eventually one // of {src,dst} will fall out of our client set). if (minhop != -1) { short directLatency = src.latencies.get(dst.info.id); Rec rec = new Rec(); rec.dst = dst.info.id; // We require that a recommended route (if not the direct // route and if direct route is working) yield at least a // 5% reduction in latency. // - if min-cost route is the direct route, just use it // - if direct-cost route is infinite, then no point // comparing to the min-cost hop route // - if min-cost route is not much better than direct // route, use direct route if (minhop == dst.info.id || directLatency == resetLatency || min * directBonus < directLatency) { // TODO (high priority): can you get a short overflow with above? directBonus is a double rec.via = minhop; recs.add(rec); } else { // At this point, // min-cost route is not the direct route && // src->dst is *not* infinite && // min * directBonus >= src->dst // So, recommend the direct route, if that is working. 
rec.via = dst.info.id; recs.add(rec); } } } } } private void findHopsAlt(ArrayList<Short> dsts, ArrayList<NodeState> hops, NodeState src, ArrayList<Rec> recs) { for (short dst : dsts) { if (src.info.id != dst && nodes.get(dst).isReachable) { short min = resetLatency; short minhop = -1; for (NodeState hop : hops) { if (hop != src) { short src2hop = src.latencies.get(hop.info.id); short dst2hop = hop.latencies.get(dst); short latency = (short) (src2hop + dst2hop); if (latency < min) { min = latency; minhop = hop.info.id; } } } assert minhop != -1; Rec rec = new Rec(); rec.dst = dst; rec.via = minhop; recs.add(rec); } } } private String routesToString(ArrayList<Rec> recs) { String s = ""; for (Rec rec : recs) s += rec.via + "->" + rec.dst + " "; return s; } private Serialization senderSer = new Serialization(); private int sendObject(final Msg o, InetAddress addr, int port, short nid) { o.src = myNid; o.version = currentStateVersion; o.session = sessionId; try { /* * note that it's unsafe to re-use these output streams - at * least, i don't know how (reset() is insufficient) */ ByteArrayOutputStream baos = new ByteArrayOutputStream(); senderSer.serialize(o, new DataOutputStream(baos)); byte[] buf = baos.toByteArray(); String who = nid >= 0 ? "" + nid : (addr + ":" + port); log("send." + o.getClass().getSimpleName(), "to " + who + " len " + buf.length); if (!ignored.contains(nid)) { session.send(ByteBuffer.wrap(buf), new InetSocketAddress(addr, port)); } else { log("droppng packet sent to " + who); } return buf.length; } catch (Exception ex) { throw new RuntimeException(ex); } } private int sendObject(final Msg o, NodeInfo info, short nid) { return sendObject(o, info.addr, info.port, nid); } private int sendObject(final Msg o, NodeInfo info) { return sendObject(o, info, (short)-1); } private int sendObject(final Msg o, short nid) { return nid != myNid ? sendObject(o, nid == 0 ? coordNode : (myNid == 0 ? 
coordNodes.get(nid) : nodes.get(nid).info), nid) : 0; } private void broadcastMeasurements(ArrayList<NodeState> servers) { ShortShortMap latencies = nodes.get(myNid).latencies; Measurements rm = new Measurements(); rm.probeTable = new short[memberNids.size()]; for (int i = 0; i < rm.probeTable.length; i++) rm.probeTable[i] = latencies.get(memberNids.get(i)); rm.inflation = new byte[rm.probeTable.length]; int totalSize = 0; for (NodeState nbr : servers) { totalSize += sendObject(rm, nbr.info.id); } log("sent measurements, " + totalSize + " bytes, to " + servers); } private void updateMeasurements(Measurements m) { NodeState src = nodes.get(m.src); for (int i = 0; i < m.probeTable.length; i++) src.latencies.put(memberNids.get(i), m.probeTable[i]); // TODO we aren't setting node.{hop,cameUp,isHopRecommended=false}... } private void handleRecommendations(RoutingRecs msg) { ArrayList<Rec> recs = msg.recs; NodeState r = nodes.get(msg.src); r.dstsPresent.clear(); r.remoteFailures.clear(); for (Rec rec : recs) { r.dstsPresent.add(rec.dst); if (nodes.get(rec.via).isReachable) { if (scheme == RoutingScheme.SQRT_SPECIAL) { /* * TODO: add in support for processing sqrt_special * recommendations. first we need to add in the actual cost of * the route to these recommendations (see * broadcastRecommndations), then we need to compare all of * these and see which ones were better. a complication is that * routing recommendation broadcasts are not synchronized, so * while older messages may appear to have better routes, there * must be some threshold in time past which we disregard old * latencies. must keep some history */ } else { // blindly trust the recommendations NodeState node = nodes.get(rec.dst); if (node.hop == 0 || node.isDead) { node.cameUp = true; node.isDead = false; } node.isHopRecommended = true; node.hop = rec.via; } } } if (scheme != RoutingScheme.SQRT_SPECIAL) { /* * get the full set of dsts that we depend on this node for. 
note * that the set of nodes it's actually serving may be different. */ // TODO (low priority): just use dstsPresent instead of remoteFailures for (NodeState dst : nodeDefaultRSs.get(r)) { if (!r.dstsPresent.contains(dst.info.id)) { /* * there was a comm failure between this rendezvous and the * dst for which this rendezvous did not provide a * recommendation. this a proximal rendezvous failure, so that if * necessary during the next phase, we will find failovers. */ r.remoteFailures.add(dst); } } } } /** * counts the number of nodes that we can reach - either directly, through a * hop, or through any rendezvous client. * * @return */ private int countReachableNodes() { /* * TODO need to fix up hopOptions so that it actually gets updated * correctly, since currently things are *never* removed from it (they * need to expire) */ NodeState myState = nodes.get(myNid); int count = 0; for (NodeState node : otherNodes) { count += node.hop != 0 ? 1 : 0; } return count; } /** * Counts the number of paths to a particular node. * * Note that this does not get run whenever nodes become reachable, only * when they become unreachable (and also in batch periodically). * Furthermore, we do not run this whenever we get a measurement packet. * The reason for these infelicities is one of performance. * * The logic among hop, isHopRecommended, and cameUp is tricky. */ private int findPaths(NodeState node, boolean batch) { ArrayList<NodeState> clients = getAllRendezvousClients(); ArrayList<NodeState> servers = lastRendezvousServers; HashSet<NodeState> options = new HashSet<NodeState>(); short min = resetLatency; short nid = node.info.id; boolean wasDead = node.hop == 0; NodeState self = nodes.get(myNid); // we would like to keep recommended nodes (they should be the best // choice, but also we have no ping data). but if it was not obtained // via recommendation (i.e., a previous findPaths() got this hop), then // we should feel free to update it. 
if (node.hop == 0) { node.isHopRecommended = false; } else { // we are not adding the hop if (!node.isHopRecommended) { node.hop = 0; } } // Unless node.hop == 0, this code below is useless // We would like to measure this... // keep track of subping. // direct hop if (node.isReachable) { options.add(node); if (!node.isHopRecommended) { node.hop = node.info.id; min = self.latencies.get(node.info.id); } } else { // If it is alive, we will set it to false in the next few lines node.isDead = true; } // find best rendezvous client. (`clients` are all reachable.) for (NodeState client : clients) { int val = client.latencies.get(nid); if (val != resetLatency) { if(!node.isReachable) node.isDead = false; options.add(client); val += self.latencies.get(client.info.id); if (!node.isHopRecommended && val < min) { node.hop = client.info.id; min = (short) val; } } } // see if a rendezvous server can serve as the hop. (can't just iterate // through hopOptions, because that doesn't tell us which server to go // through.) using the heuristic of just our latency to the server for (NodeState server : servers) { if (server.dstsPresent.contains(nid)) { options.add(server); short val = self.latencies.get(server.info.id); if (node.hop == 0 && val < min) { node.hop = server.info.id; min = val; } } } boolean isDead = node.hop == 0; // seems that (!isDead && wasDead) can be true, if a hop is found here // from a measurement (from a rclient). boolean cameUp = !isDead && wasDead || node.cameUp; boolean wentDown = isDead && !wasDead; // reset node.cameUp node.cameUp = false; // we always print something in non-batch mode. we also print stuff if // there was a change in the node's up/down status. if a node is reachable // then findPaths(node,) will only be called during batch processing, and // so wasDead will have been set either by the last unreachable call or by // the previous batch call. thus, the first batch call after a node goes // up, the "up" message will be printed. 
if (!batch || cameUp || wentDown) { String stateChange = cameUp ? " up" : (wentDown ? " down" : ""); log("node " + node + stateChange + " hop " + node.hop + " total " + options.size()); } return options.size(); } /** * Counts the avg number of one-hop or direct paths available to nodes * Calls findPaths(node) for all other nodes. This code is supposed to * a) find out a node is alive, and b) find the optimal one-hop route to * this destination. * @return */ private Pair<Integer, Integer> findPathsForAllNodes() { NodeState myState = nodes.get(myNid); int count = 0; int numNodesReachable = 0; for (NodeState node : otherNodes) { int d = findPaths(node, true); count += d; numNodesReachable += d > 0 ? 1 : 0; } if (numNodesReachable > 0) count /= numNodesReachable; return Pair.of(numNodesReachable, count); } public void quit() { doQuit.set(true); } private class NodeState implements Comparable<NodeState> { public String toString() { return "" + info.id; } /** * not null */ public final NodeInfo info; /** * updated in resetTimeoutAtNode(). if hop == 0, this must be false; if * hop == the nid, this must be true. * * this should also be made to correspond with the appropriate latencies in myNid */ public boolean isReachable = true; /** * the last known latencies to all other nodes. missing entry implies * resetLatency. this is populated/valid for rendezvous clients. * * invariants: * - keyset is a subset of current members (memberNids); enforced in * updateMembers() * - keyset contains only live nodes; enforced in resetTimeoutAtNode() * - values are not resetLatency * - undefined if not a rendezvous client */ public final ShortShortMap latencies = new ShortShortMap(resetLatency); /** * the recommended intermediate hop for us to get to this node, or 0 if * no way we know of to get to that node, and thus believe the node is * down. 
* * invariants: * - always refers to a member or 0; enforced in updateMembers() * - never refers to dead node; enforced in resetTimeoutAtNode() * - may be nid (may be dst) * - initially defaults to dst (since we don't know hops to it) * - never refers to the owning neuronnode (never is src) * - cannot be nid if !isReachable */ public short hop; /** * Keeps track of whether any node (including ourself) receives measurements * to the destination. Only consider this if node.isReachable is false. */ public boolean isDead = false; /** * this is set at certain places where we determine that a node is * alive, and reset in the next findPaths(). the only reason we need * this is to help produce the correct logging output for the * effectiveness timing analysis. */ public boolean cameUp; /** * this indicates how we got this hop. this is set in * handleRecommendations(), reset in resetTimeoutAtNode(), and * read/reset from batch-mode findPaths(). if it was recommended to * us, then we will want to keep it; otherwise, it was just something * we found in failover mode, so are free to wipe it out. this var has * no meaning when hop == 0. */ public boolean isHopRecommended; /** * remote failures. applies only if this nodestate is of a rendezvous * node. contains nids of all nodes for which this rendezvous cannot * recommend routes. * * invariants: * - undefined if this is not a rendezvous node * - empty */ public final HashSet<NodeState> remoteFailures = new HashSet<NodeState>(); /** * dstsPresent, the complement of remoteFailures (in defaultClients). */ public final HashSet<Short> dstsPresent = new HashSet<Short>(); /** * this is unused at the moment. still need to re-design. 
*/ public final HashSet<Short> hopOptions = new HashSet<Short>(); public NodeState(NodeInfo info) { this.info = info; this.hop = info.id; latencies.put(info.id, (short) 0); } public int compareTo(NodeState o) { return new Short(info.id).compareTo(o.info.id); } } } class ShortShortMap { private final Hashtable<Short,Short> table = new Hashtable<Short, Short>(); private final short defaultValue; public ShortShortMap(short defaultValue) { this.defaultValue = defaultValue; } public Set<Short> keySet() { return table.keySet(); } public boolean containsKey(short key) { return table.containsKey(key); } public void remove(short key) { table.remove(key); } public short get(short key) { Short value = table.get(key); return value != null ? value : defaultValue; } public void put(short key, short value) { if (value == defaultValue) table.remove(key); else table.put(key, value); } } // welcome to my // DEATH MACHINE, // interloper!!!!!!!11 class NodeInfo { short id; int port; InetAddress addr; } class Rec { short dst; short via; } class Subprobe { long time; InetSocketAddress src; InetSocketAddress nod; byte type; } class PeerPing { long time; InetSocketAddress src; } class PeerPong { long time; InetSocketAddress src; } class Msg { short src; short version; short session; } class Join extends Msg { InetAddress addr; int port; } class Init extends Msg { short id; ArrayList<NodeInfo> members; } class Membership extends Msg { ArrayList<NodeInfo> members; short numNodes; short yourId; } class RoutingRecs extends Msg { ArrayList<Rec> recs; } class Ping extends Msg { long time; NodeInfo info; } class Pong extends Msg { long time; } class Measurements extends Msg { short[] probeTable; byte[] inflation; } class Serialization { public void serialize(Object obj, DataOutputStream out) throws IOException { if (false) {} else if (obj.getClass() == NodeInfo.class) { NodeInfo casted = (NodeInfo) obj; out.writeInt(((int) serVersion) << 8 | 0); out.writeShort(casted.id); 
out.writeInt(casted.port); { byte[] buf = casted.addr.getAddress(); out.writeInt(buf.length); out.write(buf); } } else if (obj.getClass() == Rec.class) { Rec casted = (Rec) obj; out.writeInt(((int) serVersion) << 8 | 1); out.writeShort(casted.dst); out.writeShort(casted.via); } else if (obj.getClass() == Subprobe.class) { Subprobe casted = (Subprobe) obj; out.writeInt(((int) serVersion) << 8 | 2); out.writeLong(casted.time); { byte[] buf = casted.src.getAddress().getAddress(); out.writeInt(buf.length); out.write(buf); } out.writeInt(casted.src.getPort()); { byte[] buf = casted.nod.getAddress().getAddress(); out.writeInt(buf.length); out.write(buf); } out.writeInt(casted.nod.getPort()); out.writeByte(casted.type); } else if (obj.getClass() == PeerPing.class) { PeerPing casted = (PeerPing) obj; out.writeInt(((int) serVersion) << 8 | 3); out.writeLong(casted.time); { byte[] buf = casted.src.getAddress().getAddress(); out.writeInt(buf.length); out.write(buf); } out.writeInt(casted.src.getPort()); } else if (obj.getClass() == PeerPong.class) { PeerPong casted = (PeerPong) obj; out.writeInt(((int) serVersion) << 8 | 4); out.writeLong(casted.time); { byte[] buf = casted.src.getAddress().getAddress(); out.writeInt(buf.length); out.write(buf); } out.writeInt(casted.src.getPort()); } else if (obj.getClass() == Msg.class) { Msg casted = (Msg) obj; out.writeInt(((int) serVersion) << 8 | 5); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Join.class) { Join casted = (Join) obj; out.writeInt(((int) serVersion) << 8 | 6); { byte[] buf = casted.addr.getAddress(); out.writeInt(buf.length); out.write(buf); } out.writeInt(casted.port); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Init.class) { Init casted = (Init) obj; out.writeInt(((int) serVersion) << 8 | 7); out.writeShort(casted.id); out.writeInt(casted.members.size()); for (int i = 
0; i < casted.members.size(); i++) { out.writeShort(casted.members.get(i).id); out.writeInt(casted.members.get(i).port); { byte[] buf = casted.members.get(i).addr.getAddress(); out.writeInt(buf.length); out.write(buf); } } out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Membership.class) { Membership casted = (Membership) obj; out.writeInt(((int) serVersion) << 8 | 8); out.writeInt(casted.members.size()); for (int i = 0; i < casted.members.size(); i++) { out.writeShort(casted.members.get(i).id); out.writeInt(casted.members.get(i).port); { byte[] buf = casted.members.get(i).addr.getAddress(); out.writeInt(buf.length); out.write(buf); } } out.writeShort(casted.numNodes); out.writeShort(casted.yourId); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == RoutingRecs.class) { RoutingRecs casted = (RoutingRecs) obj; out.writeInt(((int) serVersion) << 8 | 9); out.writeInt(casted.recs.size()); for (int i = 0; i < casted.recs.size(); i++) { out.writeShort(casted.recs.get(i).dst); out.writeShort(casted.recs.get(i).via); } out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Ping.class) { Ping casted = (Ping) obj; out.writeInt(((int) serVersion) << 8 | 10); out.writeLong(casted.time); out.writeShort(casted.info.id); out.writeInt(casted.info.port); { byte[] buf = casted.info.addr.getAddress(); out.writeInt(buf.length); out.write(buf); } out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Pong.class) { Pong casted = (Pong) obj; out.writeInt(((int) serVersion) << 8 | 11); out.writeLong(casted.time); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } else if (obj.getClass() == Measurements.class) { Measurements casted = (Measurements) obj; out.writeInt(((int) 
serVersion) << 8 | 12); out.writeInt(casted.probeTable.length); for (int i = 0; i < casted.probeTable.length; i++) { out.writeShort(casted.probeTable[i]); } out.writeInt(casted.inflation.length); out.write(casted.inflation); out.writeShort(casted.src); out.writeShort(casted.version); out.writeShort(casted.session); } } public static byte serVersion = 2; public Object deserialize(DataInputStream in) throws IOException { int header = readInt(in); if ((header & 0xff00) != ((int) serVersion) << 8) return null; int msgtype = header & 0xff; switch (msgtype) { case 0: { // NodeInfo NodeInfo obj; { obj = new NodeInfo(); { obj.id = in.readShort(); } { obj.port = readInt(in); } { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } obj.addr = InetAddress.getByAddress(buf); } } return obj; } case 1: { // Rec Rec obj; { obj = new Rec(); { obj.dst = in.readShort(); } { obj.via = in.readShort(); } } return obj; } case 2: { // Subprobe Subprobe obj; { obj = new Subprobe(); { obj.time = in.readLong(); } { InetAddress addr; { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } addr = InetAddress.getByAddress(buf); } int port; { port = readInt(in); } obj.src = new InetSocketAddress(addr, port); } { InetAddress addr; { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } addr = InetAddress.getByAddress(buf); } int port; { port = readInt(in); } obj.nod = new InetSocketAddress(addr, port); } { obj.type = in.readByte(); } } return obj; } case 3: { // PeerPing PeerPing obj; { obj = new PeerPing(); { obj.time = in.readLong(); } { InetAddress addr; { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } addr = InetAddress.getByAddress(buf); } int port; { port = readInt(in); } obj.src = new InetSocketAddress(addr, port); } } return obj; } case 4: { // PeerPong PeerPong obj; { obj = new PeerPong(); { obj.time = in.readLong(); } { InetAddress addr; { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } addr = InetAddress.getByAddress(buf); } int port; { port = 
readInt(in); } obj.src = new InetSocketAddress(addr, port); } } return obj; } case 5: { // Msg Msg obj; { obj = new Msg(); { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } return obj; } case 6: { // Join Join obj; { obj = new Join(); { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } obj.addr = InetAddress.getByAddress(buf); } { obj.port = readInt(in); } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj; } case 7: { // Init Init obj; { obj = new Init(); { obj.id = in.readShort(); } { obj.members = new ArrayList<NodeInfo>(); for (int i = 0, len = readInt(in); i < len; i++) { NodeInfo x; { x = new NodeInfo(); { x.id = in.readShort(); } { x.port = readInt(in); } { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } x.addr = InetAddress.getByAddress(buf); } } obj.members.add(x); } } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj; } case 8: { // Membership Membership obj; { obj = new Membership(); { obj.members = new ArrayList<NodeInfo>(); for (int i = 0, len = readInt(in); i < len; i++) { NodeInfo x; { x = new NodeInfo(); { x.id = in.readShort(); } { x.port = readInt(in); } { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } x.addr = InetAddress.getByAddress(buf); } } obj.members.add(x); } } { obj.numNodes = in.readShort(); } { obj.yourId = in.readShort(); } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj; } case 9: { // RoutingRecs RoutingRecs obj; { obj = new RoutingRecs(); { obj.recs = new ArrayList<Rec>(); for (int i = 0, len = readInt(in); i < len; i++) { Rec x; { x = new Rec(); { x.dst = in.readShort(); } { x.via = in.readShort(); } } obj.recs.add(x); } } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj; } case 10: { // Ping 
Ping obj; { obj = new Ping(); { obj.time = in.readLong(); } { obj.info = new NodeInfo(); { obj.info.id = in.readShort(); } { obj.info.port = readInt(in); } { byte[] buf; { buf = new byte[readInt(in)]; in.read(buf); } obj.info.addr = InetAddress.getByAddress(buf); } } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj; } case 11: { // Pong Pong obj; { obj = new Pong(); { obj.time = in.readLong(); } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj; } case 12: { // Measurements Measurements obj; { obj = new Measurements(); { obj.probeTable = new short[readInt(in)]; for (int i = 0; i < obj.probeTable.length; i++) { { obj.probeTable[i] = in.readShort(); } } } { obj.inflation = new byte[readInt(in)]; in.read(obj.inflation); } { { obj.src = in.readShort(); } { obj.version = in.readShort(); } { obj.session = in.readShort(); } } } return obj; } default: throw new RuntimeException("unknown obj type"); } } private byte[] readBuffer = new byte[4]; // read in a big-endian 4-byte integer public int readInt(DataInputStream dis) throws IOException { dis.readFully(readBuffer, 0, 4); return ( ((int)(readBuffer[0] & 255) << 24) + ((readBuffer[1] & 255) << 16) + ((readBuffer[2] & 255) << 8) + ((readBuffer[3] & 255) << 0)); } /* public static void main(String[] args) throws IOException { { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream out = new DataOutputStream(baos); Pong pong = new Pong(); pong.src = 2; pong.version = 3; pong.time = 4; serialize(pong, out); byte[] buf = baos.toByteArray(); System.out.println(buf.length); Object obj = deserialize(new DataInputStream(new ByteArrayInputStream(buf))); System.out.println(obj); } { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream out = new DataOutputStream(baos); Measurements m = new Measurements(); m.src = 2; m.version = 3; m.membershipList = new 
ArrayList<Integer>(); m.membershipList.add(4); m.membershipList.add(5); m.membershipList.add(6); m.ProbeTable = new long[5]; m.ProbeTable[1] = 7; m.ProbeTable[2] = 8; m.ProbeTable[3] = 9; serialize(m, out); byte[] buf = baos.toByteArray(); System.out.println(buf.length); Object obj = deserialize(new DataInputStream(new ByteArrayInputStream(buf))); System.out.println(obj); } { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream out = new DataOutputStream(baos); Membership m = new Membership(); m.src = 2; m.version = 3; m.members = new ArrayList<NodeInfo>(); NodeInfo n1 = new NodeInfo(); n1.addr = InetAddress.getLocalHost(); n1.port = 4; n1.id = 5; m.members.add(n1); NodeInfo n2 = new NodeInfo(); n2.addr = InetAddress.getByName("google.com"); n2.port = 6; n2.id = 7; m.members.add(n2); m.numNodes = 8; serialize(m, out); byte[] buf = baos.toByteArray(); System.out.println(buf.length); Object obj = deserialize(new DataInputStream( new ByteArrayInputStream(buf))); System.out.println(obj); } }*/ }
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

/**
 * Writes a ".updatr" descriptor file (key=value lines: name, version, url,
 * file, notes) into the local "Updatr" directory, for consumption by an
 * external updater tool.
 */
public class VersionNumbering {

    /** Version string emitted into the descriptor. */
    final static String version = "250";

    /** Base name of the descriptor file ("&lt;name&gt;.updatr"). */
    static String name = "default";

    /** Platform file separator, used when building the output path. */
    static final String slash = System.getProperty("file.separator");

    /** Path fragment inserted into the url= and file= entries. */
    static String prefix = "default";

    /**
     * Creates the "Updatr" directory if it does not exist, then (re)writes
     * "Updatr/&lt;name&gt;.updatr" with the current name/version metadata.
     * I/O failures are reported to stderr and otherwise ignored, matching
     * the original best-effort behavior.
     */
    static void updateUpdatr() {
        File dir = new File("Updatr");
        if (!dir.exists() && !dir.mkdir()) {
            // Directory creation failed; the FileWriter below will throw and
            // the failure will be reported in the catch block.
            System.err.println("could not create directory: " + dir);
        }
        // try-with-resources guarantees the writer is closed (and buffered
        // data flushed) even if one of the writes throws — the original
        // leaked the writer on any mid-write IOException.
        try (BufferedWriter bw = new BufferedWriter(
                new FileWriter("Updatr" + slash + name + ".updatr"))) {
            bw.write("name=" + name);
            bw.newLine();
            bw.write("version=" + version);
            bw.newLine();
            bw.write("url=www.prydwen.net/minec/latest" + prefix + "/" + name + ".updatr");
            bw.newLine();
            // NOTE(review): "file=" points at the same .updatr descriptor as
            // "url=" — presumably it should reference the downloadable
            // artifact instead; confirm with the updater's expected format.
            bw.write("file=www.prydwen.net/minec/latest" + prefix + "/" + name + ".updatr");
            bw.newLine();
            bw.write("notes=");
            bw.newLine();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
    }
}
package wyc.builder; import static wybs.lang.SyntaxError.internalFailure; import static wybs.lang.SyntaxError.syntaxError; import static wyc.lang.WhileyFile.internalFailure; import static wyc.lang.WhileyFile.syntaxError; import static wyil.util.ErrorMessages.*; import java.math.BigDecimal; import java.util.*; import wyautl_old.lang.Automata; import wyautl_old.lang.Automaton; import wybs.lang.*; import wybs.util.*; import wyc.lang.*; import wyc.lang.WhileyFile.Context; import wyil.lang.Constant; import wyil.lang.Type; import wyil.lang.WyilFile; /** * Propagates type information in a <i>flow-sensitive</i> fashion from declared * parameter and return types through variable declarations and assigned * expressions, to determine types for all intermediate expressions and * variables. During this propagation, type checking is performed to ensure * types are used soundly. For example: * * <pre> * function sum([int] data) => int: * int r = 0 // declared int type for r * for v in data: // infers int type for v, based on type of data * r = r + v // infers int type for r + v, based on type of operands * return r // infers int type for return expression * </pre> * * <p> * The flow typing algorithm distinguishes between the <i>declared type</i> of a * variable and its <i>known type</i>. That is, the known type at any given * point is permitted to be more precise than the declared type (but not vice * versa). For example: * </p> * * <pre> * function id(int x) => int: * return x * * function f(int y) => int: * int|null x = y * f(x) * </pre> * * <p> * The above example is considered type safe because the known type of * <code>x</code> at the function call is <code>int</code>, which differs from * its declared type (i.e. <code>int|null</code>). * </p> * * <p> * Loops present an interesting challenge for type propagation. 
Consider this * example: * </p> * * <pre> * function loopy(int max) => real: * var i = 0 * while i < max: * i = i + 0.5 * return i * </pre> * * <p> * On the first pass through the loop, variable <code>i</code> is inferred to * have type <code>int</code> (based on the type of the constant <code>0</code> * ). However, the add expression is inferred to have type <code>real</code> * (based on the type of the rhs) and, hence, the resulting type inferred for * <code>i</code> is <code>real</code>. At this point, the loop must be * reconsidered taking into account this updated type for <code>i</code>. * </p> * * <p> * The operation of the flow type checker splits into two stages: * </p> * <ul> * <li><b>Global Propagation.</b> During this stage, all named types are checked * and expanded.</li> * <li><b>Local Propagation.</b> During this stage, types are propagated through * statements and expressions (as above).</li> * </ul> * * <h3>References</h3> * <ul> * <li> * <p> * David J. Pearce and James Noble. Structural and Flow-Sensitive Types for * Whiley. Technical Report, Victoria University of Wellington, 2010. * </p> * </li> * </ul> * * @author David J. Pearce * */ public class FlowTypeChecker { private WhileyBuilder builder; private String filename; private WhileyFile.FunctionOrMethod current; /** * The constant cache contains a cache of expanded constant values. 
*/ private final HashMap<NameID, Constant> constantCache = new HashMap<NameID, Constant>(); public FlowTypeChecker(WhileyBuilder builder) { this.builder = builder; } // WhileyFile(s) public void propagate(List<WhileyFile> files) { for(WhileyFile wf : files) { propagate(wf); } } public void propagate(WhileyFile wf) { this.filename = wf.filename; for (WhileyFile.Declaration decl : wf.declarations) { try { if (decl instanceof WhileyFile.FunctionOrMethod) { propagate((WhileyFile.FunctionOrMethod) decl); } else if (decl instanceof WhileyFile.Type) { propagate((WhileyFile.Type) decl); } else if (decl instanceof WhileyFile.Constant) { propagate((WhileyFile.Constant) decl); } } catch (ResolveError e) { syntaxError(errorMessage(RESOLUTION_ERROR, e.getMessage()), filename, decl, e); } catch (SyntaxError e) { throw e; } catch (Throwable t) { internalFailure(t.getMessage(), filename, decl, t); } } } // Declarations /** * Resolve types for a given type declaration. If an invariant expression is * given, then we have to propagate and resolve types throughout the * expression. * * @param td * Type declaration to check. * @throws Exception */ public void propagate(WhileyFile.Type td) throws Exception { // First, resolve the declared syntactic type into the corresponding // nominal type. td.resolvedType = resolveAsType(td.pattern.toSyntacticType(), td); if(td.invariant != null) { // Second, an invariant expression is given, so propagate through // that. // Construct the appropriate typing environment Environment environment = new Environment(); environment = addDeclaredVariables(td.pattern,environment,td); // Propagate type information through the constraint td.invariant = propagate(td.invariant,environment,td); } } /** * Propagate and check types for a given constant declaration. * * @param cd * Constant declaration to check. 
* @throws Exception */ public void propagate(WhileyFile.Constant cd) throws Exception { NameID nid = new NameID(cd.file().module, cd.name); cd.resolvedValue = resolveAsConstant(nid); } /** * Propagate and check types for a given function or method declaration. * * @param fd * Function or method declaration to check. * @throws Exception */ public void propagate(WhileyFile.FunctionOrMethod d) throws Exception { this.current = d; // ugly Environment environment = new Environment(); // Resolve the types of all parameters and construct an appropriate // environment for use in the flow-sensitive type propagation. for (WhileyFile.Parameter p : d.parameters) { environment = environment.put(p.name, resolveAsType(p.type, d)); } // Resolve types for any preconditions (i.e. requires clauses) provided. final List<Expr> d_requires = d.requires; for (int i = 0; i != d_requires.size(); ++i) { Expr condition = d_requires.get(i); condition = propagate(condition, environment.clone(), d); d_requires.set(i, condition); } // Resolve types for any postconditions (i.e. ensures clauses) provided. final List<Expr> d_ensures = d.ensures; if (d_ensures.size() > 0) { // At least one ensures clause is provided; so, first, construct an // appropriate environment from the initial one create. Environment ensuresEnvironment = addDeclaredVariables(d.ret, environment.clone(), d); // Now, type check each ensures clause for (int i = 0; i != d_ensures.size(); ++i) { Expr condition = d_ensures.get(i); condition = propagate(condition, ensuresEnvironment, d); d_ensures.set(i, condition); } } // Resolve the overall type for the function or method. if (d instanceof WhileyFile.Function) { WhileyFile.Function f = (WhileyFile.Function) d; f.resolvedType = resolveAsType(f.unresolvedType(), d); } else { WhileyFile.Method m = (WhileyFile.Method) d; m.resolvedType = resolveAsType(m.unresolvedType(), d); } // Finally, propagate type information throughout all statements in the // function / method body. 
propagate(d.statements, environment);
}

// Blocks & Statements

/**
 * Propagate type information in a flow-sensitive fashion through a block of
 * statements, whilst type checking each statement and expression.
 *
 * @param block
 *            Block of statements to flow sensitively type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return the environment holding for all variables after the block
 */
private Environment propagate(ArrayList<Stmt> block, Environment environment) {
    for (int i = 0; i != block.size(); ++i) {
        Stmt stmt = block.get(i);
        if (stmt instanceof Expr) {
            // A bare expression used as a statement (e.g. a method call):
            // type it via the expression rules and write the typed form back
            // into the block in place.
            block.set(i, (Stmt) propagate((Expr) stmt, environment, current));
        } else {
            // Ordinary statement: thread the environment through it.
            environment = propagate(stmt, environment);
        }
    }
    return environment;
}

/**
 * Propagate type information in a flow-sensitive fashion through a given
 * statement, whilst type checking it at the same time. For statements which
 * contain other statements (e.g. if, while, etc), then this will
 * recursively propagate type information through them as well.
 *
 * @param stmt
 *            Statement to flow-sensitively type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this statement
 * @return the environment holding after the statement
 */
private Environment propagate(Stmt stmt, Environment environment) {
    try {
        // Dispatch on the concrete statement class; each overload below
        // implements the typing rule for one statement form.
        if(stmt instanceof Stmt.VariableDeclaration) {
            return propagate((Stmt.VariableDeclaration) stmt, environment);
        } else if(stmt instanceof Stmt.Assign) {
            return propagate((Stmt.Assign) stmt, environment);
        } else if(stmt instanceof Stmt.Return) {
            return propagate((Stmt.Return) stmt, environment);
        } else if(stmt instanceof Stmt.IfElse) {
            return propagate((Stmt.IfElse) stmt, environment);
        } else if(stmt instanceof Stmt.While) {
            return propagate((Stmt.While) stmt, environment);
        } else if(stmt instanceof Stmt.ForAll) {
            return propagate((Stmt.ForAll) stmt, environment);
        } else if(stmt instanceof Stmt.Switch) {
            return propagate((Stmt.Switch) stmt, environment);
        } else if(stmt instanceof Stmt.DoWhile) {
            return propagate((Stmt.DoWhile) stmt, environment);
        } else
if(stmt instanceof Stmt.Break) {
    return propagate((Stmt.Break) stmt, environment);
} else if(stmt instanceof Stmt.Throw) {
    return propagate((Stmt.Throw) stmt, environment);
} else if(stmt instanceof Stmt.TryCatch) {
    return propagate((Stmt.TryCatch) stmt, environment);
} else if(stmt instanceof Stmt.Assert) {
    return propagate((Stmt.Assert) stmt, environment);
} else if(stmt instanceof Stmt.Assume) {
    return propagate((Stmt.Assume) stmt, environment);
} else if(stmt instanceof Stmt.Debug) {
    return propagate((Stmt.Debug) stmt, environment);
} else if(stmt instanceof Stmt.Skip) {
    return propagate((Stmt.Skip) stmt, environment);
} else {
    // No overload matched: statement kind unknown to the type checker.
    internalFailure("unknown statement: " + stmt.getClass().getName(), filename, stmt);
    return null; // deadcode
}
} catch(ResolveError e) {
    // Name resolution failed inside the statement: report as a syntax error
    // at the statement's location.
    syntaxError(errorMessage(RESOLUTION_ERROR, e.getMessage()), filename, stmt, e);
    return null; // dead code
} catch(SyntaxError e) {
    // Already positioned and formatted; just rethrow.
    throw e;
} catch(Throwable e) {
    // Anything else is a bug in the checker itself.
    internalFailure(e.getMessage(), filename, stmt, e);
    return null; // dead code
}
}

/**
 * Type check an assertion statement. This requires checking that the
 * expression being asserted is well-formed and has boolean type.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return the (unchanged) environment after the assertion
 */
private Environment propagate(Stmt.Assert stmt, Environment environment) {
    stmt.expr = propagate(stmt.expr, environment, current);
    checkIsSubtype(Type.T_BOOL, stmt.expr);
    return environment;
}

/**
 * Type check an assume statement. This requires checking that the
 * expression being asserted is well-formed and has boolean type.
* * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.Assume stmt, Environment environment) { stmt.expr = propagate(stmt.expr,environment,current); checkIsSubtype(Type.T_BOOL,stmt.expr); return environment; } /** * Type check a variable declaration statement. This must associate the * given variable with either its declared and actual type in the * environment. If no initialiser is given, then the actual type is the void * (since the variable is not yet defined). Otherwise, the actual type is * the type of the initialiser expression. Additionally, when an initialiser * is given we must check it is well-formed and that it is a subtype of the * declared type. * * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.VariableDeclaration stmt, Environment environment) throws Exception { // First, resolve declared type stmt.type = resolveAsType(stmt.unresolvedType,current); // First, resolve type of initialiser if(stmt.expr != null) { stmt.expr = propagate(stmt.expr,environment,current); checkIsSubtype(stmt.type,stmt.expr); } // Second, update environment accordingly. Observe that we can safely // assume the variable is not already declared in the enclosing scope // because the parser checks this for us. environment = environment.put(stmt.name, stmt.type); // Done. return environment; } /** * Type check an assignment statement. 
* * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.Assign stmt, Environment environment) throws Exception { Expr.LVal lhs = propagate(stmt.lhs,environment); Expr rhs = propagate(stmt.rhs,environment,current); if(lhs instanceof Expr.RationalLVal) { // represents a destructuring assignment Expr.RationalLVal tv = (Expr.RationalLVal) lhs; if(!Type.isImplicitCoerciveSubtype(Type.T_REAL, rhs.result().raw())) { syntaxError("real value expected, got " + rhs.result(),filename,rhs); } if (tv.numerator instanceof Expr.AssignedVariable && tv.denominator instanceof Expr.AssignedVariable) { Expr.AssignedVariable lv = (Expr.AssignedVariable) tv.numerator; Expr.AssignedVariable rv = (Expr.AssignedVariable) tv.denominator; lv.type = Nominal.T_VOID; rv.type = Nominal.T_VOID; lv.afterType = Nominal.T_INT; rv.afterType = Nominal.T_INT; environment = environment.put(lv.var, Nominal.T_INT); environment = environment.put(rv.var, Nominal.T_INT); } else { syntaxError(errorMessage(INVALID_TUPLE_LVAL),filename,lhs); } } else if(lhs instanceof Expr.Tuple) { // represents a destructuring assignment Expr.Tuple tv = (Expr.Tuple) lhs; ArrayList<Expr> tvFields = tv.fields; // FIXME: loss of nominal information here Type rawRhs = rhs.result().raw(); Nominal.EffectiveTuple tupleRhs = expandAsEffectiveTuple(rhs.result()); // FIXME: the following is something of a kludge. It would also be // nice to support more expressive destructuring assignment // operations. 
if(tupleRhs == null) { syntaxError("tuple value expected, got " + rhs.result().nominal(),filename,rhs); return null; // deadcode } List<Nominal> rhsElements = tupleRhs.elements(); if(rhsElements.size() != tvFields.size()) { syntaxError("incompatible tuple assignment",filename,rhs); } for(int i=0;i!=tvFields.size();++i) { Expr f = tvFields.get(i); Nominal t = rhsElements.get(i); if(f instanceof Expr.AbstractVariable) { Expr.AbstractVariable av = (Expr.AbstractVariable) f; Expr.AssignedVariable lv; if(lhs instanceof Expr.AssignedVariable) { // this case just avoids creating another object everytime we // visit this statement. lv = (Expr.AssignedVariable) lhs; } else { lv = new Expr.AssignedVariable(av.var, av.attributes()); } lv.type = Nominal.T_VOID; lv.afterType = t; environment = environment.put(lv.var, t); tvFields.set(i, lv); } else { syntaxError(errorMessage(INVALID_TUPLE_LVAL),filename,f); } } } else { Expr.AssignedVariable av = inferAfterType(lhs, rhs.result()); environment = environment.put(av.var, av.afterType); } stmt.lhs = (Expr.LVal) lhs; stmt.rhs = rhs; return environment; } private Expr.AssignedVariable inferAfterType(Expr.LVal lv, Nominal afterType) { if (lv instanceof Expr.AssignedVariable) { Expr.AssignedVariable v = (Expr.AssignedVariable) lv; v.afterType = afterType; return v; } else if (lv instanceof Expr.Dereference) { Expr.Dereference pa = (Expr.Dereference) lv; // NOTE: the before and after types are the same since an assignment // through a reference does not change its type. 
checkIsSubtype(pa.srcType,Nominal.Reference(afterType),lv); return inferAfterType((Expr.LVal) pa.src, pa.srcType); } else if (lv instanceof Expr.IndexOf) { Expr.IndexOf la = (Expr.IndexOf) lv; Nominal.EffectiveIndexible srcType = la.srcType; afterType = (Nominal) srcType.update(la.index.result(), afterType); return inferAfterType((Expr.LVal) la.src, afterType); } else if(lv instanceof Expr.FieldAccess) { Expr.FieldAccess la = (Expr.FieldAccess) lv; Nominal.EffectiveRecord srcType = la.srcType; // NOTE: I know I can modify this hash map, since it's created fresh // in Nominal.Record.fields(). afterType = (Nominal) srcType.update(la.name, afterType); return inferAfterType((Expr.LVal) la.src, afterType); } else { internalFailure("unknown lval: " + lv.getClass().getName(), filename, lv); return null; //deadcode } } /** * Type check a break statement. This requires propagating the current * environment to the block destination, to ensure that the actual types of * all variables at that point are precise. * * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.Break stmt, Environment environment) { // FIXME: need to propagate environment to the break destination return BOTTOM; } /** * Type check an assume statement. This requires checking that the * expression being printed is well-formed and has string type. * * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.Debug stmt, Environment environment) { stmt.expr = propagate(stmt.expr,environment,current); checkIsSubtype(Type.T_STRING,stmt.expr); return environment; } /** * Type check a do-while statement. 
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return the environment holding after the loop
 */
private Environment propagate(Stmt.DoWhile stmt, Environment environment) {
    // Iterate to a fixed point: the body may refine variable types (e.g. an
    // int widened to real), in which case it must be re-analysed under the
    // join of the original environment and the latest body result.
    Environment old = null;
    Environment tmp = null;
    Environment orig = environment.clone();
    boolean firstTime = true;
    do {
        old = environment.clone();
        if(!firstTime) {
            // don't do this on the first go around, to mimic how the
            // do-while loop works (the condition is only tested after the
            // body has run once).
            tmp = propagateCondition(stmt.condition, true, old.clone(), current).second();
            environment = join(orig.clone(), propagate(stmt.body, tmp));
        } else {
            firstTime = false;
            environment = join(orig.clone(), propagate(stmt.body, old));
        }
        old.free(); // hacky, but safe
    } while(!environment.equals(old));

    // Type check each loop invariant under the fixed-point environment.
    List<Expr> stmt_invariants = stmt.invariants;
    for (int i = 0; i != stmt_invariants.size(); ++i) {
        Expr invariant = stmt_invariants.get(i);
        invariant = propagate(invariant, environment, current);
        stmt_invariants.set(i, invariant);
        checkIsSubtype(Type.T_BOOL, invariant);
    }

    // The loop exits when the condition is false, so the out-environment is
    // the false-branch retyping of the condition.
    Pair<Expr,Environment> p = propagateCondition(stmt.condition, false, environment, current);
    stmt.condition = p.first();
    environment = p.second();
    return environment;
}

/**
 * Type check a <code>for</code> statement.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return the environment holding after the loop
 */
private Environment propagate(Stmt.ForAll stmt, Environment environment)
        throws Exception {

    stmt.source = propagate(stmt.source, environment, current);
    // The iteration source must be some collection (set, list, map, ...).
    Nominal.EffectiveCollection srcType = expandAsEffectiveCollection(stmt.source.result());
    stmt.srcType = srcType;

    if(srcType == null) {
        syntaxError(errorMessage(INVALID_SET_OR_LIST_EXPRESSION), filename, stmt);
    }

    // At this point, the major task is to determine what the types for the
    // iteration variables declared in the for loop. More than one variable
    // is permitted in some cases.
Nominal[] elementTypes = new Nominal[stmt.variables.size()]; if(elementTypes.length == 2 && srcType instanceof Nominal.EffectiveMap) { Nominal.EffectiveMap dt = (Nominal.EffectiveMap) srcType; elementTypes[0] = dt.key(); elementTypes[1] = dt.value(); } else { if(elementTypes.length == 1) { elementTypes[0] = srcType.element(); } else { syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED),filename,stmt); } } // Now, update the environment to include those declared variables ArrayList<String> stmtVariables = stmt.variables; for(int i=0;i!=elementTypes.length;++i) { String var = stmtVariables.get(i); if (environment.containsKey(var)) { syntaxError(errorMessage(VARIABLE_ALREADY_DEFINED,var), filename, stmt); } environment = environment.put(var, elementTypes[i]); } // Iterate to a fixed point Environment old = null; Environment orig = environment.clone(); do { old = environment.clone(); environment = join(orig.clone(),propagate(stmt.body,old)); old.free(); // hacky, but safe } while(!environment.equals(old)); // Remove loop variables from the environment, since they are only // declared for the duration of the body but not beyond. for(int i=0;i!=elementTypes.length;++i) { String var = stmtVariables.get(i); environment = environment.remove(var); } if (stmt.invariant != null) { stmt.invariant = propagate(stmt.invariant, environment, current); checkIsSubtype(Type.T_BOOL,stmt.invariant); } return environment; } private Environment propagate(Stmt.IfElse stmt, Environment environment) { // First, check condition and apply variable retypings. 
Pair<Expr,Environment> p1,p2; p1 = propagateCondition(stmt.condition,true,environment.clone(),current); p2 = propagateCondition(stmt.condition,false,environment,current); stmt.condition = p1.first(); Environment trueEnvironment = p1.second(); Environment falseEnvironment = p2.second(); // Second, update environments for true and false branches if(stmt.trueBranch != null && stmt.falseBranch != null) { trueEnvironment = propagate(stmt.trueBranch,trueEnvironment); falseEnvironment = propagate(stmt.falseBranch,falseEnvironment); } else if(stmt.trueBranch != null) { trueEnvironment = propagate(stmt.trueBranch,trueEnvironment); } else if(stmt.falseBranch != null){ trueEnvironment = environment; falseEnvironment = propagate(stmt.falseBranch,falseEnvironment); } // Finally, join results back together return join(trueEnvironment,falseEnvironment); } /** * Type check a <code>return</code> statement. If a return expression is * given, then we must check that this is well-formed and is a subtype of * the enclosing function or method's declared return type. The environment * after a return statement is "bottom" because that represents an * unreachable program point. * * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.Return stmt, Environment environment) throws Exception { if (stmt.expr != null) { stmt.expr = propagate(stmt.expr, environment, current); Nominal rhs = stmt.expr.result(); checkIsSubtype(current.resolvedType().ret(), rhs, stmt.expr); } environment.free(); return BOTTOM; } /** * Type check a <code>skip</code> statement, which has no effect on the * environment. 
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return the (unchanged) environment
 */
private Environment propagate(Stmt.Skip stmt, Environment environment) {
    // A skip has no effect on the typing environment.
    return environment;
}

/**
 * Type check a <code>switch</code> statement. Each case's constant
 * expressions are resolved to compile-time values, each case body is
 * checked under a copy of the incoming environment, and the resulting
 * environments are joined to form the out-environment.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return the join of the environments after every reachable case
 */
private Environment propagate(Stmt.Switch stmt, Environment environment)
        throws Exception {

    stmt.expr = propagate(stmt.expr, environment, current);

    Environment finalEnv = null;
    boolean hasDefault = false;

    for(Stmt.Case c : stmt.cases) {
        // first, resolve the constants
        ArrayList<Constant> values = new ArrayList<Constant>();
        for(Expr e : c.expr) {
            values.add(resolveAsConstant(e, current));
        }
        c.constants = values;

        // second, propagate through the statements
        Environment localEnv = environment.clone();
        localEnv = propagate(c.stmts, localEnv);

        if(finalEnv == null) {
            finalEnv = localEnv;
        } else {
            finalEnv = join(finalEnv, localEnv);
        }

        // third, keep track of whether a default
        // (a case with an empty expression list is the default case)
        hasDefault |= c.expr.isEmpty();
    }

    if(!hasDefault) {
        // in this case, there is no default case in the switch. We must
        // therefore assume that there are values which will fall right
        // through the switch statement without hitting a case. Therefore,
        // we must include the original environment to account for this.
        finalEnv = join(finalEnv, environment);
    } else {
        environment.free();
    }

    return finalEnv;
}

/**
 * Type check a <code>throw</code> statement. We must check that the throw
 * expression is well-formed. The environment after a throw statement is
 * "bottom" because that represents an unreachable program point.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return BOTTOM, since control never flows past a throw
 */
private Environment propagate(Stmt.Throw stmt, Environment environment) {
    stmt.expr = propagate(stmt.expr, environment, current);
    return BOTTOM;
}

/**
 * Type check a try-catch statement.
* * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.TryCatch stmt, Environment environment) throws Exception { for(Stmt.Catch handler : stmt.catches) { // FIXME: need to deal with handler environments properly! try { Nominal type = resolveAsType(handler.unresolvedType,current); handler.type = type; Environment local = environment.clone(); local = local.put(handler.variable, type); propagate(handler.stmts,local); local.free(); } catch(SyntaxError e) { throw e; } catch(Throwable t) { internalFailure(t.getMessage(),filename,handler,t); } } environment = propagate(stmt.body,environment); // need to do handlers here return environment; } /** * Type check a <code>whiley</code> statement. * * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.While stmt, Environment environment) { // Iterate to a fixed point Environment old = null; Environment tmp = null; Environment orig = environment.clone(); do { old = environment.clone(); tmp = propagateCondition(stmt.condition,true,old.clone(),current).second(); environment = join(orig.clone(),propagate(stmt.body,tmp)); old.free(); // hacky, but safe } while(!environment.equals(old)); List<Expr> stmt_invariants = stmt.invariants; for (int i = 0; i != stmt_invariants.size(); ++i) { Expr invariant = stmt_invariants.get(i); invariant = propagate(invariant, environment, current); stmt_invariants.set(i, invariant); checkIsSubtype(Type.T_BOOL, invariant); } Pair<Expr,Environment> p = propagateCondition(stmt.condition,false,environment,current); stmt.condition = p.first(); environment = p.second(); return environment; } // LVals private Expr.LVal propagate(Expr.LVal lval, Environment environment) { try { if(lval instanceof Expr.AbstractVariable) { Expr.AbstractVariable av = 
(Expr.AbstractVariable) lval; Nominal p = environment.get(av.var); if(p == null) { syntaxError(errorMessage(UNKNOWN_VARIABLE),filename,lval); } Expr.AssignedVariable lv = new Expr.AssignedVariable(av.var, av.attributes()); lv.type = p; return lv; } else if(lval instanceof Expr.RationalLVal) { Expr.RationalLVal av = (Expr.RationalLVal) lval; av.numerator = propagate(av.numerator,environment); av.denominator = propagate(av.denominator,environment); return av; } else if(lval instanceof Expr.Dereference) { Expr.Dereference pa = (Expr.Dereference) lval; Expr.LVal src = propagate((Expr.LVal) pa.src,environment); pa.src = src; pa.srcType = expandAsReference(src.result()); return pa; } else if(lval instanceof Expr.IndexOf) { // this indicates either a list, string or dictionary update Expr.IndexOf ai = (Expr.IndexOf) lval; Expr.LVal src = propagate((Expr.LVal) ai.src,environment); Expr index = propagate(ai.index,environment,current); ai.src = src; ai.index = index; Nominal.EffectiveIndexible srcType = expandAsEffectiveMap(src.result()); if(srcType == null) { syntaxError(errorMessage(INVALID_LVAL_EXPRESSION),filename,lval); } ai.srcType = srcType; return ai; } else if(lval instanceof Expr.AbstractDotAccess) { // this indicates a record update Expr.AbstractDotAccess ad = (Expr.AbstractDotAccess) lval; Expr.LVal src = propagate((Expr.LVal) ad.src,environment); Expr.FieldAccess ra = new Expr.FieldAccess(src, ad.name, ad.attributes()); Nominal.EffectiveRecord srcType = expandAsEffectiveRecord(src.result()); if(srcType == null) { syntaxError(errorMessage(INVALID_LVAL_EXPRESSION),filename,lval); } else if(srcType.field(ra.name) == null) { syntaxError(errorMessage(RECORD_MISSING_FIELD,ra.name),filename,lval); } ra.srcType = srcType; return ra; } } catch(SyntaxError e) { throw e; } catch(Throwable e) { internalFailure(e.getMessage(),filename,lval,e); return null; // dead code } internalFailure("unknown lval: " + lval.getClass().getName(),filename,lval); return null; // dead code } 
/** * The purpose of this method is to add variable names declared within a * type pattern. For example, as follows: * * <pre> * define tup as {int x, int y} where x < y * </pre> * * In this case, <code>x</code> and <code>y</code> are variable names * declared as part of the pattern. * * @param src * @param t * @param environment */ private Environment addDeclaredVariables(TypePattern pattern, Environment environment, WhileyFile.Context context) { if(pattern instanceof TypePattern.Leaf) { // do nout! } else if(pattern instanceof TypePattern.Union) { // FIXME: in principle, we can do better here. However, I leave this // unusual case for the future. } else if(pattern instanceof TypePattern.Intersection) { // FIXME: in principle, we can do better here. However, I leave this // unusual case for the future. } else if(pattern instanceof TypePattern.Record) { TypePattern.Record tp = (TypePattern.Record) pattern; for(TypePattern element : tp.elements) { addDeclaredVariables(element,environment,context); } } else { TypePattern.Tuple tp = (TypePattern.Tuple) pattern; for(TypePattern element : tp.elements) { addDeclaredVariables(element,environment,context); } } if (pattern.var != null) { Nominal type = resolveAsType(pattern.toSyntacticType(), context); environment = environment.put(pattern.var, type); } return environment; } // Condition /** * <p> * Propagate type information through an expression being used as a * condition, whilst checking it is well-typed at the same time. When used * as a condition (e.g. of an if-statement) an expression may update the * environment in accordance with any type tests used within. This is * important to ensure that variables are retyped in e.g. if-statements. For * example: * </p> * * <pre> * if x is int && x >= 0 * // x is int * else: * // * </pre> * <p> * Here, the if-condition must update the type of x in the true branch, but * *cannot* update the type of x in the false branch. 
* </p> * <p> * To handle conditions on the false branch, this function uses a sign flag * rather than expanding them using DeMorgan's laws (for efficiency). When * determining type for the false branch, the sign flag is initially false. * This prevents falsely concluding that e.g. "x is int" holds in the false * branch. * </p> * * @param expr * Condition expression to type check and propagate through * @param sign * Indicates how expression should be treated. If true, then * expression is treated "as is"; if false, then expression * should be treated as negated * @param environment * Determines the type of all variables immediately going into * this expression * @param context * Enclosing context of this expression (e.g. type declaration, * function declaration, etc) * @return */ public Pair<Expr, Environment> propagateCondition(Expr expr, boolean sign, Environment environment, Context context) { // Split up into the compound and non-compound forms. if(expr instanceof Expr.UnOp) { return propagateCondition((Expr.UnOp)expr,sign,environment,context); } else if(expr instanceof Expr.BinOp) { return propagateCondition((Expr.BinOp)expr,sign,environment,context); } else { // For non-compound forms, can just default back to the base rules // for general expressions. expr = propagate(expr,environment,context); checkIsSubtype(Type.T_BOOL,expr,context); return new Pair(expr,environment); } } /** * <p> * Propagate type information through a unary expression being used as a * condition and, in fact, only logical not is syntactically valid here. * </p> * * @param expr * Condition expression to type check and propagate through * @param sign * Indicates how expression should be treated. If true, then * expression is treated "as is"; if false, then expression * should be treated as negated * @param environment * Determines the type of all variables immediately going into * this expression * @param context * Enclosing context of this expression (e.g. 
type declaration, * function declaration, etc) * @return */ private Pair<Expr, Environment> propagateCondition(Expr.UnOp expr, boolean sign, Environment environment, Context context) { Expr.UnOp uop = (Expr.UnOp) expr; // Check whether we have logical not if(uop.op == Expr.UOp.NOT) { Pair<Expr,Environment> p = propagateCondition(uop.mhs,!sign,environment,context); uop.mhs = p.first(); checkIsSubtype(Type.T_BOOL,uop.mhs,context); uop.type = Nominal.T_BOOL; return new Pair(uop,p.second()); } else { // Nothing else other than logical not is valid at this point. syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION),context,expr); return null; // deadcode } } /** * <p> * Propagate type information through a binary expression being used as a * condition. In this case, only logical connectives ("&&", "||", "^") and * comparators (e.g. "==", "<=", etc) are permitted here. * </p> * * @param expr * Condition expression to type check and propagate through * @param sign * Indicates how expression should be treated. If true, then * expression is treated "as is"; if false, then expression * should be treated as negated * @param environment * Determines the type of all variables immediately going into * this expression * @param context * Enclosing context of this expression (e.g. type declaration, * function declaration, etc) * @return */ private Pair<Expr, Environment> propagateCondition(Expr.BinOp bop, boolean sign, Environment environment, Context context) { Expr.BOp op = bop.op; // Split into the two broard cases: logical connectives and primitives. 
switch (op) { case AND: case OR: case XOR: return resolveNonLeafCondition(bop,sign,environment,context); case EQ: case NEQ: case LT: case LTEQ: case GT: case GTEQ: case ELEMENTOF: case SUBSET: case SUBSETEQ: case IS: return resolveLeafCondition(bop,sign,environment,context); default: syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, bop); return null; // dead code } } private Pair<Expr, Environment> resolveNonLeafCondition( Expr.BinOp bop, boolean sign, Environment environment, Context context) { Expr.BOp op = bop.op; Pair<Expr,Environment> p; boolean followOn = (sign && op == Expr.BOp.AND) || (!sign && op == Expr.BOp.OR); if(followOn) { p = propagateCondition(bop.lhs,sign,environment.clone(),context); bop.lhs = p.first(); p = propagateCondition(bop.rhs,sign,p.second(),context); bop.rhs = p.first(); environment = p.second(); } else { // We could do better here p = propagateCondition(bop.lhs,sign,environment.clone(),context); bop.lhs = p.first(); Environment local = p.second(); // Recompute the lhs assuming that it is false. This is necessary to // generate the right environment going into the rhs, which is only // evaluated if the lhs is false. For example: // if(e is int && e > 0): // else: // In the false branch, we're determing the environment for // !(e is int && e > 0). This becomes !(e is int) || (e <= 0) where // on the rhs we require (e is int). p = propagateCondition(bop.lhs,!sign,environment.clone(),context); // FIXME: shouldn't the following line be !sign ??? // RecursiveType_Valid_3 + 4 fail without this?? 
p = propagateCondition(bop.rhs,sign,p.second(),context); bop.rhs = p.first(); environment = join(local,p.second()); } checkIsSubtype(Type.T_BOOL,bop.lhs,context); checkIsSubtype(Type.T_BOOL,bop.rhs,context); bop.srcType = Nominal.T_BOOL; return new Pair<Expr,Environment>(bop,environment); } private Pair<Expr, Environment> resolveLeafCondition(Expr.BinOp bop, boolean sign, Environment environment, Context context) { Expr.BOp op = bop.op; Expr lhs = propagate(bop.lhs,environment,context); Expr rhs = propagate(bop.rhs,environment,context); bop.lhs = lhs; bop.rhs = rhs; Type lhsRawType = lhs.result().raw(); Type rhsRawType = rhs.result().raw(); switch(op) { case IS: // this one is slightly more difficult. In the special case that // we have a type constant on the right-hand side then we want // to check that it makes sense. Otherwise, we just check that // it has type meta. if(rhs instanceof Expr.TypeVal) { // yes, right-hand side is a constant Expr.TypeVal tv = (Expr.TypeVal) rhs; Nominal unconstrainedTestType = resolveAsUnconstrainedType(tv.unresolvedType,context); /** * Determine the types guaranteed to hold on the true and false * branches respectively. We have to use the negated * unconstrainedTestType for the false branch because only that * is guaranteed if the test fails. For example: * * <pre> * define nat as int where $ &gt;= 0 * define listnat as [int]|nat * * int f([int]|int x): * if x if listnat: * x : [int]|int * ... * else: * x : int * </pre> * * The unconstrained type of listnat is [int], since nat is a * constrained type. 
*/ Nominal glbForFalseBranch = Nominal.intersect(lhs.result(), Nominal.Negation(unconstrainedTestType)); Nominal glbForTrueBranch = Nominal.intersect(lhs.result(), tv.type); if(glbForFalseBranch.raw() == Type.T_VOID) { // DEFINITE TRUE CASE syntaxError(errorMessage(BRANCH_ALWAYS_TAKEN), context, bop); } else if (glbForTrueBranch.raw() == Type.T_VOID) { // DEFINITE FALSE CASE syntaxError(errorMessage(INCOMPARABLE_OPERANDS, lhsRawType, tv.type.raw()), context, bop); } // Finally, if the lhs is local variable then update its // type in the resulting environment. if(lhs instanceof Expr.LocalVariable) { Expr.LocalVariable lv = (Expr.LocalVariable) lhs; Nominal newType; if(sign) { newType = glbForTrueBranch; } else { newType = glbForFalseBranch; } environment = environment.put(lv.var,newType); } } else { // In this case, we can't update the type of the lhs since // we don't know anything about the rhs. It may be possible // to support bounds here in order to do that, but frankly // that's future work :) checkIsSubtype(Type.T_META,rhs,context); } bop.srcType = lhs.result(); break; case ELEMENTOF: Type.EffectiveList listType = rhsRawType instanceof Type.EffectiveList ? (Type.EffectiveList) rhsRawType : null; Type.EffectiveSet setType = rhsRawType instanceof Type.EffectiveSet ? 
(Type.EffectiveSet) rhsRawType : null; if (listType != null && !Type.isImplicitCoerciveSubtype(listType.element(), lhsRawType)) { syntaxError(errorMessage(INCOMPARABLE_OPERANDS, lhsRawType,listType.element()), context, bop); } else if (setType != null && !Type.isImplicitCoerciveSubtype(setType.element(), lhsRawType)) { syntaxError(errorMessage(INCOMPARABLE_OPERANDS, lhsRawType,setType.element()), context, bop); } bop.srcType = rhs.result(); break; case SUBSET: case SUBSETEQ: case LT: case LTEQ: case GTEQ: case GT: if(op == Expr.BOp.SUBSET || op == Expr.BOp.SUBSETEQ) { checkIsSubtype(Type.T_SET_ANY,lhs,context); checkIsSubtype(Type.T_SET_ANY,rhs,context); } else { checkIsSubtype(Type.T_REAL,lhs,context); checkIsSubtype(Type.T_REAL,rhs,context); } if(Type.isImplicitCoerciveSubtype(lhsRawType,rhsRawType)) { bop.srcType = lhs.result(); } else if(Type.isImplicitCoerciveSubtype(rhsRawType,lhsRawType)) { bop.srcType = rhs.result(); } else { syntaxError(errorMessage(INCOMPARABLE_OPERANDS,lhsRawType,rhsRawType),context,bop); return null; // dead code } break; case NEQ: // following is a sneaky trick for the special case below sign = !sign; case EQ: // first, check for special case of e.g. x != null. 
This is then // treated the same as !(x is null) if (lhs instanceof Expr.LocalVariable && rhs instanceof Expr.Constant && ((Expr.Constant) rhs).value == Constant.V_NULL) { // bingo, special case Expr.LocalVariable lv = (Expr.LocalVariable) lhs; Nominal newType; Nominal glb = Nominal.intersect(lhs.result(), Nominal.T_NULL); if(glb.raw() == Type.T_VOID) { syntaxError(errorMessage(INCOMPARABLE_OPERANDS,lhs.result().raw(),Type.T_NULL),context,bop); return null; } else if(sign) { newType = glb; } else { newType = Nominal.intersect(lhs.result(), Nominal.T_NOTNULL); } bop.srcType = lhs.result(); environment = environment.put(lv.var,newType); } else { // handle general case if(Type.isImplicitCoerciveSubtype(lhsRawType,rhsRawType)) { bop.srcType = lhs.result(); } else if(Type.isImplicitCoerciveSubtype(rhsRawType,lhsRawType)) { bop.srcType = rhs.result(); } else { syntaxError(errorMessage(INCOMPARABLE_OPERANDS,lhsRawType,rhsRawType),context,bop); return null; // dead code } } } return new Pair(bop,environment); } // Expressions public Expr propagate(Expr expr, Environment environment, Context context) { try { if(expr instanceof Expr.BinOp) { return propagate((Expr.BinOp) expr,environment,context); } else if(expr instanceof Expr.UnOp) { return propagate((Expr.UnOp) expr,environment,context); } else if(expr instanceof Expr.Comprehension) { return propagate((Expr.Comprehension) expr,environment,context); } else if(expr instanceof Expr.Constant) { return propagate((Expr.Constant) expr,environment,context); } else if(expr instanceof Expr.Cast) { return propagate((Expr.Cast) expr,environment,context); } else if(expr instanceof Expr.Map) { return propagate((Expr.Map) expr,environment,context); } else if(expr instanceof Expr.AbstractFunctionOrMethod) { return propagate((Expr.AbstractFunctionOrMethod) expr,environment,context); } else if(expr instanceof Expr.AbstractInvoke) { return propagate((Expr.AbstractInvoke) expr,environment,context); } else if(expr instanceof 
Expr.AbstractIndirectInvoke) { return propagate((Expr.AbstractIndirectInvoke) expr,environment,context); } else if(expr instanceof Expr.IndexOf) { return propagate((Expr.IndexOf) expr,environment,context); } else if(expr instanceof Expr.Lambda) { return propagate((Expr.Lambda) expr,environment,context); } else if(expr instanceof Expr.LengthOf) { return propagate((Expr.LengthOf) expr,environment,context); } else if(expr instanceof Expr.AbstractVariable) { return propagate((Expr.AbstractVariable) expr,environment,context); } else if(expr instanceof Expr.List) { return propagate((Expr.List) expr,environment,context); } else if(expr instanceof Expr.Set) { return propagate((Expr.Set) expr,environment,context); } else if(expr instanceof Expr.SubList) { return propagate((Expr.SubList) expr,environment,context); } else if(expr instanceof Expr.SubString) { return propagate((Expr.SubString) expr,environment,context); } else if(expr instanceof Expr.AbstractDotAccess) { return propagate((Expr.AbstractDotAccess) expr,environment,context); } else if(expr instanceof Expr.Dereference) { return propagate((Expr.Dereference) expr,environment,context); } else if(expr instanceof Expr.Record) { return propagate((Expr.Record) expr,environment,context); } else if(expr instanceof Expr.New) { return propagate((Expr.New) expr,environment,context); } else if(expr instanceof Expr.Tuple) { return propagate((Expr.Tuple) expr,environment,context); } else if(expr instanceof Expr.TypeVal) { return propagate((Expr.TypeVal) expr,environment,context); } } catch(ResolveError e) { syntaxError(errorMessage(RESOLUTION_ERROR,e.getMessage()),context,expr,e); } catch(SyntaxError e) { throw e; } catch(Throwable e) { internalFailure(e.getMessage(),context,expr,e); return null; // dead code } internalFailure("unknown expression: " + expr.getClass().getName(),context,expr); return null; // dead code } private Expr propagate(Expr.BinOp expr, Environment environment, Context context) throws Exception { // TODO: 
split binop into arithmetic and conditional operators. This // would avoid the following case analysis since conditional binary // operators and arithmetic binary operators actually behave quite // differently. switch(expr.op) { case AND: case OR: case XOR: case EQ: case NEQ: case LT: case LTEQ: case GT: case GTEQ: case ELEMENTOF: case SUBSET: case SUBSETEQ: case IS: return propagateCondition(expr,true,environment,context).first(); } Expr lhs = propagate(expr.lhs,environment,context); Expr rhs = propagate(expr.rhs,environment,context); expr.lhs = lhs; expr.rhs = rhs; Type lhsRawType = lhs.result().raw(); Type rhsRawType = rhs.result().raw(); boolean lhs_set = Type.isImplicitCoerciveSubtype(Type.T_SET_ANY,lhsRawType); boolean rhs_set = Type.isImplicitCoerciveSubtype(Type.T_SET_ANY,rhsRawType); boolean lhs_list = Type.isImplicitCoerciveSubtype(Type.T_LIST_ANY,lhsRawType); boolean rhs_list = Type.isImplicitCoerciveSubtype(Type.T_LIST_ANY,rhsRawType); boolean lhs_str = Type.isSubtype(Type.T_STRING,lhsRawType); boolean rhs_str = Type.isSubtype(Type.T_STRING,rhsRawType); Type srcType; if(lhs_str || rhs_str) { switch(expr.op) { case LISTAPPEND: expr.op = Expr.BOp.STRINGAPPEND; case STRINGAPPEND: break; default: syntaxError("Invalid string operation: " + expr.op, context, expr); } srcType = Type.T_STRING; } else if(lhs_list && rhs_list) { checkIsSubtype(Type.T_LIST_ANY,lhs,context); checkIsSubtype(Type.T_LIST_ANY,rhs,context); Type.EffectiveList lel = (Type.EffectiveList) lhsRawType; Type.EffectiveList rel = (Type.EffectiveList) rhsRawType; switch(expr.op) { case LISTAPPEND: srcType = Type.List(Type.Union(lel.element(),rel.element()),false); break; default: syntaxError("invalid list operation: " + expr.op,context,expr); return null; // dead-code } } else if(lhs_set && rhs_set) { checkIsSubtype(Type.T_SET_ANY,lhs,context); checkIsSubtype(Type.T_SET_ANY,rhs,context); // FIXME: something tells me there should be a function for doing // this. Perhaps effectiveSetType? 
if(lhs_list) { Type.EffectiveList tmp = (Type.EffectiveList) lhsRawType; lhsRawType = Type.Set(tmp.element(),false); } if(rhs_list) { Type.EffectiveList tmp = (Type.EffectiveList) rhsRawType; rhsRawType = Type.Set(tmp.element(),false); } // FIXME: loss of nominal information here Type.EffectiveSet ls = (Type.EffectiveSet) lhsRawType; Type.EffectiveSet rs = (Type.EffectiveSet) rhsRawType; switch(expr.op) { case ADD: expr.op = Expr.BOp.UNION; case UNION: // TODO: this forces unnecessary coercions, which would be // good to remove. srcType = Type.Set(Type.Union(ls.element(),rs.element()),false); break; case BITWISEAND: expr.op = Expr.BOp.INTERSECTION; case INTERSECTION: // FIXME: this is just plain wierd. if(Type.isSubtype(lhsRawType, rhsRawType)) { srcType = rhsRawType; } else { srcType = lhsRawType; } break; case SUB: expr.op = Expr.BOp.DIFFERENCE; case DIFFERENCE: srcType = lhsRawType; break; default: syntaxError("invalid set operation: " + expr.op,context,expr); return null; // deadcode } } else { switch(expr.op) { case IS: case AND: case OR: case XOR: return propagateCondition(expr,true,environment,context).first(); case BITWISEAND: case BITWISEOR: case BITWISEXOR: checkIsSubtype(Type.T_BYTE,lhs,context); checkIsSubtype(Type.T_BYTE,rhs,context); srcType = Type.T_BYTE; break; case LEFTSHIFT: case RIGHTSHIFT: checkIsSubtype(Type.T_BYTE,lhs,context); checkIsSubtype(Type.T_INT,rhs,context); srcType = Type.T_BYTE; break; case RANGE: checkIsSubtype(Type.T_INT,lhs,context); checkIsSubtype(Type.T_INT,rhs,context); srcType = Type.List(Type.T_INT, false); break; case REM: checkIsSubtype(Type.T_INT,lhs,context); checkIsSubtype(Type.T_INT,rhs,context); srcType = Type.T_INT; break; default: // all other operations go through here if(Type.isImplicitCoerciveSubtype(lhsRawType,rhsRawType)) { checkIsSubtype(Type.T_REAL,lhs,context); if(Type.isSubtype(Type.T_CHAR, lhsRawType)) { srcType = Type.T_INT; } else if(Type.isSubtype(Type.T_INT, lhsRawType)) { srcType = Type.T_INT; } else 
{ srcType = Type.T_REAL; } } else { checkIsSubtype(Type.T_REAL,lhs,context); checkIsSubtype(Type.T_REAL,rhs,context); if(Type.isSubtype(Type.T_CHAR, rhsRawType)) { srcType = Type.T_INT; } else if(Type.isSubtype(Type.T_INT, rhsRawType)) { srcType = Type.T_INT; } else { srcType = Type.T_REAL; } } } } // FIXME: loss of nominal information expr.srcType = Nominal.construct(srcType,srcType); return expr; } private Expr propagate(Expr.UnOp expr, Environment environment, Context context) throws Exception { if (expr.op == Expr.UOp.NOT) { // hand off to special method for conditions return propagateCondition(expr, true, environment, context).first(); } Expr src = propagate(expr.mhs, environment,context); expr.mhs = src; switch(expr.op) { case NEG: checkIsSubtype(Type.T_REAL,src,context); break; case INVERT: checkIsSubtype(Type.T_BYTE,src,context); break; default: internalFailure( "unknown operator: " + expr.op.getClass().getName(), context, expr); } expr.type = src.result(); return expr; } private Expr propagate(Expr.Comprehension expr, Environment environment, Context context) throws Exception { ArrayList<Pair<String,Expr>> sources = expr.sources; Environment local = environment.clone(); for(int i=0;i!=sources.size();++i) { Pair<String,Expr> p = sources.get(i); Expr e = propagate(p.second(),local,context); p = new Pair<String,Expr>(p.first(),e); sources.set(i,p); Nominal element; Nominal type = e.result(); Nominal.EffectiveCollection colType = expandAsEffectiveCollection(type); if(colType == null) { syntaxError(errorMessage(INVALID_SET_OR_LIST_EXPRESSION),context,e); return null; // dead code } // update environment for subsequent source expressions, the // condition and the value. 
local = local.put(p.first(),colType.element()); } if(expr.condition != null) { expr.condition = propagate(expr.condition,local,context); } if (expr.cop == Expr.COp.SETCOMP || expr.cop == Expr.COp.LISTCOMP) { expr.value = propagate(expr.value,local,context); expr.type = Nominal.Set(expr.value.result(), false); } else { expr.type = Nominal.T_BOOL; } local.free(); return expr; } private Expr propagate(Expr.Constant expr, Environment environment, Context context) { return expr; } private Expr propagate(Expr.Cast c, Environment environment, Context context) throws Exception { c.expr = propagate(c.expr,environment,context); c.type = resolveAsType(c.unresolvedType, context); Type from = c.expr.result().raw(); Type to = c.type.raw(); if (!Type.isExplicitCoerciveSubtype(to, from)) { syntaxError(errorMessage(SUBTYPE_ERROR, to, from), context, c); } return c; } private Expr propagate(Expr.AbstractFunctionOrMethod expr, Environment environment, Context context) throws Exception { if(expr instanceof Expr.FunctionOrMethod) { return expr; } Pair<NameID, Nominal.FunctionOrMethod> p; if (expr.paramTypes != null) { ArrayList<Nominal> paramTypes = new ArrayList<Nominal>(); for (SyntacticType t : expr.paramTypes) { paramTypes.add(resolveAsType(t, context)); } // FIXME: clearly a bug here in the case of message reference p = (Pair) resolveAsFunctionOrMethod(expr.name, paramTypes, context); } else { p = resolveAsFunctionOrMethod(expr.name, context); } expr = new Expr.FunctionOrMethod(p.first(),expr.paramTypes,expr.attributes()); expr.type = p.second(); return expr; } private Expr propagate(Expr.Lambda expr, Environment environment, Context context) throws Exception { ArrayList<Type> rawTypes = new ArrayList<Type>(); ArrayList<Type> nomTypes = new ArrayList<Type>(); for(WhileyFile.Parameter p : expr.parameters) { Nominal n = resolveAsType(p.type,context); rawTypes.add(n.raw()); nomTypes.add(n.nominal()); // Now, update the environment to include those declared variables String var = 
p.name(); if (environment.containsKey(var)) { syntaxError(errorMessage(VARIABLE_ALREADY_DEFINED,var), context, p); } environment = environment.put(var, n); } expr.body = propagate(expr.body,environment,context); Type.FunctionOrMethod rawType; Type.FunctionOrMethod nomType; if(Exprs.isPure(expr.body, context)) { rawType = Type.Function(expr.body.result().raw(), Type.T_VOID, rawTypes); nomType = Type.Function(expr.body.result().nominal(), Type.T_VOID, nomTypes); } else { rawType = Type.Method(expr.body.result().raw(), Type.T_VOID, rawTypes); nomType = Type.Method(expr.body.result().nominal(), Type.T_VOID, nomTypes); } expr.type = (Nominal.FunctionOrMethod) Nominal.construct(nomType,rawType); return expr; } private Expr propagate(Expr.AbstractIndirectInvoke expr, Environment environment, Context context) throws Exception { expr.src = propagate(expr.src, environment, context); Nominal type = expr.src.result(); if (!(type instanceof Nominal.FunctionOrMethod)) { syntaxError("function or method type expected", context, expr.src); } Nominal.FunctionOrMethod funType = (Nominal.FunctionOrMethod) type; List<Nominal> paramTypes = funType.params(); ArrayList<Expr> exprArgs = expr.arguments; for (int i = 0; i != exprArgs.size(); ++i) { Nominal pt = paramTypes.get(i); Expr arg = propagate(exprArgs.get(i), environment, context); checkIsSubtype(pt, arg, context); exprArgs.set(i, arg); } if (funType instanceof Nominal.Function) { Expr.IndirectFunctionCall ifc = new Expr.IndirectFunctionCall(expr.src, exprArgs, expr.attributes()); ifc.functionType = (Nominal.Function) funType; return ifc; } else { Expr.IndirectMethodCall imc = new Expr.IndirectMethodCall(expr.src, exprArgs, expr.attributes()); imc.methodType = (Nominal.Method) funType; return imc; } } private Expr propagate(Expr.AbstractInvoke expr, Environment environment, Context context) throws Exception { // first, resolve through receiver and parameters. 
Expr receiver = expr.qualification; if(receiver != null) { receiver = propagate(receiver,environment,context); expr.qualification = receiver; } ArrayList<Expr> exprArgs = expr.arguments; ArrayList<Nominal> paramTypes = new ArrayList<Nominal>(); for(int i=0;i!=exprArgs.size();++i) { Expr arg = propagate(exprArgs.get(i),environment,context); exprArgs.set(i, arg); paramTypes.add(arg.result()); } // second, determine whether we already have a fully qualified name and // then lookup the appropriate function. if(receiver instanceof Expr.ModuleAccess) { // Yes, this function or method is qualified Expr.ModuleAccess ma = (Expr.ModuleAccess) receiver; NameID name = new NameID(ma.mid,expr.name); Nominal.FunctionOrMethod funType = resolveAsFunctionOrMethod(name, paramTypes, context); if(funType instanceof Nominal.Function) { Expr.FunctionCall r = new Expr.FunctionCall(name, ma, exprArgs, expr.attributes()); r.functionType = (Nominal.Function) funType; return r; } else { Expr.MethodCall r = new Expr.MethodCall(name, ma, exprArgs, expr.attributes()); r.methodType = (Nominal.Method) funType; return r; } } else if(receiver != null) { // function is qualified, so this is used as the scope for resolving // what the function is. 
Nominal.EffectiveRecord recType = expandAsEffectiveRecord(expr.qualification.result()); if(recType != null) { Nominal fieldType = recType.field(expr.name); if(fieldType == null) { syntaxError(errorMessage(RECORD_MISSING_FIELD,expr.name),context,expr); } else if(!(fieldType instanceof Nominal.FunctionOrMethod)) { syntaxError("function or method type expected",context,expr); } Nominal.FunctionOrMethod funType = (Nominal.FunctionOrMethod) fieldType; Expr.FieldAccess ra = new Expr.FieldAccess(receiver, expr.name, expr.attributes()); ra.srcType = recType; if(funType instanceof Nominal.Method) { Expr.IndirectMethodCall nexpr = new Expr.IndirectMethodCall(ra,expr.arguments,expr.attributes()); // FIXME: loss of nominal information nexpr.methodType = (Nominal.Method) funType; return nexpr; } else { Expr.IndirectFunctionCall nexpr = new Expr.IndirectFunctionCall(ra,expr.arguments,expr.attributes()); // FIXME: loss of nominal information nexpr.functionType = (Nominal.Function) funType; return nexpr; } } else { // In this case, we definitely have an object type. checkIsSubtype(Type.T_REF_ANY,expr.qualification,context); Type.Reference procType = (Type.Reference) expr.qualification.result().raw(); exprArgs.add(0,receiver); paramTypes.add(0,receiver.result()); Pair<NameID, Nominal.FunctionOrMethod> p = resolveAsFunctionOrMethod( expr.name, paramTypes, context); // TODO: problem if not Nominal.Method! Expr.MethodCall r = new Expr.MethodCall(p.first(), null, exprArgs, expr.attributes()); r.methodType = (Nominal.Method) p.second(); return r; } } else { // no, function is not qualified ... so, it's either a local // variable or a function call the location of which we need to // identify. Nominal type = environment.get(expr.name); Nominal.FunctionOrMethod funType = type != null ? expandAsFunctionOrMethod(type) : null; // FIXME: bad idea to use instanceof Nominal.FunctionOrMethod here if(funType != null) { // ok, matching local variable of function type. 
List<Nominal> funTypeParams = funType.params(); if(paramTypes.size() != funTypeParams.size()) { syntaxError("insufficient arguments to function call",context,expr); } for (int i = 0; i != funTypeParams.size(); ++i) { Nominal fpt = funTypeParams.get(i); checkIsSubtype(fpt, paramTypes.get(i), exprArgs.get(i),context); } Expr.LocalVariable lv = new Expr.LocalVariable(expr.name,expr.attributes()); lv.type = type; if(funType instanceof Nominal.Method) { Expr.IndirectMethodCall nexpr = new Expr.IndirectMethodCall(lv,expr.arguments,expr.attributes()); nexpr.methodType = (Nominal.Method) funType; return nexpr; } else { Expr.IndirectFunctionCall nexpr = new Expr.IndirectFunctionCall(lv,expr.arguments,expr.attributes()); nexpr.functionType = (Nominal.Function) funType; return nexpr; } } else { // no matching local variable, so attempt to resolve as direct // call. Pair<NameID, Nominal.FunctionOrMethod> p = resolveAsFunctionOrMethod(expr.name, paramTypes, context); funType = p.second(); if(funType instanceof Nominal.Function) { Expr.FunctionCall mc = new Expr.FunctionCall(p.first(), null, exprArgs, expr.attributes()); mc.functionType = (Nominal.Function) funType; return mc; } else { Expr.MethodCall mc = new Expr.MethodCall(p.first(), null, exprArgs, expr.attributes()); mc.methodType = (Nominal.Method) funType; return mc; } } } } private Expr propagate(Expr.IndexOf expr, Environment environment, Context context) throws Exception { expr.src = propagate(expr.src,environment,context); expr.index = propagate(expr.index,environment,context); Nominal.EffectiveIndexible srcType = expandAsEffectiveMap(expr.src.result()); if(srcType == null) { syntaxError(errorMessage(INVALID_SET_OR_LIST_EXPRESSION), context, expr.src); } else { expr.srcType = srcType; } checkIsSubtype(srcType.key(),expr.index,context); return expr; } private Expr propagate(Expr.LengthOf expr, Environment environment, Context context) throws Exception { expr.src = propagate(expr.src,environment, context); Nominal 
srcType = expr.src.result(); Type rawSrcType = srcType.raw(); // First, check whether this is still only an abstract access and, in // such case, upgrade it to the appropriate access expression. if (rawSrcType instanceof Type.EffectiveCollection) { expr.srcType = expandAsEffectiveCollection(srcType); return expr; } else { syntaxError("found " + expr.src.result().nominal() + ", expected string, set, list or dictionary.", context, expr.src); } // Second, determine the expanded src type for this access expression // and check the key value. checkIsSubtype(Type.T_STRING,expr.src,context); return expr; } private Expr propagate(Expr.AbstractVariable expr, Environment environment, Context context) throws Exception { Nominal type = environment.get(expr.var); if (expr instanceof Expr.LocalVariable) { Expr.LocalVariable lv = (Expr.LocalVariable) expr; lv.type = type; return lv; } else if (type != null) { // yes, this is a local variable Expr.LocalVariable lv = new Expr.LocalVariable(expr.var, expr.attributes()); lv.type = type; return lv; } else { // This variable access may correspond to an external access. // Therefore, we must determine which module this // is, and update the tree accordingly. try { NameID nid = resolveAsName(expr.var, context); Expr.ConstantAccess ca = new Expr.ConstantAccess(null, expr.var, nid, expr.attributes()); ca.value = resolveAsConstant(nid); return ca; } catch (ResolveError err) { } // In this case, we may still be OK if this corresponds to an // explicit module or package access. try { Path.ID mid = resolveAsModule(expr.var, context); return new Expr.ModuleAccess(null, expr.var, mid, expr.attributes()); } catch (ResolveError err) { } Path.ID pid = Trie.ROOT.append(expr.var); if (builder.exists(pid)) { return new Expr.PackageAccess(null, expr.var, pid, expr.attributes()); } // ok, failed. 
syntaxError(errorMessage(UNKNOWN_VARIABLE), context, expr); return null; // deadcode } } private Expr propagate(Expr.Set expr, Environment environment, Context context) { Nominal element = Nominal.T_VOID; ArrayList<Expr> exprs = expr.arguments; for(int i=0;i!=exprs.size();++i) { Expr e = propagate(exprs.get(i),environment,context); Nominal t = e.result(); exprs.set(i,e); element = Nominal.Union(t,element); } expr.type = Nominal.Set(element,false); return expr; } private Expr propagate(Expr.List expr, Environment environment, Context context) { Nominal element = Nominal.T_VOID; ArrayList<Expr> exprs = expr.arguments; for(int i=0;i!=exprs.size();++i) { Expr e = propagate(exprs.get(i),environment,context); Nominal t = e.result(); exprs.set(i,e); element = Nominal.Union(t,element); } expr.type = Nominal.List(element,false); return expr; } private Expr propagate(Expr.Map expr, Environment environment, Context context) { Nominal keyType = Nominal.T_VOID; Nominal valueType = Nominal.T_VOID; ArrayList<Pair<Expr,Expr>> exprs = expr.pairs; for(int i=0;i!=exprs.size();++i) { Pair<Expr,Expr> p = exprs.get(i); Expr key = propagate(p.first(),environment,context); Expr value = propagate(p.second(),environment,context); Nominal kt = key.result(); Nominal vt = value.result(); exprs.set(i,new Pair<Expr,Expr>(key,value)); keyType = Nominal.Union(kt,keyType); valueType = Nominal.Union(vt,valueType); } expr.type = Nominal.Map(keyType,valueType); return expr; } private Expr propagate(Expr.Record expr, Environment environment, Context context) { HashMap<String,Expr> exprFields = expr.fields; HashMap<String,Nominal> fieldTypes = new HashMap<String,Nominal>(); ArrayList<String> fields = new ArrayList<String>(exprFields.keySet()); for(String field : fields) { Expr e = propagate(exprFields.get(field),environment,context); Nominal t = e.result(); exprFields.put(field,e); fieldTypes.put(field,t); } expr.type = Nominal.Record(false,fieldTypes); return expr; } private Expr propagate(Expr.Tuple 
expr, Environment environment, Context context) { ArrayList<Expr> exprFields = expr.fields; ArrayList<Nominal> fieldTypes = new ArrayList<Nominal>(); for(int i=0;i!=exprFields.size();++i) { Expr e = propagate(exprFields.get(i),environment,context); Nominal t = e.result(); exprFields.set(i,e); fieldTypes.add(t); } expr.type = Nominal.Tuple(fieldTypes); return expr; } private Expr propagate(Expr.SubList expr, Environment environment, Context context) throws Exception { expr.src = propagate(expr.src,environment,context); expr.start = propagate(expr.start,environment,context); expr.end = propagate(expr.end,environment,context); checkIsSubtype(Type.T_LIST_ANY,expr.src,context); checkIsSubtype(Type.T_INT,expr.start,context); checkIsSubtype(Type.T_INT,expr.end,context); expr.type = expandAsEffectiveList(expr.src.result()); if(expr.type == null) { // must be a substring return new Expr.SubString(expr.src,expr.start,expr.end,expr.attributes()); } return expr; } private Expr propagate(Expr.SubString expr, Environment environment, Context context) throws Exception { expr.src = propagate(expr.src,environment,context); expr.start = propagate(expr.start,environment,context); expr.end = propagate(expr.end,environment,context); checkIsSubtype(Type.T_STRING,expr.src,context); checkIsSubtype(Type.T_INT,expr.start,context); checkIsSubtype(Type.T_INT,expr.end,context); return expr; } private Expr propagate(Expr.AbstractDotAccess expr, Environment environment, Context context) throws Exception { if (expr instanceof Expr.PackageAccess || expr instanceof Expr.ModuleAccess) { // don't need to do anything in these cases. 
		// --- tail of a propagate(...) method whose header lies above this
		// chunk; code reproduced unchanged (comments only added). ---
		return expr;
	}
	Expr src = expr.src;
	if (src != null) {
		// resolve the source expression first
		src = propagate(expr.src, environment, context);
		expr.src = src;
	}

	if (expr instanceof Expr.FieldAccess) {
		return propagate((Expr.FieldAccess) expr, environment, context);
	} else if (expr instanceof Expr.ConstantAccess) {
		return propagate((Expr.ConstantAccess) expr, environment, context);
	} else if (src instanceof Expr.PackageAccess) {
		// either a package access, module access or constant access
		// This variable access may correspond to an external access.
		Expr.PackageAccess pa = (Expr.PackageAccess) src;
		Path.ID pid = pa.pid.append(expr.name);
		if (builder.exists(pid)) {
			return new Expr.PackageAccess(pa, expr.name, pid, expr.attributes());
		}
		// NOTE(review): mid is computed identically to pid above, and both
		// branches use builder.exists(..) — presumably one check should
		// distinguish packages from modules; confirm against upstream.
		Path.ID mid = pa.pid.append(expr.name);
		if (builder.exists(mid)) {
			return new Expr.ModuleAccess(pa, expr.name, mid, expr.attributes());
		} else {
			syntaxError(errorMessage(INVALID_PACKAGE_ACCESS), context, expr);
			return null; // deadcode
		}
	} else if (src instanceof Expr.ModuleAccess) {
		// must be a constant access
		Expr.ModuleAccess ma = (Expr.ModuleAccess) src;
		NameID nid = new NameID(ma.mid, expr.name);
		if (builder.isName(nid)) {
			Expr.ConstantAccess ca = new Expr.ConstantAccess(ma, expr.name,
					nid, expr.attributes());
			// eagerly resolve the constant's value
			ca.value = resolveAsConstant(nid);
			return ca;
		}
		syntaxError(errorMessage(INVALID_MODULE_ACCESS), context, expr);
		return null; // deadcode
	} else {
		// must be a RecordAccess
		Expr.FieldAccess ra = new Expr.FieldAccess(src, expr.name,
				expr.attributes());
		return propagate(ra, environment, context);
	}
}

/**
 * Type-check a field access expression; the source must have an effective
 * record type containing the named field.
 */
private Expr propagate(Expr.FieldAccess ra, Environment environment,
		Context context) throws Exception {
	ra.src = propagate(ra.src, environment, context);
	Nominal srcType = ra.src.result();
	Nominal.EffectiveRecord recType = expandAsEffectiveRecord(srcType);
	if (recType == null) {
		syntaxError(errorMessage(RECORD_TYPE_REQUIRED, srcType.raw()),
				context, ra);
	}
	Nominal fieldType = recType.field(ra.name);
	if (fieldType == null) {
		syntaxError(errorMessage(RECORD_MISSING_FIELD, ra.name), context, ra);
	}
	ra.srcType = recType;
	return ra;
}

/**
 * Constant accesses need no further work here; the value was already
 * resolved by the case for AbstractDotAccess.
 */
private Expr propagate(Expr.ConstantAccess expr, Environment environment,
		Context context) throws Exception {
	// we don't need to do anything here, since the value is already
	// resolved by case for AbstractDotAccess.
	return expr;
}

/**
 * Type-check a dereference expression; the operand must have an effective
 * reference type.
 */
private Expr propagate(Expr.Dereference expr, Environment environment,
		Context context) throws Exception {
	Expr src = propagate(expr.src, environment, context);
	expr.src = src;
	Nominal.Reference srcType = expandAsReference(src.result());
	if (srcType == null) {
		syntaxError("invalid reference expression", context, src);
	}
	expr.srcType = srcType;
	return expr;
}

/**
 * Type-check an allocation expression; its type is a reference to the
 * operand's type.
 */
private Expr propagate(Expr.New expr, Environment environment,
		Context context) {
	expr.expr = propagate(expr.expr, environment, context);
	expr.type = Nominal.Reference(expr.expr.result());
	return expr;
}

/**
 * Resolve the syntactic type embedded in a type-value expression.
 */
private Expr propagate(Expr.TypeVal expr, Environment environment,
		Context context) throws Exception {
	expr.type = resolveAsType(expr.unresolvedType, context);
	return expr;
}

// Resolve as Function or Method

/**
 * Responsible for determining the true type of a method or function being
 * invoked. To do this, it must find the function/method with the most
 * precise type that matches the argument types.
 *
 * @param nid
 * @param parameters
 * @return
 * @throws Exception
 */
private Nominal.FunctionOrMethod resolveAsFunctionOrMethod(NameID nid,
		List<Nominal> parameters, Context context) throws Exception {
	HashSet<Pair<NameID, Nominal.FunctionOrMethod>> candidates = new HashSet<Pair<NameID, Nominal.FunctionOrMethod>>();
	addCandidateFunctionsAndMethods(nid, parameters, candidates, context);
	return selectCandidateFunctionOrMethod(nid.name(), parameters,
			candidates, context).second();
}

/**
 * Resolve a function or method by name alone (argument types unknown).
 */
public Pair<NameID, Nominal.FunctionOrMethod> resolveAsFunctionOrMethod(
		String name, Context context) throws Exception {
	return resolveAsFunctionOrMethod(name, null, context);
}

/**
 * Resolve a function or method by name and (optionally) argument types,
 * searching every module reachable through the context's imports.
 */
public Pair<NameID, Nominal.FunctionOrMethod> resolveAsFunctionOrMethod(
		String name, List<Nominal> parameters, Context context)
		throws Exception {
	HashSet<Pair<NameID, Nominal.FunctionOrMethod>> candidates = new HashSet<Pair<NameID, Nominal.FunctionOrMethod>>();
	// first, try to find the matching message
	for (WhileyFile.Import imp : context.imports()) {
		String impName = imp.name;
		if (impName == null || impName.equals(name) || impName.equals("*")) {
			Trie filter = imp.filter;
			if (impName == null) {
				// import name is null, but it's possible that a module of
				// the given name exists, in which case any matching names
				// are automatically imported.
				filter = filter.parent().append(name);
			}
			for (Path.ID mid : builder.imports(filter)) {
				NameID nid = new NameID(mid, name);
				addCandidateFunctionsAndMethods(nid, parameters, candidates,
						context);
			}
		}
	}
	return selectCandidateFunctionOrMethod(name, parameters, candidates,
			context);
}

/**
 * Check whether each parameter of f2 is an implicit coercive subtype of
 * the corresponding parameter of f1; requires equal arity.
 */
private boolean paramSubtypes(Type.FunctionOrMethod f1,
		Type.FunctionOrMethod f2) {
	List<Type> f1_params = f1.params();
	List<Type> f2_params = f2.params();
	if (f1_params.size() == f2_params.size()) {
		for (int i = 0; i != f1_params.size(); ++i) {
			Type f1_param = f1_params.get(i);
			Type f2_param = f2_params.get(i);
			if (!Type.isImplicitCoerciveSubtype(f1_param, f2_param)) {
				return false;
			}
		}
		return true;
	}
	return false;
}

/**
 * As paramSubtypes, but additionally requires at least one parameter pair
 * to be unequal (i.e. a strict subtype relationship).
 */
private boolean paramStrictSubtypes(Type.FunctionOrMethod f1,
		Type.FunctionOrMethod f2) {
	List<Type> f1_params = f1.params();
	List<Type> f2_params = f2.params();
	if (f1_params.size() == f2_params.size()) {
		boolean allEqual = true;
		for (int i = 0; i != f1_params.size(); ++i) {
			Type f1_param = f1_params.get(i);
			Type f2_param = f2_params.get(i);
			if (!Type.isImplicitCoerciveSubtype(f1_param, f2_param)) {
				return false;
			}
			allEqual &= f1_param.equals(f2_param);
		}
		// This function returns true if the parameters are a strict
		// subtype. Therefore, if they are all equal it must return false.
		return !allEqual;
	}
	return false;
}

/**
 * Render a parameter list for error messages; "(...)" when the argument
 * types are unknown.
 */
private String parameterString(List<Nominal> paramTypes) {
	String paramStr = "(";
	boolean firstTime = true;
	if (paramTypes == null) {
		paramStr += "...";
	} else {
		for (Nominal t : paramTypes) {
			if (!firstTime) {
				paramStr += ",";
			}
			firstTime = false;
			paramStr += t.nominal();
		}
	}
	return paramStr + ")";
}

/**
 * Select the most precise candidate whose parameters accept the given
 * argument types; reports ambiguity and no-match errors, then checks the
 * winner's visibility from the calling context.
 */
private Pair<NameID, Nominal.FunctionOrMethod> selectCandidateFunctionOrMethod(
		String name, List<Nominal> parameters,
		Collection<Pair<NameID, Nominal.FunctionOrMethod>> candidates,
		Context context) throws Exception {

	List<Type> rawParameters;
	Type.Function target;

	if (parameters != null) {
		rawParameters = stripNominal(parameters);
		// synthetic "target" signature used purely for subtype comparison
		target = (Type.Function) Type.Function(Type.T_ANY, Type.T_ANY,
				rawParameters);
	} else {
		rawParameters = null;
		target = null;
	}

	NameID candidateID = null;
	Nominal.FunctionOrMethod candidateType = null;
	for (Pair<NameID, Nominal.FunctionOrMethod> p : candidates) {
		Nominal.FunctionOrMethod nft = p.second();
		Type.FunctionOrMethod ft = nft.raw();
		if (parameters == null || paramSubtypes(ft, target)) {
			// this is now a genuine candidate
			if (candidateType == null
					|| paramStrictSubtypes(candidateType.raw(), ft)) {
				candidateType = nft;
				candidateID = p.first();
			} else if (!paramStrictSubtypes(ft, candidateType.raw())) {
				// this is an ambiguous error
				String msg = name + parameterString(parameters)
						+ " is ambiguous";
				// FIXME: should report all ambiguous matches here
				msg += "\n\tfound: " + candidateID + " : "
						+ candidateType.nominal();
				msg += "\n\tfound: " + p.first() + " : "
						+ p.second().nominal();
				throw new ResolveError(msg);
			}
		}
	}

	if (candidateType == null) {
		// second, didn't find matching message so generate error message
		String msg = "no match for " + name + parameterString(parameters);

		for (Pair<NameID, Nominal.FunctionOrMethod> p : candidates) {
			msg += "\n\tfound: " + p.first() + " : " + p.second().nominal();
		}

		throw new ResolveError(msg);
	} else {
		// now check protection modifier
		WhileyFile wf =
				builder.getSourceFile(candidateID.module());
		if (wf != null) {
			if (wf != context.file()) {
				// candidate lives in a different source file; enforce its
				// visibility modifier
				for (WhileyFile.FunctionOrMethod d : wf.declarations(
						WhileyFile.FunctionOrMethod.class,
						candidateID.name())) {
					if (d.parameters.equals(candidateType.params())) {
						if (!d.isPublic() && !d.isProtected()) {
							String msg = candidateID.module() + "." + name
									+ parameterString(parameters)
									+ " is not visible";
							throw new ResolveError(msg);
						}
					}
				}
			}
		} else {
			// candidate comes from a compiled (binary) module
			WyilFile m = builder.getModule(candidateID.module());
			WyilFile.MethodDeclaration d = m.method(candidateID.name(),
					candidateType.raw());
			if (!d.isPublic() && !d.isProtected()) {
				String msg = candidateID.module() + "." + name
						+ parameterString(parameters) + " is not visible";
				throw new ResolveError(msg);
			}
		}
	}

	return new Pair<NameID, Nominal.FunctionOrMethod>(candidateID,
			candidateType);
}

/**
 * Add all functions/methods with the given name (and matching arity, when
 * the argument count is known) to the candidate set, looking first in
 * source files and otherwise in compiled modules.
 */
private void addCandidateFunctionsAndMethods(NameID nid,
		List<?> parameters,
		Collection<Pair<NameID, Nominal.FunctionOrMethod>> candidates,
		Context context) throws Exception {
	Path.ID mid = nid.module();

	// -1 signals "arity unknown — accept any"
	int nparams = parameters != null ? parameters.size() : -1;

	WhileyFile wf = builder.getSourceFile(mid);
	if (wf != null) {
		for (WhileyFile.FunctionOrMethod f : wf.declarations(
				WhileyFile.FunctionOrMethod.class, nid.name())) {
			if (nparams == -1 || f.parameters.size() == nparams) {
				Nominal.FunctionOrMethod ft = (Nominal.FunctionOrMethod) resolveAsType(
						f.unresolvedType(), f);
				candidates.add(new Pair<NameID, Nominal.FunctionOrMethod>(
						nid, ft));
			}
		}
	} else {
		try {
			WyilFile m = builder.getModule(mid);
			for (WyilFile.MethodDeclaration mm : m.methods()) {
				if ((mm.isFunction() || mm.isMethod())
						&& mm.name().equals(nid.name())
						&& (nparams == -1 || mm.type().params().size() == nparams)) {
					// FIXME: loss of nominal information
					Type.FunctionOrMethod t = (Type.FunctionOrMethod) mm
							.type();
					Nominal.FunctionOrMethod fom;
					if (t instanceof Type.Function) {
						Type.Function ft = (Type.Function) t;
						fom = new Nominal.Function(ft, ft);
					} else {
						Type.Method mt = (Type.Method) t;
						fom = new Nominal.Method(mt, mt);
					}
					candidates
							.add(new Pair<NameID, Nominal.FunctionOrMethod>(
									nid, fom));
				}
			}
		} catch (ResolveError e) {
			// deliberately ignored: an unresolvable module simply
			// contributes no candidates.
		}
	}
}

/** Extract the raw (structural) types from a list of nominal types. */
private static List<Type> stripNominal(List<Nominal> types) {
	ArrayList<Type> r = new ArrayList<Type>();
	for (Nominal t : types) {
		r.add(t.raw());
	}
	return r;
}

// ResolveAsName

/**
 * Resolve a simple name against the context's imports, returning the first
 * visible matching NameID.
 */
public NameID resolveAsName(String name, Context context) throws Exception {
	for (WhileyFile.Import imp : context.imports()) {
		String impName = imp.name;
		if (impName == null || impName.equals(name) || impName.equals("*")) {
			Trie filter = imp.filter;
			if (impName == null) {
				// import name is null, but it's possible that a module of
				// the given name exists, in which case any matching names
				// are automatically imported.
				filter = filter.parent().append(name);
			}
			for (Path.ID mid : builder.imports(filter)) {
				NameID nid = new NameID(mid, name);
				if (builder.isName(nid)) {
					// ok, we have found the name in question. But, is it
					// visible?
					if (isVisible(nid, context)) {
						return nid;
					} else {
						throw new ResolveError(nid + " is not visible");
					}
				}
			}
		}
	}
	throw new ResolveError("name not found: " + name);
}

/**
 * Resolve a (possibly qualified) name given as a list of components:
 * [name], [module, name] or [pkg..., module, name].
 */
public NameID resolveAsName(List<String> names, Context context)
		throws Exception {
	if (names.size() == 1) {
		return resolveAsName(names.get(0), context);
	} else if (names.size() == 2) {
		String name = names.get(1);
		Path.ID mid = resolveAsModule(names.get(0), context);
		NameID nid = new NameID(mid, name);
		if (builder.isName(nid)) {
			if (isVisible(nid, context)) {
				return nid;
			} else {
				throw new ResolveError(nid + " is not visible");
			}
		}
	} else {
		String name = names.get(names.size() - 1);
		String module = names.get(names.size() - 2);
		Path.ID pkg = Trie.ROOT;
		// everything before the final two components is the package path
		for (int i = 0; i != names.size() - 2; ++i) {
			pkg = pkg.append(names.get(i));
		}
		Path.ID mid = pkg.append(module);
		NameID nid = new NameID(mid, name);
		if (builder.isName(nid)) {
			if (isVisible(nid, context)) {
				return nid;
			} else {
				throw new ResolveError(nid + " is not visible");
			}
		}
	}

	// rebuild the dotted name purely for the error message
	String name = null;
	for (String n : names) {
		if (name != null) {
			name = name + "." + n;
		} else {
			name = n;
		}
	}
	throw new ResolveError("name not found: " + name);
}

/**
 * Resolve a module name against the context's imports.
 */
public Path.ID resolveAsModule(String name, Context context)
		throws Exception {

	for (WhileyFile.Import imp : context.imports()) {
		Trie filter = imp.filter;
		String last = filter.last();
		if (last.equals("*")) {
			// this is generic import, so narrow the filter.
			filter = filter.parent().append(name);
		} else if (!last.equals(name)) {
			continue; // skip as not relevant
		}

		for (Path.ID mid : builder.imports(filter)) {
			return mid;
		}
	}

	throw new ResolveError("module not found: " + name);
}

// ResolveAsType

/** Resolve a syntactic function type. */
public Nominal.Function resolveAsType(SyntacticType.Function t,
		Context context) {
	return (Nominal.Function) resolveAsType((SyntacticType) t, context);
}

/** Resolve a syntactic method type. */
public Nominal.Method resolveAsType(SyntacticType.Method t, Context context) {
	return (Nominal.Method) resolveAsType((SyntacticType) t, context);
}

/**
 * Resolve a syntactic type into a Nominal pairing the nominal form with
 * its fully-expanded raw form.
 */
public Nominal resolveAsType(SyntacticType type, Context context) {
	Type nominalType = resolveAsType(type, context, true, false);
	Type rawType = resolveAsType(type, context, false, false);
	return Nominal.construct(nominalType, rawType);
}

/**
 * As resolveAsType, but ignores any constraints (invariants) attached to
 * named types.
 */
public Nominal resolveAsUnconstrainedType(SyntacticType type,
		Context context) {
	Type nominalType = resolveAsType(type, context, true, true);
	Type rawType = resolveAsType(type, context, false, true);
	return Nominal.construct(nominalType, rawType);
}

/**
 * Resolve a syntactic type to a Type; primitives map directly, while
 * compound types are built as an automaton.
 */
private Type resolveAsType(SyntacticType t, Context context,
		boolean nominal, boolean unconstrained) {
	if (t instanceof SyntacticType.Primitive) {
		if (t instanceof SyntacticType.Any) {
			return Type.T_ANY;
		} else if (t instanceof SyntacticType.Void) {
			return Type.T_VOID;
		} else if (t instanceof SyntacticType.Null) {
			return Type.T_NULL;
		} else if (t instanceof SyntacticType.Bool) {
			return Type.T_BOOL;
		} else if (t instanceof SyntacticType.Byte) {
			return Type.T_BYTE;
		} else if (t instanceof SyntacticType.Char) {
			return Type.T_CHAR;
		} else if (t instanceof SyntacticType.Int) {
			return Type.T_INT;
		} else if (t instanceof SyntacticType.Real) {
			return Type.T_REAL;
		} else if (t instanceof SyntacticType.Strung) {
			return Type.T_STRING;
		} else {
			internalFailure("unrecognised type encountered ("
					+ t.getClass().getName() + ")", context, t);
			return null; // deadcode
		}
	} else {
		// compound type: build an automaton from scratch
		ArrayList<Automaton.State> states = new ArrayList<Automaton.State>();
		HashMap<NameID, Integer> roots = new HashMap<NameID, Integer>();
		resolveAsType(t, context, states, roots, nominal, unconstrained);
		return Type.construct(new Automaton(states));
	}
}

/**
 * Recursively translate a syntactic type into automaton states appended
 * onto states; roots caches named types already being expanded, which
 * terminates recursive definitions.
 *
 * @return the index of the state representing this type
 */
private int resolveAsType(SyntacticType type, Context context,
		ArrayList<Automaton.State> states, HashMap<NameID, Integer> roots,
		boolean nominal, boolean unconstrained) {

	if (type instanceof SyntacticType.Primitive) {
		return resolveAsType((SyntacticType.Primitive) type, context, states);
	}

	int myIndex = states.size();
	int myKind;
	int[] myChildren;
	Object myData = null;
	boolean myDeterministic = true;

	states.add(null); // reserve space for me

	if (type instanceof SyntacticType.List) {
		SyntacticType.List lt = (SyntacticType.List) type;
		myKind = Type.K_LIST;
		myChildren = new int[1];
		myChildren[0] = resolveAsType(lt.element, context, states, roots,
				nominal, unconstrained);
		myData = false;
	} else if (type instanceof SyntacticType.Set) {
		SyntacticType.Set st = (SyntacticType.Set) type;
		myKind = Type.K_SET;
		myChildren = new int[1];
		myChildren[0] = resolveAsType(st.element, context, states, roots,
				nominal, unconstrained);
		myData = false;
	} else if (type instanceof SyntacticType.Map) {
		SyntacticType.Map st = (SyntacticType.Map) type;
		myKind = Type.K_MAP;
		myChildren = new int[2];
		myChildren[0] = resolveAsType(st.key, context, states, roots,
				nominal, unconstrained);
		myChildren[1] = resolveAsType(st.value, context, states, roots,
				nominal, unconstrained);
	} else if (type instanceof SyntacticType.Record) {
		SyntacticType.Record tt = (SyntacticType.Record) type;
		HashMap<String, SyntacticType> ttTypes = tt.types;

		// fields are kept sorted so that structurally equal records
		// produce identical automata
		Type.Record.State fields = new Type.Record.State(tt.isOpen,
				ttTypes.keySet());
		Collections.sort(fields);
		myKind = Type.K_RECORD;
		myChildren = new int[fields.size()];
		for (int i = 0; i != fields.size(); ++i) {
			String field = fields.get(i);
			myChildren[i] = resolveAsType(ttTypes.get(field), context,
					states, roots, nominal, unconstrained);
		}
		myData = fields;
	} else if (type instanceof SyntacticType.Tuple) {
		SyntacticType.Tuple tt = (SyntacticType.Tuple) type;
		ArrayList<SyntacticType> ttTypes = tt.types;
		myKind = Type.K_TUPLE;
		myChildren = new int[ttTypes.size()];
		for (int i = 0; i != ttTypes.size(); ++i) {
			myChildren[i] = resolveAsType(ttTypes.get(i), context, states,
					roots, nominal, unconstrained);
		}
	} else if (type instanceof SyntacticType.Nominal) {
		// This case corresponds to a user-defined type. This will be
		// defined in some module (possibly ours), and we need to identify
		// what module that is here, and save it for future use.
		SyntacticType.Nominal dt = (SyntacticType.Nominal) type;
		NameID nid;
		try {
			nid = resolveAsName(dt.names, context);

			if (nominal) {
				myKind = Type.K_NOMINAL;
				myData = nid;
				myChildren = Automaton.NOCHILDREN;
			} else {
				// At this point, we're going to expand the given nominal
				// type. We're going to use resolveAsType(NameID,...) to do
				// this which will load the expanded type onto states at
				// the current point. Therefore, we need to remove the
				// initial null we loaded
				states.remove(myIndex);
				return resolveAsType(nid, states, roots, unconstrained);
			}
		} catch (ResolveError e) {
			syntaxError(e.getMessage(), context, dt, e);
			return 0; // dead-code
		} catch (SyntaxError e) {
			throw e;
		} catch (Throwable e) {
			internalFailure(e.getMessage(), context, dt, e);
			return 0; // dead-code
		}
	} else if (type instanceof SyntacticType.Negation) {
		SyntacticType.Negation ut = (SyntacticType.Negation) type;
		myKind = Type.K_NEGATION;
		myChildren = new int[1];
		myChildren[0] = resolveAsType(ut.element, context, states, roots,
				nominal, unconstrained);
	} else if (type instanceof SyntacticType.Union) {
		SyntacticType.Union ut = (SyntacticType.Union) type;
		ArrayList<SyntacticType.NonUnion> utTypes = ut.bounds;
		myKind = Type.K_UNION;
		myChildren = new int[utTypes.size()];
		for (int i = 0; i != utTypes.size(); ++i) {
			myChildren[i] = resolveAsType(utTypes.get(i), context, states,
					roots, nominal, unconstrained);
		}
		myDeterministic = false;
	} else if (type instanceof SyntacticType.Intersection) {
		internalFailure("intersection types not supported yet", context,
				type);
		return 0; // dead-code
	} else if (type instanceof SyntacticType.Reference) {
		SyntacticType.Reference ut = (SyntacticType.Reference) type;
		myKind = Type.K_REFERENCE;
		myChildren = new int[1];
		myChildren[0] = resolveAsType(ut.element, context, states, roots,
				nominal, unconstrained);
	} else {
		SyntacticType.FunctionOrMethod ut = (SyntacticType.FunctionOrMethod) type;
		ArrayList<SyntacticType> utParamTypes = ut.paramTypes;
		int start = 0;

		if (ut instanceof SyntacticType.Method) {
			myKind = Type.K_METHOD;
		} else {
			myKind = Type.K_FUNCTION;
		}
		// children layout: [return, throws, params...]
		myChildren = new int[start + 2 + utParamTypes.size()];
		myChildren[start++] = resolveAsType(ut.ret, context, states, roots,
				nominal, unconstrained);
		if (ut.throwType == null) {
			// this case indicates the user did not provide a throws clause.
			myChildren[start++] = resolveAsType(new SyntacticType.Void(),
					context, states, roots, nominal, unconstrained);
		} else {
			myChildren[start++] = resolveAsType(ut.throwType, context,
					states, roots, nominal, unconstrained);
		}
		for (SyntacticType pt : utParamTypes) {
			myChildren[start++] = resolveAsType(pt, context, states, roots,
					nominal, unconstrained);
		}
	}

	states.set(myIndex, new Automaton.State(myKind, myData,
			myDeterministic, myChildren));

	return myIndex;
}

/**
 * Expand a named type into automaton states; roots terminates recursive
 * (cyclic) type definitions.
 */
private int resolveAsType(NameID key, ArrayList<Automaton.State> states,
		HashMap<NameID, Integer> roots, boolean unconstrained)
		throws Exception {

	// First, check the various caches we have
	Integer root = roots.get(key);
	if (root != null) {
		return root;
	}

	// check whether this type is external or not
	WhileyFile wf = builder.getSourceFile(key.module());
	if (wf == null) {
		// indicates a non-local key which we can resolve immediately

		// FIXME: need to properly support unconstrained types here
		WyilFile mi = builder.getModule(key.module());
		WyilFile.TypeDeclaration td = mi.type(key.name());
		return append(td.type(), states);
	}

	WhileyFile.Type td = wf.typeDecl(key.name());
	if (td == null) {
		// not a type declaration — it may be a constant whose value is a
		// set, which can then be used as a type
		Type t = resolveAsConstant(key).type();
		if (t instanceof Type.Set) {
			if (unconstrained) {
				// crikey this is ugly
				int myIndex = states.size();
				int kind = Type.leafKind(Type.T_VOID);
				Object data = null;
				states.add(new Automaton.State(kind, data, true,
						Automaton.NOCHILDREN));
				return myIndex;
			}
			Type.Set ts = (Type.Set) t;
			return append(ts.element(), states);
		} else {
			throw new ResolveError("type not found: " + key);
		}
	}

	// following is needed to terminate any recursion
	roots.put(key, states.size());
	SyntacticType type = td.pattern.toSyntacticType();

	// now, expand the given type fully
	if (unconstrained && td.invariant != null) {
		// constrained type requested as unconstrained: collapse to void
		int myIndex = states.size();
		int kind = Type.leafKind(Type.T_VOID);
		Object data = null;
		states.add(new Automaton.State(kind, data, true,
				Automaton.NOCHILDREN));
		return myIndex;
	} else if (type instanceof Type.Leaf) {
		// FIXME: I believe this code is now redundant, and should be
		// removed or updated. The problem is that SyntacticType no longer
		// extends Type.
		int myIndex = states.size();
		int kind = Type.leafKind((Type.Leaf) type);
		Object data = Type.leafData((Type.Leaf) type);
		states.add(new Automaton.State(kind, data, true,
				Automaton.NOCHILDREN));
		return myIndex;
	} else {
		return resolveAsType(type, td, states, roots, false, unconstrained);
	}

	// TODO: performance can be improved here, but actually assigning the
	// constructed type into a cache of previously expanded types cache.
	// This is challenging, in the case that the type may not be complete at
	// this point. In particular, if it contains any back-links above this
	// index there could be an issue.
}

/**
 * Append a single automaton state for a primitive type, returning its
 * index.
 */
private int resolveAsType(SyntacticType.Primitive t, Context context,
		ArrayList<Automaton.State> states) {
	int myIndex = states.size();
	int kind;
	if (t instanceof SyntacticType.Any) {
		kind = Type.K_ANY;
	} else if (t instanceof SyntacticType.Void) {
		kind = Type.K_VOID;
	} else if (t instanceof SyntacticType.Null) {
		kind = Type.K_NULL;
	} else if (t instanceof SyntacticType.Bool) {
		kind = Type.K_BOOL;
	} else if (t instanceof SyntacticType.Byte) {
		kind = Type.K_BYTE;
	} else if (t instanceof SyntacticType.Char) {
		kind = Type.K_CHAR;
	} else if (t instanceof SyntacticType.Int) {
		kind = Type.K_INT;
	} else if (t instanceof SyntacticType.Real) {
		kind = Type.K_RATIONAL;
	} else if (t instanceof SyntacticType.Strung) {
		kind = Type.K_STRING;
	} else {
		internalFailure("unrecognised type encountered ("
				+ t.getClass().getName() + ")", context, t);
		return 0; // dead-code
	}
	states.add(new Automaton.State(kind, null, true, Automaton.NOCHILDREN));
	return myIndex;
}

/**
 * Append all states of an existing Type's automaton onto states, remapping
 * child indices to their new positions; returns the index of the root.
 */
private static int append(Type type, ArrayList<Automaton.State> states) {
	int myIndex = states.size();
	Automaton automaton = Type.destruct(type);
	Automaton.State[] tStates = automaton.states;
	int[] rmap = new int[tStates.length];
	for (int i = 0, j = myIndex; i != rmap.length; ++i, ++j) {
		rmap[i] = j;
	}
	for (Automaton.State state : tStates) {
		states.add(Automata.remap(state, rmap));
	}
	return myIndex;
}

// ResolveAsConstant

/** Resolve a named constant to its value. */
public Constant resolveAsConstant(NameID nid) throws Exception {
	return resolveAsConstant(nid, new HashSet<NameID>());
}

/** Type-check and then evaluate a constant expression in context. */
public Constant resolveAsConstant(Expr e, Context context) {
	e = propagate(e, new Environment(), context);
	return resolveAsConstant(e, context, new HashSet<NameID>());
}

/**
 * Resolve a named constant; visited tracks in-flight keys to detect cyclic
 * definitions, and results are memoised in constantCache.
 */
private Constant resolveAsConstant(NameID key, HashSet<NameID> visited)
		throws Exception {
	Constant result = constantCache.get(key);
	if (result != null) {
		return result;
	} else if (visited.contains(key)) {
		throw new ResolveError("cyclic constant definition encountered ("
				+ key + " -> " + key + ")");
	} else {
		visited.add(key);
	}

	WhileyFile wf = builder.getSourceFile(key.module());

	if (wf != null) {
		WhileyFile.Declaration decl = wf.declaration(key.name());
		if (decl instanceof WhileyFile.Constant) {
			WhileyFile.Constant cd = (WhileyFile.Constant) decl;
			if (cd.resolvedValue == null) {
				// lazily evaluate and cache on the declaration itself
				cd.constant = propagate(cd.constant, new Environment(), cd);
				cd.resolvedValue = resolveAsConstant(cd.constant, cd,
						visited);
			}
			result = cd.resolvedValue;
		} else {
			throw new ResolveError("unable to find constant " + key);
		}
	} else {
		WyilFile module = builder.getModule(key.module());
		WyilFile.ConstantDeclaration cd = module.constant(key.name());
		if (cd != null) {
			result = cd.constant();
		} else {
			throw new ResolveError("unable to find constant " + key);
		}
	}

	constantCache.put(key, result);
	return result;
}

/**
 * Evaluate a constant expression to a Constant value; reports a syntax
 * error for anything which is not constant-evaluable.
 */
private Constant resolveAsConstant(Expr expr, Context context,
		HashSet<NameID> visited) {
	try {
		if (expr instanceof Expr.Constant) {
			Expr.Constant c = (Expr.Constant) expr;
			return c.value;
		} else if (expr instanceof Expr.ConstantAccess) {
			Expr.ConstantAccess c = (Expr.ConstantAccess) expr;
			return resolveAsConstant(c.nid, visited);
		} else if (expr instanceof Expr.BinOp) {
			Expr.BinOp bop = (Expr.BinOp) expr;
			Constant lhs = resolveAsConstant(bop.lhs, context, visited);
			Constant rhs = resolveAsConstant(bop.rhs, context, visited);
			return evaluate(bop, lhs, rhs, context);
		} else if (expr instanceof Expr.UnOp) {
			Expr.UnOp uop = (Expr.UnOp) expr;
			Constant lhs = resolveAsConstant(uop.mhs, context, visited);
			return evaluate(uop, lhs, context);
		} else if (expr instanceof Expr.Set) {
			Expr.Set nop = (Expr.Set) expr;
			ArrayList<Constant> values = new ArrayList<Constant>();
			for (Expr arg : nop.arguments) {
				values.add(resolveAsConstant(arg, context, visited));
			}
			return Constant.V_SET(values);
		} else if (expr instanceof Expr.List) {
			Expr.List nop = (Expr.List) expr;
			ArrayList<Constant> values = new ArrayList<Constant>();
			for (Expr arg : nop.arguments) {
				values.add(resolveAsConstant(arg, context, visited));
			}
			return Constant.V_LIST(values);
		} else if (expr instanceof Expr.Record) {
			Expr.Record rg = (Expr.Record) expr;
			HashMap<String, Constant> values = new HashMap<String, Constant>();
			for (Map.Entry<String, Expr> e : rg.fields.entrySet()) {
				Constant v = resolveAsConstant(e.getValue(), context,
						visited);
				if (v == null) {
					return null;
				}
				values.put(e.getKey(), v);
			}
			return Constant.V_RECORD(values);
		} else if (expr instanceof Expr.Tuple) {
			Expr.Tuple rg = (Expr.Tuple) expr;
			ArrayList<Constant> values = new ArrayList<Constant>();
			for (Expr e : rg.fields) {
				Constant v = resolveAsConstant(e, context, visited);
				if (v == null) {
					return null;
				}
				values.add(v);
			}
			return Constant.V_TUPLE(values);
		} else if (expr instanceof Expr.Map) {
			Expr.Map rg = (Expr.Map) expr;
			HashSet<Pair<Constant, Constant>> values = new HashSet<Pair<Constant, Constant>>();
			for (Pair<Expr, Expr> e : rg.pairs) {
				Constant key = resolveAsConstant(e.first(), context,
						visited);
				Constant value = resolveAsConstant(e.second(), context,
						visited);
				if (key == null || value == null) {
					return null;
				}
				values.add(new Pair<Constant, Constant>(key, value));
			}
			return Constant.V_MAP(values);
		} else if (expr instanceof Expr.FunctionOrMethod) {
			// TODO: add support for proper lambdas
			Expr.FunctionOrMethod f = (Expr.FunctionOrMethod) expr;
			return Constant.V_LAMBDA(f.nid, f.type.raw());
		}
	} catch (SyntaxError.InternalFailure e) {
		throw e;
	} catch (ResolveError e) {
		syntaxError(e.getMessage(), context, expr, e);
	} catch (Throwable e) {
		internalFailure(e.getMessage(), context, expr, e);
	}

	internalFailure("unknown constant expression: "
			+ expr.getClass().getName(), context, expr);
	return null; // deadcode
}

// expandAsType

/**
 * View lhs as an effective set type, or null if its raw type is not a set.
 */
public Nominal.EffectiveSet expandAsEffectiveSet(Nominal lhs)
		throws Exception {
	Type raw = lhs.raw();
	if (raw instanceof Type.EffectiveSet) {
		Type nominal = expandOneLevel(lhs.nominal());
		if (!(nominal instanceof Type.EffectiveSet)) {
			nominal = raw; // discard nominal information
		}
		return (Nominal.EffectiveSet)
Nominal.construct(nominal, raw); } else { return null; } } public Nominal.EffectiveList expandAsEffectiveList(Nominal lhs) throws Exception { Type raw = lhs.raw(); if (raw instanceof Type.EffectiveList) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.EffectiveList)) { nominal = raw; // discard nominal information } return (Nominal.EffectiveList) Nominal.construct(nominal, raw); } else { return null; } } public Nominal.EffectiveCollection expandAsEffectiveCollection(Nominal lhs) throws Exception { Type raw = lhs.raw(); if (raw instanceof Type.EffectiveCollection) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.EffectiveCollection)) { nominal = raw; // discard nominal information } return (Nominal.EffectiveCollection) Nominal .construct(nominal, raw); } else { return null; } } public Nominal.EffectiveIndexible expandAsEffectiveMap(Nominal lhs) throws Exception { Type raw = lhs.raw(); if (raw instanceof Type.EffectiveIndexible) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.EffectiveIndexible)) { nominal = raw; // discard nominal information } return (Nominal.EffectiveIndexible) Nominal.construct(nominal, raw); } else { return null; } } public Nominal.EffectiveMap expandAsEffectiveDictionary(Nominal lhs) throws Exception { Type raw = lhs.raw(); if (raw instanceof Type.EffectiveMap) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.EffectiveMap)) { nominal = raw; // discard nominal information } return (Nominal.EffectiveMap) Nominal.construct(nominal, raw); } else { return null; } } public Nominal.EffectiveRecord expandAsEffectiveRecord(Nominal lhs) throws Exception { Type raw = lhs.raw(); if (raw instanceof Type.Record) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.Record)) { nominal = (Type) raw; // discard nominal information } return (Nominal.Record) Nominal.construct(nominal, raw); } else if (raw instanceof 
Type.UnionOfRecords) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.UnionOfRecords)) { nominal = (Type) raw; // discard nominal information } return (Nominal.UnionOfRecords) Nominal.construct(nominal, raw); } { return null; } } public Nominal.EffectiveTuple expandAsEffectiveTuple(Nominal lhs) throws Exception { Type raw = lhs.raw(); if (raw instanceof Type.EffectiveTuple) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.EffectiveTuple)) { nominal = raw; // discard nominal information } return (Nominal.EffectiveTuple) Nominal.construct(nominal, raw); } else { return null; } } public Nominal.Reference expandAsReference(Nominal lhs) throws Exception { Type.Reference raw = Type.effectiveReference(lhs.raw()); if (raw != null) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.Reference)) { nominal = raw; // discard nominal information } return (Nominal.Reference) Nominal.construct(nominal, raw); } else { return null; } } public Nominal.FunctionOrMethod expandAsFunctionOrMethod(Nominal lhs) throws Exception { Type.FunctionOrMethod raw = Type.effectiveFunctionOrMethod(lhs.raw()); if (raw != null) { Type nominal = expandOneLevel(lhs.nominal()); if (!(nominal instanceof Type.FunctionOrMethod)) { nominal = raw; // discard nominal information } return (Nominal.FunctionOrMethod) Nominal.construct(nominal, raw); } else { return null; } } private Type expandOneLevel(Type type) throws Exception { if (type instanceof Type.Nominal) { Type.Nominal nt = (Type.Nominal) type; NameID nid = nt.name(); Path.ID mid = nid.module(); WhileyFile wf = builder.getSourceFile(mid); Type r = null; if (wf != null) { WhileyFile.Declaration decl = wf.declaration(nid.name()); if (decl instanceof WhileyFile.Type) { WhileyFile.Type td = (WhileyFile.Type) decl; r = resolveAsType(td.pattern.toSyntacticType(), td) .nominal(); } } else { WyilFile m = builder.getModule(mid); WyilFile.TypeDeclaration td = m.type(nid.name()); 
if (td != null) { r = td.type(); } } if (r == null) { throw new ResolveError("unable to locate " + nid); } return expandOneLevel(r); } else if (type instanceof Type.Leaf || type instanceof Type.Reference || type instanceof Type.Tuple || type instanceof Type.Set || type instanceof Type.List || type instanceof Type.Map || type instanceof Type.Record || type instanceof Type.FunctionOrMethod || type instanceof Type.Negation) { return type; } else { Type.Union ut = (Type.Union) type; ArrayList<Type> bounds = new ArrayList<Type>(); for (Type b : ut.bounds()) { bounds.add(expandOneLevel(b)); } return Type.Union(bounds); } } // Constant Evaluation [this should not be located here?] private Constant evaluate(Expr.UnOp bop, Constant v, Context context) { switch (bop.op) { case NOT: if (v instanceof Constant.Bool) { Constant.Bool b = (Constant.Bool) v; return Constant.V_BOOL(!b.value); } syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, bop); break; case NEG: if (v instanceof Constant.Integer) { Constant.Integer b = (Constant.Integer) v; return Constant.V_INTEGER(b.value.negate()); } else if (v instanceof Constant.Decimal) { Constant.Decimal b = (Constant.Decimal) v; return Constant.V_DECIMAL(b.value.negate()); } syntaxError(errorMessage(INVALID_NUMERIC_EXPRESSION), context, bop); break; case INVERT: if (v instanceof Constant.Byte) { Constant.Byte b = (Constant.Byte) v; return Constant.V_BYTE((byte) ~b.value); } break; } syntaxError(errorMessage(INVALID_UNARY_EXPRESSION), context, bop); return null; } private Constant evaluate(Expr.BinOp bop, Constant v1, Constant v2, Context context) { Type v1_type = v1.type(); Type v2_type = v2.type(); Type lub = Type.Union(v1_type, v2_type); // FIXME: there are bugs here related to coercions. 
if (Type.isSubtype(Type.T_BOOL, lub)) { return evaluateBoolean(bop, (Constant.Bool) v1, (Constant.Bool) v2, context); } else if (Type.isSubtype(Type.T_INT, lub)) { return evaluate(bop, (Constant.Integer) v1, (Constant.Integer) v2, context); } else if (Type.isImplicitCoerciveSubtype(Type.T_REAL, v1_type) && Type.isImplicitCoerciveSubtype(Type.T_REAL, v1_type)) { if (v1 instanceof Constant.Integer) { Constant.Integer i1 = (Constant.Integer) v1; v1 = Constant.V_DECIMAL(new BigDecimal(i1.value)); } else if (v2 instanceof Constant.Integer) { Constant.Integer i2 = (Constant.Integer) v2; v2 = Constant.V_DECIMAL(new BigDecimal(i2.value)); } return evaluate(bop, (Constant.Decimal) v1, (Constant.Decimal) v2, context); } else if (Type.isSubtype(Type.T_LIST_ANY, lub)) { return evaluate(bop, (Constant.List) v1, (Constant.List) v2, context); } else if (Type.isSubtype(Type.T_SET_ANY, lub)) { return evaluate(bop, (Constant.Set) v1, (Constant.Set) v2, context); } syntaxError(errorMessage(INVALID_BINARY_EXPRESSION), context, bop); return null; } private Constant evaluateBoolean(Expr.BinOp bop, Constant.Bool v1, Constant.Bool v2, Context context) { switch (bop.op) { case AND: return Constant.V_BOOL(v1.value & v2.value); case OR: return Constant.V_BOOL(v1.value | v2.value); case XOR: return Constant.V_BOOL(v1.value ^ v2.value); } syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, bop); return null; } private Constant evaluate(Expr.BinOp bop, Constant.Integer v1, Constant.Integer v2, Context context) { switch (bop.op) { case ADD: return Constant.V_INTEGER(v1.value.add(v2.value)); case SUB: return Constant.V_INTEGER(v1.value.subtract(v2.value)); case MUL: return Constant.V_INTEGER(v1.value.multiply(v2.value)); case DIV: return Constant.V_INTEGER(v1.value.divide(v2.value)); case REM: return Constant.V_INTEGER(v1.value.remainder(v2.value)); } syntaxError(errorMessage(INVALID_NUMERIC_EXPRESSION), context, bop); return null; } private Constant evaluate(Expr.BinOp bop, 
Constant.Decimal v1, Constant.Decimal v2, Context context) {
    switch (bop.op) {
    case ADD:
        return Constant.V_DECIMAL(v1.value.add(v2.value));
    case SUB:
        return Constant.V_DECIMAL(v1.value.subtract(v2.value));
    case MUL:
        return Constant.V_DECIMAL(v1.value.multiply(v2.value));
    case DIV:
        // NOTE(review): BigDecimal.divide without a MathContext throws
        // ArithmeticException for non-terminating expansions (e.g. 1/3)
        // -- confirm this is the intended constant-folding behaviour.
        return Constant.V_DECIMAL(v1.value.divide(v2.value));
    }
    syntaxError(errorMessage(INVALID_NUMERIC_EXPRESSION), context, bop);
    return null;
}

/**
 * Evaluate a binary operator on two list constants. Only list append (ADD)
 * is supported; anything else is reported as a syntax error.
 */
private Constant evaluate(Expr.BinOp bop, Constant.List v1,
        Constant.List v2, Context context) {
    switch (bop.op) {
    case ADD:
        ArrayList<Constant> vals = new ArrayList<Constant>(v1.values);
        vals.addAll(v2.values);
        return Constant.V_LIST(vals);
    }
    syntaxError(errorMessage(INVALID_LIST_EXPRESSION), context, bop);
    return null;
}

/**
 * Evaluate a binary operator on two set constants: union, intersection or
 * difference (SUB). Anything else is reported as a syntax error.
 */
private Constant evaluate(Expr.BinOp bop, Constant.Set v1, Constant.Set v2,
        Context context) {
    switch (bop.op) {
    case UNION: {
        HashSet<Constant> vals = new HashSet<Constant>(v1.values);
        vals.addAll(v2.values);
        return Constant.V_SET(vals);
    }
    case INTERSECTION: {
        HashSet<Constant> vals = new HashSet<Constant>();
        for (Constant v : v1.values) {
            if (v2.values.contains(v)) {
                vals.add(v);
            }
        }
        return Constant.V_SET(vals);
    }
    case SUB: {
        // Set difference: elements of v1 not present in v2.
        HashSet<Constant> vals = new HashSet<Constant>();
        for (Constant v : v1.values) {
            if (!v2.values.contains(v)) {
                vals.add(v);
            }
        }
        return Constant.V_SET(vals);
    }
    }
    syntaxError(errorMessage(INVALID_SET_EXPRESSION), context, bop);
    return null;
}

/**
 * Determine whether a named declaration is visible from a given context.
 * Declarations in the same module are always visible; otherwise the
 * declaration's modifiers (public/protected) decide, when the source file
 * is available.
 *
 * @param nid
 *            Name of the declaration being accessed.
 * @param context
 *            Context from which the access occurs.
 * @return true if the declaration is visible from the given context.
 */
public boolean isVisible(NameID nid, Context context) throws Exception {
    Path.ID mid = nid.module();
    if (mid.equals(context.file().module)) {
        // Same module: always visible.
        return true;
    }
    WhileyFile wf = builder.getSourceFile(mid);
    if (wf != null) {
        WhileyFile.Declaration d = wf.declaration(nid.name());
        if (d instanceof WhileyFile.Constant) {
            WhileyFile.Constant td = (WhileyFile.Constant) d;
            return td.isPublic() || td.isProtected();
        } else if (d instanceof WhileyFile.Type) {
            WhileyFile.Type td = (WhileyFile.Type) d;
            return td.isPublic() || td.isProtected();
        }
        return false;
    } else {
        // we have to do the following basically because we don't load
        // modifiers properly out of jvm class files (at the moment).
        return true;
        // WyilFile w = builder.getModule(mid);
        // WyilFile.ConstDef c = w.constant(nid.name());
        // WyilFile.TypeDef t = w.type(nid.name());
        // if(c != null) {
        // return c.isPublic() || c.isProtected();
        // } else {
        // return t.isPublic() || t.isProtected();
    }
}

// =========================================================================
// Misc
// =========================================================================

// Check t1 :> t2
private void checkIsSubtype(Nominal t1, Nominal t2, SyntacticElement elem) {
    if (!Type.isImplicitCoerciveSubtype(t1.raw(), t2.raw())) {
        syntaxError(
                errorMessage(SUBTYPE_ERROR, t1.nominal(), t2.nominal()),
                filename, elem);
    }
}

private void checkIsSubtype(Nominal t1, Expr t2) {
    if (!Type.isImplicitCoerciveSubtype(t1.raw(), t2.result().raw())) {
        // We use the nominal type for error reporting, since this includes
        // more helpful names.
        syntaxError(
                errorMessage(SUBTYPE_ERROR, t1.nominal(), t2.result()
                        .nominal()), filename, t2);
    }
}

private void checkIsSubtype(Type t1, Expr t2) {
    if (!Type.isImplicitCoerciveSubtype(t1, t2.result().raw())) {
        // We use the nominal type for error reporting, since this includes
        // more helpful names.
        syntaxError(errorMessage(SUBTYPE_ERROR, t1, t2.result().nominal()),
                filename, t2);
    }
}

// Check t1 :> t2 (context-based overloads, used where a Context rather
// than the bare filename is available for error reporting)
private void checkIsSubtype(Nominal t1, Nominal t2, SyntacticElement elem,
        Context context) {
    if (!Type.isImplicitCoerciveSubtype(t1.raw(), t2.raw())) {
        syntaxError(
                errorMessage(SUBTYPE_ERROR, t1.nominal(), t2.nominal()),
                context, elem);
    }
}

private void checkIsSubtype(Nominal t1, Expr t2, Context context) {
    if (!Type.isImplicitCoerciveSubtype(t1.raw(), t2.result().raw())) {
        // We use the nominal type for error reporting, since this includes
        // more helpful names.
* * @param variable * Variable to return type for. * @return */ public Nominal get(String variable) { return map.get(variable); } /** * Check whether a given variable is declared within this environment. * * @param variable * @return */ public boolean containsKey(String variable) { return map.containsKey(variable); } /** * Return the set of declared variables in this environment (a.k.a the * domain). * * @return */ public Set<String> keySet() { return map.keySet(); } /** * Associate a type with a given variable. If that variable already had * a type, then this is overwritten. In the case that this environment * has a reference count of 1, then an "in place" update is performed. * Otherwise, a fresh copy of this environment is returned with the * given variable associated with the given type, whilst this * environment is unchanged. * * @param variable * Name of variable to be associated with given type * @param type * Type to associated with given variable * @return An updated version of the environment which contains the new * association. */ public Environment put(String variable, Nominal type) { if (count == 1) { map.put(variable, type); return this; } else { Environment nenv = new Environment(map); nenv.map.put(variable, type); count return nenv; } } /** * Copy all variable-type associations from the given environment into * this environment. The type of any variable already associated with a * type is overwritten. In the case that this environment has a * reference count of 1, then an "in place" update is performed. * Otherwise, a fresh copy of this environment is returned with the * given variables associated with the given types, whilst this * environment is unchanged. * * @param variable * Name of variable to be associated with given type * @param type * Type to associated with given variable * @return An updated version of the environment which contains all the * associations from the given environment. 
*/ public Environment putAll(Environment env) { if (count == 1) { HashMap<String, Nominal> envTypes = env.map; map.putAll(envTypes); return this; } else { Environment nenv = new Environment(map); HashMap<String, Nominal> envTypes = env.map; nenv.map.putAll(envTypes); count return nenv; } } /** * Remove a variable and any associated type from this environment. In * the case that this environment has a reference count of 1, then an * "in place" update is performed. Otherwise, a fresh copy of this * environment is returned with the given variable and any association * removed. * * @param variable * Name of variable to be removed from the environment * @return An updated version of the environment in which the given * variable no longer exists. */ public Environment remove(String key) { if (count == 1) { map.remove(key); return this; } else { Environment nenv = new Environment(map); nenv.map.remove(key); count return nenv; } } /** * Create a fresh copy of this environment. In fact, this operation * simply increments the reference count of this environment and returns * it. */ public Environment clone() { count++; return this; } /** * Decrease the reference count of this environment by one. */ public void free() { --count; } public String toString() { return map.toString(); } public int hashCode() { return map.hashCode(); } public boolean equals(Object o) { if (o instanceof Environment) { Environment r = (Environment) o; return map.equals(r.map); } return false; } } private static final Environment BOTTOM = new Environment(); private static final Environment join(Environment lhs, Environment rhs) { // first, need to check for the special bottom value case. if (lhs == BOTTOM) { return rhs; } else if (rhs == BOTTOM) { return lhs; } // ok, not bottom so compute intersection. 
lhs.free(); rhs.free(); Environment result = new Environment(); for (String key : lhs.keySet()) { if (rhs.containsKey(key)) { Nominal lhs_t = lhs.get(key); Nominal rhs_t = rhs.get(key); result.put(key, Nominal.Union(lhs_t, rhs_t)); } } return result; } }
package wyc.builder;

import static wyc.lang.WhileyFile.internalFailure;
import static wyc.lang.WhileyFile.syntaxError;
import static wycc.lang.SyntaxError.internalFailure;
import static wycc.lang.SyntaxError.syntaxError;
import static wyil.util.ErrorMessages.*;

import java.io.IOException;
import java.math.BigDecimal;
import java.util.*;

import wyautl_old.lang.Automata;
import wyautl_old.lang.Automaton;
import wybs.lang.*;
import wybs.util.*;
import wyc.lang.*;
import wyc.lang.WhileyFile.Context;
import wycc.lang.NameID;
import wycc.lang.SyntacticElement;
import wycc.lang.SyntaxError;
import wycc.util.Pair;
import wycc.util.ResolveError;
import wyfs.lang.Path;
import wyfs.util.Trie;
import wyil.lang.Constant;
import wyil.lang.Modifier;
import wyil.lang.Type;
import wyil.lang.WyilFile;

/**
 * Propagates type information in a <i>flow-sensitive</i> fashion from declared
 * parameter and return types through variable declarations and assigned
 * expressions, to determine types for all intermediate expressions and
 * variables. During this propagation, type checking is performed to ensure
 * types are used soundly. For example:
 *
 * <pre>
 * function sum([int] data) => int:
 *    int r = 0      // declared int type for r
 *    for v in data: // infers int type for v, based on type of data
 *        r = r + v  // infers int type for r + v, based on type of operands
 *    return r       // infers int type for return expression
 * </pre>
 *
 * <p>
 * The flow typing algorithm distinguishes between the <i>declared type</i> of a
 * variable and its <i>known type</i>. That is, the known type at any given
 * point is permitted to be more precise than the declared type (but not vice
 * versa). For example:
 * </p>
 *
 * <pre>
 * function id(int x) => int:
 *    return x
 *
 * function f(int y) => int:
 *    int|null x = y
 *    f(x)
 * </pre>
 *
 * <p>
 * The above example is considered type safe because the known type of
 * <code>x</code> at the function call is <code>int</code>, which differs from
 * its declared type (i.e. <code>int|null</code>).
 * </p>
 *
 * <p>
 * Loops present an interesting challenge for type propagation. Consider this
 * example:
 * </p>
 *
 * <pre>
 * function loopy(int max) => real:
 *    var i = 0
 *    while i < max:
 *        i = i + 0.5
 *    return i
 * </pre>
 *
 * <p>
 * On the first pass through the loop, variable <code>i</code> is inferred to
 * have type <code>int</code> (based on the type of the constant <code>0</code>
 * ). However, the add expression is inferred to have type <code>real</code>
 * (based on the type of the rhs) and, hence, the resulting type inferred for
 * <code>i</code> is <code>real</code>. At this point, the loop must be
 * reconsidered taking into account this updated type for <code>i</code>.
 * </p>
 *
 * <p>
 * The operation of the flow type checker splits into two stages:
 * </p>
 * <ul>
 * <li><b>Global Propagation.</b> During this stage, all named types are checked
 * and expanded.</li>
 * <li><b>Local Propagation.</b> During this stage, types are propagated through
 * statements and expressions (as above).</li>
 * </ul>
 *
 * <h3>References</h3>
 * <ul>
 * <li>
 * <p>
 * David J. Pearce and James Noble. Structural and Flow-Sensitive Types for
 * Whiley. Technical Report, Victoria University of Wellington, 2010.
 * </p>
 * </li>
 * </ul>
 *
 * @author David J. Pearce
 *
 */
public class FlowTypeChecker {

    // Provides access to the other source and binary files being compiled.
    private WhileyBuilder builder;

    // Name of the source file currently being checked; used when reporting
    // syntax errors and internal failures.
    private String filename;

    // The function or method declaration currently being propagated through;
    // needed by statement/expression propagation for, e.g., return types.
    private WhileyFile.FunctionOrMethod current;

    /**
     * The constant cache contains a cache of expanded constant values. This is
     * simply to prevent recomputing them every time.
 */
private final HashMap<NameID, Constant> constantCache = new HashMap<NameID, Constant>();

public FlowTypeChecker(WhileyBuilder builder) {
    this.builder = builder;
}

// =========================================================================
// WhileyFile(s)
// =========================================================================

/**
 * Flow-type every file in the given list in turn.
 */
public void propagate(List<WhileyFile> files) {
    for (WhileyFile wf : files) {
        propagate(wf);
    }
}

/**
 * Flow-type every declaration of a single source file. Resolution and
 * other unexpected errors are converted into syntax errors / internal
 * failures located at the offending declaration; syntax errors propagate
 * unchanged.
 */
public void propagate(WhileyFile wf) {
    this.filename = wf.filename;

    for (WhileyFile.Declaration decl : wf.declarations) {
        try {
            if (decl instanceof WhileyFile.FunctionOrMethod) {
                propagate((WhileyFile.FunctionOrMethod) decl);
            } else if (decl instanceof WhileyFile.Type) {
                propagate((WhileyFile.Type) decl);
            } else if (decl instanceof WhileyFile.Constant) {
                propagate((WhileyFile.Constant) decl);
            }
        } catch (ResolveError e) {
            syntaxError(errorMessage(RESOLUTION_ERROR, e.getMessage()),
                    filename, decl, e);
        } catch (SyntaxError e) {
            throw e;
        } catch (Throwable t) {
            internalFailure(t.getMessage(), filename, decl, t);
        }
    }
}

// =========================================================================
// Declarations
// =========================================================================

/**
 * Resolve types for a given type declaration. If an invariant expression is
 * given, then we have to propagate and resolve types throughout the
 * expression.
 *
 * @param td
 *            Type declaration to check.
 * @throws IOException
 */
public void propagate(WhileyFile.Type td) throws IOException {
    // First, resolve the declared syntactic type into the corresponding
    // nominal type.
    td.resolvedType = resolveAsType(td.pattern.toSyntacticType(), td);

    if (td.invariant != null) {
        // Second, an invariant expression is given, so propagate through
        // that.

        // Construct the appropriate typing environment
        Environment environment = new Environment();
        environment = addDeclaredVariables(td.pattern, environment, td);

        // Propagate type information through the constraint
        td.invariant = propagate(td.invariant, environment, td);
    }
}

/**
 * Propagate and check types for a given constant declaration.
 *
 * @param cd
 *            Constant declaration to check.
 * @throws IOException
 */
public void propagate(WhileyFile.Constant cd) throws IOException,
        ResolveError {
    NameID nid = new NameID(cd.file().module, cd.name());
    cd.resolvedValue = resolveAsConstant(nid);
}

/**
 * Propagate and check types for a given function or method declaration.
 *
 * @param d
 *            Function or method declaration to check.
 * @throws IOException
 */
public void propagate(WhileyFile.FunctionOrMethod d) throws IOException {
    this.current = d; // ugly

    Environment environment = new Environment();

    // Resolve the types of all parameters and construct an appropriate
    // environment for use in the flow-sensitive type propagation.
    for (WhileyFile.Parameter p : d.parameters) {
        environment = environment.put(p.name, resolveAsType(p.type, d));
    }

    // Resolve types for any preconditions (i.e. requires clauses) provided.
    final List<Expr> d_requires = d.requires;
    for (int i = 0; i != d_requires.size(); ++i) {
        Expr condition = d_requires.get(i);
        condition = propagate(condition, environment.clone(), d);
        d_requires.set(i, condition);
    }

    // Resolve types for any postconditions (i.e. ensures clauses) provided.
    final List<Expr> d_ensures = d.ensures;
    if (d_ensures.size() > 0) {
        // At least one ensures clause is provided; so, first, construct an
        // appropriate environment from the initial one created.
        Environment ensuresEnvironment = addDeclaredVariables(d.ret,
                environment.clone(), d);
        // Now, type check each ensures clause
        for (int i = 0; i != d_ensures.size(); ++i) {
            Expr condition = d_ensures.get(i);
            condition = propagate(condition, ensuresEnvironment, d);
            d_ensures.set(i, condition);
        }
    }

    // Resolve the overall type for the function or method.
    if (d instanceof WhileyFile.Function) {
        WhileyFile.Function f = (WhileyFile.Function) d;
        f.resolvedType = resolveAsType(f.unresolvedType(), d);
    } else {
        WhileyFile.Method m = (WhileyFile.Method) d;
        m.resolvedType = resolveAsType(m.unresolvedType(), d);
    }

    // Finally, propagate type information throughout all statements in the
    // function / method body.
    propagate(d.statements, environment);
}

// =========================================================================
// Blocks & Statements
// =========================================================================

/**
 * Propagate type information in a flow-sensitive fashion through a block of
 * statements, whilst type checking each statement and expression.
 *
 * @param block
 *            Block of statements to flow sensitively type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(ArrayList<Stmt> block, Environment environment) {
    for (int i = 0; i != block.size(); ++i) {
        Stmt stmt = block.get(i);
        if (stmt instanceof Expr) {
            // A bare expression used in statement position.
            block.set(i, (Stmt) propagate((Expr) stmt, environment,
                    current));
        } else {
            environment = propagate(stmt, environment);
        }
    }
    return environment;
}

/**
 * Propagate type information in a flow-sensitive fashion through a given
 * statement, whilst type checking it at the same time. For statements which
 * contain other statements (e.g. if, while, etc), then this will
 * recursively propagate type information through them as well.
 *
 * @param stmt
 *            Statement to flow-sensitively type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this statement
 * @return
 */
private Environment propagate(Stmt stmt, Environment environment) {
    try {
        // Dispatch on the concrete statement class.
        if (stmt instanceof Stmt.VariableDeclaration) {
            return propagate((Stmt.VariableDeclaration) stmt, environment);
        } else if (stmt instanceof Stmt.Assign) {
            return propagate((Stmt.Assign) stmt, environment);
        } else if (stmt instanceof Stmt.Return) {
            return propagate((Stmt.Return) stmt, environment);
        } else if (stmt instanceof Stmt.IfElse) {
            return propagate((Stmt.IfElse) stmt, environment);
        } else if (stmt instanceof Stmt.While) {
            return propagate((Stmt.While) stmt, environment);
        } else if (stmt instanceof Stmt.ForAll) {
            return propagate((Stmt.ForAll) stmt, environment);
        } else if (stmt instanceof Stmt.Switch) {
            return propagate((Stmt.Switch) stmt, environment);
        } else if (stmt instanceof Stmt.DoWhile) {
            return propagate((Stmt.DoWhile) stmt, environment);
        } else if (stmt instanceof Stmt.Break) {
            return propagate((Stmt.Break) stmt, environment);
        } else if (stmt instanceof Stmt.Throw) {
            return propagate((Stmt.Throw) stmt, environment);
        } else if (stmt instanceof Stmt.TryCatch) {
            return propagate((Stmt.TryCatch) stmt, environment);
        } else if (stmt instanceof Stmt.Assert) {
            return propagate((Stmt.Assert) stmt, environment);
        } else if (stmt instanceof Stmt.Assume) {
            return propagate((Stmt.Assume) stmt, environment);
        } else if (stmt instanceof Stmt.Debug) {
            return propagate((Stmt.Debug) stmt, environment);
        } else if (stmt instanceof Stmt.Skip) {
            return propagate((Stmt.Skip) stmt, environment);
        } else {
            internalFailure("unknown statement: "
                    + stmt.getClass().getName(), filename, stmt);
            return null; // deadcode
        }
    } catch (ResolveError e) {
        syntaxError(errorMessage(RESOLUTION_ERROR, e.getMessage()),
                filename, stmt, e);
        return null; // dead code
    } catch (SyntaxError e) {
        throw e;
    } catch (Throwable e) {
internalFailure(e.getMessage(), filename, stmt, e);
        return null; // dead code
    }
}

/**
 * Type check an assertion statement. This requires checking that the
 * expression being asserted is well-formed and has boolean type.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(Stmt.Assert stmt, Environment environment) {
    stmt.expr = propagate(stmt.expr, environment, current);
    checkIsSubtype(Type.T_BOOL, stmt.expr);
    return environment;
}

/**
 * Type check an assume statement. This requires checking that the
 * expression being assumed is well-formed and has boolean type.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(Stmt.Assume stmt, Environment environment) {
    stmt.expr = propagate(stmt.expr, environment, current);
    checkIsSubtype(Type.T_BOOL, stmt.expr);
    return environment;
}

/**
 * Type check a variable declaration statement. This must associate the
 * given variable with either its declared and actual type in the
 * environment. If no initialiser is given, then the actual type is the void
 * (since the variable is not yet defined). Otherwise, the actual type is
 * the type of the initialiser expression. Additionally, when an initialiser
 * is given we must check it is well-formed and that it is a subtype of the
 * declared type.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(Stmt.VariableDeclaration stmt,
        Environment environment) throws IOException {

    // First, resolve declared type
    stmt.type = resolveAsType(stmt.pattern.toSyntacticType(), current);

    // Second, resolve the type of the initialiser (if any) and check it
    // against the declared type.
    if (stmt.expr != null) {
        stmt.expr = propagate(stmt.expr, environment, current);
        checkIsSubtype(stmt.type, stmt.expr);
    }

    // Third, update environment accordingly. Observe that we can safely
    // assume any variable(s) are not already declared in the enclosing
    // scope because the parser checks this for us.
    environment = addDeclaredVariables(stmt.pattern, environment, current);

    // Done.
    return environment;
}

/**
 * Type check an assignment statement.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(Stmt.Assign stmt, Environment environment)
        throws IOException, ResolveError {

    Expr.LVal lhs = propagate(stmt.lhs, environment);
    Expr rhs = propagate(stmt.rhs, environment, current);

    if (lhs instanceof Expr.RationalLVal) {
        // represents a destructuring assignment
        Expr.RationalLVal tv = (Expr.RationalLVal) lhs;
        Pair<Expr.AssignedVariable, Expr.AssignedVariable> avs = inferAfterType(
                tv, rhs);
        environment = environment.put(avs.first().var,
                avs.first().afterType);
        environment = environment.put(avs.second().var,
                avs.second().afterType);
    } else if (lhs instanceof Expr.Tuple) {
        // represents a destructuring assignment
        Expr.Tuple tv = (Expr.Tuple) lhs;
        List<Expr.AssignedVariable> as = inferAfterType(tv, rhs);
        for (Expr.AssignedVariable av : as) {
            environment = environment.put(av.var, av.afterType);
        }
    } else {
        // represents element or field update
        Expr.AssignedVariable av = inferAfterType(lhs, rhs.result());
        environment = environment.put(av.var, av.afterType);
    }

    stmt.lhs = (Expr.LVal) lhs;
    stmt.rhs = rhs;

    return environment;
}

/**
 * Infer the "after" types of the two variables making up a rational
 * destructuring assignment (numerator/denominator); both become int.
 */
private Pair<Expr.AssignedVariable, Expr.AssignedVariable> inferAfterType(
        Expr.RationalLVal tv, Expr rhs) throws IOException {
    Nominal afterType = rhs.result();

    if (!Type.isImplicitCoerciveSubtype(Type.T_REAL, afterType.raw())) {
        syntaxError("real value expected, got " + afterType, filename, rhs);
    }

    if (tv.numerator instanceof Expr.AssignedVariable
            && tv.denominator instanceof Expr.AssignedVariable) {
        Expr.AssignedVariable lv = (Expr.AssignedVariable) tv.numerator;
        Expr.AssignedVariable rv = (Expr.AssignedVariable) tv.denominator;
        lv.type = Nominal.T_VOID;
        rv.type = Nominal.T_VOID;
        lv.afterType = Nominal.T_INT;
        rv.afterType = Nominal.T_INT;
        return new Pair<Expr.AssignedVariable, Expr.AssignedVariable>(lv,
                rv);
    } else {
        syntaxError(errorMessage(INVALID_TUPLE_LVAL), filename, tv);
        return null; // dead code
    }
}

/**
 * Infer the "after" types of the variables making up a tuple destructuring
 * assignment, one per element of the rhs tuple type.
 */
private List<Expr.AssignedVariable> inferAfterType(Expr.Tuple lv, Expr rhs)
        throws IOException, ResolveError {
    Nominal afterType = rhs.result();

    // First, check that the rhs is a subtype of the lhs
    checkIsSubtype(lv.type, afterType, rhs);
    Nominal.EffectiveTuple rhsType = expandAsEffectiveTuple(afterType);

    // Second, construct the list of assigned variables
    ArrayList<Expr.AssignedVariable> rs = new ArrayList<Expr.AssignedVariable>();
    for (int i = 0; i != rhsType.elements().size(); ++i) {
        Expr element = lv.fields.get(i);
        if (element instanceof Expr.LVal) {
            rs.add(inferAfterType((Expr.LVal) element, rhsType.element(i)));
        } else {
            syntaxError(errorMessage(INVALID_TUPLE_LVAL), filename,
                    element);
        }
    }

    // done
    return rs;
}

/**
 * Walk down an lval (variable, dereference, index, field access) to the
 * underlying assigned variable, computing the type that variable holds
 * after the assignment takes effect.
 */
private Expr.AssignedVariable inferAfterType(Expr.LVal lv,
        Nominal afterType) {
    if (lv instanceof Expr.AssignedVariable) {
        Expr.AssignedVariable v = (Expr.AssignedVariable) lv;
        v.afterType = afterType;
        return v;
    } else if (lv instanceof Expr.Dereference) {
        Expr.Dereference pa = (Expr.Dereference) lv;
        // The before and after types are the same since an assignment
        // through a reference does not change its type.
* * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.DoWhile stmt, Environment environment) { // Iterate to a fixed point Environment old = null; Environment tmp = null; Environment orig = environment.clone(); boolean firstTime = true; do { old = environment.clone(); if (!firstTime) { // don't do this on the first go around, to mimick how the // do-while loop works. tmp = propagateCondition(stmt.condition, true, old.clone(), current).second(); environment = join(orig.clone(), propagate(stmt.body, tmp)); } else { firstTime = false; environment = join(orig.clone(), propagate(stmt.body, old)); } old.free(); // hacky, but safe } while (!environment.equals(old)); List<Expr> stmt_invariants = stmt.invariants; for (int i = 0; i != stmt_invariants.size(); ++i) { Expr invariant = stmt_invariants.get(i); invariant = propagate(invariant, environment, current); stmt_invariants.set(i, invariant); checkIsSubtype(Type.T_BOOL, invariant); } Pair<Expr, Environment> p = propagateCondition(stmt.condition, false, environment, current); stmt.condition = p.first(); environment = p.second(); return environment; } /** * Type check a <code>for</code> statement. * * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.ForAll stmt, Environment environment) throws IOException, ResolveError { stmt.source = propagate(stmt.source, environment, current); Nominal.EffectiveCollection srcType = expandAsEffectiveCollection(stmt.source .result()); stmt.srcType = srcType; if (srcType == null) { syntaxError(errorMessage(INVALID_SET_OR_LIST_EXPRESSION), filename, stmt); } // At this point, the major task is to determine what the types for the // iteration variables declared in the for loop. More than one variable // is permitted in some cases. 
Nominal[] elementTypes = new Nominal[stmt.variables.size()]; if (elementTypes.length == 2 && srcType instanceof Nominal.EffectiveMap) { Nominal.EffectiveMap dt = (Nominal.EffectiveMap) srcType; elementTypes[0] = dt.key(); elementTypes[1] = dt.value(); } else { if (elementTypes.length == 1) { elementTypes[0] = srcType.element(); } else { syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED), filename, stmt); } } // Now, update the environment to include those declared variables ArrayList<String> stmtVariables = stmt.variables; for (int i = 0; i != elementTypes.length; ++i) { String var = stmtVariables.get(i); if (environment.containsKey(var)) { syntaxError(errorMessage(VARIABLE_ALREADY_DEFINED, var), filename, stmt); } environment = environment.put(var, elementTypes[i]); } // Iterate to a fixed point Environment old = null; Environment orig = environment.clone(); do { old = environment.clone(); environment = join(orig.clone(), propagate(stmt.body, old)); old.free(); // hacky, but safe } while (!environment.equals(old)); // Remove loop variables from the environment, since they are only // declared for the duration of the body but not beyond. for (int i = 0; i != elementTypes.length; ++i) { String var = stmtVariables.get(i); environment = environment.remove(var); } if (stmt.invariant != null) { stmt.invariant = propagate(stmt.invariant, environment, current); checkIsSubtype(Type.T_BOOL, stmt.invariant); } return environment; } private Environment propagate(Stmt.IfElse stmt, Environment environment) { // First, check condition and apply variable retypings. 
Pair<Expr, Environment> p1, p2; p1 = propagateCondition(stmt.condition, true, environment.clone(), current); p2 = propagateCondition(stmt.condition, false, environment, current); stmt.condition = p1.first(); Environment trueEnvironment = p1.second(); Environment falseEnvironment = p2.second(); // Second, update environments for true and false branches if (stmt.trueBranch != null && stmt.falseBranch != null) { trueEnvironment = propagate(stmt.trueBranch, trueEnvironment); falseEnvironment = propagate(stmt.falseBranch, falseEnvironment); } else if (stmt.trueBranch != null) { trueEnvironment = propagate(stmt.trueBranch, trueEnvironment); } else if (stmt.falseBranch != null) { trueEnvironment = environment; falseEnvironment = propagate(stmt.falseBranch, falseEnvironment); } // Finally, join results back together return join(trueEnvironment, falseEnvironment); } /** * Type check a <code>return</code> statement. If a return expression is * given, then we must check that this is well-formed and is a subtype of * the enclosing function or method's declared return type. The environment * after a return statement is "bottom" because that represents an * unreachable program point. * * @param stmt * Statement to type check * @param environment * Determines the type of all variables immediately going into * this block * @return */ private Environment propagate(Stmt.Return stmt, Environment environment) throws IOException { if (stmt.expr != null) { stmt.expr = propagate(stmt.expr, environment, current); Nominal rhs = stmt.expr.result(); checkIsSubtype(current.resolvedType().ret(), rhs, stmt.expr); } environment.free(); return BOTTOM; } /** * Type check a <code>skip</code> statement, which has no effect on the * environment. 
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(Stmt.Skip stmt, Environment environment) {
	// A skip is a no-op; the environment passes through unchanged.
	return environment;
}

/**
 * Type check a switch statement: resolve each case's constants, propagate
 * through each case body from a clone of the entry environment, and join
 * the per-case results. If no default case exists, the entry environment
 * is joined in as well, since control may fall straight through.
 */
private Environment propagate(Stmt.Switch stmt, Environment environment)
		throws IOException {
	stmt.expr = propagate(stmt.expr, environment, current);
	Environment finalEnv = null;
	boolean hasDefault = false;
	for (Stmt.Case c : stmt.cases) {
		// first, resolve the constants
		ArrayList<Constant> values = new ArrayList<Constant>();
		for (Expr e : c.expr) {
			values.add(resolveAsConstant(e, current));
		}
		c.constants = values;
		// second, propagate through the statements
		Environment localEnv = environment.clone();
		localEnv = propagate(c.stmts, localEnv);
		if (finalEnv == null) {
			finalEnv = localEnv;
		} else {
			finalEnv = join(finalEnv, localEnv);
		}
		// third, keep track of whether a default
		hasDefault |= c.expr.isEmpty();
	}
	if (!hasDefault) {
		// in this case, there is no default case in the switch. We must
		// therefore assume that there are values which will fall right
		// through the switch statement without hitting a case. Therefore,
		// we must include the original environment to account for this.
		finalEnv = join(finalEnv, environment);
	} else {
		environment.free();
	}
	return finalEnv;
}

/**
 * Type check a <code>throw</code> statement. We must check that the throw
 * expression is well-formed. The environment after a throw statement is
 * "bottom" because that represents an unreachable program point.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(Stmt.Throw stmt, Environment environment) {
	stmt.expr = propagate(stmt.expr, environment, current);
	return BOTTOM;
}

/**
 * Type check a try-catch statement.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(Stmt.TryCatch stmt, Environment environment)
		throws IOException {
	for (Stmt.Catch handler : stmt.catches) {
		// FIXME: need to deal with handler environments properly!
		try {
			// Resolve the declared exception type and bind the handler
			// variable to it in a local environment for the handler body.
			Nominal type = resolveAsType(handler.unresolvedType, current);
			handler.type = type;
			Environment local = environment.clone();
			local = local.put(handler.variable, type);
			propagate(handler.stmts, local);
			local.free();
		} catch (SyntaxError e) {
			throw e;
		} catch (Throwable t) {
			internalFailure(t.getMessage(), filename, handler, t);
		}
	}
	environment = propagate(stmt.body, environment);
	// need to do handlers here
	return environment;
}

/**
 * Type check a <code>while</code> statement.
 *
 * @param stmt
 *            Statement to type check
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this block
 * @return
 */
private Environment propagate(Stmt.While stmt, Environment environment) {
	// Iterate to a fixed point
	Environment old = null;
	Environment tmp = null;
	Environment orig = environment.clone();
	do {
		old = environment.clone();
		// Body is only reached when the condition holds, so propagate the
		// condition positively before entering the body.
		tmp = propagateCondition(stmt.condition, true, old.clone(), current)
				.second();
		environment = join(orig.clone(), propagate(stmt.body, tmp));
		old.free(); // hacky, but safe
	} while (!environment.equals(old));
	// Check each loop invariant is a boolean in the fixed-point environment.
	List<Expr> stmt_invariants = stmt.invariants;
	for (int i = 0; i != stmt_invariants.size(); ++i) {
		Expr invariant = stmt_invariants.get(i);
		invariant = propagate(invariant, environment, current);
		stmt_invariants.set(i, invariant);
		checkIsSubtype(Type.T_BOOL, invariant);
	}
	// On exit the condition is false, so propagate it negatively to obtain
	// the environment after the loop.
	Pair<Expr, Environment> p = propagateCondition(stmt.condition, false,
			environment, current);
	stmt.condition = p.first();
	environment = p.second();
	return environment;
}

// LVals

/**
 * Type check an expression being used on the left-hand side of an
 * assignment, producing the corresponding resolved LVal form.
 */
private Expr.LVal propagate(Expr.LVal lval, Environment environment) {
	try {
		if (lval instanceof Expr.AbstractVariable) {
			// Plain variable on the lhs: must already be declared.
			Expr.AbstractVariable av = (Expr.AbstractVariable) lval;
			Nominal p = environment.getCurrentType(av.var);
			if (p == null) {
				syntaxError(errorMessage(UNKNOWN_VARIABLE), filename, lval);
			}
			Expr.AssignedVariable lv = new Expr.AssignedVariable(av.var,
					av.attributes());
			lv.type = p;
			return lv;
		} else if (lval instanceof Expr.RationalLVal) {
			// Rational destructuring: check numerator and denominator lvals.
			Expr.RationalLVal av = (Expr.RationalLVal) lval;
			av.numerator = propagate(av.numerator, environment);
			av.denominator = propagate(av.denominator, environment);
			return av;
		} else if (lval instanceof Expr.Dereference) {
			// Assignment through a reference.
			Expr.Dereference pa = (Expr.Dereference) lval;
			Expr.LVal src = propagate((Expr.LVal) pa.src, environment);
			pa.src = src;
			pa.srcType = expandAsReference(src.result());
			return pa;
		} else if (lval instanceof Expr.IndexOf) {
			// this indicates either a list, string or dictionary update
			Expr.IndexOf ai = (Expr.IndexOf) lval;
			Expr.LVal src = propagate((Expr.LVal) ai.src, environment);
			Expr index = propagate(ai.index, environment, current);
			ai.src = src;
			ai.index = index;
			Nominal.EffectiveIndexible srcType = expandAsEffectiveMap(src
					.result());
			if (srcType == null) {
				syntaxError(errorMessage(INVALID_LVAL_EXPRESSION),
						filename, lval);
			}
			ai.srcType = srcType;
			return ai;
		} else if (lval instanceof Expr.FieldAccess) {
			// this indicates a record update
			Expr.FieldAccess ad = (Expr.FieldAccess) lval;
			Expr.LVal src = propagate((Expr.LVal) ad.src, environment);
			Expr.FieldAccess ra = new Expr.FieldAccess(src, ad.name,
					ad.attributes());
			Nominal.EffectiveRecord srcType = expandAsEffectiveRecord(src
					.result());
			if (srcType == null) {
				syntaxError(errorMessage(INVALID_LVAL_EXPRESSION),
						filename, lval);
			} else if (srcType.field(ra.name) == null) {
				// The record does not declare the named field.
				syntaxError(errorMessage(RECORD_MISSING_FIELD, ra.name),
						filename, lval);
			}
			ra.srcType = srcType;
			return ra;
		} else if (lval instanceof Expr.Tuple) {
			// this indicates a tuple update
			Expr.Tuple tup = (Expr.Tuple) lval;
			ArrayList<Nominal> elements = new ArrayList<Nominal>();
			for (int i = 0; i != tup.fields.size(); ++i) {
				Expr element = tup.fields.get(i);
				if (element instanceof Expr.LVal) {
					element = propagate((Expr.LVal) element, environment);
					tup.fields.set(i, element);
					elements.add(element.result());
				} else {
					// Every tuple component must itself be assignable.
					syntaxError(errorMessage(INVALID_LVAL_EXPRESSION),
							filename, lval);
				}
			}
			tup.type = Nominal.Tuple(elements);
			return tup;
		}
	} catch (SyntaxError e) {
		throw e;
	} catch (Throwable e) {
		internalFailure(e.getMessage(), filename, lval, e);
		return null; // dead code
	}
	internalFailure("unknown lval: " + lval.getClass().getName(), filename,
			lval);
	return null; // dead code
}

/**
 * The purpose of this method is to add variable names declared within a
 * type pattern. For example, as follows:
 *
 * <pre>
 * define tup as {int x, int y} where x < y
 * </pre>
 *
 * In this case, <code>x</code> and <code>y</code> are variable names
 * declared as part of the pattern.
 *
 * @param src
 * @param t
 * @param environment
 */
private Environment addDeclaredVariables(TypePattern pattern,
		Environment environment, WhileyFile.Context context) {
	if (pattern instanceof TypePattern.Union) {
		// FIXME: in principle, we can do better here. However, I leave this
		// unusual case for the future.
	} else if (pattern instanceof TypePattern.Intersection) {
		// FIXME: in principle, we can do better here. However, I leave this
		// unusual case for the future.
	} else if (pattern instanceof TypePattern.Rational) {
		// Rational pattern: recurse into numerator and denominator.
		TypePattern.Rational tp = (TypePattern.Rational) pattern;
		environment = addDeclaredVariables(tp.numerator, environment,
				context);
		environment = addDeclaredVariables(tp.denominator, environment,
				context);
	} else if (pattern instanceof TypePattern.Record) {
		TypePattern.Record tp = (TypePattern.Record) pattern;
		for (TypePattern element : tp.elements) {
			environment = addDeclaredVariables(element, environment,
					context);
		}
	} else if (pattern instanceof TypePattern.Tuple) {
		TypePattern.Tuple tp = (TypePattern.Tuple) pattern;
		for (TypePattern element : tp.elements) {
			environment = addDeclaredVariables(element, environment,
					context);
		}
	} else {
		// Leaf pattern: bind the declared variable (if any) to its type.
		TypePattern.Leaf lp = (TypePattern.Leaf) pattern;
		if (lp.var != null) {
			Nominal type = resolveAsType(pattern.toSyntacticType(),
					context);
			environment = environment.put(lp.var.var, type);
		}
	}
	return environment;
}

// Condition

/**
 * <p>
 * Propagate type information through an expression being used as a
 * condition, whilst checking it is well-typed at the same time. When used
 * as a condition (e.g. of an if-statement) an expression may update the
 * environment in accordance with any type tests used within. This is
 * important to ensure that variables are retyped in e.g. if-statements. For
 * example:
 * </p>
 *
 * <pre>
 * if x is int && x >= 0
 *    // x is int
 * else:
 *    //
 * </pre>
 * <p>
 * Here, the if-condition must update the type of x in the true branch, but
 * *cannot* update the type of x in the false branch.
 * </p>
 * <p>
 * To handle conditions on the false branch, this function uses a sign flag
 * rather than expanding them using DeMorgan's laws (for efficiency). When
 * determining type for the false branch, the sign flag is initially false.
 * This prevents falsely concluding that e.g. "x is int" holds in the false
 * branch.
 * </p>
 *
 * @param expr
 *            Condition expression to type check and propagate through
 * @param sign
 *            Indicates how expression should be treated. If true, then
 *            expression is treated "as is"; if false, then expression
 *            should be treated as negated
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this expression
 * @param context
 *            Enclosing context of this expression (e.g. type declaration,
 *            function declaration, etc)
 * @return
 */
public Pair<Expr, Environment> propagateCondition(Expr expr, boolean sign,
		Environment environment, Context context) {
	// Split up into the compound and non-compound forms.
	if (expr instanceof Expr.UnOp) {
		return propagateCondition((Expr.UnOp) expr, sign, environment,
				context);
	} else if (expr instanceof Expr.BinOp) {
		return propagateCondition((Expr.BinOp) expr, sign, environment,
				context);
	} else {
		// For non-compound forms, can just default back to the base rules
		// for general expressions.
		expr = propagate(expr, environment, context);
		checkIsSubtype(Type.T_BOOL, expr, context);
		return new Pair<Expr, Environment>(expr, environment);
	}
}

/**
 * <p>
 * Propagate type information through a unary expression being used as a
 * condition and, in fact, only logical not is syntactically valid here.
 * </p>
 *
 * @param expr
 *            Condition expression to type check and propagate through
 * @param sign
 *            Indicates how expression should be treated. If true, then
 *            expression is treated "as is"; if false, then expression
 *            should be treated as negated
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this expression
 * @param context
 *            Enclosing context of this expression (e.g.
 *            type declaration,
 *            function declaration, etc)
 * @return
 */
private Pair<Expr, Environment> propagateCondition(Expr.UnOp expr,
		boolean sign, Environment environment, Context context) {
	Expr.UnOp uop = (Expr.UnOp) expr;
	// Check whether we have logical not
	if (uop.op == Expr.UOp.NOT) {
		// Negation: recurse with the sign flipped rather than rewriting
		// via DeMorgan.
		Pair<Expr, Environment> p = propagateCondition(uop.mhs, !sign,
				environment, context);
		uop.mhs = p.first();
		checkIsSubtype(Type.T_BOOL, uop.mhs, context);
		uop.type = Nominal.T_BOOL;
		// NOTE(review): raw Pair constructed here (unchecked warning).
		return new Pair(uop, p.second());
	} else {
		// Nothing else other than logical not is valid at this point.
		syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context,
				expr);
		return null; // deadcode
	}
}

/**
 * <p>
 * Propagate type information through a binary expression being used as a
 * condition. In this case, only logical connectives ("&&", "||", "^") and
 * comparators (e.g. "==", "<=", etc) are permitted here.
 * </p>
 *
 * @param expr
 *            Condition expression to type check and propagate through
 * @param sign
 *            Indicates how expression should be treated. If true, then
 *            expression is treated "as is"; if false, then expression
 *            should be treated as negated
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this expression
 * @param context
 *            Enclosing context of this expression (e.g. type declaration,
 *            function declaration, etc)
 * @return
 */
private Pair<Expr, Environment> propagateCondition(Expr.BinOp bop,
		boolean sign, Environment environment, Context context) {
	Expr.BOp op = bop.op;
	// Split into the two broad cases: logical connectives and primitives.
	switch (op) {
	case AND:
	case OR:
	case XOR:
		return resolveNonLeafCondition(bop, sign, environment, context);
	case EQ:
	case NEQ:
	case LT:
	case LTEQ:
	case GT:
	case GTEQ:
	case ELEMENTOF:
	case SUBSET:
	case SUBSETEQ:
	case IS:
		return resolveLeafCondition(bop, sign, environment, context);
	default:
		syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, bop);
		return null; // dead code
	}
}

/**
 * <p>
 * Propagate type information through a binary expression being used as a
 * logical connective ("&&", "||", "^").
 * </p>
 *
 * @param bop
 *            Binary operator for this expression.
 * @param sign
 *            Indicates how expression should be treated. If true, then
 *            expression is treated "as is"; if false, then expression
 *            should be treated as negated
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this expression
 * @param context
 *            Enclosing context of this expression (e.g. type declaration,
 *            function declaration, etc)
 * @return
 */
private Pair<Expr, Environment> resolveNonLeafCondition(Expr.BinOp bop,
		boolean sign, Environment environment, Context context) {
	Expr.BOp op = bop.op;
	Pair<Expr, Environment> p;
	// "Follow on" means the rhs is evaluated under the lhs's environment:
	// true for a positive AND, or a negated OR (which behaves like AND).
	boolean followOn = (sign && op == Expr.BOp.AND)
			|| (!sign && op == Expr.BOp.OR);
	if (followOn) {
		// In this case, the environment feeds directly from the result of
		// propagating through the lhs into the rhs, and then into the
		// result of this expression. This means that updates to the
		// environment by either the lhs or rhs are visible outside of this
		// method.
		p = propagateCondition(bop.lhs, sign, environment.clone(), context);
		bop.lhs = p.first();
		p = propagateCondition(bop.rhs, sign, p.second(), context);
		bop.rhs = p.first();
		environment = p.second();
	} else {
		// We could do better here
		p = propagateCondition(bop.lhs, sign, environment.clone(), context);
		bop.lhs = p.first();
		Environment local = p.second();
		// Recompute the lhs assuming that it is false.
		// This is necessary to
		// generate the right environment going into the rhs, which is only
		// evaluated if the lhs is false. For example:
		// if(e is int && e > 0):
		// else:
		// In the false branch, we're determining the environment for
		// !(e is int && e > 0). This becomes !(e is int) || (e <= 0) where
		// on the rhs we require (e is int).
		p = propagateCondition(bop.lhs, !sign, environment.clone(), context);
		// Note, the following is intentional since we're specifically
		// considering the case where the lhs was false, and this case is
		// true.
		p = propagateCondition(bop.rhs, sign, p.second(), context);
		bop.rhs = p.first();
		environment = join(local, p.second());
	}
	checkIsSubtype(Type.T_BOOL, bop.lhs, context);
	checkIsSubtype(Type.T_BOOL, bop.rhs, context);
	bop.srcType = Nominal.T_BOOL;
	return new Pair<Expr, Environment>(bop, environment);
}

/**
 * <p>
 * Propagate type information through a binary expression being used as a
 * comparators (e.g. "==", "<=", etc).
 * </p>
 *
 * @param bop
 *            Binary operator for this expression.
 * @param sign
 *            Indicates how expression should be treated. If true, then
 *            expression is treated "as is"; if false, then expression
 *            should be treated as negated
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this expression
 * @param context
 *            Enclosing context of this expression (e.g. type declaration,
 *            function declaration, etc)
 * @return
 */
private Pair<Expr, Environment> resolveLeafCondition(Expr.BinOp bop,
		boolean sign, Environment environment, Context context) {
	Expr.BOp op = bop.op;
	Expr lhs = propagate(bop.lhs, environment, context);
	Expr rhs = propagate(bop.rhs, environment, context);
	bop.lhs = lhs;
	bop.rhs = rhs;
	Type lhsRawType = lhs.result().raw();
	Type rhsRawType = rhs.result().raw();
	switch (op) {
	case IS:
		// this one is slightly more difficult. In the special case that
		// we have a type constant on the right-hand side then we want
		// to check that it makes sense.
		// Otherwise, we just check that
		// it has type meta.
		if (rhs instanceof Expr.TypeVal) {
			// yes, right-hand side is a constant
			Expr.TypeVal tv = (Expr.TypeVal) rhs;
			Nominal unconstrainedTestType = resolveAsUnconstrainedType(
					tv.unresolvedType, context);
			/**
			 * Determine the types guaranteed to hold on the true and false
			 * branches respectively. We have to use the negated
			 * unconstrainedTestType for the false branch because only that
			 * is guaranteed if the test fails. For example:
			 *
			 * <pre>
			 * define nat as int where $ &gt;= 0
			 * define listnat as [int]|nat
			 *
			 * int f([int]|int x):
			 *    if x if listnat:
			 *       x : [int]|int
			 *       ...
			 *    else:
			 *       x : int
			 * </pre>
			 *
			 * The unconstrained type of listnat is [int], since nat is a
			 * constrained type.
			 */
			Nominal glbForFalseBranch = Nominal.intersect(lhs.result(),
					Nominal.Negation(unconstrainedTestType));
			Nominal glbForTrueBranch = Nominal.intersect(lhs.result(),
					tv.type);
			if (glbForFalseBranch.raw() == Type.T_VOID) {
				// DEFINITE TRUE CASE
				syntaxError(errorMessage(BRANCH_ALWAYS_TAKEN), context,
						bop);
			} else if (glbForTrueBranch.raw() == Type.T_VOID) {
				// DEFINITE FALSE CASE
				syntaxError(
						errorMessage(INCOMPARABLE_OPERANDS, lhsRawType,
								tv.type.raw()), context, bop);
			}
			// Finally, if the lhs is local variable then update its
			// type in the resulting environment.
			if (lhs instanceof Expr.LocalVariable) {
				Expr.LocalVariable lv = (Expr.LocalVariable) lhs;
				Nominal newType;
				if (sign) {
					newType = glbForTrueBranch;
				} else {
					newType = glbForFalseBranch;
				}
				environment = environment.put(lv.var, newType);
			}
		} else {
			// In this case, we can't update the type of the lhs since
			// we don't know anything about the rhs. It may be possible
			// to support bounds here in order to do that, but frankly
			// that's future work :)
			checkIsSubtype(Type.T_META, rhs, context);
		}
		bop.srcType = lhs.result();
		break;
	case ELEMENTOF:
		// Membership test: rhs must be a list or set whose element type is
		// comparable with the lhs.
		Type.EffectiveList listType = rhsRawType instanceof Type.EffectiveList ? (Type.EffectiveList) rhsRawType
				: null;
		Type.EffectiveSet setType = rhsRawType instanceof Type.EffectiveSet ? (Type.EffectiveSet) rhsRawType
				: null;
		if (listType != null
				&& !Type.isImplicitCoerciveSubtype(listType.element(),
						lhsRawType)) {
			syntaxError(
					errorMessage(INCOMPARABLE_OPERANDS, lhsRawType,
							listType.element()), context, bop);
		} else if (setType != null
				&& !Type.isImplicitCoerciveSubtype(setType.element(),
						lhsRawType)) {
			syntaxError(
					errorMessage(INCOMPARABLE_OPERANDS, lhsRawType,
							setType.element()), context, bop);
		}
		bop.srcType = rhs.result();
		break;
	case SUBSET:
	case SUBSETEQ:
	case LT:
	case LTEQ:
	case GTEQ:
	case GT:
		// Subset operators require sets; relational operators require reals.
		if (op == Expr.BOp.SUBSET || op == Expr.BOp.SUBSETEQ) {
			checkIsSubtype(Type.T_SET_ANY, lhs, context);
			checkIsSubtype(Type.T_SET_ANY, rhs, context);
		} else {
			checkIsSubtype(Type.T_REAL, lhs, context);
			checkIsSubtype(Type.T_REAL, rhs, context);
		}
		// The comparison's source type is whichever side is the supertype.
		if (Type.isImplicitCoerciveSubtype(lhsRawType, rhsRawType)) {
			bop.srcType = lhs.result();
		} else if (Type.isImplicitCoerciveSubtype(rhsRawType, lhsRawType)) {
			bop.srcType = rhs.result();
		} else {
			syntaxError(
					errorMessage(INCOMPARABLE_OPERANDS, lhsRawType,
							rhsRawType), context, bop);
			return null; // dead code
		}
		break;
	case NEQ:
		// following is a sneaky trick for the special case below
		sign = !sign;
		// deliberate fall-through into EQ
	case EQ:
		// first, check for special case of e.g. x != null.
		// This is then
		// treated the same as !(x is null)
		if (lhs instanceof Expr.LocalVariable
				&& rhs instanceof Expr.Constant
				&& ((Expr.Constant) rhs).value == Constant.V_NULL) {
			// bingo, special case
			Expr.LocalVariable lv = (Expr.LocalVariable) lhs;
			Nominal newType;
			Nominal glb = Nominal.intersect(lhs.result(), Nominal.T_NULL);
			if (glb.raw() == Type.T_VOID) {
				syntaxError(
						errorMessage(INCOMPARABLE_OPERANDS, lhs.result()
								.raw(), Type.T_NULL), context, bop);
				return null;
			} else if (sign) {
				newType = glb;
			} else {
				newType = Nominal
						.intersect(lhs.result(), Nominal.T_NOTNULL);
			}
			bop.srcType = lhs.result();
			environment = environment.put(lv.var, newType);
		} else {
			// handle general case
			if (Type.isImplicitCoerciveSubtype(lhsRawType, rhsRawType)) {
				bop.srcType = lhs.result();
			} else if (Type.isImplicitCoerciveSubtype(rhsRawType,
					lhsRawType)) {
				bop.srcType = rhs.result();
			} else {
				syntaxError(
						errorMessage(INCOMPARABLE_OPERANDS, lhsRawType,
								rhsRawType), context, bop);
				return null; // dead code
			}
		}
	}
	return new Pair<Expr, Environment>(bop, environment);
}

// Expressions

/**
 * Propagate types through a given expression, whilst checking that it is
 * well typed. In this case, any use of a runtime type test cannot effect
 * callers of this function.
 *
 * @param expr
 *            Expression to propagate types through.
 * @param environment
 *            Determines the type of all variables immediately going into
 *            this expression
 * @param context
 *            Enclosing context of this expression (e.g.
 *            type declaration,
 *            function declaration, etc)
 * @return
 */
public Expr propagate(Expr expr, Environment environment, Context context) {
	// Dispatch on the concrete expression class; each overload handles
	// one syntactic form.
	try {
		if (expr instanceof Expr.BinOp) {
			return propagate((Expr.BinOp) expr, environment, context);
		} else if (expr instanceof Expr.UnOp) {
			return propagate((Expr.UnOp) expr, environment, context);
		} else if (expr instanceof Expr.Comprehension) {
			return propagate((Expr.Comprehension) expr, environment,
					context);
		} else if (expr instanceof Expr.Constant) {
			return propagate((Expr.Constant) expr, environment, context);
		} else if (expr instanceof Expr.Cast) {
			return propagate((Expr.Cast) expr, environment, context);
		} else if (expr instanceof Expr.ConstantAccess) {
			return propagate((Expr.ConstantAccess) expr, environment,
					context);
		} else if (expr instanceof Expr.FieldAccess) {
			return propagate((Expr.FieldAccess) expr, environment, context);
		} else if (expr instanceof Expr.Map) {
			return propagate((Expr.Map) expr, environment, context);
		} else if (expr instanceof Expr.AbstractFunctionOrMethod) {
			return propagate((Expr.AbstractFunctionOrMethod) expr,
					environment, context);
		} else if (expr instanceof Expr.AbstractInvoke) {
			return propagate((Expr.AbstractInvoke) expr, environment,
					context);
		} else if (expr instanceof Expr.AbstractIndirectInvoke) {
			return propagate((Expr.AbstractIndirectInvoke) expr,
					environment, context);
		} else if (expr instanceof Expr.IndexOf) {
			return propagate((Expr.IndexOf) expr, environment, context);
		} else if (expr instanceof Expr.Lambda) {
			return propagate((Expr.Lambda) expr, environment, context);
		} else if (expr instanceof Expr.LengthOf) {
			return propagate((Expr.LengthOf) expr, environment, context);
		} else if (expr instanceof Expr.LocalVariable) {
			return propagate((Expr.LocalVariable) expr, environment,
					context);
		} else if (expr instanceof Expr.List) {
			return propagate((Expr.List) expr, environment, context);
		} else if (expr instanceof Expr.Set) {
			return propagate((Expr.Set) expr, environment, context);
		} else if (expr instanceof Expr.SubList) {
			return propagate((Expr.SubList) expr, environment, context);
		} else if (expr instanceof Expr.SubString) {
			return propagate((Expr.SubString) expr, environment, context);
		} else if (expr instanceof Expr.Dereference) {
			return propagate((Expr.Dereference) expr, environment, context);
		} else if (expr instanceof Expr.Record) {
			return propagate((Expr.Record) expr, environment, context);
		} else if (expr instanceof Expr.New) {
			return propagate((Expr.New) expr, environment, context);
		} else if (expr instanceof Expr.Tuple) {
			return propagate((Expr.Tuple) expr, environment, context);
		} else if (expr instanceof Expr.TypeVal) {
			return propagate((Expr.TypeVal) expr, environment, context);
		}
	} catch (ResolveError e) {
		syntaxError(errorMessage(RESOLUTION_ERROR, e.getMessage()),
				context, expr, e);
	} catch (SyntaxError e) {
		throw e;
	} catch (Throwable e) {
		internalFailure(e.getMessage(), context, expr, e);
		return null; // dead code
	}
	internalFailure("unknown expression: " + expr.getClass().getName(),
			context, expr);
	return null; // dead code
}

/**
 * Type check a binary expression. Conditional operators are delegated to
 * propagateCondition; the remainder are arithmetic/collection operators.
 */
private Expr propagate(Expr.BinOp expr, Environment environment,
		Context context) throws IOException {
	// TODO: split binop into arithmetic and conditional operators. This
	// would avoid the following case analysis since conditional binary
	// operators and arithmetic binary operators actually behave quite
	// differently.
	switch (expr.op) {
	case AND:
	case OR:
	case XOR:
	case EQ:
	case NEQ:
	case LT:
	case LTEQ:
	case GT:
	case GTEQ:
	case ELEMENTOF:
	case SUBSET:
	case SUBSETEQ:
	case IS:
		// Conditional operators are handled by the condition machinery.
		return propagateCondition(expr, true, environment, context).first();
	}
	Expr lhs = propagate(expr.lhs, environment, context);
	Expr rhs = propagate(expr.rhs, environment, context);
	expr.lhs = lhs;
	expr.rhs = rhs;
	Type lhsRawType = lhs.result().raw();
	Type rhsRawType = rhs.result().raw();
	// Classify the operand kinds to pick the operator family below.
	boolean lhs_set = Type.isImplicitCoerciveSubtype(Type.T_SET_ANY,
			lhsRawType);
	boolean rhs_set = Type.isImplicitCoerciveSubtype(Type.T_SET_ANY,
			rhsRawType);
	boolean lhs_list = Type.isImplicitCoerciveSubtype(Type.T_LIST_ANY,
			lhsRawType);
	boolean rhs_list = Type.isImplicitCoerciveSubtype(Type.T_LIST_ANY,
			rhsRawType);
	boolean lhs_str = Type.isSubtype(Type.T_STRING, lhsRawType);
	boolean rhs_str = Type.isSubtype(Type.T_STRING, rhsRawType);
	Type srcType;
	if (lhs_str || rhs_str) {
		// String operations.
		switch (expr.op) {
		case LISTAPPEND:
			// normalise list append on strings into string append
			expr.op = Expr.BOp.STRINGAPPEND;
			// deliberate fall-through
		case STRINGAPPEND:
			break;
		default:
			syntaxError("Invalid string operation: " + expr.op, context,
					expr);
		}
		srcType = Type.T_STRING;
	} else if (lhs_list && rhs_list) {
		// List operations.
		checkIsSubtype(Type.T_LIST_ANY, lhs, context);
		checkIsSubtype(Type.T_LIST_ANY, rhs, context);
		Type.EffectiveList lel = (Type.EffectiveList) lhsRawType;
		Type.EffectiveList rel = (Type.EffectiveList) rhsRawType;
		switch (expr.op) {
		case LISTAPPEND:
			srcType = Type.List(Type.Union(lel.element(), rel.element()),
					false);
			break;
		default:
			syntaxError("invalid list operation: " + expr.op, context,
					expr);
			return null; // dead-code
		}
	} else if (lhs_set && rhs_set) {
		// Set operations.
		checkIsSubtype(Type.T_SET_ANY, lhs, context);
		checkIsSubtype(Type.T_SET_ANY, rhs, context);
		// FIXME: something tells me there should be a function for doing
		// this. Perhaps effectiveSetType?
		if (lhs_list) {
			// Coerce a list operand up to a set.
			Type.EffectiveList tmp = (Type.EffectiveList) lhsRawType;
			lhsRawType = Type.Set(tmp.element(), false);
		}
		if (rhs_list) {
			Type.EffectiveList tmp = (Type.EffectiveList) rhsRawType;
			rhsRawType = Type.Set(tmp.element(), false);
		}
		// FIXME: loss of nominal information here
		Type.EffectiveSet ls = (Type.EffectiveSet) lhsRawType;
		Type.EffectiveSet rs = (Type.EffectiveSet) rhsRawType;
		switch (expr.op) {
		case ADD:
			// normalise '+' on sets into union
			expr.op = Expr.BOp.UNION;
			// deliberate fall-through
		case UNION:
			// TODO: this forces unnecessary coercions, which would be
			// good to remove.
			srcType = Type.Set(Type.Union(ls.element(), rs.element()),
					false);
			break;
		case BITWISEAND:
			// normalise '&' on sets into intersection
			expr.op = Expr.BOp.INTERSECTION;
			// deliberate fall-through
		case INTERSECTION:
			// FIXME: this is just plain weird.
			if (Type.isSubtype(lhsRawType, rhsRawType)) {
				srcType = rhsRawType;
			} else {
				srcType = lhsRawType;
			}
			break;
		case SUB:
			// normalise '-' on sets into difference
			expr.op = Expr.BOp.DIFFERENCE;
			// deliberate fall-through
		case DIFFERENCE:
			srcType = lhsRawType;
			break;
		default:
			syntaxError("invalid set operation: " + expr.op, context, expr);
			return null; // deadcode
		}
	} else {
		// Arithmetic and bitwise operations.
		switch (expr.op) {
		case IS:
		case AND:
		case OR:
		case XOR:
			return propagateCondition(expr, true, environment, context)
					.first();
		case BITWISEAND:
		case BITWISEOR:
		case BITWISEXOR:
			checkIsSubtype(Type.T_BYTE, lhs, context);
			checkIsSubtype(Type.T_BYTE, rhs, context);
			srcType = Type.T_BYTE;
			break;
		case LEFTSHIFT:
		case RIGHTSHIFT:
			checkIsSubtype(Type.T_BYTE, lhs, context);
			checkIsSubtype(Type.T_INT, rhs, context);
			srcType = Type.T_BYTE;
			break;
		case RANGE:
			checkIsSubtype(Type.T_INT, lhs, context);
			checkIsSubtype(Type.T_INT, rhs, context);
			srcType = Type.List(Type.T_INT, false);
			break;
		case REM:
			checkIsSubtype(Type.T_INT, lhs, context);
			checkIsSubtype(Type.T_INT, rhs, context);
			srcType = Type.T_INT;
			break;
		default:
			// all other operations go through here
			if (Type.isImplicitCoerciveSubtype(lhsRawType, rhsRawType)) {
				checkIsSubtype(Type.T_REAL, lhs, context);
				if (Type.isSubtype(Type.T_CHAR, lhsRawType)) {
					srcType = Type.T_INT;
				} else if (Type.isSubtype(Type.T_INT, lhsRawType)) {
					srcType = Type.T_INT;
				} else {
					srcType = Type.T_REAL;
				}
			} else {
				checkIsSubtype(Type.T_REAL, lhs, context);
				checkIsSubtype(Type.T_REAL, rhs, context);
				if (Type.isSubtype(Type.T_CHAR, rhsRawType)) {
					srcType = Type.T_INT;
				} else if (Type.isSubtype(Type.T_INT, rhsRawType)) {
					srcType = Type.T_INT;
				} else {
					srcType = Type.T_REAL;
				}
			}
		}
	}
	// FIXME: loss of nominal information
	expr.srcType = Nominal.construct(srcType, srcType);
	return expr;
}

/**
 * Type check a unary expression. Logical not is delegated to the condition
 * machinery; negation requires a real operand and invert a byte operand.
 */
private Expr propagate(Expr.UnOp expr, Environment environment,
		Context context) throws IOException {
	if (expr.op == Expr.UOp.NOT) {
		// hand off to special method for conditions
		return propagateCondition(expr, true, environment, context).first();
	}
	Expr src = propagate(expr.mhs, environment, context);
	expr.mhs = src;
	switch (expr.op) {
	case NEG:
		checkIsSubtype(Type.T_REAL, src, context);
		break;
	case INVERT:
		checkIsSubtype(Type.T_BYTE, src, context);
		break;
	default:
		internalFailure(
				"unknown operator: " + expr.op.getClass().getName(),
				context, expr);
	}
	expr.type = src.result();
	return expr;
}

/**
 * Type check a comprehension: each source must be a collection, and each
 * bound variable is added to a local environment used for subsequent
 * sources, the condition and the value expression.
 */
private Expr propagate(Expr.Comprehension expr, Environment environment,
		Context context) throws IOException, ResolveError {
	ArrayList<Pair<String, Expr>> sources = expr.sources;
	Environment local = environment.clone();
	for (int i = 0; i != sources.size(); ++i) {
		Pair<String, Expr> p = sources.get(i);
		Expr e = propagate(p.second(), local, context);
		p = new Pair<String, Expr>(p.first(), e);
		sources.set(i, p);
		Nominal type = e.result();
		Nominal.EffectiveCollection colType = expandAsEffectiveCollection(type);
		if (colType == null) {
			syntaxError(errorMessage(INVALID_SET_OR_LIST_EXPRESSION),
					context, e);
			return null; // dead code
		}
		// update environment for subsequent source expressions, the
		// condition and the value.
		local = local.put(p.first(), colType.element());
	}
	if (expr.condition != null) {
		expr.condition = propagate(expr.condition, local, context);
	}
	if (expr.cop == Expr.COp.SETCOMP || expr.cop == Expr.COp.LISTCOMP) {
		// Set/list comprehensions produce a collection of the value type.
		expr.value = propagate(expr.value, local, context);
		expr.type = Nominal.Set(expr.value.result(), false);
	} else {
		// Quantifier comprehensions (some/all/none) produce a boolean.
		expr.type = Nominal.T_BOOL;
	}
	local.free();
	return expr;
}

/**
 * Type check a constant expression; its type is already determined, so
 * there is nothing to do.
 */
private Expr propagate(Expr.Constant expr, Environment environment,
		Context context) {
	return expr;
}

/**
 * Type check a cast expression: the target type must be an explicitly
 * coercive supertype of the operand's type.
 */
private Expr propagate(Expr.Cast c, Environment environment,
		Context context) throws IOException {
	c.expr = propagate(c.expr, environment, context);
	c.type = resolveAsType(c.unresolvedType, context);
	Type from = c.expr.result().raw();
	Type to = c.type.raw();
	if (!Type.isExplicitCoerciveSubtype(to, from)) {
		syntaxError(errorMessage(SUBTYPE_ERROR, to, from), context, c);
	}
	return c;
}

/**
 * Type check a reference to a function or method (not a call), resolving
 * the name — optionally disambiguated by explicit parameter types — into a
 * concrete Expr.FunctionOrMethod.
 */
private Expr propagate(Expr.AbstractFunctionOrMethod expr,
		Environment environment, Context context) throws IOException,
		ResolveError {
	if (expr instanceof Expr.FunctionOrMethod) {
		// already resolved
		return expr;
	}
	Pair<NameID, Nominal.FunctionOrMethod> p;
	if (expr.paramTypes != null) {
		ArrayList<Nominal> paramTypes = new ArrayList<Nominal>();
		for (SyntacticType t : expr.paramTypes) {
			paramTypes.add(resolveAsType(t, context));
		}
		// FIXME: clearly a bug here in the case of message reference
		p = (Pair<NameID, Nominal.FunctionOrMethod>) resolveAsFunctionOrMethod(
				expr.name, paramTypes, context);
	} else {
		p = resolveAsFunctionOrMethod(expr.name, context);
	}
	expr = new Expr.FunctionOrMethod(p.first(), expr.paramTypes,
			expr.attributes());
	expr.type = p.second();
	return expr;
}

/**
 * Type check a lambda expression: bind each parameter into the environment,
 * propagate the body, and construct a function type if the body is pure or
 * a method type otherwise.
 */
private Expr propagate(Expr.Lambda expr, Environment environment,
		Context context) throws IOException {
	ArrayList<Type> rawTypes = new ArrayList<Type>();
	ArrayList<Type> nomTypes = new ArrayList<Type>();
	for (WhileyFile.Parameter p : expr.parameters) {
		Nominal n = resolveAsType(p.type, context);
		rawTypes.add(n.raw());
		nomTypes.add(n.nominal());
		// Now, update
		// the environment to include those declared variables
		String var = p.name();
		if (environment.containsKey(var)) {
			syntaxError(errorMessage(VARIABLE_ALREADY_DEFINED, var),
					context, p);
		}
		environment = environment.put(var, n);
	}
	expr.body = propagate(expr.body, environment, context);
	Type.FunctionOrMethod rawType;
	Type.FunctionOrMethod nomType;
	if (Exprs.isPure(expr.body, context)) {
		rawType = Type.Function(expr.body.result().raw(), Type.T_VOID,
				rawTypes);
		nomType = Type.Function(expr.body.result().nominal(), Type.T_VOID,
				nomTypes);
	} else {
		rawType = Type.Method(expr.body.result().raw(), Type.T_VOID,
				rawTypes);
		nomType = Type.Method(expr.body.result().nominal(), Type.T_VOID,
				nomTypes);
	}
	expr.type = (Nominal.FunctionOrMethod) Nominal.construct(nomType,
			rawType);
	return expr;
}

/**
 * Type check an indirect invocation: the source expression must have
 * function or method type, and each argument must be a subtype of the
 * corresponding parameter type.
 */
private Expr propagate(Expr.AbstractIndirectInvoke expr,
		Environment environment, Context context) throws IOException,
		ResolveError {
	expr.src = propagate(expr.src, environment, context);
	Nominal type = expr.src.result();
	if (!(type instanceof Nominal.FunctionOrMethod)) {
		syntaxError("function or method type expected", context, expr.src);
	}
	Nominal.FunctionOrMethod funType = (Nominal.FunctionOrMethod) type;
	List<Nominal> paramTypes = funType.params();
	ArrayList<Expr> exprArgs = expr.arguments;
	if (paramTypes.size() != exprArgs.size()) {
		syntaxError(
				"insufficient arguments for function or method invocation",
				context, expr.src);
	}
	for (int i = 0; i != exprArgs.size(); ++i) {
		Nominal pt = paramTypes.get(i);
		Expr arg = propagate(exprArgs.get(i), environment, context);
		checkIsSubtype(pt, arg, context);
		exprArgs.set(i, arg);
	}
	// Upgrade to the concrete indirect-call form matching the source type.
	if (funType instanceof Nominal.Function) {
		Expr.IndirectFunctionCall ifc = new Expr.IndirectFunctionCall(
				expr.src, exprArgs, expr.attributes());
		ifc.functionType = (Nominal.Function) funType;
		return ifc;
	} else {
		Expr.IndirectMethodCall imc = new Expr.IndirectMethodCall(expr.src,
				exprArgs, expr.attributes());
		imc.methodType = (Nominal.Method) funType;
		return imc;
	}
}

private // (declaration continues on the following source line)
Expr propagate(Expr.AbstractInvoke expr, Environment environment, Context context) throws IOException, ResolveError { // first, resolve through receiver and parameters. Path.ID qualification = expr.qualification; ArrayList<Expr> exprArgs = expr.arguments; ArrayList<Nominal> paramTypes = new ArrayList<Nominal>(); for (int i = 0; i != exprArgs.size(); ++i) { Expr arg = propagate(exprArgs.get(i), environment, context); exprArgs.set(i, arg); paramTypes.add(arg.result()); } // second, determine the fully qualified name of this function based on // the given function name and any supplied qualifications. ArrayList<String> qualifications = new ArrayList<String>(); if (expr.qualification != null) { for (String n : expr.qualification) { qualifications.add(n); } } qualifications.add(expr.name); NameID name = resolveAsName(qualifications, context); // third, lookup the appropriate function or method based on the name // and given parameter types. Nominal.FunctionOrMethod funType = resolveAsFunctionOrMethod(name, paramTypes, context); if (funType instanceof Nominal.Function) { Expr.FunctionCall r = new Expr.FunctionCall(name, qualification, exprArgs, expr.attributes()); r.functionType = (Nominal.Function) funType; return r; } else { Expr.MethodCall r = new Expr.MethodCall(name, qualification, exprArgs, expr.attributes()); r.methodType = (Nominal.Method) funType; return r; } } private Expr propagate(Expr.IndexOf expr, Environment environment, Context context) throws IOException, ResolveError { expr.src = propagate(expr.src, environment, context); expr.index = propagate(expr.index, environment, context); Nominal.EffectiveIndexible srcType = expandAsEffectiveMap(expr.src .result()); if (srcType == null) { syntaxError(errorMessage(INVALID_SET_OR_LIST_EXPRESSION), context, expr.src); } else { expr.srcType = srcType; } checkIsSubtype(srcType.key(), expr.index, context); return expr; } private Expr propagate(Expr.LengthOf expr, Environment environment, Context context) throws 
IOException, ResolveError { expr.src = propagate(expr.src, environment, context); Nominal srcType = expr.src.result(); Type rawSrcType = srcType.raw(); // First, check whether this is still only an abstract access and, in // such case, upgrade it to the appropriate access expression. if (rawSrcType instanceof Type.EffectiveCollection) { expr.srcType = expandAsEffectiveCollection(srcType); return expr; } else { syntaxError("found " + expr.src.result().nominal() + ", expected string, set, list or dictionary.", context, expr.src); } // Second, determine the expanded src type for this access expression // and check the key value. checkIsSubtype(Type.T_STRING, expr.src, context); return expr; } private Expr propagate(Expr.LocalVariable expr, Environment environment, Context context) throws IOException { Nominal type = environment.getCurrentType(expr.var); expr.type = type; return expr; } private Expr propagate(Expr.Set expr, Environment environment, Context context) { Nominal element = Nominal.T_VOID; ArrayList<Expr> exprs = expr.arguments; for (int i = 0; i != exprs.size(); ++i) { Expr e = propagate(exprs.get(i), environment, context); Nominal t = e.result(); exprs.set(i, e); element = Nominal.Union(t, element); } expr.type = Nominal.Set(element, false); return expr; } private Expr propagate(Expr.List expr, Environment environment, Context context) { Nominal element = Nominal.T_VOID; ArrayList<Expr> exprs = expr.arguments; for (int i = 0; i != exprs.size(); ++i) { Expr e = propagate(exprs.get(i), environment, context); Nominal t = e.result(); exprs.set(i, e); element = Nominal.Union(t, element); } expr.type = Nominal.List(element, false); return expr; } private Expr propagate(Expr.Map expr, Environment environment, Context context) { Nominal keyType = Nominal.T_VOID; Nominal valueType = Nominal.T_VOID; ArrayList<Pair<Expr, Expr>> exprs = expr.pairs; for (int i = 0; i != exprs.size(); ++i) { Pair<Expr, Expr> p = exprs.get(i); Expr key = propagate(p.first(), environment, 
context); Expr value = propagate(p.second(), environment, context); Nominal kt = key.result(); Nominal vt = value.result(); exprs.set(i, new Pair<Expr, Expr>(key, value)); keyType = Nominal.Union(kt, keyType); valueType = Nominal.Union(vt, valueType); } expr.type = Nominal.Map(keyType, valueType); return expr; } private Expr propagate(Expr.Record expr, Environment environment, Context context) { HashMap<String, Expr> exprFields = expr.fields; HashMap<String, Nominal> fieldTypes = new HashMap<String, Nominal>(); ArrayList<String> fields = new ArrayList<String>(exprFields.keySet()); for (String field : fields) { Expr e = propagate(exprFields.get(field), environment, context); Nominal t = e.result(); exprFields.put(field, e); fieldTypes.put(field, t); } expr.type = Nominal.Record(false, fieldTypes); return expr; } private Expr propagate(Expr.Tuple expr, Environment environment, Context context) { ArrayList<Expr> exprFields = expr.fields; ArrayList<Nominal> fieldTypes = new ArrayList<Nominal>(); for (int i = 0; i != exprFields.size(); ++i) { Expr e = propagate(exprFields.get(i), environment, context); Nominal t = e.result(); exprFields.set(i, e); fieldTypes.add(t); } expr.type = Nominal.Tuple(fieldTypes); return expr; } private Expr propagate(Expr.SubList expr, Environment environment, Context context) throws IOException, ResolveError { expr.src = propagate(expr.src, environment, context); expr.start = propagate(expr.start, environment, context); expr.end = propagate(expr.end, environment, context); checkIsSubtype(Type.T_LIST_ANY, expr.src, context); checkIsSubtype(Type.T_INT, expr.start, context); checkIsSubtype(Type.T_INT, expr.end, context); expr.type = expandAsEffectiveList(expr.src.result()); if (expr.type == null) { // must be a substring return new Expr.SubString(expr.src, expr.start, expr.end, expr.attributes()); } return expr; } private Expr propagate(Expr.SubString expr, Environment environment, Context context) throws IOException { expr.src = 
propagate(expr.src, environment, context); expr.start = propagate(expr.start, environment, context); expr.end = propagate(expr.end, environment, context); checkIsSubtype(Type.T_STRING, expr.src, context); checkIsSubtype(Type.T_INT, expr.start, context); checkIsSubtype(Type.T_INT, expr.end, context); return expr; } private Expr propagate(Expr.FieldAccess ra, Environment environment, Context context) throws IOException, ResolveError { ra.src = propagate(ra.src, environment, context); Nominal srcType = ra.src.result(); Nominal.EffectiveRecord recType = expandAsEffectiveRecord(srcType); if (recType == null) { syntaxError(errorMessage(RECORD_TYPE_REQUIRED, srcType.raw()), context, ra); } Nominal fieldType = recType.field(ra.name); if (fieldType == null) { syntaxError(errorMessage(RECORD_MISSING_FIELD, ra.name), context, ra); } ra.srcType = recType; return ra; } private Expr propagate(Expr.ConstantAccess expr, Environment environment, Context context) throws IOException { // First, determine the fully qualified name of this function based on // the given function name and any supplied qualifications. ArrayList<String> qualifications = new ArrayList<String>(); if (expr.qualification != null) { for (String n : expr.qualification) { qualifications.add(n); } } qualifications.add(expr.name); try { NameID name = resolveAsName(qualifications, context); // Second, determine the value of the constant. 
expr.value = resolveAsConstant(name); return expr; } catch (ResolveError e) { syntaxError(errorMessage(UNKNOWN_VARIABLE), context, expr); return null; } } private Expr propagate(Expr.Dereference expr, Environment environment, Context context) throws IOException, ResolveError { Expr src = propagate(expr.src, environment, context); expr.src = src; Nominal.Reference srcType = expandAsReference(src.result()); if (srcType == null) { syntaxError("invalid reference expression", context, src); } expr.srcType = srcType; return expr; } private Expr propagate(Expr.New expr, Environment environment, Context context) { expr.expr = propagate(expr.expr, environment, context); expr.type = Nominal.Reference(expr.expr.result()); return expr; } private Expr propagate(Expr.TypeVal expr, Environment environment, Context context) throws IOException { expr.type = resolveAsType(expr.unresolvedType, context); return expr; } // Resolve as Function or Method /** * Responsible for determining the true type of a method or function being * invoked. To do this, it must find the function/method with the most * precise type that matches the argument types. * * @param nid * @param parameters * @return * @throws IOException */ public Nominal.FunctionOrMethod resolveAsFunctionOrMethod(NameID nid, List<Nominal> parameters, Context context) throws IOException, ResolveError { // Thet set of candidate names and types for this function or method. HashSet<Pair<NameID, Nominal.FunctionOrMethod>> candidates = new HashSet<Pair<NameID, Nominal.FunctionOrMethod>>(); // First, add all valid candidates to the list without considering which // is the most precise. addCandidateFunctionsAndMethods(nid, parameters, candidates, context); // Second, add to narrow down the list of candidates to a single choice. // If this is impossible, then we have an ambiguity error. 
return selectCandidateFunctionOrMethod(nid.name(), parameters, candidates, context).second(); } public Pair<NameID, Nominal.FunctionOrMethod> resolveAsFunctionOrMethod( String name, Context context) throws IOException, ResolveError { return resolveAsFunctionOrMethod(name, null, context); } public Pair<NameID, Nominal.FunctionOrMethod> resolveAsFunctionOrMethod( String name, List<Nominal> parameters, Context context) throws IOException,ResolveError { HashSet<Pair<NameID, Nominal.FunctionOrMethod>> candidates = new HashSet<Pair<NameID, Nominal.FunctionOrMethod>>(); // first, try to find the matching message for (WhileyFile.Import imp : context.imports()) { String impName = imp.name; if (impName == null || impName.equals(name) || impName.equals("*")) { Trie filter = imp.filter; if (impName == null) { // import name is null, but it's possible that a module of // the given name exists, in which case any matching names // are automatically imported. filter = filter.parent().append(name); } for (Path.ID mid : builder.imports(filter)) { NameID nid = new NameID(mid, name); addCandidateFunctionsAndMethods(nid, parameters, candidates, context); } } } return selectCandidateFunctionOrMethod(name, parameters, candidates, context); } private boolean paramSubtypes(Type.FunctionOrMethod f1, Type.FunctionOrMethod f2) { List<Type> f1_params = f1.params(); List<Type> f2_params = f2.params(); if (f1_params.size() == f2_params.size()) { for (int i = 0; i != f1_params.size(); ++i) { Type f1_param = f1_params.get(i); Type f2_param = f2_params.get(i); if (!Type.isImplicitCoerciveSubtype(f1_param, f2_param)) { return false; } } return true; } return false; } private boolean paramStrictSubtypes(Type.FunctionOrMethod f1, Type.FunctionOrMethod f2) { List<Type> f1_params = f1.params(); List<Type> f2_params = f2.params(); if (f1_params.size() == f2_params.size()) { boolean allEqual = true; for (int i = 0; i != f1_params.size(); ++i) { Type f1_param = f1_params.get(i); Type f2_param = 
f2_params.get(i); if (!Type.isImplicitCoerciveSubtype(f1_param, f2_param)) { return false; } allEqual &= f1_param.equals(f2_param); } // This function returns true if the parameters are a strict // subtype. Therefore, if they are all equal it must return false. return !allEqual; } return false; } private String parameterString(List<Nominal> paramTypes) { String paramStr = "("; boolean firstTime = true; if (paramTypes == null) { paramStr += "..."; } else { for (Nominal t : paramTypes) { if (!firstTime) { paramStr += ","; } firstTime = false; paramStr += t.nominal(); } } return paramStr + ")"; } private Pair<NameID, Nominal.FunctionOrMethod> selectCandidateFunctionOrMethod( String name, List<Nominal> parameters, Collection<Pair<NameID, Nominal.FunctionOrMethod>> candidates, Context context) throws IOException,ResolveError { List<Type> rawParameters; Type.Function target; if (parameters != null) { rawParameters = stripNominal(parameters); target = (Type.Function) Type.Function(Type.T_ANY, Type.T_ANY, rawParameters); } else { rawParameters = null; target = null; } NameID candidateID = null; Nominal.FunctionOrMethod candidateType = null; for (Pair<NameID, Nominal.FunctionOrMethod> p : candidates) { Nominal.FunctionOrMethod nft = p.second(); Type.FunctionOrMethod ft = nft.raw(); if (parameters == null || paramSubtypes(ft, target)) { // this is now a genuine candidate if (candidateType == null || paramStrictSubtypes(candidateType.raw(), ft)) { candidateType = nft; candidateID = p.first(); } else if (!paramStrictSubtypes(ft, candidateType.raw())) { // this is an ambiguous error String msg = name + parameterString(parameters) + " is ambiguous"; // FIXME: should report all ambiguous matches here msg += "\n\tfound: " + candidateID + " : " + candidateType.nominal(); msg += "\n\tfound: " + p.first() + " : " + p.second().nominal(); throw new ResolveError(msg); } } } if (candidateType == null) { // second, didn't find matching message so generate error message String msg = "no 
match for " + name + parameterString(parameters); for (Pair<NameID, Nominal.FunctionOrMethod> p : candidates) { msg += "\n\tfound: " + p.first() + " : " + p.second().nominal(); } throw new ResolveError(msg); } else { // now check protection modifier WhileyFile wf = builder.getSourceFile(candidateID.module()); if (wf != null) { if (wf != context.file()) { for (WhileyFile.FunctionOrMethod d : wf.declarations( WhileyFile.FunctionOrMethod.class, candidateID.name())) { if (d.parameters.equals(candidateType.params())) { if (!d.hasModifier(Modifier.PUBLIC) && !d.hasModifier(Modifier.PROTECTED)) { String msg = candidateID.module() + "." + name + parameterString(parameters) + " is not visible"; throw new ResolveError(msg); } } } } } else { WyilFile m = builder.getModule(candidateID.module()); WyilFile.FunctionOrMethodDeclaration d = m.method( candidateID.name(), candidateType.raw()); if (!d.hasModifier(Modifier.PUBLIC) && !d.hasModifier(Modifier.PROTECTED)) { String msg = candidateID.module() + "." + name + parameterString(parameters) + " is not visible"; throw new ResolveError(msg); } } } return new Pair<NameID, Nominal.FunctionOrMethod>(candidateID, candidateType); } private void addCandidateFunctionsAndMethods(NameID nid, List<?> parameters, Collection<Pair<NameID, Nominal.FunctionOrMethod>> candidates, Context context) throws IOException { Path.ID mid = nid.module(); int nparams = parameters != null ? 
parameters.size() : -1; WhileyFile wf = builder.getSourceFile(mid); if (wf != null) { for (WhileyFile.FunctionOrMethod f : wf.declarations( WhileyFile.FunctionOrMethod.class, nid.name())) { if (nparams == -1 || f.parameters.size() == nparams) { Nominal.FunctionOrMethod ft = (Nominal.FunctionOrMethod) resolveAsType( f.unresolvedType(), f); candidates.add(new Pair<NameID, Nominal.FunctionOrMethod>( nid, ft)); } } } else { WyilFile m = builder.getModule(mid); for (WyilFile.FunctionOrMethodDeclaration mm : m.methods()) { if ((mm.isFunction() || mm.isMethod()) && mm.name().equals(nid.name()) && (nparams == -1 || mm.type().params().size() == nparams)) { // FIXME: loss of nominal information // FIXME: loss of visibility information (e.g if this // function is declared in terms of a protected type) Type.FunctionOrMethod t = (Type.FunctionOrMethod) mm .type(); Nominal.FunctionOrMethod fom; if (t instanceof Type.Function) { Type.Function ft = (Type.Function) t; fom = new Nominal.Function(ft, ft); } else { Type.Method mt = (Type.Method) t; fom = new Nominal.Method(mt, mt); } candidates .add(new Pair<NameID, Nominal.FunctionOrMethod>( nid, fom)); } } } } private static List<Type> stripNominal(List<Nominal> types) { ArrayList<Type> r = new ArrayList<Type>(); for (Nominal t : types) { r.add(t.raw()); } return r; } // ResolveAsName public NameID resolveAsName(String name, Context context) throws IOException, ResolveError { for (WhileyFile.Import imp : context.imports()) { String impName = imp.name; if (impName == null || impName.equals(name) || impName.equals("*")) { Trie filter = imp.filter; if (impName == null) { // import name is null, but it's possible that a module of // the given name exists, in which case any matching names // are automatically imported. filter = filter.parent().append(name); } for (Path.ID mid : builder.imports(filter)) { NameID nid = new NameID(mid, name); if (builder.isName(nid)) { // ok, we have found the name in question. But, is it // visible? 
if (isNameVisible(nid, context)) { return nid; } else { throw new ResolveError(nid + " is not visible"); } } } } } throw new ResolveError("name not found: " + name); } public NameID resolveAsName(List<String> names, Context context) throws IOException, ResolveError { if (names.size() == 1) { return resolveAsName(names.get(0), context); } else if (names.size() == 2) { String name = names.get(1); Path.ID mid = resolveAsModule(names.get(0), context); NameID nid = new NameID(mid, name); if (builder.isName(nid)) { if (isNameVisible(nid, context)) { return nid; } else { throw new ResolveError(nid + " is not visible"); } } } else { String name = names.get(names.size() - 1); String module = names.get(names.size() - 2); Path.ID pkg = Trie.ROOT; for (int i = 0; i != names.size() - 2; ++i) { pkg = pkg.append(names.get(i)); } Path.ID mid = pkg.append(module); NameID nid = new NameID(mid, name); if (builder.isName(nid)) { if (isNameVisible(nid, context)) { return nid; } else { throw new ResolveError(nid + " is not visible"); } } } String name = null; for (String n : names) { if (name != null) { name = name + "." + n; } else { name = n; } } throw new ResolveError("name not found: " + name); } public Path.ID resolveAsModule(String name, Context context) throws IOException, ResolveError { for (WhileyFile.Import imp : context.imports()) { Trie filter = imp.filter; String last = filter.last(); if (last.equals("*")) { // this is generic import, so narrow the filter. 
filter = filter.parent().append(name); } else if (!last.equals(name)) { continue; // skip as not relevant } for (Path.ID mid : builder.imports(filter)) { return mid; } } throw new ResolveError("module not found: " + name); } // ResolveAsType public Nominal.Function resolveAsType(SyntacticType.Function t, Context context) { return (Nominal.Function) resolveAsType((SyntacticType) t, context); } public Nominal.Method resolveAsType(SyntacticType.Method t, Context context) { return (Nominal.Method) resolveAsType((SyntacticType) t, context); } public Nominal resolveAsType(SyntacticType type, Context context) { Type nominalType = resolveAsType(type, context, true, false); Type rawType = resolveAsType(type, context, false, false); return Nominal.construct(nominalType, rawType); } public Nominal resolveAsUnconstrainedType(SyntacticType type, Context context) { Type nominalType = resolveAsType(type, context, true, true); Type rawType = resolveAsType(type, context, false, true); return Nominal.construct(nominalType, rawType); } private Type resolveAsType(SyntacticType t, Context context, boolean nominal, boolean unconstrained) { if (t instanceof SyntacticType.Primitive) { if (t instanceof SyntacticType.Any) { return Type.T_ANY; } else if (t instanceof SyntacticType.Void) { return Type.T_VOID; } else if (t instanceof SyntacticType.Null) { return Type.T_NULL; } else if (t instanceof SyntacticType.Bool) { return Type.T_BOOL; } else if (t instanceof SyntacticType.Byte) { return Type.T_BYTE; } else if (t instanceof SyntacticType.Char) { return Type.T_CHAR; } else if (t instanceof SyntacticType.Int) { return Type.T_INT; } else if (t instanceof SyntacticType.Real) { return Type.T_REAL; } else if (t instanceof SyntacticType.Strung) { return Type.T_STRING; } else { internalFailure("unrecognised type encountered (" + t.getClass().getName() + ")", context, t); return null; // deadcode } } else { ArrayList<Automaton.State> states = new ArrayList<Automaton.State>(); HashMap<NameID, 
Integer> roots = new HashMap<NameID, Integer>(); resolveAsType(t, context, states, roots, nominal, unconstrained); return Type.construct(new Automaton(states)); } } private int resolveAsType(SyntacticType type, Context context, ArrayList<Automaton.State> states, HashMap<NameID, Integer> roots, boolean nominal, boolean unconstrained) { if (type instanceof SyntacticType.Primitive) { return resolveAsType((SyntacticType.Primitive) type, context, states); } int myIndex = states.size(); int myKind; int[] myChildren; Object myData = null; boolean myDeterministic = true; states.add(null); // reserve space for me if (type instanceof SyntacticType.List) { SyntacticType.List lt = (SyntacticType.List) type; myKind = Type.K_LIST; myChildren = new int[1]; myChildren[0] = resolveAsType(lt.element, context, states, roots, nominal, unconstrained); myData = false; } else if (type instanceof SyntacticType.Set) { SyntacticType.Set st = (SyntacticType.Set) type; myKind = Type.K_SET; myChildren = new int[1]; myChildren[0] = resolveAsType(st.element, context, states, roots, nominal, unconstrained); myData = false; } else if (type instanceof SyntacticType.Map) { SyntacticType.Map st = (SyntacticType.Map) type; myKind = Type.K_MAP; myChildren = new int[2]; myChildren[0] = resolveAsType(st.key, context, states, roots, nominal, unconstrained); myChildren[1] = resolveAsType(st.value, context, states, roots, nominal, unconstrained); } else if (type instanceof SyntacticType.Record) { SyntacticType.Record tt = (SyntacticType.Record) type; HashMap<String, SyntacticType> ttTypes = tt.types; Type.Record.State fields = new Type.Record.State(tt.isOpen, ttTypes.keySet()); Collections.sort(fields); myKind = Type.K_RECORD; myChildren = new int[fields.size()]; for (int i = 0; i != fields.size(); ++i) { String field = fields.get(i); myChildren[i] = resolveAsType(ttTypes.get(field), context, states, roots, nominal, unconstrained); } myData = fields; } else if (type instanceof SyntacticType.Tuple) { 
SyntacticType.Tuple tt = (SyntacticType.Tuple) type; ArrayList<SyntacticType> ttTypes = tt.types; myKind = Type.K_TUPLE; myChildren = new int[ttTypes.size()]; for (int i = 0; i != ttTypes.size(); ++i) { myChildren[i] = resolveAsType(ttTypes.get(i), context, states, roots, nominal, unconstrained); } } else if (type instanceof SyntacticType.Nominal) { // This case corresponds to a user-defined type. This will be // defined in some module (possibly ours), and we need to identify // what module that is here, and save it for future use. // Furthermore, we need to determine whether the name is visible // (i.e. non-private) and/or whether the body of the type is visible // (i.e. non-protected). SyntacticType.Nominal dt = (SyntacticType.Nominal) type; NameID nid; try { // Determine the full qualified name of this nominal type. This // will additionally ensure that the name is visible nid = resolveAsName(dt.names, context); if (nominal || !isTypeVisible(nid, context)) { myKind = Type.K_NOMINAL; myData = nid; myChildren = Automaton.NOCHILDREN; } else { // At this point, we're going to expand the given nominal // type. We're going to use resolveAsType(NameID,...) to do // this which will load the expanded type onto states at the // current point. Therefore, we need to remove the initial // null we loaded on. 
states.remove(myIndex); return resolveAsType(nid, states, roots, unconstrained); } } catch (ResolveError e) { syntaxError(e.getMessage(), context, dt, e); return 0; // dead-code } catch (SyntaxError e) { throw e; } catch (Throwable e) { internalFailure(e.getMessage(), context, dt, e); return 0; // dead-code } } else if (type instanceof SyntacticType.Negation) { SyntacticType.Negation ut = (SyntacticType.Negation) type; myKind = Type.K_NEGATION; myChildren = new int[1]; myChildren[0] = resolveAsType(ut.element, context, states, roots, nominal, unconstrained); } else if (type instanceof SyntacticType.Union) { SyntacticType.Union ut = (SyntacticType.Union) type; ArrayList<SyntacticType.NonUnion> utTypes = ut.bounds; myKind = Type.K_UNION; myChildren = new int[utTypes.size()]; for (int i = 0; i != utTypes.size(); ++i) { myChildren[i] = resolveAsType(utTypes.get(i), context, states, roots, nominal, unconstrained); } myDeterministic = false; } else if (type instanceof SyntacticType.Intersection) { internalFailure("intersection types not supported yet", context, type); return 0; // dead-code } else if (type instanceof SyntacticType.Reference) { SyntacticType.Reference ut = (SyntacticType.Reference) type; myKind = Type.K_REFERENCE; myChildren = new int[1]; myChildren[0] = resolveAsType(ut.element, context, states, roots, nominal, unconstrained); } else { SyntacticType.FunctionOrMethod ut = (SyntacticType.FunctionOrMethod) type; ArrayList<SyntacticType> utParamTypes = ut.paramTypes; int start = 0; if (ut instanceof SyntacticType.Method) { myKind = Type.K_METHOD; } else { myKind = Type.K_FUNCTION; } myChildren = new int[start + 2 + utParamTypes.size()]; myChildren[start++] = resolveAsType(ut.ret, context, states, roots, nominal, unconstrained); if (ut.throwType == null) { // this case indicates the user did not provide a throws clause. 
myChildren[start++] = resolveAsType(new SyntacticType.Void(), context, states, roots, nominal, unconstrained); } else { myChildren[start++] = resolveAsType(ut.throwType, context, states, roots, nominal, unconstrained); } for (SyntacticType pt : utParamTypes) { myChildren[start++] = resolveAsType(pt, context, states, roots, nominal, unconstrained); } } states.set(myIndex, new Automaton.State(myKind, myData, myDeterministic, myChildren)); return myIndex; } private int resolveAsType(NameID key, ArrayList<Automaton.State> states, HashMap<NameID, Integer> roots, boolean unconstrained) throws IOException, ResolveError { // First, check the various caches we have Integer root = roots.get(key); if (root != null) { return root; } // check whether this type is external or not WhileyFile wf = builder.getSourceFile(key.module()); if (wf == null) { // indicates a non-local key which we can resolve immediately WyilFile mi = builder.getModule(key.module()); WyilFile.TypeDeclaration td = mi.type(key.name()); return append(td.type(), states); } WhileyFile.Type td = wf.typeDecl(key.name()); if (td == null) { // FIXME: the following allows (in certain cases) constants to be // interpreted as types. This should not be allowed and needs to be // removed in the future. However, to do this requires some kind of // unit/constant/enum type. 
See #315 Type t = resolveAsConstant(key).type(); if (t instanceof Type.Set) { if (unconstrained) { // crikey this is ugly int myIndex = states.size(); int kind = Type.leafKind(Type.T_VOID); Object data = null; states.add(new Automaton.State(kind, data, true, Automaton.NOCHILDREN)); return myIndex; } Type.Set ts = (Type.Set) t; return append(ts.element(), states); } else { throw new ResolveError("type not found: " + key); } } // following is needed to terminate any recursion roots.put(key, states.size()); SyntacticType type = td.pattern.toSyntacticType(); // now, expand the given type fully if (unconstrained && td.invariant != null) { int myIndex = states.size(); int kind = Type.leafKind(Type.T_VOID); Object data = null; states.add(new Automaton.State(kind, data, true, Automaton.NOCHILDREN)); return myIndex; } else if (type instanceof Type.Leaf) { // FIXME: I believe this code is now redundant, and should be // removed or updated. The problem is that SyntacticType no longer // extends Type. int myIndex = states.size(); int kind = Type.leafKind((Type.Leaf) type); Object data = Type.leafData((Type.Leaf) type); states.add(new Automaton.State(kind, data, true, Automaton.NOCHILDREN)); return myIndex; } else { return resolveAsType(type, td, states, roots, false, unconstrained); } // TODO: performance can be improved here, but actually assigning the // constructed type into a cache of previously expanded types cache. // This is challenging, in the case that the type may not be complete at // this point. In particular, if it contains any back-links above this // index there could be an issue. 
}

	/**
	 * Resolve a syntactic primitive type into a single automaton state of the
	 * corresponding kind, appending that state to the given state list.
	 *
	 * @param t
	 *            Primitive syntactic type to resolve
	 * @param context
	 *            Enclosing context (used for error reporting)
	 * @param states
	 *            State list under construction; the new state is appended
	 * @return Index of the appended state within <code>states</code>
	 */
	private int resolveAsType(SyntacticType.Primitive t, Context context,
			ArrayList<Automaton.State> states) {
		int myIndex = states.size();
		int kind;
		if (t instanceof SyntacticType.Any) {
			kind = Type.K_ANY;
		} else if (t instanceof SyntacticType.Void) {
			kind = Type.K_VOID;
		} else if (t instanceof SyntacticType.Null) {
			kind = Type.K_NULL;
		} else if (t instanceof SyntacticType.Bool) {
			kind = Type.K_BOOL;
		} else if (t instanceof SyntacticType.Byte) {
			kind = Type.K_BYTE;
		} else if (t instanceof SyntacticType.Char) {
			kind = Type.K_CHAR;
		} else if (t instanceof SyntacticType.Int) {
			kind = Type.K_INT;
		} else if (t instanceof SyntacticType.Real) {
			kind = Type.K_RATIONAL;
		} else if (t instanceof SyntacticType.Strung) {
			kind = Type.K_STRING;
		} else {
			internalFailure("unrecognised type encountered ("
					+ t.getClass().getName() + ")", context, t);
			return 0; // dead-code
		}
		states.add(new Automaton.State(kind, null, true, Automaton.NOCHILDREN));
		return myIndex;
	}

	/**
	 * Append all states of the automaton underlying the given type onto the
	 * given state list, remapping child indices so they remain valid at their
	 * new (shifted) positions.
	 *
	 * @param type
	 *            Type whose automaton states are appended
	 * @param states
	 *            State list under construction
	 * @return Index of the first appended state (the type's root)
	 */
	private static int append(Type type, ArrayList<Automaton.State> states) {
		int myIndex = states.size();
		Automaton automaton = Type.destruct(type);
		Automaton.State[] tStates = automaton.states;
		int[] rmap = new int[tStates.length];
		// every appended state is shifted by myIndex
		for (int i = 0, j = myIndex; i != rmap.length; ++i, ++j) {
			rmap[i] = j;
		}
		for (Automaton.State state : tStates) {
			states.add(Automata.remap(state, rmap));
		}
		return myIndex;
	}

	// =========================================================================
	// ResolveAsConstant
	// =========================================================================

	/**
	 * <p>
	 * Resolve a given name as a constant value. This is a global problem,
	 * since a constant declaration in one source file may refer to constants
	 * declared in other compilation units. This function will actually
	 * evaluate constant expressions (e.g. "1+2") to produce actual constant
	 * values.
	 * </p>
	 *
	 * <p>
	 * Constant declarations form a global graph spanning multiple compilation
	 * units. In resolving a given constant, this function must traverse those
	 * portions of the graph which make up the constant. Constants are not
	 * permitted to be declared recursively (i.e. in terms of themselves) and
	 * this function will report an error if such a recursive cycle is
	 * detected in the constant graph.
	 * </p>
	 *
	 * @param nid
	 *            Fully qualified name identifier of constant to resolve
	 * @return Constant value representing named constant
	 * @throws IOException
	 */
	public Constant resolveAsConstant(NameID nid) throws IOException,
			ResolveError {
		return resolveAsConstant(nid, new HashSet<NameID>());
	}

	/**
	 * <p>
	 * Resolve a given <i>constant expression</i> as a constant value. A
	 * constant expression is one which refers only to known and visible
	 * constant values, rather than e.g. local variables. Constant expressions
	 * may still use operators (e.g. "1+2", or "1+c" where c is a declared
	 * constant).
	 * </p>
	 *
	 * <p>
	 * Constant expressions are used in a few places in Whiley. In particular,
	 * the cases of a <code>switch</code> statement must be defined using
	 * constant expressions.
	 * </p>
	 *
	 * @param e
	 *            Expression to evaluate
	 * @param context
	 *            Enclosing context (used for error reporting)
	 * @return Evaluated constant value
	 */
	public Constant resolveAsConstant(Expr e, Context context) {
		e = propagate(e, new Environment(), context);
		return resolveAsConstant(e, context, new HashSet<NameID>());
	}

	/**
	 * Resolve a named constant, using the cache where possible and detecting
	 * cyclic definitions via the set of names already being visited.
	 *
	 * @param key
	 *            Name of constant to resolve
	 * @param visited
	 *            Names currently being resolved further up the call stack;
	 *            revisiting one of these indicates a cycle
	 */
	private Constant resolveAsConstant(NameID key, HashSet<NameID> visited)
			throws IOException, ResolveError {
		Constant result = constantCache.get(key);
		if (result != null) {
			return result;
		} else if (visited.contains(key)) {
			throw new ResolveError("cyclic constant definition encountered ("
					+ key + " -> " + key + ")");
		} else {
			visited.add(key);
		}
		WhileyFile wf = builder.getSourceFile(key.module());
		if (wf != null) {
			// constant declared in a source file currently being compiled
			WhileyFile.Declaration decl = wf.declaration(key.name());
			if (decl instanceof WhileyFile.Constant) {
				WhileyFile.Constant cd = (WhileyFile.Constant) decl;
				if (cd.resolvedValue == null) {
					cd.constant = propagate(cd.constant, new Environment(), cd);
					cd.resolvedValue = resolveAsConstant(cd.constant, cd,
							visited);
				}
				result = cd.resolvedValue;
			} else {
				throw new ResolveError("unable to find constant " + key);
			}
		} else {
			// constant declared in an already-compiled wyil module
			WyilFile module = builder.getModule(key.module());
			WyilFile.ConstantDeclaration cd = module.constant(key.name());
			if (cd != null) {
				result = cd.constant();
			} else {
				throw new ResolveError("unable to find constant " + key);
			}
		}
		constantCache.put(key, result);
		return result;
	}

	/**
	 * Recursively evaluate a constant expression, dispatching on the concrete
	 * expression form. Reports a syntax error (and returns null) for anything
	 * which is not a valid constant expression.
	 */
	private Constant resolveAsConstant(Expr expr, Context context,
			HashSet<NameID> visited) {
		try {
			if (expr instanceof Expr.Constant) {
				Expr.Constant c = (Expr.Constant) expr;
				return c.value;
			} else if (expr instanceof Expr.ConstantAccess) {
				Expr.ConstantAccess c = (Expr.ConstantAccess) expr;
				ArrayList<String> qualifications = new ArrayList<String>();
				if (c.qualification != null) {
					for (String n : c.qualification) {
						qualifications.add(n);
					}
				}
				qualifications.add(c.name);
				try {
					NameID nid = resolveAsName(qualifications, context);
					return resolveAsConstant(nid, visited);
				} catch (ResolveError e) {
					syntaxError(errorMessage(UNKNOWN_VARIABLE), context, expr);
					return null;
				}
			} else if (expr instanceof Expr.BinOp) {
				Expr.BinOp bop = (Expr.BinOp) expr;
				Constant lhs = resolveAsConstant(bop.lhs, context, visited);
				Constant rhs = resolveAsConstant(bop.rhs, context, visited);
				return evaluate(bop, lhs, rhs, context);
			} else if (expr instanceof Expr.UnOp) {
				Expr.UnOp uop = (Expr.UnOp) expr;
				Constant lhs = resolveAsConstant(uop.mhs, context, visited);
				return evaluate(uop, lhs, context);
			} else if (expr instanceof Expr.Set) {
				Expr.Set nop = (Expr.Set) expr;
				ArrayList<Constant> values = new ArrayList<Constant>();
				for (Expr arg : nop.arguments) {
					values.add(resolveAsConstant(arg, context, visited));
				}
				return Constant.V_SET(values);
			} else if (expr instanceof Expr.List) {
				Expr.List nop = (Expr.List) expr;
				ArrayList<Constant> values = new ArrayList<Constant>();
				for (Expr arg : nop.arguments) {
					values.add(resolveAsConstant(arg, context, visited));
				}
				return Constant.V_LIST(values);
			} else if (expr instanceof Expr.Record) {
				Expr.Record rg = (Expr.Record) expr;
				HashMap<String, Constant> values = new HashMap<String, Constant>();
				for (Map.Entry<String, Expr> e : rg.fields.entrySet()) {
					Constant v = resolveAsConstant(e.getValue(), context,
							visited);
					if (v == null) {
						return null;
					}
					values.put(e.getKey(), v);
				}
				return Constant.V_RECORD(values);
			} else if (expr instanceof Expr.Tuple) {
				Expr.Tuple rg = (Expr.Tuple) expr;
				ArrayList<Constant> values = new ArrayList<Constant>();
				for (Expr e : rg.fields) {
					Constant v = resolveAsConstant(e, context, visited);
					if (v == null) {
						return null;
					}
					values.add(v);
				}
				return Constant.V_TUPLE(values);
			} else if (expr instanceof Expr.Map) {
				Expr.Map rg = (Expr.Map) expr;
				HashSet<Pair<Constant, Constant>> values = new HashSet<Pair<Constant, Constant>>();
				for (Pair<Expr, Expr> e : rg.pairs) {
					Constant key = resolveAsConstant(e.first(), context,
							visited);
					Constant value = resolveAsConstant(e.second(), context,
							visited);
					if (key == null || value == null) {
						return null;
					}
					values.add(new Pair<Constant, Constant>(key, value));
				}
				return Constant.V_MAP(values);
			} else if (expr instanceof Expr.FunctionOrMethod) {
				// TODO: add support for proper lambdas
				Expr.FunctionOrMethod f = (Expr.FunctionOrMethod) expr;
				return Constant.V_LAMBDA(f.nid, f.type.raw());
			}
		} catch (SyntaxError.InternalFailure e) {
			throw e;
		} catch (Throwable e) {
			internalFailure(e.getMessage(), context, expr, e);
		}
		internalFailure("unknown constant expression: "
				+ expr.getClass().getName(), context, expr);
		return null; // deadcode
	}

	/**
	 * Determine whether a name is visible in a given context. This effectively
	 * corresponds to checking whether or not the name already exists in the
	 * given context; or, a public or protected name is imported from another
	 * file.
	 *
	 * @param nid
	 *            Name to check modifiers of
	 * @param context
	 *            Context in which we are trying to access named item
	 *
	 * @return True if given context permitted to access name
	 * @throws IOException
	 */
	public boolean isNameVisible(NameID nid, Context context)
			throws IOException {
		// Any element in the same file is automatically visible
		if (nid.module().equals(context.file().module)) {
			return true;
		} else {
			return hasModifier(nid, context, Modifier.PUBLIC)
					|| hasModifier(nid, context, Modifier.PROTECTED);
		}
	}

	/**
	 * Determine whether a named type is fully visible in a given context. This
	 * effectively corresponds to checking whether or not the type already
	 * exists in the given context; or, a public type is imported from another
	 * file.
	 *
	 * @param nid
	 *            Name to check modifiers of
	 * @param context
	 *            Context in which we are trying to access named item
	 *
	 * @return True if given context permitted to access name
	 * @throws IOException
	 */
	public boolean isTypeVisible(NameID nid, Context context)
			throws IOException {
		// Any element in the same file is automatically visible
		if (nid.module().equals(context.file().module)) {
			return true;
		} else {
			return hasModifier(nid, context, Modifier.PUBLIC);
		}
	}

	/**
	 * Determine whether a named item has a given modifier. This is
	 * particularly useful for checking visibility (e.g. public, private, etc)
	 * of named items.
	 *
	 * @param nid
	 *            Name to check modifiers of
	 * @param context
	 *            Context in which we are trying to access named item
	 * @param modifier
	 *            Modifier being checked for
	 *
	 * @return True if given context permitted to access name
	 * @throws IOException
	 */
	public boolean hasModifier(NameID nid, Context context, Modifier modifier)
			throws IOException {
		Path.ID mid = nid.module();
		// Attempt to access source file first.
		WhileyFile wf = builder.getSourceFile(mid);
		if (wf != null) {
			// Source file location, so check visibility of element.
			WhileyFile.NamedDeclaration nd = wf.declaration(nid.name());
			return nd != null && nd.hasModifier(modifier);
		} else {
			// Source file not being compiled, therefore attempt to access wyil
			// file directly.

			// we have to do the following basically because we don't load
			// modifiers properly out of jvm class files (at the moment).

			// return false;
			WyilFile w = builder.getModule(mid);
			List<WyilFile.Declaration> declarations = w.declarations();
			for (int i = 0; i != declarations.size(); ++i) {
				WyilFile.Declaration d = declarations.get(i);
				if (d instanceof WyilFile.NamedDeclaration) {
					// NOTE(review): this returns on the *first* named
					// declaration found, without matching it against
					// nid.name() — looks suspicious; confirm intended.
					WyilFile.NamedDeclaration nd = (WyilFile.NamedDeclaration) d;
					return nd != null && nd.hasModifier(modifier);
				}
			}
			return false;
		}
	}

	// =========================================================================
	// Constant Evaluation
	// =========================================================================

	/**
	 * Evaluate a given unary operator on a given input value.
	 *
	 * @param operator
	 *            Unary operator to evaluate
	 * @param operand
	 *            Operand to apply operator on
	 * @param context
	 *            Context in which to apply operator (useful for error
	 *            reporting)
	 * @return Result constant, or null after reporting a syntax error
	 */
	private Constant evaluate(Expr.UnOp operator, Constant operand,
			Context context) {
		switch (operator.op) {
		case NOT:
			if (operand instanceof Constant.Bool) {
				Constant.Bool b = (Constant.Bool) operand;
				return Constant.V_BOOL(!b.value);
			}
			syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context,
					operator);
			break;
		case NEG:
			if (operand instanceof Constant.Integer) {
				Constant.Integer b = (Constant.Integer) operand;
				return Constant.V_INTEGER(b.value.negate());
			} else if (operand instanceof Constant.Decimal) {
				Constant.Decimal b = (Constant.Decimal) operand;
				return Constant.V_DECIMAL(b.value.negate());
			}
			syntaxError(errorMessage(INVALID_NUMERIC_EXPRESSION), context,
					operator);
			break;
		case INVERT:
			if (operand instanceof Constant.Byte) {
				Constant.Byte b = (Constant.Byte) operand;
				return Constant.V_BYTE((byte) ~b.value);
			}
			break;
		}
		syntaxError(errorMessage(INVALID_UNARY_EXPRESSION), context, operator);
		return null;
	}

	/**
	 * Evaluate a binary operator on two constant operands, dispatching to the
	 * appropriate typed evaluator based on the least upper bound of the
	 * operand types.
	 */
	private Constant evaluate(Expr.BinOp bop, Constant v1, Constant v2,
			Context context) {
		Type v1_type = v1.type();
		Type v2_type = v2.type();
		Type lub = Type.Union(v1_type, v2_type);

		// FIXME: there are bugs here related to coercions.

		if (Type.isSubtype(Type.T_BOOL, lub)) {
			return evaluateBoolean(bop, (Constant.Bool) v1, (Constant.Bool) v2,
					context);
		} else if (Type.isSubtype(Type.T_INT, lub)) {
			return evaluate(bop, (Constant.Integer) v1, (Constant.Integer) v2,
					context);
		} else if (Type.isImplicitCoerciveSubtype(Type.T_REAL, v1_type)
				&& Type.isImplicitCoerciveSubtype(Type.T_REAL, v2_type)) {
			// was: both tests against v1_type — v2's coercibility was never
			// checked, permitting an invalid Decimal cast below.
			if (v1 instanceof Constant.Integer) {
				Constant.Integer i1 = (Constant.Integer) v1;
				v1 = Constant.V_DECIMAL(new BigDecimal(i1.value));
			} else if (v2 instanceof Constant.Integer) {
				Constant.Integer i2 = (Constant.Integer) v2;
				v2 = Constant.V_DECIMAL(new BigDecimal(i2.value));
			}
			return evaluate(bop, (Constant.Decimal) v1, (Constant.Decimal) v2,
					context);
		} else if (Type.isSubtype(Type.T_LIST_ANY, lub)) {
			return evaluate(bop, (Constant.List) v1, (Constant.List) v2,
					context);
		} else if (Type.isSubtype(Type.T_SET_ANY, lub)) {
			return evaluate(bop, (Constant.Set) v1, (Constant.Set) v2, context);
		}
		syntaxError(errorMessage(INVALID_BINARY_EXPRESSION), context, bop);
		return null;
	}

	/**
	 * Evaluate a boolean binary operator (AND / OR / XOR) on two boolean
	 * constants.
	 */
	private Constant evaluateBoolean(Expr.BinOp bop, Constant.Bool v1,
			Constant.Bool v2, Context context) {
		switch (bop.op) {
		case AND:
			return Constant.V_BOOL(v1.value & v2.value);
		case OR:
			return Constant.V_BOOL(v1.value | v2.value);
		case XOR:
			return Constant.V_BOOL(v1.value ^ v2.value);
		}
		syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, bop);
		return null;
	}

	/**
	 * Evaluate an arithmetic binary operator on two integer constants.
	 */
	private Constant evaluate(Expr.BinOp bop, Constant.Integer v1,
			Constant.Integer v2, Context context) {
		switch (bop.op) {
		case ADD:
			return Constant.V_INTEGER(v1.value.add(v2.value));
		case SUB:
			return Constant.V_INTEGER(v1.value.subtract(v2.value));
		case MUL:
			return Constant.V_INTEGER(v1.value.multiply(v2.value));
		case DIV:
			return Constant.V_INTEGER(v1.value.divide(v2.value));
		case REM:
			return Constant.V_INTEGER(v1.value.remainder(v2.value));
		}
		syntaxError(errorMessage(INVALID_NUMERIC_EXPRESSION), context, bop);
		return null;
	}

	/**
	 * Evaluate an arithmetic binary operator on two decimal constants.
	 */
	private Constant evaluate(Expr.BinOp bop, Constant.Decimal v1,
			Constant.Decimal v2, Context context) {
		switch (bop.op) {
		case ADD:
			return Constant.V_DECIMAL(v1.value.add(v2.value));
		case SUB:
			return Constant.V_DECIMAL(v1.value.subtract(v2.value));
		case MUL:
			return Constant.V_DECIMAL(v1.value.multiply(v2.value));
		case DIV:
			return Constant.V_DECIMAL(v1.value.divide(v2.value));
		}
		syntaxError(errorMessage(INVALID_NUMERIC_EXPRESSION), context, bop);
		return null;
	}

	/**
	 * Evaluate a binary operator on two list constants (only append is
	 * supported).
	 */
	private Constant evaluate(Expr.BinOp bop, Constant.List v1,
			Constant.List v2, Context context) {
		switch (bop.op) {
		case ADD:
			ArrayList<Constant> vals = new ArrayList<Constant>(v1.values);
			vals.addAll(v2.values);
			return Constant.V_LIST(vals);
		}
		syntaxError(errorMessage(INVALID_LIST_EXPRESSION), context, bop);
		return null;
	}

	/**
	 * Evaluate a binary operator on two set constants (union, intersection
	 * and difference are supported).
	 */
	private Constant evaluate(Expr.BinOp bop, Constant.Set v1, Constant.Set v2,
			Context context) {
		switch (bop.op) {
		case UNION: {
			HashSet<Constant> vals = new HashSet<Constant>(v1.values);
			vals.addAll(v2.values);
			return Constant.V_SET(vals);
		}
		case INTERSECTION: {
			HashSet<Constant> vals = new HashSet<Constant>();
			for (Constant v : v1.values) {
				if (v2.values.contains(v)) {
					vals.add(v);
				}
			}
			return Constant.V_SET(vals);
		}
		case SUB: {
			HashSet<Constant> vals = new HashSet<Constant>();
			for (Constant v : v1.values) {
				if (!v2.values.contains(v)) {
					vals.add(v);
				}
			}
			return Constant.V_SET(vals);
		}
		}
		syntaxError(errorMessage(INVALID_SET_EXPRESSION), context, bop);
		return null;
	}

	// =========================================================================
	// expandAsType
	// =========================================================================

	/**
	 * Expand the given nominal type as an effective set, or return null if it
	 * is not one. Nominal information is discarded when it does not expand to
	 * the same effective kind as the raw type.
	 */
	public Nominal.EffectiveSet expandAsEffectiveSet(Nominal lhs)
			throws IOException, ResolveError {
		Type raw = lhs.raw();
		if (raw instanceof Type.EffectiveSet) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.EffectiveSet)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.EffectiveSet) Nominal.construct(nominal, raw);
		} else {
			return null;
		}
	}

	/**
	 * Expand the given nominal type as an effective list, or return null if
	 * it is not one.
	 */
	public Nominal.EffectiveList expandAsEffectiveList(Nominal lhs)
			throws IOException, ResolveError {
		Type raw = lhs.raw();
		if (raw instanceof Type.EffectiveList) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.EffectiveList)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.EffectiveList) Nominal.construct(nominal, raw);
		} else {
			return null;
		}
	}

	/**
	 * Expand the given nominal type as an effective collection, or return
	 * null if it is not one.
	 */
	public Nominal.EffectiveCollection expandAsEffectiveCollection(Nominal lhs)
			throws IOException, ResolveError {
		Type raw = lhs.raw();
		if (raw instanceof Type.EffectiveCollection) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.EffectiveCollection)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.EffectiveCollection) Nominal
					.construct(nominal, raw);
		} else {
			return null;
		}
	}

	/**
	 * Expand the given nominal type as an effective indexible (map-like)
	 * type, or return null if it is not one.
	 */
	public Nominal.EffectiveIndexible expandAsEffectiveMap(Nominal lhs)
			throws IOException, ResolveError {
		Type raw = lhs.raw();
		if (raw instanceof Type.EffectiveIndexible) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.EffectiveIndexible)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.EffectiveIndexible) Nominal.construct(nominal, raw);
		} else {
			return null;
		}
	}

	/**
	 * Expand the given nominal type as an effective map (dictionary), or
	 * return null if it is not one.
	 */
	public Nominal.EffectiveMap expandAsEffectiveDictionary(Nominal lhs)
			throws IOException, ResolveError {
		Type raw = lhs.raw();
		if (raw instanceof Type.EffectiveMap) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.EffectiveMap)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.EffectiveMap) Nominal.construct(nominal, raw);
		} else {
			return null;
		}
	}

	/**
	 * Expand the given nominal type as an effective record (either a single
	 * record, or a union of records), or return null if it is not one.
	 */
	public Nominal.EffectiveRecord expandAsEffectiveRecord(Nominal lhs)
			throws IOException, ResolveError {
		Type raw = lhs.raw();
		if (raw instanceof Type.Record) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.Record)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.Record) Nominal.construct(nominal, raw);
		} else if (raw instanceof Type.UnionOfRecords) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.UnionOfRecords)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.UnionOfRecords) Nominal.construct(nominal, raw);
		} else {
			// was a bare "{ return null; }" block with the "else" keyword
			// missing; behaviour unchanged.
			return null;
		}
	}

	/**
	 * Expand the given nominal type as an effective tuple, or return null if
	 * it is not one.
	 */
	public Nominal.EffectiveTuple expandAsEffectiveTuple(Nominal lhs)
			throws IOException, ResolveError {
		Type raw = lhs.raw();
		if (raw instanceof Type.EffectiveTuple) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.EffectiveTuple)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.EffectiveTuple) Nominal.construct(nominal, raw);
		} else {
			return null;
		}
	}

	/**
	 * Expand the given nominal type as a reference, or return null if it is
	 * not one.
	 */
	public Nominal.Reference expandAsReference(Nominal lhs) throws IOException,
			ResolveError {
		Type.Reference raw = Type.effectiveReference(lhs.raw());
		if (raw != null) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.Reference)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.Reference) Nominal.construct(nominal, raw);
		} else {
			return null;
		}
	}

	/**
	 * Expand the given nominal type as a function or method, or return null
	 * if it is not one.
	 */
	public Nominal.FunctionOrMethod expandAsFunctionOrMethod(Nominal lhs)
			throws IOException, ResolveError {
		Type.FunctionOrMethod raw = Type.effectiveFunctionOrMethod(lhs.raw());
		if (raw != null) {
			Type nominal = expandOneLevel(lhs.nominal());
			if (!(nominal instanceof Type.FunctionOrMethod)) {
				nominal = raw; // discard nominal information
			}
			return (Nominal.FunctionOrMethod) Nominal.construct(nominal, raw);
		} else {
			return null;
		}
	}

	/**
	 * Expand a type by exactly one level: nominal types are replaced by their
	 * underlying definition (looked up in source or wyil form), unions are
	 * expanded component-wise, and all other forms are returned as-is.
	 *
	 * @throws ResolveError
	 *             if a nominal type's definition cannot be located
	 */
	private Type expandOneLevel(Type type) throws IOException, ResolveError {
		if (type instanceof Type.Nominal) {
			Type.Nominal nt = (Type.Nominal) type;
			NameID nid = nt.name();
			Path.ID mid = nid.module();
			WhileyFile wf = builder.getSourceFile(mid);
			Type r = null;
			if (wf != null) {
				WhileyFile.Declaration decl = wf.declaration(nid.name());
				if (decl instanceof WhileyFile.Type) {
					WhileyFile.Type td = (WhileyFile.Type) decl;
					r = resolveAsType(td.pattern.toSyntacticType(), td)
							.nominal();
				}
			} else {
				WyilFile m = builder.getModule(mid);
				WyilFile.TypeDeclaration td = m.type(nid.name());
				if (td != null) {
					r = td.type();
				}
			}
			if (r == null) {
				throw new ResolveError("unable to locate " + nid);
			}
			return expandOneLevel(r);
		} else if (type instanceof Type.Leaf || type instanceof Type.Reference
				|| type instanceof Type.Tuple || type instanceof Type.Set
				|| type instanceof Type.List || type instanceof Type.Map
				|| type instanceof Type.Record
				|| type instanceof Type.FunctionOrMethod
				|| type instanceof Type.Negation) {
			return type;
		} else {
			Type.Union ut = (Type.Union) type;
			ArrayList<Type> bounds = new ArrayList<Type>();
			for (Type b : ut.bounds()) {
				bounds.add(expandOneLevel(b));
			}
			return Type.Union(bounds);
		}
	}

	// =========================================================================
	// Misc
	// =========================================================================

	// Check t1 :> t2
	private void checkIsSubtype(Nominal t1, Nominal t2, SyntacticElement elem) {
		if (!Type.isImplicitCoerciveSubtype(t1.raw(), t2.raw())) {
			syntaxError(
					errorMessage(SUBTYPE_ERROR, t1.nominal(), t2.nominal()),
					filename, elem);
		}
	}

	private void checkIsSubtype(Nominal t1, Expr t2) {
		if (!Type.isImplicitCoerciveSubtype(t1.raw(), t2.result().raw())) {
			// We use the nominal type for error reporting, since this includes
			// more helpful names.
			syntaxError(
					errorMessage(SUBTYPE_ERROR, t1.nominal(), t2.result()
							.nominal()), filename, t2);
		}
	}

	private void checkIsSubtype(Type t1, Expr t2) {
		if (!Type.isImplicitCoerciveSubtype(t1, t2.result().raw())) {
			// We use the nominal type for error reporting, since this includes
			// more helpful names.
			syntaxError(errorMessage(SUBTYPE_ERROR, t1, t2.result().nominal()),
					filename, t2);
		}
	}

	// Check t1 :> t2
	private void checkIsSubtype(Nominal t1, Nominal t2, SyntacticElement elem,
			Context context) {
		if (!Type.isImplicitCoerciveSubtype(t1.raw(), t2.raw())) {
			syntaxError(
					errorMessage(SUBTYPE_ERROR, t1.nominal(), t2.nominal()),
					context, elem);
		}
	}

	private void checkIsSubtype(Nominal t1, Expr t2, Context context) {
		if (!Type.isImplicitCoerciveSubtype(t1.raw(), t2.result().raw())) {
			// We use the nominal type for error reporting, since this includes
			// more helpful names.
			syntaxError(
					errorMessage(SUBTYPE_ERROR, t1.nominal(), t2.result()
							.nominal()), context, t2);
		}
	}

	private void checkIsSubtype(Type t1, Expr t2, Context context) {
		if (!Type.isImplicitCoerciveSubtype(t1, t2.result().raw())) {
			// We use the nominal type for error reporting, since this includes
			// more helpful names.
			syntaxError(errorMessage(SUBTYPE_ERROR, t1, t2.result().nominal()),
					context, t2);
		}
	}

	// =========================================================================
	// Environment Class
	// =========================================================================

	/**
	 * <p>
	 * Responsible for mapping source-level variables to their declared and
	 * actual types, at any given program point. Since the flow-type checker
	 * uses a flow-sensitive approach to type checking, then the typing
	 * environment will change as we move through the statements of a function
	 * or method.
	 * </p>
	 *
	 * <p>
	 * This class is implemented in a functional style to minimise possible
	 * problems related to aliasing (which have been a problem in the past). To
	 * improve performance, reference counting is used to ensure that cloning
	 * the underlying map is only performed when actually necessary. When there
	 * is only one reference, then the put and putAll operations will perform
	 * an "inplace" update (i.e. without cloning the underlying collection).
	 * </p>
	 *
	 * @author David J. Pearce
	 *
	 */
	private static final class Environment {

		/**
		 * The mapping of variables to their declared type.
		 */
		private final HashMap<String, Nominal> declaredTypes;

		/**
		 * The mapping of variables to their current type.
		 */
		private final HashMap<String, Nominal> currentTypes;

		/**
		 * The reference count, which indicates how many references to this
		 * environment there are.
		 */
		private int count; // refCount

		/**
		 * Construct an empty environment. Initially the reference count is 1.
		 */
		public Environment() {
			count = 1;
			currentTypes = new HashMap<String, Nominal>();
			declaredTypes = new HashMap<String, Nominal>();
		}

		/**
		 * Construct a fresh environment as a copy of another map. Initially
		 * the reference count is 1.
		 */
		private Environment(Environment environment) {
			count = 1;
			this.currentTypes = (HashMap<String, Nominal>) environment.currentTypes
					.clone();
			this.declaredTypes = (HashMap<String, Nominal>) environment.declaredTypes
					.clone();
		}

		/**
		 * Get the type associated with a given variable at the current program
		 * point, or null if that variable is not declared.
		 *
		 * @param variable
		 *            Variable to return type for.
		 * @return Current type, or null
		 */
		public Nominal getCurrentType(String variable) {
			return currentTypes.get(variable);
		}

		/**
		 * Get the declared type of a given variable, or null if that variable
		 * is not declared.
		 *
		 * @param variable
		 *            Variable to return type for.
		 * @return Declared type, or null
		 */
		public Nominal getDeclaredType(String variable) {
			// was "return null;" — that contradicted this method's contract
			// and left declaredTypes write-only.
			return declaredTypes.get(variable);
		}

		/**
		 * Check whether a given variable is declared within this environment.
		 *
		 * @param variable
		 * @return True if the variable has a current type
		 */
		public boolean containsKey(String variable) {
			return currentTypes.containsKey(variable);
		}

		/**
		 * Return the set of declared variables in this environment (a.k.a the
		 * domain).
		 *
		 * @return Set of variable names
		 */
		public Set<String> keySet() {
			return currentTypes.keySet();
		}

		/**
		 * Associate a type with a given variable. If that variable already had
		 * a type, then this is overwritten. In the case that this environment
		 * has a reference count of 1, then an "in place" update is performed.
		 * Otherwise, a fresh copy of this environment is returned with the
		 * given variable associated with the given type, whilst this
		 * environment is unchanged.
		 *
		 * @param variable
		 *            Name of variable to be associated with given type
		 * @param type
		 *            Type to associate with given variable
		 * @return An updated version of the environment which contains the new
		 *         association.
		 */
		public Environment put(String variable, Nominal type) {
			if (count == 1) {
				currentTypes.put(variable, type);
				return this;
			} else {
				Environment nenv = new Environment(this);
				nenv.currentTypes.put(variable, type);
				// the caller's reference transfers to the copy
				count--;
				return nenv;
			}
		}

		/**
		 * Copy all variable-type associations from the given environment into
		 * this environment. The type of any variable already associated with a
		 * type is overwritten. In the case that this environment has a
		 * reference count of 1, then an "in place" update is performed.
		 * Otherwise, a fresh copy of this environment is returned with the
		 * given variables associated with the given types, whilst this
		 * environment is unchanged.
		 *
		 * @param env
		 *            Environment whose associations are copied in
		 * @return An updated version of the environment which contains all the
		 *         associations from the given environment.
		 */
		public Environment putAll(Environment env) {
			if (count == 1) {
				HashMap<String, Nominal> envTypes = env.currentTypes;
				currentTypes.putAll(envTypes);
				return this;
			} else {
				Environment nenv = new Environment(this);
				HashMap<String, Nominal> envTypes = env.currentTypes;
				nenv.currentTypes.putAll(envTypes);
				// the caller's reference transfers to the copy
				count--;
				return nenv;
			}
		}

		/**
		 * Remove a variable and any associated type from this environment. In
		 * the case that this environment has a reference count of 1, then an
		 * "in place" update is performed. Otherwise, a fresh copy of this
		 * environment is returned with the given variable and any association
		 * removed.
		 *
		 * @param key
		 *            Name of variable to be removed from the environment
		 * @return An updated version of the environment in which the given
		 *         variable no longer exists.
		 */
		public Environment remove(String key) {
			if (count == 1) {
				currentTypes.remove(key);
				return this;
			} else {
				Environment nenv = new Environment(this);
				nenv.currentTypes.remove(key);
				// the caller's reference transfers to the copy
				count--;
				return nenv;
			}
		}

		/**
		 * Create a fresh copy of this environment. In fact, this operation
		 * simply increments the reference count of this environment and
		 * returns it.
		 */
		public Environment clone() {
			count++;
			return this;
		}

		/**
		 * Decrease the reference count of this environment by one.
		 */
		public void free() {
			--count;
		}

		public String toString() {
			return currentTypes.toString();
		}

		public int hashCode() {
			return currentTypes.hashCode();
		}

		public boolean equals(Object o) {
			if (o instanceof Environment) {
				Environment r = (Environment) o;
				return currentTypes.equals(r.currentTypes);
			}
			return false;
		}
	}

	/** Distinguished bottom element for the environment join. */
	private static final Environment BOTTOM = new Environment();

	/**
	 * Join two environments by intersecting their domains and taking the
	 * union of types for common variables. BOTTOM acts as the identity.
	 * Both inputs are freed (their refcounts decremented) by this operation.
	 *
	 * NOTE(review): the result's declaredTypes map is left empty — confirm
	 * whether declared types should be carried across a join.
	 */
	private static final Environment join(Environment lhs, Environment rhs) {

		// first, need to check for the special bottom value case.

		if (lhs == BOTTOM) {
			return rhs;
		} else if (rhs == BOTTOM) {
			return lhs;
		}

		// ok, not bottom so compute intersection.

		lhs.free();
		rhs.free();

		Environment result = new Environment();
		for (String key : lhs.keySet()) {
			if (rhs.containsKey(key)) {
				Nominal lhs_t = lhs.getCurrentType(key);
				Nominal rhs_t = rhs.getCurrentType(key);
				result.put(key, Nominal.Union(lhs_t, rhs_t));
			}
		}

		return result;
	}
}
package moses.client.service; import java.util.concurrent.ConcurrentLinkedQueue; import org.json.JSONArray; import moses.client.abstraction.HardwareAbstraction; import moses.client.abstraction.apks.InstalledExternalApplicationsManager; import moses.client.com.NetworkJSON; import moses.client.service.helpers.C2DMManager; import moses.client.service.helpers.Executor; import moses.client.service.helpers.ExecutorWithObject; import moses.client.service.helpers.Login; import moses.client.service.helpers.Logout; import moses.client.userstudy.UserstudyNotificationManager; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.SharedPreferences.OnSharedPreferenceChangeListener; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.Binder; import android.os.IBinder; import android.preference.PreferenceManager; import android.util.Log; /** * The Class MosesService. * * @author Jaco Hofmann */ public class MosesService extends android.app.Service implements OnSharedPreferenceChangeListener { /** * The Class LocalBinder. */ public class LocalBinder extends Binder { /** * Gets the service. * * @return the service */ public MosesService getService() { return MosesService.this; } } /** * The Class MosesSettings. */ public class MosesSettings { /** The username. */ public String username = ""; /** The password. */ public String password = ""; /** The sessionid. */ public String sessionid = ""; /** The logged in. */ public boolean loggedIn = false; public boolean loggingIn = false; public boolean firstStart = true; /** Saves the used filter. 
*/ public JSONArray filter = new JSONArray(); public ConcurrentLinkedQueue<Executor> postLoginSuccessHook = new ConcurrentLinkedQueue<Executor>(); public ConcurrentLinkedQueue<Executor> postLoginFailureHook = new ConcurrentLinkedQueue<Executor>(); public ConcurrentLinkedQueue<Executor> loginStartHook = new ConcurrentLinkedQueue<Executor>(); public ConcurrentLinkedQueue<Executor> loginEndHook = new ConcurrentLinkedQueue<Executor>(); public ConcurrentLinkedQueue<Executor> postLogoutHook = new ConcurrentLinkedQueue<Executor>(); public String url = "http: public ConcurrentLinkedQueue<ExecutorWithObject> changeTextFieldHook = new ConcurrentLinkedQueue<ExecutorWithObject>(); } /** The m binder. */ private final IBinder mBinder = new LocalBinder(); /** The settings file. */ private SharedPreferences settingsFile; /** The mset. */ private MosesSettings mset = new MosesSettings(); private static MosesService thisInstance = null; public static MosesService getInstance() { return thisInstance; } /** * Gets the session id. * * @return the session id */ public String getSessionID() { return mset.sessionid; } /** * Inits the config. */ private void initConfig() { settingsFile = PreferenceManager.getDefaultSharedPreferences(this); mset.username = settingsFile.getString("username_pref", ""); mset.password = settingsFile.getString("password_pref", ""); } /** * Checks if is logged in. * * @return true, if is logged in */ public boolean isLoggedIn() { return mset.loggedIn; } public void setFilter(JSONArray filter) { mset.filter = filter; settingsFile = PreferenceManager.getDefaultSharedPreferences(this); settingsFile.edit().putString("sensor_data", filter.toString()).commit(); Log.d("MoSeS.SERVICE","Set data to: " + settingsFile.getString("sensor_data", "[]")); } public JSONArray getFilter() { return mset.filter; } public Context getServiceContext() { return this; } /** * Logged in. 
* * @param sessionid * the sessionid */ public void loggedIn(String sessionid) { mset.loggedIn = true; mset.loggingIn = false; mset.sessionid = sessionid; } /** * Logged out. */ public void loggedOut() { mset.loggedIn = false; mset.sessionid = ""; } public void registerChangeTextFieldHook(ExecutorWithObject e) { if (!mset.changeTextFieldHook.contains(e)) mset.changeTextFieldHook.add(e); } public void unregisterChangeTextFieldHook(ExecutorWithObject e) { mset.changeTextFieldHook.remove(e); } /** * Login. * * @param e * the e */ public void login() { if (mset.username.equals("") || mset.password.equals("")) { for (ExecutorWithObject e : mset.changeTextFieldHook) { e.execute(getString(moses.client.R.string.no_username_password)); } return; } if (isOnline()) { if (!mset.loggedIn && !mset.loggingIn) { Log.d("MoSeS.SERVICE", "Logging in..."); mset.loggingIn = true; Login.setService(this); new Login(mset.username, mset.password, mset.postLoginSuccessHook, mset.postLoginFailureHook, mset.loginStartHook, mset.loginEndHook); } } else { Log.d("MoSeS.SERVICE", "Tried logging in but no internet connection was present."); } } /** * Logout. 
* * @param e * the e */ public void logout() { new Logout(this, mset.postLogoutHook); } /* * (non-Javadoc) * * @see android.app.Service#onBind(android.content.Intent) */ @Override public IBinder onBind(Intent arg0) { return mBinder; } /* * (non-Javadoc) * * @see android.app.Service#onCreate() */ @Override public void onCreate() { super.onCreate(); thisInstance = this; registerPostLoginFailureHook(new Executor() { @Override public void execute() { mset.loggingIn = false; } }); InstalledExternalApplicationsManager.init(this); UserstudyNotificationManager.init(this); mset.firstStart = PreferenceManager.getDefaultSharedPreferences(this).getBoolean("first_start", true); NetworkJSON.url = mset.url; PreferenceManager.getDefaultSharedPreferences(this) .registerOnSharedPreferenceChangeListener(this); C2DMManager.requestC2DMId(MosesService.this); firstLogin(); initConfig(); Log.d("MoSeS.SERVICE", "Service Created"); } public void executeLoggedIn(Executor e) { if (isLoggedIn()) e.execute(); else { registerPostLoginSuccessOneTimeHook(e); login(); } } public void registerPostLoginSuccessHook(Executor e) { if (!mset.postLoginSuccessHook.contains(e)) mset.postLoginSuccessHook.add(e); } public void registerPostLoginSuccessOneTimeHook(final Executor e) { Executor n = new Executor() { @Override public void execute() { e.execute(); unregisterPostLoginSuccessHook(this); } }; mset.postLoginSuccessHook.add(n); } public void unregisterPostLoginSuccessHook(Executor e) { mset.postLoginSuccessHook.remove(e); } public void registerPostLoginFailureHook(Executor e) { if (!mset.postLoginFailureHook.contains(e)) mset.postLoginFailureHook.add(e); } public void unregisterPostLoginFailureHook(Executor e) { mset.postLoginFailureHook.remove(e); } public void registerLoginStartHook(Executor e) { if (!mset.loginStartHook.contains(e)) mset.loginStartHook.add(e); } public void unregisterLoginStartHook(Executor e) { mset.loginStartHook.remove(e); } public void registerLoginEndHook(Executor e) { if 
(!mset.loginEndHook.contains(e)) mset.loginEndHook.add(e); } public void unregisterLoginEndHook(Executor e) { mset.loginEndHook.remove(); } public void registerPostLogoutHook(Executor e) { if (!mset.postLogoutHook.contains(e)) mset.postLogoutHook.add(e); } public void unregisterPostLogoutHook(Executor e) { mset.postLogoutHook.remove(e); } /* * (non-Javadoc) * * @see android.app.Service#onDestroy() */ @Override public void onDestroy() { super.onDestroy(); thisInstance = null; Log.d("MoSeS.SERVICE", "Service Destroyed"); } /* * (non-Javadoc) * * @see android.app.Service#onStart(android.content.Intent, int) */ @Override public void onStart(Intent intent, int startId) { super.onStart(intent, startId); Log.d("MoSeS.SERVICE", "Service Started"); } /** * Reload settings. */ public void reloadSettings() { initConfig(); } /** * sends device information to the moses server * */ private void syncDeviceInformation() { new HardwareAbstraction(this).syncDeviceInformation(); } public boolean isOnline() { ConnectivityManager cm = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo netInfo = cm.getActiveNetworkInfo(); if (netInfo != null && netInfo.isConnectedOrConnecting()) { return true; } return false; } private void firstLogin() { syncDeviceInformation(); new HardwareAbstraction(MosesService.this).getFilter(); } private void uploadFilter() { settingsFile = PreferenceManager.getDefaultSharedPreferences(this); String s = settingsFile.getString("sensor_data", "[]"); HardwareAbstraction ha = new HardwareAbstraction(this); ha.setFilter(s); } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { if (key.equals("sensor_data")) { Log.d("MoSeS.SERVICE", "Sensor filter changed to: " + sharedPreferences.getString("sensor_data", "")); uploadFilter(); } else if(key.equals("username_pref")) { Log.d("MoSeS.SERVICE", "Username changed - getting new data."); mset.username = settingsFile.getString("username_pref", ""); 
firstLogin(); }else if(key.equals("password_pref")) { Log.d("MoSeS.SERVICE", "Username changed - getting new data."); mset.password = settingsFile.getString("password_pref", ""); firstLogin(); } } }
package nars.regulation.twopoint;

import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;
import java.util.Random;
import javax.swing.JPanel;
import nars.core.Memory;
import nars.core.NAR;
import nars.core.build.Default;
import nars.entity.Task;
import nars.gui.NARSwing;
import nars.language.Term;
import nars.operator.Operation;
import nars.operator.Operator;

/**
 * Two-point regulation demo: a NAR instance learns to steer a blue dot
 * (at x) toward a red setpoint dot by executing the ^move operator, driven
 * by "good"/"bad" feedback judgments injected after every move.
 *
 * @author patrick.hammer
 */
public class drawPanel extends JPanel {

    int inc = 0;     // number of ^move executions so far
    int lastinc = 0; // inc value at the previous frame; inc != lastinc means a move just happened

    /**
     * The ^move operator: moves the dot 10 px left or right and feeds back
     * a desirability judgment — moving toward the setpoint is rewarded
     * (%1.00), moving away is punished (%0.00).
     */
    public class move extends Operator {

        public move() {
            super("^move");
        }

        @Override
        protected List<Task> execute(Operation operation, Term[] args, Memory memory) {
            if (args.length == 2) { //left, self
                inc++;
                if (args[0].toString().equals("left")) {
                    x -= 10;
                    // moving left while still right of the setpoint is good
                    if (x > setpoint) {
                        nar.addInput("<SELF --> [good]>. :|: %1.00;0.90%");
                    } else {
                        nar.addInput("<SELF --> [good]>. :|: %0.00;0.90%");
                    }
                }
                if (args[0].toString().equals("right")) {
                    x += 10;
                    // moving right while already right of the setpoint is bad
                    if (x > setpoint) {
                        nar.addInput("<SELF --> [good]>. :|: %0.00;0.90%");
                    } else {
                        nar.addInput("<SELF --> [good]>. :|: %1.00;0.90%");
                    }
                }
            }
            // no derived tasks are produced directly by the operator
            return null;
        }
    }

    NAR nar;

    /**
     * Builds the reasoner, registers ^move, seeds the goal, and starts a
     * ~33 fps Swing timer that repaints (and thereby steps the reasoner,
     * see doDrawing).
     */
    public drawPanel() {
        nar = new Default().build();
        nar.addPlugin(new move());
        //new NARSwing(nar);
        nar.addInput("<SELF --> [good]>!");
        new javax.swing.Timer(30, new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                repaint();
            }
        }).start();
    }

    int setpoint = 150; // target x coordinate (red dot)
    int x = 100;        // current x coordinate of the controlled blue dot
    int y = 10;         // fixed vertical position of both dots
    int k = 0;          // frame counter

    /**
     * Per-frame logic: injects demonstration/goal/feedback events into the
     * reasoner, advances it 100 cycles, then draws both dots.
     * NOTE(review): runs reasoner stepping inside the paint path — fine for
     * a demo, but the reasoner is effectively throttled by the repaint timer.
     */
    private void doDrawing(Graphics g) {
        int modu = 10; // currently unused
        boolean cond = (inc != lastinc); // true iff a ^move executed since last frame
        lastinc = inc;
        if (k < 1) {
            // first frame only: demonstrate the available motor operations
            //nar.addInput("move(left). :|: %0.00;0.99%");
            // nar.addInput("move(right). :|: %0.00;0.99%");
            nar.addInput("move(right)! :|:");
            nar.addInput("move(left)! :|:");
            nar.addInput("move(right)! :|:");
        }
        // reinforce "good" whenever the dot sits exactly on the setpoint
        if ((cond || k % 50 == 0) && x == setpoint) {
            nar.addInput("<SELF --> [good]>. :|: %1.00;0.90%");
        }
        if (cond) {
            System.out.println(x);
            if (cond) {
                // re-assert the standing goal after each move
                nar.addInput("<SELF --> [good]>!");
            }
            // tell the reasoner which direction the target lies in
            if (x > setpoint) {
                nar.addInput("<target --> left>. :|:");
                //nar.addInput("move(left)! :|:");
            }
            if (x < setpoint) {
                nar.addInput("<target --> right>. :|:");
                // nar.addInput("move(right)! :|:");
            }
        }
        k++;
        nar.step(100);
        Graphics2D g2d = (Graphics2D) g;
        g2d.setColor(Color.blue);
        g2d.fillOval(x, y, 10, 10); // controlled dot
        g2d.setColor(Color.red);
        g2d.fillOval(setpoint, y, 10, 10); // setpoint marker
        /*for (int i = 0; i <= 1000; i++) { Dimension size = getSize(); Insets insets = getInsets(); int w = size.width - insets.left - insets.right; int h = size.height - insets.top - insets.bottom; Random r = new Random(); int x = Math.abs(r.nextInt()) % w; int y = Math.abs(r.nextInt()) % h; g2d.drawLine(x, y, x, y); }*/
    }

    @Override
    public void paintComponent(Graphics g) {
        super.paintComponent(g);
        doDrawing(g);
    }
}
package nars.core.build; import nars.core.ConceptProcessor; import nars.core.Memory; import nars.core.NARBuilder; import nars.core.Param; import nars.core.control.SequentialMemoryCycle; import nars.entity.Concept; import nars.entity.ConceptBuilder; import nars.entity.Task; import nars.entity.TaskLink; import nars.entity.TermLink; import nars.language.Term; import nars.storage.AbstractBag; import nars.storage.Bag; /** * Default set of NAR parameters which have been classically used for development. */ public class DefaultNARBuilder extends NARBuilder implements ConceptBuilder { public int taskLinkBagLevels; /** Size of TaskLinkBag */ public int taskLinkBagSize; public int termLinkBagLevels; /** Size of TermLinkBag */ public int termLinkBagSize; /** determines maximum number of concepts */ private int conceptBagSize; /** Size of TaskBuffer */ private int taskBufferSize = 10; public DefaultNARBuilder() { super(); setConceptBagLevels(100); setConceptBagSize(1000); setTaskLinkBagLevels(100); setTaskLinkBagSize(20); setTermLinkBagLevels(100); setTermLinkBagSize(100); setTaskBufferSize(10); } @Override public Param newParam() { Param p = new Param(); p.noiseLevel.set(100); //Cycle control p.cycleMemory.set(1); p.cycleInputTasks.set(1); p.decisionThreshold.set(0.30); p.conceptCyclesToForget.set(10); p.taskCyclesToForget.set(20); p.beliefCyclesToForget.set(50); p.newTaskCyclesToForget.set(10); p.conceptBeliefsMax.set(7); p.conceptQuestionsMax.set(5); p.duration.set(5); p.shortTermMemorySize.set(15); p.contrapositionPriority.set(30); p.termLinkMaxReasoned.set(3); p.termLinkMaxMatched.set(10); p.termLinkRecordLength.set(10); //NAL9 experimental p.experimentalNarsPlus.set(false); p.internalExperience.set(false); p.abbreviationMinComplexity.set(20); p.abbreviationMinQuality.set(0.9f); return p; } @Override public ConceptProcessor newConceptProcessor(Param p, ConceptBuilder c) { return new SequentialMemoryCycle(newConceptBag(p), c); } @Override public ConceptBuilder 
getConceptBuilder() { return this; } @Override public Concept newConcept(Term t, Memory m) { AbstractBag<TaskLink> taskLinks = new Bag<>(getTaskLinkBagLevels(), getTaskLinkBagSize(), m.param.taskCyclesToForget); AbstractBag<TermLink> termLinks = new Bag<>(getTermLinkBagLevels(), getTermLinkBagSize(), m.param.beliefCyclesToForget); return new Concept(t, taskLinks, termLinks, m); } protected AbstractBag<Concept> newConceptBag(Param p) { return new Bag(getConceptBagLevels(), getConceptBagSize(), p.conceptCyclesToForget); } @Override public AbstractBag<Task> newNovelTaskBag(Param p) { return new Bag<>(getConceptBagLevels(), getTaskBufferSize(), p.newTaskCyclesToForget); } public int getConceptBagSize() { return conceptBagSize; } public DefaultNARBuilder setConceptBagSize(int conceptBagSize) { this.conceptBagSize = conceptBagSize; return this; } /** Level granularity in Bag, usually 100 (two digits) */ private int conceptBagLevels; public int getConceptBagLevels() { return conceptBagLevels; } public DefaultNARBuilder setConceptBagLevels(int bagLevels) { this.conceptBagLevels = bagLevels; return this; } /** * @return the taskLinkBagLevels */ public int getTaskLinkBagLevels() { return taskLinkBagLevels; } public DefaultNARBuilder setTaskLinkBagLevels(int taskLinkBagLevels) { this.taskLinkBagLevels = taskLinkBagLevels; return this; } public void setTaskBufferSize(int taskBufferSize) { this.taskBufferSize = taskBufferSize; } public int getTaskBufferSize() { return taskBufferSize; } public int getTaskLinkBagSize() { return taskLinkBagSize; } public DefaultNARBuilder setTaskLinkBagSize(int taskLinkBagSize) { this.taskLinkBagSize = taskLinkBagSize; return this; } public int getTermLinkBagLevels() { return termLinkBagLevels; } public DefaultNARBuilder setTermLinkBagLevels(int termLinkBagLevels) { this.termLinkBagLevels = termLinkBagLevels; return this; } public int getTermLinkBagSize() { return termLinkBagSize; } public DefaultNARBuilder setTermLinkBagSize(int termLinkBagSize) 
{ this.termLinkBagSize = termLinkBagSize; return this; } public static class CommandLineNARBuilder extends DefaultNARBuilder { private final Param param; @Override public Param newParam() { return param; } public CommandLineNARBuilder(String[] args) { super(); param = super.newParam(); for (int i = 0; i < args.length; i++) { String arg = args[i]; if ("--silence".equals(arg)) { arg = args[++i]; int sl = Integer.parseInt(arg); param.noiseLevel.set(100-sl); } if ("--noise".equals(arg)) { arg = args[++i]; int sl = Integer.parseInt(arg); param.noiseLevel.set(sl); } } } /** * Decode the silence level * * @param param Given argument * @return Whether the argument is not the silence level */ public static boolean isReallyFile(String param) { return !"--silence".equals(param); } } // /** Concept decay rate in ConceptBag, in [1, 99]. */ // private static final int CONCEPT_CYCLES_TO_FORGET = 10; // /** TaskLink decay rate in TaskLinkBag, in [1, 99]. */ // private static final int TASK_LINK_CYCLES_TO_FORGET = 20; // /** TermLink decay rate in TermLinkBag, in [1, 99]. */ // private static final int TERM_LINK_CYCLES_TO_FORGET = 50; // /** Task decay rate in TaskBuffer, in [1, 99]. */ // private static final int NEW_TASK_FORGETTING_CYCLE = 10; }
package nars.meta;

import nars.link.TermLink;
import nars.nal.nal1.Inheritance;
import nars.nal.nal4.Product;
import nars.premise.Premise;
import nars.process.ConceptProcess;
import nars.task.Sentence;
import nars.task.Task;
import nars.term.Atom;
import nars.term.Compound;
import nars.term.Term;
import nars.term.Variable;
import nars.term.transform.CompoundTransform;

import java.util.HashSet;
import java.util.Set;

/**
 * A rule which produces a Task
 * contains: preconditions, predicates, postconditions, post-evaluations and metainfo
 */
public class TaskRule extends Rule<Premise, Task> {

    /** the terms to match against the two input premises */
    private final Term[] preconditions;

    /** postconditions paired with their meta terms */
    private final PostCondition[] postconditions;

    /**
     * Builds a rule from a premise product and a result product.
     * The result product alternates term_1 meta_1, term_2 meta_2, ...;
     * each (term, meta) pair becomes one PostCondition.
     *
     * @throws RuntimeException if a postcondition term has no meta term
     */
    public TaskRule(Product premises, Product result) {
        super(premises, result);

        // 1. construct precondition term array
        // FIX: dropped the unused local "precon" that aliased this assignment
        this.preconditions = premises.terms();

        Term[] postcons = result.terms();

        // normalize before pairing up the postconditions
        this.normalizeDestructively();

        postconditions = new PostCondition[postcons.length / 2]; //term_1 meta_1 ,..., term_2 meta_2 ...
        int k = 0;
        for (int i = 0; i < postcons.length; ) {
            Term t = postcons[i++];
            if (i >= postcons.length)
                throw new RuntimeException("invalid rule: missing meta term for postcondition involving " + t);
            postconditions[k++] = new PostCondition(t, ((Product) postcons[i++]).terms());
        }
    }

    /** Delegates to the superclass; kept so subclass init order stays explicit. */
    @Override
    protected void init(Term... term) {
        super.init(term);
    }

    /** @return the premise product (first subterm) */
    public Product premise() {
        return (Product) term(0);
    }

    /** @return the result product (second subterm) */
    public Product result() {
        return (Product) term(1);
    }

    /** @return number of premise terms */
    public int premiseCount() {
        return premise().length();
    }

    /** Atoms whose enclosing inheritance must not be variable-normalized. */
    public static final Set<Atom> reservedPostconditions = new HashSet<>(6);

    static {
        reservedPostconditions.add(Atom.the("Truth"));
        reservedPostconditions.add(Atom.the("Stamp"));
        reservedPostconditions.add(Atom.the("Occurrence"));
        reservedPostconditions.add(Atom.the("Desire"));
        reservedPostconditions.add(Atom.the("Order"));
        reservedPostconditions.add(Atom.the("Info"));
    }

    /**
     * Rewrites capitalized atoms (rule pattern variables like "A", "B")
     * into Variables, leaving reserved postcondition predicates untouched.
     */
    public static class TaskRuleNormalization implements CompoundTransform<Compound, Term> {

        @Override
        public boolean test(Term term) {
            if (term instanceof Atom) {
                String name = term.toString();
                return (Character.isUpperCase(name.charAt(0)));
            }
            return false;
        }

        @Override
        public Term apply(Compound containingCompound, Term v, int depth) {
            //do not alter postconditions
            if ((containingCompound instanceof Inheritance)
                    && reservedPostconditions.contains(((Inheritance) containingCompound).getPredicate()))
                return v;

            return new Variable("%" + v.toString());
        }
    }

    final static TaskRuleNormalization taskRuleNormalization = new TaskRuleNormalization();

    /** Applies the pattern-variable normalization in place. */
    @Override
    public TaskRule normalizeDestructively() {
        this.transform(taskRuleNormalization);
        this.invalidate();
        return this;
    }

    /** Already normalized at construction time; nothing further to do. */
    public TaskRule normalize() {
        return this;
    }

    /**
     * Applies every postcondition of this rule to the given task/belief pair.
     */
    public void forward(Task task, Sentence belief, Term beliefTerm, ConceptProcess nal) {
        //if preconditions are met:
        for (PostCondition p : postconditions)
            p.apply(preconditions, task, belief, beliefTerm, nal);
    }
}
package atHome.city;

import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import javax.imageio.ImageIO;

import restaurant.RoleOrder;
import restaurant.gui.FoodIcon;
import city.PersonAgent;
import city.gui.Gui;
import city.gui.SimCityGui;
import city.roles.AtHomeRole;

/**
 * Animation GUI for a person at home (house or apartment): walks the person
 * sprite between the fridge, grill, counter and table, and draws any food
 * being cooked or carried.
 */
public class AtHomeGui implements Gui {

	private PersonAgent agent = null;
	private AtHomeRole role = null;
	private boolean isPresent = false;
	// shared sprite for all AtHomeGui instances; reloaded per constructor call
	private static BufferedImage personImg = null;

	private int xPos, yPos;
	private int xDestination, yDestination;
	private int xHomePosition = 20;
	private int yHomePosition = 30;
	private int xFRIDGE_POSITION = 0;
	private int yFRIDGE_POSITION = 0;
	private int xGRILL_POSITION = 0;
	private int yGRILL_POSITION = 0;
	private int xTABLE_POS = 57;
	private int yTABLE_POS = 70;
	private int xKITCHEN_COUNTER_POSITION = 0;
	private int yKITCHEN_COUNTER_POSITION = 0;

	static final int yTABLE_OFFSET = 300;
	static final int xKITCHEN_OFFSET = 217;
	static final int xFOOD_OFFSET = 10;
	static final int yFOOD_OFFSET = 4;
	static final int yKITCHEN_COUNTER_OFFSET = 30;
	static final int yGRILL_RIGHT_OFFSET = 30;
	static final int xGRILL_RIGHT_OFFSET = 52;
	static final int yFIDGE_OFFSET = 15;
	static final int xFIDGE_OFFSET = 100;
	static final int yAPT_OFFSET = 310;
	static final int xAPT_OFFSET = 30;
	static final int HOUSE_TABLEPOS = 150;
	static final int COOKING_OFFSET = 20;
	static final int KITCHEN_OFFSET = 15;

	// synchronized list: mutated by role thread, read by the paint thread
	List<MyFood> foods = Collections.synchronizedList(new ArrayList<MyFood>());

	private enum Command {noCommand, GoHome, GoToFridge, GoToGrill, GoToCounter, GoToRestPost, EatFood, LeaveHome, GetFoodFromCounter, GetFoodFromGrill};
	private enum FoodState{PutFoodOnGrill, PutFoodOnCounter, FoodOnGrill, FoodOnCounter, PickUpFromGrill, PickUpFromCounter, PutOnPickUpTable, OnPickUpTable, WaiterPickedUp};

	Command command = Command.noCommand;

	/**
	 * Computes all furniture coordinates from the agent's home type:
	 * apartments are laid out on a 2x4 grid (xKITCHEN_OFFSET apart, bottom
	 * row shifted by yAPT_OFFSET); a house uses fixed positions.
	 */
	public AtHomeGui(PersonAgent c, AtHomeRole r) {
		try {
			// FIX: the StringBuilder added nothing; load the path directly.
			personImg = ImageIO.read(new File("imgs/customer_v1.png"));
		} catch (IOException e) {
			// best effort: a missing sprite leaves personImg null;
			// Graphics.drawImage does nothing for a null image
		}
		this.agent = c;
		this.role = r;

		if (agent.myHome instanceof Apartment) {
			int aptnum = ((Apartment) agent.myHome).renters.indexOf(agent);
			//System.out.println("MY APT NUMBER IS: " + aptnum);
			if (aptnum < 4) { //top 4 apartments
				xKITCHEN_COUNTER_POSITION = xHomePosition + aptnum * xKITCHEN_OFFSET;
				yKITCHEN_COUNTER_POSITION = yHomePosition - yKITCHEN_COUNTER_OFFSET + KITCHEN_OFFSET;
				xFRIDGE_POSITION = xHomePosition + xFIDGE_OFFSET + aptnum * xKITCHEN_OFFSET;
				yFRIDGE_POSITION = yHomePosition + yFIDGE_OFFSET - KITCHEN_OFFSET;
				xGRILL_POSITION = xHomePosition + xGRILL_RIGHT_OFFSET + aptnum * xKITCHEN_OFFSET;
				yGRILL_POSITION = yHomePosition - yGRILL_RIGHT_OFFSET + KITCHEN_OFFSET;
				xTABLE_POS += xKITCHEN_OFFSET * aptnum;
				xHomePosition = xHomePosition + aptnum * xKITCHEN_OFFSET;
				//xDestination = xHomePosition;
				//yDestination = yHomePosition;
			} else { //bottom 4 apartments
				xKITCHEN_COUNTER_POSITION = xAPT_OFFSET + xHomePosition + (aptnum - 4) * xKITCHEN_OFFSET;
				yKITCHEN_COUNTER_POSITION = yHomePosition - yKITCHEN_COUNTER_OFFSET + yAPT_OFFSET + KITCHEN_OFFSET;
				xFRIDGE_POSITION = xAPT_OFFSET + xHomePosition + xFIDGE_OFFSET + (aptnum - 4) * xKITCHEN_OFFSET;
				yFRIDGE_POSITION = yHomePosition + yFIDGE_OFFSET + yAPT_OFFSET - KITCHEN_OFFSET;
				xGRILL_POSITION = xAPT_OFFSET + xHomePosition + xGRILL_RIGHT_OFFSET + (aptnum - 4) * xKITCHEN_OFFSET;
				yGRILL_POSITION = yHomePosition - yGRILL_RIGHT_OFFSET + yAPT_OFFSET + KITCHEN_OFFSET;
				xTABLE_POS = xAPT_OFFSET * 2 + xKITCHEN_OFFSET * (aptnum - 4);
				yTABLE_POS += yTABLE_OFFSET;
				xHomePosition = xHomePosition + (aptnum - 4) * xKITCHEN_OFFSET;
				yHomePosition = yHomePosition + yAPT_OFFSET;
				//xDestination = xHomePosition;
				//yDestination = yHomePosition;
			}
		}
		if (agent.myHome instanceof Home) {
			xHomePosition = 50;
			yHomePosition = 50;
			xKITCHEN_COUNTER_POSITION = xHomePosition;
			yKITCHEN_COUNTER_POSITION = yHomePosition - yKITCHEN_COUNTER_OFFSET;
			xFRIDGE_POSITION = xHomePosition + xFIDGE_OFFSET;
			yFRIDGE_POSITION = yHomePosition + yFIDGE_OFFSET;
			xGRILL_POSITION = xHomePosition + xGRILL_RIGHT_OFFSET;
			yGRILL_POSITION = yHomePosition - yGRILL_RIGHT_OFFSET;
			xTABLE_POS = HOUSE_TABLEPOS;
			yTABLE_POS = HOUSE_TABLEPOS;
		}
		xPos = 0;
		yPos = 200;
	}

	/**
	 * Animation tick: steps the sprite one pixel toward its destination
	 * (y-then-x in a house, x-then-y in an apartment; LeaveHome always moves
	 * y first), then fires the state transition for the command that just
	 * completed.
	 *
	 * FIX: every decrement statement below had lost its "--;" (bare
	 * "xPos"/"yPos" followed by the next statement), which does not compile;
	 * restored the symmetric ++/-- stepping.
	 */
	@Override
	public void updatePosition() {
		//moving within house
		if (agent.myHome instanceof Home) {
			if (command == Command.LeaveHome) {
				if (yPos < yDestination)
					yPos++;
				else if (yPos > yDestination)
					yPos--;
				if (yPos == yDestination) {
					if (xPos < xDestination)
						xPos++;
					else if (xPos > xDestination)
						xPos--;
				}
			} else {
				if (yPos < yDestination)
					yPos++;
				else if (yPos > yDestination)
					yPos--;
				if (xPos < xDestination)
					xPos++;
				else if (xPos > xDestination)
					xPos--;
			}
		} else { //moving within apt
			if (command == Command.LeaveHome) {
				if (yPos < yDestination)
					yPos++;
				else if (yPos > yDestination)
					yPos--;
				if (yPos == yDestination) {
					if (xPos < xDestination)
						xPos++;
					else if (xPos > xDestination)
						xPos--;
				}
			} else {
				// x axis first; only when x is settled does y move
				if (xPos < xDestination)
					xPos++;
				else if (xPos > xDestination)
					xPos--;
				else if (yPos < yDestination)
					yPos++;
				else if (yPos > yDestination)
					yPos--;
			}
		}

		if (xPos == xDestination && yPos == yDestination) {
			if (command == Command.GoHome || command == Command.GoToFridge) {
				command = Command.noCommand;
				role.msgAnimationFinshed();
				xDestination = xHomePosition;
				yDestination = yHomePosition;
			} else if (command == Command.GoToGrill || command == Command.GoToCounter) {
				command = Command.noCommand;
				role.msgAnimationFinshed();
				xDestination = xHomePosition;
				yDestination = yHomePosition;
				// place carried food on the grill/counter it was headed to
				for (MyFood f : foods) {
					if (f.food != null) {
						if (f.state == FoodState.PutFoodOnGrill) {
							f.state = FoodState.FoodOnGrill;
							f.CookingPoint = new Point(xGRILL_POSITION + COOKING_OFFSET, yGRILL_POSITION + 0 * COOKING_OFFSET);
						} else if (f.state == FoodState.PutFoodOnCounter) {
							f.state = FoodState.FoodOnCounter;
							f.CookingPoint = new Point(xKITCHEN_COUNTER_POSITION + COOKING_OFFSET, yKITCHEN_COUNTER_POSITION + 0 * COOKING_OFFSET);
						}
					}
				}
			// FIX: was "command == command.GetFoodFromGrill" — enum constants
			// accessed through the instance variable; use the type name.
			} else if (command == Command.GetFoodFromGrill || command == Command.GetFoodFromCounter) {
				command = Command.noCommand;
				role.msgAnimationFinshed();
				for (MyFood f : foods) {
					if (f.state == FoodState.FoodOnGrill) {
						f.state = FoodState.PickUpFromGrill;
					} else if (f.state == FoodState.FoodOnCounter) {
						f.state = FoodState.PickUpFromCounter;
					}
				}
			} else if (command == Command.EatFood || command == Command.LeaveHome) {
				command = Command.noCommand;
				role.msgAnimationFinshed();
			}
		}
	}

	/** Sends the sprite walking to the home/idle position. */
	public void doEnterHome() {
		command = Command.GoHome;
		xDestination = xHomePosition;
		yDestination = yHomePosition;
	}

	@Override
	public boolean isPresent() {
		return isPresent;
	}

	public void setPresent(boolean p) {
		isPresent = p;
	}

	/** Draws the person sprite and any food items. */
	public void draw(Graphics2D g) {
		g.drawImage(personImg, xPos, yPos, null);
		drawFood(g);
	}

	/**
	 * Draws each food either relative to the person (while carried) or at
	 * its fixed cooking point (while on the grill/counter).
	 */
	public void drawFood(Graphics2D g) {
		synchronized (foods) {
			for (MyFood f : foods) {
				if (f.food != null) {
					if (f.state == FoodState.PutFoodOnGrill || f.state == FoodState.PutFoodOnCounter
							|| f.state == FoodState.PickUpFromCounter || f.state == FoodState.PickUpFromGrill) {
						g.drawImage(f.food.iconImg, xPos + f.point.x, yPos + f.point.y, null);
					} else if (f.state == FoodState.FoodOnGrill || f.state == FoodState.FoodOnCounter) {
						g.drawImage(f.food.iconImg, f.CookingPoint.x, f.CookingPoint.y, null);
					}
				}
			}
		}
	}

	/** Walks the sprite off-screen to the left. */
	public void DoLeaveHome() {
		command = Command.LeaveHome;
		xDestination = -20;
		yDestination = 200;
	}

	public void DoGoToFridge() {
		xDestination = xFRIDGE_POSITION;
		yDestination = yFRIDGE_POSITION;
		command = Command.GoToFridge;
	}

	/**
	 * Grab food from fridge (already at fridge):
	 * steak/chicken go on the grill, everything else on the counter.
	 */
	public void DoCookFood(String choice) {
		if (choice.equalsIgnoreCase("steak") || choice.equalsIgnoreCase("chicken")) {
			foods.add(new MyFood(new FoodIcon(choice + "g"), new Point(xFOOD_OFFSET, yFOOD_OFFSET), choice));
			xDestination = xGRILL_POSITION;
			yDestination = yGRILL_POSITION;
			command = Command.GoToGrill;
		} else {
			foods.add(new MyFood(new FoodIcon(choice + "g"), new Point(xFOOD_OFFSET, yFOOD_OFFSET), choice));
			xDestination = xKITCHEN_COUNTER_POSITION;
			yDestination = yKITCHEN_COUNTER_POSITION;
			command = Command.GoToCounter;
		}
	}

	public void SitDownAndEatFood() {
		command = Command.EatFood;
		xDestination = xTABLE_POS;
		yDestination = yTABLE_POS;
	}

	/**
	 * Walks to whichever surface currently holds cooked food to pick it up.
	 * NOTE(review): iterates foods without the synchronized block used in
	 * drawFood — confirm whether this can race with the paint thread.
	 */
	public void PlateFood() {
		for (MyFood f : foods) {
			if (f.food != null) {
				if (f.state == FoodState.FoodOnGrill) {
					command = Command.GetFoodFromGrill;
					xDestination = xGRILL_POSITION;
					yDestination = yGRILL_POSITION;
				}
				if (f.state == FoodState.FoodOnCounter) {
					command = Command.GetFoodFromCounter;
					xDestination = xKITCHEN_COUNTER_POSITION;
					yDestination = yKITCHEN_COUNTER_POSITION;
				}
			}
		}
	}

	public void DoneEating() {
		foods.clear();
	}

	/** A food item plus its carry offset, cooking location and state. */
	class MyFood {
		FoodIcon food;
		Point point;        // offset relative to the person while carried
		Point CookingPoint; // absolute position while on grill/counter
		FoodState state;
		String choice;

		MyFood(FoodIcon f, Point p, String c) {
			this.food = f;
			this.point = p;
			this.choice = c;
			if (choice.equalsIgnoreCase("steak") || choice.equalsIgnoreCase("chicken")) {
				state = FoodState.PutFoodOnGrill;
			} else {
				state = FoodState.PutFoodOnCounter;
			}
		}
	}
}
/* * EDACCView.java */ package edacc; import edacc.model.DatabaseConnector; import edacc.model.NoConnectionToDBException; import java.awt.Component; import java.sql.SQLException; import java.util.Observable; import java.util.logging.Level; import java.util.logging.Logger; import org.jdesktop.application.Action; import org.jdesktop.application.ResourceMap; import org.jdesktop.application.SingleFrameApplication; import org.jdesktop.application.FrameView; import org.jdesktop.application.TaskMonitor; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.Observer; import javax.swing.Timer; import javax.swing.Icon; import javax.swing.JDialog; import javax.swing.JFrame; import javax.swing.JOptionPane; import javax.swing.SwingUtilities; import edacc.manageDB.Util; /** * The application's main frame. */ public class EDACCView extends FrameView implements Observer { private EDACCExperimentMode experimentMode; private EDACCManageDBMode manageDBMode; private EDACCNoMode noMode; private Component mode; private javax.swing.GroupLayout mainPanelLayout; public EDACCView(SingleFrameApplication app) { super(app); initComponents(); DatabaseConnector.getInstance().addObserver(this); // status bar initialization - message timeout, idle icon and busy animation, etc ResourceMap resourceMap = getResourceMap(); int messageTimeout = resourceMap.getInteger("StatusBar.messageTimeout"); messageTimer = new Timer(messageTimeout, new ActionListener() { public void actionPerformed(ActionEvent e) { statusMessageLabel.setText(""); } }); messageTimer.setRepeats(false); int busyAnimationRate = resourceMap.getInteger("StatusBar.busyAnimationRate"); for (int i = 0; i < busyIcons.length; i++) { busyIcons[i] = resourceMap.getIcon("StatusBar.busyIcons[" + i + "]"); } busyIconTimer = new Timer(busyAnimationRate, new ActionListener() { public void actionPerformed(ActionEvent e) { busyIconIndex = (busyIconIndex + 1) % busyIcons.length; 
statusAnimationLabel.setIcon(busyIcons[busyIconIndex]); } }); idleIcon = resourceMap.getIcon("StatusBar.idleIcon"); statusAnimationLabel.setIcon(idleIcon); progressBar.setVisible(false); // connecting action tasks to status bar via TaskMonitor TaskMonitor taskMonitor = new TaskMonitor(getApplication().getContext()); taskMonitor.addPropertyChangeListener(new java.beans.PropertyChangeListener() { public void propertyChange(java.beans.PropertyChangeEvent evt) { String propertyName = evt.getPropertyName(); if ("started".equals(propertyName)) { if (!busyIconTimer.isRunning()) { statusAnimationLabel.setIcon(busyIcons[0]); busyIconIndex = 0; busyIconTimer.start(); } progressBar.setVisible(true); progressBar.setIndeterminate(true); } else if ("done".equals(propertyName)) { busyIconTimer.stop(); statusAnimationLabel.setIcon(idleIcon); progressBar.setVisible(false); progressBar.setValue(0); } else if ("message".equals(propertyName)) { String text = (String) (evt.getNewValue()); statusMessageLabel.setText((text == null) ? 
"" : text); messageTimer.restart(); } else if ("progress".equals(propertyName)) { int value = (Integer) (evt.getNewValue()); progressBar.setVisible(true); progressBar.setIndeterminate(false); progressBar.setValue(value); } } }); experimentMode = new EDACCExperimentMode(); manageDBMode = new EDACCManageDBMode(); noMode = new EDACCNoMode(); mainPanelLayout = new javax.swing.GroupLayout(mainPanel); mainPanel.setLayout(mainPanelLayout); mainPanelLayout.setHorizontalGroup( mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING).addComponent(noMode, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)); mainPanelLayout.setVerticalGroup( mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING).addComponent(noMode, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)); mode = noMode; updateConnectionStateView(); SwingUtilities.invokeLater(new Runnable() { public void run() { btnConnectToDB(); } }); } private void createDatabaseErrorMessage(SQLException e) { javax.swing.JOptionPane.showMessageDialog(null, "There was an error while communicating with the database: " +e, "Connection error", javax.swing.JOptionPane.ERROR_MESSAGE); } @Action public void showAboutBox() { if (aboutBox == null) { JFrame mainFrame = EDACCApp.getApplication().getMainFrame(); aboutBox = new EDACCAboutBox(mainFrame); aboutBox.setLocationRelativeTo(mainFrame); } EDACCApp.getApplication().show(aboutBox); } /** This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. The content of this method is * always regenerated by the Form Editor. 
*/ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { mainPanel = new javax.swing.JPanel(); menuBar = new javax.swing.JMenuBar(); javax.swing.JMenu fileMenu = new javax.swing.JMenu(); connectToDBMenuItem = new javax.swing.JMenuItem(); disconnectMenuItem = new javax.swing.JMenuItem(); generateDBMenuItem = new javax.swing.JMenuItem(); javax.swing.JMenuItem exitMenuItem = new javax.swing.JMenuItem(); gridMenu = new javax.swing.JMenu(); settingsMenuItem = new javax.swing.JMenuItem(); modusMenu = new javax.swing.JMenu(); manageDBModeMenuItem = new javax.swing.JRadioButtonMenuItem(); manageExperimentModeMenuItem = new javax.swing.JRadioButtonMenuItem(); javax.swing.JMenu helpMenu = new javax.swing.JMenu(); javax.swing.JMenuItem aboutMenuItem = new javax.swing.JMenuItem(); jMenuItem1 = new javax.swing.JMenuItem(); statusPanel = new javax.swing.JPanel(); javax.swing.JSeparator statusPanelSeparator = new javax.swing.JSeparator(); statusMessageLabel = new javax.swing.JLabel(); statusAnimationLabel = new javax.swing.JLabel(); progressBar = new javax.swing.JProgressBar(); mainPanel.setMinimumSize(new java.awt.Dimension(800, 600)); mainPanel.setName("mainPanel"); // NOI18N org.jdesktop.layout.GroupLayout mainPanelLayout = new org.jdesktop.layout.GroupLayout(mainPanel); mainPanel.setLayout(mainPanelLayout); mainPanelLayout.setHorizontalGroup( mainPanelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(0, 1006, Short.MAX_VALUE) ); mainPanelLayout.setVerticalGroup( mainPanelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(0, 630, Short.MAX_VALUE) ); menuBar.setAutoscrolls(true); menuBar.setName("menuBar"); // NOI18N org.jdesktop.application.ResourceMap resourceMap = org.jdesktop.application.Application.getInstance(edacc.EDACCApp.class).getContext().getResourceMap(EDACCView.class); 
fileMenu.setText(resourceMap.getString("fileMenu.text")); // NOI18N fileMenu.setName("fileMenu"); // NOI18N javax.swing.ActionMap actionMap = org.jdesktop.application.Application.getInstance(edacc.EDACCApp.class).getContext().getActionMap(EDACCView.class, this); connectToDBMenuItem.setAction(actionMap.get("btnConnectToDB")); // NOI18N connectToDBMenuItem.setText(resourceMap.getString("connectToDBMenuItem.text")); // NOI18N connectToDBMenuItem.setName("connectToDBMenuItem"); // NOI18N fileMenu.add(connectToDBMenuItem); disconnectMenuItem.setAction(actionMap.get("btnDisconnect")); // NOI18N disconnectMenuItem.setText(resourceMap.getString("disconnectMenuItem.text")); // NOI18N disconnectMenuItem.setName("disconnectMenuItem"); // NOI18N fileMenu.add(disconnectMenuItem); generateDBMenuItem.setAction(actionMap.get("btnGenerateTables")); // NOI18N generateDBMenuItem.setText(resourceMap.getString("generateDBMenuItem.text")); // NOI18N generateDBMenuItem.setName("generateDBMenuItem"); // NOI18N fileMenu.add(generateDBMenuItem); exitMenuItem.setAction(actionMap.get("quit")); // NOI18N exitMenuItem.setName("exitMenuItem"); // NOI18N fileMenu.add(exitMenuItem); menuBar.add(fileMenu); gridMenu.setAction(actionMap.get("btnGridSettings")); // NOI18N gridMenu.setText(resourceMap.getString("gridMenu.text")); // NOI18N gridMenu.setName("gridMenu"); // NOI18N settingsMenuItem.setAction(actionMap.get("btnGridSettings")); // NOI18N settingsMenuItem.setText(resourceMap.getString("settingsMenuItem.text")); // NOI18N settingsMenuItem.setName("settingsMenuItem"); // NOI18N gridMenu.add(settingsMenuItem); menuBar.add(gridMenu); modusMenu.setText(resourceMap.getString("modusMenu.text")); // NOI18N modusMenu.setName("modusMenu"); // NOI18N manageDBModeMenuItem.setAction(actionMap.get("manageDBMode")); // NOI18N manageDBModeMenuItem.setText(resourceMap.getString("manageDBModeMenuItem.text")); // NOI18N manageDBModeMenuItem.setName("manageDBModeMenuItem"); // NOI18N 
modusMenu.add(manageDBModeMenuItem); manageExperimentModeMenuItem.setAction(actionMap.get("manageExperimentMode")); // NOI18N manageExperimentModeMenuItem.setText(resourceMap.getString("manageExperimentModeMenuItem.text")); // NOI18N manageExperimentModeMenuItem.setName("manageExperimentModeMenuItem"); // NOI18N modusMenu.add(manageExperimentModeMenuItem); menuBar.add(modusMenu); helpMenu.setText(resourceMap.getString("helpMenu.text")); // NOI18N helpMenu.setName("helpMenu"); // NOI18N aboutMenuItem.setAction(actionMap.get("showAboutBox")); // NOI18N aboutMenuItem.setName("aboutMenuItem"); // NOI18N helpMenu.add(aboutMenuItem); jMenuItem1.setText(resourceMap.getString("jMenuItem1.text")); // NOI18N jMenuItem1.setName("jMenuItem1"); // NOI18N helpMenu.add(jMenuItem1); menuBar.add(helpMenu); statusPanel.setName("statusPanel"); // NOI18N statusPanelSeparator.setName("statusPanelSeparator"); // NOI18N statusMessageLabel.setName("statusMessageLabel"); // NOI18N statusAnimationLabel.setHorizontalAlignment(javax.swing.SwingConstants.LEFT); statusAnimationLabel.setName("statusAnimationLabel"); // NOI18N progressBar.setName("progressBar"); // NOI18N org.jdesktop.layout.GroupLayout statusPanelLayout = new org.jdesktop.layout.GroupLayout(statusPanel); statusPanel.setLayout(statusPanelLayout); statusPanelLayout.setHorizontalGroup( statusPanelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(statusPanelSeparator, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 1006, Short.MAX_VALUE) .add(statusPanelLayout.createSequentialGroup() .addContainerGap() .add(statusMessageLabel) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 836, Short.MAX_VALUE) .add(progressBar, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED) .add(statusAnimationLabel) .addContainerGap()) ); statusPanelLayout.setVerticalGroup( 
statusPanelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(statusPanelLayout.createSequentialGroup() .add(statusPanelSeparator, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 2, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .add(statusPanelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE) .add(statusMessageLabel) .add(statusAnimationLabel) .add(progressBar, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) .add(3, 3, 3)) ); setComponent(mainPanel); setMenuBar(menuBar); setStatusBar(statusPanel); }// </editor-fold>//GEN-END:initComponents @Action public void btnConnectToDB() { if (databaseSettings == null) { JFrame mainFrame = EDACCApp.getApplication().getMainFrame(); databaseSettings = new EDACCDatabaseSettingsView(mainFrame, true); databaseSettings.setLocationRelativeTo(mainFrame); } EDACCApp.getApplication().show(databaseSettings); manageDBMode(); } @Action public void btnDisconnect() { try { DatabaseConnector.getInstance().disconnect(); } catch (SQLException ex) { JOptionPane.showMessageDialog(EDACCApp.getApplication().getMainFrame(), "An error occured while closing the database connection: \n" + ex.getMessage(), "Couldn't close database connection", JOptionPane.ERROR_MESSAGE); } finally { noMode(); } } @Action public void btnGenerateTables() { if (JOptionPane.showConfirmDialog(mode, "This will destroy the EDACC tables of your DB an create new ones. Do you wish to continue?", "Warning!", JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) { try { // User clicked on "Yes" DatabaseConnector.getInstance().createDBSchema(); } catch (NoConnectionToDBException ex) { JOptionPane.showMessageDialog(mode, "Couldn't generate the EDACC tables: No connection to database. 
Please connect to a database first.", "Error!", JOptionPane.ERROR_MESSAGE); } catch (SQLException ex) { JOptionPane.showMessageDialog(mode, "An error occured while trying to generate the EDACC tables: " + ex.getMessage(), "Error!", JOptionPane.ERROR_MESSAGE); } finally { noMode(); } } } public void noMode() { manageExperimentModeMenuItem.setSelected(false); manageDBModeMenuItem.setSelected(false); mainPanelLayout.replace(mode, noMode); mode = noMode; } @Action public void manageDBMode() { /*if (manageDBModeMenuItem.isSelected()) { noMode(); return; }*/ if (manageExperimentModeMenuItem.isSelected()) { if (experimentMode.hasUnsavedChanges()) { if (JOptionPane.showConfirmDialog(mode, "Any unsaved changes will be lost, are you sure you want to switch to Manage DB mode?", "Warning!", JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) { Util.clearCaches(); } else { manageDBModeMenuItem.setSelected(false); return; } } experimentMode.expController.unloadExperiment(); } try { manageDBMode.initialize(); mainPanelLayout.replace(mode, manageDBMode); mode = manageDBMode; manageDBModeMenuItem.setSelected(true); manageExperimentModeMenuItem.setSelected(false); } catch (NoConnectionToDBException ex) { JOptionPane.showMessageDialog(this.getComponent(), "You have to connect to the database before switching modes", "No database connection", JOptionPane.ERROR_MESSAGE); noMode(); } catch (SQLException ex) { createDatabaseErrorMessage(ex); noMode(); } } @Action public void manageExperimentMode() { /*if (manageExperimentModeMenuItem.isSelected()) { noMode(); return; }*/ if (manageDBModeMenuItem.isSelected()) { if (manageDBMode.unsavedChanges) { if (JOptionPane.showConfirmDialog(mode, "Any unsaved changes will be lost, are you sure you want to switch to experiment mode?", "Warning!", JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) { Util.clearCaches(); } else { manageExperimentModeMenuItem.setSelected(false); return; } } } 
try { experimentMode.initialize(); mainPanelLayout.replace(mode, experimentMode); mode = experimentMode; manageExperimentModeMenuItem.setSelected(true); manageDBModeMenuItem.setSelected(false); } catch (NoConnectionToDBException ex) { JOptionPane.showMessageDialog(this.getComponent(), "You have to connect to the database before switching modes", "No database connection", JOptionPane.ERROR_MESSAGE); noMode(); } catch (SQLException ex) { createDatabaseErrorMessage(ex); noMode(); } } @Action public void btnGridSettings() { if (gridSettings == null) { JFrame mainFrame = EDACCApp.getApplication().getMainFrame(); gridSettings = new EDACCGridSettingsView(mainFrame, true); gridSettings.setLocationRelativeTo(mainFrame); } try { gridSettings.loadSettings(); EDACCApp.getApplication().show(gridSettings); } catch (NoConnectionToDBException e) { JOptionPane.showMessageDialog(this.getComponent(), "Couldn't load settings. No connection to database", "No database connection", JOptionPane.ERROR_MESSAGE); } catch (SQLException e) { JOptionPane.showMessageDialog(this.getComponent(), "Error while loading settings: \n" + e.getMessage(), "Error loading settings", JOptionPane.ERROR_MESSAGE); } } // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JMenuItem connectToDBMenuItem; private javax.swing.JMenuItem disconnectMenuItem; private javax.swing.JMenuItem generateDBMenuItem; private javax.swing.JMenu gridMenu; private javax.swing.JMenuItem jMenuItem1; private javax.swing.JPanel mainPanel; private javax.swing.JRadioButtonMenuItem manageDBModeMenuItem; private javax.swing.JRadioButtonMenuItem manageExperimentModeMenuItem; private javax.swing.JMenuBar menuBar; private javax.swing.JMenu modusMenu; private javax.swing.JProgressBar progressBar; private javax.swing.JMenuItem settingsMenuItem; private javax.swing.JLabel statusAnimationLabel; private javax.swing.JLabel statusMessageLabel; private javax.swing.JPanel statusPanel; // End of variables 
declaration//GEN-END:variables private final Timer messageTimer; private final Timer busyIconTimer; private final Icon idleIcon; private final Icon[] busyIcons = new Icon[15]; private int busyIconIndex = 0; private JDialog aboutBox; private JDialog databaseSettings; private EDACCGridSettingsView gridSettings; public void update(Observable o, Object arg) { // watch connection state updateConnectionStateView(); } /** * Updates the GUI components which are sensitive on the DB connection state. */ private void updateConnectionStateView() { boolean state = DatabaseConnector.getInstance().isConnected(); connectToDBMenuItem.setEnabled(!state); disconnectMenuItem.setEnabled(state); generateDBMenuItem.setEnabled(state); } }
package binaryTree;

import java.util.LinkedList;
import java.util.Queue;

/**
 * A binary search tree of {@code Node} objects. Ordering is decided by
 * {@link Node#compare(Node)} and duplicates are rejected by {@link #insert(Node)},
 * so node values are unique within the tree.
 *
 * Fixes applied in this revision:
 * - postorderTraversal() recursed via inorderTraversal() (wrong traversal order);
 * - delete() was half-implemented (empty branches, broken parent links,
 *   always returned false) and is now a complete BST delete;
 * - isCompleteBinaryTree() was a stub that always returned true;
 * - getTreeHeight() called getDepth() per node (accidental O(n^2)).
 */
public class BinaryTree {

    // Fields

    /** Root of the tree; null for an empty tree. */
    public Node root;

    // Constructors

    public BinaryTree() {
        root = null;
    }

    public BinaryTree(Node r00t) {
        root = r00t;
    }

    // Helper Functions

    /**
     * Number of edges from n up to the root, found by walking parent links.
     * Returns Integer.MIN_VALUE when n or the root is null.
     * NOTE(review): terminates by comparing values against the root's value,
     * which relies on values being unique (insert() enforces this).
     */
    public int getDepth(Node n) {
        if (n == null || root == null) {
            return Integer.MIN_VALUE;
        }
        int depth = 0;
        while (n.value != root.value) {
            n = n.parent;
            depth++;
        }
        return depth;
    }

    /** True when n has no children. */
    public boolean isLeafNode(Node n) {
        return n.leftChild == null && n.rightChild == null;
    }

    /** True when n has at least one child. */
    public boolean isInternalNode(Node n) {
        return n.leftChild != null || n.rightChild != null;
    }

    /** True when n has exactly two children. */
    public boolean isFullInternalNode(Node n) {
        return n.leftChild != null && n.rightChild != null;
    }

    /**
     * Counts the nodes of the subtree rooted at curr using a level-order walk.
     *
     * @param curr subtree root; null yields 0
     * @return number of nodes in the subtree
     */
    public int countNumberOfNodes(Node curr) {
        if (curr == null) {
            return 0;
        }
        int count = 0;
        Queue<Node> hold = new LinkedList<Node>();
        hold.add(curr);
        while (!hold.isEmpty()) {
            Node x = hold.poll();
            if (x.leftChild != null) {
                hold.add(x.leftChild);
            }
            if (x.rightChild != null) {
                hold.add(x.rightChild);
            }
            count++;
        }
        return count;
    }

    /**
     * Returns the LEFTMOST node reachable from n (the minimum of n's subtree).
     * Despite the historical name, this is not the deepest node overall.
     * Returns a sentinel node holding Integer.MIN_VALUE when n is null.
     */
    public Node getDeepestLeftNode(Node n) {
        if (n == null) {
            System.out.println("Error - getDeepestLeftNode() - returning bad node.");
            return new Node(Integer.MIN_VALUE);
        }
        while (n.leftChild != null) {
            n = n.leftChild;
        }
        return n;
    }

    // Insertion

    /**
     * Inserts node a using BST ordering via Node.compare(). Duplicates and
     * invalid comparisons are reported on stdout and not inserted.
     */
    public void insert(Node a) {
        if (a == null) {
            return;
        }
        if (root == null) {
            root = a;
            root.parent = null;
            return;
        }
        boolean hasNodeBeenAdded = false;
        Node it = root;
        while (!hasNodeBeenAdded) {
            switch (a.compare(it)) {
                case -1:
                    if (it.leftChild == null) {
                        it.leftChild = a;
                        a.parent = it;
                        hasNodeBeenAdded = true;
                    } else {
                        it = it.leftChild;
                    }
                    break;
                case 0:
                    System.out.println("insert() - Error - Node Already Exists.");
                    hasNodeBeenAdded = true;
                    break;
                case 1:
                    if (it.rightChild == null) {
                        it.rightChild = a;
                        a.parent = it;
                        hasNodeBeenAdded = true;
                    } else {
                        it = it.rightChild;
                    }
                    break;
                case Integer.MIN_VALUE:
                    System.out.println("insert() - Error - Invalid Value.");
                    hasNodeBeenAdded = true;
                    break;
            }
        }
    }

    // Deletion

    /**
     * Removes the node whose value equals n.value, if present.
     * A node with two children is replaced by its in-order successor
     * (the leftmost node of its right subtree); a node with at most one
     * child is replaced by that child. Parent links are kept consistent.
     *
     * @param n a node carrying the value to remove (the node itself need
     *          not belong to this tree)
     * @return true when a node was removed, false otherwise
     */
    public boolean delete(Node n) {
        if (n == null || root == null) {
            return false;
        }
        Node target = findByValue(n.value);
        if (target == null) {
            return false;
        }
        removeNode(target);
        return true;
    }

    /** Level-order search for the node holding the given value; null if absent. */
    private Node findByValue(int value) {
        Queue<Node> hold = new LinkedList<Node>();
        hold.add(root);
        while (!hold.isEmpty()) {
            Node x = hold.poll();
            if (x.value == value) {
                return x;
            }
            if (x.leftChild != null) {
                hold.add(x.leftChild);
            }
            if (x.rightChild != null) {
                hold.add(x.rightChild);
            }
        }
        return null;
    }

    /** Unlinks x from the tree while preserving BST order and parent links. */
    private void removeNode(Node x) {
        Node replacement;
        if (x.rightChild == null) {
            // No right subtree: the left child (possibly null) moves up.
            replacement = x.leftChild;
        } else {
            // In-order successor: leftmost node of the right subtree.
            replacement = getDeepestLeftNode(x.rightChild);
            if (replacement != x.rightChild) {
                // Detach the successor; its right subtree takes its place.
                replacement.parent.leftChild = replacement.rightChild;
                if (replacement.rightChild != null) {
                    replacement.rightChild.parent = replacement.parent;
                }
                replacement.rightChild = x.rightChild;
                x.rightChild.parent = replacement;
            }
            replacement.leftChild = x.leftChild;
            if (x.leftChild != null) {
                x.leftChild.parent = replacement;
            }
        }
        // Splice the replacement into x's slot.
        if (x.parent == null) {
            root = replacement;
            if (replacement != null) {
                replacement.parent = null;
            }
        } else if (x.parent.leftChild == x) {
            x.parent.leftChild = replacement;
            if (replacement != null) {
                replacement.parent = x.parent;
            }
        } else {
            x.parent.rightChild = replacement;
            if (replacement != null) {
                replacement.parent = x.parent;
            }
        }
    }

    // Depth-first order Traversals

    /** Visits node, left subtree, right subtree. */
    public void preorderTraversal(Node curr) {
        if (curr == null) {
            return;
        }
        doSomething(curr);
        preorderTraversal(curr.leftChild);
        preorderTraversal(curr.rightChild);
    }

    /** Visits left subtree, node, right subtree. */
    public void inorderTraversal(Node curr) {
        if (curr == null) {
            return;
        }
        inorderTraversal(curr.leftChild);
        doSomething(curr);
        inorderTraversal(curr.rightChild);
    }

    /**
     * Visits left subtree, right subtree, node.
     * FIX: previously recursed via inorderTraversal(), producing a mixed
     * (non-postorder) visit order.
     */
    public void postorderTraversal(Node curr, String purpose) {
        if (curr == null) {
            return;
        }
        postorderTraversal(curr.leftChild, purpose);
        postorderTraversal(curr.rightChild, purpose);
        doSomething(curr);
    }

    /** Visit action shared by all traversals: prints "value,". */
    public void doSomething(Node n) {
        System.out.print(n.value);
        System.out.print(",");
    }

    // Breadth-first order Traversal aka level-order

    public void breadthfirstTraversal() {
        if (root == null) {
            return;
        }
        Queue<Node> hold = new LinkedList<Node>();
        hold.add(root);
        while (!hold.isEmpty()) {
            Node x = hold.poll();
            if (x.leftChild != null) {
                hold.add(x.leftChild);
            }
            if (x.rightChild != null) {
                hold.add(x.rightChild);
            }
            doSomething(x);
        }
    }

    // Binary Tree Properties

    /**
     * Height of the tree = depth of its deepest node (0 for a single node
     * and, as before, 0 for an empty tree).
     * FIX: level-order walk counting levels, instead of calling getDepth()
     * for every node (which was O(n^2)).
     */
    public int getTreeHeight() {
        if (root == null) {
            return 0;
        }
        int depth = -1;
        Queue<Node> hold = new LinkedList<Node>();
        hold.add(root);
        while (!hold.isEmpty()) {
            depth++;
            int levelSize = hold.size();
            for (int i = 0; i < levelSize; i++) {
                Node x = hold.poll();
                if (x.leftChild != null) {
                    hold.add(x.leftChild);
                }
                if (x.rightChild != null) {
                    hold.add(x.rightChild);
                }
            }
        }
        return depth;
    }

    /** Total node count of the whole tree (0 when empty). */
    public int getNumberOfNodes() {
        if (root == null) {
            return 0;
        }
        return countNumberOfNodes(root);
    }

    // isFullBinaryTree()
    // A Full Binary Tree is a tree in which every node other than the leaves has
    // two children. That is, every node in a binary tree has
    // either two children or no children.
    public boolean isFullBinaryTree() {
        if (root == null) {
            return true;
        }
        Queue<Node> temp = new LinkedList<Node>();
        temp.add(root);
        while (!temp.isEmpty()) {
            Node x = temp.poll();
            // A node with exactly one child breaks fullness.
            if (!isFullInternalNode(x) && !isLeafNode(x)) {
                return false;
            }
            if (x.leftChild != null) {
                temp.add(x.leftChild);
            }
            if (x.rightChild != null) {
                temp.add(x.rightChild);
            }
        }
        return true;
    }

    // isPerfectBinaryTree()
    // A perfect binary tree is a full binary tree
    // in which all leaves have the same depth/level.
    public boolean isPerfectBinaryTree() {
        if (root == null) {
            return false;
        }
        if (!isFullBinaryTree()) {
            return false;
        }
        int lastLeafFoundDepth = Integer.MIN_VALUE;
        Queue<Node> temp = new LinkedList<Node>();
        temp.add(root);
        while (!temp.isEmpty()) {
            Node x = temp.poll();
            if (x.leftChild != null) {
                temp.add(x.leftChild);
            }
            if (x.rightChild != null) {
                temp.add(x.rightChild);
            }
            if (isLeafNode(x)) {
                // First leaf sets the reference depth; any other depth
                // invalidates perfection.
                int d = getDepth(x);
                if (lastLeafFoundDepth == Integer.MIN_VALUE) {
                    lastLeafFoundDepth = d;
                } else if (lastLeafFoundDepth != d) {
                    return false;
                }
            }
        }
        return true;
    }

    // isCompleteBinaryTree()
    // All levels are full, except for the last. And all
    // nodes in the last level are "all the way left"
    /**
     * FIX: previously an unimplemented stub that always returned true.
     * Level-order scan: once any missing child ("gap") is seen, no further
     * node may appear, otherwise the tree is not complete.
     */
    public boolean isCompleteBinaryTree() {
        if (root == null) {
            return true;
        }
        Queue<Node> temp = new LinkedList<Node>();
        temp.add(root);
        boolean seenGap = false;
        while (!temp.isEmpty()) {
            Node x = temp.poll();
            if (x.leftChild != null) {
                if (seenGap) {
                    return false;
                }
                temp.add(x.leftChild);
            } else {
                seenGap = true;
            }
            if (x.rightChild != null) {
                if (seenGap) {
                    return false;
                }
                temp.add(x.rightChild);
            } else {
                seenGap = true;
            }
        }
        return true;
    }

    // isDegenerateTree()
    // A degenerate tree is a tree where each parent node
    // has only one associated child node. This means that performance-wise,
    // the tree will behave like a linked list data structure.
    public boolean isDegenerateTree() {
        if (root == null) {
            return true;
        }
        Queue<Node> temp = new LinkedList<Node>();
        temp.add(root);
        while (!temp.isEmpty()) {
            Node x = temp.poll();
            if (isFullInternalNode(x)) {
                return false;
            }
            if (x.leftChild != null) {
                temp.add(x.leftChild);
            }
            if (x.rightChild != null) {
                temp.add(x.rightChild);
            }
        }
        return true;
    }
}
package burai.run;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;

import javafx.application.Platform;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;

import burai.app.QEFXMain;
import burai.app.path.QEPath;
import burai.com.env.Environments;
import burai.com.file.FileTools;
import burai.input.QEInput;
import burai.project.Project;
import burai.run.parser.LogParser;

/**
 * One queued calculation run for a single {@link Project}: builds the input
 * file(s), launches the external Quantum ESPRESSO commands as OS processes,
 * redirects their stdout/stderr to per-step log/err files, and reports an
 * error dialog on failure.
 *
 * FIX in this revision: deleteLogFiles() could loop forever when the project
 * returned a null/empty log or err file name, because the `continue` skipped
 * the only termination check.
 */
public class RunningNode implements Runnable {

    private static final RunningType DEFAULT_TYPE = RunningType.SCF;

    // false once stop() has been called; checked before and during run()
    private boolean alive;

    private Project project;

    private RunningStatus status;

    private List<RunningStatusChanged> onStatusChangedList;

    private RunningType type;

    private int numProcesses;

    private int numThreads;

    // the external process currently running, kept so stop() can destroy it
    private Process objProcess;

    public RunningNode(Project project) {
        if (project == null) {
            throw new IllegalArgumentException("project is null.");
        }

        this.alive = true;
        this.project = project;
        this.status = RunningStatus.IDLE;
        this.onStatusChangedList = null;
        this.type = null;
        this.numProcesses = 1;
        this.numThreads = 1;
        this.objProcess = null;
    }

    public Project getProject() {
        return this.project;
    }

    public synchronized RunningStatus getStatus() {
        return this.status;
    }

    /** Updates the status and notifies every registered listener. */
    protected synchronized void setStatus(RunningStatus status) {
        if (status == null) {
            return;
        }

        this.status = status;

        if (this.onStatusChangedList != null) {
            for (RunningStatusChanged onStatusChanged : this.onStatusChangedList) {
                if (onStatusChanged != null) {
                    onStatusChanged.onRunningStatusChanged(this.status);
                }
            }
        }
    }

    public synchronized void addOnStatusChanged(RunningStatusChanged onStatusChanged) {
        if (onStatusChanged == null) {
            return;
        }
        if (this.onStatusChangedList == null) {
            // lazily created: most nodes have no listeners
            this.onStatusChangedList = new ArrayList<RunningStatusChanged>();
        }
        this.onStatusChangedList.add(onStatusChanged);
    }

    public synchronized void removeOnStatusChanged(RunningStatusChanged onStatusChanged) {
        if (onStatusChanged != null && this.onStatusChangedList != null) {
            this.onStatusChangedList.remove(onStatusChanged);
        }
    }

    public synchronized RunningType getType() {
        return this.type;
    }

    public synchronized void setType(RunningType type) {
        this.type = type;
    }

    public synchronized int getNumProcesses() {
        return this.numProcesses;
    }

    public synchronized void setNumProcesses(int numProcesses) {
        this.numProcesses = numProcesses;
    }

    public synchronized int getNumThreads() {
        return this.numThreads;
    }

    public synchronized void setNumThreads(int numThreads) {
        this.numThreads = numThreads;
    }

    /** Requests termination: stops the run loop and kills any running process. */
    public synchronized void stop() {
        this.alive = false;
        if (this.objProcess != null) {
            this.objProcess.destroy();
        }
    }

    @Override
    public void run() {
        synchronized (this) {
            if (!this.alive) {
                return;
            }
        }

        File directory = this.getDirectory();
        if (directory == null) {
            return;
        }

        // Snapshot the mutable settings once, under the lock.
        RunningType type2 = null;
        int numProcesses2 = -1;
        int numThreads2 = -1;
        synchronized (this) {
            type2 = this.type;
            numProcesses2 = this.numProcesses;
            numThreads2 = this.numThreads;
        }

        if (type2 == null) {
            type2 = DEFAULT_TYPE;
        }
        if (numProcesses2 < 1) {
            numProcesses2 = 1;
        }
        if (numThreads2 < 1) {
            numThreads2 = 1;
        }

        QEInput input = new FXQEInputFactory(type2).getQEInput(this.project);
        if (input == null) {
            return;
        }

        String inpName = this.project.getInpFileName();
        inpName = inpName == null ? null : inpName.trim();
        File inpFile = (inpName == null || inpName.isEmpty()) ? null : new File(directory, inpName);
        if (inpFile == null) {
            return;
        }

        // Each run type yields a list of commands plus per-command helpers;
        // all helper lists must cover at least the command list.
        List<String[]> commandList = type2.getCommandList(inpName, numProcesses2);
        if (commandList == null || commandList.isEmpty()) {
            return;
        }

        List<RunningCondition> conditionList = type2.getConditionList();
        if (conditionList == null || conditionList.size() < commandList.size()) {
            return;
        }

        List<InputEditor> inputEditorList = type2.getInputEditorList(this.project);
        if (inputEditorList == null || inputEditorList.size() < commandList.size()) {
            return;
        }

        List<LogParser> parserList = type2.getParserList(this.project);
        if (parserList == null || parserList.size() < commandList.size()) {
            return;
        }

        List<PostOperation> postList = type2.getPostList();
        if (postList == null || postList.size() < commandList.size()) {
            return;
        }

        this.deleteLogFiles(directory);

        int iCommand = 0;
        ProcessBuilder builder = null;
        boolean errOccurred = false;

        for (int i = 0; i < commandList.size(); i++) {
            synchronized (this) {
                if (!this.alive) {
                    return;
                }
            }

            String[] command = commandList.get(i);
            if (command == null || command.length < 1) {
                continue;
            }

            RunningCondition condition = conditionList.get(i);
            if (condition == null) {
                continue;
            }

            InputEditor inputEditor = inputEditorList.get(i);
            if (inputEditor == null) {
                continue;
            }

            LogParser parser = parserList.get(i);
            if (parser == null) {
                continue;
            }

            PostOperation post = postList.get(i);
            if (post == null) {
                continue;
            }

            QEInput input2 = inputEditor.editInput(input);
            if (input2 == null) {
                continue;
            }

            if (!condition.toRun(this.project, input2)) {
                continue;
            }

            boolean inpStatus = this.writeQEInput(input2, inpFile);
            if (!inpStatus) {
                continue;
            }

            // Per-step log/err files are numbered by executed commands
            // (iCommand), not by list index (i).
            String logName = this.project.getLogFileName(iCommand);
            logName = logName == null ? null : logName.trim();
            File logFile = (logName == null || logName.isEmpty()) ? null : new File(directory, logName);
            if (logFile == null) {
                continue;
            }

            String errName = this.project.getErrFileName(iCommand);
            errName = errName == null ? null : errName.trim();
            File errFile = (errName == null || errName.isEmpty()) ? null : new File(directory, errName);
            if (errFile == null) {
                continue;
            }

            builder = new ProcessBuilder();
            builder.directory(directory);
            builder.command(command);
            builder.redirectOutput(logFile);
            builder.redirectError(errFile);
            builder.environment().put("OMP_NUM_THREADS", Integer.toString(numThreads2));
            this.setPathToBuilder(builder);

            try {
                synchronized (this) {
                    this.objProcess = builder.start();
                }

                parser.startParsing(logFile);

                if (this.objProcess != null) {
                    // non-zero exit code means the command failed
                    if (this.objProcess.waitFor() != 0) {
                        errOccurred = true;
                        break;
                    }
                }

            } catch (Exception e) {
                e.printStackTrace();
                errOccurred = true;
                break;

            } finally {
                synchronized (this) {
                    this.objProcess = null;
                }
                parser.endParsing();
            }

            if (!errOccurred) {
                post.operate(this.project);
            }

            iCommand++;
        }

        if (!errOccurred) {
            type2.setProjectStatus(this.project);
        } else {
            this.showErrorDialog(builder);
        }
    }

    /** Returns the project's directory as a File, or null if unusable. */
    private File getDirectory() {
        String dirPath = this.project.getDirectoryPath();
        if (dirPath == null) {
            return null;
        }

        File dirFile = new File(dirPath);
        try {
            if (!dirFile.isDirectory()) {
                return null;
            }
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }

        return dirFile;
    }

    /**
     * Serializes the QE input to the given file.
     *
     * @return true on success, false when arguments are unusable or I/O fails
     */
    private boolean writeQEInput(QEInput input, File file) {
        if (input == null) {
            return false;
        }
        if (file == null) {
            return false;
        }

        String strInput = input.toString();
        if (strInput == null) {
            return false;
        }

        // try-with-resources replaces the previous manual close in finally
        try (PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(file)))) {
            writer.println(strInput);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }

        return true;
    }

    /**
     * Deletes stale per-step log/err files and the exit file before a run.
     * Scans indices upward, stopping once a deletion fails (or the name is
     * empty) past maxIndex.
     *
     * FIX: an empty/null file name used to `continue` past the termination
     * check, looping forever; it is now treated like a failed deletion.
     */
    private void deleteLogFiles(File directory) {
        if (directory == null) {
            return;
        }

        final int maxIndex = 9;

        for (int i = 0; true; i++) {
            String logName = this.project.getLogFileName(i);
            logName = logName == null ? null : logName.trim();

            boolean status = false;
            if (logName != null && !logName.isEmpty()) {
                try {
                    File logFile = new File(directory, logName);
                    if (logFile.exists()) {
                        status = FileTools.deleteAllFiles(logFile, false);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            if ((!status) && (i > maxIndex)) {
                break;
            }
        }

        for (int i = 0; true; i++) {
            String errName = this.project.getErrFileName(i);
            errName = errName == null ? null : errName.trim();

            boolean status = false;
            if (errName != null && !errName.isEmpty()) {
                try {
                    File errFile = new File(directory, errName);
                    if (errFile.exists()) {
                        status = FileTools.deleteAllFiles(errFile, false);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            if ((!status) && (i > maxIndex)) {
                break;
            }
        }

        String exitName = this.project.getExitFileName();
        exitName = exitName == null ? null : exitName.trim();
        if (exitName != null && (!exitName.isEmpty())) {
            try {
                File exitFile = new File(directory, exitName);
                if (exitFile.exists()) {
                    FileTools.deleteAllFiles(exitFile, false);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Prepends the QE and MPI install paths to the builder's PATH.
     * All three capitalizations are written because environment-variable
     * case handling differs across platforms.
     */
    private void setPathToBuilder(ProcessBuilder builder) {
        if (builder == null) {
            return;
        }

        // PATH separator differs between Windows and Unix-like systems
        String delim = Environments.isWindows() ? ";" : ":";

        String qePath = QEPath.getPath();
        String mpiPath = QEPath.getMPIPath();

        String orgPath = builder.environment().get("PATH");
        if (orgPath == null) {
            orgPath = builder.environment().get("Path");
        }
        if (orgPath == null) {
            orgPath = builder.environment().get("path");
        }

        String path = null;
        if (qePath != null && !(qePath.isEmpty())) {
            path = path == null ? qePath : (path + delim + qePath);
        }
        if (mpiPath != null && !(mpiPath.isEmpty())) {
            path = path == null ? mpiPath : (path + delim + mpiPath);
        }
        if (orgPath != null && !(orgPath.isEmpty())) {
            path = path == null ? orgPath : (path + delim + orgPath);
        }

        if (path != null && !(path.isEmpty())) {
            builder.environment().put("PATH", path);
            builder.environment().put("Path", path);
            builder.environment().put("path", path);
        }
    }

    /**
     * Shows a modal error dialog (on the JavaFX thread) describing the
     * directory and command of the failed run. builder may be null.
     */
    private void showErrorDialog(ProcessBuilder builder) {
        File dirFile = builder == null ? null : builder.directory();
        String dirStr = dirFile == null ? null : dirFile.getPath();
        if (dirStr != null) {
            dirStr = dirStr.trim();
        }

        final String message1;
        if (dirStr == null || dirStr.isEmpty()) {
            message1 = "ERROR in running the project.";
        } else {
            message1 = "ERROR in running the project: " + dirStr;
        }

        String cmdStr = null;
        List<String> cmdList = builder == null ? null : builder.command();
        if (cmdList != null) {
            for (String cmd : cmdList) {
                if (cmd != null) {
                    cmd = cmd.trim();
                }
                if (cmd == null || cmd.isEmpty()) {
                    continue;
                }
                if (cmdStr == null) {
                    cmdStr = cmd;
                } else {
                    cmdStr = cmdStr + " " + cmd;
                }
            }
        }
        if (cmdStr != null) {
            cmdStr = cmdStr.trim();
        }

        final String message2;
        if (cmdStr == null || cmdStr.isEmpty()) {
            message2 = "NO COMMAND.";
        } else {
            message2 = "COMMAND: " + cmdStr;
        }

        Platform.runLater(() -> {
            Alert alert = new Alert(AlertType.ERROR);
            QEFXMain.initializeDialogOwner(alert);
            alert.setHeaderText(message1);
            alert.setContentText(message2);
            alert.showAndWait();
        });
    }

    @Override
    public int hashCode() {
        return 0;
    }

    @Override
    public boolean equals(Object obj) {
        // identity equality: each node represents one distinct queued run
        return this == obj;
    }
}
package file;

import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

/**
 * This class will search all folders and the respective subfolders for files
 * and return them as a List of File items.
 *
 * FIX in this revision: addPath(File[]) had a stray semicolon after its
 * duplicate check ({@code if(!dirToSearch.contains(f));}), so the check was
 * a no-op and duplicate search roots were always added.
 */
public class FileWalker {

    /** output list for Files or Folders (accumulates across fileWalk calls) */
    private LinkedList<File> resultList = new LinkedList<File>();

    /** Directory's to process */
    private LinkedList<File> dirToSearch = new LinkedList<File>();

    // when true, do not descend into subdirectories
    private boolean noSub = false;
    // when true, collect directories instead of files
    private boolean folderOnly = false;
    // when true, collect only .jpg/.png/.gif files
    private boolean imageOnly = false;

    /** Adds a single search root. */
    public void addPath(File file) {
        addPath(new File[] { file });
    }

    /** Main add method: adds each root, skipping ones already queued. */
    public void addPath(File[] file) {
        for (File f : file) {
            if (!dirToSearch.contains(f)) { // FIX: stray ';' made this check a no-op
                dirToSearch.add(f);
            }
        }
    }

    /** Adds a single search root given as a path string. */
    public void addPath(String string) {
        addPath(new File[] { new File(string) });
    }

    /** Adds several search roots given as path strings. */
    public void addPath(String[] dirs) {
        File[] conv = new File[dirs.length];
        int i = 0;
        for (String s : dirs) { // string to File
            conv[i] = new File(s);
            i++;
        }
        addPath(conv);
    }

    /** Adds several search roots given as a list of path strings. */
    public void addPath(List<String> dirs) {
        File[] conv = new File[dirs.size()];
        int i = 0;
        for (String s : dirs) { // string to File
            conv[i] = new File(s);
            i++;
        }
        addPath(conv);
    }

    public void setnoSub(boolean set) {
        this.noSub = set;
    }

    public void setfolderOnly(boolean set) {
        this.folderOnly = set;
    }

    public void setImagesOnly(boolean set) {
        this.imageOnly = set;
    }

    /** Like fileWalk(), but returned as an ArrayList-backed List. */
    public List<File> fileWalkList() {
        return new ArrayList<File>(fileWalk());
    }

    /** Like fileWalk(), but with each File converted to its string form. */
    public List<String> fileWalkStringList() {
        LinkedList<File> files = fileWalk();
        ArrayList<String> list = new ArrayList<String>(files.size());
        Iterator<File> ite = files.iterator();
        while (ite.hasNext()) {
            list.add(ite.next().toString());
        }
        return list;
    }

    /**
     * Walks every queued root and returns a copy of the accumulated results.
     * Depth limits: noSub && folderOnly walks to depth 2, noSub && !folderOnly
     * to depth 1, otherwise unlimited.
     * NOTE(review): the depth-2 value for folderOnly looks intentional (to
     * reach immediate subfolders) but is undocumented -- confirm with callers.
     */
    public LinkedList<File> fileWalk() {
        for (File f : dirToSearch) {
            try {
                if (noSub && folderOnly) {
                    Files.walkFileTree(f.toPath(), EnumSet.noneOf(FileVisitOption.class), 2, new FetchFiles());
                } else if (noSub && !folderOnly) {
                    Files.walkFileTree(f.toPath(), EnumSet.noneOf(FileVisitOption.class), 1, new FetchFiles());
                } else {
                    Files.walkFileTree(f.toPath(), new FetchFiles());
                }
            } catch (IOException e) {
                e.printStackTrace(); // should not reach this...
            }
        }
        return new LinkedList<File>(resultList);
    }

    /** Clears both the queued roots and the accumulated results. */
    public void clearAll() {
        this.resultList.clear();
        this.dirToSearch.clear();
    }

    /** Visitor that collects files/folders according to the current flags. */
    class FetchFiles extends SimpleFileVisitor<Path> {

        @Override
        public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException {
            if (!folderOnly && attrs.isRegularFile()) {
                if (imageOnly) {
                    // image filter by extension, case-insensitive
                    String toTest = path.toString().toLowerCase();
                    if (toTest.endsWith(".jpg") || toTest.endsWith(".png") || toTest.endsWith(".gif")) {
                        resultList.add(path.toFile());
                    }
                } else {
                    resultList.add(path.toFile());
                }
            }
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
            if (folderOnly && attrs.isDirectory()) {
                resultList.add(dir.toFile());
            }
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
            // unreadable entries are reported but do not abort the walk
            System.err.println("unable to access " + file.toString());
            return FileVisitResult.CONTINUE;
        }
    }
}
/* * Gameboi */ package gameboi; import java.nio.file.Path; import java.nio.file.Paths; /** * * @author tomis007 */ public class GameBoi { /** * @param argv the command line arguments */ public static void main(String[] argv) { Path rom_path = Paths.get(argv[0]); GBMem memory = new GBMem(rom_path); CPU z80 = new CPU(memory); GPU gpu = new GPU(memory, z80); z80.setGPU(gpu); while (true) { int count = 69905; // long startTime = System.nanoTime(); while (count > 0) { int cycles; cycles = z80.ExecuteOpcode(); gpu.updateGraphics(cycles); count -= cycles; } // long endTime = System.nanoTime(); // if (endTime - startTime < (1000000000 / 60)) { // System.out.println("Should sleep: " + (endTime - startTime)); // try { // System.out.println("sleeping"); // Thread.sleep((endTime - startTime) / 1000000); // } catch (InterruptedException e) { // System.out.println("ugh"); // System.out.println(count); } // System.out.println(count); // System.exit(1); } }
package liquibase.database;

import liquibase.database.sql.RawSqlStatement;
import liquibase.database.sql.SqlStatement;
import liquibase.database.structure.DatabaseSnapshot;
import liquibase.database.structure.PostgresDatabaseSnapshot;
import liquibase.exception.JDBCException;
import liquibase.exception.CustomChangeException;
import liquibase.util.StringUtils;
import liquibase.diff.DiffStatusListener;

import java.sql.*;
import java.text.ParseException;
import java.util.HashSet;
import java.util.Set;
import java.util.List;
import java.util.ArrayList;
import java.util.logging.Level;

/**
 * Encapsulates PostgreSQL database support: type mappings, identifier
 * escaping, schema/search_path resolution and snapshot creation.
 */
public class PostgresDatabase extends AbstractDatabase {
    public static final String PRODUCT_NAME = "PostgreSQL";

    // Left empty — system objects are filtered via isSystemTable() instead
    // (see the note in the constructor).
    private Set<String> systemTablesAndViews = new HashSet<String>();

    // Cached result of getDefaultDatabaseSchemaName().
    private String defaultDatabaseSchemaName;

    // NOTE(review): never assigned or read in this class;
    // getDefaultCatalogName() just delegates to the superclass.
    private String defaultCatalogName;

    public PostgresDatabase() {
        // NOTE(review): a long list of information_schema view names (plus
        // "pg_logdir_ls", "information_schema_catalog_name",
        // "triggered_update_columns" and "book_pkey") used to be registered in
        // systemTablesAndViews here but is fully commented out, so the set
        // stays empty.
    }

    /** Product name as reported by the PostgreSQL JDBC driver. */
    public String getProductName() {
        return "PostgreSQL";
    }

    /** Liquibase short type name (used e.g. in changelog "dbms" attributes). */
    public String getTypeName() {
        return "postgresql";
    }

    public Set<String> getSystemTablesAndViews() {
        return systemTablesAndViews;
    }

    /** PostgreSQL supports INITIALLY DEFERRED constraints. */
    public boolean supportsInitiallyDeferrableColumns() {
        return true;
    }

    /** True when the connection reports the PostgreSQL product name. */
    public boolean isCorrectDatabaseImplementation(Connection conn) throws JDBCException {
        return PRODUCT_NAME.equalsIgnoreCase(getDatabaseProductName(conn));
    }

    /** Returns the PostgreSQL driver class for jdbc:postgresql: URLs, else null. */
    public String getDefaultDriver(String url) {
        if (url.startsWith("jdbc:postgresql:")) {
            return "org.postgresql.Driver";
        }
        return null;
    }

    public String getBooleanType() {
        return "BOOLEAN";
    }

    public String getCurrencyType() {
        return "DECIMAL";
    }

    // UUIDs are stored as fixed-width 36-character strings.
    public String getUUIDType() {
        return "CHAR(36)";
    }

    public String getClobType() {
        return "TEXT";
    }

    public String getBlobType() {
        return "BYTEA";
    }

    public String getDateTimeType() {
        return "TIMESTAMP WITH TIME ZONE";
    }

    public boolean supportsSequences() {
        return true;
    }

    public String getCurrentDateTimeFunction() {
        return "NOW()";
    }

    /**
     * Resolves the default schema from the connection's search_path: the first
     * usable entry wins; a "$user" entry resolves to the connection user name,
     * but only when a schema of that name actually exists. The result is
     * cached after the first successful lookup.
     */
    protected String getDefaultDatabaseSchemaName() throws JDBCException {
        if (defaultDatabaseSchemaName == null) {
            try {
                List<String> searchPaths = getSearchPaths();
                if (searchPaths != null && searchPaths.size() > 0) {
                    for (String searchPath : searchPaths) {
                        if (searchPath != null && searchPath.length() > 0) {
                            defaultDatabaseSchemaName = searchPath;

                            if (defaultDatabaseSchemaName.equals("$user") && getConnectionUsername() != null) {
                                if (! schemaExists(getConnectionUsername())) {
                                    defaultDatabaseSchemaName = null;
                                } else {
                                    defaultDatabaseSchemaName = getConnectionUsername();
                                }
                            }

                            if (defaultDatabaseSchemaName != null)
                                break;
                        }
                    }
                }
            } catch (Exception e) {
                // TODO: throw? Currently the failure is only logged and null
                // is returned. NOTE(review): the message says "catalog" but
                // this is the schema lookup.
                e.printStackTrace();
                log.log(Level.SEVERE, "Failed to get default catalog name from postgres", e);
            }
        }
        return defaultDatabaseSchemaName;
    }

    public String getDefaultCatalogName() throws JDBCException {
        return super.getDefaultCatalogName();
    }

    // PostgreSQL folds unquoted identifiers to lower case, so the changelog
    // table names are lower-cased here.
    public String getDatabaseChangeLogTableName() {
        return super.getDatabaseChangeLogTableName().toLowerCase();
    }

    public String getDatabaseChangeLogLockTableName() {
        return super.getDatabaseChangeLogLockTableName().toLowerCase();
    }

    // NOTE(review): a commented-out dropDatabaseObjects(String) override based
    // on "DROP OWNED BY <user>" used to live here.

    /**
     * Builds SQL listing sequence names in the given schema, excluding
     * sequences backing serial columns (whose nextval(...) call appears as a
     * column default in pg_attrdef).
     */
    public SqlStatement createFindSequencesSQL(String schema) throws JDBCException {
        return new RawSqlStatement("SELECT relname AS SEQUENCE_NAME FROM pg_class, pg_namespace WHERE relkind='S' AND pg_class.relnamespace = pg_namespace.oid AND nspname = '" + convertRequestedSchemaToSchema(schema) + "' AND 'nextval(''" + (schema == null ? "" : schema + ".") + "'||relname||'''::regclass)' not in (select adsrc from pg_attrdef where adsrc is not null) AND 'nextval('''||relname||'''::regclass)' not in (select adsrc from pg_attrdef where adsrc is not null)");
    }

    /**
     * Treats pg_catalog/pg_toast objects — and names with common
     * sequence/key/index suffixes or prefixes — as system tables.
     */
    public boolean isSystemTable(String catalogName, String schemaName, String tableName) {
        return super.isSystemTable(catalogName, schemaName, tableName)
                || "pg_catalog".equals(schemaName)
                || "pg_toast".equals(schemaName)
                || tableName.endsWith("_seq")
                || tableName.endsWith("_key")
                || tableName.endsWith("_pkey")
                || tableName.startsWith("idx_")
                || tableName.startsWith("pk_");
    }

    public boolean supportsTablespaces() {
        return true;
    }

    /** Reads a view's definition from the pg_views system view. */
    public SqlStatement getViewDefinitionSql(String schemaName, String name) throws JDBCException {
        return new RawSqlStatement("select definition from pg_views where viewname='" + name + "' AND schemaname='" + convertRequestedSchemaToSchema(schemaName) + "'");
    }

    /**
     * Maps generic column types to PostgreSQL equivalents and rewrites
     * auto-increment integer types to serial/bigserial.
     */
    public String getColumnType(String columnType, Boolean autoIncrement) {
        if (columnType.startsWith("java.sql.Types.VARCHAR")) { //returns "name" for type
            return columnType.replace("java.sql.Types.", "");
        }

        String type = super.getColumnType(columnType, autoIncrement);

        if (type.startsWith("TEXT(")) {
            return getClobType();
        } else if (type.toLowerCase().startsWith("float8")) {
            return "FLOAT8";
        } else if (type.toLowerCase().startsWith("float4")) {
            return "FLOAT4";
        }

        if (autoIncrement != null && autoIncrement) {
            if ("integer".equals(type.toLowerCase())) {
                return "serial";
            } else if ("bigint".equals(type.toLowerCase()) || "bigserial".equals(type.toLowerCase())) {
                return "bigserial";
            } else {
                // Unknown integer type, default to "serial"
                return "serial";
            }
        }

        return type;
    }

    /** Empty: serial/bigserial column types already imply auto-increment. */
    public String getAutoIncrementClause() {
        return "";
    }

    /**
     * Strips PostgreSQL-specific suffixes from string default values
     * ('...'::type casts; trailing time-zone offsets on temporal defaults)
     * before delegating the conversion to the superclass.
     */
    public Object convertDatabaseValueToJavaObject(Object defaultValue, int dataType, int columnSize, int decimalDigits) throws ParseException {
        if (defaultValue != null) {
            if (defaultValue instanceof String) {
                // e.g. "'text'::character varying" -> "'text'"
                defaultValue = ((String) defaultValue).replaceAll("'::[\\w\\s]+$", "'");

                if (dataType == Types.DATE || dataType == Types.TIME || dataType == Types.TIMESTAMP) {
                    //remove trailing time zone info
                    defaultValue = ((String) defaultValue).replaceFirst("-\\d+$", "");
                }
            }
        }
        return super.convertDatabaseValueToJavaObject(defaultValue, dataType, columnSize, decimalDigits);
    }

    /** Lower-cases the requested schema; falls back to the catalog when null. */
    public String convertRequestedSchemaToSchema(String requestedSchema) throws JDBCException {
        if (requestedSchema == null) {
            // Return the catalog name instead..
            return getDefaultCatalogName();
        } else {
            return StringUtils.trimToNull(requestedSchema).toLowerCase();
        }
    }

    public String convertRequestedSchemaToCatalog(String requestedSchema) throws JDBCException {
        return super.convertRequestedSchemaToCatalog(requestedSchema);
    }

    /**
     * Quotes the table name when it is a reserved word or mixes upper and
     * lower case (PostgreSQL would otherwise fold it to lower case).
     *
     * @see liquibase.database.AbstractDatabase#escapeTableName(java.lang.String, java.lang.String)
     */
    @Override
    public String escapeTableName(String schemaName, String tableName) {
        //Check if tableName is in reserved words and has CaseSensitivity problems
        if (StringUtils.trimToNull(tableName) != null && (hasCaseProblems(tableName) || isReservedWord(tableName))) {
            return super.escapeTableName(schemaName, "\"" + tableName + "\"");
        }
        return super.escapeTableName(schemaName, tableName);
    }

    /**
     * Quotes the column name under the same conditions as escapeTableName.
     *
     * @see liquibase.database.AbstractDatabase#escapeColumnName(java.lang.String, java.lang.String, java.lang.String)
     */
    @Override
    public String escapeColumnName(String schemaName, String tableName, String columnName) {
        if (hasCaseProblems(columnName) || isReservedWord(columnName))
            return "\"" + columnName + "\"";
        return columnName;
    }

    /*
     * Returns true when the identifier mixes upper- and lower-case letters;
     * such identifiers must be quoted to preserve their case in PostgreSQL.
     */
    private boolean hasCaseProblems(String tableName) {
        if (tableName.matches(".*[A-Z].*") && tableName.matches(".*[a-z].*"))
            return true;
        return false;
    }

    /*
     * Returns true when the identifier is a PostgreSQL reserved word
     * (case-insensitive linear scan of the reservedWords table below).
     */
    private boolean isReservedWord(String tableName) {
        for (int i = 0; i != this.reservedWords.length; i++)
            if (this.reservedWords[i].toLowerCase().equalsIgnoreCase(tableName))
                return true;
        return false;
    }

    /*
     * Reserved words from postgresql documentation
     */
    private String[] reservedWords = new String[]{"ALL", "ANALYSE", "ANALYZE", "AND", "ANY", "ARRAY", "AS", "ASC", "ASYMMETRIC", "AUTHORIZATION", "BETWEEN", "BINARY", "BOTH", "CASE", "CAST", "CHECK", "COLLATE", "COLUMN", "CONSTRAINT", "CORRESPONDING", "CREATE", "CROSS", "CURRENT_DATE", "CURRENT_ROLE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "CURRENT_USER", "DEFAULT", "DEFERRABLE", "DESC", "DISTINCT", "DO", "ELSE", "END", "EXCEPT", "FALSE", "FOR", "FOREIGN", "FREEZE", "FROM", "FULL", "GRANT", "GROUP", "HAVING", "ILIKE", "IN", "INITIALLY", "INNER", "INTERSECT", "INTO", "IS", "ISNULL", "JOIN", "LEADING", "LEFT", "LIKE", "LIMIT", "LOCALTIME", "LOCALTIMESTAMP", "NATURAL", "NEW", "NOT", "NOTNULL", "NULL", "OFF", "OFFSET", "OLD", "ON", "ONLY", "OPEN", "OR", "ORDER", "OUTER", "OVERLAPS", "PLACING", "PRIMARY", "REFERENCES", "RETURNING", "RIGHT", "SELECT", "SESSION_USER", "SIMILAR", "SOME", "SYMMETRIC", "TABLE", "THEN", "TO", "TRAILING", "TRUE", "UNION", "UNIQUE", "USER", "USING", "VERBOSE", "WHEN", "WHERE"};

    /*
     * Queries "SHOW search_path" and returns the entries, trimmed, with a
     * quoted "$user" normalised to the bare token $user. Returns null when the
     * query fails or yields nothing.
     * NOTE(review): the Statement/ResultSet are never closed, and the error
     * message mentions "catalog" although this reads the search path.
     */
    private List<String> getSearchPaths() {
        List<String> searchPaths = null;

        try {
            DatabaseConnection con = getConnection();

            if (con != null) {
                Statement stmt = con.createStatement(
                        ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
                ResultSet searchPathQry = stmt.executeQuery("SHOW search_path");

                if (searchPathQry.next()) {
                    String searchPathResult = searchPathQry.getString(1);
                    if (searchPathResult != null) {
                        String dirtySearchPaths[] = searchPathResult.split("\\,");
                        searchPaths = new ArrayList<String>();
                        for (String searchPath : dirtySearchPaths) {
                            searchPath = searchPath.trim();

                            // Ensure there is consistency ..
                            if (searchPath.equals("\"$user\"")) {
                                searchPath = "$user";
                            }

                            searchPaths.add(searchPath);
                        }
                    }
                }
            }
        } catch (Exception e) {
            // TODO: Something?
            e.printStackTrace();
            log.log(Level.SEVERE, "Failed to get default catalog name from postgres", e);
        }

        return searchPaths;
    }

    // NOTE(review): currently unused within this class.
    private boolean catalogExists(String catalogName) throws SQLException {
        if (catalogName != null) {
            return runExistsQuery("select count(*) from information_schema.schemata where catalog_name='" + catalogName + "'");
        } else {
            return false;
        }
    }

    /** True when a schema with the given name exists (null-safe). */
    private boolean schemaExists(String schemaName) throws SQLException {
        if (schemaName != null) {
            return runExistsQuery("select count(*) from information_schema.schemata where schema_name='" + schemaName + "'");
        } else {
            return false;
        }
    }

    /**
     * Runs a count(*) query and reports whether the count is positive.
     * NOTE(review): Statement/ResultSet are not closed here either.
     */
    private boolean runExistsQuery(String query) throws SQLException {
        DatabaseConnection con = getConnection();
        Statement stmt = con.createStatement(
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        ResultSet existsQry = stmt.executeQuery(query);

        if (existsQry.next()) {
            Integer count = existsQry.getInt(1);
            if (count != null && count > 0) {
                return true;
            }
        }
        return false;
    }

    public DatabaseSnapshot createDatabaseSnapshot(String schema, Set<DiffStatusListener> statusListeners) throws JDBCException {
        return new PostgresDatabaseSnapshot(this, statusListeners, schema);
    }
}
package bisq.core.filter; import bisq.core.btc.nodes.BtcNodes; import bisq.core.locale.Res; import bisq.core.offer.Offer; import bisq.core.payment.payload.PaymentAccountPayload; import bisq.core.payment.payload.PaymentMethod; import bisq.core.provider.ProvidersRepository; import bisq.core.user.Preferences; import bisq.core.user.User; import bisq.network.p2p.NodeAddress; import bisq.network.p2p.P2PService; import bisq.network.p2p.P2PServiceListener; import bisq.network.p2p.network.NetworkFilter; import bisq.network.p2p.storage.HashMapChangedListener; import bisq.network.p2p.storage.payload.ProtectedStorageEntry; import bisq.common.app.DevEnv; import bisq.common.app.Version; import bisq.common.config.Config; import bisq.common.config.ConfigFileEditor; import bisq.common.crypto.HashCashService; import bisq.common.crypto.KeyRing; import org.bitcoinj.core.ECKey; import org.bitcoinj.core.Sha256Hash; import javax.inject.Inject; import javax.inject.Named; import javafx.beans.property.ObjectProperty; import javafx.beans.property.SimpleObjectProperty; import org.bouncycastle.util.encoders.Base64; import java.security.PublicKey; import java.nio.charset.StandardCharsets; import java.math.BigInteger; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiFunction; import java.util.function.Consumer; import java.lang.reflect.Method; import lombok.extern.slf4j.Slf4j; import javax.annotation.Nullable; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static org.bitcoinj.core.Utils.HEX; /** * We only support one active filter, if we receive multiple we use the one with the more recent creationDate. 
 */
@Slf4j
public class FilterManager {
    // Option names under which banned-node lists are persisted in the config file.
    private static final String BANNED_PRICE_RELAY_NODES = "bannedPriceRelayNodes";
    private static final String BANNED_SEED_NODES = "bannedSeedNodes";
    private static final String BANNED_BTC_NODES = "bannedBtcNodes";

    // Proof-of-work challenge must match byte-for-byte.
    private final BiFunction<byte[], byte[], Boolean> challengeValidation = Arrays::equals;
    // We only require a new pow if difficulty has increased
    private final BiFunction<Integer, Integer, Boolean> difficultyValidation =
            (value, controlValue) -> value - controlValue >= 0;

    // Listener
    /** Callback fired whenever a valid filter received from the network is applied. */
    public interface Listener {
        void onFilterAdded(Filter filter);
    }

    private final P2PService p2PService;
    private final KeyRing keyRing;
    private final User user;
    private final Preferences preferences;
    private final ConfigFileEditor configFileEditor;
    private final ProvidersRepository providersRepository;
    private final boolean ignoreDevMsg;
    // The currently applied filter (null when none is active).
    private final ObjectProperty<Filter> filterProperty = new SimpleObjectProperty<>();
    private final List<Listener> listeners = new CopyOnWriteArrayList<>();
    // Hex-encoded EC public keys whose owners are allowed to publish filters.
    private final List<String> publicKeys;
    // Set just-in-time while a privileged dev signs or removes a filter.
    private ECKey filterSigningKey;
    // Superseded/outdated filters seen on the network; removed when we own them.
    private final Set<Filter> invalidFilters = new HashSet<>();
    private Consumer<String> filterWarningHandler;

    // Constructor
    @Inject
    public FilterManager(P2PService p2PService,
                         KeyRing keyRing,
                         User user,
                         Preferences preferences,
                         Config config,
                         ProvidersRepository providersRepository,
                         NetworkFilter networkFilter,
                         @Named(Config.IGNORE_DEV_MSG) boolean ignoreDevMsg,
                         @Named(Config.USE_DEV_PRIVILEGE_KEYS) boolean useDevPrivilegeKeys) {
        this.p2PService = p2PService;
        this.keyRing = keyRing;
        this.user = user;
        this.preferences = preferences;
        this.configFileEditor = new ConfigFileEditor(config.configFile);
        this.providersRepository = providersRepository;
        this.ignoreDevMsg = ignoreDevMsg;

        // In dev mode a single well-known dev key is accepted; otherwise the
        // hard-coded production filter-signer keys.
        publicKeys = useDevPrivilegeKeys ?
                Collections.singletonList(DevEnv.DEV_PRIVILEGE_PUB_KEY) :
                List.of("0358d47858acdc41910325fce266571540681ef83a0d6fedce312bef9810793a27",
                        "029340c3e7d4bb0f9e651b5f590b434fecb6175aeaa57145c7804ff05d210e534f",
                        "034dc7530bf66ffd9580aa98031ea9a18ac2d269f7c56c0e71eca06105b9ed69f9");

        networkFilter.setBannedNodeFunction(this::isNodeAddressBannedFromNetwork);
    }

    // API
    /**
     * Applies any filter already present in P2P storage and registers
     * listeners for filters added/removed later. No-op when ignoreDevMsg is set.
     */
    public void onAllServicesInitialized() {
        if (ignoreDevMsg) {
            return;
        }

        p2PService.getP2PDataStorage().getMap().values().stream()
                .map(ProtectedStorageEntry::getProtectedStoragePayload)
                .filter(protectedStoragePayload -> protectedStoragePayload instanceof Filter)
                .map(protectedStoragePayload -> (Filter) protectedStoragePayload)
                .forEach(this::onFilterAddedFromNetwork);

        // On mainNet we expect to have received a filter object, if not show a popup to the user to inform the
        // Bisq devs.
        // NOTE(review): filterWarningHandler may still be null here if
        // setFilterWarningHandler has not been called yet — confirm call order.
        if (Config.baseCurrencyNetwork().isMainnet() && getFilter() == null) {
            filterWarningHandler.accept(Res.get("popup.warning.noFilter"));
        }

        p2PService.addHashSetChangedListener(new HashMapChangedListener() {
            @Override
            public void onAdded(Collection<ProtectedStorageEntry> protectedStorageEntries) {
                protectedStorageEntries.stream()
                        .filter(protectedStorageEntry -> protectedStorageEntry.getProtectedStoragePayload() instanceof Filter)
                        .forEach(protectedStorageEntry -> {
                            Filter filter = (Filter) protectedStorageEntry.getProtectedStoragePayload();
                            onFilterAddedFromNetwork(filter);
                        });
            }

            @Override
            public void onRemoved(Collection<ProtectedStorageEntry> protectedStorageEntries) {
                protectedStorageEntries.stream()
                        .filter(protectedStorageEntry -> protectedStorageEntry.getProtectedStoragePayload() instanceof Filter)
                        .forEach(protectedStorageEntry -> {
                            Filter filter = (Filter) protectedStorageEntry.getProtectedStoragePayload();
                            onFilterRemovedFromNetwork(filter);
                        });
            }
        });

        p2PService.addP2PServiceListener(new P2PServiceListener() {
            @Override
            public void onDataReceived() {
            }

            @Override
            public void onNoSeedNodeAvailable() {
            }

            @Override
            public void onNoPeersAvailable() {
            }

            @Override
            public void onUpdatedDataReceived() {
                // We should have received all data at that point and if the filters were not set we
                // clean up the persisted banned nodes in the options file as it might be that we missed the filter
                // remove message if we have not been online.
                if (filterProperty.get() == null) {
                    clearBannedNodes();
                }
            }

            @Override
            public void onTorNodeReady() {
            }

            @Override
            public void onHiddenServicePublished() {
            }

            @Override
            public void onSetupFailed(Throwable throwable) {
            }

            @Override
            public void onRequestCustomBridges() {
            }
        });
    }

    /**
     * Registers the UI warning callback and wires a listener that surfaces
     * warnings for banned nodes, mandatory updates and a disabled DAO.
     */
    public void setFilterWarningHandler(Consumer<String> filterWarningHandler) {
        this.filterWarningHandler = filterWarningHandler;

        addListener(filter -> {
            if (filter != null && filterWarningHandler != null) {
                if (filter.getSeedNodes() != null && !filter.getSeedNodes().isEmpty()) {
                    log.info("One of the seed nodes got banned. {}", filter.getSeedNodes());
                    // Let's keep that more silent. Might be used in case a node is unstable and we don't want to confuse users.
                    // filterWarningHandler.accept(Res.get("popup.warning.nodeBanned", Res.get("popup.warning.seed")));
                }

                if (filter.getPriceRelayNodes() != null && !filter.getPriceRelayNodes().isEmpty()) {
                    log.info("One of the price relay nodes got banned. {}", filter.getPriceRelayNodes());
                    // Let's keep that more silent. Might be used in case a node is unstable and we don't want to confuse users.
                    // filterWarningHandler.accept(Res.get("popup.warning.nodeBanned", Res.get("popup.warning.priceRelay")));
                }

                if (requireUpdateToNewVersionForTrading()) {
                    filterWarningHandler.accept(Res.get("popup.warning.mandatoryUpdate.trading"));
                }

                if (requireUpdateToNewVersionForDAO()) {
                    filterWarningHandler.accept(Res.get("popup.warning.mandatoryUpdate.dao"));
                }
                if (filter.isDisableDao()) {
                    filterWarningHandler.accept(Res.get("popup.warning.disable.dao"));
                }
            }
        });
    }

    /** True when the active filter bans the given privileged dev pub key. */
    public boolean isPrivilegedDevPubKeyBanned(String pubKeyAsHex) {
        Filter filter = getFilter();
        if (filter == null) {
            return false;
        }

        return filter.getBannedPrivilegedDevPubKeys().contains(pubKeyAsHex);
    }

    /** True when privKeyString is a valid, non-banned privileged dev key. */
    public boolean canAddDevFilter(String privKeyString) {
        if (privKeyString == null || privKeyString.isEmpty()) {
            return false;
        }
        if (!isValidDevPrivilegeKey(privKeyString)) {
            log.warn("Key in invalid");
            return false;
        }
        ECKey ecKeyFromPrivate = toECKey(privKeyString);
        String pubKeyAsHex = getPubKeyAsHex(ecKeyFromPrivate);
        if (isPrivilegedDevPubKeyBanned(pubKeyAsHex)) {
            log.warn("Pub key is banned.");
            return false;
        }
        return true;
    }

    /** Derives the hex-encoded public key for the given private key string. */
    public String getSignerPubKeyAsHex(String privKeyString) {
        ECKey ecKey = toECKey(privKeyString);
        return getPubKeyAsHex(ecKey);
    }

    /**
     * Signs and publishes a new dev filter, then removes any stale filters
     * we own from the network.
     */
    public void addDevFilter(Filter filterWithoutSig, String privKeyString) {
        setFilterSigningKey(privKeyString);
        String signatureAsBase64 = getSignature(filterWithoutSig);
        Filter filterWithSig = Filter.cloneWithSig(filterWithoutSig, signatureAsBase64);
        user.setDevelopersFilter(filterWithSig);

        p2PService.addProtectedStorageEntry(filterWithSig);

        // Cleanup potential old filters created in the past with same priv key
        invalidFilters.forEach(filter -> {
            removeInvalidFilters(filter, privKeyString);
        });
    }

    public void addToInvalidFilters(Filter filter) {
        invalidFilters.add(filter);
    }

    /**
     * Re-signs and removes an invalid filter from the network — only possible
     * when its storage owner key matches our own signature key pair.
     */
    public void removeInvalidFilters(Filter filter, String privKeyString) {
        // We can only remove the filter if it's our own filter
        if (Arrays.equals(filter.getOwnerPubKey().getEncoded(),
                keyRing.getSignatureKeyPair().getPublic().getEncoded())) {
            log.info("Remove invalid filter {}", filter);
            setFilterSigningKey(privKeyString);
            String signatureAsBase64 = getSignature(Filter.cloneWithoutSig(filter));
            Filter filterWithSig = Filter.cloneWithSig(filter, signatureAsBase64);
            boolean result = p2PService.removeData(filterWithSig);
            if (!result) {
                log.warn("Could not remove filter {}", filter);
            }
        } else {
            log.info("The invalid filter is not our own, so we cannot remove it from the network");
        }
    }

    /** True when privKeyString is allowed to remove the persisted dev filter. */
    public boolean canRemoveDevFilter(String privKeyString) {
        if (privKeyString == null || privKeyString.isEmpty()) {
            return false;
        }
        Filter developersFilter = getDevFilter();
        if (developersFilter == null) {
            log.warn("There is no persisted dev filter to be removed.");
            return false;
        }
        if (!isValidDevPrivilegeKey(privKeyString)) {
            log.warn("Key in invalid.");
            return false;
        }
        ECKey ecKeyFromPrivate = toECKey(privKeyString);
        String pubKeyAsHex = getPubKeyAsHex(ecKeyFromPrivate);
        if (!developersFilter.getSignerPubKeyAsHex().equals(pubKeyAsHex)) {
            log.warn("pubKeyAsHex derived from private key does not match filterSignerPubKey. " +
                            "filterSignerPubKey={}, pubKeyAsHex derived from private key={}",
                    developersFilter.getSignerPubKeyAsHex(), pubKeyAsHex);
            return false;
        }
        if (isPrivilegedDevPubKeyBanned(pubKeyAsHex)) {
            log.warn("Pub key is banned.");
            return false;
        }
        return true;
    }

    /** Removes the persisted dev filter from the network and local user state. */
    public void removeDevFilter(String privKeyString) {
        setFilterSigningKey(privKeyString);
        Filter filterWithSig = user.getDevelopersFilter();
        if (filterWithSig == null) {
            // Should not happen as UI button is deactivated in that case
            return;
        }

        if (p2PService.removeData(filterWithSig)) {
            user.setDevelopersFilter(null);
        } else {
            log.warn("Removing dev filter from network failed");
        }
    }

    public void addListener(Listener listener) {
        listeners.add(listener);
    }

    public ObjectProperty<Filter> filterProperty() {
        return filterProperty;
    }

    @Nullable
    public Filter getFilter() {
        return filterProperty.get();
    }

    @Nullable
    public Filter getDevFilter() {
        return user.getDevelopersFilter();
    }

    public PublicKey getOwnerPubKey() {
        return keyRing.getSignatureKeyPair().getPublic();
    }

    public boolean isCurrencyBanned(String currencyCode) {
        return getFilter() != null &&
                getFilter().getBannedCurrencies() != null &&
                getFilter().getBannedCurrencies().stream()
                        .anyMatch(e -> e.equals(currencyCode));
    }

    public boolean isPaymentMethodBanned(PaymentMethod paymentMethod) {
        return getFilter() != null &&
                getFilter().getBannedPaymentMethods() != null &&
                getFilter().getBannedPaymentMethods().stream()
                        .anyMatch(e -> e.equals(paymentMethod.getId()));
    }

    public boolean isOfferIdBanned(String offerId) {
        return getFilter() != null &&
                getFilter().getBannedOfferIds().stream()
                        .anyMatch(e -> e.equals(offerId));
    }

    /** Ban check applied to trading peers. */
    public boolean isNodeAddressBanned(NodeAddress nodeAddress) {
        return getFilter() != null &&
                getFilter().getNodeAddressesBannedFromTrading().stream()
                        .anyMatch(e -> e.equals(nodeAddress.getFullAddress()));
    }

    /** Ban check applied at the network layer (wired in the constructor). */
    public boolean isNodeAddressBannedFromNetwork(NodeAddress nodeAddress) {
        return getFilter() != null &&
                getFilter().getNodeAddressesBannedFromNetwork().stream()
                        .anyMatch(e -> e.equals(nodeAddress.getFullAddress()));
    }

    public boolean isAutoConfExplorerBanned(String address) {
        return getFilter() != null &&
                getFilter().getBannedAutoConfExplorers().stream()
                        .anyMatch(e -> e.equals(address));
    }

    /** True when the filter mandates a newer app version for trading. */
    public boolean requireUpdateToNewVersionForTrading() {
        if (getFilter() == null) {
            return false;
        }
        boolean requireUpdateToNewVersion = false;
        String getDisableTradeBelowVersion = getFilter().getDisableTradeBelowVersion();
        if (getDisableTradeBelowVersion != null && !getDisableTradeBelowVersion.isEmpty()) {
            requireUpdateToNewVersion = Version.isNewVersion(getDisableTradeBelowVersion);
        }
        return requireUpdateToNewVersion;
    }

    /** True when the filter mandates a newer app version for DAO use. */
    public boolean requireUpdateToNewVersionForDAO() {
        if (getFilter() == null) {
            return false;
        }
        boolean requireUpdateToNewVersion = false;
        String disableDaoBelowVersion = getFilter().getDisableDaoBelowVersion();
        if (disableDaoBelowVersion != null && !disableDaoBelowVersion.isEmpty()) {
            requireUpdateToNewVersion = Version.isNewVersion(disableDaoBelowVersion);
        }
        return requireUpdateToNewVersion;
    }

    /**
     * Matches the peer's payment account data against banned account entries
     * by reflectively invoking the filter's configured getter and comparing
     * the returned value (case-insensitive). Reflection failures count as
     * not banned.
     */
    public boolean arePeersPaymentAccountDataBanned(PaymentAccountPayload paymentAccountPayload) {
        return getFilter() != null && paymentAccountPayload != null &&
                getFilter().getBannedPaymentAccounts().stream()
                        .filter(paymentAccountFilter -> paymentAccountFilter.getPaymentMethodId().equals(
                                paymentAccountPayload.getPaymentMethodId()))
                        .anyMatch(paymentAccountFilter -> {
                            try {
                                Method method = paymentAccountPayload.getClass().getMethod(paymentAccountFilter.getGetMethodName());
                                // We invoke getter methods (no args), e.g. getHolderName
                                String valueFromInvoke = (String) method.invoke(paymentAccountPayload);
                                return valueFromInvoke.equalsIgnoreCase(paymentAccountFilter.getValue());
                            } catch (Throwable e) {
                                log.error(e.getMessage());
                                return false;
                            }
                        });
    }

    public boolean isWitnessSignerPubKeyBanned(String witnessSignerPubKeyAsHex) {
        return getFilter() != null &&
                getFilter().getBannedAccountWitnessSignerPubKeys() != null &&
                getFilter().getBannedAccountWitnessSignerPubKeys().stream()
                        .anyMatch(e -> e.equals(witnessSignerPubKeyAsHex));
    }

    /**
     * Verifies a BSQ-swap offer's proof of work against the filter's
     * difficulty. Any offer passes when no filter is active.
     */
    public boolean isProofOfWorkValid(Offer offer) {
        Filter filter = getFilter();
        if (filter == null) {
            return true;
        }

        checkArgument(offer.getBsqSwapOfferPayload().isPresent(),
                "Offer payload must be BsqSwapOfferPayload");
        return HashCashService.verify(offer.getBsqSwapOfferPayload().get().getProofOfWork(),
                HashCashService.getBytes(offer.getId() + offer.getOwnerNodeAddress().toString()),
                filter.getPowDifficulty(),
                challengeValidation,
                difficultyValidation);
    }

    // Private
    /**
     * Validates a filter received from the network (signer key in list,
     * signature valid, newer than the current one) and, if accepted, applies
     * it and persists its banned-node lists.
     */
    private void onFilterAddedFromNetwork(Filter newFilter) {
        Filter currentFilter = getFilter();

        if (!isFilterPublicKeyInList(newFilter)) {
            if (newFilter.getSignerPubKeyAsHex() != null && !newFilter.getSignerPubKeyAsHex().isEmpty()) {
                log.warn("isFilterPublicKeyInList failed. Filter.getSignerPubKeyAsHex={}", newFilter.getSignerPubKeyAsHex());
            } else {
                log.info("isFilterPublicKeyInList failed. Filter.getSignerPubKeyAsHex not set (expected case for pre v1.3.9 filter)");
            }
            return;
        }
        if (!isSignatureValid(newFilter)) {
            log.warn("verifySignature failed. Filter={}", newFilter);
            return;
        }

        if (currentFilter != null) {
            if (currentFilter.getCreationDate() > newFilter.getCreationDate()) {
                log.info("We received a new filter from the network but the creation date is older than the " +
                        "filter we have already. We ignore the new filter.");

                addToInvalidFilters(newFilter);
                return;
            } else {
                log.info("We received a new filter from the network and the creation date is newer than the " +
                        "filter we have already. We ignore the old filter.");
                addToInvalidFilters(currentFilter);
            }

            // NOTE(review): this banned-key check only runs when a filter was
            // already active; the very first filter is never checked here —
            // confirm that is intended.
            if (isPrivilegedDevPubKeyBanned(newFilter.getSignerPubKeyAsHex())) {
                log.warn("Pub key of filter is banned. currentFilter={}, newFilter={}", currentFilter, newFilter);
                return;
            }
        }

        // Our new filter is newer so we apply it.
        // We do not require strict guarantees here (e.g. clocks not synced) as only trusted developers have the key
        // for deploying filters and this is only in place to avoid unintended situations of multiple filters
        // from multiple devs or if same dev publishes new filter from different app without the persisted devFilter.
        filterProperty.set(newFilter);

        // Seed nodes are requested at startup before we get the filter so we only apply the banned
        // nodes at the next startup and don't update the list in the P2P network domain.
        // We persist it to the property file which is read before any other initialisation.
        saveBannedNodes(BANNED_SEED_NODES, newFilter.getSeedNodes());
        saveBannedNodes(BANNED_BTC_NODES, newFilter.getBtcNodes());

        // Banned price relay nodes we can apply at runtime
        List<String> priceRelayNodes = newFilter.getPriceRelayNodes();
        saveBannedNodes(BANNED_PRICE_RELAY_NODES, priceRelayNodes);

        //TODO should be moved to client with listening on onFilterAdded
        providersRepository.applyBannedNodes(priceRelayNodes);

        //TODO should be moved to client with listening on onFilterAdded
        if (newFilter.isPreventPublicBtcNetwork() &&
                preferences.getBitcoinNodesOptionOrdinal() == BtcNodes.BitcoinNodesOption.PUBLIC.ordinal()) {
            preferences.setBitcoinNodesOptionOrdinal(BtcNodes.BitcoinNodesOption.PROVIDED.ordinal());
        }

        listeners.forEach(e -> e.onFilterAdded(newFilter));
    }

    /**
     * Clears the active filter and the persisted banned-node lists when the
     * filter we currently apply is removed from the network.
     */
    private void onFilterRemovedFromNetwork(Filter filter) {
        if (!isFilterPublicKeyInList(filter)) {
            log.warn("isFilterPublicKeyInList failed. Filter={}", filter);
            return;
        }
        if (!isSignatureValid(filter)) {
            log.warn("verifySignature failed. Filter={}", filter);
            return;
        }

        // We don't check for banned filter as we want to remove a banned filter anyway.

        if (filterProperty.get() != null && !filterProperty.get().equals(filter)) {
            return;
        }

        clearBannedNodes();

        if (filter.equals(user.getDevelopersFilter())) {
            user.setDevelopersFilter(null);
        }
        filterProperty.set(null);
    }

    // Clears options files from banned nodes
    private void clearBannedNodes() {
        saveBannedNodes(BANNED_BTC_NODES, null);
        saveBannedNodes(BANNED_SEED_NODES, null);
        saveBannedNodes(BANNED_PRICE_RELAY_NODES, null);

        if (providersRepository.getBannedNodes() != null) {
            providersRepository.applyBannedNodes(null);
        }
    }

    /** Persists (or clears, when null) a banned-node list as a config option. */
    private void saveBannedNodes(String optionName, List<String> bannedNodes) {
        if (bannedNodes != null)
            configFileEditor.setOption(optionName, String.join(",", bannedNodes));
        else
            configFileEditor.clearOption(optionName);
    }

    /** True when the private key parses and its pub key is in the allowed list. */
    private boolean isValidDevPrivilegeKey(String privKeyString) {
        try {
            ECKey filterSigningKey = toECKey(privKeyString);
            String pubKeyAsHex = getPubKeyAsHex(filterSigningKey);
            return isPublicKeyInList(pubKeyAsHex);
        } catch (Throwable t) {
            return false;
        }
    }

    private void setFilterSigningKey(String privKeyString) {
        this.filterSigningKey = toECKey(privKeyString);
    }

    /** DER-encodes the ECDSA signature of the filter's proto hash, as Base64. */
    private String getSignature(Filter filterWithoutSig) {
        Sha256Hash hash = getSha256Hash(filterWithoutSig);
        ECKey.ECDSASignature ecdsaSignature = filterSigningKey.sign(hash);
        byte[] encodeToDER = ecdsaSignature.encodeToDER();
        return new String(Base64.encode(encodeToDER), StandardCharsets.UTF_8);
    }

    /** True when the filter's signer pub key is in our allowed-key list. */
    private boolean isFilterPublicKeyInList(Filter filter) {
        String signerPubKeyAsHex = filter.getSignerPubKeyAsHex();
        if (!isPublicKeyInList(signerPubKeyAsHex)) {
            log.info("Invalid filter (expected case for pre v1.3.9 filter as we still keep that in the network " +
                            "but the new version does not recognize it as valid filter): " +
                            "signerPubKeyAsHex from filter is not part of our pub key list. " +
                            "signerPubKeyAsHex={}, publicKeys={}, filterCreationDate={}",
                    signerPubKeyAsHex, publicKeys, new Date(filter.getCreationDate()));
            return false;
        }
        return true;
    }

    private boolean isPublicKeyInList(String pubKeyAsHex) {
        boolean isPublicKeyInList = publicKeys.contains(pubKeyAsHex);
        if (!isPublicKeyInList) {
            log.info("pubKeyAsHex is not part of our pub key list (expected case for pre v1.3.9 filter). pubKeyAsHex={}, publicKeys={}", pubKeyAsHex, publicKeys);
        }
        return isPublicKeyInList;
    }

    /**
     * Verifies the filter's Base64/DER-encoded ECDSA signature against the
     * hash of the signature-less filter and the signer's public key.
     */
    private boolean isSignatureValid(Filter filter) {
        try {
            Filter filterForSigVerification = Filter.cloneWithoutSig(filter);
            Sha256Hash hash = getSha256Hash(filterForSigVerification);

            checkNotNull(filter.getSignatureAsBase64(), "filter.getSignatureAsBase64() must not be null");
            byte[] sigData = Base64.decode(filter.getSignatureAsBase64());
            ECKey.ECDSASignature ecdsaSignature = ECKey.ECDSASignature.decodeFromDER(sigData);

            String signerPubKeyAsHex = filter.getSignerPubKeyAsHex();
            byte[] decode = HEX.decode(signerPubKeyAsHex);
            ECKey ecPubKey = ECKey.fromPublicOnly(decode);
            return ecPubKey.verify(hash, ecdsaSignature);
        } catch (Throwable e) {
            log.warn("verifySignature failed. filter={}", filter);
            return false;
        }
    }

    private ECKey toECKey(String privKeyString) {
        return ECKey.fromPrivate(new BigInteger(1, HEX.decode(privKeyString)));
    }

    /** SHA-256 over the filter's serialized protobuf representation. */
    private Sha256Hash getSha256Hash(Filter filter) {
        byte[] filterData = filter.toProtoMessage().toByteArray();
        return Sha256Hash.of(filterData);
    }

    private String getPubKeyAsHex(ECKey ecKey) {
        return HEX.encode(ecKey.getPubKey());
    }
}
package com.rafkind.paintown.animator;

import java.awt.event.*;
import javax.swing.*;
import javax.swing.event.*;
import com.rafkind.paintown.Lambda0;
import com.rafkind.paintown.Lambda1;
import com.rafkind.paintown.Lambda2;
import org.swixml.SwingEngine;

/**
 * Editor pane for a {@link Projectile}: shows the projectile's animations in a
 * list and lets the user preview (start/stop), open an animation for editing,
 * and change the preview scale.
 *
 * The widget tree is loaded from the swixml layout file
 * {@code animator/projectile.xml}; components are looked up by the ids used in
 * that file ("canvas", "start", "stop", "edit", "animations", "scale-num",
 * "scale").
 */
public class ProjectilePane {

    // Root component of the swixml layout, handed to SpecialPanel in getEditor().
    private JPanel mainPanel;
    // The projectile being edited; animations are read from it by index.
    private Projectile projectile;
    // Animation currently playing in the preview area, or null when stopped.
    private Animation currentAnimation;

    /**
     * Builds the pane and wires all UI behavior.
     *
     * @param animator   the owning Animator window; currently unused here
     *                   because the tab-opening call below is commented out
     * @param projectile the projectile whose animations are listed and previewed
     */
    public ProjectilePane( final Animator animator, final Projectile projectile ){
        this.projectile = projectile;
        // Load the widget tree from the swixml XML layout.
        SwingEngine engine = new SwingEngine( "animator/projectile.xml" );
        // Preview surface; the Lambda0 supplies no overlay (always null).
        final DrawArea drawArea = new DrawArea(new Lambda0(){
            public Object invoke(){
                return null;
            }
        });
        JPanel canvas = (JPanel) engine.find( "canvas" );
        canvas.add( drawArea );
        final JButton start = (JButton) engine.find( "start" );
        final JButton stop = (JButton) engine.find( "stop" );
        final JButton edit = (JButton) engine.find( "edit" );
        final JList animations = (JList) engine.find( "animations" );
        animations.setListData( projectile.getAnimations() );
        // Opens the animation at the given list index in a CharacterAnimation
        // editor. Shared by the double-click handler and the "edit" button.
        final Lambda1 editAnimation = new Lambda1(){
            public Object invoke( Object i ){
                int index = ((Integer) i).intValue();
                Animation temp = projectile.getAnimation( index );
                // NOTE(review): this local `edit` shadows the outer "edit"
                // button, and with the addNewTab call below commented out the
                // constructed CharacterAnimation is never displayed — it is
                // only kept in case its constructor has side effects. Confirm
                // whether the tab-opening behavior should be restored.
                CharacterAnimation edit = new CharacterAnimation( projectile, temp, new Lambda2(){
                    public Object invoke(Object o1, Object o2){
                        return null;
                    }
                });
                // animator.addNewTab( edit.getEditor(), temp.getName() );
                return null;
            }
        };
        // Double-clicking a list entry opens it for editing.
        animations.addMouseListener( new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                if (e.getClickCount() == 2) {
                    int index = animations.locationToIndex(e.getPoint());
                    editAnimation.invoke_( new Integer( index ) );
                }
            }
        });
        // "Edit" button edits the currently selected entry (index may be -1
        // when nothing is selected; presumably getAnimation handles that —
        // TODO confirm).
        edit.addActionListener( new AbstractAction(){
            public void actionPerformed( ActionEvent event ){
                editAnimation.invoke_( new Integer( animations.getSelectedIndex() ) );
            }
        });
        // "Start" stops any running preview, then animates the selection.
        start.addActionListener( new AbstractAction(){
            public void actionPerformed( ActionEvent event ){
                if ( animations.getSelectedValue() != null ){
                    if ( currentAnimation != null ){
                        currentAnimation.stopRunning();
                    }
                    currentAnimation = (Animation) animations.getSelectedValue();
                    drawArea.animate( currentAnimation );
                    currentAnimation.startRunning();
                }
            }
        });
        // "Stop" clears the preview surface and halts the running animation.
        stop.addActionListener( new AbstractAction(){
            public void actionPerformed( ActionEvent event ){
                drawArea.unanimate();
                if ( currentAnimation != null ){
                    currentAnimation.stopRunning();
                }
            }
        });
        final JLabel scaleNum = (JLabel) engine.find( "scale-num" );
        scaleNum.setText( "Scale: " + drawArea.getScale() );
        final JSlider scale = (JSlider) engine.find( "scale" );
        // Slider value maps to scale via a fixed factor of 5 (slider is
        // integer-valued; scale is slider/5.0).
        scale.setValue( (int)(drawArea.getScale() * 5.0) );
        scale.addChangeListener( new ChangeListener(){
            public void stateChanged( ChangeEvent e ){
                drawArea.setScale( scale.getValue() / 5.0 );
                scaleNum.setText( "Scale: " + drawArea.getScale() );
            }
        });
        mainPanel = (JPanel) engine.getRootComponent();
    }

    /**
     * Wraps this pane's root component for display; the middle argument
     * (an icon/secondary component slot, presumably — verify against
     * SpecialPanel) is deliberately null.
     */
    public SpecialPanel getEditor(){
        return new SpecialPanel( mainPanel, null, projectile );
    }
}
package de.otto.jlineup;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.otto.jlineup.config.JobConfig;
import de.otto.jlineup.file.FileTracker;

import java.io.File;
import java.io.IOException;
import java.io.Reader;

import static com.fasterxml.jackson.core.JsonParser.Feature.*;
import static com.fasterxml.jackson.databind.SerializationFeature.INDENT_OUTPUT;

/**
 * Static facade around a single, pre-configured Jackson {@link ObjectMapper}.
 *
 * The shared mapper pretty-prints output (INDENT_OUTPUT) and accepts lenient
 * JSON input: comments, trailing commas, and unquoted control characters
 * (useful for hand-written config files).
 *
 * All methods wrap checked Jackson/IO exceptions in {@link RuntimeException}
 * with a descriptive message, so callers never deal with checked exceptions.
 */
public class JacksonWrapper {

    // Configured once in the static initializer and shared; ObjectMapper is
    // safe for concurrent use after configuration is complete.
    private static final ObjectMapper objectMapper;

    static {
        objectMapper = new ObjectMapper();
        objectMapper.enable(INDENT_OUTPUT);
        objectMapper.enable(ALLOW_COMMENTS);
        objectMapper.enable(ALLOW_TRAILING_COMMA);
        objectMapper.enable(ALLOW_UNQUOTED_CONTROL_CHARS);
    }

    // Utility class: all members are static, so instantiation is meaningless.
    // The private constructor suppresses the implicit public one.
    private JacksonWrapper() {
    }

    private static ObjectMapper objectMapper() {
        return objectMapper;
    }

    /**
     * Serializes {@code object} to a pretty-printed JSON string.
     *
     * @param object the object to serialize
     * @return the JSON representation
     * @throws RuntimeException if Jackson cannot serialize the object
     */
    public static String serializeObject(Object object) {
        try {
            return objectMapper().writeValueAsString(object);
        } catch (JsonProcessingException e) {
            throw new RuntimeException("There is a problem while writing the " + object.getClass().getCanonicalName() + " with Jackson.", e);
        }
    }

    /**
     * Reads a {@link JobConfig} from the given reader (lenient JSON allowed).
     *
     * @param reader source of the JSON config; not closed by this method
     * @return the parsed config
     * @throws RuntimeException if reading or binding fails
     */
    public static JobConfig deserializeConfig(Reader reader) {
        try {
            return objectMapper().readValue(reader, JobConfig.class);
        } catch (IOException e) {
            throw new RuntimeException("Error reading config into object.", e);
        }
    }

    /**
     * Reads a {@link FileTracker} from the given file.
     *
     * @param file the file-tracker JSON file
     * @return the parsed tracker
     * @throws RuntimeException if the file cannot be read or bound
     */
    public static FileTracker readFileTrackerFile(File file) {
        try {
            return objectMapper().readValue(file, FileTracker.class);
        } catch (IOException e) {
            throw new RuntimeException("Could not read FileTracker file.", e);
        }
    }
}
package io.keen.client.java; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.StringReader; import java.io.StringWriter; import java.net.InetSocketAddress; import java.net.Proxy; import java.net.URL; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import io.keen.client.java.exceptions.InvalidEventCollectionException; import io.keen.client.java.exceptions.InvalidEventException; import io.keen.client.java.exceptions.NoWriteKeyException; import io.keen.client.java.exceptions.ServerException; import io.keen.client.java.http.HttpHandler; import io.keen.client.java.http.OutputSource; import io.keen.client.java.http.Request; import io.keen.client.java.http.Response; import io.keen.client.java.http.UrlConnectionHttpHandler; /** * <p> * KeenClient provides all of the functionality required to: * </p> * * <ul> * <li>Create events from map objects</li> * <li>Automatically insert properties into events as they are created</li> * <li>Post events to the Keen server, either one-at-a-time or in batches</li> * <li>Store events in between batch posts, if desired</li> * <li>Perform posts either synchronously or asynchronously</li> * </ul> * * <p> * To create a {@link KeenClient}, use a subclass of {@link io.keen.client.java.KeenClient.Builder} * which provides the default interfaces for various operations (HTTP, JSON, queueing, async). * </p> * * @author dkador, klitwack * @since 1.0.0 */ public class KeenClient { ///// PUBLIC STATIC METHODS ///// /** * Call this to retrieve the {@code KeenClient} singleton instance. * * @return The singleton instance of the client. 
*/ public static KeenClient client() { if (ClientSingleton.INSTANCE.client == null) { throw new IllegalStateException("Please call KeenClient.initialize() before requesting the client."); } return ClientSingleton.INSTANCE.client; } /** * Initializes the static Keen client. Only the first call to this method has any effect. All * subsequent calls are ignored. * * @param client The {@link io.keen.client.java.KeenClient} implementation to use as the * singleton client for the library. */ public static void initialize(KeenClient client) { if (client == null) { throw new IllegalArgumentException("Client must not be null"); } if (ClientSingleton.INSTANCE.client != null) { // Do nothing. return; } ClientSingleton.INSTANCE.client = client; } /** * Gets whether or not the singleton KeenClient has been initialized. * * @return {@code true} if and only if the client has been initialized. */ public static boolean isInitialized() { return (ClientSingleton.INSTANCE.client != null); } ///// PUBLIC METHODS ////// /** * Adds an event to the default project with default Keen properties and no callbacks. * * @see #addEvent(KeenProject, String, java.util.Map, java.util.Map, KeenCallback) */ public void addEvent(String eventCollection, Map<String, Object> event) { addEvent(eventCollection, event, null); } /** * Adds an event to the default project with no callbacks. * * @see #addEvent(KeenProject, String, java.util.Map, java.util.Map, KeenCallback) */ public void addEvent(String eventCollection, Map<String, Object> event, Map<String, Object> keenProperties) { addEvent(null, eventCollection, event, keenProperties, null); } /** * Synchronously adds an event to the specified collection. This method will immediately * publish the event to the Keen server in the current thread. * * @param project The project in which to publish the event. If a default project has been set * on the client, this parameter may be null, in which case the default project * will be used. 
* @param eventCollection The name of the collection in which to publish the event. * @param event A Map that consists of key/value pairs. Keen naming conventions apply (see * docs). Nested Maps and lists are acceptable (and encouraged!). * @param keenProperties A Map that consists of key/value pairs to override default properties. * ex: "timestamp" -> Calendar.getInstance() * @param callback An optional callback to receive notification of success or failure. */ public void addEvent(KeenProject project, String eventCollection, Map<String, Object> event, Map<String, Object> keenProperties, KeenCallback callback) { if (!isActive) { handleLibraryInactive(callback); return; } if (project == null && defaultProject == null) { handleFailure(null, new IllegalStateException("No project specified, but no default project found")); return; } KeenProject useProject = (project == null ? defaultProject : project); try { // Build the event. Map<String, Object> newEvent = validateAndBuildEvent(useProject, eventCollection, event, keenProperties); // Publish the event. publish(useProject, eventCollection, newEvent); handleSuccess(callback); } catch (Exception e) { handleFailure(callback, e); } } /** * Adds an event to the default project with default Keen properties and no callbacks. * * @see #addEvent(KeenProject, String, java.util.Map, java.util.Map, KeenCallback) */ public void addEventAsync(String eventCollection, Map<String, Object> event) { addEventAsync(eventCollection, event, null); } /** * Adds an event to the default project with no callbacks. * * @see #addEvent(KeenProject, String, java.util.Map, java.util.Map, KeenCallback) */ public void addEventAsync(String eventCollection, Map<String, Object> event, final Map<String, Object> keenProperties) { addEventAsync(null, eventCollection, event, keenProperties, null); } /** * Asynchronously adds an event to the specified collection. 
This method will request that * the Keen client's {@link java.util.concurrent.Executor} executes the publish operation. * * @param project The project in which to publish the event. If a default project has been set * on the client this parameter may be null, in which case the default project * will be used. * @param eventCollection The name of the collection in which to publish the event. * @param event A Map that consists of key/value pairs. Keen naming conventions apply (see * docs). Nested Maps and lists are acceptable (and encouraged!). * @param keenProperties A Map that consists of key/value pairs to override default properties. * ex: "timestamp" -> Calendar.getInstance() * @param callback An optional callback to receive notification of success or failure. */ public void addEventAsync(final KeenProject project, final String eventCollection, final Map<String, Object> event, final Map<String, Object> keenProperties, final KeenCallback callback) { if (!isActive) { handleLibraryInactive(callback); return; } if (project == null && defaultProject == null) { handleFailure(null, new IllegalStateException("No project specified, but no default project found")); return; } final KeenProject useProject = (project == null ? defaultProject : project); // Wrap the asynchronous execute in a try/catch block in case the executor throws a // RejectedExecutionException (or anything else). try { publishExecutor.execute(new Runnable() { @Override public void run() { addEvent(useProject, eventCollection, event, keenProperties, callback); } }); } catch (Exception e) { handleFailure(callback, e); } } /** * Queues an event in the default project with default Keen properties and no callbacks. * * @see #queueEvent(KeenProject, String, java.util.Map, java.util.Map, KeenCallback) */ public void queueEvent(String eventCollection, Map<String, Object> event) { queueEvent(eventCollection, event, null); } /** * Queues an event in the default project with no callbacks. 
* * @see #queueEvent(KeenProject, String, java.util.Map, java.util.Map, KeenCallback) */ public void queueEvent(String eventCollection, Map<String, Object> event, Map<String, Object> keenProperties) { queueEvent(null, eventCollection, event, keenProperties, null); } /** * Synchronously queues an event for publishing. The event will be cached in the client's * {@link io.keen.client.java.KeenEventStore} until the next call to either * {@link #sendQueuedEvents()} or {@link #sendQueuedEventsAsync()}. * * @param project The project in which to publish the event. If a default project has been set * on the client this parameter may be null, in which case the default project * will be used. * @param eventCollection The name of the collection in which to publish the event. * @param event A Map that consists of key/value pairs. Keen naming conventions apply (see * docs). Nested Maps and lists are acceptable (and encouraged!). * @param keenProperties A Map that consists of key/value pairs to override default properties. * ex: "timestamp" -> Calendar.getInstance() * @param callback An optional callback to receive notification of success or failure. */ public void queueEvent(KeenProject project, String eventCollection, Map<String, Object> event, Map<String, Object> keenProperties, final KeenCallback callback) { if (!isActive) { handleLibraryInactive(callback); return; } if (project == null && defaultProject == null) { handleFailure(null, new IllegalStateException("No project specified, but no default project found")); return; } KeenProject useProject = (project == null ? defaultProject : project); try { // Build the event Map<String, Object> newEvent = validateAndBuildEvent(useProject, eventCollection, event, keenProperties); // Serialize the event into JSON. StringWriter writer = new StringWriter(); jsonHandler.writeJson(writer, newEvent); String jsonEvent = writer.toString(); KeenUtils.closeQuietly(writer); // Save the JSON event out to the event store. 
Object handle = eventStore.store(useProject.getProjectId(), eventCollection, jsonEvent); Map<String, Integer> attempts = getAttemptsMap(useProject.getProjectId(), eventCollection); attempts.put("" + handle.hashCode(), maxAttempts); setAttemptsMap(useProject.getProjectId(), eventCollection, attempts); handleSuccess(callback); } catch (Exception e) { handleFailure(callback, e); } } /** * Sends all queued events for the default project with no callbacks. * * @see #sendQueuedEvents(KeenProject, KeenCallback) */ public void sendQueuedEvents() { sendQueuedEvents(null); } /** * Sends all queued events for the specified project with no callbacks. * * @see #sendQueuedEvents(KeenProject, KeenCallback) */ public void sendQueuedEvents(KeenProject project) { sendQueuedEvents(project, null); } /** * Synchronously sends all queued events for the given project. This method will immediately * publish the events to the Keen server in the current thread. * * @param project The project for which to send queued events. If a default project has been set * on the client this parameter may be null, in which case the default project * will be used. * @param callback An optional callback to receive notification of success or failure. */ public synchronized void sendQueuedEvents(KeenProject project, KeenCallback callback) { if (!isActive) { handleLibraryInactive(callback); return; } if (project == null && defaultProject == null) { handleFailure(null, new IllegalStateException("No project specified, but no default project found")); return; } if (!isNetworkConnected()) { KeenLogging.log("Not sending events because there is no network connection. " + "Events will be retried next time `sendQueuedEvents` is called."); handleFailure(callback, new Exception("Network not connected.")); return; } KeenProject useProject = (project == null ? 
defaultProject : project); try { String projectId = useProject.getProjectId(); Map<String, List<Object>> eventHandles = eventStore.getHandles(projectId); Map<String, List<Map<String, Object>>> events = buildEventMap(projectId, eventHandles); String response = publishAll(useProject, events); if (response != null) { try { handleAddEventsResponse(eventHandles, response); } catch (Exception e) { // Errors handling the response are non-fatal; just log them. KeenLogging.log("Error handling response to batch publish: " + e.getMessage()); } } handleSuccess(callback); } catch (Exception e) { handleFailure(callback, e); } } /** * Sends all queued events for the default project with no callbacks. * * @see #sendQueuedEventsAsync(KeenProject, KeenCallback) */ public void sendQueuedEventsAsync() { sendQueuedEventsAsync(null); } /** * Sends all queued events for the specified project with no callbacks. * * @see #sendQueuedEventsAsync(KeenProject, KeenCallback) */ public void sendQueuedEventsAsync(final KeenProject project) { sendQueuedEventsAsync(project, null); } /** * Asynchronously sends all queued events for the given project. This method will request that * the Keen client's {@link java.util.concurrent.Executor} executes the publish operation. * * @param project The project for which to send queued events. If a default project has been set * on the client this parameter may be null, in which case the default project * will be used. * @param callback An optional callback to receive notification of success or failure. */ public void sendQueuedEventsAsync(final KeenProject project, final KeenCallback callback) { if (!isActive) { handleLibraryInactive(callback); return; } if (project == null && defaultProject == null) { handleFailure(null, new IllegalStateException("No project specified, but no default project found")); return; } final KeenProject useProject = (project == null ? 
defaultProject : project); // Wrap the asynchronous execute in a try/catch block in case the executor throws a // RejectedExecutionException (or anything else). try { publishExecutor.execute(new Runnable() { @Override public void run() { sendQueuedEvents(useProject, callback); } }); } catch (Exception e) { handleFailure(callback, e); } } /** * Gets the JSON handler for this client. * * @return The {@link io.keen.client.java.KeenJsonHandler}. */ public KeenJsonHandler getJsonHandler() { return jsonHandler; } /** * Gets the event store for this client. * * @return The {@link io.keen.client.java.KeenEventStore}. */ public KeenEventStore getEventStore() { return eventStore; } /** * Gets the executor for asynchronous publishing for this client. * * @return The {@link java.util.concurrent.Executor}. */ public Executor getPublishExecutor() { return publishExecutor; } /** * Gets the default project that this {@link KeenClient} will use if no project is specified. * * @return The default project. */ public KeenProject getDefaultProject() { return defaultProject; } /** * Sets the default project that this {@link KeenClient} should use if no project is specified. * * @param defaultProject The new default project. */ public void setDefaultProject(KeenProject defaultProject) { this.defaultProject = defaultProject; } /** * Gets the base API URL associated with this instance of the {@link KeenClient}. * * @return The base API URL */ public String getBaseUrl() { return baseUrl; } public void setBaseUrl(String baseUrl) { if (baseUrl == null) { this.baseUrl = KeenConstants.SERVER_ADDRESS; } else { this.baseUrl = baseUrl; } } /** * Sets the maximum number of HTTPS POST retry attempts for all events added in the future. * * @param maxAttempts the maximum number attempts */ public void setMaxAttempts(int maxAttempts) { this.maxAttempts = maxAttempts; } /** * Sets the maximum number of HTTPS POST retry attempts for all events added in the future. 
* * @return the maximum number attempts */ public int getMaxAttempts() { return maxAttempts; } /** * Gets the {@link GlobalPropertiesEvaluator} associated with this instance of the {@link KeenClient}. * * @return The {@link GlobalPropertiesEvaluator} */ public GlobalPropertiesEvaluator getGlobalPropertiesEvaluator() { return globalPropertiesEvaluator; } /** * Call this to set the {@link GlobalPropertiesEvaluator} for this instance of the {@link KeenClient}. * The evaluator is invoked every time an event is added to an event collection. * <p/> * Global properties are properties which are sent with EVERY event. For example, you may wish to always * capture device information like OS version, handset type, orientation, etc. * <p/> * The evaluator takes as a parameter a single String, which is the name of the event collection the * event's being added to. You're responsible for returning a Map which represents the global properties * for this particular event collection. * <p/> * Note that because we use a class defined by you, you can create DYNAMIC global properties. For example, * if you want to capture device orientation, then your evaluator can ask the device for its current orientation * and then construct the Map. If your global properties aren't dynamic, then just return the same Map * every time. * <p/> * Example usage: * <pre> * {@code KeenClient client = KeenClient.client(); * GlobalPropertiesEvaluator evaluator = new GlobalPropertiesEvaluator() { * public Map<String, Object> getGlobalProperties(String eventCollection) { * Map<String, Object> map = new HashMap<String, Object>(); * map.put("some dynamic property name", "some dynamic property value"); * return map; * } * }; * client.setGlobalPropertiesEvaluator(evaluator); * } * </pre> * * @param globalPropertiesEvaluator The evaluator which is invoked any time an event is added to an event * collection. 
*/ public void setGlobalPropertiesEvaluator(GlobalPropertiesEvaluator globalPropertiesEvaluator) { this.globalPropertiesEvaluator = globalPropertiesEvaluator; } /** * Gets the Keen Global Properties map. See docs for {@link #setGlobalProperties(java.util.Map)}. * * @return The Global Properties map. */ public Map<String, Object> getGlobalProperties() { return globalProperties; } /** * Call this to set the Keen Global Properties Map for this instance of the {@link KeenClient}. The Map * is used every time an event is added to an event collection. * <p/> * Keen Global Properties are properties which are sent with EVERY event. For example, you may wish to always * capture static information like user ID, app version, etc. * <p/> * Every time an event is added to an event collection, the SDK will check to see if this property is defined. * If it is, the SDK will copy all the properties from the global properties into the newly added event. * <p/> * Note that because this is just a Map, it's much more difficult to create DYNAMIC global properties. * It also doesn't support per-collection properties. If either of these use cases are important to you, please use * the {@link GlobalPropertiesEvaluator}. * <p/> * Also note that the Keen properties defined in {@link #getGlobalPropertiesEvaluator()} take precedence over * the properties defined in getGlobalProperties, and that the Keen Properties defined in each * individual event take precedence over either of the Global Properties. * <p/> * Example usage: * <p/> * <pre> * KeenClient client = KeenClient.client(); * Map<String, Object> map = new HashMap<String, Object>(); * map.put("some standard key", "some standard value"); * client.setGlobalProperties(map); * </pre> * * @param globalProperties The new map you wish to use as the Keen Global Properties. 
*/ public void setGlobalProperties(Map<String, Object> globalProperties) { this.globalProperties = globalProperties; } /** * Gets whether or not the Keen client is running in debug mode. * * @return {@code true} if debug mode is enabled, otherwise {@code false}. */ public boolean isDebugMode() { return isDebugMode; } /** * Sets whether or not the Keen client should run in debug mode. When debug mode is enabled, * all exceptions will be thrown immediately; otherwise they will be logged and reported to * any callbacks, but never thrown. * * @param isDebugMode {@code true} to enable debug mode, or {@code false} to disable it. */ public void setDebugMode(boolean isDebugMode) { this.isDebugMode = isDebugMode; } /** * Gets whether or not the client is in active mode. * * @return {@code true} if the client is active,; {@code false} if it is inactive. */ public boolean isActive() { return isActive; } /** * Sets an HTTP proxy server configuration for this client. * * @param proxyHost The proxy hostname or IP address. * @param proxyPort The proxy port number. */ public void setProxy(String proxyHost, int proxyPort) { this.proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort)); } /** * Sets an HTTP proxy server configuration for this client. * * @param proxy The Proxy object to set. */ public void setProxy(Proxy proxy) { this.proxy = proxy; } /** * Gets the client Proxy. * * @return the proxy. */ public Proxy getProxy() { return proxy; } ///// PROTECTED ABSTRACT BUILDER IMPLEMENTATION ///// /** * Builder class for instantiating Keen clients. Subclasses should override this and * implement the getDefault* methods to provide new default behavior. * <p/> * This builder doesn't include any default implementation for handling JSON serialization and * de-serialization. Subclasses must provide one. * <p/> * This builder defaults to using HttpURLConnection to handle HTTP requests. 
* <p/> * To cache events in between batch uploads, this builder defaults to a RAM-based event store. * <p/> * This builder defaults to a fixed thread pool (constructed with * {@link java.util.concurrent.Executors#newFixedThreadPool(int)}) to run asynchronous requests. */ public static abstract class Builder { private HttpHandler httpHandler; private KeenJsonHandler jsonHandler; private KeenEventStore eventStore; private Executor publishExecutor; private KeenNetworkStatusHandler networkStatusHandler; /** * Gets the default {@link HttpHandler} to use if none is explicitly set for this builder. * * This implementation returns a handler that will use {@link java.net.HttpURLConnection} * to make HTTP requests. * * Subclasses should override this to provide an alternative default {@link HttpHandler}. * * @return The default {@link HttpHandler}. * @throws Exception If there is an error creating the {@link HttpHandler}. */ protected HttpHandler getDefaultHttpHandler() throws Exception { return new UrlConnectionHttpHandler(); } /** * Gets the {@link HttpHandler} that this builder is currently configured to use for making * HTTP requests. If null, a default will be used instead. * * @return The {@link HttpHandler} to use. */ public HttpHandler getHttpHandler() { return httpHandler; } /** * Sets the {@link HttpHandler} to use for making HTTP requests. * * @param httpHandler The {@link HttpHandler} to use. */ public void setHttpHandler(HttpHandler httpHandler) { this.httpHandler = httpHandler; } /** * Sets the {@link HttpHandler} to use for making HTTP requests. * * @param httpHandler The {@link HttpHandler} to use. * @return This instance (for method chaining). */ public Builder withHttpHandler(HttpHandler httpHandler) { setHttpHandler(httpHandler); return this; } /** * Gets the default {@link KeenJsonHandler} to use if none is explicitly set for this builder. * * Subclasses must override this to provide a default {@link KeenJsonHandler}. 
* * @return The default {@link KeenJsonHandler}. * @throws Exception If there is an error creating the {@link KeenJsonHandler}. */ protected abstract KeenJsonHandler getDefaultJsonHandler() throws Exception; /** * Gets the {@link KeenJsonHandler} that this builder is currently configured to use for * handling JSON operations. If null, a default will be used instead. * * @return The {@link KeenJsonHandler} to use. */ public KeenJsonHandler getJsonHandler() { return jsonHandler; } /** * Sets the {@link KeenJsonHandler} to use for handling JSON operations. * * @param jsonHandler The {@link KeenJsonHandler} to use. */ public void setJsonHandler(KeenJsonHandler jsonHandler) { this.jsonHandler = jsonHandler; } /** * Sets the {@link KeenJsonHandler} to use for handling JSON operations. * * @param jsonHandler The {@link KeenJsonHandler} to use. * @return This instance (for method chaining). */ public Builder withJsonHandler(KeenJsonHandler jsonHandler) { setJsonHandler(jsonHandler); return this; } /** * Gets the default {@link KeenEventStore} to use if none is explicitly set for this builder. * * This implementation returns a RAM-based store. * * Subclasses should override this to provide an alternative default {@link KeenEventStore}. * * @return The default {@link KeenEventStore}. * @throws Exception If there is an error creating the {@link KeenEventStore}. */ protected KeenEventStore getDefaultEventStore() throws Exception { return new RamEventStore(); } /** * Gets the {@link KeenEventStore} that this builder is currently configured to use for * storing events between batch publish operations. If null, a default will be used instead. * * @return The {@link KeenEventStore} to use. */ public KeenEventStore getEventStore() { return eventStore; } /** * Sets the {@link KeenEventStore} to use for storing events in between batch publish * operations. * * @param eventStore The {@link KeenEventStore} to use. 
*/ public void setEventStore(KeenEventStore eventStore) { this.eventStore = eventStore; } /** * Sets the {@link KeenEventStore} to use for storing events in between batch publish * operations. * * @param eventStore The {@link KeenEventStore} to use. * @return This instance (for method chaining). */ public Builder withEventStore(KeenEventStore eventStore) { setEventStore(eventStore); return this; } /** * Gets the default {@link Executor} to use if none is explicitly set for this builder. * * This implementation returns a simple fixed thread pool with the number of threads equal * to the number of available processors. * * Subclasses should override this to provide an alternative default {@link Executor}. * * @return The default {@link Executor}. * @throws Exception If there is an error creating the {@link Executor}. */ protected Executor getDefaultPublishExecutor() throws Exception { int procCount = Runtime.getRuntime().availableProcessors(); return Executors.newFixedThreadPool(procCount); } /** * Gets the {@link Executor} that this builder is currently configured to use for * asynchronous publishing operations. If null, a default will be used instead. * * @return The {@link Executor} to use. */ public Executor getPublishExecutor() { return publishExecutor; } /** * Sets the {@link Executor} to use for asynchronous publishing operations. * * @param publishExecutor The {@link Executor} to use. */ public void setPublishExecutor(Executor publishExecutor) { this.publishExecutor = publishExecutor; } /** * Sets the {@link Executor} to use for asynchronous publishing operations. * * @param publishExecutor The {@link Executor} to use. * @return This instance (for method chaining). */ public Builder withPublishExecutor(Executor publishExecutor) { setPublishExecutor(publishExecutor); return this; } /** * Gets the default {@link KeenNetworkStatusHandler} to use if none is explicitly set for this builder. * * This implementation always returns true. 
* * Subclasses should override this to provide an alternative default {@link KeenNetworkStatusHandler}. * * @return The default {@link KeenNetworkStatusHandler}. */ protected KeenNetworkStatusHandler getDefaultNetworkStatusHandler() { return new AlwaysConnectedNetworkStatusHandler(); } /** * Gets the {@link KeenNetworkStatusHandler} that this builder is currently configured to use. * If null, a default will be used instead. * * @return The {@link KeenNetworkStatusHandler} to use. */ public KeenNetworkStatusHandler getNetworkStatusHandler () { return networkStatusHandler; } /** * Sets the {@link KeenNetworkStatusHandler} to use. * * @param networkStatusHandler The {@link KeenNetworkStatusHandler} to use. */ public void setNetworkStatusHandler(KeenNetworkStatusHandler networkStatusHandler) { this.networkStatusHandler = networkStatusHandler; } /** * Sets the {@link KeenNetworkStatusHandler} to use. * * @param networkStatusHandler The {@link KeenNetworkStatusHandler} to use. * @return This instance (for method chaining). */ public Builder withNetworkStatusHandler(KeenNetworkStatusHandler networkStatusHandler) { setNetworkStatusHandler(networkStatusHandler); return this; } /** * Builds a new Keen client using the interfaces which have been specified explicitly on * this builder instance via the set* or with* methods, or the default interfaces if none * have been specified. * * @return A newly constructed Keen client. 
 */
public KeenClient build() {
    // Each interface falls back to a subclass-provided default when not explicitly set.
    // Failures are logged and swallowed so that build() never throws; a core interface
    // left null here causes the KeenClient constructor below to mark the client inactive.
    try {
        if (httpHandler == null) {
            httpHandler = getDefaultHttpHandler();
        }
    } catch (Exception e) {
        KeenLogging.log("Exception building HTTP handler: " + e.getMessage());
    }

    try {
        if (jsonHandler == null) {
            jsonHandler = getDefaultJsonHandler();
        }
    } catch (Exception e) {
        KeenLogging.log("Exception building JSON handler: " + e.getMessage());
    }

    try {
        if (eventStore == null) {
            eventStore = getDefaultEventStore();
        }
    } catch (Exception e) {
        KeenLogging.log("Exception building event store: " + e.getMessage());
    }

    try {
        if (publishExecutor == null) {
            publishExecutor = getDefaultPublishExecutor();
        }
    } catch (Exception e) {
        KeenLogging.log("Exception building publish executor: " + e.getMessage());
    }

    try {
        if (networkStatusHandler == null) {
            networkStatusHandler = getDefaultNetworkStatusHandler();
        }
    } catch (Exception e) {
        KeenLogging.log("Exception building network status handler: " + e.getMessage());
    }

    return buildInstance();
}

/**
 * Builds an instance based on this builder. This method is exposed only as a test hook to
 * allow test classes to modify how the {@link KeenClient} is constructed (i.e. by
 * providing a mock {@link Environment}.
 *
 * @return The new {@link KeenClient}.
 */
protected KeenClient buildInstance() {
    return new KeenClient(this);
}

}

///// PROTECTED CONSTRUCTORS /////

/**
 * Constructs a Keen client using system environment variables.
 *
 * @param builder The builder from which to retrieve this client's interfaces and settings.
 */
protected KeenClient(Builder builder) {
    this(builder, new Environment());
}

/**
 * Constructs a Keen client using the provided environment.
 *
 * NOTE: This constructor is only intended for use by test code, and should not be used
 * directly. Subclasses should call the default {@link #KeenClient(Builder)} constructor.
 *
 * @param builder The builder from which to retrieve this client's interfaces and settings.
 * @param env The environment to use to attempt to build the default project.
 */
KeenClient(Builder builder, Environment env) {
    // Initialize final properties using the builder.
    this.httpHandler = builder.httpHandler;
    this.jsonHandler = builder.jsonHandler;
    this.eventStore = builder.eventStore;
    this.publishExecutor = builder.publishExecutor;
    this.networkStatusHandler = builder.networkStatusHandler;

    // If any of the interfaces are null, mark this client as inactive.
    // NOTE(review): networkStatusHandler is not part of this check even though build()
    // also supplies a default for it — confirm whether that omission is intentional.
    if (httpHandler == null || jsonHandler == null ||
            eventStore == null || publishExecutor == null) {
        setActive(false);
    }

    // Initialize other properties.
    this.baseUrl = KeenConstants.SERVER_ADDRESS;
    this.globalPropertiesEvaluator = null;
    this.globalProperties = null;

    // If a default project has been specified in environment variables, use it.
    if (env.getKeenProjectId() != null) {
        defaultProject = new KeenProject(env);
    }
}

///// PROTECTED METHODS /////

/**
 * Sets whether or not the client is in active mode. When the client is inactive, all requests
 * will be ignored.
 *
 * @param isActive {@code true} to make the client active, or {@code false} to make it
 *                 inactive.
 */
protected void setActive(boolean isActive) {
    this.isActive = isActive;
    KeenLogging.log("Keen Client set to " + (isActive ? "active" : "inactive"));
}

/**
 * Validates an event and inserts global properties, producing a new event object which is
 * ready to be published to the Keen service.
 *
 * @param project The project in which the event will be published.
 * @param eventCollection The name of the collection in which the event will be published.
 * @param event A Map that consists of key/value pairs.
 * @param keenProperties A Map that consists of key/value pairs to override default properties.
 * @return A new event Map containing Keen properties and global properties.
*/ protected Map<String, Object> validateAndBuildEvent(KeenProject project, String eventCollection, Map<String, Object> event, Map<String, Object> keenProperties) { if (project.getWriteKey() == null) { throw new NoWriteKeyException("You can't send events to Keen IO if you haven't set a write key."); } validateEventCollection(eventCollection); validateEvent(event); KeenLogging.log(String.format(Locale.US, "Adding event to collection: %s", eventCollection)); // build the event Map<String, Object> newEvent = new HashMap<String, Object>(); // handle keen properties Calendar currentTime = Calendar.getInstance(); String timestamp = ISO_8601_FORMAT.format(currentTime.getTime()); if (keenProperties == null) { keenProperties = new HashMap<String, Object>(); keenProperties.put("timestamp", timestamp); } else if (!keenProperties.containsKey("timestamp")) { // we need to make a copy if we are setting the timestamp since // they might reuse the original keepProperties object. keenProperties = new HashMap<String, Object>(keenProperties); keenProperties.put("timestamp", timestamp); } newEvent.put("keen", keenProperties); // handle global properties Map<String, Object> globalProperties = getGlobalProperties(); if (globalProperties != null) { newEvent.putAll(globalProperties); } GlobalPropertiesEvaluator globalPropertiesEvaluator = getGlobalPropertiesEvaluator(); if (globalPropertiesEvaluator != null) { Map<String, Object> props = globalPropertiesEvaluator.getGlobalProperties(eventCollection); if (props != null) { newEvent.putAll(props); } } // now handle user-defined properties newEvent.putAll(event); return newEvent; } ///// PRIVATE TYPES ///// /** * The {@link io.keen.client.java.KeenClient} class's singleton enum. 
 */
private enum ClientSingleton {
    INSTANCE;
    // The shared client instance; presumably assigned during static initialization
    // elsewhere in this class — TODO confirm (assignment site not visible in this chunk).
    KeenClient client;
}

///// PRIVATE CONSTANTS /////

// NOTE(review): SimpleDateFormat is not thread-safe; this shared static instance must
// not be formatted from multiple threads without external synchronization — verify all
// call sites (validateAndBuildEvent formats with it).
private static final DateFormat ISO_8601_FORMAT =
        new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.US);

///// PRIVATE FIELDS /////

// Pluggable interfaces supplied by the Builder; all are fixed for the client's lifetime.
private final HttpHandler httpHandler;
private final KeenJsonHandler jsonHandler;
private final KeenEventStore eventStore;
private final Executor publishExecutor;
private final KeenNetworkStatusHandler networkStatusHandler;

// Inactive clients ignore all requests; see setActive(boolean).
private boolean isActive = true;
// In debug mode, failures are rethrown instead of being reported to callbacks.
private boolean isDebugMode;
private int maxAttempts = KeenConstants.DEFAULT_MAX_ATTEMPTS;
private KeenProject defaultProject;
private String baseUrl;
private GlobalPropertiesEvaluator globalPropertiesEvaluator;
private Map<String, Object> globalProperties;
// Optional HTTP proxy passed along with every Request.
private Proxy proxy;

///// PRIVATE METHODS /////

/**
 * Validates the name of an event collection.
 *
 * @param eventCollection An event collection name to be validated.
 * @throws io.keen.client.java.exceptions.InvalidEventCollectionException If the event collection name is invalid. See Keen documentation for details.
 */
private void validateEventCollection(String eventCollection) {
    if (eventCollection == null || eventCollection.length() == 0) {
        throw new InvalidEventCollectionException("You must specify a non-null, " +
                "non-empty event collection: " + eventCollection);
    }
    if (eventCollection.startsWith("$")) {
        throw new InvalidEventCollectionException("An event collection name cannot start with the dollar sign ($)" +
                " character.");
    }
    if (eventCollection.length() > 256) {
        throw new InvalidEventCollectionException("An event collection name cannot be longer than 256 characters.");
    }
}

/**
 * Validates a root-level event (starts the recursive validation at depth 0).
 *
 * @see #validateEvent(java.util.Map, int)
 */
private void validateEvent(Map<String, Object> event) {
    validateEvent(event, 0);
}

/**
 * Validates an event.
 *
 * @param event The event to validate.
* @param depth The number of layers of the map structure that have already been traversed; this * should be 0 for the initial call and will increment on each recursive call. */ @SuppressWarnings("unchecked") // cast to generic Map will always be okay in this case private void validateEvent(Map<String, Object> event, int depth) { if (depth == 0) { if (event == null || event.size() == 0) { throw new InvalidEventException("You must specify a non-null, non-empty event."); } if (event.containsKey("keen")) { throw new InvalidEventException("An event cannot contain a root-level property named 'keen'."); } } else if (depth > KeenConstants.MAX_EVENT_DEPTH) { throw new InvalidEventException("An event's depth (i.e. layers of nesting) cannot exceed " + KeenConstants.MAX_EVENT_DEPTH); } for (Map.Entry<String, Object> entry : event.entrySet()) { String key = entry.getKey(); if (key.contains(".")) { throw new InvalidEventException("An event cannot contain a property with the period (.) character in " + "it."); } if (key.startsWith("$")) { throw new InvalidEventException("An event cannot contain a property that starts with the dollar sign " + "($) character in it."); } if (key.length() > 256) { throw new InvalidEventException("An event cannot contain a property name longer than 256 characters."); } validateEventValue(entry.getValue(), depth); } } /** * Validates a value within an event structure. This method will handle validating each element * in a list, as well as recursively validating nested maps. * * @param value The value to validate. * @param depth The current depth of validation. 
*/ @SuppressWarnings("unchecked") // cast to generic Map will always be okay in this case private void validateEventValue(Object value, int depth) { if (value instanceof String) { String strValue = (String) value; if (strValue.length() >= 10000) { throw new InvalidEventException("An event cannot contain a string property value longer than 10," + "000 characters."); } } else if (value instanceof Map) { validateEvent((Map<String, Object>) value, depth + 1); } else if (value instanceof Iterable) { for (Object listElement : (Iterable) value) { validateEventValue(listElement, depth); } } } /** * Builds a map from collection name to a list of event maps, given a map from collection name * to a list of event handles. This method just uses the event store to retrieve each event by * its handle. * * @param eventHandles A map from collection name to a list of event handles in the event store. * @return A map from collection name to a list of event maps. * @throws IOException If there is an error retrieving events from the store. */ private Map<String, List<Map<String, Object>>> buildEventMap(String projectId, Map<String, List<Object>> eventHandles) throws IOException { Map<String, List<Map<String, Object>>> result = new HashMap<String, List<Map<String, Object>>>(); for (Map.Entry<String, List<Object>> entry : eventHandles.entrySet()) { String eventCollection = entry.getKey(); List<Object> handles = entry.getValue(); // Skip event collections that don't contain any events. if (handles == null || handles.size() == 0) { continue; } // Build the event list by retrieving events from the store. List<Map<String, Object>> events = new ArrayList<Map<String, Object>>(handles.size()); Map<String, Integer> attempts = getAttemptsMap(projectId, eventCollection); for (Object handle : handles) { // Get the event from the store. String jsonEvent = eventStore.get(handle); // De-serialize the event from its JSON. 
StringReader reader = new StringReader(jsonEvent); Map<String, Object> event = jsonHandler.readJson(reader); KeenUtils.closeQuietly(reader); String attemptsKey = "" + handle.hashCode(); Integer remainingAttempts = attempts.get(attemptsKey); if (remainingAttempts == null) { // treat null as "this is the last attempt" remainingAttempts = 1; } // decrement the remaining attempts count and put the new value on the map remainingAttempts attempts.put(attemptsKey, remainingAttempts); if (remainingAttempts >= 0) { // if we had some remaining attempts, then try again events.add(event); } else { // otherwise remove it from the store eventStore.remove(handle); // iff eventStore.remove succeeds we can do some housekeeping and remove the // key from the attempts hash. attempts.remove(attemptsKey); } } setAttemptsMap(projectId, eventCollection, attempts); result.put(eventCollection, events); } return result; } /** * Publishes a single event to the Keen service. * * @param project The project in which to publish the event. * @param eventCollection The name of the collection in which to publish the event. * @param event The event to publish. * @return The response from the server. * @throws IOException If there was an error communicating with the server. */ private String publish(KeenProject project, String eventCollection, Map<String, Object> event) throws IOException { // just using basic JDK HTTP library String urlString = String.format(Locale.US, "%s/%s/projects/%s/events/%s", getBaseUrl(), KeenConstants.API_VERSION, project.getProjectId(), eventCollection); URL url = new URL(urlString); return publishObject(project, url, event); } /** * Publishes a batch of events to the Keen service. * * @param project The project in which to publish the event. * @param events A map from collection name to a list of event maps. * @return The response from the server. * @throws IOException If there was an error communicating with the server. 
*/ private String publishAll(KeenProject project, Map<String, List<Map<String, Object>>> events) throws IOException { // just using basic JDK HTTP library String urlString = String.format(Locale.US, "%s/%s/projects/%s/events", getBaseUrl(), KeenConstants.API_VERSION, project.getProjectId()); URL url = new URL(urlString); return publishObject(project, url, events); } /** * Posts a request to the server in the specified project, using the given URL and request data. * The request data will be serialized into JSON using the client's * {@link io.keen.client.java.KeenJsonHandler}. * * @param project The project in which the event(s) will be published; this is used to * determine the write key to use for authentication. * @param url The URL to which the POST should be sent. * @param requestData The request data, which will be serialized into JSON and sent in the * request body. * @return The response from the server. * @throws IOException If there was an error communicating with the server. */ private synchronized String publishObject(KeenProject project, URL url, final Map<String, ?> requestData) throws IOException { if (requestData == null || requestData.size() == 0) { KeenLogging.log("No API calls were made because there were no events to upload"); return null; } // Build an output source which simply writes the serialized JSON to the output. OutputSource source = new OutputSource() { @Override public void writeTo(OutputStream out) throws IOException { OutputStreamWriter writer = new OutputStreamWriter(out, ENCODING); jsonHandler.writeJson(writer, requestData); } }; // If logging is enabled, log the request being sent. 
if (KeenLogging.isLoggingEnabled()) { try { StringWriter writer = new StringWriter(); jsonHandler.writeJson(writer, requestData); String request = writer.toString(); KeenLogging.log(String.format(Locale.US, "Sent request '%s' to URL '%s'", request, url.toString())); } catch (IOException e) { KeenLogging.log("Couldn't log event written to file: "); e.printStackTrace(); } } // Send the request. String writeKey = project.getWriteKey(); Request request = new Request(url, "POST", writeKey, source, proxy); Response response = httpHandler.execute(request); // If logging is enabled, log the response. if (KeenLogging.isLoggingEnabled()) { KeenLogging.log(String.format(Locale.US, "Received response: '%s' (%d)", response.body, response.statusCode)); } // If the request succeeded, return the response body. Otherwise throw an exception. if (response.isSuccess()) { return response.body; } else { throw new ServerException(response.body); } } /** * Returns the status of the network connection * * @return true if there is network connection */ private boolean isNetworkConnected() { return networkStatusHandler.isNetworkConnected(); } ///// PRIVATE CONSTANTS ///// private static final String ENCODING = "UTF-8"; /** * Handles a response from the Keen service to a batch post events operation. In particular, * this method will iterate through the responses and remove any successfully processed events * (or events which failed for known fatal reasons) from the event store so they won't be sent * in subsequent posts. * * @param handles A map from collection names to lists of handles in the event store. This is * referenced against the response from the server to determine which events to * remove from the store. * @param response The response from the server. * @throws IOException If there is an error removing events from the store. 
 */
@SuppressWarnings("unchecked")
private void handleAddEventsResponse(Map<String, List<Object>> handles, String response) throws IOException {
    // Parse the response into a map.
    StringReader reader = new StringReader(response);
    Map<String, Object> responseMap;
    responseMap = jsonHandler.readJson(reader);
    // NOTE(review): reader is not closed here (elsewhere KeenUtils.closeQuietly is used) and
    // a malformed response would surface as an exception from readJson. It's not obvious what
    // the best way is to try and recover from such failures, but just hoping they don't
    // happen is probably the wrong answer.

    // Loop through all the event collections.
    for (Map.Entry<String, Object> entry : responseMap.entrySet()) {
        String collectionName = entry.getKey();

        // Get the list of handles in this collection.
        List<Object> collectionHandles = handles.get(collectionName);

        // Iterate through the elements in the collection. The server returns one result per
        // submitted event, in order, so `index` pairs each result with its store handle.
        List<Map<String, Object>> eventResults = (List<Map<String, Object>>) entry.getValue();
        int index = 0;
        for (Map<String, Object> eventResult : eventResults) {
            // now loop through each event collection's individual results
            boolean removeCacheEntry = true;
            boolean success = (Boolean) eventResult.get(KeenConstants.SUCCESS_PARAM);
            if (!success) {
                // grab error code and description
                // NOTE(review): assumes the error dict and its name entry are always present
                // on failure — a missing entry would NPE here; confirm against the API.
                Map errorDict = (Map) eventResult.get(KeenConstants.ERROR_PARAM);
                String errorCode = (String) errorDict.get(KeenConstants.NAME_PARAM);
                if (errorCode.equals(KeenConstants.INVALID_COLLECTION_NAME_ERROR) ||
                        errorCode.equals(KeenConstants.INVALID_PROPERTY_NAME_ERROR) ||
                        errorCode.equals(KeenConstants.INVALID_PROPERTY_VALUE_ERROR)) {
                    // Known-fatal errors: retrying can never succeed, so drop the event.
                    removeCacheEntry = true;
                    KeenLogging.log("An invalid event was found. Deleting it. Error: " +
                            errorDict.get(KeenConstants.DESCRIPTION_PARAM));
                } else {
                    // Unknown/transient error: keep the event cached so it is retried.
                    String description = (String) errorDict.get(KeenConstants.DESCRIPTION_PARAM);
                    removeCacheEntry = false;
                    KeenLogging.log(String.format(Locale.US,
                            "The event could not be inserted for some reason. " +
                            "Error name and description: %s %s", errorCode,
                            description));
                }
            }

            // If the cache entry should be removed, get the handle at the appropriate index
            // and ask the event store to remove it.
            if (removeCacheEntry) {
                Object handle = collectionHandles.get(index);
                // Try to remove the object from the cache. Catch and log exceptions to prevent
                // a single failure from derailing the rest of the cleanup.
                try {
                    eventStore.remove(handle);
                } catch (IOException e) {
                    KeenLogging.log("Failed to remove object '" + handle + "' from cache");
                }
            }
            index++;
        }
    }
}

/**
 * Reports success to a callback. If the callback is null, this is a no-op. Any exceptions
 * thrown by the callback are silently ignored.
 *
 * @param callback A callback; may be null.
 */
private void handleSuccess(KeenCallback callback) {
    if (callback != null) {
        try {
            callback.onSuccess();
        } catch (Exception userException) {
            // Do nothing.
        }
    }
}

/**
 * Handles a failure in the Keen library. If the client is running in debug mode, this will
 * immediately throw a runtime exception. Otherwise, this will log an error message and, if the
 * callback is non-null, call the {@link KeenCallback#onFailure(Exception)} method. Any
 * exceptions thrown by the callback are silently ignored.
 *
 * @param callback A callback; may be null.
 * @param e The exception which caused the failure.
 */
private void handleFailure(KeenCallback callback, Exception e) {
    if (isDebugMode) {
        // Debug mode surfaces failures immediately instead of reporting them quietly.
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        } else {
            throw new RuntimeException(e);
        }
    } else {
        KeenLogging.log("Encountered error: " + e.getMessage());
        if (callback != null) {
            try {
                callback.onFailure(e);
            } catch (Exception userException) {
                // Do nothing.
            }
        }
    }
}

/**
 * Reports failure when the library is inactive due to failed initialization.
 *
 * @param callback A callback; may be null.
 */
// TODO: Cap how many times this failure is reported, and after that just fail silently.
private void handleLibraryInactive(KeenCallback callback) {
    handleFailure(callback, new IllegalStateException(
            "The Keen library failed to initialize properly and is inactive"));
}

/**
 * Loads the per-event attempt counters for one collection from the event store.
 *
 * Only stores implementing {@link KeenAttemptCountingEventStore} can persist attempt
 * counts; for any other store this returns an empty map.
 *
 * @param projectId the project id
 * @param eventCollection the collection name
 * @return a Map of event hashCodes to attempt counts
 * @throws IOException if reading the persisted counts fails
 */
private Map<String, Integer> getAttemptsMap(String projectId, String eventCollection) throws IOException {
    Map<String, Integer> counters = new HashMap<String, Integer>();
    if (!(eventStore instanceof KeenAttemptCountingEventStore)) {
        return counters;
    }
    KeenAttemptCountingEventStore countingStore = (KeenAttemptCountingEventStore) eventStore;
    String json = countingStore.getAttempts(projectId, eventCollection);
    if (json == null) {
        return counters;
    }
    // De-serialize the persisted JSON and keep only numeric entries, coerced to Integer.
    StringReader reader = new StringReader(json);
    Map<String, Object> raw = jsonHandler.readJson(reader);
    for (Entry<String, Object> e : raw.entrySet()) {
        Object value = e.getValue();
        if (value instanceof Number) {
            counters.put(e.getKey(), ((Number) value).intValue());
        }
    }
    return counters;
}

/**
 * Persists the attempt counters for one collection back to the event store. If the store
 * does not implement {@link KeenAttemptCountingEventStore}, this is a no-op.
 *
 * @param projectId the project id
 * @param eventCollection the collection name
 * @param attempts the current attempts Map
 * @throws IOException if serializing or storing the counts fails
 */
private void setAttemptsMap(String projectId, String eventCollection,
        Map<String, Integer> attempts) throws IOException {
    if (eventStore instanceof KeenAttemptCountingEventStore) {
        KeenAttemptCountingEventStore countingStore = (KeenAttemptCountingEventStore) eventStore;
        StringWriter writer = new StringWriter();
        jsonHandler.writeJson(writer, attempts);
        countingStore.setAttempts(projectId, eventCollection, writer.toString());
    }
}

}
package net.time4j.format; import net.time4j.engine.AttributeQuery; import net.time4j.engine.ChronoElement; import net.time4j.engine.Chronology; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.net.URL; import java.net.URLConnection; import java.text.DateFormatSymbols; import java.text.Normalizer; import java.util.Arrays; import java.util.Collections; import java.util.EnumMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.MissingResourceException; import java.util.PropertyResourceBundle; import java.util.ResourceBundle; import java.util.ServiceLoader; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static net.time4j.format.TextWidth.ABBREVIATED; import static net.time4j.format.TextWidth.SHORT; /** * <p>Source for localized calendrical informations on enum basis like month * or weekday names. </p> * * <p>This class is a facade for an underlying implementation of * {@code CalendarText.Provider} which will be loaded as SPI-interface * by helpf of a {@code ServiceLoader}. If no such SPI-interface can be * found then this class will resort to the sources of JDK (usually * as wrapper around {@code java.text.DateFormatSymbols}). </p> * * <p>Furthermore, an instance of {@code CalendarText} can also access * the UTF-8 text resources in the folder &quot;data&quot; relative to * the class path which are not based on JDK-defaults. </p> * * @author Meno Hochschild * @concurrency <immutable> */ /*[deutsch] * <p>Quelle f&uuml;r lokalisierte kalendarische Informationen auf Enum-Basis * wie zum Beispiel Monats- oder Wochentagsnamen. </p> * * <p>Diese Klasse ist eine Fassade f&uuml;r eine dahinterstehende * {@code CalendarText.Provider}-Implementierung, die als SPI-Interface * &uuml;ber einen {@code ServiceLoader}-Mechanismus geladen wird. 
Gibt es
 * keine solche Implementierung, wird intern auf die Quellen des JDK mittels
 * der Schnittstelle {@code java.text.DateFormatSymbols} ausgewichen. </p>
 *
 * <p>Dar&uuml;berhinaus kann eine Instanz von {@code CalendarText} auch
 * auf UTF-8-Textressourcen im Verzeichnis &quot;data&quot; innerhalb des
 * Klassenpfads zugreifen, die nicht auf JDK-Vorgaben beruhen. </p>
 *
 * @author      Meno Hochschild
 * @concurrency <immutable>
 */
public final class CalendarText {

    /**
     * <p>Default calendar type for all ISO systems. </p>
     */
    /*[deutsch]
     * <p>Standard-Kalendertyp f&uuml;r alle ISO-Systeme. </p>
     */
    public static final String ISO_CALENDAR_TYPE = "iso8601";

    // Instances are cached per "calendarType:language-country" key.
    private static final ConcurrentMap<String, CalendarText> CACHE =
        new ConcurrentHashMap<String, CalendarText>();
    private static final ResourceBundle.Control CONTROL =
        new UTF8NoFallbackControl();

    // name of the provider (as reported by its toString())
    private final String provider;

    // standard text forms, fully populated for every TextWidth/OutputContext combination
    private final Map<TextWidth, Map<OutputContext, Accessor>> stdMonths;
    private final Map<TextWidth, Map<OutputContext, Accessor>> leapMonths;
    private final Map<TextWidth, Map<OutputContext, Accessor>> quarters;
    private final Map<TextWidth, Map<OutputContext, Accessor>> weekdays;
    private final Map<TextWidth, Accessor> eras;
    private final Map<TextWidth, Accessor> meridiems;
    // optional UTF-8 bundle "data/{calendarType}"; null if missing (see mre)
    private final ResourceBundle textForms;
    // the exception raised while loading textForms, kept for later reporting; null on success
    private final MissingResourceException mre;

    private CalendarText(
        String calendarType,
        Locale locale,
        Provider p
    ) {
        super();

        this.provider = p.toString();

        // prepare the general text forms as an optional resource bundle
        ResourceBundle rb = null;
        MissingResourceException tmpMre = null;

        try {
            rb =
                ResourceBundle.getBundle(
                    "data/" + calendarType,
                    locale,
                    CONTROL);
        } catch (MissingResourceException ex) {
            // bundle is optional: remember the failure instead of propagating it
            tmpMre = ex;
        }

        this.textForms = rb;
        this.mre = tmpMre;

        this.stdMonths =
            Collections.unmodifiableMap(
                getMonths(calendarType, locale, p, false));

        Map<TextWidth, Map<OutputContext, Accessor>> tmpLeapMonths =
            getMonths(calendarType, locale, p, true);

        // without dedicated leap month forms, fall back to the standard months
        if (tmpLeapMonths == null) {
            this.leapMonths = this.stdMonths;
        } else {
            this.leapMonths = Collections.unmodifiableMap(tmpLeapMonths);
        }

        // eagerly materialize quarter names for every width/context combination
        Map<TextWidth, Map<OutputContext, Accessor>> qt =
            new EnumMap<TextWidth, Map<OutputContext, Accessor>>(TextWidth.class);

        for (TextWidth tw : TextWidth.values()) {
            Map<OutputContext, Accessor> qo =
                new EnumMap<OutputContext, Accessor>(OutputContext.class);
            for (OutputContext oc : OutputContext.values()) {
                qo.put(
                    oc,
                    new Accessor(
                        p.quarters(calendarType, locale, tw, oc),
                        locale));
            }
            qt.put(tw, qo);
        }

        this.quarters = Collections.unmodifiableMap(qt);

        // eagerly materialize weekday names for every width/context combination
        Map<TextWidth, Map<OutputContext, Accessor>> wt =
            new EnumMap<TextWidth, Map<OutputContext, Accessor>>(TextWidth.class);

        for (TextWidth tw : TextWidth.values()) {
            Map<OutputContext, Accessor> wo =
                new EnumMap<OutputContext, Accessor>(OutputContext.class);
            for (OutputContext oc : OutputContext.values()) {
                wo.put(
                    oc,
                    new Accessor(
                        p.weekdays(calendarType, locale, tw, oc),
                        locale));
            }
            wt.put(tw, wo);
        }

        this.weekdays = Collections.unmodifiableMap(wt);

        // era names only vary by text width
        Map<TextWidth, Accessor> et =
            new EnumMap<TextWidth, Accessor>(TextWidth.class);

        for (TextWidth tw : TextWidth.values()) {
            et.put(
                tw,
                new Accessor(p.eras(calendarType, locale, tw), locale));
        }

        this.eras = Collections.unmodifiableMap(et);

        // AM/PM names only vary by text width
        Map<TextWidth, Accessor> mt =
            new EnumMap<TextWidth, Accessor>(TextWidth.class);

        for (TextWidth tw : TextWidth.values()) {
            mt.put(
                tw,
                new Accessor(p.meridiems(calendarType, locale, tw), locale));
        }

        this.meridiems = Collections.unmodifiableMap(mt);

    }

    /**
     * <p>Returns an instance of {@code CalendarText} for given chronology
     * and language. </p>
     *
     * @param   chronology      chronology (with calendar system)
     * @param   locale          language
     * @return  {@code CalendarText} object maybe cached
     */
    /*[deutsch]
     * <p>Gibt eine Instanz dieser Klasse f&uuml;r die angegebene Chronologie
     * und Sprache zur&uuml;ck.
 </p>
     *
     * @param   chronology      chronology (with calendar system)
     * @param   locale          language
     * @return  {@code CalendarText} object maybe cached
     */
    public static CalendarText getInstance(
        Chronology<?> chronology,
        Locale locale
    ) {
        // delegate via the calendar type derived from the chronology
        return getInstance(extractCalendarType(chronology), locale);
    }

    /**
     * <p>Returns an instance of {@code CalendarText} for given calendar type
     * and language. </p>
     *
     * @param   calendarType    name of calendar system
     * @param   locale          language
     * @return  {@code CalendarText} object maybe cached
     * @see     CalendarType
     */
    /*[deutsch]
     * <p>Gibt eine Instanz dieser Klasse f&uuml;r Kalendertyp
     * und Sprache zur&uuml;ck. </p>
     *
     * @param   calendarType    name of calendar system
     * @param   locale          language
     * @return  {@code CalendarText} object maybe cached
     * @see     CalendarType
     */
    public static CalendarText getInstance(
        String calendarType,
        Locale locale
    ) {
        if (calendarType == null) {
            throw new NullPointerException("Missing calendar type.");
        }

        // cache key: "{calendarType}:{language}-{country}"
        StringBuilder sb = new StringBuilder();
        sb.append(calendarType);
        sb.append(':');
        sb.append(locale.getLanguage());
        sb.append('-');
        sb.append(locale.getCountry());
        String key = sb.toString();

        CalendarText instance = CACHE.get(key);

        if (instance == null) {
            Provider p = null;
            ClassLoader cl = Thread.currentThread().getContextClassLoader();

            if (cl == null) {
                cl = Provider.class.getClassLoader();
            }

            // ServiceLoader mechanism (search for external providers first)
            for (Provider tmp : ServiceLoader.load(Provider.class, cl)) {
                if (
                    isCalendarTypeSupported(tmp, calendarType)
                    && isLocaleSupported(tmp, locale)
                ) {
                    p = tmp;
                    break;
                }
            }

            // JDK resources as second choice
            if (p == null) {
                Provider tmp = new OldJdkProvider();

                if (
                    isCalendarTypeSupported(tmp, calendarType)
                    && isLocaleSupported(tmp, locale)
                ) {
                    p = tmp;
                }

                // if (p == null) {
                // TODO: provider with access to data/{calendar-type}!

                // last resort
                if (p == null) {
                    p = new FallbackProvider();
                }
            }

            instance = new CalendarText(calendarType, locale, p);
            // another thread may have raced us; keep the first published instance
            CalendarText old = CACHE.putIfAbsent(key, instance);

            if (old != null) {
                instance = old;
            }
        }

        return instance;
    }

    /**
     * <p>Yields an {@code Accessor} for all standard months. </p>
     *
     * <p>The underlying list is sorted such that it will obey to the
     * typical order of months in given calendar system. ISO-systems
     * define January as first month and at whole 12 months. Other
     * calendar systems can also define for example 13 months. The order
     * of element value enums must be in agreement with the order of
     * the text forms contained here. </p>
     *
     * @param   textWidth       text width of displayed month name
     * @param   outputContext   output context (stand-alone?)
     * @return  accessor for standard month names
     * @see     net.time4j.Month
     */
    /*[deutsch]
     * <p>Liefert einen {@code Accessor} f&uuml;r alle
     * Standard-Monatsnamen. </p>
     *
     * <p>Die Liste ist so sortiert, da&szlig; die f&uuml;r das jeweilige
     * Kalendersystem typische Reihenfolge der Monate eingehalten wird.
     * ISO-Systeme definieren den Januar als den ersten Monat und insgesamt
     * 12 Monate. Andere Kalendersysteme k&ouml;nnen auch 13 Monate definieren.
     * Die Reihenfolge der Elementwert-Enums mu&szlig; mit der Reihenfolge der
     * hier enthaltenen Textformen &uuml;bereinstimmen. </p>
     *
     * @param   textWidth       text width of displayed month name
     * @param   outputContext   output context (stand-alone?)
     * @return  accessor for standard month names
     * @see     net.time4j.Month
     */
    public Accessor getStdMonths(
        TextWidth textWidth,
        OutputContext outputContext
    ) {
        return this.getMonths(textWidth, outputContext, false);
    }

    /**
     * <p>Yields an {@code Accessor} for all months if a leap month
     * is relevant. </p>
     *
     * <p>Note: Leap months are defined in some calendar systems like the
     * hebrew calendar (&quot;Adar II&quot;) else there is no difference
     * between standard and leap months especially not in ISO-8601.
 </p>
     *
     * @param   textWidth       text width of displayed month name
     * @param   outputContext   output context (stand-alone?)
     * @return  accessor for month names
     * @see     net.time4j.Month
     * @see     #getStdMonths(TextWidth, OutputContext)
     */
    /*[deutsch]
     * <p>Liefert einen {@code Accessor} f&uuml;r alle
     * Monatsnamen, wenn ein Schaltmonat relevant ist. </p>
     *
     * <p>Hinweis: Schaltmonate sind in einigen Kalendersystemen wie dem
     * hebr&auml;ischen Kalender definiert (&quot;Adar II&quot;). Ansonsten
     * gibt es keinen Unterschied zwischen Standard- und Schaltmonaten,
     * insbesondere nicht im ISO-8601-Standard. </p>
     *
     * @param   textWidth       text width of displayed month name
     * @param   outputContext   output context (stand-alone?)
     * @return  accessor for month names
     * @see     net.time4j.Month
     * @see     #getStdMonths(TextWidth, OutputContext)
     */
    public Accessor getLeapMonths(
        TextWidth textWidth,
        OutputContext outputContext
    ) {
        // same lookup as getStdMonths, but with the leap flag set
        return this.getMonths(textWidth, outputContext, true);
    }

    /**
     * <p>Yields an {@code Accessor} for all quarter years. </p>
     *
     * <p>The underlying list of text forms is sorted in the same order
     * as the enum {@code Quarter} and uses its ordinal index as list
     * index. ISO systems define the range January-March as first quarter
     * etc. and at whole four quarters per calendar year. </p>
     *
     * @param   textWidth       text width of displayed quarter name
     * @param   outputContext   output context (stand-alone?)
     * @return  accessor for quarter names
     * @see     net.time4j.Quarter
     */
    /*[deutsch]
     * <p>Liefert einen {@code Accessor} f&uuml;r alle
     * Quartalsnamen. </p>
     *
     * <p>Die Liste ist wie das Enum {@code Quarter} sortiert und verwendet
     * dessen Ordinalindex als Listenindex. ISO-Systeme definieren den
     * Zeitraum Januar-M&auml;rz als erstes Quartal usw. und insgesamt
     * 4 Quartale pro Kalenderjahr. </p>
     *
     * @param   textWidth       text width of displayed quarter name
     * @param   outputContext   output context (stand-alone?)
     * @return  accessor for quarter names
     * @see     net.time4j.Quarter
     */
    public Accessor getQuarters(
        TextWidth textWidth,
        OutputContext outputContext
    ) {
        // the constructor populates every TextWidth/OutputContext combination,
        // so both lookups are guaranteed to succeed
        return this.quarters.get(textWidth).get(outputContext);
    }

    /**
     * <p>Yields an {@code Accessor} for all weekday names. </p>
     *
     * <p>The underlying list of text forms is sorted such that the
     * typical order of weekdays is used in given calendar system.
     * ISO systems define Monday as first day of week and at whole
     * 7 weekdays. This order is also valid for US in the context of
     * this class although in US Sunday is considered as start of a
     * week. The order element value enums must be in agreement with
     * the order of text forms contained here. </p>
     *
     * @param   textWidth       text width of displayed weekday name
     * @param   outputContext   output context (stand-alone?)
     * @return  accessor for weekday names
     * @see     net.time4j.Weekday
     */
    /*[deutsch]
     * <p>Liefert einen {@code Accessor} f&uuml;r alle
     * Wochentagsnamen. </p>
     *
     * <p>Die Liste ist so sortiert, da&szlig; die f&uuml;r das jeweilige
     * Kalendersystem typische Reihenfolge der Wochentage eingehalten wird.
     * ISO-Systeme definieren den Montag als den ersten Wochentag und insgesamt
     * 7 Wochentage. Diese Sortierung gilt im Kontext dieser Klasse auch
     * f&uuml;r die USA, in denen der Sonntag als erster Tag der Woche gilt.
     * Die Reihenfolge der Elementwert-Enums mu&szlig; mit der Reihenfolge
     * der hier enthaltenen Textformen &uuml;bereinstimmen. </p>
     *
     * @param   textWidth       text width of displayed weekday name
     * @param   outputContext   output context (stand-alone?)
     * @return  accessor for weekday names
     * @see     net.time4j.Weekday
     */
    public Accessor getWeekdays(
        TextWidth textWidth,
        OutputContext outputContext
    ) {
        // fully populated by the constructor; lookups cannot return null
        return this.weekdays.get(textWidth).get(outputContext);
    }

    /**
     * <p>Yields an {@code Accessor} for all era names. </p>
     *
     * <p>The underlying list of text forms is sorted such that the
     * typical order of eras is used in given calendar system. ISO systems
     * define era names based on their historical extensions (eras of
     * gregorian/historic calendar) because they themselves have no internal
     * concept of eras. The order of element value enums must be in agreement
     * with the text forms contained here. If an era is not defined on enum
     * basis then the format API will not evaluate this class but the
     * {@code CalendarSystem} to get the right text forms. </p>
     *
     * @param   textWidth       text width of displayed era name
     * @return  accessor for era names
     * @see     net.time4j.engine.CalendarSystem#getEras()
     */
    /*[deutsch]
     * <p>Liefert einen {@code Accessor} f&uuml;r alle
     * &Auml;ranamen. </p>
     *
     * <p>Die Liste ist so sortiert, da&szlig; die f&uuml;r das jeweilige
     * Kalendersystem typische Reihenfolge der &Auml;ranamen eingehalten wird.
     * ISO-Systeme definieren &Auml;ranamen basierend auf ihren historischen
     * Erweiterungen, da sie selbst keine kennen (also die des gregorianischen
     * historischen Kalenders). Die Reihenfolge der Elementwert-Enums mu&szlig;
     * mit der Reihenfolge der hier enthaltenen Textformen &uuml;bereinstimmen.
     * Wenn eine &Auml;ra nicht auf Enum-Basis definiert ist, wertet das
     * Format-API nicht diese Klasse, sondern das {@code CalendarSystem} zur
     * Bestimmung der Textformen aus. </p>
     *
     * @param   textWidth       text width of displayed era name
     * @return  accessor for era names
     * @see     net.time4j.engine.CalendarSystem#getEras()
     */
    public Accessor getEras(TextWidth textWidth) {
        // eras only vary by text width (no output context dimension)
        return this.eras.get(textWidth);
    }

    /**
     * <p>Yields an {@code Accessor} for all am/pm-names. </p>
     *
     * <p>The underlying list of text forms is sorted in AM-PM-order.
     * The order of element value enums must be the same. </p>
     *
     * @param   textWidth       text width of displayed AM/PM name
     * @return  accessor for AM/PM names
     * @see     net.time4j.Meridiem
     */
    /*[deutsch]
     * <p>Liefert einen {@code Accessor} f&uuml;r alle
     * Tagesabschnittsnamen. </p>
     *
     * <p>Die Liste ist in AM/PM-Reihenfolge sortiert.
Die Reihenfolge der * Elementwert-Enums mu&szlig; mit der Reihenfolge der hier enthaltenen * Textformen &uuml;bereinstimmen. </p> * * @param textWidth text width of displayed AM/PM name * @return accessor for AM/PM names * @see net.time4j.Meridiem */ public Accessor getMeridiems(TextWidth textWidth) { return this.meridiems.get(textWidth); } /** * <p>Yields an {@code Accessor} for all text forms of given * chronological element. </p> * * <p>Text forms might exist in different variations. In case of * enum-based variants the name of the enum (example &quot;WIDE&quot; in * the variant {@code TextWidth}) is to be used, in case of boolean-based * variants the literals &quot;true&quot; and &quot;false&quot; are to be * used. </p> * * <p>While the methods {@code getStdMonths()}, {@code getWeekdays()} * etc.are mainly based on JDK-defaults, this method is escpecially * designed for querying chronological texts which are not contained in * JDK. Text forms will be stored internally in the resource folder * &quot;data&quot; relative to class path in properties-files using * UTF-8 encoding. The basic name of these resources is the calendar type. * The combination of element name and optionally variants in the form * &quot;(variant1|variant2|...|variantN)&quot; and the underscore and * finally a numerical suffix with base 1 serves as resource text key. * If there is no entry for given key in the resources then this method * will simply yield the name of enum value associated with given element * value. </p> * * @param <V> generic type of element values based on enums * @param element element text forms are searched for * @param variants text form variants (optional) * @return accessor for any text forms * @throws MissingResourceException if for given calendar type there are * no text resource files */ /*[deutsch] * <p>Liefert einen {@code Accessor} f&uuml;r alle Textformen des angegebenen * chronologischen Elements. 
</p> * * <p>Textformen k&ouml;nnen unter Umst&auml;nden in verschiedenen * Varianten vorkommen. Als Variantenbezug dient bei enum-Varianten * der Name der Enum-Auspr&auml;gung (Beispiel &quot;WIDE&quot; in * der Variante {@code TextWidth}), im boolean-Fall sind die Literale * &quot;true&quot; und &quot;false&quot; zu verwenden. </p> * * <p>W&auml;hrend die Methoden {@code getStdMonths()}, {@code getWeekdays()} * etc. in erster Linie auf JDK-Vorgaben beruhen, dient diese Methode dazu, * chronologiespezifische Texte zu beschaffen, die nicht im JDK enthalten * sind. Textformen werden intern im Ressourcenverzeichnis &quot;data&quot; * des Klassenpfads mit Hilfe von properties-Dateien im UTF-8-Format * gespeichert. Der Basisname dieser Ressourcen ist der Kalendertyp. Als * Textschluuml;ssel dient die Kombination aus Elementname, optional Varianten * in der Form &quot;(variant1|variant2|...|variantN)&quot;, dem Unterstrich * und schlie&szlig;lich einem numerischen Suffix mit Basis 1. Wird in den * Ressourcen zum angegebenen Schl&uuml;ssel kein Eintrag gefunden, liefert * diese Methode einfach den Namen des mit dem Element assoziierten * enum-Werts. </p> * * @param <V> generic type of element values based on enums * @param element element text forms are searched for * @param variants text form variants (optional) * @return accessor for any text forms * @throws MissingResourceException if for given calendar type there are * no text resource files */ public <V extends Enum<V>> Accessor getTextForms( ChronoElement<V> element, String... 
variants ) { if (this.textForms == null) { throw new MissingResourceException( this.mre.getMessage(), this.mre.getClassName(), this.mre.getKey()); } V[] enums = element.getType().getEnumConstants(); int len = enums.length; String[] tfs = new String[len]; StringBuilder sb = new StringBuilder(element.name()); if ( (variants != null) && (variants.length > 0) ) { boolean first = true; for (int v = 0; v < variants.length; v++) { if (first) { sb.append('('); } else { sb.append('|'); } sb.append(variants[v]); } sb.append(')'); } String raw = sb.toString(); for (int i = 0; i < len; i++) { String vkey = toKey(raw, i); if (this.textForms.containsKey(vkey)) { tfs[i] = this.textForms.getString(vkey); } else { String skey = toKey(element.name(), i); if (this.textForms.containsKey(skey)) { tfs[i] = this.textForms.getString(skey); } else { tfs[i] = enums[i].name(); } } } return new Accessor(tfs, this.textForms.getLocale()); } /** * <p>Yields the localized GMT-prefix which is used in the * <i>localized GMT format</i> of CLDR. </p> * * @param locale language and country configuration * @return localized GMT-String defaults to &quot;GMT&quot; */ /*[deutsch] * <p>Liefert das lokalisierte GMT-Pr&auml;fix, das im * <i>localized GMT format</i> von CLDR benutzt wird. </p> * * @param locale language and country configuration * @return localized GMT-String defaults to &quot;GMT&quot; */ public static String getGMTPrefix(Locale locale) { CalendarText ct = CalendarText.getInstance(ISO_CALENDAR_TYPE, locale); if (ct.textForms == null) { return "GMT"; } return ct.textForms.getString("prefixGMTOffset"); } /** * <p>Yields the name of the internal {@code CalendarText.Provider}. </p> */ /*[deutsch] * <p>Liefert den Namen des internen {@code CalendarText.Provider}. </p> */ @Override public String toString() { return this.provider; } /** * <p>Clears the internal cache. 
 * </p>
 *
 * <p>This method should be called if the internal text resources have
 * changed and must be reloaded with a suitable {@code ClassLoader}. </p>
 */
public static void clearCache() {

    CACHE.clear();

}

/**
 * <p>Extracts the calendar type from the given chronology. </p>
 *
 * <p>If no calendar type can be determined then {@code ISO_CALENDAR_TYPE}
 * is returned as fallback. </p>
 *
 * @param   chronology  chronology to be evaluated
 * @return  calendar type, never {@code null}
 */
static String extractCalendarType(Chronology<?> chronology) {

    CalendarType ft =
        chronology.getChronoType().getAnnotation(CalendarType.class);
    return ((ft == null) ? ISO_CALENDAR_TYPE : ft.value());

}

// dispatches to either the leap-month table or the standard-month table
private Accessor getMonths(
    TextWidth textWidth,
    OutputContext outputContext,
    boolean leapForm
) {

    if (leapForm) {
        return this.leapMonths.get(textWidth).get(outputContext);
    } else {
        return this.stdMonths.get(textWidth).get(outputContext);
    }

}

// Builds the month table for all text widths and output contexts.
// In leap mode the result is null if the provider yields no month names
// different from the standard form - presumably so the caller can skip
// the leap table entirely (verify at call site).
private static Map<TextWidth, Map<OutputContext, Accessor>> getMonths(
    String calendarType,
    Locale locale,
    Provider p,
    boolean leapForm
) {

    Map<TextWidth, Map<OutputContext, Accessor>> mt =
        new EnumMap<TextWidth, Map<OutputContext, Accessor>>
            (TextWidth.class);
    boolean usesDifferentLeapForm = false;

    for (TextWidth tw : TextWidth.values()) {
        Map<OutputContext, Accessor> mo =
            new EnumMap<OutputContext, Accessor>(OutputContext.class);
        for (OutputContext oc : OutputContext.values()) {
            String[] ls = p.months(calendarType, locale, tw, oc, leapForm);
            if (leapForm && !usesDifferentLeapForm) {
                // compare with the standard form once per combination
                String[] std = p.months(calendarType, locale, tw, oc, false);
                usesDifferentLeapForm = !Arrays.equals(std, ls);
            }
            mo.put(oc, new Accessor(ls, locale));
        }
        mt.put(tw, mo);
    }

    return ((!leapForm || usesDifferentLeapForm) ? mt : null);

}

// linear scan over the provider's declared calendar types
private static boolean isCalendarTypeSupported(
    Provider p,
    String calendarType
) {

    for (String c : p.getSupportedCalendarTypes()) {
        if (c.equals(calendarType)) {
            return true;
        }
    }

    return false;

}

// A locale matches if its language equals a provider locale's language
// and its country is either unspecified or equal as well.
private static boolean isLocaleSupported(
    Provider p,
    Locale locale
) {

    for (Locale l : p.getAvailableLocales()) {
        String lang = locale.getLanguage();
        String country = locale.getCountry();

        if (
            lang.equals(l.getLanguage())
            && (country.isEmpty() || country.equals(l.getCountry()))
        ) {
            return true;
        }
    }

    return false;

}

// resource key = raw name + '_' + 1-based counter
private static final String toKey(
    String raw,
    int counter
) {

    StringBuilder keyBuilder = new StringBuilder(raw);
    keyBuilder.append('_');
    keyBuilder.append(counter + 1);
    return keyBuilder.toString();

}

/**
 * <p>This <strong>SPI-interface</strong> enables the access to calendrical
 * standard text informations and will be instantiated by a
 * {@code ServiceLoader}-mechanism. </p>
 *
 * <p>The motivation is mainly to override the language-dependent forms
 * of JDK-defaults with respect to standard elements like months, weekdays
 * etc. Specific text forms which are not contained in the JDK will instead
 * be supplied by help of properties-files in the &quot;data&quot;-folder. </p>
 *
 * @author  Meno Hochschild
 * @spec    Implementations must have a public no-arg constructor.
 * @see     java.util.ServiceLoader
 */
public interface Provider {

    /**
     * <p>Defines the supported calendar types. </p>
     *
     * @return  String-array with calendar types
     * @see     CalendarType
     */
    String[] getSupportedCalendarTypes();

    /**
     * <p>Yields the supported languages. </p>
     *
     * @return  Locale-array
     */
    Locale[] getAvailableLocales();

    /**
     * <p>See {@link CalendarText#getStdMonths}. </p>
     *
     * @param   calendarType    calendar type
     * @param   locale          language of text output
     * @param   textWidth       text width
     * @param   outputContext   output context
     * @param   leapForm        use leap form (for example the hebrew
     *                          month &quot;Adar II&quot;)?
     * @return  unmodifiable sorted array of month names
     */
    String[] months(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext,
        boolean leapForm
    );

    /**
     * <p>See {@link CalendarText#getQuarters}. </p>
     *
     * @param   calendarType    calendar type
     * @param   locale          language of text output
     * @param   textWidth       text width
     * @param   outputContext   output context
     * @return  unmodifiable sorted array of quarter names
     */
    String[] quarters(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext
    );

    /**
     * <p>See {@link CalendarText#getWeekdays}. </p>
     *
     * @param   calendarType    calendar type
     * @param   locale          language of text output
     * @param   textWidth       text width
     * @param   outputContext   output context
     * @return  unmodifiable sorted array of weekday names
     *          in calendar specific order (ISO-8601 starts with monday)
     */
    String[] weekdays(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext
    );

    /**
     * <p>See {@link CalendarText#getEras}. </p>
     *
     * @param   calendarType    calendar type
     * @param   locale          language of text output
     * @param   textWidth       text width
     * @return  unmodifiable sorted array of era names
     */
    String[] eras(
        String calendarType,
        Locale locale,
        TextWidth textWidth
    );

    /**
     * <p>See {@link CalendarText#getMeridiems}. </p>
     *
     * @param   calendarType    calendar type
     * @param   locale          language of text output
     * @param   textWidth       text width
     * @return  unmodifiable sorted array of AM/PM-names
     */
    /*[deutsch]
     * <p>Siehe {@link CalendarText#getMeridiems}.
     * </p>
     *
     * @param   calendarType    calendar type
     * @param   locale          language of text output
     * @param   textWidth       text width
     * @return  unmodifiable sorted array of AM/PM-names
     */
    String[] meridiems(
        String calendarType,
        Locale locale,
        TextWidth textWidth
    );

}

/**
 * <p>Supplies an access to the internal name list of an enum-based
 * element value. </p>
 *
 * @author      Meno Hochschild
 * @concurrency <immutable>
 */
public static final class Accessor {

    // immutable snapshot of the text forms, indexed by enum ordinal
    private final List<String> textForms;
    // locale used for case-insensitive comparison of exotic characters
    private final Locale locale;

    private Accessor(
        String[] textForms,
        Locale locale
    ) {
        super();
        this.textForms =
            Collections.unmodifiableList(Arrays.asList(textForms));
        this.locale = locale;
    }

    /**
     * <p>Prints the given element value as String. </p>
     *
     * <p>If the element value has no localized representation then this
     * method will simply print the enum name. </p>
     *
     * @param   value   current value of element
     * @return  localized text form
     */
    public String print(Enum<?> value) {

        int index = value.ordinal();

        if (this.textForms.size() <= index) {
            // no text form stored for this ordinal - fall back to enum name
            return value.name();
        } else {
            return this.textForms.get(index);
        }

    }

    /**
     * <p>Interpretes given text form as enum-based element value. </p>
     *
     * <p>Parsing is case-insensitive. No partial compare is performed,
     * instead the whole element text will be evaluated. </p>
     *
     * @param   <V> generic value type of element
     * @param   parseable   text to be parsed
     * @param   status      current parsing position
     * @param   valueType   value class of element
     * @return  element value (as enum) or {@code null} if not found
     * @see     #parse(CharSequence, ParseLog, Class, AttributeQuery)
     */
    public <V extends Enum<V>> V parse(
        CharSequence parseable,
        ParseLog status,
        Class<V> valueType
    ) {

        // defaults: case-insensitive, no partial compare
        return this.parse(parseable, status, valueType, true, false);

    }

    /**
     * <p>Interpretes given text form as enum-based element value. </p>
     *
     * <p>The attributes {@code Attributes.PARSE_CASE_INSENSITIVE} and
     * {@code Attributes.PARSE_PARTIAL_COMPARE} will be evaluated. </p>
     *
     * @param   <V> generic value type of element
     * @param   parseable   text to be parsed
     * @param   status      current parsing position
     * @param   valueType   value class of element
     * @param   attributes  format attributes
     * @return  element value (as enum) or {@code null} if not found
     * @see     Attributes#PARSE_CASE_INSENSITIVE
     * @see     Attributes#PARSE_PARTIAL_COMPARE
     */
    public <V extends Enum<V>> V parse(
        CharSequence parseable,
        ParseLog status,
        Class<V> valueType,
        AttributeQuery attributes
    ) {

        boolean caseInsensitive =
            attributes
                .get(Attributes.PARSE_CASE_INSENSITIVE, Boolean.TRUE)
                .booleanValue();
        boolean partialCompare =
            attributes
                .get(Attributes.PARSE_PARTIAL_COMPARE, Boolean.FALSE)
                .booleanValue();

        return this.parse(
            parseable, status, valueType, caseInsensitive, partialCompare);

    }

    /**
     * <p>Supports mainly debugging. </p>
     */
    @Override
    public String toString() {

        int n = this.textForms.size();
        StringBuilder sb = new StringBuilder(n * 16 + 2);
        sb.append('{');
        boolean first = true;

        for (int i = 0; i < n; i++) {
            if (first) {
                first = false;
            } else {
                sb.append(',');
            }
            sb.append(this.textForms.get(i));
        }

        sb.append('}');
        return sb.toString();

    }

    // Core matching loop: tries every enum constant's text form (falling
    // back to the enum name if no form is stored) against the input at
    // the current position. On a full match the position is advanced and
    // the constant returned; with partial compare enabled, the constant
    // with the longest prefix match is remembered as candidate.
    private <V extends Enum<V>> V parse(
        CharSequence parseable,
        ParseLog status,
        Class<V> valueType,
        boolean caseInsensitive,
        boolean partialCompare
    ) {

        V[] enums = valueType.getEnumConstants();
        int len = this.textForms.size();
        int start = status.getPosition();
        int end = parseable.length();
        int maxEq = 0;
        V candidate = null;

        for (int i = 0; i < enums.length; i++) {
            String s = (
                (i >= len)
                ? enums[i].name()
                : this.textForms.get(i));
            int pos = start;
            int n = s.length();
            boolean eq = true;

            for (int j = 0; eq && (j < n); j++) {
                if (start + j >= end) {
                    // input exhausted before the text form ended
                    eq = false;
                } else {
                    char c = parseable.charAt(start + j);
                    char t = s.charAt(j);

                    if (caseInsensitive) {
                        eq = this.compareIgnoreCase(c, t);
                    } else {
                        eq = (c == t);
                    }

                    if (eq) {
                        pos++;
                    }
                }
            }

            if (eq) {
                assert pos == start + n;
                status.setPosition(pos);
                return enums[i];
            } else if (
                partialCompare
                && (maxEq < pos - start)
            ) {
                // longest partial match so far wins
                maxEq = pos - start;
                candidate = enums[i];
            }
        }

        if (candidate == null) {
            status.setError(start);
        } else {
            status.setPosition(start + maxEq);
        }

        return candidate;

    }

    // Fast ASCII uppercase comparison first; only for non-ASCII letters
    // fall back to locale-sensitive String.toUpperCase.
    private boolean compareIgnoreCase(char c1, char c2) {

        if (c1 >= 'a' && c1 <= 'z') {
            c1 = (char) (c1 - 'a' + 'A');
        }

        if (c2 >= 'a' && c2 <= 'z') {
            c2 = (char) (c2 - 'a' + 'A');
        }

        if (c1 >= 'A' && c1 <= 'Z') {
            return (c1 == c2);
        }

        String s1 = String.valueOf(c1).toUpperCase(this.locale);
        String s2 = String.valueOf(c2).toUpperCase(this.locale);
        return s1.equals(s2);

    }

}

// Provider backed by the pre-CLDR JDK localization data
// (DateFormatSymbols), optionally overridden by "data/iso8601"-style
// resource bundles for stand-alone and SHORT forms.
private static class OldJdkProvider
    implements Provider {

    @Override
    public String[] getSupportedCalendarTypes() {

        return new String[] { ISO_CALENDAR_TYPE };

    }

    @Override
    public Locale[] getAvailableLocales() {

        return DateFormatSymbols.getAvailableLocales();

    }

    @Override
    public String[] months(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext,
        boolean leapForm
    ) {

        try {
            ResourceBundle rb = getBundle(locale);

            // stand-alone forms come from own resources when enabled there
            if (
                (rb != null)
                && (outputContext == OutputContext.STANDALONE)
                && "true".equals(rb.getObject("enableStandalone"))
            ) {
                String[] names = new String[12];
                for (int m = 0; m < 12; m++) {
                    StringBuilder b = new StringBuilder();
                    b.append("MONTH_OF_YEAR(");
                    b.append(textWidth);
                    b.append('|');
                    b.append(outputContext);
                    b.append(")_");
                    b.append(m + 1);
                    names[m] = rb.getString(b.toString());
                }
                return names;
            }
        } catch (MissingResourceException ex) {
            // continue standard case
        }

        // standard case: delegate to the JDK symbols
        DateFormatSymbols dfs =
            DateFormatSymbols.getInstance(locale);

        switch (textWidth) {
            case WIDE:
                return dfs.getMonths();
            case ABBREVIATED:
            case SHORT:
                return dfs.getShortMonths();
            case NARROW:
                // narrow = first latin letter of the short form,
                // or the 1-based month number if the short form is empty
                String[] months = dfs.getShortMonths();
                String[] ret = new String[months.length];
                for (int i = 0, n = months.length; i < n; i++) {
                    if (!months[i].isEmpty()) {
                        ret[i] = toLatinLetter(months[i]);
                    } else {
                        ret[i] = String.valueOf(i + 1);
                    }
                }
                return ret;
            default:
                throw new UnsupportedOperationException(textWidth.name());
        }

    }

    @Override
    public String[] quarters(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext
    ) {

        ResourceBundle rb = getBundle(locale);

        if (rb != null) {
            // stand-alone not enabled => re-query in FORMAT context
            if (
                (outputContext == OutputContext.STANDALONE)
                && !"true".equals(rb.getObject("enableStandalone"))
            ) {
                return quarters(
                    calendarType, locale, textWidth, OutputContext.FORMAT);
            }

            String[] names = new String[4];
            boolean useFallback = false;

            if (textWidth == TextWidth.SHORT) {
                // resources only distinguish down to ABBREVIATED
                textWidth = TextWidth.ABBREVIATED;
            }

            for (int q = 0; q < 4; q++) {
                StringBuilder b = new StringBuilder();
                b.append("QUARTER_OF_YEAR(");
                b.append(textWidth);
                if (outputContext == OutputContext.STANDALONE) {
                    b.append('|');
                    b.append(outputContext);
                }
                b.append(")_");
                b.append(q + 1);
                try {
                    names[q] = rb.getString(b.toString());
                } catch (MissingResourceException ex) {
                    useFallback = true;
                    break;
                }
            }

            if (!useFallback) {
                return names;
            }
        }

        return new String[] {"Q1", "Q2", "Q3", "Q4"}; // fallback

    }

    @Override
    public String[] weekdays(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext
    ) {

        ResourceBundle rb = getBundle(locale);

        try {
            // stand-alone forms come from own resources when enabled there
            if (
                (rb != null)
                && (outputContext == OutputContext.STANDALONE)
                && "true".equals(rb.getObject("enableStandalone"))
            ) {
                String[] names = new String[7];
                for (int d = 0; d < 7; d++) {
                    StringBuilder b = new StringBuilder();
                    b.append("DAY_OF_WEEK(");
                    b.append(textWidth);
                    b.append('|');
                    b.append(outputContext);
                    b.append(")_");
                    b.append(d + 1);
                    names[d] = rb.getString(b.toString());
                }
                return names;
            }
        } catch (MissingResourceException ex) {
            // continue standard case
        }

        DateFormatSymbols dfs = DateFormatSymbols.getInstance(locale);
        String[] result;

        switch (textWidth) {
            case WIDE:
                result = dfs.getWeekdays();
                break;
            case ABBREVIATED:
                result = dfs.getShortWeekdays();
                break;
            case SHORT:
                // prefer dedicated SHORT resources if present; they are
                // length 7 and thus skip the ISO reordering below
                result = dfs.getShortWeekdays();
                if (rb != null) {
                    try {
                        String[] names = new String[7];
                        for (int d = 0; d < 7; d++) {
                            StringBuilder skey = new StringBuilder();
                            skey.append("DAY_OF_WEEK(SHORT)_");
                            skey.append(d + 1);
                            names[d] = rb.getString(skey.toString());
                        }
                        result = names;
                    } catch (MissingResourceException mre) {
                        // no-op
                    }
                }
                break;
            case NARROW:
                // JDK weekday arrays are 1-based (index 0 empty)
                String[] weekdays = dfs.getShortWeekdays();
                String[] ret = new String[weekdays.length];
                for (int i = 1; i < weekdays.length; i++) {
                    if (!weekdays[i].isEmpty()) {
                        ret[i] = toLatinLetter(weekdays[i]);
                    } else {
                        ret[i] = String.valueOf(i);
                    }
                }
                result = ret;
                break;
            default:
                throw new UnsupportedOperationException(
                    "Unknown text width: " + textWidth);
        }

        if (result.length == 8) { // enforce ISO order (Monday first)
            // JDK layout: [empty, Sun, Mon..Sat] -> shift to [Mon..Sat, Sun];
            // NOTE(review): index 7 keeps a stale duplicate of Saturday, but
            // only ordinals 0-6 are ever read via Weekday - confirm
            String sunday = result[1];

            for (int i = 2; i < 8; i++) {
                result[i - 2] = result[i];
            }

            result[6] = sunday;
        }

        return result;

    }

    @Override
    public String[] eras(
        String calendarType,
        Locale locale,
        TextWidth textWidth
    ) {

        DateFormatSymbols dfs = DateFormatSymbols.getInstance(locale);

        if (textWidth == TextWidth.NARROW) {
            String[] eras = dfs.getEras();
            String[] ret = new String[eras.length];
            for (int i = 0, n = eras.length; i < n; i++) {
                if (!eras[i].isEmpty()) {
                    ret[i] = toLatinLetter(eras[i]);
                } else if ((i == 0) && (eras.length == 2)) {
                    ret[i] = "B";
                } else if ((i == 1) && (eras.length == 2)) {
                    ret[i] = "A";
                } else {
                    ret[i] = String.valueOf(i);
                }
            }
            return ret;
        } else {
            return dfs.getEras();
        }

    }

    @Override
    public String[] meridiems(
        String calendarType,
        Locale locale,
        TextWidth textWidth
    ) {

        DateFormatSymbols dfs = DateFormatSymbols.getInstance(locale);

        if (textWidth == TextWidth.NARROW) {
            return new String[] {"A", "P"};
        } else {
            return dfs.getAmPmStrings();
        }

    }

    // Reduces a name to a single latin capital letter:
    // strips diacritical marks via NFD decomposition, then upper-cases
    // an ASCII letter; returns the input unchanged otherwise.
    private static String toLatinLetter(String input) {

        char c = Normalizer.normalize(input, Normalizer.Form.NFD).charAt(0);

        if ((c >= 'A') && (c <= 'Z')) {
            return String.valueOf(c);
        } else if ((c >= 'a') && (c <= 'z')) {
            c += ('A' - 'a');
            return String.valueOf(c);
        } else {
            return input; // no latin letter available
        }

    }

    // yields the override bundle or null if none exists for the locale
    private static ResourceBundle getBundle(Locale locale) {

        try {
            return ResourceBundle.getBundle(
                "data/" + ISO_CALENDAR_TYPE, locale, CONTROL);
        } catch (MissingResourceException ex) {
            return null;
        }

    }

}

// Minimal last-resort provider with locale-independent numeric/latin
// text forms; only the array-yielding methods are ever invoked.
private static class FallbackProvider
    implements Provider {

    @Override
    public String[] getSupportedCalendarTypes() {
        throw new UnsupportedOperationException("Never called.");
    }

    @Override
    public Locale[] getAvailableLocales() {
        throw new UnsupportedOperationException("Never called.");
    }

    @Override
    public String[] months(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext,
        boolean leapForm
    ) {

        // 13 entries to cover calendars with a leap month
        if (textWidth == TextWidth.WIDE) {
            return new String[] {
                "01", "02", "03", "04", "05", "06",
                "07", "08", "09", "10", "11", "12", "13"};
        } else {
            return new String[] {
                "1", "2", "3", "4", "5", "6",
                "7", "8", "9", "10", "11", "12", "13"};
        }

    }

    @Override
    public String[] quarters(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext
    ) {

        if (textWidth == TextWidth.NARROW) {
            return new String[] {"1", "2", "3", "4"};
        } else {
            return new String[] {"Q1", "Q2", "Q3", "Q4"};
        }

    }

    @Override
    public String[] weekdays(
        String calendarType,
        Locale locale,
        TextWidth textWidth,
        OutputContext outputContext
    ) {

        // ISO order, Monday = "1"
        return new String[] {"1", "2", "3", "4", "5", "6", "7"};

    }

    @Override
    public String[] eras(
        String calendarType,
        Locale locale,
        TextWidth textWidth
    ) {

        if (textWidth == TextWidth.NARROW) {
            return new String[] {"B", "A"};
        } else {
            return new String[] {"BC", "AD"};
        }

    }

    @Override
    public String[] meridiems(
        String calendarType,
        Locale locale,
        TextWidth textWidth
    ) {

        if (textWidth == TextWidth.NARROW) {
            return new String[] {"A", "P"};
        } else {
            return new String[] {"AM", "PM"};
        }

    }

}

// ResourceBundle.Control which reads properties files as UTF-8 and
// disables the fallback to the default locale.
private static class UTF8NoFallbackControl
    extends ResourceBundle.Control {

    @Override
    public Locale getFallbackLocale(
        String baseName,
        Locale locale
    ) {

        if (baseName == null || locale == null) {
            throw new NullPointerException();
        }

        // no fallback locale => requested locale or nothing
        return null;

    }

    @Override
    public List<String> getFormats(String baseName) {

        // properties files only, no class-based bundles
        return ResourceBundle.Control.FORMAT_PROPERTIES;

    }

    @Override
    public ResourceBundle newBundle(
        String baseName,
        Locale locale,
        String format,
        ClassLoader loader,
        boolean reload
    ) throws IllegalAccessException, InstantiationException, IOException {

        if (format.equals("java.properties")) {
            ResourceBundle bundle = null;
            InputStream stream = null;
            String bundleName = this.toBundleName(baseName, locale);
            String resourceName =
                this.toResourceName(bundleName, "properties");

            if (reload) {
                // bypass URLConnection caches on reload
                URL url = loader.getResource(resourceName);
                if (url != null) {
                    URLConnection uconn = url.openConnection();
                    uconn.setUseCaches(false);
                    stream = uconn.getInputStream();
                }
            } else {
                stream = loader.getResourceAsStream(resourceName);
            }

            if (stream != null) {
                Reader reader = null;
                try {
                    // explicit UTF-8 - PropertyResourceBundle via stream
                    // would assume ISO-8859-1
                    reader = new BufferedReader(
                        new InputStreamReader(stream, "UTF-8"));
                    bundle = new PropertyResourceBundle(reader);
                } finally {
                    if (reader != null) {
                        reader.close();
                    }
                }
            }

            return bundle;
        } else {
            throw new UnsupportedOperationException(
                "Unknown resource bundle format: " + format);
        }

    }

}

}
package org.bitcoinj.core; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import static org.bitcoinj.core.SporkManager.SPORK_6_NEW_SIGS; public class SporkMessage extends Message{ private static final Logger log = LoggerFactory.getLogger(SporkMessage.class); MasternodeSignature sig; int nSporkID; long nValue; long nTimeSigned; static int HASH_SIZE = 20; public SporkMessage(NetworkParameters params) { super(params);} public SporkMessage(NetworkParameters params, byte [] payload, int cursor) { super(params, payload, cursor); } protected static int calcLength(byte[] buf, int offset) { VarInt varint; int cursor = offset; //vin cursor += 36; varint = new VarInt(buf, cursor); long scriptLen = varint.value; // 4 = length of sequence field (unint32) cursor += scriptLen + 4 + varint.getOriginalSizeInBytes(); //MasternodeAddress address; cursor += MasternodeAddress.MESSAGE_SIZE; //PublicKey pubkey; cursor += PublicKey.calcLength(buf, cursor); //PublicKey pubkey2; cursor += PublicKey.calcLength(buf, cursor); // byte [] sig; cursor += MasternodeSignature.calcLength(buf, cursor); cursor += 4 + 8 + 8; cursor += MasternodeSignature.calcLength(buf, cursor); return cursor - offset; } @Override protected void parse() throws ProtocolException { nSporkID = (int)readUint32(); nValue = readInt64(); nTimeSigned = readInt64(); sig = new MasternodeSignature(params, payload, cursor); cursor += sig.getMessageSize(); length = cursor - offset; } @Override protected void bitcoinSerializeToStream(OutputStream stream) throws IOException { Utils.uint32ToByteStreamLE(nSporkID, stream); Utils.int64ToByteStreamLE(nValue, stream); Utils.int64ToByteStreamLE(nTimeSigned, stream); sig.bitcoinSerialize(stream); } @Override public Sha256Hash getHash() { try { ByteArrayOutputStream bos = new UnsafeByteArrayOutputStream(HASH_SIZE); Utils.uint32ToByteStreamLE(nSporkID, bos); 
Utils.int64ToByteStreamLE(nValue, bos); Utils.int64ToByteStreamLE(nTimeSigned, bos); return Sha256Hash.wrapReversed(Sha256Hash.hashTwice(bos.toByteArray())); } catch (IOException e) { throw new RuntimeException(e); // Cannot happen. } } public Sha256Hash getSignatureHash() { return getHash(); } boolean checkSignature(byte [] publicKeyId) { StringBuilder errorMessage = new StringBuilder(); if(Context.get().sporkManager.isSporkActive(SPORK_6_NEW_SIGS)) { Sha256Hash hash = getSignatureHash(); if(!HashSigner.verifyHash(Sha256Hash.wrapReversed(hash.getBytes()), publicKeyId, sig, errorMessage)) { // Note: unlike for many other messages when SPORK_6_NEW_SIGS is ON sporks with sigs in old format // and newer timestamps should not be accepted, so if we failed here - that's it log.error("CSporkMessage::CheckSignature -- VerifyHash() failed, error: {}", errorMessage); return false; } } else { String strMessage = "" + nSporkID + nValue + nTimeSigned; if (!MessageSigner.verifyMessage(publicKeyId, sig, strMessage, errorMessage)) { Sha256Hash hash = getSignatureHash(); if (!HashSigner.verifyHash(Sha256Hash.wrapReversed(hash.getBytes()), publicKeyId, sig, errorMessage)) { log.error("CSporkMessage::CheckSignature -- VerifyHash() failed, error: {}", errorMessage); return false; } } } return true; } public boolean sign(ECKey key) { /*if (!key.IsValid()) { LogPrintf("CSporkMessage::Sign -- signing key is not valid\n"); return false; }*/ PublicKey pubKey = new PublicKey(key.getPubKey()); StringBuilder strError = new StringBuilder(); if (Context.get().sporkManager.isSporkActive(SPORK_6_NEW_SIGS)) { Sha256Hash hash = getSignatureHash(); sig = HashSigner.signHash(hash, key); if (sig == null) { log.error("CSporkMessage::Sign -- SignHash() failed"); return false; } if (!HashSigner.verifyHash(hash, pubKey, sig, strError)) { log.error("CSporkMessage::Sign -- VerifyHash() failed, error: %s\n", strError); return false; } } else { String strMessage = "" + nSporkID + nValue + nTimeSigned; if 
(null == (sig = MessageSigner.signMessage(strMessage, key))) { log.error("CSporkMessage::Sign -- SignMessage() failed\n"); return false; } if (!MessageSigner.verifyMessage(pubKey, sig, strMessage, strError)) { log.error("CSporkMessage::Sign -- VerifyMessage() failed, error: %s\n", strError); return false; } } return true; } }
package org.jfree.chart.axis.junit;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.GradientPaint;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import org.jfree.chart.axis.Axis;
import org.jfree.chart.axis.CategoryAxis;
import org.jfree.chart.util.RectangleInsets;

/**
 * Tests for the {@link Axis} class.  Axis is abstract, so a concrete
 * subclass ({@link CategoryAxis}) is used as the instance under test.
 */
public class AxisTests extends TestCase {

    /**
     * Returns the tests as a test suite.
     *
     * @return The test suite.
     */
    public static Test suite() {
        return new TestSuite(AxisTests.class);
    }

    /**
     * Constructs a new set of tests.
     *
     * @param name  the name of the tests.
     */
    public AxisTests(String name) {
        super(name);
    }

    /**
     * Confirm that cloning works: the clone must be a distinct instance of
     * the same class that compares equal to the original.
     */
    public void testCloning() {
        CategoryAxis a1 = new CategoryAxis("Test");
        // non-default paint, to verify the field survives cloning
        a1.setAxisLinePaint(Color.red);
        CategoryAxis a2 = null;
        try {
            a2 = (CategoryAxis) a1.clone();
        } catch (CloneNotSupportedException e) {
            // CategoryAxis supports cloning, so this should never happen;
            // if it does, the equality asserts below will fail on null.
            e.printStackTrace();
        }
        assertTrue(a1 != a2);
        assertTrue(a1.getClass() == a2.getClass());
        assertTrue(a1.equals(a2));
    }

    /**
     * Confirm that the equals method can distinguish all the required fields.
     * Pattern for each field: change it on a1 only (equality must break),
     * then mirror the change on a2 (equality must be restored).
     */
    public void testEquals() {
        Axis a1 = new CategoryAxis("Test");
        Axis a2 = new CategoryAxis("Test");
        assertTrue(a1.equals(a2));

        // visible flag...
        a1.setVisible(false);
        assertFalse(a1.equals(a2));
        a2.setVisible(false);
        assertTrue(a1.equals(a2));

        // label...
        a1.setLabel("New Label");
        assertFalse(a1.equals(a2));
        a2.setLabel("New Label");
        assertTrue(a1.equals(a2));

        // label font...
        a1.setLabelFont(new Font("Dialog", Font.PLAIN, 8));
        assertFalse(a1.equals(a2));
        a2.setLabelFont(new Font("Dialog", Font.PLAIN, 8));
        assertTrue(a1.equals(a2));

        // label paint...
        a1.setLabelPaint(new GradientPaint(1.0f, 2.0f, Color.white, 3.0f,
                4.0f, Color.black));
        assertFalse(a1.equals(a2));
        a2.setLabelPaint(new GradientPaint(1.0f, 2.0f, Color.white, 3.0f,
                4.0f, Color.black));
        assertTrue(a1.equals(a2));

        // label insets...
        a1.setLabelInsets(new RectangleInsets(10.0, 10.0, 10.0, 10.0));
        assertFalse(a1.equals(a2));
        a2.setLabelInsets(new RectangleInsets(10.0, 10.0, 10.0, 10.0));
        assertTrue(a1.equals(a2));

        // label angle...
        a1.setLabelAngle(1.23);
        assertFalse(a1.equals(a2));
        a2.setLabelAngle(1.23);
        assertTrue(a1.equals(a2));

        // label tool tip...
        a1.setLabelToolTip("123");
        assertFalse(a1.equals(a2));
        a2.setLabelToolTip("123");
        assertTrue(a1.equals(a2));

        // label URL...
        a1.setLabelURL("ABC");
        assertFalse(a1.equals(a2));
        a2.setLabelURL("ABC");
        assertTrue(a1.equals(a2));

        // axis line visible...
        a1.setAxisLineVisible(false);
        assertFalse(a1.equals(a2));
        a2.setAxisLineVisible(false);
        assertTrue(a1.equals(a2));

        // axis line stroke...
        BasicStroke s = new BasicStroke(1.1f);
        a1.setAxisLineStroke(s);
        assertFalse(a1.equals(a2));
        a2.setAxisLineStroke(s);
        assertTrue(a1.equals(a2));

        // axis line paint...
        a1.setAxisLinePaint(new GradientPaint(1.0f, 2.0f, Color.red, 3.0f,
                4.0f, Color.black));
        assertFalse(a1.equals(a2));
        a2.setAxisLinePaint(new GradientPaint(1.0f, 2.0f, Color.red, 3.0f,
                4.0f, Color.black));
        assertTrue(a1.equals(a2));

        // tick labels visible flag...
        a1.setTickLabelsVisible(false);
        assertFalse(a1.equals(a2));
        a2.setTickLabelsVisible(false);
        assertTrue(a1.equals(a2));

        // tick label font...
        a1.setTickLabelFont(new Font("Dialog", Font.PLAIN, 12));
        assertFalse(a1.equals(a2));
        a2.setTickLabelFont(new Font("Dialog", Font.PLAIN, 12));
        assertTrue(a1.equals(a2));

        // tick label paint...
        a1.setTickLabelPaint(new GradientPaint(1.0f, 2.0f, Color.yellow,
                3.0f, 4.0f, Color.black));
        assertFalse(a1.equals(a2));
        a2.setTickLabelPaint(new GradientPaint(1.0f, 2.0f, Color.yellow,
                3.0f, 4.0f, Color.black));
        assertTrue(a1.equals(a2));

        // tick label insets...
        a1.setTickLabelInsets(new RectangleInsets(10.0, 10.0, 10.0, 10.0));
        assertFalse(a1.equals(a2));
        a2.setTickLabelInsets(new RectangleInsets(10.0, 10.0, 10.0, 10.0));
        assertTrue(a1.equals(a2));

        // tick marks visible flag...
        // NOTE(review): passing `true` here presumably flips a non-default
        // value (default appears to be false) — confirm against CategoryAxis.
        a1.setTickMarksVisible(true);
        assertFalse(a1.equals(a2));
        a2.setTickMarksVisible(true);
        assertTrue(a1.equals(a2));

        // tick mark inside length...
        a1.setTickMarkInsideLength(1.23f);
        assertFalse(a1.equals(a2));
        a2.setTickMarkInsideLength(1.23f);
        assertTrue(a1.equals(a2));

        // tick mark outside length...
        a1.setTickMarkOutsideLength(1.23f);
        assertFalse(a1.equals(a2));
        a2.setTickMarkOutsideLength(1.23f);
        assertTrue(a1.equals(a2));

        // tick mark stroke...
        a1.setTickMarkStroke(new BasicStroke(2.0f));
        assertFalse(a1.equals(a2));
        a2.setTickMarkStroke(new BasicStroke(2.0f));
        assertTrue(a1.equals(a2));

        // tick mark paint...
        a1.setTickMarkPaint(new GradientPaint(1.0f, 2.0f, Color.cyan, 3.0f,
                4.0f, Color.black));
        assertFalse(a1.equals(a2));
        a2.setTickMarkPaint(new GradientPaint(1.0f, 2.0f, Color.cyan, 3.0f,
                4.0f, Color.black));
        assertTrue(a1.equals(a2));

        // fixed dimension... (original comment said "tick mark outside
        // length" — a copy-paste slip; this exercises setFixedDimension)
        a1.setFixedDimension(3.21f);
        assertFalse(a1.equals(a2));
        a2.setFixedDimension(3.21f);
        assertTrue(a1.equals(a2));
    }

    /**
     * Two objects that are equal are required to return the same hashCode.
     */
    public void testHashCode() {
        Axis a1 = new CategoryAxis("Test");
        Axis a2 = new CategoryAxis("Test");
        assertTrue(a1.equals(a2));
        int h1 = a1.hashCode();
        int h2 = a2.hashCode();
        assertEquals(h1, h2);
    }

}
package com.manifoldjs.hostedwebapp;

import android.content.Intent;
import android.net.Uri;
import android.content.res.AssetManager;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.LinearLayout;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaActivity;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.apache.cordova.Whitelist;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * This class manipulates the Web App W3C manifest.
 *
 * <p>Cordova plugin that loads a W3C web-app manifest from the app's assets,
 * maintains an "offline" overlay WebView shown on connectivity loss, and
 * injects Cordova bridge / custom scripts into the hosted page.</p>
 */
public class HostedWebApp extends CordovaPlugin {

    private static final String LOG_TAG = "HostedWebApp";
    // default manifest filename looked up under assets/www
    private static final String DEFAULT_MANIFEST_FILE = "manifest.json";
    // optional custom offline page under assets/www
    private static final String OFFLINE_PAGE = "offline.html";
    // fallback inline offline page; %s is replaced by the message text
    private static final String OFFLINE_PAGE_TEMPLATE = "<html><body><div style=\"top:50%%;text-align:center;position:absolute\">%s</div></body></html>";

    // guards against concurrent loadManifest calls
    private boolean loadingManifest;
    // last successfully loaded manifest; may stay null if none was found
    private JSONObject manifestObject;

    private CordovaActivity activity;
    private CordovaPlugin whiteListPlugin;
    // container for the offline overlay; created lazily on the UI thread
    private LinearLayout rootLayout;
    private WebView offlineWebView;
    private boolean offlineOverlayEnabled;
    private boolean isConnectionError = false;

    /**
     * Loads the default manifest (if present) and builds the hidden offline
     * overlay on the UI thread.
     */
    @Override
    public void pluginInitialize() {
        final HostedWebApp me = HostedWebApp.this;
        this.activity = (CordovaActivity)this.cordova.getActivity();

        // Load default manifest file.
        this.loadingManifest = true;
        if (this.assetExists(HostedWebApp.DEFAULT_MANIFEST_FILE)) {
            try {
                this.manifestObject = this.loadLocalManifest(HostedWebApp.DEFAULT_MANIFEST_FILE);
                this.onManifestLoaded();
            } catch (JSONException e) {
                // malformed manifest: continue without one
                e.printStackTrace();
            }
        }
        this.loadingManifest = false;

        // Initialize offline overlay
        this.activity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (me.rootLayout == null) {
                    me.rootLayout = me.createOfflineRootLayout();
                    me.activity.addContentView(me.rootLayout, me.rootLayout.getLayoutParams());
                }

                if (me.offlineWebView == null) {
                    me.offlineWebView = me.createOfflineWebView();
                    me.rootLayout.addView(me.offlineWebView);
                }

                // prefer a bundled offline page; fall back to the inline template
                if (me.assetExists(HostedWebApp.OFFLINE_PAGE)) {
                    me.offlineWebView.loadUrl("file:///android_asset/www/" + HostedWebApp.OFFLINE_PAGE);
                } else {
                    me.offlineWebView.loadData(
                            String.format(HostedWebApp.OFFLINE_PAGE_TEMPLATE, "It looks like you are offline. Please reconnect to use this application."),
                            "text/html", null);
                }

                me.offlineOverlayEnabled = true;
            }
        });
    }

    /**
     * Dispatches JS-side plugin actions: getManifest, loadManifest,
     * enableOfflinePage, disableOfflinePage, injectPluginScript.
     *
     * @return true if the action was recognized (success/error is reported
     *         through the callback context), false otherwise
     */
    @Override
    public boolean execute(String action, JSONArray args, final CallbackContext callbackContext) throws JSONException {
        final HostedWebApp me = HostedWebApp.this;
        if (action.equals("getManifest")) {
            if (this.manifestObject != null) {
                callbackContext.success(manifestObject.toString());
            } else {
                callbackContext.error("Manifest not loaded, load a manifest using loadManifest.");
            }
            return true;
        }

        if (action.equals("loadManifest")) {
            if (this.loadingManifest) {
                callbackContext.error("Already loading a manifest");
            } else if (args.length() == 0) {
                callbackContext.error("Manifest file name required");
            } else {
                final String configFilename = args.getString(0);
                this.loadingManifest = true;
                // load off the UI thread; result is delivered asynchronously
                this.cordova.getThreadPool().execute(new Runnable() {
                    @Override
                    public void run() {
                        if (me.assetExists(configFilename)) {
                            try {
                                me.manifestObject = me.loadLocalManifest(configFilename);
                                me.onManifestLoaded();
                                callbackContext.success(me.manifestObject);
                            } catch (JSONException e) {
                                callbackContext.error(e.getMessage());
                            }
                        } else {
                            callbackContext.error("Manifest file not found in folder assets/www");
                        }
                        me.loadingManifest = false;
                    }
                });
                // keep the callback alive until the background task reports
                PluginResult pluginResult = new PluginResult(PluginResult.Status.NO_RESULT);
                pluginResult.setKeepCallback(true);
                callbackContext.sendPluginResult(pluginResult);
            }
            return true;
        }

        if (action.equals("enableOfflinePage")) {
            this.offlineOverlayEnabled = true;
            return true;
        }

        if (action.equals("disableOfflinePage")) {
            this.offlineOverlayEnabled = false;
            return true;
        }

        if (action.equals("injectPluginScript")) {
            final List<String> scripts = new ArrayList<String>();
            scripts.add(args.getString(0));
            // script injection must happen on the UI thread
            cordova.getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    boolean result = injectScripts(scripts);
                    callbackContext.success(result ? 1 : 0);
                }
            });
            return true;
        }

        return false;
    }

    /**
     * Reacts to Cordova framework events: connectivity changes, page
     * lifecycle, and load errors (which trigger the offline overlay).
     * Always returns null (no message result).
     */
    @Override
    public Object onMessage(String id, Object data) {
        if (id.equals("networkconnection") && data != null) {
            this.handleNetworkConnectionChange(data.toString());
        } else if (id.equals("onPageStarted")) {
            // reset the error flag at the start of each navigation
            this.isConnectionError = false;
        } else if (id.equals("onReceivedError")) {
            if (data instanceof JSONObject) {
                JSONObject errorData = (JSONObject) data;
                try {
                    int errorCode = errorData.getInt("errorCode");
                    // treat 404 and connectivity-class WebView errors as "offline"
                    if (404 == errorCode
                            || WebViewClient.ERROR_HOST_LOOKUP == errorCode
                            || WebViewClient.ERROR_CONNECT == errorCode
                            || WebViewClient.ERROR_TIMEOUT == errorCode) {
                        this.isConnectionError = true;
                        this.showOfflineOverlay();
                    }
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }
        } else if (id.equals("onPageFinished")) {
            Log.v(LOG_TAG, String.format("Finished loading URL '%s'", this.webView.getUrl()));
            if (!this.isConnectionError) {
                this.hideOfflineOverlay();
            }
            // NOTE(review): injectCordovaScripts() dereferences manifestObject
            // without a null check — if no manifest was ever loaded this NPEs.
            this.injectCordovaScripts();
        }
        return null;
    }

    /**
     * Logs whitelist rejections for visibility but deliberately does not
     * change the framework's default allow/deny decision.
     */
    @Override
    public Boolean shouldAllowRequest(String url) {
        CordovaPlugin whiteListPlugin = this.getWhitelistPlugin();

        if (whiteListPlugin != null
                && Boolean.TRUE != whiteListPlugin.shouldAllowRequest(url)) {
            Log.w(LOG_TAG, String.format("Whitelist rejection: url='%s'", url));
        }

        // do not alter default behavior.
        return super.shouldAllowRequest(url);
    }

    /**
     * Opens non-whitelisted navigations in an external browser instead of
     * the embedded WebView; whitelisted URLs load normally.
     *
     * @return true when the URL was handled externally (navigation consumed)
     */
    @Override
    public boolean onOverrideUrlLoading(String url) {
        CordovaPlugin whiteListPlugin = this.getWhitelistPlugin();

        if (whiteListPlugin != null
                && Boolean.TRUE != whiteListPlugin.shouldAllowNavigation(url)) {
            // If the URL is not in the list URLs to allow navigation, open the URL in the external browser
            // (code extracted from CordovaLib/src/org/apache/cordova/CordovaWebViewImpl.java)
            Log.w(LOG_TAG, String.format("Whitelist rejection: url='%s'", url));

            try {
                Intent intent = new Intent(Intent.ACTION_VIEW);
                intent.addCategory(Intent.CATEGORY_BROWSABLE);
                Uri uri = Uri.parse(url);
                // Omitting the MIME type for file: URLs causes "No Activity found to handle Intent".
                // Adding the MIME type to http: URLs causes them to not be handled by the downloader.
                if ("file".equals(uri.getScheme())) {
                    intent.setDataAndType(uri, this.webView.getResourceApi().getMimeType(uri));
                } else {
                    intent.setData(uri);
                }
                this.activity.startActivity(intent);
            } catch (android.content.ActivityNotFoundException e) {
                e.printStackTrace();
            }
            return true;
        } else {
            return false;
        }
    }

    /** Returns the currently loaded manifest, or null if none was loaded. */
    public JSONObject getManifest() {
        return this.manifestObject;
    }

    /**
     * Injects the Cordova bridge scripts (per the manifest's "mjs_cordova"
     * settings) and any "mjs_custom_scripts" whose URL/platform filters
     * match the current page.
     */
    private void injectCordovaScripts() {
        // NOTE(review): assumes manifestObject is non-null — see onMessage.
        JSONObject cordovaSettings = this.manifestObject.optJSONObject("mjs_cordova");

        // Inject cordova scripts if configured
        if (cordovaSettings != null) {
            String pluginMode = cordovaSettings.optString("pluginMode", "client");
            if (!pluginMode.equals("none")) {
                String cordovaBaseUrl = cordovaSettings.optString("baseUrl", "").trim();
                if (!cordovaBaseUrl.endsWith("/")) {
                    cordovaBaseUrl += "/";
                }

                // expose plugin configuration to the hosted page
                this.webView.getEngine().loadUrl("javascript: window.hostedWebApp = { 'platform': 'android', 'pluginMode': '" + pluginMode + "', 'cordovaBaseUrl': '" + cordovaBaseUrl + "'};", false);

                List<String> scriptList = new ArrayList<String>();
                if (pluginMode.equals("client")) {
                    scriptList.add("cordova.js");
                }
                scriptList.add("hostedapp-bridge.js");
                injectScripts(scriptList);
            }
        }

        // Inject custom scripts
        JSONArray customScripts = this.manifestObject.optJSONArray("mjs_custom_scripts");
        if (customScripts != null && customScripts.length() > 0) {
            String pageUrl = this.webView.getUrl();
            for (int i = 0; i < customScripts.length(); i++) {
                JSONObject item = customScripts.optJSONObject(i);
                if (item != null) {
                    String source = item.optString("source", "");
                    if (!source.trim().isEmpty()) {
                        // ensure script applies to current page
                        boolean isURLMatch = true;
                        JSONArray match = item.optJSONArray("match");
                        if (match == null) {
                            // "match" may also be a single string: normalize to array
                            match = new JSONArray();
                            String matchString = item.optString("match", "");
                            if (!matchString.trim().isEmpty()) {
                                match.put(matchString);
                            }
                        }

                        if (match.length() > 0) {
                            Whitelist whitelist = new Whitelist();
                            for (int j = 0; j < match.length(); j++) {
                                whitelist.addWhiteListEntry(match.optString(j), false);
                            }
                            isURLMatch = whitelist.isUrlWhiteListed(pageUrl);
                        }

                        // ensure script applies to current platform
                        boolean isPlatformMatch = true;
                        String platform = item.optString("platform", "");
                        if (!platform.trim().isEmpty()) {
                            isPlatformMatch = false;
                            // semicolon-separated platform list, e.g. "android;ios"
                            String[] platforms = platform.split(";");
                            for (String p : platforms) {
                                if (p.trim().equalsIgnoreCase("android")) {
                                    isPlatformMatch = true;
                                    break;
                                }
                            }
                        }

                        if (isURLMatch && isPlatformMatch) {
                            injectScripts(Arrays.asList(new String[] { source }));
                        }
                    }
                }
            }
        }
    }

    /** Notifies other plugins that a manifest has been (re)loaded. */
    private void onManifestLoaded() {
        this.webView.postMessage("hostedWebApp_manifestLoaded", this.manifestObject);
    }

    /** Lazily resolves and caches the Whitelist plugin instance (may be null). */
    private CordovaPlugin getWhitelistPlugin() {
        if (this.whiteListPlugin == null) {
            this.whiteListPlugin = this.webView.getPluginManager().getPlugin("Whitelist");
        }
        return whiteListPlugin;
    }

    /** Returns true when the named asset exists directly under assets/www. */
    private boolean assetExists(String asset) {
        final AssetManager assetManager = this.activity.getResources().getAssets();
        try {
            return Arrays.asList(assetManager.list("www")).contains(asset);
        } catch (IOException e) {
            e.printStackTrace();
        }
        // on I/O failure, report the asset as missing
        return false;
    }

    /** Builds the WebView that renders the offline page. Call on the UI thread. */
    private WebView createOfflineWebView() {
        WebView webView = new WebView(activity);
        webView.getSettings().setJavaScriptEnabled(true);
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB) {
            // software rendering avoids hardware-layer issues on the overlay
            webView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
        }
        webView.setLayoutParams(new LinearLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT,
                1.0F));
        return webView;
    }

    /** Builds the (initially hidden) container for the offline overlay. */
    private LinearLayout createOfflineRootLayout() {
        LinearLayout root = new LinearLayout(activity.getBaseContext());
        root.setOrientation(LinearLayout.VERTICAL);
        root.setVisibility(View.INVISIBLE);
        root.setLayoutParams(new LinearLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT,
                0.0F));
        return root;
    }

    /**
     * Shows the overlay when connectivity drops; on reconnect either retries
     * the failed page load or simply hides the overlay.
     *
     * @param info connection type string from the network plugin ("none" = offline)
     */
    private void handleNetworkConnectionChange(String info) {
        final HostedWebApp me = HostedWebApp.this;
        if (info.equals("none")) {
            this.showOfflineOverlay();
        } else {
            if (this.isConnectionError) {
                // the last load failed: reload the page now that we're online
                this.activity.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        String currentUrl = me.webView.getUrl();
                        me.webView.loadUrlIntoView(currentUrl, false);
                    }
                });
            } else {
                this.hideOfflineOverlay();
            }
        }
    }

    /** Makes the offline overlay visible (no-op when the feature is disabled). */
    private void showOfflineOverlay() {
        final HostedWebApp me = HostedWebApp.this;
        if (this.offlineOverlayEnabled) {
            this.activity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if (me.rootLayout != null) {
                        me.rootLayout.setVisibility(View.VISIBLE);
                    }
                }
            });
        }
    }

    /** Hides the offline overlay unconditionally. */
    private void hideOfflineOverlay() {
        final HostedWebApp me = HostedWebApp.this;
        this.activity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (me.rootLayout != null) {
                    me.rootLayout.setVisibility(View.INVISIBLE);
                }
            }
        });
    }

    /**
     * Reads and parses a manifest file from assets/www.
     *
     * @return the parsed manifest, or null on I/O failure
     * @throws JSONException when the file content is not valid JSON
     */
    private JSONObject loadLocalManifest(String manifestFile) throws JSONException {
        try {
            InputStream inputStream = this.activity.getResources().getAssets().open("www/" + manifestFile);
            int size = inputStream.available();
            byte[] bytes = new byte[size];
            // NOTE(review): a single read() is not guaranteed to fill the
            // buffer; asset streams usually do, but this is not contractual.
            inputStream.read(bytes);
            inputStream.close();
            String jsonString = new String(bytes, "UTF-8");
            return new JSONObject(jsonString);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Concatenates the named asset scripts (each tagged with a sourceURL for
     * debugging) and injects them into the main WebView in one javascript: URL.
     * Files that fail to load are logged and skipped. Always returns true.
     */
    private boolean injectScripts(List<String> files) {
        String script = "";
        for( int i = 0; i < files.size(); i++) {
            String fileName = files.get(i);
            Log.w(LOG_TAG, String.format("Injecting script: '%s'", fileName));
            try {
                InputStream inputStream = this.activity.getResources().getAssets().open("www/" + fileName);

                int size = inputStream.available();
                byte[] bytes = new byte[size];
                // NOTE(review): same short-read caveat as loadLocalManifest.
                inputStream.read(bytes);
                inputStream.close();

                String content = new String(bytes, "UTF-8");
                // sourceURL makes the injected file show up by name in devtools
                script += "\r\n//# sourceURL=" + fileName + "\r\n" + content;
            } catch(IOException e) {
                Log.v(LOG_TAG, String.format("ERROR: failed to load script file: '%s'", fileName));
                e.printStackTrace();
            }
        }

        this.webView.getEngine().loadUrl("javascript:" + Uri.encode(script), false);

        return true;
    }
}
package com.kk.bus; import org.junit.Test; import java.lang.reflect.Method; import java.util.Set; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; public class RegisteredClassTest { private static class EventA {} private static class EventAA extends EventA {} private static class EventAAA extends EventAA {} private static class EventB {} private static interface EventInterface {} // Methods: Constructor(s) private static class ClassConstructor {} public void constructors() { RegisteredClass registeredClass = new RegisteredClass(ClassConstructor.class); assertNotNull(registeredClass); } // Methods: hasAnySubscribers() private static class ClassHasAnySubscribers_None { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } } private static class ClassHasAnySubscribers_One { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void 
fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } @Subscribe public void onEventA(EventA event) { } } private static class ClassHasAnySubscribers_Two { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } } @Test public void hasAnySubscribers() { RegisteredClass registeredClass = new RegisteredClass(ClassHasAnySubscribers_None.class); assertFalse(registeredClass.hasAnySubscribers()); registeredClass = new RegisteredClass(ClassHasAnySubscribers_One.class); assertTrue(registeredClass.hasAnySubscribers()); registeredClass = new RegisteredClass(ClassHasAnySubscribers_Two.class); assertTrue(registeredClass.hasAnySubscribers()); } // Methods: getSubscriberMethods() private static class GetSubscribedMethods_None { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void 
fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } } private static class GetSubscribedMethods_One { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } @Subscribe public void onEventA(EventA event) { } } private static class GetSubscribedMethods_Two { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { 
return null; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } } @Test public void getSubscribedMethods() { RegisteredClass registeredClass = new RegisteredClass(GetSubscribedMethods_None.class); Set<Method> methods = registeredClass.getSubscriberMethods(EventA.class); assertNull(methods); methods = registeredClass.getSubscriberMethods(EventB.class); assertNull(methods); registeredClass = new RegisteredClass(GetSubscribedMethods_One.class); methods = registeredClass.getSubscriberMethods(EventA.class); assertNotNull(methods); assertEquals(1, methods.size()); methods = registeredClass.getSubscriberMethods(EventB.class); assertNull(methods); registeredClass = new RegisteredClass(GetSubscribedMethods_Two.class); methods = registeredClass.getSubscriberMethods(EventA.class); assertNotNull(methods); assertEquals(1, methods.size()); methods = registeredClass.getSubscriberMethods(EventB.class); assertNotNull(methods); assertEquals(1, methods.size()); } // Methods: getSubscribedEventClasses() private static class ClassGetSubscribedEventClasses_None { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } } private static class ClassGetSubscribedEventClasses_One { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int 
fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } @Subscribe public void onEventA(EventA event) { } } private static class ClassGetSubscribedEventClasses_Two { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } } @Test public void getSubscribedEventClasses() { RegisteredClass registeredClass = new RegisteredClass(ClassGetSubscribedEventClasses_None.class); assertNull(registeredClass.getSubscribedEventClasses()); registeredClass = new RegisteredClass(ClassGetSubscribedEventClasses_One.class); Set<Class<?>> classes = registeredClass.getSubscribedEventClasses(); assertNotNull(classes); assertEquals(1, classes.size()); assertTrue(classes.contains(EventA.class)); registeredClass = new 
// ---------------------------------------------------------------------------
// NOTE(review): this is the interior/tail of the RegisteredClass test class;
// its header (package, imports, class declaration) and the beginning of the
// getSubscribedEventClasses() test are above this chunk. The first statement
// below completes that test ("... registeredClass = new" precedes it).
// Fixture pattern used throughout: non-annotated "fake*" decoy methods of
// every visibility and arity, so the tests prove that RegisteredClass picks
// up ONLY @Subscribe / @Produce annotated methods.
// ---------------------------------------------------------------------------
RegisteredClass(ClassGetSubscribedEventClasses_Two.class); classes = registeredClass.getSubscribedEventClasses(); assertNotNull(classes); assertEquals(2, classes.size()); assertTrue(classes.contains(EventA.class)); assertTrue(classes.contains(EventB.class)); }
// Methods: hasAnyProducers()
// Fixtures with zero, one and two @Produce methods; the decoys and the
// @Subscribe handlers must not count as producers (asserted below).
private static class HasAnyProducers_None { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } }
private static class HasAnyProducers_One { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } @Produce public EventA produceEventA() { return null; } }
private static class HasAnyProducers_Two { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } }
@Test public void hasAnyProducers() { RegisteredClass registeredClass = new RegisteredClass(HasAnyProducers_None.class); assertFalse(registeredClass.hasAnyProducers()); registeredClass = new RegisteredClass(HasAnyProducers_One.class); assertTrue(registeredClass.hasAnyProducers()); registeredClass = new RegisteredClass(HasAnyProducers_Two.class); assertTrue(registeredClass.hasAnyProducers()); }
// Methods: getProducedEventClasses()
// getProducedEventClasses() returns null when there are no producers,
// otherwise exactly the set of event classes with a @Produce method.
private static class GetProducedEventClasses_None { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } }
private static class GetProducedEventClasses_One { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } @Produce public EventA produceEventA() { return null; } }
private static class GetProducedEventClasses_Two { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } }
@Test public void getProducedEventClasses() { RegisteredClass registeredClass = new RegisteredClass(GetProducedEventClasses_None.class); Set<Class<?>> classes = registeredClass.getProducedEventClasses(); assertNull(classes); registeredClass = new RegisteredClass(GetProducedEventClasses_One.class); classes = registeredClass.getProducedEventClasses(); assertNotNull(classes); assertEquals(1, classes.size()); assertTrue(classes.contains(EventA.class)); registeredClass = new RegisteredClass(GetProducedEventClasses_Two.class); assertTrue(registeredClass.hasAnyProducers()); classes = registeredClass.getProducedEventClasses(); assertNotNull(classes); assertEquals(2, classes.size()); assertTrue(classes.contains(EventA.class)); assertTrue(classes.contains(EventB.class)); }
// Methods: getProducerMethod()
// getProducerMethod(X) is an exact-type lookup: producers exist for EventA,
// EventAA and EventAAA individually, and EventB (no producer) yields null.
// NOTE(review): unlike every other fixture here this one is a non-static
// inner class ("class ClassGetProducerMethod") -- presumably unintentional;
// confirm RegisteredClass does not care before changing it.
class ClassGetProducerMethod { public void fakePubA(EventA event) { } public void fakePubB(EventB event) { } public void fakePubC(int event) { } public int fakePubD() { return 0; } protected void fakeProA(EventA event) { } protected void fakeProB(EventB event) { } protected void fakeProC(int event) { } protected int fakeProD() { return 0; } void fakePproA(EventA event) { } void fakePproB(EventB event) { } void fakePproC(int event) { } int fakePproD() { return 0; } private void fakePriA(EventA event) { } private void fakePriB(EventB event) { } private void fakePriC(int event) { } private int fakePriD() { return 0; } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } @Produce public EventA produceEventA() { return null; } @Produce public EventAA produceEventAA() { return null; } @Produce public EventAAA produceEventAAA() { return null; } }
@Test public void getProducerMethod() { RegisteredClass registeredClass = new RegisteredClass(ClassGetProducerMethod.class); assertNotNull(registeredClass.getProducerMethod(EventA.class)); assertNotNull(registeredClass.getProducerMethod(EventAA.class)); assertNotNull(registeredClass.getProducerMethod(EventAAA.class)); assertNull(registeredClass.getProducerMethod(EventB.class)); }
// Functionality: Subscribers
// Naming scheme: F = has a non-annotated fake method, A/B = one handler for
// EventA/EventB, doubled letters = two handlers for that event class.
private static class ClassNoSubscribersA {} @Test public void testClassNoSubscribersA() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassNoSubscribersA.class); assertFalse(registeredClass.hasAnySubscribers()); }
private static class ClassNoSubscribersB { public void fakeMethod() { } } @Test public void testClassNoSubscribersB() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassNoSubscribersB.class); assertFalse(registeredClass.hasAnySubscribers()); }
private static class ClassNoSubscribersC { public void fakeMethod() { } @Produce public EventA produceEventA() { return null; } } @Test public void testClassNoSubscribersC() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassNoSubscribersC.class); assertFalse(registeredClass.hasAnySubscribers()); }
private static class ClassSubscriberEventA { @Subscribe public void onEventA(EventA event) { } } @Test public void testClassSubscriberEventA() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassSubscriberEventA.class); assertTrue(registeredClass.hasAnySubscribers()); assertNotNull(registeredClass.getSubscribedEventClasses()); assertEquals(1, registeredClass.getSubscribedEventClasses().size()); assertEquals(1, registeredClass.getSubscriberMethods(EventA.class).size()); }
private static class ClassSubscriberEventFA { public void fakeMethod() { } @Subscribe public void onEventA(EventA event) { } } @Test public void testClassSubscriberEventFA() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassSubscriberEventFA.class); assertTrue(registeredClass.hasAnySubscribers()); assertNotNull(registeredClass.getSubscribedEventClasses()); assertEquals(1, registeredClass.getSubscribedEventClasses().size()); assertEquals(1, registeredClass.getSubscriberMethods(EventA.class).size()); }
private static class ClassSubscriberEventAA { @Subscribe public void onEventA1(EventA event) { } @Subscribe public void onEventA2(EventA event) { } } @Test public void testClassSubscriberEventAA() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassSubscriberEventAA.class); assertTrue(registeredClass.hasAnySubscribers()); assertNotNull(registeredClass.getSubscribedEventClasses()); assertEquals(1, registeredClass.getSubscribedEventClasses().size()); assertEquals(2, registeredClass.getSubscriberMethods(EventA.class).size()); }
private static class ClassSubscriberEventFAA { public void fakeMethod() { } @Subscribe public void onEventA1(EventA event) { } @Subscribe public void onEventA2(EventA event) { } } @Test public void testClassSubscriberEventFAA() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassSubscriberEventFAA.class); assertTrue(registeredClass.hasAnySubscribers()); assertNotNull(registeredClass.getSubscribedEventClasses()); assertEquals(1, registeredClass.getSubscribedEventClasses().size()); assertEquals(2, registeredClass.getSubscriberMethods(EventA.class).size()); }
private static class ClassSubscriberEventAB { @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } } @Test public void testClassSubscriberEventAB() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassSubscriberEventAB.class); assertTrue(registeredClass.hasAnySubscribers()); assertNotNull(registeredClass.getSubscribedEventClasses()); assertEquals(2, registeredClass.getSubscribedEventClasses().size()); assertEquals(1, registeredClass.getSubscriberMethods(EventA.class).size()); assertEquals(1, registeredClass.getSubscriberMethods(EventB.class).size()); }
private static class ClassSubscriberEventFAB { public void fakeMethod() { } @Subscribe public void onEventA(EventA event) { } @Subscribe public void onEventB(EventB event) { } } @Test public void testClassSubscriberEventFAB() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassSubscriberEventFAB.class); assertTrue(registeredClass.hasAnySubscribers()); assertNotNull(registeredClass.getSubscribedEventClasses()); assertEquals(2, registeredClass.getSubscribedEventClasses().size()); assertEquals(1, registeredClass.getSubscriberMethods(EventA.class).size()); assertEquals(1, registeredClass.getSubscriberMethods(EventB.class).size()); }
private static class ClassSubscriberEventFAABB { public void fakeMethod() { } @Subscribe public void onEventA1(EventA event) { } @Subscribe public void onEventA2(EventA event) { } @Subscribe public void onEventB1(EventB event) { } @Subscribe public void onEventB2(EventB event) { } } @Test public void testClassSubscriberEventFAABB() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassSubscriberEventFAABB.class); assertTrue(registeredClass.hasAnySubscribers()); assertNotNull(registeredClass.getSubscribedEventClasses()); assertEquals(2, registeredClass.getSubscribedEventClasses().size()); assertEquals(2, registeredClass.getSubscriberMethods(EventA.class).size()); assertEquals(2, registeredClass.getSubscriberMethods(EventB.class).size()); }
// The RegisteredClass constructor must reject malformed @Subscribe methods
// (more than one parameter, or any non-public visibility) by throwing
// IllegalArgumentException.
private static class ClassSubscriberErrorTooManyParams { @Subscribe public void onEventA(EventA event, int count) { } } @Test public void testClassSubscriberErrorTooManyParams() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassSubscriberErrorTooManyParams.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassSubscriberErrorPrivate { @Subscribe private void onEventA(EventA event) { } } @Test public void testClassSubscriberErrorPrivate() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassSubscriberErrorPrivate.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassSubscriberErrorProtected { @Subscribe protected void onEventA(EventA event) { } } @Test public void testClassSubscriberErrorProtected() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassSubscriberErrorProtected.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassSubscriberErrorPackageProtected { @Subscribe void onEventA(EventA event) { } } @Test public void testClassSubscriberErrorPackageProtected() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassSubscriberErrorPackageProtected.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
// Functionality: Producers
private static class ClassNoProducerA {} @Test public void testClassNoProducerA() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassNoProducerA.class); assertFalse(registeredClass.hasAnyProducers()); }
private static class ClassNoProducerB { public void fakeMethod() { } } @Test public void testClassNoProducerB() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassNoProducerB.class); assertFalse(registeredClass.hasAnyProducers()); }
private static class ClassNoProducerC { public void fakeMethod() { } @Subscribe public void onEventA1(EventA event) { } @Subscribe public void onEventA2(EventA event) { } @Subscribe public void onEventB1(EventB event) { } @Subscribe public void onEventB2(EventB event) { } } @Test public void testClassNoProducerC() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassNoProducerC.class); assertFalse(registeredClass.hasAnyProducers()); }
private static class ClassProducerEventA { @Produce public EventA produceEventA() { return null; } } @Test public void testClassProducerEventA() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassProducerEventA.class); assertTrue(registeredClass.hasAnyProducers()); assertNotNull(registeredClass.getProducerMethod(EventA.class)); }
private static class ClassProducerEventFA { public void fakeMethod() { } @Produce public EventA produceEventA() { return null; } } @Test public void testClassProducerEventFA() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassProducerEventFA.class); assertTrue(registeredClass.hasAnyProducers()); assertNotNull(registeredClass.getProducerMethod(EventA.class)); }
private static class ClassProducerEventAB { @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } } @Test public void testClassProducerEventAB() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassProducerEventAB.class); assertTrue(registeredClass.hasAnyProducers()); assertNotNull(registeredClass.getProducerMethod(EventA.class)); assertNotNull(registeredClass.getProducerMethod(EventB.class)); }
private static class ClassProducerEventFAB { public void fakeMethod() { } @Produce public EventA produceEventA() { return null; } @Produce public EventB produceEventB() { return null; } } @Test public void testClassProducerEventFAB() throws Exception { RegisteredClass registeredClass = new RegisteredClass(ClassProducerEventFAB.class); assertTrue(registeredClass.hasAnyProducers()); assertNotNull(registeredClass.getProducerMethod(EventA.class)); assertNotNull(registeredClass.getProducerMethod(EventB.class)); }
// The constructor must reject malformed @Produce methods: parameters, void or
// Void return, non-public visibility, or two producers for the same event
// class -- each throws IllegalArgumentException.
private static class ClassProducerErrorUnexpectedParameters { @Produce public EventA produceEventA(int count) { return null; } } @Test public void testClassProducerErrorUnexpectedParameters() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassProducerErrorUnexpectedParameters.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassProducerErrorNoResult { @Produce public void produceEventA() { } } @Test public void testClassProducerErrorNoResult() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassProducerErrorNoResult.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassProducerErrorVoidResult { @Produce public Void produceEventA() { return null; } } @Test public void testClassProducerErrorVoidResult() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassProducerErrorVoidResult.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassProducerErrorPrivate { @Produce private EventA produceEventA() { return null; } } @Test public void testClassProducerErrorPrivate() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassProducerErrorPrivate.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassProducerErrorProtected { @Produce protected EventA produceEventA() { return null; } } @Test public void testClassProducerErrorProtected() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassProducerErrorProtected.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassProducerErrorPackageProtected { @Produce EventA produceEventA() { return null; } } @Test public void testClassProducerErrorPackageProtected() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassProducerErrorPackageProtected.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); }
private static class ClassProducerErrorDuplicate { @Produce public EventA produceEventA1() { return null; } @Produce public EventA produceEventA2() { return null; } } @Test public void testClassProducerErrorDuplicate() throws Exception { IllegalArgumentException exception = null; try { new RegisteredClass(ClassProducerErrorDuplicate.class); } catch (IllegalArgumentException e) { exception = e; } assertNotNull(exception); } }
import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Properties; import javax.xml.parsers.ParserConfigurationException; import javax.xml.rpc.ServiceException; import javax.xml.stream.XMLStreamException; import javax.xml.transform.TransformerException; import org.apache.commons.io.FileUtils; import org.bridgedb.IDMapperException; import org.bridgedb.IDMapperStack; import org.pathvisio.model.ConverterException; import org.w3c.dom.Document; import org.w3c.dom.NodeList; import org.w3c.dom.Element; import org.xml.sax.SAXException; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; import org.apache.commons.io.FilenameUtils;
/**
 * Converts downloaded WikiPathways GPML dump files into RDF (Turtle): one
 * Turtle file per pathway/revision plus a VoID descriptor in /tmp/void.ttl.
 * All RDF-building work is delegated to the project helpers
 * WpRDFFunctionLibrary and basicCalls (defined elsewhere in this project).
 */
public class WP2RDFConversion {
/**
 * Entry point. Reads config.properties, builds the BridgeDb mapper and the
 * VoID model, then converts every GPML file found in the configured dump
 * directory via processGPMLFile(). File names are expected to end in
 * "..._&lt;wpId&gt;_&lt;revision&gt;" (split on '_').
 */
public static void main(String[] args) throws ParserConfigurationException, SAXException, IOException, ServiceException, ClassNotFoundException, IDMapperException, ParseException, XMLStreamException, TransformerException, ConverterException { /* * Set the preference for this to work on your local machine */ Properties prop = new Properties();
// NOTE(review): this FileInputStream (and "in" further below) is never
// closed -- resource leak; consider try-with-resources.
prop.load(new FileInputStream("config.properties")); /* We keep a three dimensional type of versioning for the RDF dumps of WikiPathways. * First there is the software version. This indicates the incremental updates or changes in the code * responsible for the RDF generation (these files) * The schema version is to keep track of changes in the underlying data model. Base are the vocabulary.wikipathways.org * The latest revision is the highest revision number of the pathways converted into RDF. A pathway with a * higher revision number is not yet available in RDF. */
//TODO The versioning system is not fully implemented yet.
int softwareVersion = 0; int schemaVersion = 0; int latestRevision = 0; HashMap<String, String> organismTaxonomy = WpRDFFunctionLibrary.getOrganismsTaxonomyMapping();
// Model model = ModelFactory.createDefaultModel();
Model voidModel = ModelFactory.createDefaultModel();
// Model openPhactsLinkSets = ModelFactory.createDefaultModel();
// WpRDFFunctionLibrary.setModelPrefix(model);
WpRDFFunctionLibrary.setModelPrefix(voidModel); WpRDFFunctionLibrary.populateVoid(voidModel, organismTaxonomy); Model bridgeDbmodel = WpRDFFunctionLibrary.createBridgeDbModel(); IDMapperStack mapper = WpRDFFunctionLibrary.createBridgeDbMapper(prop); InputStream in = new FileInputStream("/tmp/BioDataSource.ttl"); bridgeDbmodel.read(in, "", "TURTLE"); File dir = new File(prop.getProperty("wikipathwaysDownloadDumps"));
// NOTE(review): listFiles() returns null when the path does not exist or is
// not a directory -- the loop below would then throw NPE; verify the
// configured "wikipathwaysDownloadDumps" path.
File[] rootFiles = dir.listFiles();
//the section below is only in case of analysis sets
for (File rootFile : rootFiles) { String fileName = FilenameUtils.removeExtension(rootFile.getName()); System.out.println(fileName); String[] identifiers = fileName.split("_"); System.out.println(fileName); Document currentGPML = basicCalls.openXmlFile(rootFile.getPath()); Element pathwayElement = (Element) currentGPML.getElementsByTagName("Pathway").item(0); String wpIdentifier = identifiers[identifiers.length-2]; String wpRevision = identifiers[identifiers.length-1];
// The WP id / revision parsed from the file name are stashed on the DOM so
// processGPMLFile() can read them back off the Pathway element.
pathwayElement.setAttribute("identifier", wpIdentifier); pathwayElement.setAttribute("revision", wpRevision);
// NOTE(review): latestRevision is an int passed by value -- the update done
// inside processGPMLFile() never reaches this scope, so the version string
// written below always uses the initial value (0). Likely a bug; see the
// TODO about the unfinished versioning system above.
processGPMLFile(currentGPML, latestRevision, prop, bridgeDbmodel, mapper, organismTaxonomy); } Date myDate = new Date(); SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); String myDateString = sdf.format(myDate); FileUtils.writeStringToFile(new File("latestVersion.txt"), "v"+schemaVersion+"."+softwareVersion+"."+latestRevision+"_"+myDateString);
// basicCalls.saveRDF2File(model, "/tmp/wpContent_v"+schemaVersion+"."+softwareVersion+"."+latestRevision+"_"+myDateString+".ttl", "TURTLE");
basicCalls.saveRDF2File(voidModel, "/tmp/void.ttl", "TURTLE");
// basicCalls.saveRDF2File(openPhactsLinkSets, "/tmp/opsLinkSets_v"+schemaVersion+"."+softwareVersion+"."+latestRevision+"_"+myDateString+".ttl", "TURTLE");
/*BufferedReader constructQueryText = new BufferedReader(new FileReader("sparqlQueries/DirectedInteraction.construct")); StringBuilder sb = new StringBuilder(); String line = constructQueryText.readLine(); while (line != null) { sb.append(line); sb.append('\n'); line = constructQueryText.readLine(); } String queryText = sb.toString(); */ /*Query query = QueryFactory.create(queryText); QueryExecution queryExecution = QueryExecutionFactory.create(query, model); Model results = queryExecution.execConstruct(); basicCalls.saveRDF2File(results, "/tmp/directedInteractions.ttl", "TURTLE"); */ }
/**
 * Converts every Pathway element of the given GPML DOM into its own Turtle
 * file under the configured rdfRepository directory (skipping files that
 * already exist). Reads the "identifier"/"revision" attributes that main()
 * stamped onto each Pathway element.
 *
 * NOTE(review): the latestRevision parameter is updated locally below but,
 * being a primitive passed by value, the new value is lost on return --
 * confirm whether this method was meant to return it.
 */
public static void processGPMLFile( Document wikiPathwaysDom, int latestRevision, Properties prop, Model bridgeDbmodel, IDMapperStack mapper, HashMap<String, String> organismTaxonomy ) throws ParserConfigurationException, SAXException, IOException, ServiceException, ClassNotFoundException, IDMapperException, ParseException, XMLStreamException, TransformerException, ConverterException { /* From here on the actual RDF conversion starts. The concatenated pathways into a single file is loaded and now * being processed in a straightforward way. First the pathway information is converted into RDF and then each individual * pathway element. */ NodeList pathwayElements = wikiPathwaysDom.getElementsByTagName("Pathway"); for (int i=0; i<pathwayElements.getLength(); i++){
Model pathwayModel = WpRDFFunctionLibrary.createPathwayModel(); // create empty rdf model
Model openPhactsLinkSets = ModelFactory.createDefaultModel(); String wpId = pathwayElements.item(i).getAttributes().getNamedItem("identifier").getTextContent(); String revision = pathwayElements.item(i).getAttributes().getNamedItem("revision").getTextContent(); String pathwayOrganism = ""; if (pathwayElements.item(i).getAttributes().getNamedItem("Organism") != null) pathwayOrganism = pathwayElements.item(i).getAttributes().getNamedItem("Organism").getTextContent().trim(); if (Integer.valueOf(revision) > latestRevision){ latestRevision = Integer.valueOf(revision); } File f = new File(prop.getProperty("rdfRepository") + "/" +wpId+"_r"+revision+".ttl"); System.out.println(f.getName());
// Only convert pathways whose output Turtle file does not exist yet.
if(!f.exists()) { Resource pwResource = WpRDFFunctionLibrary.addPathwayLevelTriple(pathwayModel, pathwayElements.item(i), organismTaxonomy);
// Get the comments
NodeList commentElements = ((Element) pathwayElements.item(i)).getElementsByTagName("Comment"); WpRDFFunctionLibrary.addCommentTriples(pathwayModel, pwResource, commentElements, wpId, revision);
// Get the Groups
NodeList groupElements = ((Element) pathwayElements.item(i)).getElementsByTagName("Group"); for (int n=0;n<groupElements.getLength(); n++){ WpRDFFunctionLibrary.addGroupTriples(pathwayModel, pwResource, groupElements.item(n), wpId, revision); }
// Get all the Datanodes
NodeList dataNodesElement = ((Element) pathwayElements.item(i)).getElementsByTagName("DataNode"); for (int j=0; j<dataNodesElement.getLength(); j++){ WpRDFFunctionLibrary.addDataNodeTriples(pathwayModel, pwResource, dataNodesElement.item(j), wpId, revision, bridgeDbmodel, mapper, openPhactsLinkSets); }
// Get all the lines
NodeList linesElement = ((Element) pathwayElements.item(i)).getElementsByTagName("Interaction"); for (int k=0; k<linesElement.getLength(); k++){ WpRDFFunctionLibrary.addLineTriples(pathwayModel, pwResource, linesElement.item(k), wpId, revision); }
//Get all the labels
NodeList labelsElement = ((Element) pathwayElements.item(i)).getElementsByTagName("Label"); for (int l=0; l<labelsElement.getLength(); l++){ WpRDFFunctionLibrary.addLabelTriples(pathwayModel, pwResource, labelsElement.item(l), wpId, revision); }
//Get the references. There are three casing variants of publicationxref in the wild, that is why the call is repeated three times.
NodeList referenceElements = ((Element) pathwayElements.item(i)).getElementsByTagName("bp:PublicationXref"); for (int m=0; m<referenceElements.getLength(); m++){ WpRDFFunctionLibrary.addReferenceTriples(pathwayModel, pwResource, referenceElements.item(m), wpId, revision); } NodeList referenceElements2 = ((Element) pathwayElements.item(i)).getElementsByTagName("bp:publicationXref"); for (int m=0; m<referenceElements2.getLength(); m++){ WpRDFFunctionLibrary.addReferenceTriples(pathwayModel, pwResource, referenceElements2.item(m), wpId, revision); } NodeList referenceElements3 = ((Element) pathwayElements.item(i)).getElementsByTagName("bp:PublicationXRef"); for (int m=0; m<referenceElements3.getLength(); m++){ WpRDFFunctionLibrary.addReferenceTriples(pathwayModel, pwResource, referenceElements3.item(m), wpId, revision); }
//Get the ontologies.
NodeList ontologyElements = ((Element) pathwayElements.item(i)).getElementsByTagName("bp:openControlledVocabulary"); for (int n=0; n<ontologyElements.getLength();n++){ WpRDFFunctionLibrary.addPathwayOntologyTriples(pathwayModel, pwResource, ontologyElements.item(n)); } System.out.println(wpId); basicCalls.saveRDF2File(pathwayModel, f.getAbsolutePath(), "TURTLE");
// model.add(pathwayModel);
pathwayModel.removeAll(); } } } }
package com.exedio.cope.instrument; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Arrays; import java.util.Date; import com.exedio.cope.AttributeValue; import com.exedio.cope.LengthViolationException; import com.exedio.cope.MandatoryViolationException; import com.exedio.cope.Type; import com.exedio.cope.UniqueViolationException; import com.exedio.cope.instrument.testmodel.DoubleUnique; import com.exedio.cope.instrument.testmodel.Qualified; import com.exedio.cope.instrument.testmodel.QualifiedName; import com.exedio.cope.instrument.testmodel.Standard; import com.exedio.cope.instrument.testmodel.Sub; import com.exedio.cope.instrument.testmodel.Super; import com.exedio.cope.instrument.testmodel.TypeNone; import com.exedio.cope.instrument.testmodel.TypePrivate; import com.exedio.cope.instrument.testmodel.sub.SubTarget; import com.exedio.cope.util.ReactivationConstructorDummy;
/**
 * Checks, via the reflection helpers inherited from InstrumentorTest
 * (assertConstructor / assertMethod / assertNoMethod), that the instrumentor
 * generated exactly the expected constructors and accessor methods -- with
 * the expected signatures, modifiers and declared exceptions -- on the
 * testmodel classes (Standard etc.).
 */
public class GeneratorTest extends InstrumentorTest {
// Shorthand aliases for the java.lang.reflect.Modifier bit masks used in the
// assertions below.
public static final int PUBLIC = Modifier.PUBLIC; public static final int PROTECTED = Modifier.PROTECTED; public static final int PRIVATE = Modifier.PRIVATE; public static final int STATIC = Modifier.STATIC; public static final int FINAL = Modifier.FINAL;
// Frequently asserted parameter/exception types (raw Class -- pre-generics
// style kept as-is).
final static Class STRING = String.class; final static Class MANDATORY_VIOLATION = MandatoryViolationException.class; final static Class UNIQUE_VIOLATION = UniqueViolationException.class; final static Class REACTIVATION_DUMMY = ReactivationConstructorDummy.class;
/** Asserts the complete generated API of the Standard testmodel class. */
public void testStandard() throws ClassNotFoundException { final Class standard = Standard.class;
// Generated "creation" constructor: one parameter per initial attribute.
assertConstructor(standard, new Class[]{
STRING, // notNullString
STRING, // readOnlyString
STRING, // initialString
int.class, // nativeInteger
long.class, // nativeLong
double.class, // nativeDouble
boolean.class, // nativeBoolean
Date.class, // mandatoryDate
}, PUBLIC, new Class[]{ MANDATORY_VIOLATION, });
// Generic (AttributeValue[]) and reactivation constructors are private.
assertConstructor(standard, new Class[]{(new AttributeValue[0]).getClass()}, PRIVATE); assertConstructor(standard, new Class[]{REACTIVATION_DUMMY, int.class}, PRIVATE);
// Plain attribute accessors; read-only attributes must get no setter.
assertMethod(standard, "getDefaultString", STRING, PUBLIC|FINAL); assertMethod(standard, "setDefaultString", new Class[]{STRING}, PUBLIC|FINAL); assertMethod(standard, "getNotNullString", STRING, PUBLIC|FINAL); assertMethod(standard, "setNotNullString", new Class[]{STRING}, PUBLIC|FINAL, new Class[]{MANDATORY_VIOLATION}); assertMethod(standard, "getReadOnlyString", STRING, PUBLIC|FINAL); assertNoMethod(standard, "setReadOnlyString", new Class[]{STRING}); assertMethod(standard, "getUniqueString", STRING, PUBLIC|FINAL); assertMethod(standard, "setUniqueString", new Class[]{STRING}, PUBLIC|FINAL, new Class[]{UNIQUE_VIOLATION}); assertMethod(standard, "findByUniqueString", new Class[]{STRING}, standard, PUBLIC|STATIC|FINAL); assertMethod(standard, "getInitialString", STRING, PUBLIC|FINAL); assertMethod(standard, "setInitialString", new Class[]{STRING}, PUBLIC|FINAL); assertMethod(standard, "getDefaultInteger", Integer.class, PUBLIC|FINAL); assertMethod(standard, "setDefaultInteger", new Class[]{Integer.class}, PUBLIC|FINAL); assertMethod(standard, "getNativeInteger", int.class, PUBLIC|FINAL); assertMethod(standard, "setNativeInteger", new Class[]{int.class}, PUBLIC|FINAL); assertMethod(standard, "getDefaultLong", Long.class, PUBLIC|FINAL); assertMethod(standard, "setDefaultLong", new Class[]{Long.class}, PUBLIC|FINAL); assertMethod(standard, "getNativeLong", long.class, PUBLIC|FINAL); assertMethod(standard, "setNativeLong", new Class[]{long.class}, PUBLIC|FINAL); assertMethod(standard, "getDefaultDouble", Double.class, PUBLIC|FINAL); assertMethod(standard, "setDefaultDouble", new Class[]{Double.class}, PUBLIC|FINAL); assertMethod(standard, "getNativeDouble", double.class, PUBLIC|FINAL); assertMethod(standard, "setNativeDouble", new Class[]{double.class}, PUBLIC|FINAL); assertMethod(standard, "getDefaultBoolean", Boolean.class, PUBLIC|FINAL); assertMethod(standard, "setDefaultBoolean", new Class[]{Boolean.class}, PUBLIC|FINAL); assertMethod(standard, "getNativeBoolean", boolean.class, PUBLIC|FINAL); assertMethod(standard, "setNativeBoolean", new Class[]{boolean.class}, PUBLIC|FINAL); assertMethod(standard, "getMandatoryDate", Date.class, PUBLIC|FINAL); assertMethod(standard, "setMandatoryDate", new Class[]{Date.class}, PUBLIC|FINAL, new Class[]{MANDATORY_VIOLATION}); assertMethod(standard, "touchMandatoryDate", new Class[]{}, PUBLIC|FINAL); assertMethod(standard, "getPrivateDate", Date.class, PRIVATE|FINAL); assertMethod(standard, "setPrivateDate", new Class[]{Date.class}, PRIVATE|FINAL); assertMethod(standard, "touchPrivateDate", new Class[]{}, PRIVATE|FINAL); assertMethod(standard, "getPrivateString", STRING, PRIVATE|FINAL); assertMethod(standard, "setPrivateString", new Class[]{STRING}, PRIVATE|FINAL);
// Per-accessor visibility/naming options (none/private/internal/nonfinal
// getter or setter variants).
assertNoMethod(standard, "getNoneGetterString"); assertMethod(standard, "setNoneGetterString", new Class[]{STRING}, PUBLIC|FINAL); assertMethod(standard, "getPrivateGetterString", STRING, PRIVATE|FINAL); assertMethod(standard, "setPrivateGetterString", new Class[]{STRING}, PUBLIC|FINAL); assertMethod(standard, "getInternalGetterStringInternal", STRING, PRIVATE|FINAL); assertMethod(standard, "setInternalGetterString", new Class[]{STRING}, PUBLIC|FINAL); assertNoMethod(standard, "getInternalGetterString"); assertMethod(standard, "getNoneSetterString", STRING, PUBLIC|FINAL); assertNoMethod(standard, "setNoneSetterString", new Class[]{STRING}); assertMethod(standard, "getPrivateSetterString", STRING, PUBLIC|FINAL); assertMethod(standard, "setPrivateSetterString", new Class[]{STRING}, PRIVATE|FINAL); assertMethod(standard, "getInternalSetterString", STRING, PUBLIC|FINAL); assertMethod(standard, "setInternalSetterStringInternal", new Class[]{STRING}, PRIVATE|FINAL); assertNoMethod(standard, "setInternalSetterString", new Class[]{STRING}); assertMethod(standard, "getNonfinalGetterString", STRING, PUBLIC); assertMethod(standard, "setNonfinalGetterString", new Class[]{STRING}, PROTECTED|FINAL); assertMethod(standard, "getNonfinalSetterString", STRING, PROTECTED|FINAL); assertMethod(standard, "setNonfinalSetterString", new Class[]{STRING}, PUBLIC); assertMethod(standard, "isAsIsBoolean", Boolean.class, PUBLIC|FINAL); assertNoMethod(standard, "getAsIsBoolean"); assertMethod(standard, "setAsIsBoolean", new Class[]{Boolean.class}, PUBLIC|FINAL);
// Compound unique constraint: individual setters plus a finder over both.
assertMethod(standard, "getDoubleUnique1", STRING, PUBLIC|FINAL); assertMethod(standard, "setDoubleUnique1", new Class[]{STRING}, PUBLIC|FINAL, new Class[]{UNIQUE_VIOLATION}); assertMethod(standard, "getDoubleUnique2", Integer.class, PUBLIC|FINAL); assertMethod(standard, "setDoubleUnique2", new Class[]{Integer.class}, PUBLIC|FINAL, new Class[]{UNIQUE_VIOLATION}); assertMethod(standard, "findByDoubleUnique", new Class[]{STRING, Integer.class}, standard, PUBLIC|STATIC|FINAL);
// Media attributes: full accessor families with varying visibility, and a
// no-setter variant.
assertMethod(standard, "isAnyMediaNull", boolean.class, PUBLIC|FINAL); assertMethod(standard, "getAnyMediaURL", STRING, PUBLIC|FINAL); assertMethod(standard, "getAnyMediaMimeMajor", STRING, PUBLIC|FINAL); assertMethod(standard, "getAnyMediaMimeMinor", STRING, PUBLIC|FINAL); assertMethod(standard, "getAnyMediaContentType", STRING, PUBLIC|FINAL); assertMethod(standard, "getAnyMediaLength", long.class, PUBLIC|FINAL); assertMethod(standard, "getAnyMediaLastModified", long.class, PUBLIC|FINAL); assertMethod(standard, "getAnyMediaData", InputStream.class, PUBLIC|FINAL); assertMethod(standard, "getAnyMediaData", new Class[]{File.class}, PUBLIC|FINAL, new Class[]{IOException.class}); assertMethod(standard, "setAnyMedia", new Class[]{InputStream.class, STRING, STRING}, PUBLIC|FINAL, new Class[]{IOException.class}); assertMethod(standard, "setAnyMedia", new Class[]{File.class, STRING, STRING}, PUBLIC|FINAL, new Class[]{IOException.class}); assertMethod(standard, "isMajorMediaNull", boolean.class, FINAL); assertMethod(standard, "getMajorMediaURL", STRING, FINAL); assertMethod(standard, "getMajorMediaMimeMajor", STRING, FINAL); assertMethod(standard, "getMajorMediaMimeMinor", STRING, FINAL); assertMethod(standard, "getMajorMediaContentType", STRING, FINAL); assertMethod(standard, "getMajorMediaLength", long.class, FINAL); assertMethod(standard, "getMajorMediaLastModified", long.class, FINAL); assertMethod(standard, "getMajorMediaData", InputStream.class, FINAL); assertMethod(standard, "getMajorMediaData", new Class[]{File.class}, FINAL, new Class[]{IOException.class}); assertMethod(standard, "setMajorMedia", new Class[]{InputStream.class, STRING}, FINAL, new Class[]{IOException.class}); assertMethod(standard, "setMajorMedia", new Class[]{File.class, STRING}, FINAL, new Class[]{IOException.class}); assertMethod(standard, "isMinorMediaNull", boolean.class, PROTECTED|FINAL); assertMethod(standard, "getMinorMediaURL", STRING, PROTECTED|FINAL); assertMethod(standard, "getMinorMediaMimeMajor", STRING, PROTECTED|FINAL); assertMethod(standard, "getMinorMediaMimeMinor", STRING, PROTECTED|FINAL); assertMethod(standard, "getMinorMediaContentType", STRING, PROTECTED|FINAL); assertMethod(standard, "getMinorMediaLength", long.class, PROTECTED|FINAL); assertMethod(standard, "getMinorMediaLastModified", long.class, PROTECTED|FINAL); assertMethod(standard, "getMinorMediaData", InputStream.class, PROTECTED|FINAL); assertMethod(standard, "getMinorMediaData", new Class[]{File.class}, PROTECTED|FINAL, new Class[]{IOException.class}); assertMethod(standard, "setMinorMedia", new Class[]{InputStream.class}, PROTECTED|FINAL, new Class[]{IOException.class}); assertMethod(standard, "setMinorMedia", new Class[]{File.class}, PROTECTED|FINAL, new Class[]{IOException.class}); assertMethod(standard, "isNoSetterMediaNull", boolean.class, PUBLIC|FINAL); assertMethod(standard, "getNoSetterMediaURL", STRING, PUBLIC|FINAL); assertMethod(standard, "getNoSetterMediaMimeMajor", STRING, PUBLIC|FINAL); assertMethod(standard, "getNoSetterMediaMimeMinor", STRING, PUBLIC|FINAL); assertMethod(standard, "getNoSetterMediaContentType", STRING, PUBLIC|FINAL); assertMethod(standard, "getNoSetterMediaLength", long.class, PUBLIC|FINAL); assertMethod(standard, "getNoSetterMediaLastModified", long.class, PUBLIC|FINAL); assertMethod(standard, "getNoSetterMediaData", InputStream.class, PUBLIC|FINAL); assertMethod(standard, "getNoSetterMediaData", new Class[]{File.class}, PUBLIC|FINAL, new Class[]{IOException.class}); assertNoMethod(standard, "setNoSetterMedia", new Class[]{InputStream.class,STRING,STRING}); assertNoMethod(standard, "setNoSetterMedia", new Class[]{File.class,STRING,STRING}); assertMethod(standard, "isPrivateSetterMediaNull", boolean.class, PUBLIC|FINAL); assertMethod(standard, "getPrivateSetterMediaURL", STRING, PUBLIC|FINAL); assertMethod(standard, "getPrivateSetterMediaMimeMajor", STRING, PUBLIC|FINAL); assertMethod(standard, "getPrivateSetterMediaMimeMinor", STRING, PUBLIC|FINAL); assertMethod(standard, "getPrivateSetterMediaContentType", STRING, PUBLIC|FINAL); assertMethod(standard, "getPrivateSetterMediaLength", long.class, PUBLIC|FINAL); assertMethod(standard, "getPrivateSetterMediaLastModified", long.class, PUBLIC|FINAL); assertMethod(standard, "getPrivateSetterMediaData", InputStream.class, PUBLIC|FINAL); assertMethod(standard, "getPrivateSetterMediaData", new Class[]{File.class}, PUBLIC|FINAL, new Class[]{IOException.class}); assertMethod(standard, "setPrivateSetterMedia", new Class[]{InputStream.class,STRING,STRING}, PRIVATE|FINAL, new Class[]{IOException.class}); assertMethod(standard, "setPrivateSetterMedia", new Class[]{File.class,STRING,STRING}, PRIVATE|FINAL, new Class[]{IOException.class});
// Hash attributes expose only a check method. (testStandard() continues
// beyond this chunk.)
assertMethod(standard, "checkPublicHash", new Class[]{STRING}, Boolean.TYPE, PUBLIC|FINAL); assertMethod(standard, "checkPrivateHash", new Class[]{STRING}, Boolean.TYPE, PRIVATE|FINAL);
assertMethod(standard, "checkMandatoryHash", new Class[]{STRING}, Boolean.TYPE, PUBLIC|FINAL); assertMethod(standard, "setPublicHash", new Class[]{STRING}, PUBLIC|FINAL); assertMethod(standard, "setPrivateHash", new Class[]{STRING}, PRIVATE|FINAL); assertMethod(standard, "setMandatoryHash", new Class[]{STRING}, PUBLIC|FINAL, new Class[]{MANDATORY_VIOLATION}); assertNoMethod(standard, "getPublicHash"); assertNoMethod(standard, "getPrivateHash"); assertNoMethod(standard, "getMandatoryHash"); assertField(standard, "TYPE", Type.class, PUBLIC|STATIC|FINAL); final Class typeNone = TypeNone.class; assertConstructor(typeNone, new Class[]{}, PRIVATE); assertConstructor(typeNone, new Class[]{(new AttributeValue[0]).getClass()}, PUBLIC); // @cope.generic.constructor public assertConstructor(typeNone, new Class[]{REACTIVATION_DUMMY, int.class}, PRIVATE); assertMethod(typeNone, "getDefaultString", STRING, PUBLIC|FINAL); assertMethod(typeNone, "setDefaultString", new Class[]{STRING}, PUBLIC|FINAL); assertNoField(typeNone, "TYPE"); final Class typePrivate = TypePrivate.class; assertConstructor(typePrivate, new Class[]{}, PUBLIC); assertConstructor(typePrivate, new Class[]{(new AttributeValue[0]).getClass()}, PRIVATE); assertConstructor(typePrivate, new Class[]{REACTIVATION_DUMMY, int.class}, PRIVATE); assertMethod(typePrivate, "getDefaultString", STRING, PUBLIC|FINAL); assertMethod(typePrivate, "setDefaultString", new Class[]{STRING}, PUBLIC|FINAL); assertField(typePrivate, "TYPE", Type.class, PRIVATE|STATIC|FINAL); } public void testDoubleUnique() throws ClassNotFoundException { final Class doubleUnique = DoubleUnique.class; final Class subTarget = SubTarget.class; assertConstructor(doubleUnique, new Class[]{STRING, subTarget}, PUBLIC, new Class[]{MANDATORY_VIOLATION, UNIQUE_VIOLATION}); assertMethod(doubleUnique, "getString", STRING, PUBLIC|FINAL); assertMethod(doubleUnique, "getItem", subTarget, PUBLIC|FINAL); assertMethod(doubleUnique, "findByUnique", new Class[]{STRING, 
subTarget}, doubleUnique, PUBLIC|STATIC|FINAL); } public void testQualified() throws ClassNotFoundException { final Class qualified = Qualified.class; final Class qualifiedString = QualifiedName.class; assertMethod(qualified, "getNameQualifier", new Class[]{STRING}, qualifiedString, PUBLIC|FINAL); assertMethod(qualified, "getNumber", new Class[]{STRING}, Integer.class, PUBLIC|FINAL); assertMethod(qualified, "setNumber", new Class[]{STRING, int.class}, PUBLIC|FINAL); assertMethod(qualified, "getOptionalNumber", new Class[]{STRING}, Integer.class, PUBLIC|FINAL); assertMethod(qualified, "setOptionalNumber", new Class[]{STRING, Integer.class}, PUBLIC|FINAL); assertNoMethod(qualified, "getNoneGetterNumber", new Class[]{STRING}); assertMethod(qualified, "setNoneGetterNumber", new Class[]{STRING, int.class}, PUBLIC|FINAL); assertMethod(qualified, "getPrivateGetterNumber", new Class[]{STRING}, Integer.class, PRIVATE|FINAL); assertMethod(qualified, "setPrivateGetterNumber", new Class[]{STRING, int.class}, PUBLIC|FINAL); assertMethod(qualified, "getInternalGetterNumberInternal", new Class[]{STRING}, Integer.class, PRIVATE|FINAL); assertMethod(qualified, "setInternalGetterNumber", new Class[]{STRING, int.class}, PUBLIC|FINAL); assertNoMethod(qualified, "getInternalGetterNumber", new Class[]{STRING}); assertMethod(qualified, "getNoneSetterNumber", new Class[]{STRING}, Integer.class, PUBLIC|FINAL); assertNoMethod(qualified, "setNoneSetterNumber", new Class[]{STRING, int.class}); assertNoMethod(qualified, "setNoneSetterNumber", new Class[]{STRING, Integer.class}); assertMethod(qualified, "getPrivateSetterNumber", new Class[]{STRING}, Integer.class, PUBLIC|FINAL); assertMethod(qualified, "setPrivateSetterNumber", new Class[]{STRING, int.class}, PRIVATE|FINAL); assertMethod(qualified, "getInternalSetterNumber", new Class[]{STRING}, Integer.class, PUBLIC|FINAL); assertMethod(qualified, "setInternalSetterNumberInternal", new Class[]{STRING, int.class}, PRIVATE|FINAL); 
assertNoMethod(qualified, "setInternalSetterNumber", new Class[]{STRING, int.class}); assertNoMethod(qualified, "setInternalSetterNumber", new Class[]{STRING, Integer.class}); } public void testHierarchy() throws ClassNotFoundException { final Class superc = Super.class; final Class sub = Sub.class; assertConstructor(superc, new Class[]{ STRING, // superMandatory Integer.class, // superInitial }, PUBLIC, new Class[]{ LengthViolationException.class, MANDATORY_VIOLATION, }); assertConstructor(superc, new Class[]{(new AttributeValue[0]).getClass()}, PROTECTED); assertConstructor(superc, new Class[]{REACTIVATION_DUMMY, int.class}, PROTECTED); assertConstructor(sub, new Class[]{ STRING, // superMandatory Integer.class, // superInitial boolean.class, // subMandatory Long.class, // subInitial }, PUBLIC, new Class[]{ LengthViolationException.class, MANDATORY_VIOLATION, }); assertConstructor(sub, new Class[]{(new AttributeValue[0]).getClass()}, PRIVATE); assertConstructor(sub, new Class[]{REACTIVATION_DUMMY, int.class}, PRIVATE); } void assertField( final Class javaClass, final String name, final Class returnType, final int modifiers) { final Field field; try { field = javaClass.getDeclaredField(name); } catch(NoSuchFieldException e) { throw new AssertionError(e); } assertEquals(returnType, field.getType()); assertEquals(modifiers, field.getModifiers()); } void assertNoField(final Class javaClass, final String name) { try { javaClass.getDeclaredField(name); fail("field " + name + " exists."); } catch(NoSuchFieldException e) { // success } } void assertMethod(final Class javaClass, final String name, final Class returnType, final int modifiers) { assertMethod(javaClass, name, null, returnType, modifiers, new Class[]{}); } void assertMethod(final Class javaClass, final String name, final Class[] parameterTypes, final int modifiers) { assertMethod(javaClass, name, parameterTypes, Void.TYPE, modifiers, new Class[]{}); } void assertMethod(final Class javaClass, final String 
name, final Class[] parameterTypes, final int modifiers, final Class[] exceptionTypes) { assertMethod(javaClass, name, parameterTypes, Void.TYPE, modifiers, exceptionTypes); } void assertMethod(final Class javaClass, final String name, final Class[] parameterTypes, final Class returnType, final int modifiers) { assertMethod(javaClass, name, parameterTypes, returnType, modifiers, new Class[]{}); } void assertMethod( final Class javaClass, final String name, final Class[] parameterTypes, final Class returnType, final int modifiers, final Class[] exceptionTypes) { final Method method; try { method = javaClass.getDeclaredMethod(name, parameterTypes); } catch(NoSuchMethodException e) { throw new AssertionError(e); } assertEquals(returnType, method.getReturnType()); assertEquals(modifiers, method.getModifiers()); assertEquals(Arrays.asList(exceptionTypes), Arrays.asList(method.getExceptionTypes())); } void assertNoMethod(final Class javaClass, final String name) { assertNoMethod(javaClass, name, null); } void assertNoMethod(final Class javaClass, final String name, final Class[] parameterTypes) { try { javaClass.getDeclaredMethod(name, parameterTypes); fail("method " + name + " exists."); } catch(NoSuchMethodException e) { // success } } void assertConstructor( final Class javaClass, final Class[] parameterTypes, final int modifiers) { assertConstructor(javaClass, parameterTypes, modifiers, new Class[]{}); } void assertConstructor( final Class javaClass, final Class[] parameterTypes, final int modifiers, final Class[] exceptionTypes) { final Constructor constructor; try { constructor = javaClass.getDeclaredConstructor(parameterTypes); } catch(NoSuchMethodException e) { throw new AssertionError(e); } assertEquals(modifiers, constructor.getModifiers()); assertEquals(Arrays.asList(exceptionTypes), Arrays.asList(constructor.getExceptionTypes())); } }
package org.intermine.dataconversion;

import java.io.Reader;
import java.util.Set;
import java.util.Stack;
import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
import java.util.Enumeration;
import java.util.TreeSet;
import java.util.regex.Pattern;

import org.apache.commons.lang.StringUtils;
import org.xml.sax.InputSource;
import org.xml.sax.Parser;
import org.exolab.castor.xml.schema.reader.Sax2ComponentReader;
import org.exolab.castor.xml.schema.reader.SchemaUnmarshaller;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.parsers.SAXParser;
import org.exolab.castor.xml.schema.*;
import org.intermine.util.StringUtil;
import org.apache.log4j.Logger;

/**
 * Derive information about an XML Schema required when converting the schema
 * to an InterMine model and when converting XML conforming to the schema
 * into InterMine fulldata XML. Public methods to access class names and
 * key/reference pair information.
 *
 * @author Richard Smith
 * @author Andrew Varley
 * @author Thomas Riley
 */
public class XmlMetaData
{
    protected static final Logger LOG = Logger.getLogger(XmlMetaData.class);

    /** stack of slash-separated element paths built up during the schema walk */
    protected Stack paths;
    /** path -&gt; Set of key fields */
    protected Map keyFields;
    /** key name -&gt; path */
    protected Map keyNameToPath;
    /** key name -&gt; field name */
    protected Map keyNameToField;
    /** path -&gt; Set of referring fields */
    protected Map keyrefFields;
    /** path+"/"+field -&gt; key name */
    protected Map keyrefFieldToKey;
    /** key/keyref xpath -&gt; regex pattern matching concrete element paths */
    protected Map xpathToRegex;
    /** element path -&gt; field name, for elements that are really references */
    protected Map referenceElements;
    protected Map keyNames;
    /** element path -&gt; generated class name */
    protected Map clsNameMap;

    /**
     * Construct with a reader for the XML Schema.
     *
     * @param xsdReader reader pointing at an XML Schema
     * @throws Exception if anything goes wrong
     */
    public XmlMetaData(Reader xsdReader) throws Exception {
        initMaps();
        SchemaUnmarshaller schemaUnmarshaller = new SchemaUnmarshaller();
        Sax2ComponentReader handler = new Sax2ComponentReader(schemaUnmarshaller);
        SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setValidating(false);
        SAXParser saxParser = factory.newSAXParser();
        Parser parser = saxParser.getParser();
        parser.setDocumentHandler(handler);
        parser.parse(new InputSource(xsdReader));
        processSchema(schemaUnmarshaller.getSchema());
    }

    /**
     * Construct with a Castor Schema object describing an XML Schema.
     *
     * @param schema description of XML Schema
     * @throws Exception if anything goes wrong
     */
    public XmlMetaData(Schema schema) throws Exception {
        initMaps();
        processSchema(schema);
    }

    /**
     * Initialise all lookup maps - shared by both constructors.
     */
    private void initMaps() {
        paths = new Stack();
        keyFields = new HashMap();
        keyNameToPath = new HashMap();
        keyNameToField = new HashMap();
        keyrefFields = new HashMap();
        keyrefFieldToKey = new HashMap();
        clsNameMap = new HashMap();
        keyNames = new HashMap();
        xpathToRegex = new HashMap();
        referenceElements = new HashMap();
    }

    /**
     * Walk the schema, populate the lookup maps and log the result - shared
     * by both constructors.
     *
     * @param schema description of XML Schema
     * @throws Exception if anything goes wrong
     */
    private void processSchema(Schema schema) throws Exception {
        buildRefsMap(schema);
        filterReferenceElements();
        LOG.info("clsNameMap: " + clsNameMap);
        LOG.info(toString());
    }

    /**
     * Return true if the given field on the given element path is a key.
     *
     * @param path the path to examine
     * @param field the field name
     * @return true if field is a key field
     */
    public boolean isKeyField(String path, String field) {
        return getKeyFields(path).contains(field);
    }

    /**
     * Given a path, return a Set of field names that are keys. If the path
     * has no key fields, an empty Set will be returned.
     *
     * @param path path
     * @return Set of field names
     */
    public Set getKeyFields(String path) {
        Set set = (Set) keyFields.get(path);
        if (set == null) {
            return new TreeSet();
        } else {
            return set;
        }
    }

    /**
     * Return true if the given field on the given element path is a reference.
     *
     * @param path the path to examine
     * @param field the field name
     * @return true if field is a reference field
     */
    public boolean isReferenceField(String path, String field) {
        return getReferenceFields(path).contains(field);
    }

    /**
     * Given a path, return a Set of field names that are references. If the
     * path has no reference fields, an empty Set will be returned.
     *
     * @param path path
     * @return Set of field names
     */
    public Set getReferenceFields(String path) {
        Set set = (Set) keyrefFields.get(path);
        if (set == null) {
            return new TreeSet();
        } else {
            return set;
        }
    }

    /**
     * Return the name of the key referenced by the given reference field.
     *
     * @param path path to element with reference field
     * @param field the name of the reference field
     * @return name of key referenced
     */
    public String getReferencingKeyName(String path, String field) {
        return (String) keyrefFieldToKey.get(path + "/" + field);
    }

    /**
     * Return the element path associated with the given key.
     *
     * @param key the key name
     * @return the associated element path
     * @see #getKeyField
     */
    public String getKeyPath(String key) {
        return (String) keyNameToPath.get(key);
    }

    /**
     * Return the field name associated with the given key.
     *
     * @param key the key name
     * @return the associated field name
     * @see #getKeyPath
     */
    public String getKeyField(String key) {
        return (String) keyNameToField.get(key);
    }

    /**
     * Return true if given path refers to an element which contains
     * a single reference attribute. In which case the element as a
     * whole is treated as a reference.
     *
     * @param path path to element
     * @return true if element is reference (single reference attribute)
     */
    public boolean isReferenceElement(String path) {
        return referenceElements.containsKey(path);
    }

    /**
     * Given a path that refers to a reference element - it has a single
     * reference attribute - return the name of that reference attribute.
     *
     * @param path path to reference element
     * @return name of element's single reference attribute
     */
    public String getReferenceElementField(String path) {
        return (String) referenceElements.get(path);
    }

    /**
     * Return a classname for the element specified by xpath.
     * Will possibly have _EnclosingClass... appended according to element
     * nesting.
     *
     * @param xpath the path to examine
     * @return the generated class name, or null if no mapping can be found
     */
    public String getClsNameFromXPath(String xpath) {
        // If xpath is a reference element, then we resolve the target
        // path using the referring key and call this method again
        if (isReferenceElement(xpath)) {
            String field = getReferenceElementField(xpath);
            String key = getReferencingKeyName(xpath, field);
            String xpath2 = getKeyPath(key);
            LOG.debug("getClsNameFromXPath(\"" + xpath + "\") - is reference element, "
                    + "recursing with path \"" + xpath2 + "\"");
            return getClsNameFromXPath(xpath2);
        }
        // Try and map directly
        String name = (String) clsNameMap.get(xpath);
        if (name != null) {
            return name;
        }
        // If we don't map directly, then try matching via regular expression
        String regex = (String) xpathToRegex.get(xpath);
        if (regex != null) {
            LOG.debug("getClsNameFromXPath(\"" + xpath + "\") - matching with regex \""
                    + regex + "\"");
            Pattern pattern = Pattern.compile(regex);
            Iterator iter = clsNameMap.keySet().iterator();
            while (iter.hasNext()) {
                String path = (String) iter.next();
                if (pattern.matcher(path).matches()) {
                    name = (String) clsNameMap.get(path);
                    break;
                }
            }
        }
        LOG.debug("getClsNameFromXPath(\"" + xpath + "\") - returning \"" + name + "\"");
        return name;
    }

    /**
     * For a given full element path, return the set of key field names. This method
     * finds the xpath that keys the key field map via <code>getKeyXPathMatchingPath</code>.
     *
     * @param path the full element path
     * @return set of key field names
     */
    public Set getKeyFieldsForPath(String path) {
        String xpath = getKeyXPathMatchingPath(path);
        if (!xpath.equals(path)) {
            LOG.debug("getKeyFieldsForPath() found matching xpath " + xpath
                    + " for path " + path);
        }
        return this.getKeyFields(xpath);
    }

    /**
     * For a given path, return the xpath that keys the key field set associated with
     * this path. Use this method when you have constructed your own path.
     *
     * @param path a full path
     * @return xpath that keys key field set for element at given path
     */
    public String getKeyXPathMatchingPath(String path) {
        Iterator iter = keyFields.keySet().iterator();
        while (iter.hasNext()) {
            String xpath = (String) iter.next();
            String regex = (String) xpathToRegex.get(xpath);
            if (regex != null) {
                // compile regex and try to match path against it
                Pattern pattern = Pattern.compile(regex);
                if (pattern.matcher(path).matches()) {
                    // path matched this key - get keyfields for key
                    LOG.debug("getKeyXPathMatchingPath() found matching xpath " + xpath
                            + " for path " + path);
                    return xpath;
                }
            }
        }
        // no key matched - fall back to the path itself
        return path;
    }

    /**
     * Process every top-level element declaration in the schema.
     *
     * @param schema the schema to walk
     * @throws Exception if anything goes wrong
     */
    private void buildRefsMap(Schema schema) throws Exception {
        Enumeration structures = schema.getElementDecls();
        while (structures.hasMoreElements()) {
            ElementDecl e = (ElementDecl) structures.nextElement();
            processElementDecl(e, false);
        }
    }

    /**
     * Recursively process an element declaration: record its generated class
     * name, any reference-element candidate, its key/keyref constraints and
     * then descend into its content model.
     *
     * @param eDecl the element declaration
     * @param isCollection true if this element may occur more than once in its parent
     * @throws Exception if anything goes wrong
     */
    private void processElementDecl(ElementDecl eDecl, boolean isCollection) throws Exception {
        String path = eDecl.getName();
        if (!paths.empty()) {
            path = (String) paths.peek() + "/" + path;
        }
        if (path.length() > 1000) {
            // TODO: Infinite recursion - crude guard against self-referencing schemas
            return;
        }
        LOG.debug("pushing path: " + path);
        paths.push(path);
        String clsName = null;
        XMLType xmlType = eDecl.getType();
        // Record those elements that are actually references: a complex type
        // with exactly one attribute is a candidate, checked at end of parse
        if (xmlType != null && xmlType.isComplexType()) {
            Enumeration e1 = ((ComplexType) xmlType).getAttributeDecls();
            AttributeDecl attrib = null;
            if (e1.hasMoreElements() && ((attrib = (AttributeDecl) e1.nextElement()) != null)
                    && !e1.hasMoreElements()) {
                // Add as candidate, these are checked at the end of the parse
                referenceElements.put(path, attrib.getName());
            }
        }
        isCollection = isCollection || (eDecl.getMaxOccurs() < 0) || (eDecl.getMaxOccurs() > 1);
        LOG.debug("Processing path: " + path + ", isCollection = " + isCollection
                + (xmlType == null ? "" : ", isComplexType = " + xmlType.isComplexType()
                + ", isSimpleType = " + xmlType.isSimpleType()
                + ", xmlType.getName() = " + xmlType.getName())
                + ", isReference = " + eDecl.isReference());
        if (eDecl.isReference()) {
            // nothing needs to be done to name
            clsName = eDecl.getReference().getName();
        } else if ((xmlType != null) && xmlType.isSimpleType() && isCollection) {
            // repeated simple type - qualify with enclosing class name
            clsName = eDecl.getName() + "_"
                    + ((String) clsNameMap.get(path.substring(0, path.lastIndexOf('/'))));
        } else if ((xmlType != null) && (xmlType.getName() != null)) {
            // named (complex) type
            // NOTE(review): a later branch testing isComplexType() && getName() != null
            // was unreachable (subsumed by this condition) and has been removed
            clsName = xmlType.getName();
        } else if (xmlType != null && (xmlType.isComplexType()
                || (xmlType.isSimpleType() && isCollection))) {
            LOG.debug("anon complex type");
            // anon complex type - qualify with enclosing class name if nested
            String encPath = null;
            if (path.indexOf('/') >= 0) {
                encPath = path.substring(0, path.lastIndexOf('/'));
                clsName = eDecl.getName() + "_" + (String) clsNameMap.get(encPath);
            } else {
                clsName = eDecl.getName();
            }
        }
        if (clsName != null) {
            LOG.debug("clsName = " + clsName);
            clsNameMap.put(path, StringUtil.capitalise(clsName));
        }
        findRefs(eDecl);
        if (eDecl.getType().isComplexType()) {
            ComplexType complexType = (ComplexType) eDecl.getType();
            LOG.debug("processContentModel");
            processContentModelGroup(complexType, false);
            // also walk the base type's content model for derived complex types
            if (complexType.getBaseType() != null && complexType.getBaseType().isComplexType()) {
                LOG.error("processContentModel(parent)");
                processContentModelGroup((ComplexType) complexType.getBaseType(), false);
            }
        }
        path = (String) paths.pop();
        LOG.debug("popped path: " + path);
    }

    /**
     * Walk a content model group, recursing into nested groups and processing
     * each contained element declaration.
     *
     * @param cmGroup the content model group
     * @param isCollection true if the enclosing context may repeat
     * @throws Exception if anything goes wrong
     */
    private void processContentModelGroup(ContentModelGroup cmGroup, boolean isCollection)
        throws Exception {
        if (cmGroup instanceof Group) {
            if ((((Group) cmGroup).getMaxOccurs() < 0) || (((Group) cmGroup).getMaxOccurs() > 1)) {
                isCollection = true;
            }
        }
        Enumeration cmGroupEnum = cmGroup.enumerate();
        while (cmGroupEnum.hasMoreElements()) {
            Structure struc = (Structure) cmGroupEnum.nextElement();
            switch (struc.getStructureType()) {
                case Structure.ELEMENT:
                    LOG.debug("process element");
                    processElementDecl((ElementDecl) struc, isCollection);
                    break;
                case Structure.GROUP:
                    LOG.debug("process group");
                    // handle nested groups
                    processContentModelGroup((Group) struc, isCollection);
                    break;
                default:
                    break;
            }
        }
    }

    /**
     * Drop reference-element candidates whose single attribute turned out not
     * to be a keyref field once the whole schema has been parsed.
     */
    private void filterReferenceElements() {
        Iterator iter = this.referenceElements.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry entry = (Map.Entry) iter.next();
            String path = (String) entry.getKey();
            String field = (String) entry.getValue();
            if (!isReferenceField(path, field)) {
                iter.remove();
            }
        }
    }

    /**
     * Record the key and keyref identity constraints declared on an element.
     * Selector xpaths of "." (self), ".//x" (any descendent) and "x" (child)
     * are supported; keys on more than one field are skipped, keyrefs on more
     * than one field are an error.
     *
     * @param eDecl the element declaration to examine
     * @throws Exception if a keyref uses more than one field
     */
    private void findRefs(ElementDecl eDecl) throws Exception {
        Map keys = new HashMap();
        Map keyrefs = new HashMap();
        Enumeration idCons = eDecl.getIdentityConstraints();
        while (idCons.hasMoreElements()) {
            IdentityConstraint idCon = (IdentityConstraint) idCons.nextElement();
            if (idCon.getStructureType() == Structure.KEYREF) {
                keyrefs.put(idCon.getName(), idCon);
            } else if (idCon.getStructureType() == Structure.KEY) {
                keys.put(idCon.getName(), idCon);
            }
        }
        Iterator iter = keys.values().iterator();
        while (iter.hasNext()) {
            Key key = (Key) iter.next();
            String selector = key.getSelector().getXPath();
            String[] selectors = StringUtils.split(selector, '|');
            for (int i = 0; i < selectors.length; i++) {
                String path = null;
                String regex = null;
                if (selectors[i].equals(".")) {
                    regex = "^" + (String) paths.peek() + "$";
                    path = (String) paths.peek();
                } else if (selectors[i].startsWith(".//")) {
                    // all descendents - match at any depth below current path
                    regex = "^" + paths.peek() + "/.+/" + selectors[i].substring(3) + "$";
                    regex += "|^" + paths.peek() + "/" + selectors[i].substring(3) + "$";
                    path = paths.peek() + "/" + selectors[i];
                } else {
                    regex = "^" + paths.peek() + "/" + selectors[i] + "$";
                    path = paths.peek() + "/" + selectors[i];
                }
                Enumeration keyFieldEnum = key.getFields();
                String field = ((IdentityField) keyFieldEnum.nextElement()).getXPath();
                if (keyFieldEnum.hasMoreElements()) {
                    // deliberately tolerated: multi-field keys are skipped, not fatal
                    LOG.debug("skipping key " + key.getName() + " on more than one field");
                    continue;
                }
                if (field.startsWith("@")) {
                    // strip attribute marker
                    field = field.substring(field.indexOf('@') + 1);
                }
                Set fields = (Set) keyFields.get(path);
                if (fields == null) {
                    fields = new TreeSet();
                    keyFields.put(path, fields);
                }
                fields.add(field);
                xpathToRegex.put(path, regex);
                keyNameToPath.put(key.getName(), path);
                keyNameToField.put(key.getName(), field);
                LOG.debug("found key name:" + key.getName() + " path:" + path
                        + " field:" + field);
            }
        }
        iter = keyrefs.values().iterator();
        while (iter.hasNext()) {
            KeyRef keyref = (KeyRef) iter.next();
            String path = null;
            String selector = keyref.getSelector().getXPath();
            if (selector.equals(".")) {
                path = (String) paths.peek();
            } else if (selector.startsWith(".//")) {
                // all descendents
                path = paths.peek() + "/" + selector.substring(3);
            } else {
                path = paths.peek() + "/" + selector;
            }
            Enumeration keyrefEnum = keyref.getFields();
            String field = ((IdentityField) keyrefEnum.nextElement()).getXPath();
            if (keyrefEnum.hasMoreElements()) {
                throw new Exception("Unable to deal with KeyRefs on more than one field");
            }
            if (field.startsWith("@")) {
                field = field.substring(field.indexOf('@') + 1);
            }
            Set fields = (Set) keyrefFields.get(path);
            if (fields == null) {
                fields = new TreeSet();
                keyrefFields.put(path, fields);
            }
            fields.add(field);
            keyrefFieldToKey.put(path + "/" + field, keyref.getRefer());
            LOG.debug("keyref path:" + path + " field:" + field + " refer:"
                    + keyref.getRefer() + " refid:" + keyNames.get(keyref.getRefer()));
        }
    }

    /**
     * @see Object#toString
     */
    public String toString() {
        String endl = System.getProperty("line.separator");
        StringBuffer sb = new StringBuffer();
        Iterator iter = keyFields.keySet().iterator();
        sb.append("\n\n ========= keyFields ========= " + endl);
        while (iter.hasNext()) {
            Object path = iter.next();
            sb.append(path + endl);
            sb.append("\t\t" + keyFields.get((String) path) + endl);
        }
        iter = keyrefFields.keySet().iterator();
        sb.append("\n\n ========= keyrefFields ========= " + endl);
        while (iter.hasNext()) {
            Object path = iter.next();
            sb.append(path + endl);
            Iterator fields = getReferenceFields((String) path).iterator();
            while (fields.hasNext()) {
                String field = (String) fields.next();
                sb.append("\t\t" + field + " -> "
                        + getReferencingKeyName((String) path, field) + endl);
            }
        }
        iter = referenceElements.keySet().iterator();
        sb.append("\n\n ======== reference elements ======== " + endl);
        while (iter.hasNext()) {
            String path = (String) iter.next();
            String field = (String) referenceElements.get(path);
            sb.append("\t\t" + path + " -> " + field + endl);
        }
        return sb.toString();
    }
}
import org.junit.Test; import org.junit.Ignore; import java.util.Arrays; import java.util.Collection; import static org.junit.Assert.assertEquals; public class RaindropConverterTest { private RaindropConverter raindropConverter = new RaindropConverter(); @Test public void soundFor1Is1() { assertEquals("1", raindropConverter.convert(1)); } @Ignore("Remove to run test") @Test public void soundFor3OrNumberWithOnlyDistinctFactorOf3() { assertEquals("Pling", raindropConverter.convert(3)); assertEquals("Pling", raindropConverter.convert(6)); assertEquals("Pling", raindropConverter.convert(9)); assertEquals("Pling", raindropConverter.convert(27)); } @Ignore("Remove to run test") @Test public void soundFor5OrNumberWithOnlyDistinctFactorOf5IsPlang() { assertEquals("Plang", raindropConverter.convert(5)); assertEquals("Plang", raindropConverter.convert(10)); assertEquals("Plang", raindropConverter.convert(25)); assertEquals("Plang", raindropConverter.convert(3125)); } @Ignore("Remove to run test") @Test public void soundFor7OrNumberWithOnlyDistinctFactorOf7IsPlong() { assertEquals("Plong", raindropConverter.convert(7)); assertEquals("Plong", raindropConverter.convert(14)); assertEquals("Plong", raindropConverter.convert(49)); } @Ignore("Remove to run test") @Test public void noSoundFor2Cubed() { assertEquals("8", raindropConverter.convert(8)); } @Ignore("Remove to run test") @Test public void soundFor15IsPlingPlang() { assertEquals("PlingPlang", raindropConverter.convert(15)); } @Ignore("Remove to run test") @Test public void soundFor21IsPlingPlong() { assertEquals("PlingPlong", raindropConverter.convert(21)); } @Ignore("Remove to run test") @Test public void soundFor35IsPlangPlong() { assertEquals("PlangPlong", raindropConverter.convert(35)); } @Ignore("Remove to run test") @Test public void noSoundFor52() { assertEquals("52", raindropConverter.convert(52)); } @Ignore("Remove to run test") @Test public void soundFor105IsPlingPlangPlong() { assertEquals("PlingPlangPlong", 
raindropConverter.convert(105)); } }
package at.sw2017.q_up;

import android.app.Activity;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;

import java.util.List;

import android.view.View.OnKeyListener;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;

/**
 * Registration screen: lets a user choose a username/password, validates the
 * input against the users already known to the DatabaseHandler, creates the
 * account, and returns to the login screen (MainActivity).
 *
 * NOTE(review): the AppCompatActivity, List and junit.framework.Assert imports
 * are unused in this class — candidates for a cleanup pass.
 */
public class RegisterActivity extends Activity implements View.OnClickListener {

    // Navigates back to the login screen.
    Button loginNavigationButton;
    // Triggers the registration flow handled in onClick().
    Button registerButton;
    EditText inputPassword;
    EditText confirmPassword;
    EditText inputUsername;

    // Pressing ENTER in the confirm-password field behaves like tapping the
    // register button.
    OnKeyListener myKeyListener = new View.OnKeyListener() {
        @Override
        public boolean onKey(View arg0, int actionID, KeyEvent event) {
            // TODO: do what you got to do
            if ((event.getAction() == KeyEvent.ACTION_DOWN) && (actionID == KeyEvent.KEYCODE_ENTER)) {
                Button click = (Button)findViewById(R.id.registerButton);
                click.performClick();
            }
            // false: let the key event continue to other handlers.
            return false;
        }
    };

    /**
     * Wires the login-navigation button to switch back to MainActivity.
     * NOTE(review): this installs a second OnClickListener on the same button
     * that onCreate() already wired to `this`, replacing it — confirm that is
     * intended.
     */
    public void switchLoginRegister() {
        Button ButtonLogin = (Button) findViewById(R.id.loginNavigationButton);
        ButtonLogin.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(RegisterActivity.this, MainActivity.class);
                startActivity(intent);
            }
        });
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_register);
        loginNavigationButton = (Button) findViewById(R.id.loginNavigationButton);
        loginNavigationButton.setOnClickListener(this);
        registerButton = (Button) findViewById(R.id.registerButton);
        registerButton.setOnClickListener(this);
        inputPassword = (EditText) findViewById(R.id.inputPassword);
        confirmPassword = (EditText) findViewById(R.id.confirmPassword);
        inputUsername = (EditText) findViewById(R.id.inputUsername);
        confirmPassword.setOnKeyListener(myKeyListener);
        switchLoginRegister();
    }

    /**
     * Handles the register button: waits (up to 4s) until the DB user list is
     * available, validates the form, and creates the user if the name is free.
     *
     * NOTE(review): the while-loop below busy-waits on the UI thread, freezing
     * the UI for up to 4 seconds and burning CPU — consider moving this off the
     * main thread or using a DatabaseHandler callback.
     */
    @Override
    public void onClick(View v) {
        Button clickedButton = (Button) v;
        DatabaseHandler db_handle = QUpApp.getInstance().getDBHandler();
        // check for DB timeout
        int timeout = 4 * 1000;
        long startTime = System.currentTimeMillis(); //fetch starting time
        boolean data_ready = false;
        while(!data_ready && (System.currentTimeMillis()-startTime) < timeout) {
            if (!db_handle.isUsersListEmpty())
                data_ready = true;
        }
        if (data_ready != true) {
            Toast.makeText(getApplicationContext(), "Server timeout!", Toast.LENGTH_SHORT).show();
            return;
        }
        boolean user_already_in_list = false;
        switch (clickedButton.getId()) {
            case R.id.registerButton:
                // Password and confirmation must match and must not be empty.
                if (!inputPassword.getText().toString().equals(confirmPassword.getText().toString())
                        || inputPassword.getText().toString().equals("")) {
                    Toast.makeText(getApplicationContext(), "Passwords don't match / are too weak!",
                            Toast.LENGTH_SHORT).show();
                    break;
                }
                if(inputUsername.getText().toString().equals("")) {
                    Toast.makeText(getApplicationContext(), "Enter a username for registration",
                            Toast.LENGTH_SHORT).show();
                    break;
                }
                // check if user already exists
                db_handle.usersLock();
                for (User u : db_handle.getUsersList()) {
                    if (u.userName.equals(inputUsername.getText().toString())) {
                        // username is already in list
                        Toast.makeText(getApplicationContext(), "This user already exists!",
                                Toast.LENGTH_SHORT).show();
                        user_already_in_list = true;
                        break;
                    }
                }
                db_handle.usersUnlock();
                // check if user is not in list of existing users and create the user
                if (!user_already_in_list) {
                    db_handle.addUser(inputUsername.getText().toString(),
                            inputPassword.getText().toString());
                    Toast.makeText(getApplicationContext(), "User created..",
                            Toast.LENGTH_SHORT).show();
                    clearText();
                    // go back to login screen
                    Intent intent = new Intent(RegisterActivity.this, MainActivity.class);
                    startActivity(intent);
                }
                // check texfield is already in db
                // if not add data
                // check confirm password is the same as password
                break;
            default:
                break;
        }
    }

    /** Clears all three input fields (used after a successful registration). */
    public void clearText() {
        inputUsername.setText("");
        inputPassword.setText("");
        confirmPassword.setText("");
    }
}
package com.aldebaran.qi; public class Application { static { // Loading native C++ libraries. if (!EmbeddedTools.LOADED_EMBEDDED_LIBRARY) { EmbeddedTools loader = new EmbeddedTools(); loader.loadEmbeddedLibraries(); } } // Native function private static native long qiApplicationCreate(String[] args, String defaultUrl, boolean listen); private static native long qiApplicationGetSession(long pApp); private static native void qiApplicationStart(long pApp); private static native void qiApplicationRun(long pApp); private static native void qiApplicationStop(long pApp); private static native void qiApplicationDestroy(long pApplication); /** * Crude interface to native log system */ public static native void setLogCategory(String category, long verbosity); // Members private long _application; private Session _session; /** * Application constructor. * @param args Arguments given to main() function. * @param defaultUrl Default url to connect to if none was provided in the * program arguments */ public Application(String[] args, String defaultUrl) { if (args == null) throw new NullPointerException("Creating application with null args"); if (defaultUrl == null) throw new NullPointerException("Creating application with null defaultUrl"); init(args, defaultUrl, false); } /** * Application constructor. * @param args Arguments given to main() function. */ public Application(String[] args) { if (args == null) throw new RuntimeException("Creating application with null args"); init(args, null, false); } private void init(String[] args, String defaultUrl, boolean listen) { _application = Application.qiApplicationCreate(args, defaultUrl, listen); _session = new Session(Application.qiApplicationGetSession(_application)); } /** * Start Application eventloops and connects the Session */ public void start() { Application.qiApplicationStart(_application); } public Session session() { return _session; } /** * Stop Application eventloops and calls atStop() callbacks. 
* @since 1.20 */ public void stop() { Application.qiApplicationStop(_application); } /** * Blocking function. Application.run() join eventloop thread. * Return when : * - Eventloop is stopped. * - Application.stop() is called * @since 1.20 */ public void run() { Application.qiApplicationRun(_application); } }
package krasa.grepconsole.filter; import krasa.grepconsole.model.StreamBufferSettings; import krasa.grepconsole.utils.Utils; import org.jctools.queues.MpscChunkedArrayQueue; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.openapi.Disposable; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Pair; public class StreamBuffer implements Disposable { private static final Logger LOG = com.intellij.openapi.diagnostic.Logger.getInstance(StreamBuffer.class); private final long currentlyPrintingDeltaNano; private final long maxWaitTimeNano; private final long maxWaitForIncompleteLineNano; private final SleepingPolicy sleepingPolicy; private ConsoleView console; private final MyQueue<Pair<String, ConsoleViewContentType>> otherOutput = new MyQueue<>(false); private final MyQueue<Pair<String, ConsoleViewContentType>> errorOutput = new MyQueue<>(true); private volatile long firstErrorNano = 0; private volatile boolean lastErrorMissingNewLine; private volatile long lastErrorNano; private volatile long lastNonErrorNano = 0; private volatile Thread worker; private boolean lastPrintedError; private volatile boolean exit = false; private final Object STICK = new Object(); public final Object LOOP_GUARD = new Object(); public StreamBuffer(ConsoleView console, StreamBufferSettings streamBufferSettings) { this.console = console; Disposer.register(console, this); currentlyPrintingDeltaNano = Utils.toNano(streamBufferSettings.getCurrentlyPrintingDelta(), StreamBufferSettings.CURRENTLY_PRINTING_DELTA); maxWaitTimeNano = Utils.toNano(streamBufferSettings.getMaxWaitTime(), StreamBufferSettings.MAX_WAIT_TIME); maxWaitForIncompleteLineNano = Utils.toNano(streamBufferSettings.getMaxWaitForIncompleteLine(), StreamBufferSettings.MAX_WAIT_FOR_INCOMPLETE_LINE); sleepingPolicy = new SleepingPolicy(streamBufferSettings.getSleepTimeWhenWasActive(), 
streamBufferSettings.getSleepTimeWhenIdle()); startWorker(); } private void startWorker() { worker = new Thread(new Runnable() { @Override public void run() { StreamBuffer.this.threadWork(StreamBuffer.this); } }, StreamBuffer.class.getName()); worker.setDaemon(true); worker.start(); } public boolean buffer(String text, ConsoleViewContentType consoleViewContentType) { if (exit) { return false; } if (consoleViewContentType == ConsoleViewContentType.ERROR_OUTPUT) { checkIfEndsWithNewLine(text); bufferError(text, consoleViewContentType); } else if (consoleViewContentType == ConsoleViewContentType.SYSTEM_OUTPUT) { lastNonErrorNano = System.nanoTime(); return false; // bufferSystem(text); } else if (consoleViewContentType == ConsoleViewContentType.USER_INPUT) { return false; } else { bufferOther(text, consoleViewContentType); } // synchronized (STICK) { // STICK.notify(); return true; } // private void bufferSystem(String text) { // systemOutput.add(text); // lastNonErrorNano = System.nanoTime(); private void bufferError(String text, ConsoleViewContentType consoleViewContentType) { errorOutput.add(Pair.create(text, consoleViewContentType)); if (firstErrorNano == 0) { firstErrorNano = System.nanoTime(); } lastErrorNano = System.nanoTime(); } private void bufferOther(String text, ConsoleViewContentType consoleViewContentType) { lastNonErrorNano = System.nanoTime(); otherOutput.add(Pair.create(text, consoleViewContentType)); } private void threadWork(StreamBuffer streamBuffer) { while (!exit) { boolean worked; synchronized (LOOP_GUARD) { worked = streamBuffer.flush(); } synchronized (STICK) { try { STICK.wait(sleepingPolicy.getTimeToSleep(worked)); } catch (InterruptedException e) { LOG.error(e); exit = true; return; } } } } private boolean flush() { boolean anyPolled = false; if (lastPrintedError) { anyPolled |= flushError(); anyPolled |= flush(otherOutput); } else { anyPolled |= flush(otherOutput); anyPolled |= flushError(); } // anyPolled |= flushSystem(); return 
anyPolled; } private boolean flushError() { long current = System.nanoTime(); if ((nonErrorBeingPrinted(current) || errorsBeingPrinted(current) || lastErrorMissingNewLine) && notWaitingTooLong(current) && consistencyCheck()) { return false; } return flush(errorOutput); } private boolean flush(MyQueue<Pair<String, ConsoleViewContentType>> queue) { boolean anyPolled = false; Pair<String, ConsoleViewContentType> temp = null; try { Pair<String, ConsoleViewContentType> poll = queue.poll(); if (poll != null) { anyPolled = true; } while (poll != null) { if (poll.first.endsWith("\n")) { print(queue, poll); poll = queue.poll(); } else { temp = poll; poll = queue.poll(); if (poll != null) { if (poll.second == temp.second) { poll = Pair.create(temp.first + poll.first, poll.second); temp = null; } else { print(queue, temp); temp = null; } } else { if (queue.tempNano != 0 && System.nanoTime() - queue.tempNano > maxWaitForIncompleteLineNano) { print(queue, temp); temp = null; } } } } } finally { if (temp != null) { queue.setTemp(temp); } if (queue.errorQueue) { if (temp != null) { firstErrorNano = System.nanoTime(); } else { firstErrorNano = 0; // something new could already be in the queue - that's why we have // #consistencyCheck } } } return anyPolled; } private void print(MyQueue<Pair<String, ConsoleViewContentType>> queue, Pair<String, ConsoleViewContentType> poll) { console.print(poll.first, poll.second); queue.tempNano = 0; lastPrintedError = queue.errorQueue; } protected void checkIfEndsWithNewLine(String text) { // something wrong, better to wait before flushing errors this.lastErrorMissingNewLine = text.length() > 0 && !text.endsWith("\n"); } private boolean errorsBeingPrinted(long current) { return lastErrorNano != 0 && current - lastErrorNano < currentlyPrintingDeltaNano; } private boolean nonErrorBeingPrinted(long current) { return lastNonErrorNano != 0 && current - lastNonErrorNano < currentlyPrintingDeltaNano; } private boolean notWaitingTooLong(long current) { 
return firstErrorNano != 0 && current - firstErrorNano < maxWaitTimeNano; } private boolean consistencyCheck() { boolean consistent = firstErrorNano != 0 || firstErrorNano == 0 && errorOutput.isEmpty(); return consistent; } @Override public void dispose() { console = null; exit = true; } public static class SleepingPolicy { private int sleepTimeWhenWasActive; private int sleepTimeWhenIdle; public SleepingPolicy(String sleepTimeWhenWasActive, String sleepTimeWhenIdle) { this.sleepTimeWhenWasActive = Utils.toPositiveInt(sleepTimeWhenWasActive, StreamBufferSettings.SLEEP_TIME_WHEN_WAS_ACTIVE); this.sleepTimeWhenIdle = Utils.toPositiveInt(sleepTimeWhenIdle, StreamBufferSettings.SLEEP_TIME_WHEN_IDLE); } public int getTimeToSleep(boolean wasActive) { return wasActive ? sleepTimeWhenWasActive : sleepTimeWhenIdle; } } class MyQueue<T> { public final boolean errorQueue; private long tempNano = 0; private T temp; private MpscChunkedArrayQueue<T> queue; public MyQueue(boolean errorQueue) { this.errorQueue = errorQueue; queue = new MpscChunkedArrayQueue<T>(100, 1_000_000); } public void add(T t) { queue.add(t); } public T poll() { if (temp != null) { T t = this.temp; this.temp = null; return t; } return queue.poll(); } public void setTemp(T temp) { this.temp = temp; if (tempNano == 0) { tempNano = System.nanoTime(); } } public boolean isEmpty() { return temp != null && queue.isEmpty(); } } }
package org.obolibrary.robot; import java.util.*; import java.util.regex.Pattern; import javax.annotation.Nonnull; import org.obolibrary.robot.exceptions.ColumnException; import org.obolibrary.robot.exceptions.RowParseException; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntaxClassExpressionParser; import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.util.SimpleShortFormProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** @author <a href="mailto:rctauber@gmail.com">Becky Tauber</a> */ public class Template { /** Logger */ private static final Logger logger = LoggerFactory.getLogger(Template.class); /** Template IOHelper to resolve prefixes. */ private IOHelper ioHelper; /** Template QuotedEntityChecker to get entities and IRIs by label. */ private QuotedEntityChecker checker; /** Manchester Syntax parser to parse class expressions. */ private ManchesterOWLSyntaxClassExpressionParser parser; /** Set of axioms generated from template. */ private Set<OWLAxiom> axioms; /** Name of the table. */ private String name; /** Location of IDs (ID). */ private int idColumn = -1; /** Location of labels (LABEL, A rdfs:label, A label). */ private int labelColumn = -1; /** Location of entity types (TYPE). */ private int typeColumn = -1; /** Location of class types (CLASS_TYPE). */ private int classTypeColumn = -1; /** Location of property types (PROPERTY_TYPE). */ private int propertyTypeColumn = -1; /** Location of property characteristic (CHARACTERISTIC). */ private int characteristicColumn = -1; /** Location of individual types (INDIVIDUAL_TYPE). */ private int individualTypeColumn = -1; /** Character to split property characteristics on. */ private String characteristicSplit = null; /** Character to split generic types on. */ private String typeSplit = null; /** List of human-readable template headers. 
*/ private List<String> headers; /** List of ROBOT template strings. */ private List<String> templates; /** All other rows of the table (does not include headers and template strings). */ private List<List<String>> tableRows; /** Row number tracker. Start with 2 to skip headers. */ private int rowNum = 2; /** Shared data factory. */ private final OWLDataFactory dataFactory = OWLManager.getOWLDataFactory(); /** Namespace for error messages. */ private static final String NS = "template /** Error message when an annotation property has a characteristic. */ private static final String annotationPropertyCharacteristicError = NS + "ANNOTATION PROPERTY CHARACTERISTIC ERROR annotation property '%s' should not have any characteristics at line %d, column %d in table \"%s\""; /** Error message when an annotation property gets a property type other than subproperty. */ private static final String annotationPropertyTypeError = NS + "ANNOTATION PROPERTY TYPE ERROR annotation property %s type '%s' must be 'subproperty' at row %d, column %d in table \"%s\"."; /** Error message when an invalid class type is provided. */ private static final String classTypeError = NS + "CLASS TYPE ERROR class %s has unknown type '%s' at row %d, column %d in table \"%s\"."; /** Error message when CLASS_TYPE has a SPLIT. */ private static final String classTypeSplitError = NS + "CLASS TYPE SPLIT ERROR the SPLIT functionality should not be used for CLASS_TYPE in column %d in table \"%s\"."; /** * Error message when the number of header columns does not match the number of template columns. * Expects: table name, header count, template count. */ private static final String columnMismatchError = NS + "COLUMN MISMATCH ERROR the number of header columns (%2$d) must match the number of template columns (%3$d) in table \"%1$s\"."; /** Error message when a data property has a characteristic other than 'functional'. 
*/ private static final String dataPropertyCharacteristicError = NS + "DATA PROPERTY CHARACTERISTIC ERROR data property '%s' can only have characteristic 'functional' at line %d, column %d in table \"%s\"."; /** Error message when an invalid individual type is provided. */ private static final String individualTypeError = NS + "INDIVIDUAL TYPE ERROR individual %s has unknown type '%s' at row %d, column %d in table \"%s\"."; /** Error message when INDIVIDUAL_TYPE has a SPLIT. */ private static final String individualTypeSplitError = NS + "INDIVIDUAL TYPE SPLIT ERROR the SPLIT functionality should not be used for INDIVIDUAL_TYPE in column %d in table \"%s\"."; /** Error message when an invalid property type is provided. */ private static final String propertyTypeError = NS + "PROPERTY TYPE ERROR property %s has unknown type '%s' at row %d, column %d in table \"%s\"."; /** Error message when more than one logical type is used in PROPERTY_TYPE. */ private static final String propertyTypeSplitError = NS + "PROPERTY TYPE SPLIT ERROR thee SPLIT functionality should not be used for PROPERTY_TYPE in column %d in table \"%s\"."; /** Error message when property characteristic not valid. */ private static final String unknownCharacteristicError = NS + "UNKNOWN CHARACTERISTIC ERROR property '%s' has unknown characteristic '%s' at line %d, column %d in table \"%s\"."; /** * Error message when a template cannot be understood. Expects: table name, column number, column * name, template. */ private static final String unknownTemplateError = NS + "UNKNOWN TEMPLATE ERROR could not interpret template string \"%4$s\" for column %2$d (\"%3$s\") in table \"%1$s\"."; private static final List<String> validClassTypes = new ArrayList<>(Arrays.asList("subclass", "disjoint", "equivalent")); /** * Given a template name and a list of rows, create a template object with a new IOHelper and * QuotedEntityChecker. 
The rows are added to the object, new labels from the rows are added to * the checker, and a Manchester Syntax parser is created. * * @param name template name * @param rows list of rows (lists) * @throws Exception on issue creating IOHelper or adding table to template object */ public Template(@Nonnull String name, @Nonnull List<List<String>> rows) throws Exception { this.name = name; this.ioHelper = new IOHelper(); tableRows = new ArrayList<>(); templates = new ArrayList<>(); headers = new ArrayList<>(); axioms = new HashSet<>(); checker = new QuotedEntityChecker(); checker.setIOHelper(this.ioHelper); checker.addProvider(new SimpleShortFormProvider()); // Add the contents of the tableRows addTable(rows); addLabels(); createParser(); } /** * Given a template name, a list of rows, and an IOHelper, create a template object with a new * QuotedEntityChecker. The rows are added to the object, new labels from the rows are added to * the checker, and a Manchester Syntax parser is created. * * @param name template name * @param rows list of rows (lists) * @param ioHelper IOHelper to resolve prefixes * @throws Exception on issue adding table to template object */ public Template(@Nonnull String name, @Nonnull List<List<String>> rows, IOHelper ioHelper) throws Exception { this.name = name; this.ioHelper = ioHelper; tableRows = new ArrayList<>(); templates = new ArrayList<>(); headers = new ArrayList<>(); axioms = new HashSet<>(); checker = new QuotedEntityChecker(); checker.setIOHelper(this.ioHelper); checker.addProvider(new SimpleShortFormProvider()); checker.addProperty(dataFactory.getRDFSLabel()); // Add the contents of the tableRows addTable(rows); addLabels(); createParser(); } /** * Given a template name, a list of rows, and an input ontology, create a template object with a * new IOHelper and QuotedEntityChecker populated by the input ontology. 
The rows are added to the * object, new labels from the rows are added to the checker, and a Manchester Syntax parser is * created. * * @param name template name * @param rows list of rows (lists) * @param inputOntology OWLOntology to get labels of entities for QuotedEntityChecker * @throws Exception on issue creating IOHelper or adding table to template object */ public Template(@Nonnull String name, @Nonnull List<List<String>> rows, OWLOntology inputOntology) throws Exception { this.name = name; ioHelper = new IOHelper(); tableRows = new ArrayList<>(); templates = new ArrayList<>(); headers = new ArrayList<>(); axioms = new HashSet<>(); checker = new QuotedEntityChecker(); checker.setIOHelper(this.ioHelper); checker.addProvider(new SimpleShortFormProvider()); checker.addProperty(dataFactory.getRDFSLabel()); if (inputOntology != null) { checker.addAll(inputOntology); } // Add the contents of the tableRows addTable(rows); addLabels(); createParser(); } /** * Given a template name, a list of rows, an input ontology, and an IOHelper, create a template * object with a new QuotedEntityChecker with the IOHelper populated by the input ontology. The * rows are added to the object, new labels from the rows are added to the checker, and a * Manchester Syntax parser is created. 
* * @param name template name * @param rows list of rows (lists) * @param inputOntology OWLOntology to get labels of entities for QuotedEntityChecker * @param ioHelper IOHelper to resolve prefixes * @throws Exception on issue adding table to template object */ public Template( @Nonnull String name, @Nonnull List<List<String>> rows, OWLOntology inputOntology, IOHelper ioHelper) throws Exception { this.name = name; this.ioHelper = ioHelper; tableRows = new ArrayList<>(); templates = new ArrayList<>(); headers = new ArrayList<>(); axioms = new HashSet<>(); checker = new QuotedEntityChecker(); checker.setIOHelper(this.ioHelper); checker.addProvider(new SimpleShortFormProvider()); checker.addProperty(dataFactory.getRDFSLabel()); if (inputOntology != null) { checker.addAll(inputOntology); } // Add the contents of the tableRows addTable(rows); addLabels(); createParser(); } /** * Given a template name, a list of rows, an IOHelper, and a QuotedEntityChecker, create a * template object. The rows are added to the object, new labels from the rows are added to the * checker, and a Manchester Syntax parser is created. 
* * @param name template name * @param rows list of rows (lists) * @param inputOntology OWLOntology to get labels of entities for QuotedEntityChecker * @param ioHelper IOHelper to resolve prefixes * @param checker QuotedEntityChecker to get entities by label * @throws Exception on issue adding table to template object */ public Template( @Nonnull String name, @Nonnull List<List<String>> rows, OWLOntology inputOntology, IOHelper ioHelper, QuotedEntityChecker checker) throws Exception { this.name = name; this.ioHelper = ioHelper; if (checker == null) { this.checker = new QuotedEntityChecker(); this.checker.setIOHelper(this.ioHelper); this.checker.addProvider(new SimpleShortFormProvider()); this.checker.addProperty(dataFactory.getRDFSLabel()); } else { this.checker = checker; } tableRows = new ArrayList<>(); templates = new ArrayList<>(); headers = new ArrayList<>(); axioms = new HashSet<>(); if (inputOntology != null) { this.checker.addAll(inputOntology); } // Add the contents of the tableRows addTable(rows); addLabels(); createParser(); parser.setOWLEntityChecker(this.checker); } /** * Return the QuotedEntityChecker. * * @return QuotedEntityChecker */ public QuotedEntityChecker getChecker() { return checker; } /** * Generate an OWLOntology based on the rows of the template. * * @return new OWLOntology * @throws Exception on issue parsing rows to axioms or creating new ontology */ public OWLOntology generateOutputOntology() throws Exception { return generateOutputOntology(null, false); } /** * Generate an OWLOntology with given IRI based on the rows of the template. 
* * @param outputIRI IRI for final ontology * @param force if true, do not exit on errors * @return new OWLOntology * @throws Exception on issue parsing rows to axioms or creating new ontology */ public OWLOntology generateOutputOntology(String outputIRI, boolean force) throws Exception { // Set to true on first exception boolean hasException = false; for (List<String> row : tableRows) { try { processRow(row); } catch (RowParseException e) { // If force = false, fail on the first exception if (!force) { throw e; } // otherwise print exceptions as they show up hasException = true; logger.error(e.getMessage().substring(e.getMessage().indexOf(" } } if (hasException) { logger.warn("Ontology created from template with errors"); } // Create a new ontology object to add axioms to OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntology outputOntology; if (outputIRI != null) { IRI iri = IRI.create(outputIRI); outputOntology = manager.createOntology(iri); } else { outputOntology = manager.createOntology(); } manager.addAxioms(outputOntology, axioms); return outputOntology; } /** * Given a list of rows for a table, first validate the headers and template strings. Then, get * the location of important columns (e.g. IDs and labels). Finally, add all template rows to the * object. 
* * @param rows list of rows (lists) * @throws Exception on malformed template */ private void addTable(List<List<String>> rows) throws Exception { // Get and validate headers headers = rows.get(0); templates = rows.get(1); if (headers.size() != templates.size()) { throw new ColumnException( String.format(columnMismatchError, name, headers.size(), templates.size())); } for (int column = 0; column < templates.size(); column++) { String template = templates.get(column); // If the template is null or the column is empty, skip this column if (template == null) { continue; } template = template.trim(); if (template.isEmpty()) { continue; } // Validate the template string if (!TemplateHelper.validateTemplateString(template)) { throw new ColumnException( String.format(unknownTemplateError, name, column + 1, headers.get(column), template)); } // Get the location of important columns // If it is an annotation, check if it resolves to RDFS label if (template.startsWith("A ")) { String property = template.substring(2); maybeSetLabelColumn(property, column); } else if (template.startsWith("AT ")) { String property; if (template.contains("^^")) { property = template.substring(3, template.indexOf("^^")).trim(); } else { property = template.substring(3).trim(); } maybeSetLabelColumn(property, column); } else if (template.startsWith("AL ")) { String property; if (template.contains("@")) { property = template.substring(3, template.indexOf("@")).trim(); } else { property = template.substring(3).trim(); } maybeSetLabelColumn(property, column); } else if (template.startsWith("AI ")) { String property = template.substring(3); maybeSetLabelColumn(property, column); } else if (template.equals("ID")) { // Unique identifier (CURIE, IRI...) 
idColumn = column; } else if (template.equals("LABEL")) { // Label identifier labelColumn = column; } else if (template.startsWith("TYPE")) { // Entity type typeColumn = column; if (template.contains("SPLIT=")) { typeSplit = template.substring(template.indexOf("SPLIT=") + 6); } } else if (template.startsWith("CLASS_TYPE")) { // Class expression type classTypeColumn = column; if (template.contains("SPLIT=")) { // Classes should only have one class type throw new ColumnException(String.format(classTypeSplitError, column, name)); } } else if (template.startsWith("PROPERTY_TYPE")) { // Property expression type propertyTypeColumn = column; if (template.contains("SPLIT=")) { // Instances should only have one individual type throw new ColumnException(String.format(propertyTypeSplitError, column, name)); } } else if (template.startsWith("INDIVIDUAL_TYPE")) { // Individual expression type individualTypeColumn = column; if (template.contains("SPLIT=")) { // Instances should only have one individual type throw new ColumnException(String.format(individualTypeSplitError, column, name)); } } else if (template.startsWith("CHARACTERISTIC")) { // Property characteristic characteristicColumn = column; if (template.contains("SPLIT=")) { characteristicSplit = template.substring(template.indexOf("SPLIT=") + 6); } } } // Each template needs a way to identify the entities // Without one, we cannot continue if (idColumn == -1 && labelColumn == -1) { throw new ColumnException( "Template row must include an \"ID\" or \"LABEL\" column in table: " + name); } // Add the rest of the tableRows to Template for (int row = 2; row < rows.size(); row++) { tableRows.add(rows.get(row)); } } /** Add the labels from the rows of the template to the QuotedEntityChecker. 
   */
  private void addLabels() {
    // If there's no label column, we can't add labels
    if (labelColumn == -1) {
      return;
    }
    for (List<String> row : tableRows) {
      String id = null;
      if (idColumn != -1) {
        try {
          id = row.get(idColumn);
        } catch (IndexOutOfBoundsException e) {
          // ignore - the row may be shorter than the ID column index
        }
      }
      String label = null;
      try {
        label = row.get(labelColumn);
      } catch (IndexOutOfBoundsException e) {
        // ignore - the row may be shorter than the label column index
      }
      // If the template declares an ID column, rows without an ID are skipped
      if (idColumn != -1 && id == null) {
        continue;
      }
      // Both an ID and a label are required to register the pair
      if (id == null || label == null) {
        continue;
      }
      String type = null;
      if (typeColumn != -1) {
        try {
          type = row.get(typeColumn);
        } catch (IndexOutOfBoundsException e) {
          // ignore - the row may be shorter than the type column index
        }
      }
      // Default entity type is class
      if (type == null || type.trim().isEmpty()) {
        type = "class";
      }
      IRI iri = ioHelper.createIRI(id);
      if (iri == null) {
        // Fall back to treating the ID cell as a full IRI
        iri = IRI.create(id);
      }
      // Try to resolve a CURIE
      IRI typeIRI = ioHelper.createIRI(type);
      // Set to IRI string or to type string
      String typeOrIRI = type;
      if (typeIRI != null) {
        typeOrIRI = typeIRI.toString();
      }
      // Create a typed entity so the checker can resolve the label later.
      // The IRI strings here match OWL API EntityType IRIs (e.g. owl#DataProperty),
      // which is what processRow compares against - not all are standard OWL vocabulary.
      OWLEntity entity;
      switch (typeOrIRI) {
        case "":
        case "http://www.w3.org/2002/07/owl#Class":
        case "class":
          entity = dataFactory.getOWLEntity(EntityType.CLASS, iri);
          break;
        case "http://www.w3.org/2002/07/owl#ObjectProperty":
        case "object property":
          entity = dataFactory.getOWLEntity(EntityType.OBJECT_PROPERTY, iri);
          break;
        case "http://www.w3.org/2002/07/owl#DataProperty":
        case "data property":
          entity = dataFactory.getOWLEntity(EntityType.DATA_PROPERTY, iri);
          break;
        case "http://www.w3.org/2002/07/owl#AnnotationProperty":
        case "annotation property":
          entity = dataFactory.getOWLEntity(EntityType.ANNOTATION_PROPERTY, iri);
          break;
        case "http://www.w3.org/2002/07/owl#Individual":
        case "individual":
        case "http://www.w3.org/2002/07/owl#NamedIndividual":
        case "named individual":
          entity = dataFactory.getOWLEntity(EntityType.NAMED_INDIVIDUAL, iri);
          break;
        case "http://www.w3.org/2002/07/owl#Datatype":
        case "datatype":
          entity = dataFactory.getOWLEntity(EntityType.DATATYPE, iri);
          break;
        default:
          // Assume type is an individual (checked later)
          entity = dataFactory.getOWLEntity(EntityType.NAMED_INDIVIDUAL, iri);
          break;
      }
      checker.add(entity, label);
    }
  }

  /** Create a Manchester Syntax parser from the OWLDataFactory and QuotedEntityChecker. */
  private void createParser() {
    this.parser = new ManchesterOWLSyntaxClassExpressionParser(dataFactory, checker);
  }

  /**
   * Process each of the table rows. First, get an entity based on ID or label. If the template
   * contains an ID column, but it is empty, skip that row. If it does not contain an ID column,
   * skip if the label is empty. Add axioms based on the entity type (class, object property, data
   * property, annotation property, datatype, or individual).
   *
   * @param row list of template values for one entity
   * @throws Exception on issue creating axioms from template
   */
  private void processRow(List<String> row) throws Exception {
    rowNum++;
    String id = null;
    try {
      // relies on get(-1) throwing IndexOutOfBoundsException when idColumn == -1
      id = row.get(idColumn);
    } catch (IndexOutOfBoundsException e) {
      // ignore - missing column or short row
    }
    String label = null;
    try {
      label = row.get(labelColumn);
    } catch (IndexOutOfBoundsException e) {
      // ignore - missing column or short row
    }
    String type = null;
    try {
      type = row.get(typeColumn);
    } catch (IndexOutOfBoundsException e) {
      // ignore - missing column or short row
    }
    // Skip if no ID and no label
    if (id == null && label == null) {
      return;
    }
    if (type == null || type.trim().isEmpty()) {
      // Try to guess the type from already existing entities
      if (label != null) {
        OWLEntity e = checker.getOWLEntity(label);
        if (e != null) {
          type = e.getEntityType().getIRI().toString();
        }
      } else {
        OWLEntity e = checker.getOWLEntity(id);
        if (e != null) {
          type = e.getEntityType().getIRI().toString();
        }
      }
      // If the entity type is not defined
      // and the entity does not already exist
      // default to class
      if (type == null) {
        type = "class";
      }
    }
    IRI iri = getIRI(id, label);
    if (iri == null) {
      return;
    }
    // Try to resolve a CURIE
    IRI typeIRI = ioHelper.createIRI(type);
    // Set to IRI string or to type string
    String typeOrIRI = type;
    if (typeIRI != null) {
      typeOrIRI = typeIRI.toString();
    }
    // Dispatch on the (possibly IRI-resolved) entity type
    switch (typeOrIRI) {
      case "http://www.w3.org/2002/07/owl#Class":
      case "class":
        addClassAxioms(iri, row);
        break;
      case "http://www.w3.org/2002/07/owl#ObjectProperty":
      case "object property":
        addObjectPropertyAxioms(iri, row);
        break;
      case "http://www.w3.org/2002/07/owl#DataProperty":
      case "data property":
        addDataPropertyAxioms(iri, row);
        break;
      case "http://www.w3.org/2002/07/owl#AnnotationProperty":
      case "annotation property":
        addAnnotationPropertyAxioms(iri, row);
        break;
      case "http://www.w3.org/2002/07/owl#Datatype":
      case "datatype":
        addDatatypeAxioms(iri, row);
        break;
      case "http://www.w3.org/2002/07/owl#Individual":
      case "individual":
      case "http://www.w3.org/2002/07/owl#NamedIndividual":
      case "named individual":
      default:
        // Unrecognized types are treated as individuals
        addIndividualAxioms(iri, row);
        break;
    }
  }

  /* CLASS AXIOMS */

  /**
   * Given a class IRI and the row containing the class details, generate class axioms.
   *
   * @param iri class IRI
   * @param row list of template values for given class
   * @throws Exception on issue creating class axioms from template
   */
  private void addClassAxioms(IRI iri, List<String> row) throws Exception {
    if (iri == null) {
      return;
    }
    // Add the declaration
    OWLClass cls = dataFactory.getOWLClass(iri);
    OWLDeclarationAxiom ax = dataFactory.getOWLDeclarationAxiom(cls);
    axioms.add(ax);
    String classType = null;
    if (classTypeColumn != -1) {
      try {
        classType = row.get(classTypeColumn);
      } catch (IndexOutOfBoundsException e) {
        // do nothing - short row
      }
    }
    // Class type defaults to "subclass"; normalize for comparison
    if (classType == null || classType.trim().isEmpty()) {
      classType = "subclass";
    } else {
      classType = classType.trim().toLowerCase();
    }
    if (!validClassTypes.contains(classType)) {
      // Unknown class type
      throw new RowParseException(
          String.format(
              classTypeError,
              cls.getIRI().getShortForm(),
              classType,
              rowNum,
              classTypeColumn,
              name));
    }
    // Iterate through all columns and add annotations as we go
    // Also collect any class expressions that will be used in logical definitions
    // We collect all of these together so that equivalent expressions can be made into an
    // intersection
    // Instead of adding them in as we iterate through
    Map<Integer, Set<OWLClassExpression>> subclassExpressionColumns = new HashMap<>();
    Map<Integer, Set<OWLClassExpression>> equivalentExpressionColumns = new HashMap<>();
    Map<Integer, Set<OWLClassExpression>> intersectionEquivalentExpressionColumns = new HashMap<>();
    Map<Integer, Set<OWLClassExpression>> disjointExpressionColumns = new HashMap<>();
    for (int column = 0; column < templates.size(); column++) {
      String template = templates.get(column);
      String value = null;
      try {
        value = row.get(column);
      } catch (IndexOutOfBoundsException e) {
        // do nothing - short row
      }
      if (value == null || value.trim().isEmpty()) {
        continue;
      }
      if (template.startsWith("A") || template.startsWith("LABEL")) {
        // Handle class annotations
        Set<OWLAnnotation> annotations = getAnnotations(template, value, row, column);
        for (OWLAnnotation annotation : annotations) {
          axioms.add(dataFactory.getOWLAnnotationAssertionAxiom(iri, annotation));
        }
      } else if (template.startsWith("SC")) {
        // Subclass expression
        subclassExpressionColumns.put(
            column,
            TemplateHelper.getClassExpressions(name, parser, template, value, rowNum, column + 1));
      } else if (template.startsWith("EC")) {
        // Equivalent expression
        equivalentExpressionColumns.put(
            column,
            TemplateHelper.getClassExpressions(name, parser, template, value, rowNum, column + 1));
      } else if (template.startsWith("DC")) {
        // Disjoint expression
        disjointExpressionColumns.put(
            column,
            TemplateHelper.getClassExpressions(name, parser, template, value, rowNum, column + 1));
      } else if (template.startsWith("C") && !template.startsWith("CLASS_TYPE")) {
        // Legacy "C" column: use class type to determine what to do with the expression
        switch (classType) {
          case "subclass":
            subclassExpressionColumns.put(
                column,
                TemplateHelper.getClassExpressions(
                    name, parser, template, value, rowNum, column + 1));
            break;
          case "equivalent":
            intersectionEquivalentExpressionColumns.put(
                column,
                TemplateHelper.getClassExpressions(
                    name, parser, template, value, rowNum, column + 1));
            break;
          case "disjoint":
            disjointExpressionColumns.put(
                column,
                TemplateHelper.getClassExpressions(
                    name, parser, template, value, rowNum, column + 1));
            break;
          default:
            // classType already validated above; nothing else to do
            break;
        }
      }
    }
    // Add the axioms
    if (!subclassExpressionColumns.isEmpty()) {
      addSubClassAxioms(cls, subclassExpressionColumns, row);
    }
    if (!equivalentExpressionColumns.isEmpty()) {
      addEquivalentClassesAxioms(cls, equivalentExpressionColumns, row);
    }
    if (!intersectionEquivalentExpressionColumns.isEmpty()) {
      // Special case to support legacy "C"/"equivalent" class type
      // Which is the intersection of all C columns
      addIntersectionEquivalentClassesAxioms(cls, intersectionEquivalentExpressionColumns, row);
    }
    if (!disjointExpressionColumns.isEmpty()) {
      addDisjointClassAxioms(cls, disjointExpressionColumns, row);
    }
  }

  /**
   * Given an OWLClass, a map of column number to class expressions, and the row containing the
   * class details, generate subClassOf axioms for the class where the parents are the class
   * expressions. Maybe annotate the axioms.
   *
   * @param cls OWLClass to create subClassOf axioms for
   * @param expressionColumns map of column numbers to sets of parent class expressions
   * @param row list of template values for given class
   * @throws Exception on issue getting axiom annotations
   */
  private void addSubClassAxioms(
      OWLClass cls, Map<Integer, Set<OWLClassExpression>> expressionColumns, List<String> row)
      throws Exception {
    // Generate axioms
    for (int column : expressionColumns.keySet()) {
      // Maybe get an annotation on the expression
      Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
      Set<OWLClassExpression> exprs = expressionColumns.get(column);
      // Each expression will be its own subclass statement
      for (OWLClassExpression expr : exprs) {
        axioms.add(dataFactory.getOWLSubClassOfAxiom(cls, expr, axiomAnnotations));
      }
    }
  }

  /**
   * Given an OWLClass, a map of column number to class expressions, and the row containing the
   * class details, generate equivalent class axioms for the class where the equivalents are the
   * class expressions.
   * Maybe annotate the axioms.
   *
   * @param cls OWLClass to create equivalentClasses axiom for
   * @param expressionColumns map of column number to equivalent class expression
   * @param row list of template values for given class
   * @throws Exception on issue getting axiom annotations
   */
  private void addEquivalentClassesAxioms(
      OWLClass cls, Map<Integer, Set<OWLClassExpression>> expressionColumns, List<String> row)
      throws Exception {
    Set<OWLAnnotation> axiomAnnotations = new HashSet<>();
    Set<OWLClassExpression> expressions = new HashSet<>();
    // The class itself is part of the n-ary equivalence
    expressions.add(cls);
    for (int column : expressionColumns.keySet()) {
      // Maybe get an annotation on the expression
      // (annotations from every column end up on the single n-ary axiom)
      axiomAnnotations.addAll(maybeGetAxiomAnnotations(row, column));
      // Add all expressions to the set of expressions
      expressions.addAll(expressionColumns.get(column));
    }
    // Create one n-ary equivalentClasses axiom over the class and all expressions
    // (no intersection here - see addIntersectionEquivalentClassesAxioms for the legacy form)
    axioms.add(dataFactory.getOWLEquivalentClassesAxiom(expressions, axiomAnnotations));
  }

  /**
   * Given an OWLClass, a map of column number to class expressions, and the row for this class,
   * generate an equivalentClasses axiom for the class where the equivalent is the intersection of
   * the provided class expressions. Maybe annotate the axioms.
   *
   * @param cls OWLClass to create equivalentClasses axiom for
   * @param expressionColumns map of column number to equivalent class expression
   * @param row list of template values for given class
   * @throws Exception on issue getting axiom annotations
   */
  private void addIntersectionEquivalentClassesAxioms(
      OWLClass cls, Map<Integer, Set<OWLClassExpression>> expressionColumns, List<String> row)
      throws Exception {
    Set<OWLAnnotation> axiomAnnotations = new HashSet<>();
    Set<OWLClassExpression> expressions = new HashSet<>();
    for (int column : expressionColumns.keySet()) {
      // Maybe get an annotation on the expression (all annotations will be on the one intersection)
      axiomAnnotations.addAll(maybeGetAxiomAnnotations(row, column));
      // Add all expressions to the set of expressions
      expressions.addAll(expressionColumns.get(column));
    }
    // Create the axiom as an intersection of the provided expressions
    OWLObjectIntersectionOf intersection = dataFactory.getOWLObjectIntersectionOf(expressions);
    axioms.add(dataFactory.getOWLEquivalentClassesAxiom(cls, intersection, axiomAnnotations));
  }

  /**
   * Given an OWLClass, a map of column number to class expressions, and the row containing the
   * class details, generate disjointClasses axioms for the class where the disjoints are the class
   * expressions. Maybe annotate the axioms.
   *
   * @param cls OWLClass to create disjointClasses axioms for
   * @param expressionColumns map of column number to disjoint class expression
   * @param row list of template values for given class
   * @throws Exception on issue getting axiom annotations
   */
  private void addDisjointClassAxioms(
      OWLClass cls, Map<Integer, Set<OWLClassExpression>> expressionColumns, List<String> row)
      throws Exception {
    for (int column : expressionColumns.keySet()) {
      // Maybe get an annotation on the expression
      Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
      Set<OWLClassExpression> exprs = expressionColumns.get(column);
      // Each expression will be its own disjoint statement
      for (OWLClassExpression expr : exprs) {
        // Pairwise disjointness: {cls, expr} per expression
        Set<OWLClassExpression> disjoint = new HashSet<>(Arrays.asList(cls, expr));
        axioms.add(dataFactory.getOWLDisjointClassesAxiom(disjoint, axiomAnnotations));
      }
    }
  }

  /* OBJECT PROPERTY AXIOMS */

  /**
   * Given an object property IRI and the row containing the property details, generate property
   * axioms.
   *
   * @param iri object property IRI
   * @param row list of template values for given object property
   * @throws Exception on issue creating object property axioms from template
   */
  private void addObjectPropertyAxioms(IRI iri, List<String> row) throws Exception {
    // Add the declaration
    axioms.add(
        dataFactory.getOWLDeclarationAxiom(
            dataFactory.getOWLEntity(EntityType.OBJECT_PROPERTY, iri)));
    // Maybe get a property type (default subproperty)
    String propertyType = getPropertyType(row);
    // Maybe get characteristics (default none)
    List<String> characteristics = getCharacteristics(row);
    // Create the property object
    OWLObjectProperty property = dataFactory.getOWLObjectProperty(iri);
    // Handle special property types
    for (String c : characteristics) {
      switch (c.trim().toLowerCase()) {
        case "asymmetric":
          axioms.add(dataFactory.getOWLAsymmetricObjectPropertyAxiom(property));
          break;
        case "functional":
          axioms.add(dataFactory.getOWLFunctionalObjectPropertyAxiom(property));
          break;
        case "inversefunctional":
        case "inverse functional":
          axioms.add(dataFactory.getOWLInverseFunctionalObjectPropertyAxiom(property));
          break;
        case "irreflexive":
          axioms.add(dataFactory.getOWLIrreflexiveObjectPropertyAxiom(property));
          break;
        case "reflexive":
          axioms.add(dataFactory.getOWLReflexiveObjectPropertyAxiom(property));
          break;
        case "symmetric":
          axioms.add(dataFactory.getOWLSymmetricObjectPropertyAxiom(property));
          break;
        case "transitive":
          axioms.add(dataFactory.getOWLTransitiveObjectPropertyAxiom(property));
          break;
        default:
          // Unrecognized characteristic string
          throw new Exception(
              String.format(
                  unknownCharacteristicError,
                  property.getIRI().getShortForm(),
                  c,
                  rowNum,
                  characteristicColumn,
                  name));
      }
    }
    for (int column = 0; column < templates.size(); column++) {
      String template = templates.get(column);
      String value = null;
      try {
        value = row.get(column);
      } catch (IndexOutOfBoundsException e) {
        // do nothing - short row
      }
      if (value == null || value.trim().isEmpty()) {
        continue;
      }
      // NOTE(review): this method passes 0-based `column` to TemplateHelper, while the class
      // branch passes `column + 1` - confirm which index TemplateHelper expects for messages
      if (template.startsWith("A") || template.startsWith("LABEL")) {
        // Handle annotations
        Set<OWLAnnotation> annotations = getAnnotations(template, value, row, column);
        for (OWLAnnotation annotation : annotations) {
          axioms.add(dataFactory.getOWLAnnotationAssertionAxiom(iri, annotation));
        }
      } else if (template.startsWith("SP")) {
        // Subproperty expressions
        Set<OWLObjectPropertyExpression> expressions =
            TemplateHelper.getObjectPropertyExpressions(
                name, checker, template, value, rowNum, column);
        addSubObjectPropertyAxioms(property, expressions, row, column);
      } else if (template.startsWith("EP")) {
        // Equivalent properties expressions
        Set<OWLObjectPropertyExpression> expressions =
            TemplateHelper.getObjectPropertyExpressions(
                name, checker, template, value, rowNum, column);
        addEquivalentObjectPropertiesAxioms(property, expressions, row, column);
      } else if (template.startsWith("DP")) {
        // Disjoint properties expressions
        Set<OWLObjectPropertyExpression> expressions =
            TemplateHelper.getObjectPropertyExpressions(
                name, checker, template, value, rowNum, column);
        addDisjointObjectPropertiesAxioms(property, expressions, row, column);
      } else if (template.startsWith("IP")) {
        // Inverse properties expressions
        Set<OWLObjectPropertyExpression> expressions =
            TemplateHelper.getObjectPropertyExpressions(
                name, checker, template, value, rowNum, column);
        addInverseObjectPropertiesAxioms(property, expressions, row, column);
      } else if (template.startsWith("P") && !template.startsWith("PROPERTY_TYPE")) {
        // Use the property type to determine what type of expression
        Set<OWLObjectPropertyExpression> expressions =
            TemplateHelper.getObjectPropertyExpressions(
                name, checker, template, value, rowNum, column);
        switch (propertyType) {
          case "subproperty":
            addSubObjectPropertyAxioms(property, expressions, row, column);
            break;
          case "equivalent":
            addEquivalentObjectPropertiesAxioms(property, expressions, row, column);
            break;
          case "disjoint":
            addDisjointObjectPropertiesAxioms(property, expressions, row, column);
            break;
          case "inverse":
            addInverseObjectPropertiesAxioms(property, expressions, row, column);
            break;
          default:
            // Unknown property type
            throw new RowParseException(
                String.format(
                    propertyTypeError, iri.getShortForm(), propertyType, rowNum, column + 1, name));
        }
      } else if (template.startsWith("DOMAIN")) {
        // Handle domains
        Set<OWLClassExpression> expressions =
            TemplateHelper.getClassExpressions(name, parser, template, value, rowNum, column);
        addObjectPropertyDomains(property, expressions, row, column);
      } else if (template.startsWith("RANGE")) {
        // Handle ranges
        Set<OWLClassExpression> expressions =
            TemplateHelper.getClassExpressions(name, parser, template, value, rowNum, column);
        addObjectPropertyRanges(property, expressions, row, column);
      }
    }
  }

  /**
   * Given an OWLObjectProperty, a set of OWLObjectPropertyExpressions, and the row containing the
   * property details, generate subPropertyOf axioms for the property where the parents are the
   * property expressions. Maybe annotate the axioms.
   *
   * @param property OWLObjectProperty to create subPropertyOf axioms for
   * @param expressions set of OWLObjectPropertyExpressions
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addSubObjectPropertyAxioms(
      OWLObjectProperty property,
      Set<OWLObjectPropertyExpression> expressions,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms
    for (OWLObjectPropertyExpression expr : expressions) {
      axioms.add(dataFactory.getOWLSubObjectPropertyOfAxiom(property, expr, axiomAnnotations));
    }
  }

  /**
   * Given an OWLObjectProperty, a set of OWLObjectPropertyExpressions, and the row containing the
   * property details, generate equivalentProperties axioms for the property where the equivalents
   * are the property expressions. Maybe annotate the axioms.
   *
   * @param property OWLObjectProperty to create equivalentProperties axioms for
   * @param expressions set of equivalent OWLObjectPropertyExpressions
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addEquivalentObjectPropertiesAxioms(
      OWLObjectProperty property,
      Set<OWLObjectPropertyExpression> expressions,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms - one pairwise equivalence per expression
    for (OWLObjectPropertyExpression expr : expressions) {
      axioms.add(
          dataFactory.getOWLEquivalentObjectPropertiesAxiom(property, expr, axiomAnnotations));
    }
  }

  /**
   * Given an OWLObjectProperty, a set of OWLObjectPropertyExpressions, and the row containing the
   * property details, generate disjointProperties axioms for the property where the disjoints are
   * the property expressions. Maybe annotate the axioms.
   *
   * @param property OWLObjectProperty to create disjointProperties axioms for
   * @param expressions set of disjoint OWLObjectPropertyExpressions; note this set is mutated
   *     (the property itself is added to it)
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addDisjointObjectPropertiesAxioms(
      OWLObjectProperty property,
      Set<OWLObjectPropertyExpression> expressions,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms - a single n-ary disjointness over the property and all expressions
    expressions.add(property);
    axioms.add(dataFactory.getOWLDisjointObjectPropertiesAxiom(expressions, axiomAnnotations));
  }

  /**
   * Given an OWLObjectProperty, a set of OWLObjectPropertyExpressions, and the row containing the
   * property details, generate inverseProperties axioms for the property where the inverses are the
   * property expressions. Maybe annotate the axioms.
   *
   * @param property OWLObjectProperty to create inverseProperties axioms for
   * @param expressions set of inverse OWLObjectPropertyExpressions
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addInverseObjectPropertiesAxioms(
      OWLObjectProperty property,
      Set<OWLObjectPropertyExpression> expressions,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms
    for (OWLObjectPropertyExpression expr : expressions) {
      axioms.add(dataFactory.getOWLInverseObjectPropertiesAxiom(property, expr, axiomAnnotations));
    }
  }

  /**
   * Given an OWLObjectProperty, a set of OWLClassExpressions, the row containing the property
   * details, and a column location, generate domain axioms where the domains are the class
   * expressions.
   * Maybe annotate the axioms.
   *
   * @param property OWLObjectProperty to create domain axioms for
   * @param expressions set of domain OWLClassExpressions
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addObjectPropertyDomains(
      OWLObjectProperty property, Set<OWLClassExpression> expressions, List<String> row, int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms
    for (OWLClassExpression expr : expressions) {
      axioms.add(dataFactory.getOWLObjectPropertyDomainAxiom(property, expr, axiomAnnotations));
    }
  }

  /**
   * Given an OWLObjectProperty, a set of OWLClassExpressions, the row containing the property
   * details, and a column location, generate range axioms where the ranges are the class
   * expressions. Maybe annotate the axioms.
   *
   * @param property OWLObjectProperty to create range axioms for
   * @param expressions set of range OWLClassExpressions
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addObjectPropertyRanges(
      OWLObjectProperty property, Set<OWLClassExpression> expressions, List<String> row, int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms
    for (OWLClassExpression expr : expressions) {
      axioms.add(dataFactory.getOWLObjectPropertyRangeAxiom(property, expr, axiomAnnotations));
    }
  }

  /* DATA PROPERTY AXIOMS */

  /**
   * Given a data property IRI and the row containing the property details, generate property
   * axioms.
* * @param iri data property IRI * @param row list of template values for given data property * @throws Exception on issue creating data property axioms from template */ private void addDataPropertyAxioms(IRI iri, List<String> row) throws Exception { // Add the declaration axioms.add( dataFactory.getOWLDeclarationAxiom( dataFactory.getOWLEntity(EntityType.DATA_PROPERTY, iri))); OWLDataProperty property = dataFactory.getOWLDataProperty(iri); // Maybe get property type (default subproperty) String propertyType = getPropertyType(row); // Maybe get property characteristics (default empty list) List<String> characteristics = getCharacteristics(row); // Maybe add property characteristics for (String c : characteristics) { if (!c.equalsIgnoreCase("functional")) { throw new Exception( String.format( dataPropertyCharacteristicError, property.getIRI().getShortForm(), rowNum, characteristicColumn, name)); } axioms.add(dataFactory.getOWLFunctionalDataPropertyAxiom(property)); } for (int column = 0; column < templates.size(); column++) { String template = templates.get(column); String value = null; try { value = row.get(column); } catch (IndexOutOfBoundsException e) { // do nothing } if (value == null || value.trim().isEmpty()) { continue; } String split = null; if (template.contains("SPLIT=")) { split = template.substring(template.indexOf("SPLIT=") + 6).trim(); } if (template.startsWith("A") || template.startsWith("LABEL")) { // Handle annotations Set<OWLAnnotation> annotations = getAnnotations(template, value, row, column); for (OWLAnnotation annotation : annotations) { axioms.add(dataFactory.getOWLAnnotationAssertionAxiom(iri, annotation)); } } else if (template.startsWith("SP")) { // Subproperty expressions Set<OWLDataPropertyExpression> expressions = TemplateHelper.getDataPropertyExpressions( name, checker, template, value, rowNum, column); addSubDataPropertyAxioms(property, expressions, row, column); } else if (template.startsWith("EP")) { // Equivalent properties 
expressions Set<OWLDataPropertyExpression> expressions = TemplateHelper.getDataPropertyExpressions( name, checker, template, value, rowNum, column); addEquivalentDataPropertiesAxioms(property, expressions, row, column); } else if (template.startsWith("DP")) { // Disjoint properties expressions Set<OWLDataPropertyExpression> expressions = TemplateHelper.getDataPropertyExpressions( name, checker, template, value, rowNum, column); addDisjointDataPropertiesAxioms(property, expressions, row, column); } else if (template.startsWith("IP")) { // Cannot use inverse with data properties throw new RowParseException( String.format( propertyTypeError, iri.getShortForm(), propertyType, rowNum, column + 1, name)); } else if (template.startsWith("P ") && !template.startsWith("PROPERTY_TYPE")) { // Use property type to handle expression type Set<OWLDataPropertyExpression> expressions = TemplateHelper.getDataPropertyExpressions( name, checker, template, value, rowNum, column); switch (propertyType) { case "subproperty": addSubDataPropertyAxioms(property, expressions, row, column); break; case "equivalent": addEquivalentDataPropertiesAxioms(property, expressions, row, column); break; case "disjoint": addDisjointDataPropertiesAxioms(property, expressions, row, column); break; default: // Unknown property type throw new RowParseException( String.format( propertyTypeError, iri.getShortForm(), propertyType, rowNum, column + 1, name)); } } else if (template.startsWith("DOMAIN")) { // Handle domains Set<OWLClassExpression> expressions = TemplateHelper.getClassExpressions(name, parser, template, value, rowNum, column); addDataPropertyDomains(property, expressions, row, column); } else if (template.startsWith("RANGE")) { // Handle ranges Set<OWLDatatype> datatypes = TemplateHelper.getDatatypes(name, checker, value, split, rowNum, column); addDataPropertyRanges(property, datatypes, row, column); } } } /** * Given an OWLDataProperty, a set of OWLDataPropertyExpressions, and the row containing 
   * the property details, generate subPropertyOf axioms for the property where the parents are the
   * property expressions. Maybe annotate the axioms.
   *
   * @param property OWLDataProperty to create subPropertyOf axioms for
   * @param expressions set of parent OWLDataPropertyExpressions
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addSubDataPropertyAxioms(
      OWLDataProperty property,
      Set<OWLDataPropertyExpression> expressions,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms
    for (OWLDataPropertyExpression expr : expressions) {
      // null entries are skipped (unlike the object-property counterpart)
      if (expr != null) {
        axioms.add(dataFactory.getOWLSubDataPropertyOfAxiom(property, expr, axiomAnnotations));
      }
    }
  }

  /**
   * Given an OWLDataProperty, a set of OWLDataPropertyExpressions, and the row containing the
   * property details, generate equivalentProperties axioms for the property where the equivalents
   * are the property expressions. Maybe annotate the axioms.
   *
   * @param property OWLDataProperty to create equivalentProperties axioms for
   * @param expressions set of equivalent OWLDataPropertyExpressions
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addEquivalentDataPropertiesAxioms(
      OWLDataProperty property,
      Set<OWLDataPropertyExpression> expressions,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms - one pairwise equivalence per expression
    for (OWLDataPropertyExpression expr : expressions) {
      axioms.add(dataFactory.getOWLEquivalentDataPropertiesAxiom(property, expr, axiomAnnotations));
    }
  }

  /**
   * Given an OWLDataProperty, a set of OWLDataPropertyExpressions, and the row containing the
   * property details, generate disjointProperties axioms for the property where the disjoints are
   * the property expressions. Maybe annotate the axioms.
   *
   * @param property OWLDataProperty to create disjointProperties axioms for
   * @param expressions set of disjoint OWLDataPropertyExpressions; note this set is mutated
   *     (the property itself is added to it)
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addDisjointDataPropertiesAxioms(
      OWLDataProperty property,
      Set<OWLDataPropertyExpression> expressions,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms - a single n-ary disjointness over the property and all expressions
    expressions.add(property);
    axioms.add(dataFactory.getOWLDisjointDataPropertiesAxiom(expressions, axiomAnnotations));
  }

  /**
   * Given an OWLDataProperty, a set of OWLClassExpressions, the row containing the property
   * details, and a column location, generate domain axioms where the domains are the class
   * expressions. Maybe annotate the axioms.
   *
   * @param property OWLDataProperty to create domain axioms for
   * @param expressions set of domain OWLClassExpressions
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addDataPropertyDomains(
      OWLDataProperty property, Set<OWLClassExpression> expressions, List<String> row, int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms
    for (OWLClassExpression expr : expressions) {
      axioms.add(dataFactory.getOWLDataPropertyDomainAxiom(property, expr, axiomAnnotations));
    }
  }

  /**
   * Given an OWLDataProperty, a set of OWLDatatypes, the row containing the property details, and
   * a column location, generate range axioms where the ranges are the datatypes. Maybe annotate
   * the axioms.
   *
   * @param property OWLDataProperty to create range axioms for
   * @param datatypes set of range OWLDatatypes
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addDataPropertyRanges(
      OWLDataProperty property, Set<OWLDatatype> datatypes, List<String> row, int column)
      throws Exception {
    // Maybe get an annotation on the expression
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms
    for (OWLDatatype datatype : datatypes) {
      axioms.add(dataFactory.getOWLDataPropertyRangeAxiom(property, datatype, axiomAnnotations));
    }
  }

  /* ANNOTATION PROPERTY AXIOMS */

  /**
   * Given an annotation property IRI and the row containing the property details, generate property
   * axioms.
   *
   * @param iri annotation property IRI
   * @param row list of template values for given annotation property
   * @throws Exception on issue creating annotation property axioms from template
   */
  private void addAnnotationPropertyAxioms(IRI iri, List<String> row) throws Exception {
    // Add the declaration
    axioms.add(
        dataFactory.getOWLDeclarationAxiom(
            dataFactory.getOWLEntity(EntityType.ANNOTATION_PROPERTY, iri)));
    String propertyType = getPropertyType(row);
    if (!propertyType.equals("subproperty")) {
      // Annotation properties can only have type "subproperty"
      throw new RowParseException(
          String.format(
              annotationPropertyTypeError, iri, propertyType, rowNum, propertyTypeColumn, name));
    }
    // Annotation properties should not have characteristics
    if (characteristicColumn != -1) {
      // NOTE(review): unlike other row.get calls, this one is not guarded against
      // IndexOutOfBoundsException for short rows - confirm rows are always padded
      String propertyCharacteristicString = row.get(characteristicColumn);
      if (propertyCharacteristicString != null && !propertyCharacteristicString.trim().isEmpty()) {
        throw new RowParseException(
            String.format(
                annotationPropertyCharacteristicError,
                iri.getShortForm(),
                rowNum,
                characteristicColumn,
                name));
      }
    }
    // Create the property object
    OWLAnnotationProperty property = dataFactory.getOWLAnnotationProperty(iri);
    for (int column = 0; column < templates.size(); column++) {
      String template = templates.get(column);
      String value = null;
      try {
        value = row.get(column);
      } catch (IndexOutOfBoundsException e) {
        // do nothing - short row
      }
      if (value == null || value.trim().isEmpty()) {
        continue;
      }
      // Maybe get the split character
      String split = null;
      if (template.contains("SPLIT=")) {
        split = template.substring(template.indexOf("SPLIT=") + 6).trim();
      }
      if (template.startsWith("A") || template.startsWith("LABEL")) {
        // Handle annotations
        Set<OWLAnnotation> annotations = getAnnotations(template, value, row, column);
        for (OWLAnnotation annotation : annotations) {
          axioms.add(dataFactory.getOWLAnnotationAssertionAxiom(iri, annotation));
        }
      } else if (template.startsWith("SP")
          || template.startsWith("P") && !template.startsWith("PROPERTY_TYPE")) {
        // Handle property logic
        // (operator precedence: this is SP || (P && !PROPERTY_TYPE), which is the intent)
        Set<OWLAnnotationProperty> parents =
            TemplateHelper.getAnnotationProperties(checker, value, split);
        addSubAnnotationPropertyAxioms(property, parents, row, column);
      } else if (template.startsWith("DOMAIN")) {
        // Handle domains
        Set<IRI> iris = TemplateHelper.getValueIRIs(checker, value, split);
        addAnnotationPropertyDomains(property, iris, row, column);
      } else if (template.startsWith("RANGE")) {
        // Handle ranges
        Set<IRI> iris = TemplateHelper.getValueIRIs(checker, value, split);
        addAnnotationPropertyRanges(property, iris, row, column);
      }
    }
  }

  /**
   * Given an OWLAnnotationProperty, a set of OWLAnnotationProperties, and the row containing the
   * property details, generate subPropertyOf axioms for the property where the parents are the
   * other properties. Maybe annotate the axioms.
   *
   * @param property OWLAnnotationProperty to create subPropertyOf axioms for
   * @param parents set of parent OWLAnnotationProperties
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addSubAnnotationPropertyAxioms(
      OWLAnnotationProperty property,
      Set<OWLAnnotationProperty> parents,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the subproperty axiom
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);
    // Generate axioms
    for (OWLAnnotationProperty parent : parents) {
      axioms.add(
          dataFactory.getOWLSubAnnotationPropertyOfAxiom(property, parent, axiomAnnotations));
    }
  }

  /**
   * Given an OWLAnnotationProperty, a set of IRIs, the row containing the property details, and a
   * column location, generate domain axioms where the domains are the IRIs. Maybe annotate the
   * axioms.
   * @param property OWLAnnotationProperty to create domain axioms for
   * @param iris set of domain IRIs
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addAnnotationPropertyDomains(
      OWLAnnotationProperty property, Set<IRI> iris, List<String> row, int column)
      throws Exception {
    // Maybe get an annotation on the domain axiom (from a following ">" column)
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);

    // Generate one domain axiom per IRI
    for (IRI iri : iris) {
      axioms.add(dataFactory.getOWLAnnotationPropertyDomainAxiom(property, iri, axiomAnnotations));
    }
  }

  /**
   * Given an OWLAnnotationProperty, a set of IRIs, the row containing the property details, and a
   * column location, generate range axioms where the ranges are the IRIs. Maybe annotate the
   * axioms.
   *
   * @param property OWLAnnotationProperty to create range axioms for
   * @param iris set of range IRIs
   * @param row list of template values for given property
   * @param column column number of logical template string
   * @throws Exception on issue getting axiom annotations
   */
  private void addAnnotationPropertyRanges(
      OWLAnnotationProperty property, Set<IRI> iris, List<String> row, int column)
      throws Exception {
    // Maybe get an annotation on the range axiom (from a following ">" column)
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);

    // Generate one range axiom per IRI
    for (IRI iri : iris) {
      axioms.add(dataFactory.getOWLAnnotationPropertyRangeAxiom(property, iri, axiomAnnotations));
    }
  }

  /* DATATYPE AXIOMS */

  /**
   * Given a datatype IRI and the row containing the datatype details, generate datatype axioms.
   * @param iri datatype IRI
   * @param row list of template values for given datatype
   * @throws Exception on issue creating datatype annotations
   */
  private void addDatatypeAxioms(IRI iri, List<String> row) throws Exception {
    // Add the declaration
    axioms.add(
        dataFactory.getOWLDeclarationAxiom(dataFactory.getOWLEntity(EntityType.DATATYPE, iri)));
    for (int column = 0; column < templates.size(); column++) {
      String template = templates.get(column);
      String value = null;
      try {
        value = row.get(column);
      } catch (IndexOutOfBoundsException e) {
        // do nothing; a short row simply has no value for this column
      }
      if (value == null || value.trim().isEmpty()) {
        continue;
      }
      // Handle annotations
      if (template.startsWith("A")) {
        // Add the annotations to the datatype
        Set<OWLAnnotation> annotations = getAnnotations(template, value, row, column);
        for (OWLAnnotation annotation : annotations) {
          axioms.add(dataFactory.getOWLAnnotationAssertionAxiom(iri, annotation));
        }
      }
      // TODO - future support for data definitions with DT
    }
  }

  /* INDIVIDUAL AXIOMS */

  /**
   * Given an individual IRI and the row containing the individual details, generate individual
   * axioms.
   *
   * @param iri individual IRI
   * @param row list of template values for given individual
   * @throws Exception on issue creating individual axioms from template
   */
  private void addIndividualAxioms(IRI iri, List<String> row) throws Exception {
    // Should not return null, as empty defaults to a class
    String typeCol = row.get(typeColumn).trim();

    // Use the 'type' to get the class assertion for the individual
    // If it is owl:Individual or owl:NamedIndividual, it will not have a class assertion
    // There may be more than one class assertion - right now only named classes are supported
    List<String> types = new ArrayList<>();
    if (typeSplit != null) {
      for (String t : typeCol.split(Pattern.quote(typeSplit))) {
        if (!t.trim().equals("")) {
          types.add(t.trim());
        }
      }
    } else {
      types.add(typeCol.trim());
    }

    // The individualType is used to determine what kind of axioms are associated
    // e.g. different individuals, same individuals...
    // The default is just "named" individual (no special axioms)
    String individualType = "named";
    if (individualTypeColumn != -1) {
      try {
        individualType = row.get(individualTypeColumn);
      } catch (IndexOutOfBoundsException e) {
        // do nothing; fall back to the "named" default
      }
    }
    // NOTE(review): individualType is not trimmed or lower-cased, so a value like " same "
    // would hit the error default in the switch below — confirm intended.

    OWLNamedIndividual individual = dataFactory.getOWLNamedIndividual(iri);
    // Add declaration
    axioms.add(dataFactory.getOWLDeclarationAxiom(dataFactory.getOWLNamedIndividual(iri)));
    for (String type : types) {
      // Trim for safety
      type = type.trim();
      // Try to resolve a CURIE
      IRI typeIRI = ioHelper.createIRI(type);

      // Set to IRI string or to type string
      String typeOrIRI = type;
      if (typeIRI != null) {
        typeOrIRI = typeIRI.toString();
      }

      // Add a type if the type is not owl:Individual or owl:NamedIndividual
      if (!typeOrIRI.equalsIgnoreCase("individual")
          && !typeOrIRI.equalsIgnoreCase("named individual")
          && !typeOrIRI.equalsIgnoreCase("http://www.w3.org/2002/07/owl#NamedIndividual")
          && !typeOrIRI.equalsIgnoreCase("http://www.w3.org/2002/07/owl#Individual")) {
        OWLClass typeCls = checker.getOWLClass(type);
        if (typeCls != null) {
          axioms.add(dataFactory.getOWLClassAssertionAxiom(typeCls, individual));
        } else {
          // If the class is null, assume it is a class expression
          OWLClassExpression typeExpr =
              TemplateHelper.tryParse(name, parser, type, rowNum, typeColumn);
          axioms.add(dataFactory.getOWLClassAssertionAxiom(typeExpr, individual));
        }
      }
    }

    for (int column = 0; column < templates.size(); column++) {
      String template = templates.get(column);

      // Maybe get a split character, and strip the SPLIT clause from the template
      String split = null;
      if (template.contains("SPLIT=")) {
        split = template.substring(template.indexOf("SPLIT=") + 6).trim();
        template = template.substring(0, template.indexOf("SPLIT=")).trim();
      }

      String value = null;
      try {
        value = row.get(column);
      } catch (IndexOutOfBoundsException e) {
        // do nothing; a short row simply has no value for this column
      }
      if (value == null || value.trim().isEmpty()) {
        continue;
      }

      // Handle annotations
      if (template.startsWith("A") || template.startsWith("LABEL")) {
        // Add the annotations to the individual
        Set<OWLAnnotation> annotations = getAnnotations(template, value, row, column);
        for (OWLAnnotation annotation : annotations) {
          axioms.add(dataFactory.getOWLAnnotationAssertionAxiom(iri, annotation));
        }
      } else if (template.startsWith("SI")) {
        // Same individuals axioms
        Set<OWLIndividual> sameIndividuals = TemplateHelper.getIndividuals(checker, value, split);
        if (!sameIndividuals.isEmpty()) {
          addSameIndividualsAxioms(individual, sameIndividuals, row, column);
        }
      } else if (template.startsWith("DI")) {
        // Different individuals axioms
        Set<OWLIndividual> differentIndividuals =
            TemplateHelper.getIndividuals(checker, value, split);
        if (!differentIndividuals.isEmpty()) {
          addDifferentIndividualsAxioms(individual, differentIndividuals, row, column);
        }
      } else if (template.startsWith("I") && !template.startsWith("INDIVIDUAL_TYPE")) {
        // Use individual type to determine how to handle expressions
        switch (individualType) {
          case "named":
            if (template.startsWith("I ")) {
              String propStr = template.substring(2).replace("'", "");
              // Try the property string as an object property first, then as a data property
              OWLObjectProperty objectProperty = checker.getOWLObjectProperty(propStr);
              if (objectProperty != null) {
                Set<OWLIndividual> otherIndividuals =
                    TemplateHelper.getIndividuals(checker, value, split);
                addObjectPropertyAssertionAxioms(
                    individual, otherIndividuals, objectProperty, row, column);
                break;
              }
              OWLDataProperty dataProperty = checker.getOWLDataProperty(propStr);
              if (dataProperty != null) {
                Set<OWLLiteral> literals =
                    TemplateHelper.getLiterals(name, checker, value, split, rowNum, column);
                addDataPropertyAssertionAxioms(individual, literals, dataProperty, row, column);
                break;
              }
              // NOTE(review): a property string resolving to neither an object nor a data
              // property is silently ignored — confirm intended.
            }
            break;
          case "same":
            Set<OWLIndividual> sameIndividuals =
                TemplateHelper.getIndividuals(checker, value, split);
            if (!sameIndividuals.isEmpty()) {
              addSameIndividualsAxioms(individual, sameIndividuals, row, column);
            }
            break;
          case "different":
            Set<OWLIndividual> differentIndividuals =
                TemplateHelper.getIndividuals(checker, value, split);
            if (!differentIndividuals.isEmpty()) {
              addDifferentIndividualsAxioms(individual, differentIndividuals, row, column);
            }
            break;
          default:
            throw new RowParseException(
                String.format(
                    individualTypeError,
                    iri.getShortForm(),
                    individualType,
                    rowNum,
                    column + 1,
                    name));
        }
      }
    }
  }

  /**
   * Given an OWLIndividual, a set of OWLIndividuals, an object property expression, the row as list
   * of strings, and the column number, add each individual as the object of the object property
   * expression for that individual.
   *
   * @param individual OWLIndividual to add object property assertion axioms to
   * @param otherIndividuals set of other OWLIndividuals representing the objects of the axioms
   * @param expression OWLObjectPropertyExpression to use as property of the axioms
   * @param row list of strings
   * @param column column number
   * @throws Exception on problem handling axiom annotations
   */
  private void addObjectPropertyAssertionAxioms(
      OWLNamedIndividual individual,
      Set<OWLIndividual> otherIndividuals,
      OWLObjectPropertyExpression expression,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the assertion axiom (from a following ">" column)
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);

    for (OWLIndividual other : otherIndividuals) {
      axioms.add(
          dataFactory.getOWLObjectPropertyAssertionAxiom(
              expression, individual, other, axiomAnnotations));
    }
  }

  /**
   * Given an OWLIndividual, a set of OWLLiterals, a data property expression, the row as list of
   * strings, and the column number, add each literal as the object of the data property expression
   * for that individual.
   * @param individual OWLIndividual to add data property assertion axioms to
   * @param literals set of OWLLiterals representing the objects of the axioms
   * @param expression OWLDataPropertyExpression to use as property of the axioms
   * @param row list of strings
   * @param column column number
   * @throws Exception on problem handling axiom annotations
   */
  private void addDataPropertyAssertionAxioms(
      OWLNamedIndividual individual,
      Set<OWLLiteral> literals,
      OWLDataPropertyExpression expression,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the assertion axiom (from a following ">" column)
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);

    for (OWLLiteral lit : literals) {
      axioms.add(
          dataFactory.getOWLDataPropertyAssertionAxiom(
              expression, individual, lit, axiomAnnotations));
    }
  }

  /**
   * Given an OWLIndividual, a set of same individuals, a row as list of strings, and a column
   * number, add the same individual axioms.
   *
   * @param individual OWLIndividual to add axioms to
   * @param sameIndividuals set of same individuals. NOTE(review): this set is mutated — the
   *     subject individual is added to it before the axiom is built; callers must not reuse it
   * @param row list of strings
   * @param column column number
   * @throws Exception on problem handling axiom annotations
   */
  private void addSameIndividualsAxioms(
      OWLNamedIndividual individual,
      Set<OWLIndividual> sameIndividuals,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the sameAs axiom (from a following ">" column)
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);

    // Generate axioms; the subject itself is part of the sameAs set
    sameIndividuals.add(individual);
    axioms.add(dataFactory.getOWLSameIndividualAxiom(sameIndividuals, axiomAnnotations));
  }

  /**
   * Given an OWLIndividual, a set of different individuals, a row as list of strings, and a column
   * number, add the different individual axioms.
   *
   * @param individual OWLIndividual to add axioms to
   * @param differentIndividuals set of different individuals. NOTE(review): this set is mutated —
   *     the subject individual is added to it before the axiom is built
   * @param row list of strings
   * @param column column number
   * @throws Exception on problem handling axiom annotations
   */
  private void addDifferentIndividualsAxioms(
      OWLNamedIndividual individual,
      Set<OWLIndividual> differentIndividuals,
      List<String> row,
      int column)
      throws Exception {
    // Maybe get an annotation on the differentFrom axiom (from a following ">" column)
    Set<OWLAnnotation> axiomAnnotations = maybeGetAxiomAnnotations(row, column);

    // Generate axioms; the subject itself is part of the differentFrom set
    differentIndividuals.add(individual);
    axioms.add(dataFactory.getOWLDifferentIndividualsAxiom(differentIndividuals, axiomAnnotations));
  }

  /* ANNOTATION HELPERS */

  /**
   * Given a template string, a value string, a row as a list of strings, and the column number,
   * return a set of one or more OWLAnnotations.
   *
   * @param template template string
   * @param value value of annotation(s)
   * @param row list of strings
   * @param column column number
   * @return Set of OWLAnnotations (empty for a blank value)
   * @throws Exception on issue getting OWLAnnotations
   */
  private Set<OWLAnnotation> getAnnotations(
      String template, String value, List<String> row, int column) throws Exception {
    if (value == null || value.trim().equals("")) {
      return new HashSet<>();
    }
    Set<OWLAnnotation> annotations =
        TemplateHelper.getAnnotations(name, checker, template, value, rowNum, column);

    // Maybe get an annotation on the annotation (from a following ">" column)
    String nextTemplate;
    int nextColumn = column + 1;
    try {
      nextTemplate = templates.get(nextColumn);
    } catch (IndexOutOfBoundsException e) {
      nextTemplate = null;
    }
    if (nextTemplate != null) {
      Set<OWLAnnotation> axiomAnnotations = getAxiomAnnotations(row, nextTemplate, nextColumn);
      if (axiomAnnotations != null) {
        // Re-wrap each annotation with the annotations found on it
        Set<OWLAnnotation> fixedAnnotations = new HashSet<>();
        for (OWLAnnotation annotation : annotations) {
          fixedAnnotations.add(annotation.getAnnotatedAnnotation(axiomAnnotations));
        }
        annotations = fixedAnnotations;
      }
    }
    return annotations;
  }

  /**
   * Given a row as a list of strings, the template string, and the number of the next column, maybe
   * get axiom annotations on existing axiom annotations.
   *
   * @param row list of strings
   * @param nextTemplate template string for the column
   * @param nextColumn next column number
   * @return set of OWLAnnotations, or null when the column is not a ">" annotation column or its
   *     value cell is blank
   * @throws Exception on issue getting the OWLAnnotations
   */
  private Set<OWLAnnotation> getAxiomAnnotations(
      List<String> row, String nextTemplate, int nextColumn) throws Exception {
    // Handle axiom annotations: a ">"-prefixed template annotates the previous column
    if (!nextTemplate.trim().isEmpty() && (nextTemplate.startsWith(">"))) {
      nextTemplate = nextTemplate.substring(1);
      String nextValue;
      try {
        nextValue = row.get(nextColumn);
      } catch (IndexOutOfBoundsException e) {
        nextValue = null;
      }
      if (nextValue != null && !nextValue.trim().equals("")) {
        Set<OWLAnnotation> nextAnnotations =
            TemplateHelper.getAnnotations(
                name, checker, nextTemplate, nextValue, rowNum, nextColumn);
        // Recurse: the column after this one may annotate THESE annotations in turn
        String nextNextTemplate;
        try {
          nextNextTemplate = templates.get(nextColumn + 1);
        } catch (IndexOutOfBoundsException e) {
          nextNextTemplate = null;
        }
        if (nextNextTemplate != null) {
          Set<OWLAnnotation> nextNextAnnotations =
              getAxiomAnnotations(row, nextNextTemplate, nextColumn + 1);
          if (nextNextAnnotations != null) {
            Set<OWLAnnotation> fixedAnnotations = new HashSet<>();
            for (OWLAnnotation annotation : nextAnnotations) {
              fixedAnnotations.add(annotation.getAnnotatedAnnotation(nextNextAnnotations));
            }
            return fixedAnnotations;
          } else {
            return nextAnnotations;
          }
        } else {
          return nextAnnotations;
        }
      }
    }
    return null;
  }

  /**
   * Given a row as a list of strings and a column number, determine if the next column contains
   * one or more axiom annotations. If so, return the axiom annotation or annotations as a set of
   * OWLAnnotations.
* * @param row list of strings * @param column column number * @return set of OWLAnnotations, maybe empty * @throws Exception on issue getting the OWLAnnotations */ private Set<OWLAnnotation> maybeGetAxiomAnnotations(List<String> row, int column) throws Exception { // Look at the template string of the next column String nextTemplate; try { nextTemplate = templates.get(column + 1); } catch (IndexOutOfBoundsException e) { nextTemplate = null; } Set<OWLAnnotation> axiomAnnotations = new HashSet<>(); // If the next template string is not null, not empty, and it starts with > // Get the axiom annotations from the row if (nextTemplate != null && !nextTemplate.trim().isEmpty() && (nextTemplate.startsWith(">"))) { axiomAnnotations = getAxiomAnnotations(row, nextTemplate, column + 1); } return axiomAnnotations; } /** * Given a property string (label or CURIE) and the column of that template string, determine if * this is RDFS label and if so, set the label column. * * @param property property string * @param column int column number */ private void maybeSetLabelColumn(String property, int column) { OWLAnnotationProperty ap = checker.getOWLAnnotationProperty(property, true); if (ap != null) { if (ap.getIRI().toString().equals(dataFactory.getRDFSLabel().getIRI().toString())) { labelColumn = column; } } } /* OTHER HELPERS */ /** * Given a string ID and a string label, with at least one of those being non-null, return an IRI * for the entity. * * @param id String ID of entity, maybe null * @param label String label of entity, maybe null * @return IRI of entity * @throws Exception if both id and label are null */ private IRI getIRI(String id, String label) throws Exception { if (id == null && label == null) { // This cannot be hit by CLI users throw new Exception("You must specify either an ID or a label"); } if (id != null) { return ioHelper.createIRI(id); } return checker.getIRI(label, true); } /** * Given a row, get the property type if it exists. 
If not, return default of "subproperty". * * @param row list of strings * @return property type */ private String getPropertyType(List<String> row) { String propertyType = null; if (propertyTypeColumn != -1) { try { propertyType = row.get(propertyTypeColumn); } catch (IndexOutOfBoundsException e) { // do nothing } } if (propertyType == null || propertyType.trim().isEmpty()) { return "subproperty"; } else { return propertyType.trim().toLowerCase(); } } /** * Given a row, get the list of characteristics if they exist. If not, return an empty list. * * @param row list of strings * @return characteristics */ private List<String> getCharacteristics(List<String> row) { if (characteristicColumn != -1) { String characteristicString = row.get(characteristicColumn); if (characteristicSplit != null && characteristicString.contains(characteristicSplit)) { return Arrays.asList(characteristicString.split(Pattern.quote(characteristicSplit))); } else { return Collections.singletonList(characteristicString.trim()); } } return new ArrayList<>(); } }
package dyvil.reflect;

import dyvil.annotation.internal.DyvilModifiers;
import dyvil.annotation.internal.NonNull;
import dyvil.annotation.internal.Nullable;

import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;

/**
 * Reflection helpers for reading and writing {@link Field}s, including a best-effort mechanism
 * for rewriting a field's modifiers (e.g. clearing {@code final}).
 */
public class FieldReflection
{
	/**
	 * The {@code modifiers} field of {@link Field}, or {@code null} when the running JDK does not
	 * expose it via reflection (newer JDKs filter it), in which case modifier rewriting is a no-op.
	 */
	private static final @Nullable Field modifiersField;

	static
	{
		Field modField;
		try
		{
			modField = Field.class.getDeclaredField("modifiers");
			// Makes the 'modifiers' field of the java.lang.reflect.Field class accessible
			modField.setAccessible(true);
		}
		catch (ReflectiveOperationException ignored)
		{
			// 'modifiers' is filtered from reflection on this JDK
			modField = null;
		}
		modifiersField = modField;
	}

	/**
	 * Adds the modifiers {@code mod} to the given {@link Field} {@code field} if {@code flag} is true, and removes
	 * them otherwise. Does nothing when the JDK does not expose the {@code modifiers} field.
	 *
	 * @param field
	 * 	the field
	 * @param mod
	 * 	the modifiers
	 * @param flag
	 * 	add or remove
	 */
	@DyvilModifiers(Modifiers.INFIX)
	public static void setModifier(@NonNull Field field, int mod, boolean flag)
	{
		if (modifiersField == null)
		{
			// FIX: previously this dereferenced modifiersField unconditionally and threw an NPE
			// on JDKs where the static initializer left it null
			return;
		}
		try
		{
			field.setAccessible(true);
			int modifiers = field.getModifiers();
			if (flag)
			{
				modifiers |= mod;
			}
			else
			{
				modifiers &= ~mod;
			}
			modifiersField.setInt(field, modifiers);
		}
		catch (ReflectiveOperationException ex)
		{
			ex.printStackTrace();
		}
	}

	/**
	 * Clears the {@code final} modifier of the given field so it can be assigned via reflection.
	 * Does nothing when the JDK does not expose the {@code modifiers} field.
	 *
	 * @param field
	 * 	the field to make assignable
	 */
	@DyvilModifiers(Modifiers.INFIX)
	public static void setAssignable(@NonNull Field field)
	{
		if (modifiersField == null)
		{
			// FIX: avoid NPE on JDKs that filter the 'modifiers' field
			return;
		}
		try
		{
			field.setAccessible(true);
			modifiersField.setInt(field, field.getModifiers() & ~Modifiers.FINAL);
		}
		catch (Exception ignored)
		{
		}
	}

	// Fields

	/**
	 * Collects the values of all static fields of {@code clazz} whose type matches {@code fieldType}
	 * (or a subtype, when {@code subtypes} is true).
	 */
	public static @NonNull <T> T[] getStaticObjects(@NonNull Class clazz, @NonNull Class<T> fieldType, boolean subtypes)
	{
		return getObjects(clazz, null, fieldType, subtypes);
	}

	/**
	 * Collects the values of all fields of {@code clazz} on the given {@code instance} whose type
	 * matches {@code fieldType} (or a subtype, when {@code subtypes} is true). Inaccessible fields
	 * are silently skipped.
	 */
	public static @NonNull <T> T[] getObjects(@NonNull Class clazz, Object instance, @NonNull Class<T> fieldType, boolean subtypes)
	{
		List<T> list = new ArrayList<>();
		Field[] fields = clazz.getDeclaredFields();

		for (Field field : fields)
		{
			try
			{
				Class c = field.getType();
				Object o = field.get(instance);
				if (c == fieldType || subtypes && fieldType.isAssignableFrom(c))
				{
					list.add((T) o);
				}
			}
			catch (Exception ignored)
			{
				// skip fields we cannot read
			}
		}
		return list.toArray((T[]) Array.newInstance(fieldType, 0));
	}

	// Fields

	/**
	 * Returns the {@link Field} of the given {@link Class} {@code clazz} with the name {@code name}.
	 *
	 * @param clazz
	 * 	the clazz
	 * @param name
	 * 	the field name
	 *
	 * @return the field, or {@code null} if no declared field has that name
	 */
	public static Field getField(@NonNull Class clazz, @NonNull String name)
	{
		Field[] fields = clazz.getDeclaredFields();
		for (Field field : fields)
		{
			if (name.equals(field.getName()))
			{
				return field;
			}
		}
		return null;
	}

	/**
	 * Returns the {@link Field} of the given {@link Class} {@code clazz} with a name contained in {@code fieldNames}.
	 * Names are tried in the given order.
	 *
	 * @param clazz
	 * 	the clazz
	 * @param fieldNames
	 * 	the possible field names
	 *
	 * @return the field, or {@code null} if none of the names match
	 */
	public static Field getField(@NonNull Class clazz, @NonNull String... fieldNames)
	{
		Field[] fields = clazz.getDeclaredFields();
		for (String fieldName : fieldNames)
		{
			for (Field field : fields)
			{
				if (fieldName.equals(field.getName()))
				{
					return field;
				}
			}
		}
		return null;
	}

	/**
	 * Returns the {@link Field} of the given {@link Class} {@code clazz} with the field ID {@code fieldID}
	 * (the index into {@link Class#getDeclaredFields()}).
	 *
	 * @param clazz
	 * 	the clazz
	 * @param fieldID
	 * 	the field ID
	 *
	 * @return the field
	 */
	public static Field getField(@NonNull Class clazz, int fieldID)
	{
		return clazz.getDeclaredFields()[fieldID];
	}

	// Field getters

	// Reference

	public static @Nullable <T, R> R getStaticValue(@NonNull Class<? super T> clazz, String... fieldNames)
	{
		return getValue(clazz, null, fieldNames);
	}

	public static @Nullable <T, R> R getValue(@NonNull T instance, String... fieldNames)
	{
		return getValue((Class<T>) instance.getClass(), instance, fieldNames);
	}

	public static @Nullable <T, R> R getValue(@NonNull Class<? super T> clazz, T instance, String... fieldNames)
	{
		Field f = getField(clazz, fieldNames);
		return getValue(f, instance);
	}

	// Field ID

	public static @Nullable <T, R> R getStaticValue(@NonNull Class<? super T> clazz, int fieldID)
	{
		return getValue(clazz, null, fieldID);
	}

	public static @Nullable <T, R> R getValue(@NonNull T instance, int fieldID)
	{
		return getValue((Class<? super T>) instance.getClass(), instance, fieldID);
	}

	public static @Nullable <T, R> R getValue(@NonNull Class<? super T> clazz, T instance, int fieldID)
	{
		Field f = getField(clazz, fieldID);
		return getValue(f, instance);
	}

	/**
	 * Directly gets the value of the given {@link Field} on the given {@link Object} {@code instance}.
	 *
	 * @param field
	 * 	the field to get
	 * @param instance
	 * 	the instance
	 *
	 * @return the value, or {@code null} if the field could not be read
	 */
	public static <R> R getValue(@NonNull Field field, Object instance)
	{
		try
		{
			field.setAccessible(true);
			return (R) field.get(instance);
		}
		catch (Exception ex)
		{
			ex.printStackTrace();
			return null;
		}
	}

	// Field setters

	// Reference

	public static <T, V> void setStaticValue(@NonNull Class<? super T> clazz, V value, String... fieldNames)
	{
		setValue(clazz, null, value, fieldNames);
	}

	public static <T, V> void setValue(@NonNull T instance, V value, String... fieldNames)
	{
		setValue((Class<? super T>) instance.getClass(), instance, value, fieldNames);
	}

	public static <T, V> void setValue(@NonNull Class<? super T> clazz, T instance, V value, String... fieldNames)
	{
		Field f = getField(clazz, fieldNames);
		setField(f, instance, value);
	}

	// Field ID

	public static <T, V> void setStaticValue(@NonNull Class<? super T> clazz, V value, int fieldID)
	{
		setValue(clazz, null, value, fieldID);
	}

	public static <T, V> void setValue(@NonNull T instance, V value, int fieldID)
	{
		setValue((Class<? super T>) instance.getClass(), instance, value, fieldID);
	}

	public static <T, V> void setValue(@NonNull Class<? super T> clazz, T instance, V value, int fieldID)
	{
		Field f = getField(clazz, fieldID);
		setField(f, instance, value);
	}

	/**
	 * Directly sets the value of the given {@link Field} on the given {@link Object} {@code instance} to the given
	 * {@link Object} {@code value} .
	 *
	 * @param field
	 * 	the field to set
	 * @param instance
	 * 	the instance
	 * @param value
	 * 	the new value
	 */
	public static <T, V> void setField(@NonNull Field field, T instance, V value)
	{
		try
		{
			field.setAccessible(true);
			field.set(instance, value);
		}
		catch (Exception ex)
		{
			ex.printStackTrace();
		}
	}
}
package com.sailthru.client.params;

import com.google.gson.reflect.TypeToken;
import com.sailthru.client.ApiAction;

import java.lang.reflect.Type;
import java.util.Map;

/**
 * Event params
 * @author Prajwal Tuladhar <praj@sailthru.com>
 */
public class Event extends AbstractApiParams implements ApiParams {

    protected String id;
    protected String key;
    protected Map<String, Object> vars;
    protected String name;

    public Event(String id) {
        this.id = id;
    }

    public Event() {
        // used when the id is not known up front
    }

    public Event setKey(String key) {
        this.key = key;
        return this;
    }

    public Event setName(String name) {
        this.name = name;
        return this;
    }

    public Event setVars(Map<String, Object> vars) {
        this.vars = vars;
        return this;
    }

    public Type getType() {
        // FIX: was TypeToken<User> — a copy-paste from the User params class; responses for
        // the event call should deserialize as Event
        Type _type = new TypeToken<Event>() {}.getType();
        return _type;
    }

    public ApiAction getApiCall() {
        return ApiAction.event;
    }
}
package com.jme.util;

import java.util.concurrent.*;

/**
 * <code>GameTask</code> is used in <code>GameTaskQueue</code> to manage tasks that have
 * yet to be accomplished. Completion, cancellation and failure are tracked with explicit
 * flags so that a legitimate {@code null} result is handled correctly.
 *
 * @author Matthew D. Hicks
 */
class GameTask<V> implements Future<V> {
    private final Callable<V> callable;

    private boolean cancelled;
    private boolean completed;
    private V result;
    private ExecutionException exc;

    public GameTask(Callable<V> callable) {
        this.callable = callable;
    }

    public synchronized boolean cancel(boolean mayInterruptIfRunning) {
        // FIX: was keyed on (result != null), which wrongly allowed cancelling a completed
        // task whose result was null, and blocked cancelling nothing else
        if (completed) {
            return false;
        }
        cancelled = true;
        notifyAll(); // wake any threads blocked in get()
        return true;
    }

    public synchronized V get() throws InterruptedException, ExecutionException {
        // FIX: also stop waiting on cancellation; previously get() hung forever after cancel()
        while (!completed && exc == null && !cancelled) {
            wait();
        }
        if (cancelled) {
            throw new CancellationException();
        }
        if (exc != null) {
            throw exc;
        }
        return result;
    }

    public synchronized V get(long timeout, TimeUnit unit)
            throws InterruptedException, ExecutionException, TimeoutException {
        if (!completed && exc == null && !cancelled) {
            unit.timedWait(this, timeout);
        }
        if (cancelled) {
            throw new CancellationException();
        }
        if (exc != null) {
            throw exc;
        }
        // FIX: was (result == null), which reported a timeout for a completed task that
        // legitimately produced null
        if (!completed) {
            throw new TimeoutException("Object not returned in time allocated.");
        }
        return result;
    }

    public synchronized boolean isCancelled() {
        return cancelled;
    }

    public synchronized boolean isDone() {
        // FIX: was (completed) only; invoke() never sets completed on failure, so a failed
        // task previously reported isDone() == false forever
        return completed || cancelled || exc != null;
    }

    public Callable<V> getCallable() {
        return callable;
    }

    public synchronized void invoke() {
        if (cancelled) {
            // honor a cancellation requested before execution started
            notifyAll();
            return;
        }
        try {
            result = callable.call();
            completed = true;
        } catch (Exception e) {
            e.printStackTrace();
            exc = new ExecutionException(e);
        }
        notifyAll();
    }
}
package gw2trades.importer;

import gw2trades.importer.dao.TradingPost;
import gw2trades.repository.api.ItemRepository;
import gw2trades.repository.api.model.Item;
import gw2trades.repository.api.model.ItemListings;
import gw2trades.repository.influxdb.InfluxDbConnectionManager;
import gw2trades.repository.influxdb.InfluxDbRepository;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.influxdb.InfluxDB;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * Pulls all item ids from the trading post, fetches items and listings in parallel chunks, and
 * stores the combined snapshot into the item repository.
 *
 * @author Stefan Lotties (slotties@gmail.com)
 */
public class Importer {
    private static final Logger LOGGER = LogManager.getLogger(Importer.class);

    private final TradingPost tradingPost;
    private final Config config;

    public Importer(Config config, TradingPost tradingPost) {
        this.config = config;
        this.tradingPost = tradingPost;
    }

    /**
     * Runs a full import: sets up the database, pulls all chunks on a fixed thread pool, and
     * writes the collected listings into the repository.
     *
     * @throws Exception on configuration, I/O or repository errors
     */
    public void execute() throws Exception {
        InfluxDbConnectionManager connectionManager = new InfluxDbConnectionManager(
                config.required("influxdb.url"),
                config.optional("influxdb.user").orElse(""),
                config.optional("influxdb.pass").orElse("")
        );
        setupDatabase(connectionManager);

        String indexDir = config.required("index.dir");
        ItemRepository repository = new InfluxDbRepository(connectionManager, indexDir, false);

        // parseInt avoids the needless boxing of Integer.valueOf
        int chunkSize = Integer.parseInt(config.required("importer.chunkSize"));
        int threadCount = Integer.parseInt(config.required("importer.threads"));

        LOGGER.info("Importing with {} threads (each {} chunks) into {}...\n", threadCount, chunkSize, indexDir);

        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        Map<Integer, Item> allItems = new ConcurrentHashMap<>();
        Map<Integer, ItemListings> allListings = new ConcurrentHashMap<>();

        long t0 = System.currentTimeMillis();
        List<Integer> itemIds = tradingPost.listItemIds();
        for (int i = 0; i < itemIds.size(); i += chunkSize) {
            List<Integer> chunk = itemIds.subList(i, Math.min(itemIds.size(), i + chunkSize));
            int chunkNumber = i / chunkSize;
            executorService.execute(() -> importChunk(chunk, chunkNumber, allItems, allListings));
        }

        executorService.shutdown();
        // FIX: the awaitTermination result was silently ignored; a timeout now at least logs
        // that the stored snapshot may be partial
        if (!executorService.awaitTermination(10, TimeUnit.MINUTES)) {
            LOGGER.warn("Import did not finish within 10 minutes; storing partial results.");
        }

        // FIX: close the repository even when store() throws
        try {
            LOGGER.info("Writing everything into repository ...");
            repository.store(allListings.values(), System.currentTimeMillis());
        } finally {
            repository.close();
        }

        long t1 = System.currentTimeMillis();
        LOGGER.info("Imported the trading post within {} ms.", t1 - t0);
    }

    /**
     * Pulls one chunk of items and listings and merges them into the shared (concurrent) maps.
     * I/O errors are logged and skip only this chunk.
     */
    private void importChunk(
            List<Integer> chunk,
            int chunkNumber,
            Map<Integer, Item> allItems,
            Map<Integer, ItemListings> allListings) {
        try {
            LOGGER.info("Pulling chunk #{} ...", chunkNumber);
            List<ItemListings> listings = tradingPost.listings(chunk);
            List<Item> items = tradingPost.listItems(chunk);
            for (Item item : items) {
                allItems.put(item.getItemId(), item);
            }
            for (ItemListings listing : listings) {
                Item item = allItems.get(listing.getItemId());
                if (item != null) {
                    listing.setItem(item);
                    allListings.put(listing.getItemId(), listing);
                } else {
                    LOGGER.warn("Could not find item {}.", listing.getItemId());
                }
            }
        } catch (IOException e) {
            LOGGER.error("Could not import item ids {}", chunk, e);
        }
    }

    /**
     * Creates the backing database; an already-existing database is treated as success.
     */
    private void setupDatabase(InfluxDbConnectionManager influxDbConnectionManager) {
        InfluxDB influxDb = influxDbConnectionManager.getConnection();
        try {
            influxDb.createDatabase("gw2trades");
        } catch (Exception e) {
            LOGGER.info("Database exists already.");
        }
    }
}
package com.redomar.game;

import java.awt.BorderLayout;
import java.awt.Canvas;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.image.BufferStrategy;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;

import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.UIManager;

import org.apache.commons.lang3.text.WordUtils;

import com.redomar.game.entities.Dummy;
import com.redomar.game.entities.Player;
import com.redomar.game.entities.PlayerMP;
import com.redomar.game.gfx.Screen;
import com.redomar.game.gfx.SpriteSheet;
import com.redomar.game.level.LevelHandler;
import com.redomar.game.lib.Font;
import com.redomar.game.lib.Music;
import com.redomar.game.lib.Time;
import com.redomar.game.net.GameClient;
import com.redomar.game.net.GameServer;
import com.redomar.game.net.packets.Packet00Login;
import com.thehowtotutorial.splashscreen.JSplash;

/**
 * Main entry point and central hub of the game: owns the window, the
 * fixed-timestep game loop (60 ticks/s), software rendering to a scaled
 * BufferedImage, level/player management, background music, and the
 * client/server networking bootstrap.
 *
 * NOTE(review): much of the state here is held in static fields referring to a
 * single {@code game} instance, so only one Game per JVM is supported.
 */
public class Game extends Canvas implements Runnable {

	private static final long serialVersionUID = 1L;

	private static final String game_Version = "v1.5.3 Alpha";

	// Setting the size and name of the frame/canvas.
	// Logical resolution is WIDTH x HEIGHT, blown up on screen by SCALE.
	private static final int WIDTH = 160;
	private static final int HEIGHT = (WIDTH / 3 * 2);
	private static final int SCALE = 3;
	private static final String NAME = "Game";

	// Singleton-style reference assigned in init().
	private static Game game;

	// Answer from the "Do you want to be the HOST?" dialog (0 = yes/host).
	private static int Jdata_Host;
	private static String Jdata_UserName = "";
	private static String Jdata_IP = "127.0.0.1";
	private static boolean changeLevel = false;
	private static boolean npc = false;
	// Current map id: 1 = custom_level, 2 = water_level.
	private static int map = 0;

	private JFrame frame;

	private boolean running = false;
	private int tickCount = 0;

	// Backbuffer for the play area; 'pixels' aliases its raster directly so
	// writes to pixels[] show up in 'image' without a copy.
	private BufferedImage image = new BufferedImage(WIDTH, HEIGHT,
			BufferedImage.TYPE_INT_RGB);
	private int[] pixels = ((DataBufferInt) image.getRaster().getDataBuffer())
			.getData();
	// 6x6x6 colour cube palette (216 entries) filled in init().
	private int[] colours = new int[6 * 6 * 6];
	// Secondary image drawn behind the 30px HUD strip at the bottom.
	private BufferedImage image2 = new BufferedImage(WIDTH, HEIGHT - 30,
			BufferedImage.TYPE_INT_RGB);

	private Screen screen;
	private InputHandler input;
	private WindowHandler window;
	private LevelHandler level;
	private Player player;
	private Dummy dummy;
	private Music music = new Music();
	private Time time = new Time();
	private Font font = new Font();
	private Thread musicThread = new Thread(music, "MUSIC");
	private String nowPlaying;
	// true until the user has accepted the music prompt once.
	private boolean notActive = true;
	private boolean noAudioDevice = false;
	// Frame counter used to delay the "no audio device" popup.
	private int trigger = 0;

	private GameClient socketClient;
	private GameServer socketServer;

	/**
	 * Builds the Swing frame around this canvas and shows it. The game loop
	 * itself is only started by {@link #start()}.
	 */
	public Game() {
		setMinimumSize(new Dimension(WIDTH * SCALE, HEIGHT * SCALE));
		setMaximumSize(new Dimension(WIDTH * SCALE, HEIGHT * SCALE));
		setPreferredSize(new Dimension(WIDTH * SCALE, HEIGHT * SCALE));

		setFrame(new JFrame(NAME));

		getFrame().setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		getFrame().setLayout(new BorderLayout());

		getFrame().add(this, BorderLayout.CENTER);
		getFrame().pack();

		getFrame().setResizable(false);
		getFrame().setLocationRelativeTo(null);
		getFrame().setVisible(true);
	}

	/**
	 * One-time setup run on the game thread: builds the palette, screen,
	 * input, window handler and first level, then announces this player to
	 * the server via a login packet.
	 */
	public void init() {
		setGame(this);
		int index = 0;
		// Precompute the 216-colour palette: each channel quantised to 6 steps.
		for (int r = 0; r < 6; r++) {
			for (int g = 0; g < 6; g++) {
				for (int b = 0; b < 6; b++) {
					int rr = (r * 255 / 5);
					int gg = (g * 255 / 5);
					int bb = (b * 255 / 5);

					colours[index++] = rr << 16 | gg << 8 | bb;
				}
			}
		}

		screen = new Screen(WIDTH, HEIGHT, new SpriteSheet("/sprite_sheet.png"));
		input = new InputHandler(this);
		setWindow(new WindowHandler(this));
		setMap("/levels/custom_level.png");
		setMap(1);

		Packet00Login loginPacket = new Packet00Login(player.getUsername(),
				player.getX(), player.getY());
		if (socketServer != null) {
			socketServer.addConnection((PlayerMP) getPlayer(), loginPacket);
		}
		// socketClient.sendData("ping".getBytes());
		loginPacket.writeData(getSocketClient());
	}

	/**
	 * Loads the level image at the given classpath location and respawns the
	 * player at (100, 100) inside it.
	 */
	public void setMap(String Map_str) {
		setLevel(new LevelHandler(Map_str));
		setPlayer(new PlayerMP(getLevel(), 100, 100, input,
				Jdata_UserName, null, -1));
		level.addEntity(player);
	}

	// Spawns the NPC dummy when the npc flag is on.
	public static void npcSpawn(){
		if(isNpc() == true){
			game.setDummy(new Dummy(Game.getLevel(), "h", 215, 215, 500, 543));
			game.level.addEntity(Game.getDummy());
		}
	}

	// Removes the NPC dummy when the npc flag is off.
	// NOTE(review): if npcKill() runs before npcSpawn() ever did, getDummy()
	// is null — verify removeEntity(null) is safe in LevelHandler.
	public static void npcKill(){
		if(isNpc() == false){
			game.level.removeEntity(Game.getDummy());
		}
	}

	/**
	 * Starts the game thread and the networking layer. Host answer 0 means
	 * "yes": a local server is started before the client connects.
	 */
	public synchronized void start() {
		running = true;

		new Thread(this, "GAME").start();
		if (Jdata_Host == 0) {
			socketServer = new GameServer(this);
			socketServer.start();
		}

		setSocketClient(new GameClient(this, Jdata_IP));
		getSocketClient().start();
	}

	// Signals the game loop to exit after the current iteration.
	public synchronized void stop() {
		running = false;
	}

	/**
	 * Fixed-timestep loop: accumulates elapsed time in 1/60s units, ticks
	 * once per unit, renders when at least one tick happened, and updates the
	 * FPS/TPS title once per second.
	 */
	public void run() {
		long lastTime = System.nanoTime();
		double nsPerTick = 1000000000D / 60D;

		int ticks = 0;
		int frames = 0;

		long lastTimer = System.currentTimeMillis();
		double delta = 0;

		init();

		while (running) {
			long now = System.nanoTime();
			delta += (now - lastTime) / nsPerTick;
			lastTime = now;
			boolean shouldRender = false;

			// Catch up on missed ticks; may tick several times per frame.
			while (delta >= 1) {
				ticks++;
				tick();
				delta -= 1;
				shouldRender = true;
			}

			try {
				// Small sleep to avoid busy-spinning the CPU.
				Thread.sleep(2);
			} catch (InterruptedException e) {
				e.printStackTrace();
			}

			if (shouldRender) {
				frames++;
				render();
			}

			if (System.currentTimeMillis() - lastTimer >= 1000) {
				lastTimer += 1000;
				getFrame().setTitle("Frames: " + frames + " Ticks: " + ticks);
				frames = 0;
				ticks = 0;
			}
		}
	}

	// Advances game state by one tick.
	public void tick() {
		setTickCount(getTickCount() + 1);
		getLevel().tick();
	}

	/**
	 * Renders one frame: tiles and entities into the software backbuffer,
	 * then the HUD (welcome text, clock, music status) via AWT Graphics.
	 * Also owns the music-prompt dialog and the level-switch handling.
	 */
	public void render() {
		BufferStrategy bs = getBufferStrategy();
		if (bs == null) {
			// Triple buffering; first call only creates the strategy.
			createBufferStrategy(3);
			return;
		}

		// Centre the camera on the player.
		int xOffset = getPlayer().getX() - (screen.getWidth() / 2);
		int yOffset = getPlayer().getY() - (screen.getHeight() / 2);

		getLevel().renderTiles(screen, xOffset, yOffset);

		/*
		 * for (int x = 0; x < level.width; x++) { int colour = Colours.get(-1,
		 * -1, -1, 000); if (x % 10 == 0 && x != 0) { colour = Colours.get(-1,
		 * -1, -1, 500); } Font.render((x % 10) + "", screen, 0 + (x * 8), 0,
		 * colour, 1); } */
		getLevel().renderEntities(screen);

		// Copy the screen's palette-indexed pixels into the RGB backbuffer;
		// index 255 is treated as transparent/skip.
		for (int y = 0; y < screen.getHeight(); y++) {
			for (int x = 0; x < screen.getWidth(); x++) {
				int colourCode = screen.getPixels()[x + y * screen.getWidth()];
				if (colourCode < 255) {
					pixels[x + y * WIDTH] = colours[colourCode];
				}
			}
		}

		// First time the user presses M, confirm before starting the music
		// thread (it cannot be stopped once started).
		if (noAudioDevice == false){
			if (input.isPlayMusic() == true && notActive == true){
				int musicOption = JOptionPane.showConfirmDialog(this, "You are about to turn on music and can be VERY loud", "Music Options", 2, 2);
				if (musicOption == 0){
					musicThread.start();
					notActive = false;
				} else {
					System.out.println("[GAME] Canceled music option");
					input.setPlayMusic(false);
				}
			}
		}

		// NOTE(review): this sets changeLevel true then immediately false, so
		// after this block changeLevel is always false — the map-switch block
		// below can only fire when changeLevel was set elsewhere and
		// tickCount % 60 != 0. Looks like a bug or leftover debounce; confirm
		// intent before touching.
		if (isChangeLevel() == true && getTickCount() % 60 == 0){
			Game.setChangeLevel(true);
			setChangeLevel(false);
		}

		// Toggle between the two known maps when a level change is requested.
		if (changeLevel == true){
			if(getMap() == 1){
				setMap("/levels/water_level.png");
				setMap(2);
			}else if(getMap() == 2){
				setMap("/levels/custom_level.png");
				setMap(1);
			}
			changeLevel = false;
		}

		Graphics g = bs.getDrawGraphics();
		g.drawRect(0, 0, getWidth(), getHeight());
		// Play area on top, 30px HUD strip at the bottom.
		g.drawImage(image, 0, 0, getWidth(), getHeight()-30, null);
		// Font.render("Hi", screen, 0, 0, Colours.get(-1, -1, -1, 555), 1);
		g.drawImage(image2, 0, getHeight()-30, getWidth(), getHeight(), null);
		g.setColor(Color.WHITE);
		g.setFont(font.getSegoe());
		g.drawString("Welcome "+WordUtils.capitalizeFully(player.getSantizedUsername()), 3, getHeight()-17);
		g.setColor(Color.YELLOW);
		g.drawString(time.getTime(), (getWidth() - 58), (getHeight()-3));
		g.setColor(Color.WHITE);
		if(noAudioDevice == true){
			g.setColor(Color.RED);
			g.drawString("MUSIC is OFF | no audio device for playback", 3, getHeight()-3);
			// Only nag about the missing audio device once, 25 frames in.
			trigger++;
			if(trigger == 25){
				JOptionPane.showMessageDialog(this, "No Audio device found", "Audio Issue", 0);
			}
		} else if (notActive == true){
			g.setColor(Color.RED);
			g.drawString("MUSIC is OFF | press 'M' to start", 3, getHeight()-3);
		} else{
			g.setColor(Color.GREEN);
			g.drawString("MUSIC is ON | You cannot turn off the music", 3, getHeight()-3);
			g.setColor(Color.WHITE);
			// Strip a 7-char prefix and 4-char extension off the track file
			// name to get a display title — assumes that naming convention.
			setNowPlaying(WordUtils.capitalize(music.getSongName()[music.getSongNumber()].substring(7, (music.getSongName()[music.getSongNumber()].length() - 4))));
			// Slightly different right-alignment fudge depending on the
			// first letter (presumably tuned by eye for specific titles).
			if (getNowPlaying().startsWith("T")){
				g.drawString(nowPlaying, getWidth() - (nowPlaying.length() * 9) + 12,
						getHeight() - 17);
			} else {
				g.drawString(nowPlaying, getWidth() - (nowPlaying.length() * 9) + 8,
						getHeight() - 17);
			}
		}

		g.dispose();
		bs.show();
	}

	/**
	 * Shows the splash screen with fake progress, collects host/IP/username
	 * via dialogs, then constructs and starts the game.
	 */
	public static void main(String[] args) {
		try {
			JSplash splash = new JSplash(
					Game.class.getResource("/splash/splash.png"), true, true,
					false, game_Version, null, Color.RED, Color.ORANGE);
			splash.toFront();
			splash.splashOn();
			splash.setProgress(10, "Initializing Game");
			Thread.sleep(250);
			splash.setProgress(25, "Loading Classes");
			Thread.sleep(125);
			splash.setProgress(35, "Applying Configurations");
			Thread.sleep(125);
			splash.setProgress(40, "Loading Sprites");
			Thread.sleep(250);
			splash.setProgress(50, "Loading Textures");
			Thread.sleep(125);
			splash.setProgress(60, "Loading Map");
			Thread.sleep(500);
			splash.setProgress(80, "Configuring Map");
			Thread.sleep(125);
			splash.setProgress(90, "Pulling InputPanes");
			Thread.sleep(250);
			splash.setProgress(92, "Aquring data: Multiplayer");
			Thread.sleep(125);
			UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
			// showConfirmDialog: 0 = Yes (host), 1 = No (join remote).
			Jdata_Host = JOptionPane.showConfirmDialog(getGame(),
					"Do you want to be the HOST?");
			if (Jdata_Host == 1){
				Jdata_IP = JOptionPane.showInputDialog(getGame(),
						"Enter the name \nleave blank for local");
			}
			Thread.sleep(125);
			splash.setProgress(95, "Aquring data: Username");
			Thread.sleep(125);
			splash.setProgress(96, "Initalizing as Server:Host");
			Jdata_UserName = JOptionPane.showInputDialog(getGame(),
					"Enter a name");
			splash.setProgress(97, "Connecting as" + Jdata_UserName);
			Thread.sleep(250);
			splash.splashOff();
			new Game().start();
			// new Menu().start();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	public JFrame getFrame() {
		return frame;
	}

	public void setFrame(JFrame frame) {
		this.frame = frame;
	}

	public GameClient getSocketClient() {
		return socketClient;
	}

	public void setSocketClient(GameClient socketClient) {
		this.socketClient = socketClient;
	}

	public static Player getPlayer() {
		return game.player;
	}

	public void setPlayer(Player player) {
		game.player = player;
	}

	public static LevelHandler getLevel() {
		return game.level;
	}

	public void setLevel(LevelHandler level) {
		this.level = level;
	}

	public WindowHandler getWindow() {
		return window;
	}

	public void setWindow(WindowHandler window) {
		this.window = window;
	}

	public String getNowPlaying() {
		return nowPlaying;
	}

	public void setNowPlaying(String nowPlaying) {
		this.nowPlaying = nowPlaying;
	}

	public int getTickCount() {
		return tickCount;
	}

	public void setTickCount(int tickCount) {
		this.tickCount = tickCount;
	}

	public static Game getGame() {
		return game;
	}

	public static void setGame(Game game) {
		Game.game = game;
	}

	public static boolean isChangeLevel() {
		return changeLevel;
	}

	public static void setChangeLevel(boolean changeLevel) {
		Game.changeLevel = changeLevel;
	}

	public static int getMap() {
		return map;
	}

	public static void setMap(int map) {
		Game.map = map;
	}

	public static boolean isNpc() {
		return npc;
	}

	public static void setNpc(boolean npc) {
		Game.npc = npc;
	}

	public static Dummy getDummy() {
		return game.dummy;
	}

	public void setDummy(Dummy dummy) {
		this.dummy = dummy;
	}

}
package com.rtg.sam;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;

import com.rtg.launcher.CommonFlags;
import com.rtg.reader.SequencesReader;
import com.rtg.tabix.IndexingStreamCreator;
import com.rtg.tabix.TabixIndexer;
import com.rtg.util.io.AdjustableGZIPOutputStream;
import com.rtg.util.io.FileUtils;

import htsjdk.samtools.CRAMFileWriter;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMFileWriter;
import htsjdk.samtools.SAMFileWriterFactory;
import htsjdk.samtools.util.BlockCompressedOutputStream;

/**
 * Handles managing the various things we want when outputting SAM or BAM with indexing
 */
public final class SamOutput implements Closeable {

  private final File mOutFile;
  private final Closeable mStreamCreator;
  private final SAMFileWriter mWriter;

  private SamOutput(File outFile, Closeable streamCreator, SAMFileWriter writer) {
    mOutFile = outFile;
    mStreamCreator = streamCreator;
    mWriter = writer;
  }

  /**
   * Creates a SAM or BAM writer as appropriate and generates an index for this output if possible. Also writes to standard output if filename is "-".
   * This method does not support CRAM output.
   * @param filename filename given by user
   * @param stdio output stream to use if filename is "-" (standard out)
   * @param header header for output SAM/BAM file
   * @param gzipIfPossible whether we should attempt to compress output file
   * @param presorted if input if in correct sort order
   * @param reference the reference used to resolve CRAM, or null if no CRAM support is required
   * @return wrapper containing writer and other relevant things
   * @throws IOException if an IO Error occurs
   */
  public static SamOutput getSamOutput(File filename, OutputStream stdio, SAMFileHeader header, boolean gzipIfPossible, boolean presorted, SequencesReader reference) throws IOException {
    // Delegate to the fully-parameterised overload with header writing,
    // block-gzip termination, and indexing all enabled.
    return getSamOutput(filename, stdio, header, gzipIfPossible, presorted, true, true, true, reference);
  }

  /**
   * Creates a SAM or BAM writer as appropriate and generates an index for this output if possible. Also writes to standard output if filename is "-".
   * @param filename filename given by user
   * @param stdio output stream to use if filename is "-" (standard out)
   * @param header header for output SAM/BAM file
   * @param gzipIfPossible whether we should attempt to compress output file
   * @param presorted if input if in correct sort order
   * @param writeHeader true if the header should be written, false otherwise
   * @param terminateBlockGzip true if the output stream should contain a termination block (may be false if doing indexing of chunks)
   * @param indexIfPossible true if the output should be indexed if possible
   * @param reference the reference used to resolve CRAM, or null if no CRAM support is required
   * @return wrapper containing writer and other relevant things
   * @throws IOException if an IO Error occurs
   */
  public static SamOutput getSamOutput(File filename, OutputStream stdio, SAMFileHeader header, boolean gzipIfPossible, boolean presorted, boolean writeHeader, boolean terminateBlockGzip, boolean indexIfPossible, SequencesReader reference) throws IOException {
    final SamBamBaseFile baseFile = SamBamBaseFile.getBaseFile(filename, gzipIfPossible);
    final File outputFile;
    final OutputStream outputStream;
    final SamBamBaseFile.SamFormat type;
    final boolean compress;
    if (CommonFlags.isStdio(filename)) {
      // Use uncompressed SAM for stdout
      outputFile = null;
      outputStream = stdio;
      type = SamBamBaseFile.SamFormat.SAM;
      compress = false;
    } else {
      outputFile = baseFile.suffixedFile("");
      outputStream = null;
      type = baseFile.format();
      compress = type != SamBamBaseFile.SamFormat.SAM || baseFile.isGzip();
    }
    if (type == SamBamBaseFile.SamFormat.CRAM) {
      if (!writeHeader || !terminateBlockGzip) {
        throw new UnsupportedOperationException("Piecewise CRAM output is not supported");
      }
      // indexOut is null when indexing is disabled; the CRAMFileWriter
      // accepts a null index stream.
      final OutputStream indexOut = indexIfPossible ? FileUtils.createOutputStream(BamIndexer.indexFileName(outputFile)) : null;
      try {
        final SAMFileWriter writer = new CRAMFileWriter(FileUtils.createOutputStream(outputFile), indexOut, presorted, reference == null ? SamUtils.NO_CRAM_REFERENCE_SOURCE : reference.referenceSource(), header, outputFile.getName());
        return new SamOutput(outputFile, indexOut, writer);
      } catch (Throwable t) {
        // BUGFIX: indexOut may be null (indexIfPossible == false); the old
        // unconditional close() here threw NPE and masked the original
        // failure. Also keep any close() failure as a suppressed exception
        // rather than letting it replace t.
        if (indexOut != null) {
          try {
            indexOut.close();
          } catch (IOException e) {
            t.addSuppressed(e);
          }
        }
        throw t;
      }
    } else {
      // SAM gets a tabix-style index, BAM gets its own index (handled by
      // IndexingStreamCreator when indexerFactory is null).
      final TabixIndexer.IndexerFactory indexerFactory = type == SamBamBaseFile.SamFormat.SAM ? new TabixIndexer.SamIndexerFactory() : null;
      final IndexingStreamCreator streamCreator = new IndexingStreamCreator(outputFile, outputStream, compress, indexerFactory, indexIfPossible);
      try {
        final OutputStream samOutputstream = streamCreator.createStreamsAndStartThreads(header.getSequenceDictionary().size(), writeHeader, terminateBlockGzip);
        try {
          final SAMFileWriter writer;
          final File dir = filename.getAbsoluteFile().getParentFile();
          switch (type) {
            case BAM:
              writer = new SAMFileWriterFactory().setTempDirectory(dir).makeBAMWriter(header, presorted, new BlockCompressedOutputStream(samOutputstream, null, AdjustableGZIPOutputStream.DEFAULT_GZIP_LEVEL, terminateBlockGzip), writeHeader, false /* ignored */, true);
              break;
            case SAM:
              writer = new SAMFileWriterFactory().setTempDirectory(dir).makeSAMWriter(header, presorted, samOutputstream, writeHeader);
              break;
            default:
              throw new UnsupportedOperationException();
          }
          return new SamOutput(outputFile, streamCreator, writer);
        } catch (Throwable t) {
          samOutputstream.close();
          throw t;
        }
      } catch (Throwable t) {
        streamCreator.close();
        throw t;
      }
    }
  }

  /**
   * Closes the writer then the underlying stream creator; try-with-resources
   * guarantees both close attempts even if one throws (a null stream creator,
   * as in the CRAM/no-index case, is skipped).
   */
  @Override
  @SuppressWarnings("try")
  public void close() throws IOException {
    try (Closeable ignored = mStreamCreator;
         SAMFileWriter ignored2 = mWriter
    ) {

    }
  }

  /** @return the resolved output file, or null when writing to stdout */
  public File getOutFile() {
    return mOutFile;
  }

  /** @return the SAM/BAM/CRAM writer to emit records through */
  public SAMFileWriter getWriter() {
    return mWriter;
  }
}
package com.amee.base.engine;

import com.amee.base.transaction.TransactionController;
import org.apache.commons.cli.*;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTimeZone;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.tanukisoftware.wrapper.WrapperListener;
import org.tanukisoftware.wrapper.WrapperManager;

import java.io.Serializable;
import java.util.TimeZone;

/**
 * Application engine run under the Tanuki Java Service Wrapper. Boots a
 * Spring context from classpath XML, runs the {@link #onStart()} callback
 * inside a transaction, and exposes app/server/instance identity parsed from
 * command-line options.
 */
public class Engine implements WrapperListener, Serializable {

    private final Log log = LogFactory.getLog(getClass());

    private ApplicationContext springContext;
    private TransactionController transactionController;

    // These are used to determine the PID of the instance in the init script.
    private String appName = "amee";
    private String serverName = "localhost";
    private String instanceName = "live";

    public Engine() {
        super();
    }

    /**
     * Constructs an Engine with explicit identity values, overriding the
     * defaults ("amee"/"localhost"/"live").
     */
    public Engine(String appName, String serverName, String instanceName) {
        this();
        this.appName = appName;
        this.serverName = serverName;
        this.instanceName = instanceName;
    }

    public static void main(String[] args) {
        start(new Engine(), args);
    }

    // Hands control to the service wrapper, which calls back into
    // start(String[]) / stop(int) / controlEvent(int).
    protected static void start(WrapperListener wrapperListener, String[] args) {
        WrapperManager.start(wrapperListener, args);
    }

    /**
     * WrapperListener start callback: parses options, boots Spring, and runs
     * {@link #onStart()} inside a transaction.
     *
     * @return null on success (per WrapperListener contract), or an error
     *         code to make the wrapper treat startup as failed
     */
    public Integer start(String[] args) {

        parseOptions(args);

        log.debug("Starting Engine...");

        // Initialise Spring ApplicationContext.
        springContext = new ClassPathXmlApplicationContext(new String[]{"applicationContext*.xml"});

        // Initialise TransactionController (for controlling Spring).
        transactionController = (TransactionController) springContext.getBean("transactionController");

        // Do onStart callback wrapped in a transaction.
        boolean started;
        try {
            transactionController.begin(true);
            started = onStart();
            log.debug("...Engine started.");
        } finally {
            transactionController.end();
        }

        // Handle result.
        if (started) {
            return null;
        } else {
            // An arbitrary error code to indicate startup failure.
            return 1;
        }
    }

    /**
     * Parses command-line options (appName, serverName, instanceName,
     * optional timeZone). Prints usage and exits the JVM on a parse failure.
     *
     * NOTE(review): the first three options are marked required, so a missing
     * one aborts in parser.parse(); the subsequent hasOption checks are
     * defensive and the field defaults are effectively unreachable here.
     */
    protected void parseOptions(String[] args) {

        CommandLine line = null;
        CommandLineParser parser = new GnuParser();
        Options options = new Options();

        // Define appName option.
        Option appNameOpt = OptionBuilder.withArgName("appName")
                .hasArg()
                .withDescription("The app name")
                .create("appName");
        appNameOpt.setRequired(true);
        options.addOption(appNameOpt);

        // Define serverName option.
        Option serverNameOpt = OptionBuilder.withArgName("serverName")
                .hasArg()
                .withDescription("The server name")
                .create("serverName");
        serverNameOpt.setRequired(true);
        options.addOption(serverNameOpt);

        // Define instanceName option.
        Option instanceNameOpt = OptionBuilder.withArgName("instanceName")
                .hasArg()
                .withDescription("The instance name")
                .create("instanceName");
        instanceNameOpt.setRequired(true);
        options.addOption(instanceNameOpt);

        // Define timeZone option.
        Option timeZoneOpt = OptionBuilder.withArgName("timeZone")
                .hasArg()
                .withDescription("The time zone")
                .create("timeZone");
        timeZoneOpt.setRequired(false);
        options.addOption(timeZoneOpt);

        // Parse the options.
        try {
            line = parser.parse(options, args);
        } catch (ParseException exp) {
            new HelpFormatter().printHelp("java " + this.getClass().getName(), options);
            System.exit(-1);
        }

        // Handle appName.
        if (line.hasOption(appNameOpt.getOpt())) {
            appName = line.getOptionValue(appNameOpt.getOpt());
        }

        // Handle serverName.
        if (line.hasOption(serverNameOpt.getOpt())) {
            serverName = line.getOptionValue(serverNameOpt.getOpt());
        }

        // Handle instanceName.
        if (line.hasOption(instanceNameOpt.getOpt())) {
            instanceName = line.getOptionValue(instanceNameOpt.getOpt());
        }

        // Handle timeZone.
        // Sets both the JDK and Joda-Time default zones so they agree.
        // NOTE(review): TimeZone.getTimeZone never returns null (it falls
        // back to GMT for unknown IDs), so the null check below is dead and
        // an invalid zone silently becomes GMT — confirm that's acceptable.
        if (line.hasOption(timeZoneOpt.getOpt())) {
            String timeZoneStr = line.getOptionValue(timeZoneOpt.getOpt());
            if (!StringUtils.isBlank(timeZoneStr)) {
                TimeZone timeZone = TimeZone.getTimeZone(timeZoneStr);
                if (timeZone != null) {
                    TimeZone.setDefault(timeZone);
                    DateTimeZone.setDefault(DateTimeZone.forTimeZone(timeZone));
                }
            }
        }
        log.info("parseOptions() Time Zone is: " + TimeZone.getDefault().getDisplayName() + " (" + TimeZone.getDefault().getID() + ")");
    }

    // Startup hook for subclasses; return false to abort startup.
    protected boolean onStart() {
        // Do nothing.
        return true;
    }

    // Shutdown hook for subclasses.
    protected boolean onShutdown() {
        // Do nothing.
        return true;
    }

    /**
     * WrapperListener stop callback; runs {@link #onShutdown()} and passes
     * the wrapper's exit code through unchanged.
     */
    public int stop(int exitCode) {
        try {
            log.debug("Stopping Engine...");
            onShutdown();
            log.debug("...Engine stopped.");
        } catch (Exception e) {
            log.error("Caught Exception: " + e);
        }
        return exitCode;
    }

    // WrapperListener control-event callback (e.g. CTRL-C); intentionally
    // a no-op so the wrapper's default handling applies.
    public void controlEvent(int event) {
        log.debug("controlEvent() " + event);
        // Do nothing.
    }

    public ApplicationContext getSpringContext() {
        return springContext;
    }

    public TransactionController getTransactionController() {
        return transactionController;
    }

    public String getAppName() {
        return appName;
    }

    public String getServerName() {
        return serverName;
    }

    public String getInstanceName() {
        return instanceName;
    }
}
package com.hartveld.rx;

import static com.google.common.base.Preconditions.checkNotNull;

import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A minimal push-based observable: subscribers provide onNext/onError/
 * onCompleted callbacks and receive an {@link AutoCloseable} to cancel the
 * subscription. Default methods implement the operators.
 *
 * @param <T> the type of the observed elements
 */
public interface IObservable<T> {

	static final Logger LOG = LoggerFactory.getLogger(IObservable.class);

	/**
	 * Subscribes to this observable.
	 *
	 * @param onNext called for each observed element
	 * @param onError called at most once on failure; ends the stream
	 * @param onCompleted called at most once on normal completion
	 * @return an {@link AutoCloseable} that cancels the subscription
	 */
	AutoCloseable subscribe(Procedure1<T> onNext, Procedure1<Throwable> onError, Procedure onCompleted);

	/**
	 * Select from observations that which is of interesting nature.
	 *
	 * @param selector The function used to do the selection.
	 *
	 * @return A new {@link IObservable} that forwards the result of the application of the selector to each observation.
	 */
	default <R> IObservable<R> select(Function1<R, T> selector) {
		LOG.trace("select()");

		checkNotNull(selector, "selector must be non-null");

		return (onNext, onError, onCompleted) -> {
			// Once an error or completion has been forwarded, the stream is
			// over: 'stopped' gates out any further upstream events.
			AtomicBoolean stopped = new AtomicBoolean(false);
			AutoCloseable ac = subscribe(
				e -> {
					if (stopped.get()) return;
					try {
						R inner = selector.function(e);
						onNext.procedure(inner);
					} catch (RuntimeException ex) {
						LOG.trace("Caught exception: {}", ex.getMessage(), ex);
						stopped.set(true);
						onError.procedure(ex);
					}
				},
				e -> {
					if (stopped.get()) return;
					// BUGFIX: mark the stream stopped before forwarding an
					// upstream error, consistent with the selector-exception
					// and completion branches above/below — previously events
					// could still be forwarded after onError.
					stopped.set(true);
					onError.procedure(e);
				},
				() -> {
					if (stopped.get()) return;
					stopped.set(true);
					onCompleted.procedure();
				}
			);
			return () -> ac.close();
		};
	}

	/**
	 * Execute observations with the given executor.
	 * <p>
	 * This operator can be used to schedule the execution of observations on another thread, for example to run them on a background thread.
	 *
	 * @param executor The {@link Executor} to execute the observations through. Must be non-<code>null</code>.
	 *
	 * @return A new {@link IObservable} that executes observations for subscribers through the given executor.
	 */
	default IObservable<T> observeOn(Executor executor) {
		LOG.trace("observeOn({})", executor);

		checkNotNull(executor, "executor must be non-null");

		return (onNext, onError, onCompleted) -> {
			AutoCloseable ac = subscribe(
				e -> {
					LOG.trace("Executing onNext asynchronously for: {}", e);
					executor.execute(() -> {
						LOG.trace("onNext({}) (asynchronously called)", e);
						onNext.procedure(e);
					});
				},
				e -> {
					LOG.trace("Executing onError asynchronously for: {}", e);
					executor.execute(() -> {
						// BUGFIX: this trace previously said "onNext(...)" —
						// a copy-paste error from the branch above.
						LOG.trace("onError({}) (asynchronously called)", e);
						onError.procedure(e);
					});
				},
				() -> {
					LOG.trace("Executing onCompleted asynchronously...");
					executor.execute(() -> {
						LOG.trace("onCompleted() (asynchronously called)");
						onCompleted.procedure();
					});
				}
			);
			return () -> ac.close();
		};
	}

	/**
	 * Execute subscription and closing of the subscription with the given executor.
	 *
	 * @param executor The {@link Executor} to execute the subscription and closing of the subscription. Must be non-<code>null</code>.
	 *
	 * @return A new {@link IObservable} that executes subscription and closing of subscription through the given executor.
	 */
	default IObservable<T> subscribeOn(Executor executor) {
		LOG.trace("subscribeOn({})", executor);

		return (onNext, onError, onCompleted) -> {
			// The real AutoCloseable only exists once the asynchronous
			// subscribe has run; FutureAutoCloseable bridges that gap.
			FutureAutoCloseable futureAC = new FutureAutoCloseable();

			executor.execute(() -> {
					LOG.trace("Executing asynchronous subscription");
					futureAC.set(subscribe(
						e -> onNext.procedure(e),
						e -> onError.procedure(e),
						() -> onCompleted.procedure()
					));
				}
			);

			return () -> {
				LOG.trace("Executing asynchronous close...");
				executor.execute(() -> {
					LOG.trace("Executing close...");
					try {
						futureAC.close();
					} catch (Exception e) {
						LOG.trace("Caught exception on close: {}", e.getMessage(), e);
						// TODO: Create test case for this scenario, then implement a proper handling.
					}
				});
			};
		};
	}

}
package com.hpe.caf.codec; import com.hpe.caf.api.Codec; import com.hpe.caf.api.CodecException; import com.hpe.caf.api.DecodeMethod; import org.yaml.snakeyaml.Yaml; import org.yaml.snakeyaml.error.YAMLException; import org.yaml.snakeyaml.introspector.BeanAccess; import org.yaml.snakeyaml.representer.Representer; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.nio.charset.StandardCharsets; /** * Use SnakeYAML to serialise/deserialise data. * * The strict version does not allow missing properties, whereas the lenient version does. */ public class YamlCodec implements Codec { private final Yaml strictYaml; private final Yaml lenientYaml; public YamlCodec() { Representer lenient = new Representer(); lenient.getPropertyUtils().setSkipMissingProperties(true); lenient.getPropertyUtils().setBeanAccess(BeanAccess.FIELD); Representer strict = new Representer(); strict.getPropertyUtils().setSkipMissingProperties(false); strict.getPropertyUtils().setBeanAccess(BeanAccess.FIELD); lenientYaml = new Yaml(lenient); strictYaml = new Yaml(strict); } @Override public <T> T deserialise(final byte[] data, final Class<T> clazz, final DecodeMethod method) throws CodecException { try { return getYaml(method).loadAs(new ByteArrayInputStream(data), clazz); } catch (YAMLException e) { throw new CodecException("Failed to deserialise", e); } } @Override public <T> T deserialise(final InputStream stream, final Class<T> clazz, final DecodeMethod method) throws CodecException { try { return getYaml(method).loadAs(stream, clazz); } catch (YAMLException e) { throw new CodecException("Failed to deserialise", e); } } @Override public byte[] serialise(final Object object) throws CodecException { try { return getYaml(DecodeMethod.getDefault()).dump(object).getBytes(StandardCharsets.UTF_8); } catch (YAMLException e) { throw new CodecException("Failed to serialise", e); } } private Yaml getYaml(final DecodeMethod method) { return method == DecodeMethod.STRICT ? 
strictYaml : lenientYaml; } }
package com.jaamsim.render; //import com.jaamsim.math.*; import java.awt.Font; import java.awt.Frame; import java.awt.Image; import java.awt.event.ComponentEvent; import java.awt.event.ComponentListener; import java.awt.image.BufferedImage; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.URL; import java.nio.IntBuffer; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Queue; import java.util.concurrent.atomic.AtomicBoolean; import javax.media.opengl.DebugGL2; import javax.media.opengl.GL; import javax.media.opengl.GL2GL3; import javax.media.opengl.GL3; import javax.media.opengl.GLAutoDrawable; import javax.media.opengl.GLCapabilities; import javax.media.opengl.GLContext; import javax.media.opengl.GLEventListener; import javax.media.opengl.GLException; import javax.media.opengl.GLProfile; import com.jaamsim.DisplayModels.DisplayModel; import com.jaamsim.MeshFiles.MeshData; import com.jaamsim.font.OverlayString; import com.jaamsim.font.TessFont; import com.jaamsim.math.AABB; import com.jaamsim.math.Color4d; import com.jaamsim.math.Ray; import com.jaamsim.math.Vec4d; import com.jaamsim.render.util.ExceptionLogger; import com.jogamp.newt.event.WindowEvent; import com.jogamp.newt.event.WindowListener; import com.jogamp.newt.event.WindowUpdateEvent; import com.jogamp.newt.opengl.GLWindow; import com.sandwell.JavaSimulation.ColourInput; /** * The central renderer for JaamSim Renderer, Contains references to all context * specific data (like shader caches) * * @author Matt.Chudleigh * */ public class Renderer { public enum ShaderHandle { MESH, FONT, HULL, OVERLAY_FONT, OVERLAY_FLAT, DEBUG, SKYBOX } static private Object idLock = new Object(); static private int _nextID = 1; /** * Get a system wide unique ID * @return */ public static int 
getAssetID() { synchronized(idLock) { return _nextID++; } } private static boolean RENDER_DEBUG_INFO = false; private static boolean USE_DEBUG_GL = true; private EnumMap<ShaderHandle, Shader> _shaders; // Display _display = null; // Screen _screen = null; private GLContext _sharedContext = null; Map<Integer, Integer> _sharedVaoMap = new HashMap<Integer, Integer>(); GLWindow _dummyWindow; private GLCapabilities _caps = null; private TexCache _texCache = new TexCache(this); // An initalization time flag specifying if the 'safest' graphical techniques should be used private boolean _safeGraphics; private final Thread _renderThread; private final Object _rendererLock = new Object(); private final Map<MeshProtoKey, MeshProto> _protoCache; private final Map<TessFontKey, TessFont> _fontCache; private final HashMap<Integer, RenderWindow> _openWindows; private final Queue<RenderMessage> _renderMessages = new ArrayDeque<RenderMessage>(); private final AtomicBoolean _displayNeeded = new AtomicBoolean(true); private final AtomicBoolean _initialized = new AtomicBoolean(false); private final AtomicBoolean _shutdown = new AtomicBoolean(false); private final AtomicBoolean _fatalError = new AtomicBoolean(false); private String _errorString; // This is the string that caused the fatal error private StackTraceElement[] _fatalStackTrace; // the stack trace from the fatal error private final ExceptionLogger _exceptionLogger; private TessFontKey _defaultFontKey = new TessFontKey(Font.SANS_SERIF, Font.PLAIN); private TessFontKey _defaultBoldFontKey = new TessFontKey(Font.SANS_SERIF, Font.BOLD); private final Object _sceneLock = new Object(); private ArrayList<RenderProxy> _proxyScene = new ArrayList<RenderProxy>(); private boolean _allowDelayedTextures; private double _sceneTimeMS; private double _loopTimeMS; private long _usedVRAM = 0; // This may not be the best way to cache this //private GL2GL3 _currentGL = null; private GLContext _drawContext = null; private Skybox _skybox; // A 
// cache of the current scene, needed by the individual windows to render
private ArrayList<Renderable> _currentScene = new ArrayList<Renderable>();
private ArrayList<OverlayRenderable> _currentOverlay = null;

/**
 * Creates the renderer and immediately starts the dedicated render thread.
 * All GL work happens on that thread; other methods only queue messages.
 * @param safeGraphics true to request conservative graphics behaviour
 * @throws RenderException
 */
public Renderer(boolean safeGraphics) throws RenderException {
    _safeGraphics = safeGraphics;
    _protoCache = new HashMap<MeshProtoKey, MeshProto>();
    _fontCache = new HashMap<TessFontKey, TessFont>();

    _exceptionLogger = new ExceptionLogger(1); // Print the call stack on the first exception of any kind

    _openWindows = new HashMap<Integer, RenderWindow>();

    _renderThread = new Thread(new Runnable() {
        @Override
        public void run() {
            mainRenderLoop();
        }
    }, "RenderThread");
    _renderThread.start();
}

// The render thread's main body: one-time GL context setup, then a
// message-pump/redraw loop that runs until shutdown() is called.
private void mainRenderLoop() {

    //long startNanos = System.nanoTime();

    try {
        // GLProfile.initSingleton();
        GLProfile glp = GLProfile.get(GLProfile.GL2GL3);
        _caps = new GLCapabilities(glp);
        _caps.setSampleBuffers(true);
        _caps.setNumSamples(4);
        _caps.setDepthBits(24);

        // Create a dummy window
        _dummyWindow = GLWindow.create(_caps);
        _dummyWindow.setSize(128, 128);
        _dummyWindow.setPosition(-2000, -2000);

        // This is unfortunately necessary due to JOGL's (newt's?) involved
        // startup code
        // I can not find a way to make a context valid without a visible window
        _dummyWindow.setVisible(true);

        _sharedContext = _dummyWindow.getContext();
        assert (_sharedContext != null);

        _dummyWindow.setVisible(false);

        // long endNanos = System.nanoTime();
        // long ms = (endNanos - startNanos) /1000000L;
        // System.out.println("Creating shared context at:" + ms + "ms");

        initSharedContext();

        // Notify the main thread we're done
        synchronized (_initialized) {
            _initialized.set(true);
            _initialized.notifyAll();
        }
    } catch (Exception e) {
        // Record the failure so isInitialized()/hasFatalError() callers can see it
        _fatalError.set(true);
        _errorString = e.getLocalizedMessage();
        _fatalStackTrace = e.getStackTrace();
        System.out.println("Renderer encountered a fatal error:");
        e.printStackTrace();
    } finally {
        if (_sharedContext != null && _sharedContext.isCurrent())
            _sharedContext.release();
    }

    // endNanos = System.nanoTime();
    // ms = (endNanos - startNanos) /1000000L;
    // System.out.println("Started renderer loop after:" + ms + "ms");

    long lastLoopEnd = System.nanoTime();

    while (!_shutdown.get()) {
        try {

            // If a fatal error was encountered, clean up the renderer
            if (_fatalError.get()) {

                // We should clean up everything we can, then die
                try {
                    for (Entry<Integer, RenderWindow> entry : _openWindows.entrySet()){
                        entry.getValue().getGLWindowRef().destroy();
                        entry.getValue().getAWTFrameRef().dispose();
                    }
                } catch(Exception e) {} // Ignore any exceptions, this is just a best effort cleanup

                try {
                    _dummyWindow.destroy();
                    _sharedContext.destroy();
                    _dummyWindow = null;
                    _sharedContext = null;
                    _openWindows.clear();

                    _currentScene = null;
                    _currentOverlay = null;
                    _caps = null;

                    _fontCache.clear();
                    _protoCache.clear();
                    _shaders.clear();
                } catch (Exception e) { }

                break; // Exiting the loop will end the thread
            }

            // Run all render messages
            RenderMessage message;
            do {
                // Only lock the queue while reading messages, release it while
                // processing them
                message = null;
                synchronized (_renderMessages) {
                    if (!_renderMessages.isEmpty()) {
                        message = _renderMessages.remove();
                    }
                }
                if (message != null) {
                    try {
                        handleMessage(message);
                    } catch (Throwable t) {
                        // Log this error but continue processing
                        logException(t);
                    }
                }

            } while (!_renderMessages.isEmpty());

            if (_displayNeeded.compareAndSet(true, false)) {
                updateRenderableScene();

                // Defensive copy the window list (in case a window is closed while we render)
                HashMap<Integer, RenderWindow> winds;
                synchronized (_openWindows) {
                    winds = new HashMap<Integer, RenderWindow>(_openWindows);
                }

                for (RenderWindow wind : winds.values()) {
                    try {
                        GLContext context = wind.getGLWindowRef().getContext();
                        if (context != null && context.isCreated() && !_shutdown.get()) {
                            wind.getGLWindowRef().display();
                        }
                    } catch (Throwable t) {
                        // Log it, but move on to the other windows
                        logException(t);
                    }
                }
            }

            long loopEnd = System.nanoTime();
            _loopTimeMS = (loopEnd - lastLoopEnd) / 1000000;
            lastLoopEnd = loopEnd;

            try {
                synchronized (_displayNeeded) {
                    if (!_displayNeeded.get()) {
                        // Sleep until someone calls queueRedraw(). NOTE(review):
                        // shutdown() does not wake this wait, so shutdown only takes
                        // effect after the next redraw request — confirm intended.
                        _displayNeeded.wait();
                    }
                }
            } catch (InterruptedException e) {
                // Let's loop anyway...
            }

        } catch (Throwable t) {
            // Any other unexpected exceptions...
            logException(t);
        }
    }
}

/**
 * Returns the shader object for this handle, should only be called from the render thread (during a render)
 * @param h
 * @return
 */
public Shader getShader(ShaderHandle h) {
    return _shaders.get(h);
}

/**
 * Returns the MeshProto for the supplied key, should only be called from the render thread (during a render)
 * @param key
 * @return
 */
public MeshProto getProto(MeshProtoKey key) {
    MeshProto proto = _protoCache.get(key);
    if (proto == null) {
        // This prototype needs to be lazily loaded
        loadMeshProtoImp(key);
    }
    return _protoCache.get(key);
}

// Lazily loads and caches the tessellated font for this key
public TessFont getTessFont(TessFontKey key) {
    if (!_fontCache.containsKey(key)) {
        loadTessFontImp(key); // Try lazy initialization for now
    }

    return _fontCache.get(key);
}

// Replaces the proxy scene; it is converted to renderables on the next redraw
public void setScene(ArrayList<RenderProxy> scene) {
    synchronized (_sceneLock) {
        _proxyScene = scene;
    }
}

// Wakes the render thread so it repaints all open windows
public void queueRedraw() {
    synchronized(_displayNeeded) {
        _displayNeeded.set(true);
        _displayNeeded.notifyAll();
    }
}

// Enqueues a message for the render thread and wakes it
private void addRenderMessage(RenderMessage msg) {
    _renderMessages.add(msg);
    queueRedraw();
}

public void setCameraInfoForWindow(int windowID, CameraInfo info) {
    synchronized (_renderMessages) {
        addRenderMessage(new SetCameraMessage(windowID, info));
    }
}

private void setCameraInfoImp(SetCameraMessage mes) {
    synchronized (_openWindows) {
        RenderWindow w = _openWindows.get(mes.windowID);
        if (w != null) {
            w.getCameraRef().setInfo(mes.cameraInfo);
        }
    }
}

/**
 * Call this from any thread to shutdown the Renderer, will return
 * immediately but the renderer will shutdown after the next redraw
 */
public void shutdown() {
    _shutdown.set(true);
}

// Only valid during a render, while _drawContext is set
public GL2GL3 getGL() {
    return _drawContext.getGL().getGL2GL3();
}

/**
 * Get a list of all the IDs of currently open windows
 * @return
 */
public ArrayList<Integer> getOpenWindowIDs() {
    synchronized(_openWindows) {

        ArrayList<Integer> ret = new ArrayList<Integer>();
        for (int id : _openWindows.keySet()) {
            ret.add(id);
        }

        return ret;
    }
}

public String getWindowName(int windowID) {
synchronized(_openWindows) {
    RenderWindow win = _openWindows.get(windowID);
    if (win == null) {
        return null;
    }
    return win.getName();
}
}

// Returns the AWT Frame hosting this window, or null if the window has closed
public Frame getAWTFrame(int windowID) {
    synchronized(_openWindows) {
        RenderWindow win = _openWindows.get(windowID);
        if (win == null) {
            return null;
        }
        return win.getAWTFrameRef();
    }
}

// De-minimizes the window's frame and raises it to the front
public void focusWindow(int windowID) {
    synchronized(_openWindows) {
        RenderWindow win = _openWindows.get(windowID);
        if (win == null) {
            return;
        }
        win.getAWTFrameRef().setExtendedState(Frame.NORMAL);
        win.getAWTFrameRef().toFront();
    }
}

/**
 * Construct a new window (a NEWT window specifically)
 *
 * @param width
 * @param height
 * @return
 */
private void createWindowImp(CreateWindowMessage message) {
    // Runs on the render thread in response to a CreateWindowMessage
    RenderGLListener listener = new RenderGLListener();

    RenderWindow window = new RenderWindow(message.x, message.y,
                                           message.width, message.height,
                                           message.title, message.name,
                                           _sharedContext,
                                           _caps, listener,
                                           message.icon,
                                           message.windowID,
                                           message.viewID,
                                           message.listener);
    listener.setWindow(window);

    synchronized (_openWindows) {
        _openWindows.put(message.windowID, window);
    }

    // Wire window/component/mouse events back to the application's listener
    GLWindowListener wl = new GLWindowListener(window.getWindowID());
    window.getGLWindowRef().addWindowListener(wl);
    window.getAWTFrameRef().addComponentListener(wl);
    window.getGLWindowRef().addMouseListener(new MouseHandler(window, message.listener));

    window.getAWTFrameRef().setVisible(true);

    queueRedraw();
}

// Queues window creation on the render thread; returns the new window's ID immediately
public int createWindow(int x, int y, int width, int height, int viewID, String title, String name, Image icon,
                        WindowInteractionListener listener) {
    synchronized (_renderMessages) {
        int windowID = getAssetID();
        addRenderMessage(new CreateWindowMessage(x, y, width, height, title, name,
                windowID, viewID, icon, listener));
        return windowID;
    }
}

public void setWindowDebugInfo(int windowID, String debugString, ArrayList<Long> debugIDs) {
    synchronized(_openWindows) {
        RenderWindow w = _openWindows.get(windowID);

        if (w != null) {
            w.setDebugString(debugString);
            w.setDebugIDs(debugIDs);
        }
    }
}

public
int getNumOpenWindows() {
    synchronized(_openWindows) {
        return _openWindows.size();
    }
}

// Queues the window for closing on the render thread
public void closeWindow(int windowID) {
    synchronized (_renderMessages) {
        addRenderMessage(new CloseWindowMessage(windowID));
    }
}

// Runs on the render thread; fires the closing callback then tears down the window
private void closeWindowImp(CloseWindowMessage msg) {
    RenderWindow window;
    synchronized(_openWindows) {
        window = _openWindows.get(msg.windowID);
        if (window == null) {
            return;
        }
    }
    windowCleanup(msg.windowID);
    window.getGLWindowRef().destroy();
    window.getAWTFrameRef().dispose();
}

/**
 * Reads a classpath resource (shader source) into a String.
 *
 * FIX(review): the previous version returned an empty string on a silently
 * swallowed IOException, leaked the reader when an exception occurred after
 * opening it, and threw a bare NPE when the resource was missing. Errors are
 * now reported and the reader is always closed.
 *
 * @param file classpath-relative resource name
 * @return the resource contents, one "\n" per line
 */
private String readSource(String file) {
    URL res = Renderer.class.getResource(file);
    if (res == null) {
        // Fail with a useful message instead of an NPE on openStream()
        throw new RenderException("Could not find shader resource: " + file);
    }

    StringBuilder source = new StringBuilder();
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new InputStreamReader(res.openStream()));

        while (true) {
            String line = reader.readLine();
            if (line == null) break;

            source.append(line).append("\n");
        }
    } catch (IOException e) {
        // Best effort: report the failure instead of swallowing it silently,
        // but still return whatever was read so far (shader compile will then
        // produce a descriptive failure log)
        System.out.println("Error reading shader source " + file + ": " + e.getMessage());
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                // Nothing more we can do while closing
            }
        }
    }
    return source.toString();
}

// Compiles and links one shader program, storing it under the given handle.
// Throws RenderException (unchecked) on compile/link failure.
private void createShader(ShaderHandle sh, String vert, String frag, GL2GL3 gl) {
    String vertsrc = readSource(vert);
    String fragsrc = readSource(frag);

    Shader s = new Shader(vertsrc, fragsrc, gl);
    if (s.isGood()) {
        _shaders.put(sh, s);
        return;
    }

    String failure = s.getFailureLog();
    throw new RenderException("Shader failed: " + sh.toString() + " " + failure);
}

/**
 * Create and compile all the shaders
 */
private void initShaders(GL2GL3 gl) throws RenderException {
    _shaders = new EnumMap<ShaderHandle, Shader>(ShaderHandle.class);
    String vert, frag;

    vert = "/resources/shaders/flat.vert";
    frag = "/resources/shaders/flat.frag";
    createShader(ShaderHandle.MESH, vert, frag, gl);

    vert = "/resources/shaders/font.vert";
    frag = "/resources/shaders/font.frag";
    createShader(ShaderHandle.FONT, vert, frag, gl);

    vert = "/resources/shaders/hull.vert";
    frag = "/resources/shaders/hull.frag";
    createShader(ShaderHandle.HULL, vert, frag, gl);

    vert = "/resources/shaders/overlay-font.vert";
    frag = "/resources/shaders/overlay-font.frag";
    createShader(ShaderHandle.OVERLAY_FONT, vert,
frag, gl); vert = "/resources/shaders/overlay-flat.vert"; frag = "/resources/shaders/overlay-flat.frag"; createShader(ShaderHandle.OVERLAY_FLAT, vert, frag, gl); vert = "/resources/shaders/debug.vert"; frag = "/resources/shaders/debug.frag"; createShader(ShaderHandle.DEBUG, vert, frag, gl); vert = "/resources/shaders/skybox.vert"; frag = "/resources/shaders/skybox.frag"; createShader(ShaderHandle.SKYBOX, vert, frag, gl); } /** * Basic message dispatch * * @param message */ private void handleMessage(RenderMessage message) { assert (Thread.currentThread() == _renderThread); if (message instanceof CreateWindowMessage) { createWindowImp((CreateWindowMessage) message); return; } if (message instanceof SetCameraMessage) { setCameraInfoImp((SetCameraMessage) message); return; } if (message instanceof OffScreenMessage) { offScreenImp((OffScreenMessage) message); return; } if (message instanceof CloseWindowMessage) { closeWindowImp((CloseWindowMessage) message); return; } if (message instanceof CreateOffscreenTargetMessage) { populateOffscreenTarget(((CreateOffscreenTargetMessage)message).target); } if (message instanceof FreeOffscreenTargetMessage) { freeOffscreenTargetImp(((FreeOffscreenTargetMessage)message).target); } } private void initSharedContext() { assert (Thread.currentThread() == _renderThread); assert (_drawContext == null); int res = _sharedContext.makeCurrent(); assert (res == GLContext.CONTEXT_CURRENT); if (USE_DEBUG_GL) { _sharedContext.setGL(new DebugGL2(_sharedContext.getGL().getGL2())); } GL2GL3 gl = _sharedContext.getGL().getGL2GL3(); initShaders(gl); // Sub system specific intitializations DebugUtils.init(this, gl); Polygon.init(this, gl); MeshProto.init(this, gl); _texCache.init(gl); // Load the bad mesh proto MeshData badData = MeshDataCache.getBadMesh(); MeshProto badProto = new MeshProto(badData, _safeGraphics, !_safeGraphics); _protoCache.put(MeshDataCache.BAD_MESH_KEY, badProto); badProto.loadGPUAssets(gl, this); _skybox = new Skybox(); 
    _sharedContext.release();
}

// Loads a mesh prototype's GPU assets on the shared context; on failure the
// cached "bad mesh" stand-in is used instead. Render thread only.
private void loadMeshProtoImp(final MeshProtoKey key) {

    //long startNanos = System.nanoTime();

    assert (Thread.currentThread() == _renderThread);

    if (_protoCache.get(key) != null) {
        return; // This mesh has already been loaded
    }

    // Temporarily swap from the window's context to the shared context, where
    // all prototype GPU assets live
    if (_drawContext != null) {
        _drawContext.release();
    }
    int res = _sharedContext.makeCurrent();
    assert (res == GLContext.CONTEXT_CURRENT);

    GL2GL3 gl = _sharedContext.getGL().getGL2GL3();

    MeshData data = MeshDataCache.getMeshData(key);
    MeshProto proto = new MeshProto(data, _safeGraphics, !_safeGraphics);

    assert (proto != null);
    proto.loadGPUAssets(gl, this);

    if (!proto.isLoadedGPU()) {
        // This did not load cleanly, clear it out and use the default bad mesh asset
        proto.freeResources(gl);

        System.out.printf("Could not load GPU assset: %s\n", key.getURL().toString());

        proto = _protoCache.get(MeshDataCache.BAD_MESH_KEY);
    }
    _protoCache.put(key, proto);

    // Restore the context that was current before this call (if any)
    _sharedContext.release();
    if (_drawContext != null) {
        _drawContext.makeCurrent();
    }

    // long endNanos = System.nanoTime();
    // long ms = (endNanos - startNanos) /1000000L;
    // System.out.println("LoadMeshProtoImp time:" + ms + "ms");
}

// Lazily builds and caches a tessellated font (pure CPU work, no GL context needed)
private void loadTessFontImp(TessFontKey key) {
    if (_fontCache.get(key) != null) {
        return; // This font has already been loaded
    }

    TessFont tf = new TessFont(key);
    _fontCache.put(key, tf);
}

// Recreate the internal scene based on external input
private void updateRenderableScene() {
    synchronized (_sceneLock) {
        long sceneStart = System.nanoTime();

        _currentScene = new ArrayList<Renderable>();
        _currentOverlay = new ArrayList<OverlayRenderable>();

        // Expand each proxy into concrete renderables and overlay renderables
        for (RenderProxy proxy : _proxyScene) {
            proxy.collectRenderables(this, _currentScene);
            proxy.collectOverlayRenderables(this, _currentOverlay);
        }

        long sceneTime = System.nanoTime() - sceneStart;
        _sceneTimeMS = sceneTime / 1000000.0;
    }
}

// Result of a single bounds collision from pick(): distance along the ray and
// the picked object's ID
public static class PickResult {
    public double dist;
    public long pickingID;

    public PickResult(double dist, long pickingID) {
        this.dist = dist;
        this.pickingID = pickingID;
    }
}
/**
 * Cast the provided ray into the current scene and return the list of bounds collisions
 * @param ray
 * @return
 */
public List<PickResult> pick(Ray pickRay, int viewID, boolean precise) {
    // Do not update the scene while a pick is underway
    ArrayList<PickResult> ret = new ArrayList<PickResult>();

    // NOTE(review): _currentScene is read here without holding _sceneLock;
    // presumably safe because only the render thread replaces it — confirm
    if (_currentScene == null) {
        return ret;
    }

    synchronized (_sceneLock) {
        for (Renderable r : _currentScene) {
            double rayDist = r.getCollisionDist(pickRay, precise);
            if (rayDist >= 0.0) {

                // Also check that this is visible
                double centerDist = pickRay.getDistAlongRay(r.getBoundsRef().getCenter());

                if (r.renderForView(viewID, centerDist)) {
                    ret.add(new PickResult(rayDist, r.getPickingID()));
                }
            }
        }
        return ret;
    }
}

// Snapshot of a window's mouse position, viewable area and camera state
public static class WindowMouseInfo {
    public int x, y;
    public int width, height;
    public int viewableX, viewableY;
    public boolean mouseInWindow;
    public CameraInfo cameraInfo;
}

/**
 * Get Window specific information about the mouse. This is very useful for picking on the App side
 * @param windowID
 * @return
 */
public WindowMouseInfo getMouseInfo(int windowID) {
    synchronized(_openWindows) {
        RenderWindow w = _openWindows.get(windowID);

        if (w == null) {
            return null; // Not a valid window ID, or the window has closed
        }

        WindowMouseInfo info = new WindowMouseInfo();

        info.x = w.getMouseX();
        info.y = w.getMouseY();
        info.width = w.getViewableWidth();
        info.height = w.getViewableHeight();
        info.viewableX = w.getViewableX();
        info.viewableY = w.getViewableY();
        info.mouseInWindow = w.isMouseInWindow();
        info.cameraInfo = w.getCameraRef().getInfo();

        return info;
    }
}

public CameraInfo getCameraInfo(int windowID) {
    synchronized(_openWindows) {
        RenderWindow w = _openWindows.get(windowID);

        if (w == null) {
            return null; // Not a valid window ID, or the window has closed
        }

        return w.getCameraRef().getInfo();
    }
}

// Common cleanup code for window closing.
// Applies to both user closed and programmatically closed windows
private void windowCleanup(int windowID) {
    RenderWindow w;
    synchronized(_openWindows) {

        w = _openWindows.get(windowID);
        if (w == null) {
            return;
        }

        _openWindows.remove(windowID);
    }

    w.getAWTFrameRef().setVisible(false);

    // Fire the window closing callback
    w.getWindowListener().windowClosing();
}

// Bridges NEWT window events and AWT component events back to the
// application's WindowInteractionListener
private class GLWindowListener implements WindowListener, ComponentListener {

    private int _windowID;
    public GLWindowListener(int id) {
        _windowID = id;
    }

    private WindowInteractionListener getListener() {
        synchronized(_openWindows) {
            RenderWindow w = _openWindows.get(_windowID);

            if (w == null) {
                return null; // Not a valid window ID, or the window has closed
            }

            return w.getWindowListener();
        }
    }

    @Override
    public void windowDestroyNotify(WindowEvent we) {
        windowCleanup(_windowID);
    }

    @Override
    public void windowDestroyed(WindowEvent arg0) {
    }

    @Override
    public void windowGainedFocus(WindowEvent arg0) {
        WindowInteractionListener listener = getListener();
        if (listener != null) {
            listener.windowGainedFocus();
        }
    }

    @Override
    public void windowLostFocus(WindowEvent arg0) {
    }

    @Override
    public void windowMoved(WindowEvent arg0) {
    }

    @Override
    public void windowRepaint(WindowUpdateEvent arg0) {
    }

    @Override
    public void windowResized(WindowEvent arg0) {
    }

    // Pushes the current frame geometry to the application's listener
    private void updateWindowSizeAndPos() {
        RenderWindow w;
        synchronized(_openWindows) {
            w = _openWindows.get(_windowID);
            if (w == null) {
                return;
            }
        }
        w.getWindowListener().windowMoved(w.getWindowX(), w.getWindowY(), w.getWindowWidth(), w.getWindowHeight());
    }

    @Override
    public void componentHidden(ComponentEvent arg0) {
    }

    @Override
    public void componentMoved(ComponentEvent arg0) {
        updateWindowSizeAndPos();
    }

    @Override
    public void componentResized(ComponentEvent arg0) {
        updateWindowSizeAndPos();
    }

    @Override
    public void componentShown(ComponentEvent arg0) {
    }
}

// Per-window GL event callbacks: one-time context state, per-frame rendering
// and VAO cleanup on dispose
private class RenderGLListener implements GLEventListener {

    private RenderWindow _window;
    private long _lastFrameNanos = 0;
    public void setWindow(RenderWindow win) {
        _window = win;
    }

    @Override
    public void init(GLAutoDrawable drawable) {
        synchronized (_rendererLock) {
            // Per window initialization
            if (USE_DEBUG_GL) {
                drawable.setGL(new DebugGL2(drawable.getGL().getGL2()));
            }

            GL2GL3 gl = drawable.getGL().getGL2GL3();

            // Some of this is probably redundant, but here goes
            gl.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
            gl.glEnable(GL.GL_DEPTH_TEST);
            gl.glClearDepth(1.0);

            gl.glDepthFunc(GL2GL3.GL_LEQUAL);

            gl.glEnable(GL2GL3.GL_CULL_FACE);
            gl.glCullFace(GL2GL3.GL_BACK);

            gl.glEnable(GL.GL_MULTISAMPLE);

            gl.glBlendEquationSeparate(GL2GL3.GL_FUNC_ADD, GL2GL3.GL_MAX);
            gl.glBlendFuncSeparate(GL2GL3.GL_SRC_ALPHA, GL2GL3.GL_ONE_MINUS_SRC_ALPHA, GL2GL3.GL_ONE, GL2GL3.GL_ONE);
        }
    }

    @Override
    public void dispose(GLAutoDrawable drawable) {
        synchronized (_rendererLock) {

            GL2GL3 gl = drawable.getGL().getGL2GL3();

            // Free this window's vertex array objects before its context goes away
            Map<Integer, Integer> vaoMap = _window.getVAOMap();

            int[] vaos = new int[vaoMap.size()];
            int index = 0;
            for (int vao : vaoMap.values()) {
                vaos[index++] = vao;
            }
            if (vaos.length > 0) {
                gl.glDeleteVertexArrays(vaos.length, vaos, 0);
            }
        }
    }

    @Override
    public void display(GLAutoDrawable drawable) {
        synchronized (_rendererLock) {

            Camera cam = _window.getCameraRef();

            // The ray of the current mouse position (or null if the mouse is not hovering over the window)
            Ray pickRay = RenderUtils.getPickRay(getMouseInfo(_window.getWindowID()));

            PerfInfo pi = new PerfInfo();

            long startNanos = System.nanoTime();

            _allowDelayedTextures = true;

            renderScene(drawable.getContext(), _window.getVAOMap(),
                        _currentScene, _currentOverlay, cam,
                        _window.getViewableWidth(), _window.getViewableHeight(),
                        pickRay, _window.getViewID(), pi);

            GL2GL3 gl = drawable.getContext().getGL().getGL2GL3(); // Just to clean up the code below

            if (RENDER_DEBUG_INFO) {
                // Draw a window specific performance counter
                gl.glDisable(GL2GL3.GL_DEPTH_TEST);
                _drawContext = drawable.getContext();
                StringBuilder perf = new StringBuilder("Objects Culled: ").append(pi.objectsCulled);
                perf.append(" VRAM: ").append(_usedVRAM/(1024.0*1024.0)).append("MB");
                perf.append(" Frame time (ms) :").append(_lastFrameNanos / 1000000.0);
                perf.append(" SceneTime: ").append(_sceneTimeMS);
                perf.append(" Loop Time: ").append(_loopTimeMS);

                TessFont defFont = getTessFont(_defaultBoldFontKey);

                OverlayString os = new OverlayString(defFont, perf.toString(), ColourInput.BLACK,
                                                     10, 10, 15, false, false, DisplayModel.ALWAYS);
                os.render(_window.getVAOMap(), Renderer.this,
                          _window.getViewableWidth(), _window.getViewableHeight());

                // Also draw this window's debug string
                os = new OverlayString(defFont, _window.getDebugString(), ColourInput.BLACK,
                                       10, 10, 30, false, false, DisplayModel.ALWAYS);
                os.render(_window.getVAOMap(), Renderer.this,
                          _window.getViewableWidth(), _window.getViewableHeight());

                _drawContext = null;

                gl.glEnable(GL2GL3.GL_DEPTH_TEST);
            }

            gl.glFinish();

            long endNanos = System.nanoTime();
            _lastFrameNanos = endNanos - startNanos;
        }
    }

    @Override
    public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {

        //_window.resized(width, height);
        Camera cam = _window.getCameraRef();
        cam.setAspectRatio((double) width / (double) height);
    }
}

/**
 * Abstract base type for internal renderer messages
 */
private static class RenderMessage {
    @SuppressWarnings("unused")
    public long queueTime = System.nanoTime();
}

// Carries everything needed to build a window on the render thread
private static class CreateWindowMessage extends RenderMessage {
    public int x, y;
    public int width, height;
    public String title, name;
    public WindowInteractionListener listener;
    public int windowID, viewID;
    public Image icon;

    public CreateWindowMessage(int x, int y, int width, int height, String title, String name,
                               int windowID, int viewID, Image icon, WindowInteractionListener listener) {
        this.x = x;
        this.y = y;
        this.width = width;
        this.height = height;
        this.title = title;
        this.name = name;
        this.listener = listener;
        this.windowID = windowID;
        this.viewID = viewID;
        this.icon = icon;
    }
}

private static class
SetCameraMessage extends RenderMessage {
    public int windowID;
    public CameraInfo cameraInfo;
    public SetCameraMessage(int windowID, CameraInfo cameraInfo) {
        this.windowID = windowID;
        this.cameraInfo = cameraInfo;
    }
}

// Request to render a scene off screen; the result is delivered via 'result'
private static class OffScreenMessage extends RenderMessage {
    public ArrayList<RenderProxy> scene;
    public int viewID;
    public Camera cam;
    public int width, height;
    public Future<BufferedImage> result;
    public OffscreenTarget target; // may be null, in which case a temporary target is used

    OffScreenMessage(ArrayList<RenderProxy> s, int vID, Camera c, int w, int h,
                     Future<BufferedImage> r, OffscreenTarget t) {
        scene = s; viewID = vID; cam = c; width = w; height = h; result = r;
        target = t;
    }
}

private static class CloseWindowMessage extends RenderMessage {
    public int windowID;
    public CloseWindowMessage(int id) {
        windowID = id;
    }
}

private static class CreateOffscreenTargetMessage extends RenderMessage {
    public OffscreenTarget target;
}

private static class FreeOffscreenTargetMessage extends RenderMessage {
    public OffscreenTarget target;
}

public TexCache getTexCache() {
    return _texCache;
}

// Debug draw toggles, currently hard wired off
public static boolean debugDrawHulls() {
    return false;
}

public static boolean debugDrawAABBs() {
    return false;
}

public static boolean debugDrawArmatures() {
    return false;
}

public boolean isInitialized() {
    return _initialized.get() && !_fatalError.get();
}

public boolean hasFatalError() {
    return _fatalError.get();
}

public String getErrorString() {
    return _errorString;
}

public StackTraceElement[] getFatalStackTrace() {
    return _fatalStackTrace;
}

public TessFontKey getDefaultFont() {
    return _defaultFontKey;
}

public boolean allowDelayedTextures() {
    return _allowDelayedTextures;
}

// Records the exception and prints a synopsis; used to keep the render loop alive
private void logException(Throwable t) {
    _exceptionLogger.logException(t);

    // For now print a synopsis for all exceptions thrown
    printExceptionLog();
    t.printStackTrace();
}

private void printExceptionLog() {
    System.out.println("Exceptions from Renderer: ");

    _exceptionLogger.printExceptionLog();

    System.out.println("");
}

/**
 * Queue up an off screen
rendering
 * @param scene
 * @param cam
 * @param width
 * @param height
 * @return
 */
public Future<BufferedImage> renderOffscreen(ArrayList<RenderProxy> scene, int viewID, CameraInfo camInfo,
                                             int width, int height, Runnable runWhenDone, OffscreenTarget target) {
    Future<BufferedImage> result = new Future<BufferedImage>(runWhenDone);

    Camera cam = new Camera(camInfo, (double)width/(double)height);

    synchronized (_renderMessages) {
        addRenderMessage(new OffScreenMessage(scene, viewID, cam, width, height, result, target));
    }

    // Wake the render thread so the off screen render happens promptly
    synchronized (_displayNeeded) {
        _displayNeeded.set(true);
        _displayNeeded.notifyAll();
    }

    return result;
}

// Queues creation of the GL resources for a reusable off screen target;
// returns immediately, the target is populated on the render thread
public OffscreenTarget createOffscreenTarget(int width, int height) {
    OffscreenTarget ret = new OffscreenTarget(width, height);

    synchronized (_renderMessages) {
        CreateOffscreenTargetMessage msg = new CreateOffscreenTargetMessage();
        msg.target = ret;
        addRenderMessage(msg);
    }
    return ret;
}

public void freeOffscreenTarget(OffscreenTarget target) {
    synchronized (_renderMessages) {
        FreeOffscreenTargetMessage msg = new FreeOffscreenTargetMessage();
        msg.target = target;
        addRenderMessage(msg);
    }
}

/**
 * Create the resources for an OffscreenTarget
 */
private void populateOffscreenTarget(OffscreenTarget target) {

    int width = target.getWidth();
    int height = target.getHeight();

    _sharedContext.makeCurrent();
    GL3 gl = _sharedContext.getGL().getGL3(); // Just to clean up the code below

    // This does not support opengl 3, so for now we don't support off screen rendering
    if (gl == null) {
        _sharedContext.release();
        return;
    }

    // Create a new frame buffer for this draw operation:
    // a multisampled draw FBO plus a single sampled FBO to resolve (blit) into
    int[] temp = new int[2];
    gl.glGenFramebuffers(2, temp, 0);
    int drawFBO = temp[0];
    int blitFBO = temp[1];

    gl.glGenTextures(2, temp, 0);
    int drawTex = temp[0];
    int blitTex = temp[1];

    gl.glGenRenderbuffers(1, temp, 0);
    int depthBuf = temp[0];

    gl.glBindTexture(GL3.GL_TEXTURE_2D_MULTISAMPLE, drawTex);
    gl.glTexImage2DMultisample(GL3.GL_TEXTURE_2D_MULTISAMPLE, 4, GL2GL3.GL_RGBA8, width, height, true);
gl.glBindRenderbuffer(GL2GL3.GL_RENDERBUFFER, depthBuf);
gl.glRenderbufferStorageMultisample(GL2GL3.GL_RENDERBUFFER, 4, GL2GL3.GL_DEPTH_COMPONENT, width, height);

// Attach colour texture and depth renderbuffer to the multisampled draw FBO
gl.glBindFramebuffer(GL2GL3.GL_FRAMEBUFFER, drawFBO);
gl.glFramebufferTexture2D(GL2GL3.GL_FRAMEBUFFER, GL2GL3.GL_COLOR_ATTACHMENT0, GL3.GL_TEXTURE_2D_MULTISAMPLE, drawTex, 0);
gl.glFramebufferRenderbuffer(GL2GL3.GL_FRAMEBUFFER, GL2GL3.GL_DEPTH_ATTACHMENT, GL2GL3.GL_RENDERBUFFER, depthBuf);

int fbStatus = gl.glCheckFramebufferStatus(GL2GL3.GL_FRAMEBUFFER);
assert(fbStatus == GL2GL3.GL_FRAMEBUFFER_COMPLETE);

// Single sampled texture that the multisampled image is resolved into
gl.glBindTexture(GL2GL3.GL_TEXTURE_2D, blitTex);
gl.glTexImage2D(GL2GL3.GL_TEXTURE_2D, 0, GL2GL3.GL_RGBA8, width, height,
                0, GL2GL3.GL_RGBA, GL2GL3.GL_BYTE, null);

gl.glBindFramebuffer(GL2GL3.GL_FRAMEBUFFER, blitFBO);
gl.glFramebufferTexture2D(GL2GL3.GL_FRAMEBUFFER, GL2GL3.GL_COLOR_ATTACHMENT0, GL2GL3.GL_TEXTURE_2D, blitTex, 0);

gl.glBindFramebuffer(GL2GL3.GL_FRAMEBUFFER, 0);

target.load(drawFBO, drawTex, depthBuf, blitFBO, blitTex);

_sharedContext.release();
}

// Deletes the GL objects owned by an OffscreenTarget, on the shared context
private void freeOffscreenTargetImp(OffscreenTarget target) {
    if (!target.isLoaded()) {
        return; // Nothing to free
    }
    _sharedContext.makeCurrent();
    GL2GL3 gl = _sharedContext.getGL().getGL2GL3(); // Just to clean up the code below

    int[] temp = new int[2];

    temp[0] = target.getDrawFBO();
    temp[1] = target.getBlitFBO();
    gl.glDeleteFramebuffers(2, temp, 0);

    temp[0] = target.getDrawTex();
    temp[1] = target.getBlitTex();
    gl.glDeleteTextures(2, temp, 0);

    temp[0] = target.getDepthBuffer();
    gl.glDeleteRenderbuffers(1, temp, 0);

    target.free();

    _sharedContext.release();
}

// Renders a scene to an off screen target and delivers a BufferedImage via the
// message's Future. Render thread only.
private void offScreenImp(OffScreenMessage message) {

    synchronized(_rendererLock) {
    try {

        boolean isTempTarget;
        OffscreenTarget target;
        if (message.target == null) {
            // No reusable target supplied: build a throw-away one for this render
            isTempTarget = true;
            target = new OffscreenTarget(message.width, message.height);
            populateOffscreenTarget(target);
        } else {
            isTempTarget = false;
            target = message.target;
            assert(target.getWidth() == message.width);
assert(target.getHeight() == message.height);
}

int width = message.width;
int height = message.height;

if (!target.isLoaded()) {
    // FIX(review): corrected typo in the user-visible failure message
    // ("Contexted" -> "Context")
    message.result.setFailed("Context not loaded. Is OpenGL 3 supported?");
    return;
}
assert(target.isLoaded());

_sharedContext.makeCurrent();

GL2GL3 gl = _sharedContext.getGL().getGL2GL3(); // Just to clean up the code below

// Collect the renderables
ArrayList<Renderable> renderables = new ArrayList<Renderable>();
ArrayList<OverlayRenderable> overlay = new ArrayList<OverlayRenderable>();
for (RenderProxy p : message.scene) {
    p.collectRenderables(this, renderables);
    p.collectOverlayRenderables(this, overlay);
}

gl.glBindFramebuffer(GL2GL3.GL_DRAW_FRAMEBUFFER, target.getDrawFBO());

gl.glClearColor(0, 0, 0, 0);
gl.glViewport(0, 0, width, height);
gl.glEnable(GL2GL3.GL_DEPTH_TEST);
gl.glDepthFunc(GL2GL3.GL_LEQUAL);

// Delayed textures would leave holes in a one-shot image, so disallow them here
_allowDelayedTextures = false;

PerfInfo perfInfo = new PerfInfo();
// Okay, now actually render this thing...
renderScene(_sharedContext, _sharedVaoMap, renderables, overlay, message.cam,
            width, height, null, message.viewID, perfInfo);

gl.glFinish();

// Resolve the multisampled draw FBO into the single sampled blit FBO
gl.glBindFramebuffer(GL2GL3.GL_DRAW_FRAMEBUFFER, target.getBlitFBO());
gl.glBindFramebuffer(GL2GL3.GL_READ_FRAMEBUFFER, target.getDrawFBO());

gl.glBlitFramebuffer(0, 0, width, height,
                     0, 0, width, height,
                     GL2GL3.GL_COLOR_BUFFER_BIT, GL2GL3.GL_NEAREST);

gl.glBindTexture(GL2GL3.GL_TEXTURE_2D, target.getBlitTex());

IntBuffer pixels = target.getPixelBuffer();

gl.glGetTexImage(GL2GL3.GL_TEXTURE_2D, 0, GL2GL3.GL_BGRA, GL2GL3.GL_UNSIGNED_INT_8_8_8_8_REV, pixels);
gl.glBindTexture(GL2GL3.GL_TEXTURE_2D, 0);

BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);

for (int h = 0; h < height; ++h) {
    // Set this one scan line at a time, in the opposite order as java is y down
    img.setRGB(0, h, width, 1, pixels.array(), (height - 1 - h) * width, width);
}

message.result.setComplete(img);

// Clean up
gl.glBindFramebuffer(GL2GL3.GL_READ_FRAMEBUFFER, 0);
gl.glBindFramebuffer(GL2GL3.GL_DRAW_FRAMEBUFFER, 0);

if (isTempTarget) {
    freeOffscreenTargetImp(target);
}

} catch (GLException ex){
    message.result.setFailed(ex.getMessage());
} finally {
    if (_sharedContext.isCurrent())
        _sharedContext.release();
}
} // synchronized(_rendererLock)
}

/**
 * Returns true if the current thread is this renderer's render thread
 * @return
 */
public boolean isRenderThread() {
    return (Thread.currentThread() == _renderThread);
}

// Per-frame rendering statistics
private static class PerfInfo {
    public int objectsCulled = 0;
}

// Pairs a transparent renderable with its view-space depth for back-to-front sorting
private static class TransSortable implements Comparable<TransSortable> {
    public Renderable r;
    public double dist;

    @Override
    public int compareTo(TransSortable o) {
        // Sort such that largest distance sorts to front of list
        // by reversing argument order in compare.
        return Double.compare(o.dist, this.dist);
    }
}

// Renders a scene plus overlay for one camera/viewport. Called both from the
// per-window display() callback and from the off screen render path.
public void renderScene(GLContext context, Map<Integer, Integer> vaoMap,
                        List<Renderable> scene, List<OverlayRenderable> overlay,
                        Camera cam, int width, int height, Ray pickRay, int viewID, PerfInfo perfInfo) {

    final Vec4d viewDir = new Vec4d(0.0d, 0.0d, 0.0d, 1.0d);
    cam.getViewDir(viewDir);

    final Vec4d temp = new Vec4d(0.0d, 0.0d, 0.0d, 1.0d);

    assert (_drawContext == null);
    _drawContext = context;
    GL2GL3 gl = _drawContext.getGL().getGL2GL3(); // Just to clean up the code below

    gl.glClear(GL2GL3.GL_COLOR_BUFFER_BIT
            | GL2GL3.GL_DEPTH_BUFFER_BIT);

    // The 'height' of a pixel 1 unit from the viewer
    double unitPixelHeight = 2 * Math.tan(cam.getFOV()/2.0) / height;

    ArrayList<TransSortable> transparents = new ArrayList<TransSortable>();

    if (scene == null)
        return;

    // Opaque pass with view-frustum and apparent-size culling; transparent
    // objects are collected for a deferred, sorted pass
    for (Renderable r : scene) {
        AABB bounds = r.getBoundsRef();
        double dist = cam.distToBounds(bounds);

        if (!r.renderForView(viewID, dist)) {
            continue;
        }

        if (!cam.collides(bounds)) {
            ++perfInfo.objectsCulled;
            continue;
        }

        double apparentSize = 2 * bounds.getRadius().mag3() / Math.abs(dist);
        if (apparentSize < unitPixelHeight) {
            // This object is too small to draw
            ++perfInfo.objectsCulled;
            continue;
        }

        if (r.hasTransparent()) {
            // Defer rendering of transparent objects
            TransSortable ts = new TransSortable();
            ts.r = r;
            temp.set4(r.getBoundsRef().getCenter());
            temp.sub3(cam.getTransformRef().getTransRef());
            ts.dist = temp.dot3(viewDir);
            transparents.add(ts);
        }

        r.render(vaoMap, this, cam, pickRay);
    }

    gl.glEnable(GL2GL3.GL_BLEND);
    gl.glDepthMask(false);

    // Draw the skybox after
    _skybox.setTexture(cam.getInfo().skyboxTexture);
    _skybox.render(vaoMap, this, cam);

    // Transparent pass, back to front (see TransSortable.compareTo)
    Collections.sort(transparents);
    for (TransSortable ts : transparents) {

        AABB bounds = ts.r.getBoundsRef();
        if (!cam.collides(bounds)) {
            ++perfInfo.objectsCulled;
            continue;
        }

        ts.r.renderTransparent(vaoMap, this, cam, pickRay);
    }

    gl.glDisable(GL2GL3.GL_BLEND);
    gl.glDepthMask(true);

    // Debug render AABBs
    if (debugDrawAABBs()) {
        Color4d yellow = new Color4d(1, 1, 0, 1.0d);
        Color4d red = new Color4d(1, 0, 0, 1.0d);
        for (Renderable r : scene) {
            Color4d aabbColor = yellow;
            if (pickRay != null && r.getBoundsRef().collisionDist(pickRay) > 0) {
                aabbColor = red;
            }
            DebugUtils.renderAABB(vaoMap, this, r.getBoundsRef(), aabbColor, cam);
        }
    } // for renderables

    // Now draw the overlay, depth testing off so it always lands on top
    gl.glDisable(GL2GL3.GL_DEPTH_TEST);

    if (overlay != null) {
        for (OverlayRenderable r : overlay) {
            if (!r.renderForView(viewID)) {
                continue;
            }

            r.render(vaoMap, this, width, height);
        }
    }

    gl.glEnable(GL2GL3.GL_DEPTH_TEST);
    gl.glBindVertexArray(0);

    _drawContext = null;
}

// Tracks GPU memory usage (may be negative deltas on free) for the debug overlay
public void usingVRAM(long bytes) {
    _usedVRAM += bytes;
}

}
package tlc2.tool.fp;

import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.LongAccumulator;
import java.util.function.LongBinaryOperator;
import java.util.function.ToLongFunction;
import java.util.logging.Level;

import tlc2.output.EC;
import tlc2.output.MP;
import tlc2.tool.fp.LongArrays.LongComparator;
import tlc2.tool.fp.management.DiskFPSetMXWrapper;
import tlc2.util.BufferedRandomAccessFile;
import tlc2.util.Striped;
import util.Assert;

/**
 * An open-addressing, off-heap fingerprint set with disk spill-over.
 *
 * see OpenAddressing.tla
 */
@SuppressWarnings({ "serial" })
public final class OffHeapDiskFPSet extends NonCheckpointableDiskFPSet implements FPSetStatistic {

    /**
     * Maximum number of positions probed on insertion before the table is
     * considered full and an eviction to disk is forced.
     */
    private static final int PROBE_LIMIT = Integer.getInteger(OffHeapDiskFPSet.class.getName() + ".probeLimit", 128);

    // 0L marks an empty array position (fingerprints are in Nat \ {0}).
    static final long EMPTY = 0L;

    /**
     * Largest probe-sequence length observed so far. Strictly monotonically
     * increasing because the accumulator function is {@code max}.
     */
    private final LongAccumulator reprobe;

    private final LongArray array;

    /**
     * The indexer maps a fingerprint to an in-memory bucket and the associated
     * lock.
     */
    private final Indexer indexer;

    private CyclicBarrier barrier;

    /**
     * completionException is set by the Runnable iff an exception occurs during
     * eviction while the worker threads wait at barrier. Worker threads have to
     * check completionException explicitly.
     */
    private volatile RuntimeException completionException;

    protected OffHeapDiskFPSet(final FPSetConfiguration fpSetConfig) throws RemoteException {
        super(fpSetConfig);

        final long positions = fpSetConfig.getMemoryInFingerprintCnt();

        // Determine base address which varies depending on machine architecture.
        this.array = new LongArray(positions);
        this.reprobe = new LongAccumulator(new LongBinaryOperator() {
            public long applyAsLong(long left, long right) {
                return Math.max(left, right);
            }
        }, 0);

        // If Hamming weight is 1, the logical index address can be calculated
        // significantly faster by bit-shifting. However, with large memory
        // sizes, only supporting increments of 2^n sizes would waste memory
        // (e.g. either 32GiB or 64Gib). Hence, we check if the bitCount allows
        // us to use bit-shifting. If not, we fall back to less efficient
        // calculations.
        if (Long.bitCount(positions) == 1) {
            this.indexer = new BitshiftingIndexer(positions, fpSetConfig.getFpBits());
        } else {
            // non 2^n buckets cannot use a bit shifting indexer
            this.indexer = new Indexer(positions, fpSetConfig.getFpBits());
        }

        // Use the non-concurrent flusher as the default. Will be replaced by
        // the CyclicBarrier-Runnable later. Just set to prevent NPEs when
        // eviction/flush is called before init.
        this.flusher = new OffHeapMSBFlusher(array, 0);
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#init(int, java.lang.String, java.lang.String)
     */
    public void init(final int numThreads, String aMetadir, String filename) throws IOException {
        super.init(numThreads, aMetadir, filename);

        array.zeroMemory(numThreads);

        // This barrier gets run after one thread signals the need to suspend
        // put and contains operations to evict to secondary. Signaling is done
        // via the flusherChoosen AtomicBoolean. All threads (numThreads) will
        // then await on the barrier and the Runnable be executed when the
        // last of numThreads arrives.
        // Compared to an AtomicBoolean, the barrier operation use locks and
        // are thus comparably expensive.
        barrier = new CyclicBarrier(numThreads, new Runnable() {
            // Atomically evict and reset flusherChosen to make sure no
            // thread re-read flusherChosen=true after an eviction and
            // waits again.
            public void run() {
                // statistics
                growDiskMark++;
                final long timestamp = System.currentTimeMillis();
                final long insertions = tblCnt.longValue();
                final double lf = tblCnt.doubleValue() / (double) maxTblCnt;
                final int r = reprobe.intValue();

                // Only pay the price of creating threads when array is sufficiently large.
                if (array.size() > 8192) {
                    OffHeapDiskFPSet.this.flusher = new ConcurrentOffHeapMSBFlusher(array, r, numThreads, insertions);
                } else {
                    OffHeapDiskFPSet.this.flusher = new OffHeapMSBFlusher(array, r);
                }

                try {
                    flusher.flushTable(); // Evict()
                } catch (RuntimeException e) {
                    // Remember the cause so that the workers blocked at the
                    // barrier can re-throw it after they wake up.
                    completionException = e;
                    throw e;
                } catch (IOException e) {
                    completionException = new RuntimeException(e);
                    throw completionException;
                }

                // statistics and logging again.
                long l = System.currentTimeMillis() - timestamp;
                flushTime += l;
                LOGGER.log(Level.FINE,
                        "Flushed disk {0} {1}. time, in {2} sec after {3} insertions, load factor {4} and reprobe of {5}.",
                        new Object[] { ((DiskFPSetMXWrapper) diskFPSetMXWrapper).getObjectName(), getGrowDiskMark(), l,
                                insertions, lf, r });

                // Release exclusive access. It has to be done by the runnable
                // before workers waiting on the barrier wake up again.
                Assert.check(flusherChosen.compareAndSet(true, false), EC.GENERAL);
            }
        });
    }

    /**
     * Waits at the barrier iff an eviction has been signaled.
     *
     * @return true iff an eviction was pending and this thread waited for it to
     *         complete (the caller should then retry its operation).
     */
    private boolean checkEvictPending() {
        if (flusherChosen.get()) {
            try {
                barrier.await();
                if (completionException != null) {
                    throw completionException;
                }
            } catch (InterruptedException ie) {
                // Restore the interrupt status before converting to unchecked.
                Thread.currentThread().interrupt();
                throw new RuntimeException(ie);
            } catch (BrokenBarrierException bbe) {
                // Make the barrier reusable for subsequent evictions.
                barrier.reset();
                if (completionException != null) {
                    throw completionException;
                } else {
                    throw new RuntimeException(bbe);
                }
            }
            return true;
        }
        return false;
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#sizeof()
     */
    public long sizeof() {
        long size = 44; // approx size of this DiskFPSet object
        size += maxTblCnt * (long) LongSize;
        size += getIndexCapacity() * 4;
        return size;
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#needsDiskFlush()
     */
    protected final boolean needsDiskFlush() {
        return loadFactorExceeds(1d) || forceFlush;
    }

    /**
     * This limits the (primary) in-memory hash table to grow beyond the given
     * limit.
     *
     * @param limit
     *            A limit in the domain [0, 1] which restricts the hash table
     *            from growing past it.
     * @return true iff the current hash table load exceeds the given limit
     */
    private final boolean loadFactorExceeds(final double limit) {
        final double d = (this.tblCnt.doubleValue()) / (double) this.maxTblCnt;
        return d >= limit;
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#memLookup(long)
     */
    final boolean memLookup(final long fp0) {
        final int r = reprobe.intValue();
        // Probe at most r+1 positions; an EMPTY slot terminates the probe
        // sequence early because insertion never skips over an empty slot.
        for (int i = 0; i <= r; i++) {
            final long position = indexer.getIdx(fp0, i);
            final long l = array.get(position);
            if (fp0 == (l & FLUSHED_MASK)) {
                // zero the long msb (which is 1 if fp has been flushed to disk)
                return true;
            } else if (l == EMPTY) {
                return false;
            }
        }
        return false;
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#memInsert(long)
     */
    final boolean memInsert(final long fp0) throws IOException {
        for (int i = 0; i < PROBE_LIMIT; i++) {
            final long position = indexer.getIdx(fp0, i);
            final long expected = array.get(position);
            if (expected == EMPTY || (expected < 0 && fp0 != (expected & FLUSHED_MASK))) {
                // Increment reprobe if needed. Other threads might have
                // increased concurrently. Since reprobe is strictly
                // monotonic increasing, we need no retry when r larger.
                reprobe.accumulate(i);

                // Try to CAS the new fingerprint. In case of failure, reprobe
                // is too large which we ignore. Will be eventually corrected
                // by eviction.
                if (array.trySet(position, expected, fp0)) {
                    this.tblCnt.increment();
                    return false;
                }
                // Cannot reduce reprobe to its value before we increased it.
                // Another thread could have caused an increase to i too which
                // would be lost.
            }

            // Expected is the fingerprint to be inserted.
            if ((expected & FLUSHED_MASK) == fp0) {
                return true;
            }
        }

        // We failed to insert into primary. Consequently, lets try and make
        // some room by signaling all threads to wait for eviction.
        forceFlush();
        // We've signaled for eviction to start or failed because some other
        // thread beat us to it. Actual eviction and setting flusherChosen back
        // to false is done by the Barrier's Runnable. We cannot set
        // flusherChosen back to false after barrier.awaits returns because it
        // leaves a window during which other threads read the old true value of
        // flusherChosen a second time and immediately wait again.
        return put(fp0);
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.FPSet#put(long)
     */
    public final boolean put(final long fp) throws IOException {
        if (checkEvictPending()) {
            return put(fp);
        }

        // zeros the msb
        final long fp0 = fp & FLUSHED_MASK;

        // Only check primary and disk iff there exists a disk file. index is
        // created when we wait and thus cannot race.
        if (index != null) {
            // Lookup primary memory
            if (memLookup(fp0)) {
                this.memHitCnt.increment();
                return true;
            }

            // Lookup on disk
            if (this.diskLookup(fp0)) {
                this.diskHitCnt.increment();
                return true;
            }
        }

        // Lastly, try to insert into memory.
        return memInsert(fp0);
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.FPSet#contains(long)
     */
    public final boolean contains(final long fp) throws IOException {
        // maintains happen-before with regards to successful put
        if (checkEvictPending()) {
            return contains(fp);
        }

        // zeros the msb
        final long fp0 = fp & FLUSHED_MASK;

        // Lookup in primary
        if (memLookup(fp0)) {
            return true;
        }

        // Lookup on secondary/disk
        if (this.diskLookup(fp0)) {
            diskHitCnt.increment();
            return true;
        }

        return false;
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#forceFlush()
     */
    public void forceFlush() {
        flusherChosen.compareAndSet(false, true);
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#acquireTblWriteLock()
     */
    void acquireTblWriteLock() {
        // no-op for now
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#releaseTblWriteLock()
     */
    void releaseTblWriteLock() {
        // no-op for now
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#getTblCapacity()
     */
    public long getTblCapacity() {
        return maxTblCnt;
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#getTblLoad()
     */
    public long getTblLoad() {
        return getTblCnt();
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#getOverallCapacity()
     */
    public long getOverallCapacity() {
        return array.size();
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#getBucketCapacity()
     */
    public long getBucketCapacity() {
        // A misnomer, but bucketCapacity is obviously not applicable with open
        // addressing.
        return reprobe.longValue();
    }

    //**************************** Indexer ****************************//

    /**
     * Maps a fingerprint (plus probe offset) to a position in the in-memory
     * array by linear interpolation over the fingerprint domain.
     */
    public static class Indexer {

        // Minimum possible fingerprint (0L marks an empty position).
        private static final long minFingerprint = 1L;

        private final float tblScalingFactor;
        private final long maxFingerprint;
        protected final long positions;

        public Indexer(final long positions, final int fpBits) {
            this(positions, fpBits, 0xFFFFFFFFFFFFFFFFL >>> fpBits);
            assert fpBits > 0;
        }

        public Indexer(final long positions, final int fpBits, final long maxValue) {
            this.positions = positions;
            this.maxFingerprint = maxValue;
            // (position-1L) because array is zero indexed.
            this.tblScalingFactor = (positions - 1L) / ((maxFingerprint - minFingerprint) * 1f);
        }

        protected long getIdx(final long fp) {
            return getIdx(fp, 0);
        }

        protected long getIdx(final long fp, final int probe) {
            long idx = Math.round(tblScalingFactor * (fp - minFingerprint)) + probe;
            // Wrap around the end of the array for probes past the last slot.
            return idx % positions;
        }
    }

    /**
     * Faster {@link Indexer} for 2^n-sized arrays: the position is obtained by
     * masking and right-shifting instead of a multiplication.
     */
    public static class BitshiftingIndexer extends Indexer {

        private final long prefixMask;
        private final int rShift;

        public BitshiftingIndexer(final long positions, final int fpBits) throws RemoteException {
            super(positions, fpBits);

            this.prefixMask = 0xFFFFFFFFFFFFFFFFL >>> fpBits;

            // Find the smallest right-shift that maps the (masked) fingerprint
            // range onto [0, positions).
            long n = (0xFFFFFFFFFFFFFFFFL >>> fpBits) - (positions - 1);
            int moveBy = 0;
            while (n >= positions) {
                moveBy++;
                n = n >>> 1;
            }
            this.rShift = moveBy;
        }

        @Override
        protected long getIdx(final long fp) {
            return ((fp & prefixMask) >>> rShift);
        }

        @Override
        protected long getIdx(final long fp, int probe) {
            // Have to mod positions because probe might cause us to overshoot.
            return (((fp & prefixMask) >>> rShift) + probe) % positions;
        }
    }

    //**************************** Flusher helpers ****************************//

    /**
     * Comparator used to sort the open-addressing array before eviction. It
     * keeps EMPTY/evicted slots in place and respects elements that wrapped
     * around the end of the array.
     */
    private LongComparator getLongComparator() {
        return new LongComparator() {
            public int compare(long fpA, long posA, long fpB, long posB) {

                // Elements not in Nat \ {0} remain at their current
                // position.
                if (fpA <= EMPTY || fpB <= EMPTY) {
                    return 0;
                }

                final boolean wrappedA = indexer.getIdx(fpA) > posA;
                final boolean wrappedB = indexer.getIdx(fpB) > posB;

                if (wrappedA == wrappedB && posA > posB) {
                    return fpA < fpB ? -1 : 1;
                } else if ((wrappedA ^ wrappedB)) {
                    if (posA < posB && fpA < fpB) {
                        return -1;
                    }
                    if (posA > posB && fpA > fpB) {
                        return -1;
                    }
                }
                return 0;
            }
        };
    }

    /**
     * Returns the number of fingerprints stored in table/array in the range
     * [start,limit].
     */
    private long getTableOffset(final LongArray a, final long reprobe, final Indexer indexer, final long start,
            final long limit) {
        long occupied = 0L;
        for (long pos = start; pos < limit; pos++) {
            final long fp = a.get(pos % a.size());
            if (fp <= EMPTY) {
                continue;
            }
            final long idx = indexer.getIdx(fp);
            if (idx > pos) {
                // Ignore the elements that wrapped around the
                // end when scanning the first partition.
                continue;
            }
            if (idx + reprobe < pos) {
                // Ignore the elements of the first partition
                // when wrapping around for the last partition.
                continue;
            }
            occupied = occupied + 1L;
        }
        return occupied;
    }

    /**
     * Scans backwards from idx for the next non-evicted/non-empty fingerprint
     * that belongs to this partition.
     */
    private long getNextLower(long idx) {
        // Reverse to the next non-evicted/empty fp that belongs to this partition.
        long fp = array.get(idx);
        while (fp <= EMPTY || indexer.getIdx(fp) > idx) {
            fp = array.get(--idx);
        }
        return fp;
    }

    /**
     * The number of fingerprints stored on disk smaller than fp.
     */
    private long getDiskOffset(final int id, final long fp) throws IOException {
        if (this.index == null) {
            return 0L;
        }

        final int indexLength = this.index.length;
        int loPage = 0, hiPage = indexLength - 1;
        long loVal = this.index[loPage];
        long hiVal = this.index[hiPage];

        if (fp <= loVal) {
            return 0L;
        }
        if (fp >= hiVal) {
            return this.braf[id].length() / FPSet.LongSize;
        }
        // See DiskFPSet#diskLookup for comments.

        // Lookup the corresponding disk page in index.
        final double dfp = (double) fp;
        while (loPage < hiPage - 1) {
            final double dhi = (double) hiPage;
            final double dlo = (double) loPage;
            final double dhiVal = (double) hiVal;
            final double dloVal = (double) loVal;

            int midPage = (loPage + 1) + (int) ((dhi - dlo - 1.0) * (dfp - dloVal) / (dhiVal - dloVal));
            if (midPage == hiPage) {
                // Interpolation can land on hiPage; step back to keep the
                // invariant loPage < midPage < hiPage.
                midPage--;
            }
            final long v = this.index[midPage];

            if (fp < v) {
                hiPage = midPage;
                hiVal = v;
            } else if (fp > v) {
                loPage = midPage;
                loVal = v;
            } else {
                return (midPage * 1L) * (NumEntriesPerPage * 1L);
            }
        }
        // no page is in between loPage and hiPage at this point
        Assert.check(hiPage == loPage + 1, EC.SYSTEM_INDEX_ERROR);

        // Read the disk page and try to find the given fingerprint or the next
        // smaller one. Calculate its offset in file.
        long midEntry = -1L;
        long loEntry = ((long) loPage) * NumEntriesPerPage;
        long hiEntry = ((loPage == indexLength - 2) ? this.fileCnt - 1 : ((long) hiPage) * NumEntriesPerPage);

        final BufferedRandomAccessFile raf = this.braf[id];
        while (loEntry < hiEntry) {
            midEntry = calculateMidEntry(loVal, hiVal, dfp, loEntry, hiEntry);

            raf.seek(midEntry * LongSize);
            final long v = raf.readLong();

            if (fp < v) {
                hiEntry = midEntry;
                hiVal = v;
            } else if (fp > v) {
                loEntry = midEntry + 1;
                loVal = v;
                midEntry = loEntry;
            } else {
                break;
            }
        }

        return midEntry;
    }

    /**
     * Multi-threaded flusher: partitions the array into numThreads slices,
     * sorts and merges each slice to disk concurrently.
     */
    public class ConcurrentOffHeapMSBFlusher extends OffHeapMSBFlusher {

        private final int numThreads;
        private final ExecutorService executorService;
        private final Striped striped;
        private final long insertions;
        /**
         * The length of a single partition.
         */
        private final long length;
        private List<Future<Result>> offsets;

        public ConcurrentOffHeapMSBFlusher(final LongArray array, final int r, final int numThreads,
                final long insertions) {
            super(array, r);
            this.numThreads = numThreads;
            this.insertions = insertions;
            this.length = (long) Math.floor(a.size() / numThreads);

            this.striped = Striped.readWriteLock(numThreads);
            this.executorService = Executors.newFixedThreadPool(numThreads);
        }

        /* (non-Javadoc)
         * @see tlc2.tool.fp.DiskFPSet.Flusher#prepareTable()
         */
        protected void prepareTable() {
            final Collection<Callable<Result>> tasks = new ArrayList<Callable<Result>>(numThreads);
            for (int i = 0; i < numThreads; i++) {
                final int id = i;
                tasks.add(new Callable<Result>() {
                    @Override
                    public Result call() throws Exception {
                        final boolean isFirst = id == 0;
                        final boolean isLast = id == numThreads - 1;
                        final long start = id * length;
                        final long end = isLast ? a.size() - 1L : start + length;

                        // Sort partition p_n while holding its
                        // corresponding lock. Sort requires exclusive
                        // access.
                        striped.getAt(id).writeLock().lock();
                        LongArrays.sort(a, start, end, getLongComparator());
                        striped.getAt(id).writeLock().unlock();

                        // Sort the range between partition p_n and
                        // p_n+1 bounded by reprobe. We need no hold
                        // lock for p_n because p_n is done (except for
                        // its non-overlapping lower end).
                        striped.getAt((id + 1) % numThreads).writeLock().lock();
                        LongArrays.sort(a, end - r, end + r, getLongComparator());
                        striped.getAt((id + 1) % numThreads).writeLock().unlock();

                        // Count the occupied positions for this
                        // partition. Occupied positions are those which
                        // get evicted (written to disk).
                        // This could be done as part of (insertion) sort
                        // above at the price of higher complexity. Thus,
                        // it's done here until it becomes a bottleneck.
                        final long limit = isLast ? a.size() + r : end;
                        long occupied = getTableOffset(a, r, indexer, start, limit);
                        if (index == null) {
                            return new Result(occupied, 0L);
                        }

                        // Determine number of elements in the old/current file.
                        if (isFirst && isLast) {
                            return new Result(occupied, fileCnt);
                        } else if (isFirst) {
                            return new Result(occupied, getDiskOffset(id, getNextLower(end)));
                        } else if (isLast) {
                            return new Result(occupied, fileCnt - getDiskOffset(id, getNextLower(start)));
                        } else {
                            return new Result(occupied,
                                    getDiskOffset(id, getNextLower(end)) - getDiskOffset(id, getNextLower(start)));
                        }
                    }
                });
            }
            try {
                offsets = executorService.invokeAll(tasks);
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(ie);
            }

            assert checkSorted(a, indexer, r) == -1L : String.format(
                    "Array %s not fully sorted at index %s and reprobe %s.", a.toString(),
                    checkSorted(array, indexer, r), r);
        }

        @Override
        protected void mergeNewEntries(final RandomAccessFile[] inRAFs, final RandomAccessFile outRAF,
                final Iterator ignored, final int idx, final long cnt) throws IOException {
            assert offsets.stream().mapToLong(new ToLongFunction<Future<Result>>() {
                public long applyAsLong(Future<Result> future) {
                    try {
                        return future.get().getTable();
                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                        throw new RuntimeException(ie);
                    } catch (ExecutionException ee) {
                        throw new RuntimeException(ee);
                    }
                }
            }).sum() == insertions : "Missing inserted elements during eviction.";
            assert offsets.stream().mapToLong(new ToLongFunction<Future<Result>>() {
                public long applyAsLong(Future<Result> future) {
                    try {
                        return future.get().getDisk();
                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                        throw new RuntimeException(ie);
                    } catch (ExecutionException ee) {
                        throw new RuntimeException(ee);
                    }
                }
            }).sum() == fileCnt : "Missing disk elements during eviction.";

            final Collection<Callable<Void>> tasks = new ArrayList<Callable<Void>>(numThreads);
            // Id = 0
            tasks.add(new Callable<Void>() {
                public Void call() throws Exception {
                    final Result result = offsets.get(0).get();
                    final Iterator itr = new Iterator(a, result.getTable(), indexer);
                    ConcurrentOffHeapMSBFlusher.super.mergeNewEntries(inRAFs[0], outRAF, itr, 0, 0L, result.getDisk());
                    assert outRAF.getFilePointer() == result.getTotal() * FPSet.LongSize :
                            "First writer did not write expected amount of fingerprints to disk.";
                    return null;
                }
            });
            // Id > 0
            for (int i = 1; i < numThreads; i++) {
                final int id = i;
                tasks.add(new Callable<Void>() {
                    public Void call() throws Exception {
                        final RandomAccessFile tmpRAF = new BufferedRandomAccessFile(new File(tmpFilename), "rw");
                        tmpRAF.setLength(outRAF.length());
                        try {
                            // Sum up the combined number of elements in
                            // lower partitions.
                            long skipOutFile = 0L;
                            long skipInFile = 0L;
                            for (int j = 0; j < id; j++) {
                                skipInFile = skipInFile + offsets.get(j).get().getDisk();
                                skipOutFile = skipOutFile + offsets.get(j).get().getTotal();
                            }

                            // Set offsets into the out (tmp) file.
                            final Result result = offsets.get(id).get();
                            tmpRAF.seek(skipOutFile * FPSet.LongSize);

                            // Set offset and the number of elements the
                            // iterator is supposed to return.
                            final long table = result.getTable();
                            final Iterator itr = new Iterator(a, table, id * length, indexer);

                            final RandomAccessFile inRAF = inRAFs[id];
                            assert (skipInFile + result.getDisk()) * FPSet.LongSize <= inRAF.length();
                            inRAF.seek(skipInFile * FPSet.LongSize);

                            // Calculate where the index entries start and end.
                            final int idx = (int) Math.floor(skipOutFile / NumEntriesPerPage);
                            final long cnt = NumEntriesPerPage - (skipOutFile - (idx * NumEntriesPerPage));

                            // Stop reading after diskReads elements (after
                            // which the next thread continues) except for the
                            // last thread which reads until EOF. Pass 0 when
                            // nothing can be read from disk.
                            final long diskReads = id == numThreads - 1 ? fileCnt - skipInFile : result.getDisk();
                            ConcurrentOffHeapMSBFlusher.super.mergeNewEntries(inRAF, tmpRAF, itr, idx + 1, cnt,
                                    diskReads);

                            assert tmpRAF.getFilePointer() == (skipOutFile + result.getTotal()) * FPSet.LongSize :
                                    id + " writer did not write expected amount of fingerprints to disk.";
                        } finally {
                            tmpRAF.close();
                        }
                        return null;
                    }
                });
            }

            // Combine the callable results.
            try {
                executorService.invokeAll(tasks);
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(ie);
            } finally {
                executorService.shutdown();
            }

            assert checkTable(a) : "Missed element during eviction.";
            assert checkIndex(index) : "Inconsistent disk index.";
        }

        /**
         * Per-partition counts: fingerprints to be evicted from the table and
         * fingerprints in the old disk file belonging to this partition.
         */
        private class Result {
            private final long occupiedTable;
            private final long occupiedDisk;

            public Result(long occupiedTable, long occupiedDisk) {
                this.occupiedTable = occupiedTable;
                this.occupiedDisk = occupiedDisk;
            }

            public long getDisk() {
                return occupiedDisk;
            }

            public long getTable() {
                return occupiedTable;
            }

            public long getTotal() {
                return occupiedDisk + occupiedTable;
            }
        }
    }

    /**
     * Single-threaded flusher: sorts the whole array and merges it with the
     * existing disk file into a new one.
     */
    public class OffHeapMSBFlusher extends Flusher {

        protected final int r;
        protected final LongArray a;

        public OffHeapMSBFlusher(LongArray array, int reprobe) {
            a = array;
            r = reprobe;
        }

        /* (non-Javadoc)
         * @see tlc2.tool.fp.DiskFPSet.Flusher#prepareTable()
         */
        protected void prepareTable() {
            super.prepareTable();
            // Sort with a single thread.
            LongArrays.sort(a, 0, a.size() - 1L + r, getLongComparator());
        }

        /* (non-Javadoc)
         * @see tlc2.tool.fp.MSBDiskFPSet#mergeNewEntries(java.io.RandomAccessFile, java.io.RandomAccessFile)
         */
        @Override
        protected void mergeNewEntries(RandomAccessFile[] inRAFs, RandomAccessFile outRAF) throws IOException {
            final long buffLen = tblCnt.sum();
            final Iterator itr = new Iterator(array, buffLen, indexer);

            final int indexLen = calculateIndexLen(buffLen);
            index = new long[indexLen];

            mergeNewEntries(inRAFs, outRAF, itr, 0, 0L);

            // maintain object invariants
            fileCnt += buffLen;
        }

        protected void mergeNewEntries(RandomAccessFile[] inRAFs, RandomAccessFile outRAF, Iterator itr, int currIndex,
                long counter) throws IOException {
            inRAFs[0].seek(0);
            mergeNewEntries(inRAFs[0], outRAF, itr, currIndex, counter, inRAFs[0].length() / FPSet.LongSize);
        }

        /**
         * Merges the sorted in-memory fingerprints (via itr) with the sorted
         * fingerprints read from inRAF into outRAF, updating the in-memory
         * page index along the way. diskReads bounds how many longs are read
         * from inRAF (the last reader reads until EOF).
         */
        protected void mergeNewEntries(RandomAccessFile inRAF, RandomAccessFile outRAF, final Iterator itr,
                int currIndex, long counter, long diskReads) throws IOException {
            final int startIndex = currIndex;

            // initialize positions in "buff" and "inRAF"
            long value = 0L; // initialize only to make compiler happy
            boolean eof = false;
            if (fileCnt > 0) {
                try {
                    value = inRAF.readLong();
                    diskReads--;
                } catch (EOFException e) {
                    eof = true;
                }
            } else {
                assert diskReads == 0L;
                eof = true;
            }

            // merge while both lists still have elements remaining
            boolean eol = false;
            long fp = itr.next();
            while (!eof || !eol) {
                if ((value < fp || eol) && !eof) {
                    assert value > EMPTY : "Negative or zero fingerprint found: " + value;
                    outRAF.writeLong(value);
                    diskWriteCnt.increment();
                    // update in-memory index file
                    if (counter == 0) {
                        index[currIndex++] = value;
                        counter = NumEntriesPerPage;
                    }
                    counter--;
                    try {
                        value = inRAF.readLong();
                        // Stop once this reader's budget is exhausted (the
                        // next concurrent reader continues from here).
                        // NOTE(review): reconstructed decrement — confirm
                        // against upstream that the post-decrement compare
                        // matches the original.
                        if (diskReads-- == 0) {
                            eof = true;
                        }
                    } catch (EOFException e) {
                        eof = true;
                    }
                } else {
                    // prevent converting every long to String when assertion holds (this is expensive)
                    if (value == fp) {
                        //MAK: Commented cause a duplicate does not pose a risk for correctness.
                        // It merely indicates a bug somewhere.
                        //Assert.check(false, EC.TLC_FP_VALUE_ALREADY_ON_DISK,
                        //		String.valueOf(value));
                        MP.printWarning(EC.TLC_FP_VALUE_ALREADY_ON_DISK, String.valueOf(value));
                    }
                    assert fp > EMPTY : "Wrote an invalid fingerprint to disk.";
                    outRAF.writeLong(fp);
                    diskWriteCnt.increment();
                    // update in-memory index file
                    if (counter == 0) {
                        index[currIndex++] = fp;
                        counter = NumEntriesPerPage;
                    }
                    counter--;
                    // we used one fp up, thus move to next one
                    if (itr.hasNext()) {
                        fp = itr.next();
                    } else {
                        eol = true;
                    }
                }
            }

            // both sets used up completely
            Assert.check(eof && eol, EC.GENERAL);

            if (currIndex == index.length - 1) {
                // Update the last element in index with the larger one of the
                // current largest element of itr and the largest element of value.
                index[index.length - 1] = Math.max(fp, value);
            } else if (counter == 0) {
                // Write the last index entry if counter reached zero in the
                // while loop above.
                index[currIndex] = Math.max(fp, value);
            }
            assert checkIndex(Arrays.copyOfRange(index, startIndex, currIndex)) : "Inconsistent disk index range.";
        }
    }

    /* (non-Javadoc)
     * @see tlc2.tool.fp.DiskFPSet#calculateIndexLen(long)
     */
    protected int calculateIndexLen(final long tblcnt) {
        int indexLen = super.calculateIndexLen(tblcnt);
        if ((tblcnt + fileCnt - 1L) % NumEntriesPerPage == 0L) {
            // This is the special case where the largest fingerprint
            // happened is going to end up on the last entry of the previous
            // page. Thus, we won't need the last extra index cell.
            indexLen--;
        }
        return indexLen;
    }

    /**
     * A non-thread safe Iterator whose next method returns the next largest
     * element.
     */
    public static class Iterator {

        private enum WRAP {
            ALLOWED, FORBIDDEN;
        };

        private final long elements;
        private final LongArray array;
        private final Indexer indexer;
        private final WRAP canWrap;

        private long pos = 0;
        private long elementsRead = 0L;

        public Iterator(final LongArray array, final long elements, final Indexer indexer) {
            this(array, elements, 0L, indexer, WRAP.ALLOWED);
        }

        public Iterator(final LongArray array, final long elements, final long start, final Indexer indexer) {
            this(array, elements, start, indexer, WRAP.FORBIDDEN);
        }

        public Iterator(final LongArray array, final long elements, final long start, final Indexer indexer,
                final WRAP canWrap) {
            this.array = array;
            this.elements = elements;
            this.indexer = indexer;
            this.pos = start;
            this.canWrap = canWrap;
        }

        /**
         * Returns the next element in the iteration that is not EMPTY nor
         * marked evicted.
         * <p>
         * THIS IS NOT SIDEEFFECT FREE. AFTERWARDS, THE ELEMENT WILL BE MARKED
         * EVICTED.
         *
         * @return the next element in the iteration that is not EMPTY nor
         *         marked evicted.
         * @exception NoSuchElementException
         *                iteration has no more elements.
         */
        public long next() {
            long elem = EMPTY;
            do {
                final long position = pos % array.size();
                elem = array.get(position);
                if (elem == EMPTY) {
                    pos = pos + 1L;
                    continue;
                }
                if (elem < EMPTY) {
                    // Already marked evicted (msb set).
                    pos = pos + 1L;
                    continue;
                }
                final long baseIdx = indexer.getIdx(elem);
                if (baseIdx > pos) {
                    // This branch should only be active for thread with id 0.
                    assert canWrap == WRAP.ALLOWED;
                    pos = pos + 1L;
                    continue;
                }
                pos = pos + 1L;
                // mark elem in array as being evicted.
                array.set(position, elem | MARK_FLUSHED);
                elementsRead = elementsRead + 1L;
                return elem;
            } while (hasNext());
            throw new NoSuchElementException();
        }

        /**
         * Returns <tt>true</tt> if the iteration has more elements. (In other
         * words, returns <tt>true</tt> if <tt>next</tt> would return an element
         * rather than throwing an exception.)
         *
         * @return <tt>true</tt> if the iterator has more elements.
         */
        public boolean hasNext() {
            return elementsRead < elements;
        }
    }

    /**
     * @return -1L iff array is sorted, index/position of the element that violates otherwise.
     */
    private static long checkSorted(final LongArray array, final Indexer indexer, final int reprobe) {
        long e = 0L;
        for (long pos = 0L; pos < array.size() + reprobe; pos++) {
            final long tmp = array.get(pos % array.size());
            if (tmp <= EMPTY) {
                continue;
            }
            final long idx = indexer.getIdx(tmp);
            if (idx > pos) {
                continue;
            }
            if (idx + reprobe < pos) {
                continue;
            }
            if (e == 0L) {
                // Initialize e with the first element that is not <=EMPTY
                // or has wrapped.
                e = tmp;
                continue;
            }
            if (e >= tmp) {
                return pos;
            }
            e = tmp;
        }
        return -1L;
    }

    /**
     * @return true iff every element of the array has been evicted (no
     *         positive, i.e. un-flushed, fingerprint remains).
     */
    private static boolean checkTable(LongArray array) {
        for (long i = 0L; i < array.size(); i++) {
            long elem = array.get(i);
            if (elem > EMPTY) {
                return false;
            }
        }
        return true;
    }

    /**
     * @return true iff the disk index is strictly monotonically increasing.
     */
    private static boolean checkIndex(final long[] idx) {
        for (int i = 1; i < idx.length; i++) {
            if (idx[i - 1] >= idx[i]) {
                return false;
            }
        }
        return true;
    }
}
package com.mycode; import static com.google.common.base.CharMatcher.is; import com.google.common.io.Files; import java.beans.Statement; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.sql.Blob; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Base64; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import java.util.TimeZone; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.Part; import javax.servlet.jsp.JspException; import javax.servlet.jsp.JspWriter; import sun.misc.IOUtils; /** * * @author ryo */ public class AccessToDatabase { //String url = "jdbc:postgresql://localhost/postgres"; String url = "jdbc:postgresql://ec2-23-23-226-41.compute-1.amazonaws.com:5432/d4ppsmhem0c3dv"; //String user = "postgres"; String user = "srzpgehvjaipad"; //String password = "postgres"; int numberOfRow = 0; String password = "CH-M4-l4EC9xR72a-6i5F-YAH_"; String odateturo = "jdbc:postgresql://ec2-23-23-226-41.compute-1.amazonaws.com:5432/d4ppsmhem0c3dv?sslmode=require&user=srzpgehvjaipad&password=CH-M4-l4EC9xR72a-6i5F-YAH_&ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory"; //numberOfRow = 0; LinkedHashMap hashdata; String aritleTitle; String aritleContent; String timestamp; Blob image; String key; LinkedHashMap<Integer, LinkedHashMap<String, String>> outerMap = new LinkedHashMap<Integer, LinkedHashMap<String, String>>(); LinkedHashMap<String, 
String> innerMap; Connection con; ResultSet executeQuery; String imageBase64; private String value; public AccessToDatabase() throws SQLException, ClassNotFoundException, URISyntaxException, IOException, JspException { Class.forName("org.postgresql.Driver"); // PostgreSQL //Connection con = DriverManager.getConnection(url, user, password);//local con = DriverManager.getConnection(odateturo); hashdata = new LinkedHashMap(); try (java.sql.Statement stmt = con.createStatement()) { String sql = "select * FROM notebook_posts where status = 'active' ORDER BY post_timestamp DESC ;"; executeQuery = stmt.executeQuery(sql); while (executeQuery.next()) { innerMap = new LinkedHashMap<String, String>(); numberOfRow++; aritleTitle = executeQuery.getString("post_title"); aritleContent = executeQuery.getString("post_content"); JspWriter writer = null; // LFLFsplit //\nsplit String addedNewLinesAritleContent = ""; for (String str : aritleContent.replaceAll("\r\n", "\n").replaceAll("\r", "\n").split("\n")) { addedNewLinesAritleContent += "<p>"; addedNewLinesAritleContent += str; addedNewLinesAritleContent += "</p>"; } timestamp = executeQuery.getString("post_timestamp"); InputStream binaryStream = executeQuery.getBinaryStream("image"); key = executeQuery.getString("key"); hashdata.put(numberOfRow, aritleTitle); innerMap.put("title", aritleTitle); innerMap.put("content", addedNewLinesAritleContent); innerMap.put("timestamp", timestamp); InputStream inputStream1 = binaryStream; ByteArrayOutputStream buffer = new ByteArrayOutputStream(); if (inputStream1 != null) { int nRead; byte[] data = new byte[16384]; while ((nRead = inputStream1.read(data, 0, data.length)) != -1) { buffer.write(data, 0, nRead); } buffer.flush(); byte[] toByteArray = buffer.toByteArray(); imageBase64 = new String(Base64.getEncoder().encode(toByteArray)); if (imageBase64.isEmpty()) {//if 0byte then imageBase64 = null; } } else { imageBase64 = null; } innerMap.put("image", imageBase64); innerMap.put("key", key); 
outerMap.put(numberOfRow, innerMap); } } executeQuery.close(); } public void postDataToDatabase(HttpServletRequest request) throws SQLException, FileNotFoundException, IOException, ServletException, ParseException { // 1) create a java calendar instance Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("Asia/Tokyo")); // 2) get a java.util.Date from the calendar instance. java.util.Date now = calendar.getTime(); // 3) a java current time (now) instance java.sql.Timestamp currentTimestamp = new java.sql.Timestamp(now.getTime());//finish //chage to String SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); //simpleDateFormat.setTimeZone(tzTest);//? String timestamp = simpleDateFormat.format(currentTimestamp); Part part = request.getPart("image");//image update InputStream inputStream = part.getInputStream(); ByteArrayOutputStream buffer = new ByteArrayOutputStream(); int nRead; byte[] data = new byte[16384]; while ((nRead = inputStream.read(data, 0, data.length)) != -1) { buffer.write(data, 0, nRead); } buffer.flush(); byte[] toByteArray = buffer.toByteArray(); String sql = "insert into notebook_posts (post_title,post_content,post_timestamp,image) values (?,?,?,?);"; try (PreparedStatement statement = con.prepareStatement(sql)) { statement.setString(1, request.getParameter("title")); statement.setString(2, request.getParameter("text")); statement.setTimestamp(3, currentTimestamp, calendar); statement.setBytes(4, toByteArray); int row = statement.executeUpdate(); } } public void deleteDatatoDatabase(String keynumber) throws SQLException { //String sql = "delete from notebook_posts where key = " + keynumber; String sql = "update notebook_posts set status = 'invalid' where key = " + keynumber; java.sql.Statement resultStmt; resultStmt = con.createStatement(); resultStmt.execute(sql); } public int getDataFromDatabase() throws SQLException, ClassNotFoundException { return numberOfRow; } public LinkedHashMap getContents() { return 
outerMap; } }
package tlc2.tool.liveness;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import tlc2.TLCGlobals;
import tlc2.output.EC;
import tlc2.output.MP;
import tlc2.output.StatePrinter;
import tlc2.tool.EvalException;
import tlc2.tool.TLCState;
import tlc2.tool.TLCStateInfo;
import tlc2.util.IdThread;
import tlc2.util.IntStack;
import tlc2.util.LongVec;
import tlc2.util.MemIntQueue;
import tlc2.util.MemIntStack;
import tlc2.util.SynchronousDiskIntStack;
import tlc2.util.statistics.BucketStatistics;
import tlc2.util.statistics.IBucketStatistics;

/**
 * {@link LiveWorker} is doing the heavy lifting of liveness checking:
 * <ul>
 * <li>Searches for strongly connected components (SCC) a.k.a. cycles in the
 * liveness/behavior graph.</li>
 * <li>Checks each SCC if it violates the liveness properties.</li>
 * <li>In case of a violation, reconstructs and prints the error trace.</li>
 * </ul>
 */
public class LiveWorker extends IdThread {

	/**
	 * A marker that is pushed onto the dfsStack during SCC depth-first-search
	 * to mark an explored node on the stack.
	 * <p>
	 * A node with a marker is on the comStack.
	 */
	private static final long SCC_MARKER = -42L;

	/** Histogram over the sizes of the SCCs found during checking. */
	public static final IBucketStatistics STATS = new BucketStatistics("Histogram SCC sizes", LiveWorker.class
			.getPackage().getName(), "StronglyConnectedComponent sizes");

	// Id of the first worker thread that found an error, or -1 while no error
	// has been found. Guarded by workerLock.
	private static int errFoundByThread = -1;
	// Lock guarding errFoundByThread across all LiveWorker instances.
	private static final Object workerLock = new Object();

	// Order of solution / disk graph / possible error model currently being
	// checked. Assigned outside this excerpt -- NOTE(review): confirm where.
	private OrderOfSolution oos = null;
	private AbstractDiskGraph dg = null;
	private PossibleErrorModel pem = null;
	private final ILiveCheck liveCheck;
	private final BlockingQueue<ILiveChecker> queue;
	// True iff this is the final check on the complete graph; then no node may
	// be "undone" (see the asserts in checkSccs below).
	private final boolean isFinalCheck;
	/**
	 * Total number of LiveWorkers simultaneously checking liveness.
	 */
	private final int numWorkers;

	public LiveWorker(int id, int numWorkers, final ILiveCheck liveCheck, final BlockingQueue<ILiveChecker> queue, final boolean finalCheck) {
		super(id);
		this.numWorkers = numWorkers;
		this.liveCheck = liveCheck;
		this.queue = queue;
		this.isFinalCheck = finalCheck;

		// Set the name to something more indicative than "Thread-4711".
		this.setName("TLCLiveWorkerThread-" + String.format("%03d", id));
	}

	/**
	 * Returns true iff an error has already been found.
	 */
	public static boolean hasErrFound() {
		synchronized (workerLock) {
			return (errFoundByThread != -1);
		}
	}

	/**
	 * Returns true iff either an error has not been found or the error is found
	 * by this thread.
	 * <p>
	 * This is used so that only one of the threads which have found an error
	 * prints it.
	 */
	private/* static synchronized */boolean setErrFound() {
		synchronized (workerLock) {
			if (errFoundByThread == -1) {
				errFoundByThread = this.myGetId(); // GetId();
				return true;
			} else if (errFoundByThread == this.myGetId()) { // (* GetId()) {
				return true;
			}
			return false;
		}
	}

	/**
	 * Searches the disk graph for strongly connected components (iterative
	 * Tarjan over an explicit stack) and checks each one for liveness
	 * violations via checkComponent.
	 */
	private final void checkSccs() throws IOException, InterruptedException, ExecutionException {
		// Initialize this.dg:
		this.dg.makeNodePtrTbl();

		// Initialize nodeQueue with initial states. The initial states stored
		// separately in the DiskGraph are resolved to their pointer location
		// in the on-disk part of the DiskGraph.
		// The pointer location generally is used to:
		// * Speed up disk lookups in the RandomAccessFile(s) backing up the DiskGraph
		// * Is replaced by the SCC link number the moment the node's successors
		//   are explored during DFS search. At this point the ptr location isn't
		//   needed anymore. The successors have been resolved.
		// From each node in nodeQueue the SCC search is started down below,
		// which can subsequently add additional nodes into nodeQueue.
		// Contrary to plain Tarjan, not all vertices are added to the
		// nodeQueue of unexplored states, but only the initial states. Since we
		// know that all non-initial states are reachable from the set of
		// initial states, this is sufficient to start with.
		final LongVec initNodes = this.dg.getInitNodes();
		final int numOfInits = initNodes.size();
		// Allocate space for all initial states, assuming the majority of
		// initial nodes will be done. Multiplied by 5 because of
		// <<long, int, long>> per "record".
		final MemIntQueue nodeQueue = new MemIntQueue(liveCheck.getMetaDir(), "root", (numOfInits / 2) * 5);
		for (int j = 0; j < numOfInits; j += 2) {
			final long state = initNodes.elementAt(j);
			final int tidx = (int) initNodes.elementAt(j + 1);
			final long ptr = this.dg.getLink(state, tidx);
			// Check if the node <<state, tidx>> s is done. A node s is undone
			// if it is an initial state which hasn't been explored yet. This is
			// the case if s has been added via LiveChecker#addInitState but not
			// yet via LiveChecker#addNextState. LiveChecker#addNextState fully
			// explores the given init state s because it has access to s'
			// successors.
			if (ptr >= 0) {
				// Make sure none of the init states has already been assigned a
				// link number. That would indicate a bug in makeNodePtrTbl
				// which is supposed to reset all link numbers to file ptrs.
				assert DiskGraph.isFilePointer(ptr);
				nodeQueue.enqueueLong(state);
				nodeQueue.enqueueInt(tidx);
				nodeQueue.enqueueLong(ptr);
			} else {
				// If this is the final check on the complete graph, no node is
				// allowed to be undone. If it's not the final check, ptr has to
				// be UNDONE (a non-UNDONE negative pointer is probably a bug).
				// isFinalCheck => ptr # UNDONE
				assert !isFinalCheck || ptr != TableauNodePtrTable.UNDONE;
			}
		}

		final int[] eaaction = this.pem.EAAction;
		final int slen = this.oos.getCheckState().length;
		final int alen = this.oos.getCheckAction().length;

		// Tarjan's stack.
		// Append thread id to name for unique disk files during concurrent SCC search.
		final IntStack dfsStack = getStack(liveCheck.getMetaDir(), "dfs" + this.myGetId());

		// comStack is only being added to during the depth first search. It is passed
		// to the checkComponent method while in DFS though. Note that the nodes pushed
		// onto comStack don't necessarily form a strongly connected component (see
		// comment above this.checkComponent(...) below for more details).
		// See tlc2.tool.liveness.LiveWorker.DetailedFormatter.toString(MemIntStack)
		// which is useful during debugging.
		final IntStack comStack = getStack(liveCheck.getMetaDir(), "com" + this.myGetId());

		// Generate the SCCs and check if they contain a "bad" cycle.
		while (nodeQueue.size() > 0) {
			// Pick one of the unexplored nodes as root and start searching the
			// reachable SCCs from it.
			final long state = nodeQueue.dequeueLong();
			final int tidx = nodeQueue.dequeueInt();
			final long loc = nodeQueue.dequeueLong();

			// Reset (remove all elements) the stack. Logically a new SCC search
			// is being started unrelated to the previous one.
			dfsStack.reset();

			// Push the first node onto the DFS stack which makes it the node
			// from which the depth-first-search is being started.
			dfsStack.pushLong(state);
			dfsStack.pushInt(tidx);
			dfsStack.pushLong(loc);
			// Push the smallest possible link number (confusingly called
			// MAX_PTR here but only because file pointers are < MAX_PTR) as the
			// first link number:
			// [0, MAX_PTR) for file pointers
			// [MAX_PTR, MAX_LINK] for links
			dfsStack.pushLong(DiskGraph.MAX_PTR);
			long newLink = DiskGraph.MAX_PTR;

			// >= 7 because a stack frame is the tuple
			// <<state (long), tidx (int), loc (long), link (long)>>.
			while (dfsStack.size() >= 7) {
				final long lowLink = dfsStack.popLong();
				final long curLoc = dfsStack.popLong();
				final int curTidx = dfsStack.popInt();
				final long curState = dfsStack.popLong();
				// At this point curLoc is still a file pointer (small MAX_PTR)
				// and not yet replaced by a link (MAX_PTR < curLoc < MAX_LINK).
				// NOTE(review): this assert is evaluated before the SCC_MARKER
				// check below, so isFilePointer presumably also accepts the
				// negative SCC_MARKER value -- confirm.
				assert DiskGraph.isFilePointer(curLoc);
				// The current node is explored iff curLoc < 0. If it is indeed fully explored,
				// it means it has potentially found an SCC. Thus, check if this is the case
				// for the current GraphNode.
				// A node is fully explored if the nested loop over its
				// successors down below in the else branch has not revealed any
				// unexplored successors.
				if (curLoc == SCC_MARKER) {
					// Check if the current node's link is lowLink which
					// indicates that the nodes on comStack up to <<curState,
					// curTidx>> form an SCC.
					// If curLink # lowLink, continue by pop'ing the next node
					// from dfsStack. It can either be:
					// - unexplored in which case the else branch is taken and
					//   DFS continues.
					// - be an intermediate node of the SCC and thus curLink =
					//   lowLink for it too.
					// - can be the start of the SCC (curLink = lowLink).
					final long curLink = this.dg.getLink(curState, curTidx);
					assert curLink < AbstractDiskGraph.MAX_LINK;
					if (curLink == lowLink) {
						// The states on the comStack from "top" to <<curState,
						// curTidx>> form an SCC, thus check for "bad" cycle.
						// The cycle does not necessarily include all states in
						// comStack. "top" might very well be curState in which
						// case only a single state is checked by
						// checkComponent.
						// The aforementioned case happens regularly when the
						// behaviors to check don't have cycles at all (leaving
						// single node cycles aside for the moment). The DFS
						// followed each behavior from its initial state (see
						// nodeQueue) all the way to the behavior's end state at
						// which point DFS halts. Since DFS cannot continue
						// (there are no successors) it calls checkComponent now
						// with the current comStack and the end state as
						// <<curState, curTidx>> effectively checking the
						// topmost element of comStack. Unless this single state
						// violates any liveness properties, it gets removed
						// from comStack and DFS continues. Iff DFS still cannot
						// continue because the predecessor to endstate
						// (endstate - 1) has no more successors to explore
						// either, it again calls checkComponent for the single
						// element (endstate - 1). This goes on until either the
						// initial state is reached or an intermediate state has
						// unexplored successors with DFS.
						final boolean isOK = this.checkComponent(curState, curTidx, comStack);
						if (!isOK) {
							// Found a "bad" cycle of one to comStack.size()
							// nodes, no point in searching for more SCCs as we
							// are only interested in one counter-example at a
							// time.
							// checkComponent will have printed the
							// counter-example by now.
							return;
						}
					}
					// Replace previous lowLink (plowLink) with the minimum of
					// the current lowLink and plowLink on the stack.
					final long plowLink = dfsStack.popLong();
					dfsStack.pushLong(Math.min(plowLink, lowLink));

				// No SCC found yet
				} else {
					// Assign newLink to curState:
					final long link = this.dg.putLink(curState, curTidx, newLink);
					// link is -1 if newLink has been assigned to pair
					// <<curState, curTidx>>. If the pair had been assigned a
					// link before, the previous link in range [MAX_PTR,
					// MAX_LINK] is returned. If the link is not -1, it means
					// the node has been explored by this DFS search before.
					if (link == -1) {
						// Push curState back onto dfsStack, but make curState
						// explored:
						dfsStack.pushLong(lowLink);
						dfsStack.pushLong(curState);
						dfsStack.pushInt(curTidx);
						// Push a marker onto the stack that, if pop'ed as
						// curLoc above causes branching to enter the true case
						// of the if block.
						dfsStack.pushLong(SCC_MARKER);

						// Add the tuple <<curState, curTidx, curLoc>> to comStack:
						comStack.pushLong(curLoc);
						comStack.pushInt(curTidx);
						comStack.pushLong(curState);

						// Look at all the successors of curState:
						final GraphNode gnode = this.dg.getNode(curState, curTidx, curLoc);
						final int succCnt = gnode.succSize();
						long nextLowLink = newLink;
						// DFS moved on to a new node, thus increment the newLink
						// number by 1 for subsequent exploration.
						newLink = newLink + 1;
						for (int i = 0; i < succCnt; i++) {
							final long nextState = gnode.getStateFP(i);
							final int nextTidx = gnode.getTidx(i);
							final long nextLink = this.dg.getLink(nextState, nextTidx);
							// If <<nextState, nextTidx>> node's link is < 0 it
							// means the node isn't "done" yet (see
							// tlc2.tool.liveness.TableauNodePtrTable.UNDONE).
							// A successor node t of gnode is undone if it is:
							// - An initial state which hasn't been explored yet
							// - t has not been added to the liveness disk graph
							//   itself (only as the successor (transition) of
							//   gnode).
							// If it is >= 0, it either is a:
							// - file pointer location
							// - a previously assigned link (>= MAX_PTR)
							// Iff nextLink == MAX_PTR, it means that the
							// <<nextState, nextTidx>> successor node has been
							// processed by checkComponent. The checks below
							// will result in the successor node being skipped.
							// It is possible that <<nextState, nextTidx>> =
							// <<curState, curTid>> due to self loops. This is
							// intended, as checkAction has to be evaluated for
							// self loops too.
							if (nextLink >= 0) {
								// Check if the arc/transition from <<curState,
								// curTidx>> to <<nextState, nextTidx>>
								// satisfies ("P-satisfiable" MP page 422ff)
								// its PEM's EAAction. If it does, 1/3 of the
								// conditions for P-satisfiability are
								// satisfied. Thus it makes sense to check the
								// other 2/3 in checkComponent (AEAction &
								// Fulfilling promises). If the EAAction does
								// not hold, there is no point in checking the
								// other 2/3. All must hold for
								// P-satisfiability.
								// This check is related to the fairness spec.
								// Usually, it evals to true when no or weak
								// fairness have been specified. False on strong
								// fairness.
								if (gnode.getCheckAction(slen, alen, i, eaaction)) {
									// If the node's nextLink still points to
									// disk, it means it has no link assigned
									// yet which is the case if this node gets
									// explored during DFS search the first
									// time. Since it is new, add it to dfsStack
									// to have it explored subsequently by DFS.
									if (DiskGraph.isFilePointer(nextLink)) {
										dfsStack.pushLong(nextState);
										dfsStack.pushInt(nextTidx);
										dfsStack.pushLong(nextLink); // nextLink is logically a ptr/loc here
										// One would expect a (logical) lowLink
										// being pushed (additionally to the
										// ptr/loc in previous line) onto the
										// stack here. However, it is pushed
										// down below after all successors are
										// on the stack and valid for the
										// topmost successor. For the other
										// successors below the topmost, a link
										// number will be assigned subsequently.
									} else {
										// The node has been processed
										// already, thus use the minimum of its link
										// (nextLink) and nextLowLink.
										nextLowLink = Math.min(nextLowLink, nextLink);
									}
								} else {
									// The transition from <<curState, curTidx>>
									// to <<nextState, nextTidx>> is not
									// P-satisfiable and thus does not need to
									// be checkComponent'ed. However, since we
									// only added initial but no intermediate
									// states to nodeQueue above, we have to add
									// <<nextState, nextTidx>> to nodeQueue if
									// it's still unprocessed (indicated by its
									// on disk state). The current path
									// potentially might be the only one by
									// which DFS can reach it.
									if (DiskGraph.isFilePointer(nextLink)) {
										nodeQueue.enqueueLong(nextState);
										nodeQueue.enqueueInt(nextTidx);
										nodeQueue.enqueueLong(nextLink); // nextLink is logically a ptr/loc here
									}
								}
							} else {
								// If this is the final check on the complete
								// graph, no node is allowed to be undone. If
								// it's not the final check, nextLink has to be
								// UNDONE (a non-UNDONE negative nextLink is
								// probably a bug).
								// isFinalCheck => nextLink # UNDONE
								assert !isFinalCheck || nextLink != TableauNodePtrTable.UNDONE;
							}
						}
						// Push the next lowLink onto stack on top of all
						// successors. It is assigned to the topmost
						// successor only though.
						dfsStack.pushLong(nextLowLink);
					} else {
						// link above wasn't "-1", thus it has to be a valid
						// link in the known interval.
						assert AbstractDiskGraph.MAX_PTR <= link && link <= AbstractDiskGraph.MAX_LINK;
						// Push the minimum of the two links onto the stack. If
						// link == DiskGraph.MAX_PTR lowLink will always be the
						// minimum (unless this graph has a gigantic amount of
						// SCCs exceeding (MAX_LINK - MAX_PTR).
						dfsStack.pushLong(Math.min(lowLink, link));
					}
				}
			}
		}
		// Make sure all nodes on comStack have been checkComponent()'ed
		assert comStack.size() == 0;
	}

	/**
	 * Chooses a stack implementation for the SCC search: an in-memory stack
	 * when the disk graph is small relative to free memory, otherwise a
	 * disk-backed one sized proportionally to the overshoot.
	 */
	private IntStack getStack(final String metaDir, final String name) throws IOException {
		// Synchronize all LiveWorker instances to consistently read free
		// memory. This method is only called during initialization of SCC
		// search, thus synchronization should not cause significant thread
		// contention.
		synchronized (LiveWorker.class) {
			// It is unlikely that the stacks will fit into memory if the
			// size of the behavior graph is larger relative to the available
			// memory. Also take the total number of simultaneously running
			// workers into account that have to share the available memory
			// among each other.
			final double freeMemoryInBytes = (Runtime.getRuntime().freeMemory() / (numWorkers * 1d));
			final long graphSizeInBytes = this.dg.getSizeOnDisk();
			final double ratio = graphSizeInBytes / freeMemoryInBytes;
			if (ratio > TLCGlobals.livenessGraphSizeThreshold) {
				// Double SDIS's bufSize/pageSize by how much the graph size
				// overshoots the free memory size, but limit page size to 1gb.
				// Also, don't allocate more than what is available.
				final int capacityInBytes = SynchronousDiskIntStack.BufSize << Math.min((int) ratio, 5);
				if (capacityInBytes < freeMemoryInBytes) {
					return new SynchronousDiskIntStack(metaDir, name, capacityInBytes);
				} else {
					// Use default SDIS which is 32mb of in-memory size
					return new SynchronousDiskIntStack(metaDir, name);
				}
			}
			// If the disk graph as a whole fits into memory, do not use a
			// disk-backed SynchronousDiskIntStack.
			return new MemIntStack(metaDir, name);
		}
	}

	/**
	 * Checks whether the component whose nodes sit on comStack above
	 * <<state, tidx>> violates the liveness properties (P-satisfiability of
	 * the PEM). Returns false iff a counter-example was found (and printed).
	 */
	private boolean checkComponent(final long state, final int tidx, final IntStack comStack) throws IOException, InterruptedException, ExecutionException {
		final long comStackSize = comStack.size();
		// There is something to pop and each is a well formed tuple <<fp, tidx, loc>>
		assert comStackSize >= 5 && comStackSize % 5 == 0; // long + int + long

		long state1 = comStack.popLong();
		int tidx1 = comStack.popInt();
		long loc1 = comStack.popLong();

		// Simply return if the component is trivial: It is trivial iff the component
		// has a single node AND this node is *no* stuttering node.
		if (state1 == state && tidx1 == tidx && !isStuttering(state1, tidx1, loc1)) {
			this.dg.setMaxLink(state, tidx);
			return true;
		}

		// Now, we know we are working on a non-trivial component.
		// We first put all the nodes in this component in a hashtable.
		// The nodes in this component do not correspond to
		// all elements on the comStack though. Only the nodes up to
		// the given one are copied to NodePtrTable.
		// The NodePtrTable would ideally be initialized with the number of
		// nodes in the comStack. This is the upper limit of elements going
		// to be kept in com. However, it would destroy NodePtrTable's
		// collision handling. NodePtrTable uses open addressing (see
		// Initializing the NTPT with 128 buckets/slots is a significant memory
		// overhead (especially when comStack contains < 10 elements) which
		// regularly results in OutOfMemoryErrors being thrown. To alleviate the
		// problem the key-space of the comStack elements could be checked and
		// the minimum possible collision-free TNPT size be calculated.
		// (Btw. the implementation uses a TNPT in the first place because it is
		// passed on to printTrace iff an error is found. The implementation
		// here could use a simple java.util.Map or HashTable technically.)
		final TableauNodePtrTable com = new TableauNodePtrTable(128);
		while (true) {
			// Add <state1, tidx1> into com:
			com.put(state1, tidx1, loc1);
			this.dg.setMaxLink(state1, tidx1);

			// Get the next node of the component:
			if (state == state1 && tidx == tidx1) {
				break;
			}
			state1 = comStack.popLong();
			tidx1 = comStack.popInt();
			loc1 = comStack.popLong();
		}
		// Just parameter node in com OR com subset of comStack
		assert com.size() <= (comStackSize / 5);
		STATS.addSample(com.size());

		// Check this component:
		final int slen = this.oos.getCheckState().length;
		final int alen = this.oos.getCheckAction().length;
		final int aeslen = this.pem.AEState.length;
		final int aealen = this.pem.AEAction.length;
		final int plen = this.oos.getPromises().length;
		final boolean[] AEStateRes = new boolean[aeslen];
		final boolean[] AEActionRes = new boolean[aealen];
		final boolean[] promiseRes = new boolean[plen];

		// Extract a node from the nodePtrTable "com".
		// Note the upper limit is NodePtrTable#getSize() instead of
		// the more obvious NodePtrTable#size().
		// NodePtrTable internally hashes the elements to buckets
		// and isn't filled start to end. Thus, the code
		// below iterates NodePtrTable front to end skipping null buckets.
// Note that the nodes are processed in random order (depending on a // node's hash in TableauNodePtrTbl) and not in the order given by // comStack. This is fine because the all checks have been evaluated // eagerly during insertion into the liveness graph long before the // SCC search started. Thus, the code here only has to check the // check results which can happen in any order. final int tsz = com.getSize(); for (int ci = 0; ci < tsz; ci++) { final int[] nodes = com.getNodesByLoc(ci); if (nodes == null) { // miss in NotePtrTable (null bucket) continue; } state1 = TableauNodePtrTable.getKey(nodes); for (int nidx = 2; nidx < nodes.length; nidx += com.getElemLength()) { // nidx starts with 2 because [0][1] are the long fingerprint state1. tidx1 = TableauNodePtrTable.getTidx(nodes, nidx); loc1 = TableauNodePtrTable.getElem(nodes, nidx); final GraphNode curNode = this.dg.getNode(state1, tidx1, loc1); // Check AEState: for (int i = 0; i < aeslen; i++) { // Only ever set AEStateRes[i] to true, but never to false // once it was true. It only matters if one state in com // the inversion of <>[]p). // It obviously has to check all nodes in the component // (com) if either of them violates AEState unless all // elements of AEStateRes are true. From that point onwards, // checking further states wouldn't make a difference. if (!AEStateRes[i]) { int idx = this.pem.AEState[i]; AEStateRes[i] = curNode.getCheckState(idx); // Can stop checking AEStates the moment AEStateRes // is completely set to true. However, most of the time // aeslen is small and the compiler will probably optimize // out. } } // Check AEAction: A TLA+ action represents the relationship // between the current node and a successor state. The current // node has n successor states. For each pair, see iff the // successor is in the "com" NodePtrTablecheck, check actions // and store the results in AEActionRes(ult). 
Note that the // actions have long been checked in advance when the node was // added to the graph and the actual state and not just its // fingerprint was available. Here, the result is just being // looked up. final int succCnt = aealen > 0 ? curNode.succSize() : 0; // No point in looping successors if there are no AEActions to check on them. for (int i = 0; i < succCnt; i++) { final long nextState = curNode.getStateFP(i); final int nextTidx = curNode.getTidx(i); // For each successor <<nextState, nextTdix>> of curNode's // successors check, if it is part of the currently // processed SCC (com). Successors, which are not part of // the current SCC have obviously no relevance here. After // all, we check the SCC. if (com.getLoc(nextState, nextTidx) != -1) { for (int j = 0; j < aealen; j++) { // Only set false to true, but never true to false. if (!AEActionRes[j]) { final int idx = this.pem.AEAction[j]; AEActionRes[j] = curNode.getCheckAction(slen, alen, i, idx); } } } } // Check that the component is fulfilling. (See MP page 453.) // Note that the promises are precomputed and stored in oos. for (int i = 0; i < plen; i++) { final LNEven promise = this.oos.getPromises()[i]; final TBPar par = curNode.getTNode(this.oos.getTableau()).getPar(); if (par.isFulfilling(promise)) { promiseRes[i] = true; } } } } // We find a counterexample if all three conditions are satisfied. If // either of the conditions is false, it means the PEM does not hold and // thus the liveness properties are not violated by the SCC. // All AEState properties, AEActions and promises of PEM must be // satisfied. If a single one isn't satisfied, the PEM as a whole isn't // P-satisfiable. That's why it returns on the first false. As stated // before, EAAction have already been checked if satisfiable. // checkComponent is only called if the EA actions are satisfiable. // Technically: No error is found if any of the AEStateRes, AEActionRes // or promiseRes booleans is false. 
for (int i = 0; i < aeslen; i++) { if (!AEStateRes[i]) { return true; } } for (int i = 0; i < aealen; i++) { if (!AEActionRes[i]) { return true; } } for (int i = 0; i < plen; i++) { if (!promiseRes[i]) { return true; } } // This component must contain a counter-example because all three // conditions are satisfied. So, print a counter-example (if this thread // is the first one to find a counter-example)! if (setErrFound()) { this.printTrace(state, tidx, com); } return false; } /* Check if the node <state, tidx> stutters. */ private boolean isStuttering(long state, int tidx, long loc) throws IOException { final int slen = this.oos.getCheckState().length; final int alen = this.oos.getCheckAction().length; // Find the self loop and check its <>[]action final GraphNode gnode = this.dg.getNode(state, tidx, loc); final int succCnt = gnode.succSize(); for (int i = 0; i < succCnt; i++) { final long nextState = gnode.getStateFP(i); final int nextTidx = gnode.getTidx(i); if (state == nextState && tidx == nextTidx) { return gnode.getCheckAction(slen, alen, i, this.pem.EAAction); } } // <state, tidx> has no self loop, thus cannot stutter return false; } /** * Print out the error state trace by finding a cycle in the given SCC. The * method first generates a "bad" cycle from the current scc, and then * generates a prefix path from some initial state to the "bad" cycle in the * state graph. The prefix path and the "bad" cycle together forms a * counter-example. * <p> * Additionally, the first part can be divided into the two while loops A) * and B). A) re-creates the sub-path of the error trace starting at the * start state of the SCC as given by the parameters and ends when all * states have be accumulated that -combined- violate the liveness * properties. Iff the last state after termination of A) is not equal to * the start state, there is a gap in the cycle. Thus, B) task is to close * the gap. 
* <p> * * @see tlatools/test-model/symmetry/ErrorTraceConstructionPhases.png for a * sketch. * @see tlc2.tool.liveness.ErrorTraceConstructionTest which runs a spec that * exemplifies the three staged error trace composition * * @param state * fingerprint of the state which is the "starting" state of the * SCC in nodeTbl. * @param tidx * tableau index pointing to the {@link TBGraph}. Corresponds to * the state fingerprint. Combined <<state, tidx>> unique * identify a node in the liveness/behavior graph. * @param nodeTbl * The current SCC which is known to satisfy the * {@link PossibleErrorModel} and thus violates the liveness * properties. * @throws ExecutionException * @throws InterruptedException */ private void printTrace(final long state, final int tidx, final TableauNodePtrTable nodeTbl) throws IOException, InterruptedException, ExecutionException { // System.out.println(toDotViz(state, tidx, nodeTbl)); MP.printError(EC.TLC_TEMPORAL_PROPERTY_VIOLATED); MP.printError(EC.TLC_COUNTER_EXAMPLE); /* * Use a dedicated thread to concurrently search a prefix-path from some * initial node to the state identified by <<state, tidx>>. */ final ExecutorService executor = Executors.newFixedThreadPool(1); final Future<List<TLCStateInfo>> future = executor.submit(new Callable<List<TLCStateInfo>>() { /* (non-Javadoc) * @see java.util.concurrent.Callable#call() */ public List<TLCStateInfo> call() throws Exception { // Print the error trace. We first construct the prefix that // led to the bad cycle. The nodes on prefix and cycleStack then // form the complete counter example. int stateNum = 0; final LongVec prefix = LiveWorker.this.dg.getPath(state, tidx); final int plen = prefix.size(); final List<TLCStateInfo> states = new ArrayList<TLCStateInfo>(plen); // Recover the initial state: //TODO This throws an ArrayIndexOutOfBounds if getPath returned a // LongVec with just a single element. This happens when the parameter // state is one of the init states already. 
long fp = prefix.elementAt(plen - 1); TLCStateInfo sinfo = liveCheck.getTool().getState(fp); if (sinfo == null) { throw new EvalException(EC.TLC_FAILED_TO_RECOVER_INIT); } sinfo.stateNumber = stateNum++; states.add(sinfo); // Recover the successor states: //TODO Check if path.size has elements for (int i = plen - 2; i >= 0; i long curFP = prefix.elementAt(i); // The prefix might contain duplicates if the path happens to walk // along two (or more distinct states which differ in the tableau // idx only (same fingerprint). From the counterexample perspective, // this is irrelevant iff the identical fingerprints are contiguous. // It won't be correct to shorten a path <<fp1,fp2,fp1>> to // <<fp2,fp1>> though. if (curFP != fp) { sinfo = liveCheck.getTool().getState(curFP, sinfo.state); if (sinfo == null) { throw new EvalException(EC.TLC_FAILED_TO_RECOVER_NEXT); } sinfo.stateNumber = stateNum++; states.add(sinfo); fp = curFP; } } // Print the prefix: TLCState lastState = null; for (int i = 0; i < stateNum; i++) { StatePrinter.printState(states.get(i), lastState, i + 1); lastState = states.get(i).state; } return states; } }); /* * With the executor concurrently working on the prefix, let this thread * work on the postfix (cycle). */ final MemIntStack cycleStack = new MemIntStack(liveCheck.getMetaDir(), "cycle"); GraphNode curNode = dfsPostFix(state, tidx, nodeTbl, cycleStack); /* * If the cycle is not closed/completed (complete when startState == * state), continue from the curNode at which the previous while loop * terminated and follow its successors until the start state shows up. */ final LongVec postfix = bfsPostFix(state, tidx, nodeTbl, curNode); /* * At this point the cycle part of the error trace has been constructed. * cycleStack contains the states from the start state of the SCC up to * the state that violates all liveness properties. postfix contains the * suffix from the violating state back to the start state of the SCC. 
* Thus, append the reversed cycleStack onto postfix (cycleStack has the * last state at the top). Postfix then contains the minimal path in the * SCC that violates the liveness property. */ while (cycleStack.size() > 0) { // Do not filter successive <<fp,tidx,permId>> here but do it below // when the actual states get printed. See Test3.tla for reason why. postfix.addElement(cycleStack.popLong()); cycleStack.popInt(); // ignore tableau idx. The tableau idx is // irrelevant as <<fpA, tidx1>> and <<fpA, // tidx2>> both map to the same state in the // error trace. } // Wait for the prefix-path to be searched/generated and fully printed. // get() is a blocking call that makes this thread wait for the executor // to finish its job of searching and printing the prefix-path. final List<TLCStateInfo> states = future.get(); /* * At this point everything from the initial state up to the start state * of the SCC has been printed. Now, print the states in postfix. Obtain * the last state from the prefix (which corresponds to <<state, tidx>>) * to use it to generate the next state. Obviously, we have to wait for * the prefix thread to be done for two reasons: a) the trace has to be * printed and b) we need the TLCState instance to generate the * successor states in the cycle. */ TLCStateInfo sinfo = states.get(states.size() - 1); TLCState lastState = sinfo.state; long fp = lastState.fingerPrint(); int stateNum = (int) sinfo.stateNumber + 1; final int cyclePos = stateNum; final long cycleFP = fp; // Assert.assert(fps.length > 0); for (int i = postfix.size() - 1; i >= 0; i final long curFP = postfix.elementAt(i); // Only print the state if it differs from its predecessor. We don't // want to print an identical state twice. This can happen if the // loops A) and B) above added an identical state multiple times // into cycleStack/postfix. // The reason we don't simply compare the actual states is for // efficiency reason. Regenerating the next state might be // expensive. 
if (curFP != fp) { sinfo = liveCheck.getTool().getState(curFP, sinfo.state); if (sinfo == null) { throw new EvalException(EC.TLC_FAILED_TO_RECOVER_NEXT); } StatePrinter.printState(sinfo, lastState, ++stateNum); lastState = sinfo.state; // keep lastState to be able to print the diff in StatePringer.printState if requested by user. fp = curFP; } } /* All error trace states have been printed (prefix + cycleStack + * postfix). What is left is to print either the stuttering or the * back-to-cyclePos marker. */ if (fp == cycleFP) { StatePrinter.printStutteringState(++stateNum); } else { sinfo = liveCheck.getTool().getState(cycleFP, sinfo.state); if (sinfo == null) { throw new EvalException(EC.TLC_FAILED_TO_RECOVER_NEXT); } // The print stmts below claim there is a cycle, thus assert that // there is indeed one. Index-based lookup into states array is // reduced by one because cyclePos is human-readable. assert states.get(cyclePos - 1).state.equals(sinfo.state); StatePrinter.printBackToState(cyclePos); } } // BFS search private LongVec bfsPostFix(final long state, final int tidx, final TableauNodePtrTable nodeTbl, GraphNode curNode) throws IOException { final LongVec postfix = new LongVec(16); final long startState = curNode.stateFP; final long startTidx = curNode.tindex; if (startState != state || startTidx != tidx) { final MemIntQueue queue = new MemIntQueue(liveCheck.getMetaDir(), null); long curState = startState; int ploc = -1; int curLoc = nodeTbl.getNodesLoc(curState); int[] nodes = nodeTbl.getNodesByLoc(curLoc); TableauNodePtrTable.setSeen(nodes); _done: while (true) { int tloc = TableauNodePtrTable.startLoc(nodes); while (tloc != -1) { final int curTidx = TableauNodePtrTable.getTidx(nodes, tloc); final long curPtr = TableauNodePtrTable.getPtr(TableauNodePtrTable.getElem(nodes, tloc)); curNode = this.dg.getNode(curState, curTidx, curPtr); final int succCnt = curNode.succSize(); // for each successor of curNode s, check if s is the // destination state. 
for (int j = 0; j < succCnt; j++) { final long nextState = curNode.getStateFP(j); final int nextTidx = curNode.getTidx(j); if (nextState == state && nextTidx == tidx) { // We have found a path from startState to state, // now backtrack the path the outer loop took to get // us here and add each state to postfix. while (curState != startState) { postfix.addElement(curState); nodes = nodeTbl.getNodesByLoc(ploc); curState = TableauNodePtrTable.getKey(nodes); ploc = TableauNodePtrTable.getParent(nodes); } postfix.addElement(startState); break _done; } // s is not equal to the destination state 'startState'. // If s's successors are still unseen, add s to the // queue to later explore it as well. Mark it seen // to not explore it twice. final int[] nodes1 = nodeTbl.getNodes(nextState); if (nodes1 != null && !TableauNodePtrTable.isSeen(nodes1)) { TableauNodePtrTable.setSeen(nodes1); queue.enqueueLong(nextState); queue.enqueueInt(curLoc); } } tloc = TableauNodePtrTable.nextLoc(nodes, tloc); } // Create a parent pointer to later reverse the path in B2) TableauNodePtrTable.setParent(nodes, ploc); // Dequeue the next unexplored state from the queue. curState = queue.dequeueLong(); ploc = queue.dequeueInt(); curLoc = nodeTbl.getNodesLoc(curState); nodes = nodeTbl.getNodesByLoc(curLoc); } } return postfix; } private GraphNode dfsPostFix(final long state, final int tidx, final TableauNodePtrTable nodeTbl, final MemIntStack cycleStack) throws IOException { // First, find a "bad" cycle from the "bad" scc. final int slen = this.oos.getCheckState().length; final int alen = this.oos.getCheckAction().length; // The 3 boolean arrays are used to make sure that the same check result // is exactly counted once. final boolean[] AEStateRes = new boolean[this.pem.AEState.length]; final boolean[] AEActionRes = new boolean[this.pem.AEAction.length]; final boolean[] promiseRes = new boolean[this.oos.getPromises().length]; // The number/count of all liveness checks. 
The while loop A) terminates // once it has accumulated all states that violate all checks (we know // that the states in nodeTbl have to violate the liveness property // because we are in printTrace already. checkComponent has already // determined that there is a violation). int cnt = AEStateRes.length + AEActionRes.length + promiseRes.length; // Mark state as visited: int[] nodes = nodeTbl.getNodes(state); int tloc = nodeTbl.getIdx(nodes, tidx); final long ptr = TableauNodePtrTable.getElem(nodes, tloc); TableauNodePtrTable.setSeen(nodes, tloc); // Greedy DFS search for a path satisfying the PossibleErrorModel. GraphNode curNode = this.dg.getNode(state, tidx, ptr); while (cnt > 0) { int cnt0 = cnt; _next: while (true) { // Check AEState: for (int i = 0; i < this.pem.AEState.length; i++) { int idx = this.pem.AEState[i]; if (!AEStateRes[i] && curNode.getCheckState(idx)) { AEStateRes[i] = true; cnt } } // Check if the component is fulfilling. (See MP page 453.) // Note that the promises are precomputed and stored in oos. for (int i = 0; i < this.oos.getPromises().length; i++) { LNEven promise = this.oos.getPromises()[i]; TBPar par = curNode.getTNode(this.oos.getTableau()).getPar(); if (!promiseRes[i] && par.isFulfilling(promise)) { promiseRes[i] = true; cnt } } if (cnt <= 0) { break; } // Check AEAction (which is a check of the out-arc of curNode to // one of its successors): long nextState1 = 0, nextState2 = 0; int nextTidx1 = 0, nextTidx2 = 0; int tloc1 = -1, tloc2 = -1; int[] nodes1 = null, nodes2 = null; boolean hasUnvisitedSucc = false; int cnt1 = cnt; int succCnt = curNode.succSize(); for (int i = 0; i < succCnt; i++) { long nextState = curNode.getStateFP(i); int nextTidx = curNode.getTidx(i); nodes = nodeTbl.getNodes(nextState); if (nodes != null) { tloc = nodeTbl.getIdx(nodes, nextTidx); if (tloc != -1) { // <nextState, nextTidx> is in nodeTbl. 
nextState1 = nextState; nextTidx1 = nextTidx; tloc1 = tloc; nodes1 = nodes; for (int j = 0; j < this.pem.AEAction.length; j++) { int idx = this.pem.AEAction[j]; if (!AEActionRes[j] && curNode.getCheckAction(slen, alen, i, idx)) { AEActionRes[j] = true; cnt } } } } if (cnt < cnt1) { // Take curNode -> <nextState, nextTidx>: cycleStack.pushInt(curNode.tindex); cycleStack.pushLong(curNode.stateFP); long nextPtr = TableauNodePtrTable.getPtr(TableauNodePtrTable.getElem(nodes, tloc)); curNode = this.dg.getNode(nextState, nextTidx, nextPtr); nodeTbl.resetElems(); break _next; } if (nodes != null && tloc != -1 && !TableauNodePtrTable.isSeen(nodes, tloc)) { // <nextState, nextTidx> is an unvisited successor of // curNode: hasUnvisitedSucc = true; nextState2 = nextState; nextTidx2 = nextTidx; tloc2 = tloc; nodes2 = nodes; } } if (cnt < cnt0) { // Take curNode -> <nextState1, nextTidx1>: cycleStack.pushInt(curNode.tindex); cycleStack.pushLong(curNode.stateFP); long nextPtr = TableauNodePtrTable.getPtr(TableauNodePtrTable.getElem(nodes1, tloc1)); curNode = this.dg.getNode(nextState1, nextTidx1, nextPtr); nodeTbl.resetElems(); break; } // Backtrack if all successors of curNode have been visited // and no successor can reduce cnt. while (!hasUnvisitedSucc) { long curState = cycleStack.popLong(); int curTidx = cycleStack.popInt(); long curPtr = TableauNodePtrTable.getPtr(nodeTbl.get(curState, curTidx)); curNode = this.dg.getNode(curState, curTidx, curPtr); succCnt = curNode.succSize(); for (int i = 0; i < succCnt; i++) { nextState2 = curNode.getStateFP(i); nextTidx2 = curNode.getTidx(i); nodes2 = nodeTbl.getNodes(nextState2); if (nodes2 != null) { tloc2 = nodeTbl.getIdx(nodes2, nextTidx2); if (tloc2 != -1 && !TableauNodePtrTable.isSeen(nodes2, tloc2)) { hasUnvisitedSucc = true; break; } } } } // Take curNode -> <nextState2, nextTidx2>. Set nextState2 // visited. 
cycleStack.pushInt(curNode.tindex); cycleStack.pushLong(curNode.stateFP); long nextPtr = TableauNodePtrTable.getPtr(TableauNodePtrTable.getElem(nodes2, tloc2)); curNode = this.dg.getNode(nextState2, nextTidx2, nextPtr); TableauNodePtrTable.setSeen(nodes2, tloc2); } } // All the conditions are satisfied. // 1. curNode has not been pushed on cycleStack. // 2. nodeTbl is trashed after this operation, thus reset. Trashed means // that some nodes are still marked seen being left-overs from the // Depth-First search. nodeTbl.resetElems(); return curNode; } /* (non-Javadoc) * @see java.lang.Thread#run() */ public final void run() { try { while (true) { // Use poll() to get the next checker from the queue or null if // there is none. Do *not* block when there are no more checkers // available. Nobody is going to add new checkers to the queue. final ILiveChecker checker = queue.poll(); if (checker == null || hasErrFound()) { // Another thread has either found an error (violation of a // liveness property) OR there is no more work (checker) to // be done. break; } this.oos = checker.getSolution(); this.dg = checker.getDiskGraph(); this.dg.createCache(); PossibleErrorModel[] pems = this.oos.getPems(); for (int i = 0; i < pems.length; i++) { if (!hasErrFound()) { this.pem = pems[i]; this.checkSccs(); } } this.dg.destroyCache(); // Record the size of the disk graph at the time its checked. This // information is later used to decide if it it makes sense to // run the next check on the larger but still *partial* graph. 
this.dg.recordSize();
		}
	} catch (Exception e) {
		// LL changed call 7 April 2012
		MP.printError(EC.GENERAL, "checking liveness", e);
		// Assert.printStack(e);
		return;
	}
}

/**
 * Renders the given TableauNodePtrTable as a GraphViz "dot" digraph; the
 * node identified by <<state, tidx>> is flagged (first argument to
 * GraphNode#toDotViz). Debugging aid only.
 */
public String toDotViz(final long state, final int tidx, TableauNodePtrTable tnpt) throws IOException {
	final StringBuffer sb = new StringBuffer(tnpt.size() * 10);
	sb.append("digraph TableauNodePtrTable {\n");
	sb.append("nodesep = 0.7\n");
	sb.append("rankdir=LR;\n"); // Left to right rather than top to bottom
	// Iterate the table bucket-by-bucket (getSize(), not size()) and skip
	// null buckets — same traversal as in checkComponent.
	final int tsz = tnpt.getSize();
	for (int ci = 0; ci < tsz; ci++) {
		final int[] nodes = tnpt.getNodesByLoc(ci);
		if (nodes == null) {
			// miss in TableauNodePtrTable (null bucket)
			continue;
		}
		long state1 = TableauNodePtrTable.getKey(nodes);
		for (int nidx = 2; nidx < nodes.length; nidx += tnpt.getElemLength()) {
			// nidx starts with 2 because [0][1] are the long fingerprint state1.
			int tidx1 = TableauNodePtrTable.getTidx(nodes, nidx);
			long loc1 = TableauNodePtrTable.getElem(nodes, nidx);
			final GraphNode curNode = this.dg.getNode(state1, tidx1, loc1);
			sb.append(curNode.toDotViz((state1 == state && tidx1 == tidx), true));
		}
	}
	sb.append("}");
	return sb.toString();
}

/*
 * The detailed formatter below can be activated in Eclipse's variable view
 * by choosing "New detailed formatter" from the MemIntQueue context menu.
 * Insert "LiveWorker.DetailedFormatter.toString(this);".
 */
public static class DetailedFormatter {
	// Renders a comStack of <<fp (2 ints), tidx (1 int), loc (2 ints)>>
	// tuples, top of stack first. Peek offsets mirror the 5-int tuple layout.
	public static String toString(final MemIntStack comStack) {
		final int size = (int) comStack.size();
		final StringBuffer buf = new StringBuffer(size / 5);
		for (int i = 0; i < comStack.size(); i += 5) {
			long loc = comStack.peakLong(size - i - 5);
			int tidx = comStack.peakInt(size - i - 3);
			long state = comStack.peakLong(size - i - 2);
			buf.append("state: ");
			buf.append(state);
			buf.append(" tidx: ");
			buf.append(tidx);
			buf.append(" loc: ");
			buf.append(loc);
			buf.append("\n");
		}
		return buf.toString();
	}
}

/*
 * The detailed formatter below can be activated in Eclipse's variable view
 * by choosing "New detailed formatter" from the MemIntQueue context menu.
 * Insert "LiveWorker.DFSStackDetailedFormatter.toString(this);".
 * Unfortunately it collides with the comStack DetailedFormatter as both use
 * the same type MemIntStack. So you have to chose what you want to look at
 * while debugging.
 * Note that toString treats pops/pushes of nodes and
 * states atomically. If called during a node is only partially pushed onto
 * the stack, the detailed formatter will crash.
 */
public static class DFSStackDetailedFormatter {
	// Decodes the heterogeneous DFS stack: a 7-int "node" record (marked by
	// SCC_MARKER), a 5-int "succ" record (marked by a file pointer), or a
	// 2-int predecessor low-link (value >= DiskGraph.MAX_PTR).
	public static String toString(final MemIntStack dfsStack) {
		final int size = (int) dfsStack.size();
		final StringBuffer buf = new StringBuffer(size / 7); // approximate the size needed (buf will grow or shrink if needed)
		int i = 0;
		for (; i < dfsStack.size();) {
			// Peak element to see if it's a marker or not
			final long topElement = dfsStack.peakLong(size - i - 2);
			if (topElement == SCC_MARKER) {
				// It is the marker element
				buf.append("node [");
				buf.append(" fp: ");
				buf.append(dfsStack.peakLong(size - i - 5));
				buf.append(" tidx: ");
				buf.append(dfsStack.peakInt(size - i - 3));
				buf.append(" lowLink: ");
				buf.append(dfsStack.peakLong(size - i - 7) - DiskGraph.MAX_PTR);
				buf.append("]\n");
				// Increase i by the number of elements peaked
				i += 7;
			} else if (DiskGraph.isFilePointer(topElement)) {
				final long location = topElement;
				buf.append("succ [");
				buf.append(" fp: ");
				buf.append(dfsStack.peakLong(size - i - 5));
				buf.append(" tidx: ");
				buf.append(dfsStack.peakInt(size - i - 3));
				buf.append(" location: ");
				buf.append(location);
				buf.append("]\n");
				// Increase i by the number of elements peaked
				i += 5;
			} else if (topElement >= DiskGraph.MAX_PTR) {
				final long pLowLink = topElement - DiskGraph.MAX_PTR;
				buf.append("pLowLink: ");
				buf.append(pLowLink);
				buf.append("\n");
				i += 2;
			}
		}
		// Assert all elements are used up
		assert i == size;
		return buf.toString();
	}
}
}
// samskivert library - useful routines for java programs
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package com.samskivert.util;

import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;

/**
 * Provides utility routines to simplify obtaining randomized values.
 *
 * <p>Each instance of Randoms contains an underlying {@link java.util.Random} instance and is
 * only as thread-safe as that is. If you wish to have a private stream of pseudorandom numbers,
 * use the {@link #with} factory.
 */
public class Randoms
{
    /** A default Randoms that is thread-safe and can be safely shared by any caller. */
    public static final Randoms RAND = with(new Random());

    /**
     * A factory to create a new Randoms object.
     */
    public static Randoms with (Random rand)
    {
        return new Randoms(rand);
    }

    /**
     * Get a thread-local Randoms instance that will not contend with any other thread
     * for random number generation.
     *
     * <p><b>Note:</b> This method will return a Randoms instance that is not thread-safe.
     * It can generate random values with less overhead, however it may be dangerous to share
     * the reference. Instead you should probably always use it immediately as in the following
     * example:
     * <pre style="code">
     *     Puppy pick = Randoms.threadLocal().pick(Puppy.LITTER, null);
     * </pre>
     */
    public static Randoms threadLocal ()
    {
        return _localRandoms.get();
    }

    /**
     * Returns a pseudorandom, uniformly distributed <code>int</code> value between
     * <code>0</code> (inclusive) and <code>high</code> (exclusive).
     */
    public int getInt (int high)
    {
        return _r.nextInt(high);
    }

    /**
     * Returns a pseudorandom, uniformly distributed <code>int</code> value between
     * <code>low</code> (inclusive) and <code>high</code> (exclusive).
     */
    public int getInRange (int low, int high)
    {
        return low + _r.nextInt(high - low);
    }

    /**
     * Returns a pseudorandom, uniformly distributed <code>float</code> value between
     * <code>0.0</code> (inclusive) and the <code>high</code> (exclusive).
     *
     * @param high the high value limiting the random number sought.
     */
    public float getFloat (float high)
    {
        return _r.nextFloat() * high;
    }

    /**
     * Returns a pseudorandom, uniformly distributed <code>float</code> value between
     * <code>low</code> (inclusive) and <code>high</code> (exclusive).
     */
    public float getInRange (float low, float high)
    {
        return low + (_r.nextFloat() * (high - low));
    }

    /**
     * Returns true approximately one in <code>n</code> times.
     */
    public boolean getChance (int n)
    {
        return (0 == _r.nextInt(n));
    }

    /**
     * Has a probability <code>p</code> of returning true.
     */
    public boolean getProbability (float p)
    {
        return _r.nextFloat() < p;
    }

    /**
     * Returns <code>true</code> or <code>false</code> with approximately even probability.
     */
    public boolean getBoolean ()
    {
        return _r.nextBoolean();
    }

    /**
     * Returns a pseudorandom, normally distributed <code>float</code> value around the
     * <code>mean</code> with the standard deviation <code>dev</code>.
     */
    public float getNormal (float mean, float dev)
    {
        return (float)_r.nextGaussian() * dev + mean;
    }

    /**
     * Shuffle the specified list using our Random.
     */
    public void shuffle (List<?> list)
    {
        Collections.shuffle(list, _r);
    }

    /**
     * Pick a random element from the specified Iterator, or return <code>ifEmpty</code>
     * if it is empty.
     *
     * <p><b>Implementation note:</b> because the total size of the Iterator is not known,
     * the random number generator is queried after the second element and every element
     * thereafter.
     *
     * @throws NullPointerException if the iterator is null.
     */
    public <T> T pick (Iterator<? extends T> iterator, T ifEmpty)
    {
        if (!iterator.hasNext()) {
            return ifEmpty;
        }
        // reservoir sampling: keep element k with probability 1/k
        T pick = iterator.next();
        for (int count = 2; iterator.hasNext(); count++) {
            T next = iterator.next();
            if (0 == _r.nextInt(count)) {
                pick = next;
            }
        }
        return pick;
    }

    /**
     * Pick a random element from the specified Iterable, or return <code>ifEmpty</code>
     * if it is empty.
     *
     * <p><b>Implementation note:</b> optimized implementations are used if the Iterable
     * is a List or Collection. Otherwise, it behaves as if calling {@link #pick(Iterator, Object)}
     * with the Iterable's Iterator.
     *
     * @throws NullPointerException if the iterable is null.
     */
    public <T> T pick (Iterable<? extends T> iterable, T ifEmpty)
    {
        return pickPluck(iterable, ifEmpty, false);
    }

    /**
     * Pick a random key from the map, where each key's probability is proportional to its
     * (positive) weight value. Keys with a weight of zero are never picked; returns
     * <code>ifEmpty</code> when the map is empty or no key has positive weight.
     *
     * @throws IllegalArgumentException if any weight is negative.
     */
    public <T> T pick (Map<? extends T, ? extends Number> weightMap, T ifEmpty)
    {
        T pick = ifEmpty;
        double total = 0.0;
        for (Map.Entry<? extends T, ? extends Number> entry : weightMap.entrySet()) {
            double weight = entry.getValue().doubleValue();
            if (weight > 0.0) {
                total += weight;
                // first positive weight always wins; later ones win with p = weight/total
                if ((total == weight) || ((_r.nextDouble() * total) < weight)) {
                    pick = entry.getKey();
                }
            } else if (weight < 0.0) {
                throw new IllegalArgumentException("Weight less than 0: " + entry);
            } // else: weight == 0.0 is OK
        }
        return pick;
    }

    // TODO(original): weighted pick over an Iterable via a weight function
    // public <T> T pick (
    //     Iterable<? extends T> iterable, Function<? super T, ? extends Number> weightFunction,
    //     T ifEmpty)
    //     T pick = ifEmpty;
    //     double total = 0.0;
    //     for (T element : iterable) {
    //         double weight = weightFunction.apply(element).doubleValue();
    //         // The rest is like the current pick(Map)
    //     return pick;
    // public <T> T pick (Map<? extends T, ? extends Number> weightMap, T ifEmpty)
    //     Map.Entry<? extends T, ? extends Number> pick = pick(weightMap.entrySet(),
    //         new Function<Map.Entry<?, ? extends Number>, Number>() {
    //             public Number apply (Map.Entry<?, ?
extends Number> entry) { // return entry.getValue(); // }, null); // return (pick != null) ? pick.getKey() : ifEmpty; /** * Pluck (remove) a random element from the specified Iterable, or return <code>ifEmpty</code> * if it is empty. * * <p><b>Implementation note:</b> optimized implementations are used if the Iterable * is a List or Collection. Otherwise, two Iterators are created from the Iterable * and a random number is generated after the second element and all beyond. * * @throws NullPointerException if the iterable is null. * @throws UnsupportedOperationException if the iterable is unmodifiable or its Iterator * does not support {@link Iterator#remove()}. */ public <T> T pluck (Iterable<? extends T> iterable, T ifEmpty) { return pickPluck(iterable, ifEmpty, true); } /** * Construct a Randoms. */ protected Randoms (Random rand) { _r = rand; } /** * Shared code for pick and pluck. */ protected <T> T pickPluck (Iterable<? extends T> iterable, T ifEmpty, boolean remove) { if (iterable instanceof Collection) { // optimized path for Collection @SuppressWarnings("unchecked") Collection<? extends T> coll = (Collection<? extends T>)iterable; int size = coll.size(); if (size == 0) { return ifEmpty; } if (coll instanceof List) { // extra-special optimized path for Lists @SuppressWarnings("unchecked") List<? extends T> list = (List<? extends T>)coll; int idx = _r.nextInt(size); if (remove) { // ternary conditional causes warning here with javac 1.6, :( return list.remove(idx); } return list.get(idx); } // for other Collections, we must iterate Iterator<? extends T> it = coll.iterator(); for (int idx = _r.nextInt(size); idx > 0; idx it.next(); } try { return it.next(); } finally { if (remove) { it.remove(); } } } if (!remove) { return pick(iterable.iterator(), ifEmpty); } // from here on out, we're doing a pluck with a complicated two-iterator solution Iterator<? extends T> it = iterable.iterator(); if (!it.hasNext()) { return ifEmpty; } Iterator<? 
extends T> lagIt = iterable.iterator(); T pick = it.next(); lagIt.next(); for (int count = 2, lag = 1; it.hasNext(); count++, lag++) { T next = it.next(); if (0 == _r.nextInt(count)) { pick = next; // catch up lagIt so that it has just returned 'pick' as well for ( ; lag > 0; lag lagIt.next(); } } } lagIt.remove(); // remove 'pick' from the lagging iterator return pick; } /** The random number generator. */ protected final Random _r; /** A ThreadLocal for accessing a thread-local version of Randoms. */ protected static final ThreadLocal<Randoms> _localRandoms = new ThreadLocal<Randoms>() { @Override public Randoms initialValue () { return with(new ThreadLocalRandom()); } }; protected static class ThreadLocalRandom extends Random { // same constants as Random, but must be redeclared because private private final static long multiplier = 0x5DEECE66DL; private final static long addend = 0xBL; private final static long mask = (1L << 48) - 1; /** * The random seed. We can't use super.seed. */ private long rnd; /** * Initialization flag to permit calls to setSeed to succeed only * while executing the Random constructor. We can't allow others * since it would cause setting seed in one part of a program to * unintentionally impact other usages by the thread. */ boolean initialized; // Padding to help avoid memory contention among seed updates in // different TLRs in the common case that they are located near // each other. @SuppressWarnings("unused") private long pad0, pad1, pad2, pad3, pad4, pad5, pad6, pad7; /** * Constructor called only by localRandom.initialValue. */ ThreadLocalRandom() { super(); initialized = true; } /** * Throws {@code UnsupportedOperationException}. Setting seeds in * this generator is not supported. 
* * @throws UnsupportedOperationException always */ @Override public void setSeed(long seed) { if (initialized) throw new UnsupportedOperationException(); rnd = (seed ^ multiplier) & mask; } @Override protected int next(int bits) { rnd = (rnd * multiplier + addend) & mask; return (int) (rnd >>> (48-bits)); } // as of JDK 1.6, this method does not exist in java.util.Random // public int nextInt(int least, int bound) { // if (least >= bound) // return nextInt(bound - least) + least; public long nextLong(long n) { if (n <= 0) throw new IllegalArgumentException("n must be positive"); // Divide n by two until small enough for nextInt. On each // iteration (at most 31 of them but usually much less), // randomly choose both whether to include high bit in result // (offset) and whether to continue with the lower vs upper // half (which makes a difference only if odd). long offset = 0; while (n >= Integer.MAX_VALUE) { int bits = next(2); long half = n >>> 1; long nextn = ((bits & 2) == 0) ? half : n - half; if ((bits & 1) == 0) offset += n - nextn; n = nextn; } return offset + nextInt((int) n); } public long nextLong(long least, long bound) { if (least >= bound) throw new IllegalArgumentException(); return nextLong(bound - least) + least; } public double nextDouble(double n) { if (n <= 0) throw new IllegalArgumentException("n must be positive"); return nextDouble() * n; } public double nextDouble(double least, double bound) { if (least >= bound) throw new IllegalArgumentException(); return nextDouble() * (bound - least) + least; } private static final long serialVersionUID = -5851777807851030925L; } }
package com.sirtrack.construct;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

import com.sirtrack.construct.lib.*;
import com.sirtrack.construct.lib.BitStream.BitStreamReader;
import com.sirtrack.construct.lib.BitStream.BitStreamWriter;
import com.sirtrack.construct.lib.Containers.Container;

import static com.sirtrack.construct.Macros.Field;
import static com.sirtrack.construct.lib.Containers.*;

/**
 * Core of the construct framework: a Java port of the Python "construct"
 * library. Declarative {@link Construct} objects both parse byte streams into
 * {@link Container}s and build byte streams back from them.
 */
public class Core {

  /** Root of the construct exception hierarchy (unchecked). */
  public static class ConstructError extends RuntimeException {
    public ConstructError(String string) {
      super(string);
    }
    public ConstructError(String string, Exception e) {
      super(string, e);
    }
  }
  /** Raised when reading/writing a field fails (wrong length, EOF, pack error). */
  public static class FieldError extends ConstructError {
    public FieldError(String string) {
      super(string);
    }
  }
  /** Raised when a construct's size cannot be computed. */
  public static class SizeofError extends ConstructError {
    public SizeofError(String string) {
      super(string);
    }
  }
  /** Raised on invalid arguments or reserved names. */
  public static class ValueError extends ConstructError {
    public ValueError(String string) {
      super(string);
    }
  }
  /** Raised by Range/repeaters when the element count is out of bounds. */
  public static class RangeError extends ConstructError {
    public RangeError(String string) {
      super(string);
    }
  }
  /** Raised when an object has an unexpected runtime type. */
  public static class TypeError extends ConstructError {
    public TypeError(String string) {
      super(string);
    }
  }
  /** Raised by Switch when no case matches and no default is given. */
  public static class SwitchError extends ConstructError {
    public SwitchError(String string) {
      super(string);
    }
  }
  /** Raised by array constructs; carries the underlying cause. */
  public static class ArrayError extends ConstructError {
    public ArrayError(String string, Exception e) {
      super(string, e);
    }
  }

  // Name mapping from the Python construct library:
  // Bits = BitField
  // Byte = UBInt8
  // Bytes = Field
  // Const = ConstAdapter
  // Tunnel = TunnelAdapter
  // Embed = Embedded

  /**
   * Convenience factory: builds a byte[] from int literals, truncating each
   * value with a (byte) cast (so 0xFF becomes -1).
   */
  static public byte[] ByteArray( int... ints ){
    byte[] ba = new byte[ints.length];
    int k = 0;
    for( int i : ints ){
      ba[k++] = (byte)i;
    }
    return ba;
  }

  /**
   * Convenience factory: concatenates several byte arrays into one.
   */
  static public byte[] ByteArray( byte[]... bas ){
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    for( byte[]ba : bas ){
      try {
        out.write(ba);
      } catch (IOException e) {
        // ByteArrayOutputStream never throws in practice; rewrap as unchecked
        throw new RuntimeException(e);
      }
    }
    return out.toByteArray();
  }

  /** A generic container of attributes. Containers are the common way to express parsed data. */
  static public Container Container( Object... pairs ){
    return new Container( pairs );
  }

  /**
   * Reads exactly {@code length} bytes from the stream.
   *
   * @param stream source buffer
   * @param length number of bytes to read; must be >= 0
   * @return a fresh byte[] of exactly {@code length} bytes
   * @throws FieldError if length is negative or the stream has fewer bytes left
   */
  public static byte[] _read_stream( ByteBufferWrapper stream, int length) {
    if (length < 0)
      throw new FieldError("length must be >= 0 " + length);
    {
      int len = stream.remaining();
      if (len < length)
        throw new FieldError("expected " + length + " found " + len);
      byte[] out = new byte[length];
      stream.get(out, 0, length);
      return out;
    }
  }

  /**
   * The mother of all constructs. Each construct can parse a stream into an
   * object and build an object back into a stream; behavior is refined via
   * the FLAG_* bit flags stored in {@code conflags}.
   */
  static public abstract class Construct {
    /** Parse/build should receive a copy of the context rather than share it. */
    public static final int FLAG_COPY_CONTEXT = 0x0001;
    /** The construct's size may vary with the data/context. */
    public static final int FLAG_DYNAMIC = 0x0002;
    /** The construct is embedded into its parent rather than nested. */
    public static final int FLAG_EMBED = 0x0004;
    /** The construct creates a nested context. */
    public static final int FLAG_NESTING = 0x0008;

    // OR'd combination of the FLAG_* constants above
    int conflags;
    public String name;

    public Construct(String name) {
      this( name, 0 );
    }

    /**
     * @param name field name, or null for an anonymous construct; "_" and
     *             names starting with "&lt;" are reserved for internal use
     * @param flags initial OR'd FLAG_* combination
     * @throws ValueError if a reserved name is used
     */
    public Construct(String name, int flags) {
      if( name != null ){
        if (name.equals("_") || name.startsWith("<"))
          throw new ValueError("reserved name " + name); // raise
      }
      this.name = name;
      this.conflags = flags;
    }

    @Override
    public String toString(){
      return getClass().getName() + "(" + name + ")";
    }

    /** Set the given flag or flags.
     * @param flag flag to set; may be OR'd combination of flags */
    protected void _set_flag(int flag){
      conflags |= flag;
    }

    /** Clear the given flag or flags.
     * @param flag flag to clear; may be OR'd combination of flags */
    protected void _clear_flag( int flag ){
      conflags &= ~flag;
    }

    /** Pull flags from subconstructs. */
    protected void _inherit_flags( Construct... subcons ){
      for( Construct sc : subcons ){
        _set_flag(sc.conflags);
      }
    }

    /** Check whether a given flag is set.
* @param flag flag to check
     * @return true if every bit of {@code flag} is set in {@code conflags}
     */
    protected boolean _is_flag( int flag ){
      return (conflags & flag) == flag;
    }

    /**
     * Best-effort byte length of a value about to be written.
     * NOTE(review): an Integer's length is inferred from its magnitude
     * (1, 2 or 4 bytes), so values are assumed non-negative and sized to
     * their encoding — confirm against the Packer conventions.
     *
     * @throws RuntimeException for unsupported value types
     */
    static public int getDataLength( Object data ){
      if( data instanceof String)
        return ((String)data).length();
      else if( data instanceof Byte || data instanceof Character)
        return 1;
      else if( data instanceof Integer ){
        int num = (Integer)data;
        if( num < 256 )
          return 1;
        else if( num < 65536 )
          return 2;
        else
          return 4;
        // return Integer.SIZE/8;
      }
      else if( data instanceof byte[] )
        return ((byte[])data).length;
      else if( data instanceof List )
        return ((List)data).size();
      else
        throw new RuntimeException("Data length unknown for " + data );
    }

    /**
     * Appends a value to the output stream, dispatching on its runtime type.
     * NOTE(review): String is written via getBytes(), i.e. the platform
     * default charset — verify this matches the parse side.
     *
     * @throws ValueError if the value cannot be appended
     */
    static public void appendDataStream( ByteArrayOutputStream stream, Object data ){
      if( data instanceof String)
        try {
          stream.write(((String)data).getBytes());
        } catch (IOException e) {
          throw new ValueError( "Can't append data " + data + " " + e.getMessage());
        }
      else if( data instanceof Byte )
        stream.write((Byte)data);
      else if( data instanceof Integer )
        stream.write((Integer)data);
      else if( data instanceof byte[] )
        try {
          stream.write((byte[])data);
        } catch (IOException e) {
          throw new ValueError( "Can't append data " + data + " " + e.getMessage());
        }
      else
        throw new ValueError( "Can't append data " + data);
    }

    /**
     * Writes a value of an expected byte length to the stream.
     *
     * @param length expected encoded length; must be >= 0
     * @throws FieldError if length is negative or does not match the value
     */
    public void _write_stream( ByteArrayOutputStream stream, int length, Object data) {
      if (length < 0)
        throw new FieldError("length must be >= 0 " + length);
      int datalength = getDataLength( data );
      if ( length != datalength )
        throw new FieldError("expected " + length + " found " + datalength);
      appendDataStream( stream, data );
    };

    /**
     * Parse an in-memory buffer.
     *
     * Strings, buffers, memoryviews, and other complete buffers can be parsed with this method.
     *
     * @param data the bytes to parse
     */
    public <T>T parse(byte[] data) {
      return (T)parse_stream( new ByteBufferWrapper().wrap( data ));
    }

    /** Parse a String's bytes (platform default charset). */
    public <T>T parse(String text) {
      return (T)parse_stream( new ByteBufferWrapper().wrap( text.getBytes() ));
    }

    /**
     * Parse an in-memory buffer.
     * Also accepts a context, useful for passing initial values
     * @param data
     * @param context
     * @return
     */
    public <T>T parse( byte[] data, Container context ) {
      return (T)_parse( new ByteBufferWrapper().wrap( data ), context );
    }

    /**
     * Parse a stream.
     *
     * Files, pipes, sockets, and other streaming sources of data are handled by this method.
     */
    public Object parse_stream( ByteBufferWrapper stream ) {
      return _parse(stream, new Container());
    }

    /** Core parse hook implemented by each concrete construct. */
    abstract public Object _parse( ByteBufferWrapper stream, Container context);

    /**
     * Build an object in memory.
     *
     * @param obj the object to serialize
     * @return the built bytes
     */
    public byte[] build( Object obj ) {
      ByteArrayOutputStream stream = new ByteArrayOutputStream();
      build_stream(obj, stream);
      return stream.toByteArray();
    }

    /**
     * Build an object directly into a stream.
     *
     * @param obj
     * @param stream
     */
    public void build_stream( Object obj, ByteArrayOutputStream stream) {
      _build(obj, stream, new Container());
    }

    // abstract void _build( String obj, OutputStream stream, Container
    // context);

    /** Core build hook implemented by each concrete construct. */
    protected abstract void _build( Object obj, ByteArrayOutputStream stream, Container context);

    /**
     * Calculate the size of this object, optionally using a context. Some constructs have no fixed size and can only know their size for a given hunk of data;
     * these constructs will raise an error if they are not passed a context.
     *
     * @param context
     *          contextual data
     * @return the length of this construct
     * @throws SizeofError if the size cannot be determined
     */
    public int sizeof(Container context) {
      if (context == null) {
        context = new Container();
      }
      try {
        return _sizeof(context);
      } catch (Exception e) {
        // any failure is surfaced uniformly as a SizeofError
        throw new SizeofError(e.getMessage());
      }
    }

    /** Size with an empty context. */
    public int sizeof() {
      return sizeof(null);
    }

    /** Core size hook implemented by each concrete construct. */
    abstract protected int _sizeof(Container context);
  }

  /**
   * Abstract subconstruct (wraps an inner construct, inheriting its name and flags).
*/ public static abstract class Subconstruct extends Construct { protected Construct subcon; /** * @param subcon the construct to wrap */ public Subconstruct(Construct subcon) { super(subcon.name, subcon.conflags); this.subcon = subcon; } protected Subconstruct(String name, Construct subcon) { super(name, subcon.conflags); this.subcon = subcon; } @Override public Object _parse( ByteBufferWrapper stream, Container context) { return subcon._parse(stream, context); } @Override protected void _build( Object obj, ByteArrayOutputStream stream, Container context) { subcon._build(obj, stream, context); } @Override protected int _sizeof(Container context){ return subcon._sizeof(context); } } /** * A fixed-size byte field. */ public static class StaticField extends Construct { int length; /** * @param name * field name * @param length * number of bytes in the field */ public StaticField(String name, int length) { super(name); this.length = length; } @Override public Object _parse( ByteBufferWrapper stream, Container context) { return _read_stream( stream, length); } @Override protected void _build( Object obj, ByteArrayOutputStream stream, Container context) { _write_stream(stream, length, obj); } @Override protected int _sizeof(Container context) { return length; } /* * public int _sizeof( Container context ){ return length; } */ } /** * A field that uses ``struct`` to pack and unpack data. * * See ``struct`` documentation for instructions on crafting format strings. 
*/ public static class FormatField extends StaticField { int length; Packer packer; /** * @param name * name of the field * @param endianness * : format endianness string; one of "<", ">", or "=" * @param format * : a single format character */ public FormatField(String name, char endianity, char format) { super(name, 0); if (endianity != '>' && endianity != '<' && endianity != '=') throw new ValueError("endianity must be be '=', '<', or '>' " + endianity); packer = new Packer(endianity, format); super.length = packer.length(); } @Override public Object _parse( ByteBufferWrapper stream, Container context ) { try { return packer.unpack(stream.bb); } catch (Exception e) { throw new FieldError(e.getMessage()); } } @Override public void _build( Object obj, ByteArrayOutputStream stream, Container context) { _write_stream(stream, super.length, packer.pack(obj)); } } /** * callable that takes a context and returns length as an int */ static public interface LengthFunc{ abstract int length(Container context); } /** * @param name context field name * @return get length from context field */ static public LengthFunc LengthField( final String name ) { return new LengthFunc(){ public int length(Container ctx) { return (Integer)ctx.get(name); } }; } /** A variable-length field. The length is obtained at runtime from a function. 
foo = Struct("foo", Byte("length"), MetaField("data", lambda ctx: ctx["length"]) ) foo.parse("\\x03ABC") Container(data = 'ABC', length = 3) foo.parse("\\x04ABCD") Container(data = 'ABCD', length = 4) * @param name name of the field * @param lengthfunc callable that takes a context and returns length as an int */ public static MetaField MetaField(String name, LengthFunc lengthfunc ){ return new MetaField(name, lengthfunc); } public static class MetaField extends Construct { LengthFunc lengthfunc; /** * @param name name of the field * @param lengthfunc callable that takes a context and returns length as an int */ public MetaField(String name, LengthFunc lengthfunc) { super(name); this.lengthfunc = lengthfunc; this._set_flag(FLAG_DYNAMIC); } @Override public Object _parse(ByteBufferWrapper stream, Container context) { return _read_stream(stream, lengthfunc.length(context)); } @Override protected void _build(Object obj, ByteArrayOutputStream stream, Container context) { _write_stream(stream, lengthfunc.length(context), obj); } @Override protected int _sizeof(Container context) { return lengthfunc.length(context); } } /** * callable that takes a context and returns length as an int */ static public interface CountFunc{ abstract int count(Container context); } /** Example: MetaArray(lambda ctx: 5, UBInt8("foo")) See also Array, Range and RepeatUntil. * @param countfunc a function that takes the context as a parameter and returns the number of elements of the array (count) * @param subcon the subcon to repeat `countfunc()` times * @return An array (repeater) of a meta-count. The array will iterate exactly `countfunc()` times. Will raise ArrayError if less elements are found. */ public static MetaArray MetaArray( CountFunc countfunc, Construct subcon){ return new MetaArray(countfunc,subcon); } /** An array (repeater) of a meta-count. The array will iterate exactly `countfunc()` times. Will raise ArrayError if less elements are found. See also Array, Range and RepeatUntil. 
Example: MetaArray(lambda ctx: 5, UBInt8("foo")) */ public static class MetaArray extends Subconstruct{ CountFunc countfunc; /** Parameters: * countfunc - a function that takes the context as a parameter and returns the number of elements of the array (count) * subcon - the subcon to repeat `countfunc()` times * @param length * @param name * @param subcon */ protected MetaArray( CountFunc countfunc, Construct subcon) { super(subcon); this.countfunc = countfunc; _clear_flag(FLAG_COPY_CONTEXT); _set_flag(FLAG_DYNAMIC); } @Override public Object _parse( ByteBufferWrapper stream, Container context) { List obj = ListContainer(); int c = 0; int count = countfunc.count(context); try{ if( (subcon.conflags & FLAG_COPY_CONTEXT) != 0){ while( c < count ){ obj.add( subcon._parse(stream, context.clone())); c += 1; } } else{ while( c < count ){ obj.add( subcon._parse(stream, context) ); c += 1; } } } catch( Exception e ){ throw new ArrayError("expected " + count +", found " + c, e ); } return obj; } @Override protected void _build( Object object, ByteArrayOutputStream stream, Container context) { List<Object> obj = (List<Object>)object; int count = countfunc.count(context); if( obj.size() != count ){ throw new ArrayError("expected " + count +", found " + obj.size(), null ); } if( (subcon.conflags & FLAG_COPY_CONTEXT) != 0 ){ for( Object subobj : obj ){ subcon._build(subobj, stream, context.clone()); } } else{ for( Object subobj : obj ){ subcon._build(subobj, stream, context); } } } @Override protected int _sizeof(Container context){ return subcon._sizeof(context) * countfunc.count(context); } } public static Range Range(int mincount, int maxcount, Construct subcon){ return new Range(mincount,maxcount,subcon); } /** A range-array. The subcon will iterate between `mincount` to `maxcount` times. If less than `mincount` elements are found, raises RangeError. See also GreedyRange and OptionalGreedyRange. The general-case repeater. 
Repeats the given unit for at least mincount times, and up to maxcount
    times. If an exception occurs (EOF, validation error), the repeater exits.
    If less than mincount units have been successfully parsed, a RangeError
    is raised.

    .. note:: This object requires a seekable stream for parsing.
  */
  public static class Range extends Subconstruct{
    /**
     * @param mincount the minimal count
     * @param maxcount the maximal count
     * @param subcon the subcon to repeat

      c = Range(3, 7, UBInt8("foo"))
      c.parse("\\x01\\x02")
      Traceback (most recent call last):
        construct.core.RangeError: expected 3..7, found 2
      c.parse("\\x01\\x02\\x03")
      [1, 2, 3]
      c.parse("\\x01\\x02\\x03\\x04\\x05\\x06")
      [1, 2, 3, 4, 5, 6]
      c.parse("\\x01\\x02\\x03\\x04\\x05\\x06\\x07")
      [1, 2, 3, 4, 5, 6, 7]
      c.parse("\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09")
      [1, 2, 3, 4, 5, 6, 7]
      c.build([1,2])
      Traceback (most recent call last):
        construct.core.RangeError: expected 3..7, found 2
      c.build([1,2,3,4])
      '\\x01\\x02\\x03\\x04'
      c.build([1,2,3,4,5,6,7,8])
      Traceback (most recent call last):
        construct.core.RangeError: expected 3..7, found 8
     */
    int mincount;
    // NOTE(review): field name is misspelled ("maxcout"); left as-is since it
    // is package-visible and may be referenced elsewhere.
    int maxcout;

    public Range(int mincount, int maxcount, Construct subcon) {
      super(subcon);
      this.mincount = mincount;
      this.maxcout = maxcount;
      _clear_flag(FLAG_COPY_CONTEXT);
      _set_flag(FLAG_DYNAMIC);
    }

    /**
     * Parses up to maxcount elements, stopping at the first parse failure.
     * On failure after at least mincount elements, the stream is rewound to
     * the start of the failed element and the partial list is returned.
     * @throws RangeError if fewer than mincount elements were parsed
     */
    @Override
    public Object _parse( ByteBufferWrapper stream, Container context) {
      // obj = ListContainer()
      List<Object> obj = ListContainer();
      int c = 0;
      // remember where the current element attempt started, for rewinding
      int pos = stream.position();
      try{
        if( (subcon.conflags & FLAG_COPY_CONTEXT) != 0 ){
          while( c < maxcout ){
            pos = stream.position();
            obj.add( subcon._parse(stream, context.clone() ));
            c += 1;
          }
        }
        else {
          while( c < maxcout ){
            pos = stream.position();
            obj.add( subcon._parse(stream, context ));
            c += 1;
          }
        }
      }
      catch( Exception e ){
        if( c < mincount ){
          throw new RangeError("expected " + mincount + " to " + maxcout + " found " + c + " " + e.getMessage() );
        }
        stream.position(pos);
      }
      return obj;
    }

    /**
     * Builds each element of the list.
     * @throws TypeError if the object is not a List
     * @throws RangeError if the list size is outside [mincount, maxcount]
     */
    @Override
    protected void _build( Object object, ByteArrayOutputStream stream, Container context) {
      if( !(object instanceof List ))
        throw new TypeError( "Expected object array" );

      List<Object> obj = (List<Object>)object;

      if( obj.size() < mincount || obj.size() > maxcout ){
        throw new RangeError("expected " + mincount + " to " + maxcout + " found " + obj.size() );
      }

      int cnt = 0;
      try{
        if( (subcon.conflags & FLAG_COPY_CONTEXT) != 0 ){
          for( Object subobj : obj ){
            subcon._build(subobj, stream, context.clone() );
            cnt += 1;
          }
        }
        else {
          for( Object subobj : obj ){
            subcon._build(subobj, stream, context );
            cnt += 1;
          }
        }
      }
      catch( Exception e ){
        // NOTE(review): only the message survives; the cause is dropped
        // because RangeError has no (String, Exception) constructor.
        throw new RangeError( e.getMessage() );
      }
    }

    @Override
    protected int _sizeof(Container context){
      throw new SizeofError("can't calculate size");
    }
  }

  /**
    A sequence of named constructs, similar to structs in C. The elements are
    parsed and built in the order they are defined.
    See also Embedded.

    Example:
    Struct("foo",
        UBInt8("first_element"),
        UBInt16("second_element"),
        Padding(2),
        UBInt8("third_element"),
    )
  */
  static public Struct Struct(String name, Construct... subcons){
    return new Struct( name, subcons );
  }

  static public class Struct extends Construct{

    // whether this struct creates a nested context ("_" points to the parent)
    public boolean nested = true;
    Construct[] subcons;

    /**
     * @param name the name of the structure
     * @param subcons a sequence of subconstructs that make up this structure.
     */
    public Struct(String name, Construct...
subcons) { super(name); this.subcons = subcons; _inherit_flags(subcons); _clear_flag(FLAG_EMBED); } @Override public Object _parse( ByteBufferWrapper stream, Container context) { Container obj; if( context.contains("<obj>")){ obj = context.get("<obj>"); context.del("<obj>"); } else{ obj = new Container(); if( nested ){ context = Container( "_", context ); } } for( Construct sc: subcons ){ if( (sc.conflags & FLAG_EMBED) != 0 ){ context.set("<obj>", obj); sc._parse(stream, context); } else { Object subobj = sc._parse(stream, context); if( sc.name != null ){ obj.set( sc.name, subobj ); context.set( sc.name, subobj ); } } } return obj; } @Override protected void _build( Object obj, ByteArrayOutputStream stream, Container context ) { if( context.contains("<unnested>")){ context.del("<unnested>"); } else if( nested ){ context = Container( "_", context ); } for( Construct sc: subcons){ Object subobj; if( (sc.conflags & FLAG_EMBED) != 0 ){ context.set( "<unnested>", true ); subobj = obj; } else if( sc.name == null ){ subobj = null; } else if( obj instanceof Container ){ Container container = (Container)obj; subobj = container.get( sc.name ); if( subobj == null ) throw new FieldError( "No field found: " + sc.name + " in " + subobj ); context.set(sc.name, subobj); } else continue; sc._build(subobj, stream, context); } } @Override protected int _sizeof(Container context) { int sum = 0; if( nested ) context = Container( "_", context ); for( Construct sc: subcons ){ sum += sc._sizeof(context); } return sum; } } /** * @param name the name of the structure * @param subcons a sequence of subconstructs that make up this structure. * @param nested: a keyword-only argument that indicates whether this struct creates a nested context. The default is True. This parameter is considered "advanced usage", and may be removed in the future. * @return A sequence of unnamed constructs. The elements are parsed and built in the order they are defined. See also Embedded. 
Example:
    Sequence("foo",
        UBInt8("first_element"),
        UBInt16("second_element"),
        Padding(2),
        UBInt8("third_element"),
    )
  */
  public static Sequence Sequence(String name, Construct... subcons){
    return new Sequence( name, subcons);
  }

  /**
   * Like Struct, but parses into an ordered List instead of a named Container.
   */
  public static class Sequence extends Struct{

    public Sequence(String name, Construct... subcons ) {
      super(name, subcons);
    }

    /**
     * Parses each subconstruct in order, appending results to a list.
     * Embedded subcons share this sequence's list via the "<obj>" slot.
     */
    @Override
    public Object _parse( ByteBufferWrapper stream, Container context) {
      List obj;
      if( context.contains( "<obj>" )){
        obj = context.get( "<obj>" );
        context.del("<obj>");
      }
      else{
        obj = ListContainer();
        if( nested ){
          context = Container( "_", context );
        }
      }

      for( Construct sc: subcons ){
        if(( sc.conflags & FLAG_EMBED ) != 0 ){
          context.set( "<obj>", obj );
          sc._parse(stream, context);
        }
        else{
          Object subobj = sc._parse(stream, context);
          if( sc.name != null ){
            obj.add(subobj);
            context.set(sc.name, subobj);
          }
        }
      }
      return obj;
    }

    /**
     * Builds elements by walking a ListIterator in step with the subcons.
     * An embedded subcon receives the iterator itself so it can continue
     * consuming elements from the same position.
     */
    @Override
    protected void _build( Object obj, ByteArrayOutputStream stream, Container context ) {
      if( context.contains("<unnested>")) {
        context.del("<unnested>");
      }
      else if( nested ){
        context = Container( "_", context);
      }

      Object subobj;
      ListIterator objiter;
      if( obj instanceof List )
        objiter = ((List)obj).listIterator();
      else
        // already an iterator: we were handed a parent's position (embedded)
        objiter = (ListIterator)obj;

      for( Construct sc: subcons ){
        if(( sc.conflags & FLAG_EMBED ) != 0 ){
          context.set( "<unnested>", true );
          subobj = objiter;
        }
        else if( sc.name == null ){
          subobj = null;
        }
        else {
          subobj = objiter.next();
          context.set( sc.name, subobj );
        }
        sc._build(subobj, stream, context);
      }
    }
  }

  /** Sentinel "default" for Switch: always raises SwitchError. */
  public static Construct NoDefault = new Construct( null ){
    @Override
    public Object _parse(ByteBufferWrapper stream, Container context) {
      throw new SwitchError("no default case defined");
    }
    @Override
    protected void _build(Object obj, ByteArrayOutputStream stream, com.sirtrack.construct.lib.Containers.Container context) {
      throw new SwitchError("no default case defined");
    }
    @Override
    protected int _sizeof(com.sirtrack.construct.lib.Containers.Container context) {
      throw new SwitchError("no default case defined");
    }
  };

  /**
   * a function that takes the context and returns a key
   */
  public abstract static class KeyFunc{
    // optional context key this function is bound to (may be null)
    public final String key;
    public KeyFunc( String key ){
      this.key = key;
    }
    public KeyFunc(){
      this.key = null;
    }
    public String key(){
      return key;
    }
    public abstract Object get(Container context);
  }

  /**
   * @param key a context key
   * @param val a value
   * @return A KeyFunc that evaluates ctx.get(key).equals(val)
   */
  public static KeyFunc Equals( final String key, final Object val){
    return new KeyFunc(key){
      public Object get(Container ctx) {
        return ctx.get( key ).equals(val);
      };
    };
  }

  /**
   * @param key a context key
   * @return ctx.get(key)
   */
  public static KeyFunc KeyVal( final String key ){
    return new KeyFunc(key){
      public Object get(Container ctx) {
        return ctx.get( key );
      };
    };
  }

  /**
    A conditional branch. Switch will choose the case to follow based on the
    return value of keyfunc. If no case is matched, and no default value is
    given, SwitchError will be raised.
    See also Pass.

    Example:
    Struct("foo",
        UBInt8("type"),
        Switch("value", lambda ctx: ctx.type, {
            1 : UBInt8("spam"),
            2 : UBInt16("spam"),
            3 : UBInt32("spam"),
            4 : UBInt64("spam"),
        }),
    )

   * @param name the name of the construct
   * @param keyfunc a function that takes the context and returns a key, which will be used to choose the relevant case.
   * @param cases a dictionary mapping keys to constructs. the keys can be any values that may be returned by keyfunc.
   */
  public static Switch Switch(String name, KeyFunc keyfunc, Object... cases ) {
    return new Switch( name, keyfunc, Container(cases), NoDefault, false );
  }

  /**
    A conditional branch. Switch will choose the case to follow based on the
    return value of keyfunc. If no case is matched, and no default value is
    given, SwitchError will be raised.
    See also Pass.

    Example:
    Struct("foo",
        UBInt8("type"),
        Switch("value", lambda ctx: ctx.type, {
            1 : UBInt8("spam"),
            2 : UBInt16("spam"),
            3 : UBInt32("spam"),
            4 : UBInt64("spam"),
        }),
    )

   * @param name the name of the construct
   * @param keyfunc a function that takes the context and returns a key, which will be used to choose the relevant case.
   * @param cases a dictionary mapping keys to constructs. the keys can be any values that may be returned by keyfunc.
   * @param defaultval a default value to use when the key is not found in the cases. if not supplied, an exception will be raised when the key is not found. You can use the builtin construct Pass for 'do-nothing'.
   * @param include_key whether or not to include the key in the return value of parsing. default is False.
   */
  public static Switch Switch(String name, KeyFunc keyfunc, Container cases, Construct defaultval, boolean include_key ) {
    return new Switch( name, keyfunc, cases, defaultval, include_key );
  }

  /**
    A conditional branch. Switch will choose the case to follow based on the
    return value of keyfunc. If no case is matched, and no default value is
    given, SwitchError will be raised.
    See also Pass.

    Example:
    Struct("foo",
        UBInt8("type"),
        Switch("value", lambda ctx: ctx.type, {
            1 : UBInt8("spam"),
            2 : UBInt16("spam"),
            3 : UBInt32("spam"),
            4 : UBInt64("spam"),
        }),
    )
  */
  public static class Switch extends Construct{

    /**
     * a function that takes the context and returns a key, which will be used to choose the relevant case.
     */
    KeyFunc keyfunc;
    // mapping from keys to case constructs
    Container cases;
    // construct used when no case matches (NoDefault raises SwitchError)
    Construct defaultval;
    // if true, parsing returns Container(key, value) instead of just value
    boolean include_key;

    /**
     * @param name the name of the construct
     * @param keyfunc a function that takes the context and returns a key, which will be used to choose the relevant case.
     * @param cases a dictionary mapping keys to constructs. the keys can be any values that may be returned by keyfunc.
     * @param defaultval a default value to use when the key is not found in the cases. if not supplied, an exception will be raised when the key is not found.
You can use the builtin construct Pass for 'do-nothing'. * @param include_key whether or not to include the key in the return value of parsing. defualt is False. */ public Switch(String name, KeyFunc keyfunc, Container cases, Construct defaultval, boolean include_key ) { super(name); this.keyfunc = keyfunc; this.cases = cases; this.defaultval = defaultval; this.include_key = include_key; Construct[] ca = cases.values( Construct.class ); this._inherit_flags(ca); this._set_flag(FLAG_DYNAMIC); } @Override public Object _parse(ByteBufferWrapper stream, Container context) { Object key = keyfunc.get(context); Construct c = cases.get(key, defaultval); Object obj = c._parse(stream, context); if( include_key ){ return Container( key, obj ); } else { return obj; } } @Override protected void _build(Object obj, ByteArrayOutputStream stream, Container context) { Object key; if( include_key ){ List list = (List)obj; key = list.get(0); obj = list.get(1); } else { key = keyfunc.get( context ); } Construct casestruct = cases.get(key, defaultval); casestruct._build(obj, stream, context); /* if self.include_key: key, obj = obj else: key = self.keyfunc(context) case = self.cases.get(key, self.default) case._build(obj, stream, context) */ } @Override protected int _sizeof( Container context) { Construct casestruct = cases.get(keyfunc.get( context ), defaultval); return casestruct._sizeof(context); } } /** Creates an in-memory buffered stream, which can undergo encoding and decoding prior to being passed on to the subconstruct. See also Bitwise. Note: * Do not use pointers inside Buffered Example: Buffered(BitField("foo", 16), encoder = decode_bin, decoder = encode_bin, resizer = lambda size: size / 8, ) */ static public class Buffered extends Subconstruct{ Encoder encoder; Decoder decoder; Resizer resizer; /** Creates an in-memory buffered stream, which can undergo encoding and decoding prior to being passed on to the subconstruct. 
See also Bitwise.<br/> <br/> Note: * Do not use pointers inside Buffered * @param subcon the subcon which will operate on the buffer * @param encoder a function that takes a string and returns an encoded string (used after building) * @param decoder a function that takes a string and returns a decoded string (used before parsing) * @param resizer a function that takes the size of the subcon and "adjusts" or "resizes" it according to the encoding/decoding process. */ public Buffered( Construct subcon, Encoder encoder, Decoder decoder, Resizer resizer ) { super(subcon); this.encoder = encoder; this.decoder = decoder; this.resizer = resizer; } @Override public Object _parse( ByteBufferWrapper stream, Container context) { byte[] data = _read_stream(stream, _sizeof(context)); byte[] stream2 = decoder.decode(data); return subcon._parse(new ByteBufferWrapper().wrap( stream2 ), context); } @Override protected void _build( Object obj, ByteArrayOutputStream stream, Container context) { int size = _sizeof(context); ByteArrayOutputStream stream2 = new ByteArrayOutputStream(); subcon._build(obj, stream2, context); byte[] data = encoder.encode(stream2.toString()); if( data.length != size ) throw new RuntimeException( "Wrong data length: " + data.length ); _write_stream(stream, size, data); } @Override protected int _sizeof(Container context) { return resizer.resize( subcon._sizeof(context)); } } /** Wraps the stream with a read-wrapper (for parsing) or a write-wrapper (for building). The stream wrapper can buffer the data internally, reading it from- or writing it to the underlying stream as needed. For example, BitByteBufferWrapper reads whole bytes from the underlying stream, but returns them as individual bits. See also Bitwise. When the parsing or building is done, the stream's close method will be invoked. It can perform any finalization needed for the stream wrapper, but it must not close the underlying stream. 
Note: * Do not use pointers inside Restream Example: Restream(BitField("foo", 16), stream_reader = BitByteBufferWrapper, stream_writer = BitStreamWriter, resizer = lambda size: size / 8, ) */ public static class Restream extends Subconstruct{ BitStreamReader stream_reader; BitStreamWriter stream_writer; Resizer resizer; /** Wraps the stream with a read-wrapper (for parsing) or a write-wrapper (for building). The stream wrapper can buffer the data internally, reading it from- or writing it to the underlying stream as needed. For example, BitByteBufferWrapper reads whole bytes from the underlying stream, but returns them as individual bits. See also Bitwise.<br/> <br/> When the parsing or building is done, the stream's close method will be invoked. It can perform any finalization needed for the stream wrapper, but it must not close the underlying stream.<br/> <br/> Note: * Do not use pointers inside Restream * @param subcon the subcon * @param stream_reader the read-wrapper * @param stream_writer the write wrapper * @param resizer a function that takes the size of the subcon and "adjusts" or "resizes" it according to the encoding/decoding process. 
*/ public Restream(Construct subcon, BitStreamReader stream_reader, BitStreamWriter stream_writer, Resizer resizer ) { super(subcon); this.stream_reader = stream_reader; this.stream_writer = stream_writer; this.resizer = resizer; } @Override public Object _parse( ByteBufferWrapper stream, Container context) { stream_reader.init(stream); Object obj = subcon._parse(stream_reader, context); stream_reader.close(); return obj; } @Override protected void _build( Object obj, ByteArrayOutputStream stream, Container context) { ByteArrayOutputStream stream2 = stream_writer.init(stream); subcon._build(obj, stream2, context); stream_writer.close(); } @Override protected int _sizeof(Container context) { return resizer.resize( subcon._sizeof(context)); } } """ Changes the stream position to a given offset, where the construction should take place, and restores the stream position when finished. See also Anchor, OnDemand and OnDemandPointer. Notes: * requires a seekable stream. Parameters: * offsetfunc: a function that takes the context and returns an absolute stream position, where the construction would take place * subcon - the subcon to use at `offsetfunc()` Example: Struct("foo", UBInt32("spam_pointer"), Pointer(lambda ctx: ctx.spam_pointer, Array(5, UBInt8("spam")) ) ) """ """ Peeks at the stream: parses without changing the stream position. See also Union. If the end of the stream is reached when peeking, returns None. Notes: * requires a seekable stream. Parameters: * subcon - the subcon to peek at * perform_build - whether or not to perform building. by default this parameter is set to False, meaning building is a no-op. Example: Peek(UBInt8("foo")) """ """ Allows for on-demand (lazy) parsing. When parsing, it will return a LazyContainer that represents a pointer to the data, but does not actually parses it from stream until it's "demanded". By accessing the 'value' property of LazyContainers, you will demand the data from the stream. 
The data will be parsed and cached for later use. You can use the 'has_value' property to know whether the data has already been demanded. See also OnDemandPointer. Notes: * requires a seekable stream. Parameters: * subcon - * advance_stream - whether or not to advance the stream position. by default this is True, but if subcon is a pointer, this should be False. * force_build - whether or not to force build. If set to False, and the LazyContainer has not been demaned, building is a no-op. Example: OnDemand(Array(10000, UBInt8("foo")) """ /* class Pointer(Subconstruct): __slots__ = ["offsetfunc"] def __init__(self, offsetfunc, subcon): Subconstruct.__init__(self, subcon) self.offsetfunc = offsetfunc def _parse(self, stream, context): newpos = self.offsetfunc(context) origpos = stream.tell() stream.seek(newpos) obj = self.subcon._parse(stream, context) stream.seek(origpos) return obj def _build(self, obj, stream, context): newpos = self.offsetfunc(context) origpos = stream.tell() stream.seek(newpos) self.subcon._build(obj, stream, context) stream.seek(origpos) def _sizeof(self, context): return 0 class Peek(Subconstruct): __slots__ = ["perform_build"] def __init__(self, subcon, perform_build = False): Subconstruct.__init__(self, subcon) self.perform_build = perform_build def _parse(self, stream, context): pos = stream.tell() try: return self.subcon._parse(stream, context) except FieldError: pass finally: stream.seek(pos) def _build(self, obj, stream, context): if self.perform_build: self.subcon._build(obj, stream, context) def _sizeof(self, context): return 0 class OnDemand(Subconstruct): __slots__ = ["advance_stream", "force_build"] def __init__(self, subcon, advance_stream = True, force_build = True): Subconstruct.__init__(self, subcon) self.advance_stream = advance_stream self.force_build = force_build def _parse(self, stream, context): obj = LazyContainer(self.subcon, stream, stream.tell(), context) if self.advance_stream: 
stream.seek(self.subcon._sizeof(context), 1) return obj def _build(self, obj, stream, context): if not isinstance(obj, LazyContainer): self.subcon._build(obj, stream, context) elif self.force_build or obj.has_value: self.subcon._build(obj.value, stream, context) elif self.advance_stream: stream.seek(self.subcon._sizeof(context), 1) */ /** * @param name the new name * @param subcon the subcon to reconfigure * @param setflags the flags to set (default is 0) * @param clearflags the flags to clear (default is 0) */ static public Reconfig Reconfig(String name, Construct subcon ) { return new Reconfig(name, subcon); } /** * @param name the new name * @param subcon the subcon to reconfigure * @param setflags the flags to set (default is 0) * @param clearflags the flags to clear (default is 0) */ static public Reconfig Reconfig(String name, Construct subcon, int setflags, int clearflags ) { return new Reconfig(name, subcon, setflags, clearflags); } /** Reconfigures a subconstruct. Reconfig can be used to change the name and set and clear flags of the inner subcon. Example: Reconfig("foo", UBInt8("bar")) */ static public class Reconfig extends Subconstruct{ /** * @param name the new name * @param subcon the subcon to reconfigure * @param setflags the flags to set (default is 0) * @param clearflags the flags to clear (default is 0) */ public Reconfig(String name, Construct subcon, int setflags, int clearflags ) { super(name, subcon); _set_flag(setflags); _clear_flag(clearflags); } public Reconfig(String name, Construct subcon ) { this(name, subcon, 0, 0); } } /** * a function that takes the context and return the computed value */ public static interface ValueFunc{ Object get( Container ctx ); } /** * A computed value. 
Example: Struct("foo", UBInt8("width"), UBInt8("height"), Value("total_pixels", lambda ctx: ctx.width * ctx.height), ) * @param name the name of the value * @param func a function that takes the context and return the computed value */ public static Value Value( String name, ValueFunc func ){ return new Value( name, func ); }; public static class Value extends Construct{ ValueFunc func; public Value(String name, ValueFunc func ) { super(name); this.func = func; _set_flag(FLAG_DYNAMIC); } @Override public Object _parse(ByteBufferWrapper stream, com.sirtrack.construct.lib.Containers.Container context) { return func.get(context); } @Override protected void _build(Object obj, ByteArrayOutputStream stream, com.sirtrack.construct.lib.Containers.Container context) { context.set( name, func.get(context) ); } @Override protected int _sizeof(com.sirtrack.construct.lib.Containers.Container context) { return 0; } }
package com.sirtrack.construct;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

import com.sirtrack.construct.lib.*;
import com.sirtrack.construct.lib.BitStream.BitStreamReader;
import com.sirtrack.construct.lib.BitStream.BitStreamWriter;
import com.sirtrack.construct.lib.Containers.Container;

import static com.sirtrack.construct.lib.Binary.hexStringToByteArray;
import static com.sirtrack.construct.lib.Containers.*;

/**
 * Core of the Java port of the Python "construct" declarative binary-parsing
 * library: the {@link Construct} base class, its exception hierarchy and the
 * fundamental field/array/struct constructs.
 */
public class Core {

  /** Root of the library's exception hierarchy (unchecked). */
  public static class ConstructError extends RuntimeException {
    public ConstructError(String string) {
      super(string);
    }

    public ConstructError(String string, Exception e) {
      super(string, e);
    }
  }

  /** Raised when a field cannot be read from or written to the stream. */
  public static class FieldError extends ConstructError {
    public FieldError(String string) {
      super(string);
    }

    public FieldError(String string, Exception e) {
      super(string, e);
    }
  }

  /** Raised when a construct cannot compute a static size. */
  public static class SizeofError extends ConstructError {
    public SizeofError(String string) {
      super(string);
    }
  }

  /** Raised for invalid arguments or unbuildable data. */
  public static class ValueError extends ConstructError {
    public ValueError(String string) {
      super(string);
    }
  }

  /** Raised by Range when the element count is outside [mincount, maxcount]. */
  public static class RangeError extends ConstructError {
    public RangeError(String string) {
      super(string);
    }
  }

  /** Raised when a value has an unexpected runtime type. */
  public static class TypeError extends ConstructError {
    public TypeError(String string) {
      super(string);
    }
  }

  /** Raised by Switch when no case matches and no default is given. */
  public static class SwitchError extends ConstructError {
    public SwitchError(String string) {
      super(string);
    }
  }

  /** Raised by array constructs when the element count is wrong. */
  public static class ArrayError extends ConstructError {
    public ArrayError(String string, Exception e) {
      super(string, e);
    }
  }

  // Name mapping from the Python construct library:
  // Bits = BitField
  // Byte = UBInt8
  // Bytes = Field
  // Const = ConstAdapter
  // Tunnel = TunnelAdapter
  // Embed = Embedded

  /**
   * Convenience factory: packs the given ints into a byte[], truncating each
   * value to its low 8 bits.
   */
  static public byte[] ByteArray(int... ints) {
    byte[] ba = new byte[ints.length];
    int k = 0;
    for (int i : ints) {
      ba[k++] = (byte) i;
    }
    return ba;
  }

  /** Concatenates several byte arrays into one. */
  static public byte[] ByteArray(byte[]... bas) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    for (byte[] ba : bas) {
      try {
        out.write(ba);
      } catch (IOException e) {
        // ByteArrayOutputStream.write(byte[]) never actually throws
        throw new RuntimeException(e);
      }
    }
    return out.toByteArray();
  }

  /**
   * A generic container of attributes.
   *
   * Containers are the common way to express parsed data.
   */
  static public <T> Container Container(Object... pairs) {
    return new Container(pairs);
  }

  /**
   * Reads exactly {@code length} bytes from the stream.
   *
   * @throws FieldError if length is negative or fewer bytes remain
   */
  public static byte[] _read_stream(ByteBufferWrapper stream, int length) {
    if (length < 0)
      throw new FieldError("length must be >= 0 " + length);
    int len = stream.remaining();
    if (len < length)
      throw new FieldError("expected " + length + " found " + len);
    byte[] out = new byte[length];
    stream.get(out, 0, length);
    return out;
  }

  /**
   * The mother of all constructs.
   *
   * User code interacts through {@link #parse} / {@link #build} /
   * {@link #sizeof}; concrete constructs implement {@code _parse},
   * {@code _build} and {@code _sizeof}.
   */
  static public abstract class Construct implements Cloneable {

    /** Clone the context for each sub-parse/build instead of sharing it. */
    public static final int FLAG_COPY_CONTEXT = 0x0001;
    /** Size/behavior depends on runtime context; sizeof may fail. */
    public static final int FLAG_DYNAMIC = 0x0002;
    /** Construct is merged into its parent instead of nested under a name. */
    public static final int FLAG_EMBED = 0x0004;
    /** Construct creates a nested context. */
    public static final int FLAG_NESTING = 0x0008;

    public int conflags;
    public String name;
    // last parsed value, exposed through get()/set(); populated by
    // Struct._parse for each subcon
    protected Object val;

    public Construct() {
      this(null, 0); // must set name later
    }

    public Construct(String name) {
      this(name, 0);
    }

    public Construct(String name, int flags) {
      setName(name);
      this.conflags = flags;
    }

    public void setName(String name) {
      if (name != null) {
        // "_" and "<...>" are reserved for internal context bookkeeping
        if (name.equals("_") || name.startsWith("<"))
          throw new ValueError("reserved name " + name); // raise
      }
      this.name = name;
    }

    /** Shallow clone; subclasses holding nested constructs deep-clone them. */
    public Construct clone() throws CloneNotSupportedException {
      return (Construct) super.clone();
    }

    @Override
    public String toString() {
      if (get() != null)
        return get().toString();
      else
        return getClass().getName() + "(" + name + ")";
    }

    public Object get() {
      return val;
    }

    public void set(Object val) {
      this.val = val;
    }

    /**
     * Set the given flag or flags.
     *
     * @param flag flag to set; may be OR'd combination of flags
     */
    void _set_flag(int flag) {
      conflags |= flag;
    }

    /**
     * Clear the given flag or flags.
     *
     * @param flag flag to clear; may be OR'd combination of flags
     */
    public void _clear_flag(int flag) {
      conflags &= ~flag;
    }

    /** Pull flags from subconstructs. */
    public void _inherit_flags(Construct... subcons) {
      for (Construct sc : subcons) {
        _set_flag(sc.conflags);
      }
    }

    /**
     * Check whether a given flag is set.
     *
     * @param flag flag to check
     */
    boolean _is_flag(int flag) {
      return (conflags & flag) == flag;
    }

    /**
     * Best-effort byte length of a value to be written.
     * NOTE(review): a negative Integer falls into the "&lt; 256" branch and
     * reports length 1, and Character is assumed to occupy one byte —
     * confirm both are intended.
     */
    static public int getDataLength(Object data) {
      if (data instanceof String)
        return ((String) data).length();
      else if (data instanceof Byte || data instanceof Character)
        return 1;
      else if (data instanceof Integer) {
        int num = (Integer) data;
        if (num < 256)
          return 1;
        else if (num < 65536)
          return 2;
        else
          return 4;
        // return Integer.SIZE/8;
      } else if (data instanceof byte[])
        return ((byte[]) data).length;
      else if (data instanceof List)
        return ((List) data).size();
      else
        throw new RuntimeException("Data length unknown for " + data);
    }

    /**
     * Appends a String/Byte/Integer/byte[] value to the output stream.
     * Integers are written as their low byte only.
     */
    static public void appendDataStream(ByteArrayOutputStream stream, Object data) {
      if (data instanceof String)
        try {
          // NOTE(review): uses the platform default charset — presumably the
          // data is ASCII; confirm
          stream.write(((String) data).getBytes());
        } catch (IOException e) {
          throw new ValueError("Can't append data " + data + " " + e.getMessage());
        }
      else if (data instanceof Byte)
        stream.write((Byte) data);
      else if (data instanceof Integer)
        stream.write((Integer) data);
      else if (data instanceof byte[])
        try {
          stream.write((byte[]) data);
        } catch (IOException e) {
          throw new ValueError("Can't append data " + data + " " + e.getMessage());
        }
      else
        throw new ValueError("Can't append data " + data);
    }

    /**
     * Writes data of exactly {@code length} bytes to the stream.
     *
     * @throws FieldError if length is negative or the data length differs
     */
    public void _write_stream(ByteArrayOutputStream stream, int length, Object data) {
      if (length < 0)
        throw new FieldError("length must be >= 0 " + length);
      int datalength = getDataLength(data);
      if (length != datalength)
        throw new FieldError("expected " + length + " found " + datalength);
      appendDataStream(stream, data);
    };

    /**
     * Parse an in-memory buffer.
     *
     * Strings, buffers, memoryviews, and other complete buffers can be parsed
     * with this method.
     *
     * @param data the bytes to parse
     */
    public <T> T parse(byte[] data) {
      return parse(data, false);
    }

    /**
     * Parse an in-memory buffer, optionally with debug tracing enabled.
     *
     * @param data the bytes to parse
     * @param debug stored in the context under "debug"
     */
    public <T> T parse(byte[] data, boolean debug) {
      return (T) parse_stream(new ByteBufferWrapper().wrap(data), debug);
    }

    /**
     * @param hex a string representation of hex bytes: 65535 = "FFFF"
     */
    public <T> T parse(String hex) {
      return (T) parse(hex, false);
    }

    /**
     * @param hex a string representation of hex bytes: 65535 = "FFFF"
     * @param debug stored in the context under "debug"
     */
    public <T> T parse(String hex, boolean debug) {
      byte[] data = hexStringToByteArray(hex);
      return (T) parse(data, debug);
    }

    /**
     * Parse an in-memory buffer. Also accepts a context, useful for passing
     * initial values.
     *
     * @param data the bytes to parse
     * @param context initial context
     */
    public <T> T parse(byte[] data, Container context) {
      return (T) _parse(new ByteBufferWrapper().wrap(data), context);
    }

    /** Hex-string variant of {@link #parse(byte[], Container)}. */
    public <T> T parse(String hex, Container context) {
      byte[] data = hexStringToByteArray(hex);
      return (T) _parse(new ByteBufferWrapper().wrap(data), context);
    }

    /**
     * Parse a stream.
     *
     * Files, pipes, sockets, and other streaming sources of data are handled
     * by this method.
     */
    public Object parse_stream(ByteBufferWrapper stream) {
      return parse_stream(stream, false);
    }

    /**
     * Parse a stream with a fresh context carrying the debug flag.
     */
    public Object parse_stream(ByteBufferWrapper stream, boolean debug) {
      Container c = Container("debug", debug);
      return _parse(stream, c);
    }

    abstract public Object _parse(ByteBufferWrapper stream, Container context);

    /**
     * Build an object in memory.
     *
     * @param obj the value to serialize
     * @return the serialized bytes
     */
    public byte[] build(Object obj) {
      ByteArrayOutputStream stream = new ByteArrayOutputStream();
      build_stream(obj, stream);
      return stream.toByteArray();
    }

    /**
     * Build an object directly into a stream.
     *
     * @param obj the value to serialize
     * @param stream destination stream
     */
    public void build_stream(Object obj, ByteArrayOutputStream stream) {
      _build(obj, stream, new Container());
    }

    // abstract public void _build( String obj, OutputStream stream, Container
    // context);

    public abstract void _build(Object obj, ByteArrayOutputStream stream,
        Container context);

    /**
     * Calculate the size of this object, optionally using a context. Some
     * constructs have no fixed size and can only know their size for a given
     * hunk of data; these constructs will raise an error if they are not
     * passed a context.
     *
     * @param context contextual data
     * @return the length of this construct
     */
    public int sizeof(Container context) {
      if (context == null) {
        context = new Container();
      }
      try {
        return _sizeof(context);
      } catch (Exception e) {
        // NOTE(review): only the message survives — the original cause (and
        // its stack trace) is discarded here
        throw new SizeofError(e.getMessage());
      }
    }

    public int sizeof() {
      return sizeof(null);
    }

    public abstract int _sizeof(Container context);
  }

  /**
   * Abstract subconstruct (wraps an inner construct, inheriting its name and
*/ public static abstract class Subconstruct<T extends Construct> extends Construct { protected T subcon; /** * @param subcon * the construct to wrap */ public Subconstruct(T subcon) { super(subcon.name, subcon.conflags); this.subcon = subcon; } Subconstruct(String name, T subcon) { super(name, subcon.conflags); this.subcon = subcon; } public Subconstruct<T> clone() throws CloneNotSupportedException { Subconstruct<T> s = (Subconstruct<T>) super.clone(); s.subcon = (T)subcon.clone(); return s; } // @Override // public T get(){ // return subcon; @Override public Object _parse(ByteBufferWrapper stream, Container context) { return subcon._parse(stream, context); } @Override public void _build(Object obj, ByteArrayOutputStream stream, Container context) { subcon._build(obj, stream, context); } @Override public int _sizeof(Container context) { return subcon._sizeof(context); } } /** * A fixed-size byte field. */ public static class StaticField extends Construct { int length; /** * @param name * field name * @param length * number of bytes in the field */ public StaticField(String name, int length) { super(name); this.length = length; } @Override public Object _parse(ByteBufferWrapper stream, Container context) { return _read_stream(stream, length); } @Override public void _build(Object obj, ByteArrayOutputStream stream, Container context) { _write_stream(stream, length, obj); } @Override public int _sizeof(Container context) { return length; } /* * public int _sizeof( Container context ){ @Override public Construct clone() { // TODO Auto-generated method stub return null; } return length; } */ } /** * A field that uses ``struct`` to pack and unpack data. * * See ``struct`` documentation for instructions on crafting format strings. 
*/ public static class FormatField<T extends Number> extends StaticField { int length; Packer<T> packer; /** * @param name * name of the field * @param endianness * : format endianness string; one of "<", ">", or "=" * @param format * : a single format character */ public FormatField(String name, char endianity, char format) { super(name, 0); if (endianity != '>' && endianity != '<' && endianity != '=') throw new ValueError("endianity must be be '=', '<', or '>' " + endianity); packer = new Packer<T>(endianity, format); super.length = packer.length(); } @Override public T _parse(ByteBufferWrapper stream, Container context) { try { return packer.unpack(stream.bb); } catch (Exception e) { // e.printStackTrace(); throw new FieldError(e.getMessage(), e); } } @Override public void _build(Object obj, ByteArrayOutputStream stream, Container context) { _write_stream(stream, super.length, packer.pack(obj)); } @Override public T get() { return (T)val; } } /** * callable that takes a context and returns length as an int */ static public interface LengthFunc { abstract int length(Container context); } /** * @param name * context field name * @return get length from context field */ static public LengthFunc LengthField(final String name) { return new LengthFunc() { public int length(Container ctx) { return (Integer) ctx.get(name); } }; } /** * A variable-length field. The length is obtained at runtime from a function. * >>> foo = Struct("foo", ... Byte("length"), ... MetaField("data", lambda * ctx: ctx["length"]) ... 
) >>> foo.parse("\\x03ABC") Container(data = 'ABC', * length = 3) >>> foo.parse("\\x04ABCD") Container(data = 'ABCD', length = 4) * * @param name * name of the field * @param lengthfunc * callable that takes a context and returns length as an int */ public static MetaField MetaField(String name, LengthFunc lengthfunc) { return new MetaField(name, lengthfunc); } public static class MetaField extends Construct { LengthFunc lengthfunc; /** * @param name * name of the field * @param lengthfunc * callable that takes a context and returns length as an int */ public MetaField(String name, LengthFunc lengthfunc) { super(name); this.lengthfunc = lengthfunc; this._set_flag(FLAG_DYNAMIC); } @Override public Object _parse(ByteBufferWrapper stream, Container context) { return _read_stream(stream, lengthfunc.length(context)); } @Override public void _build(Object obj, ByteArrayOutputStream stream, Container context) { _write_stream(stream, lengthfunc.length(context), obj); } @Override public int _sizeof(Container context) { return lengthfunc.length(context); } } /** * callable that takes a context and returns length as an int */ static public interface CountFunc { abstract int count(Container context); } /** * Example: MetaArray(lambda ctx: 5, UBInt8("foo")) See also Array, Range and * RepeatUntil. * * @param countfunc * a function that takes the context as a parameter and returns the * number of elements of the array (count) * @param subcon * the subcon to repeat `countfunc()` times * @return An array (repeater) of a meta-count. The array will iterate exactly * `countfunc()` times. Will raise ArrayError if less elements are * found. */ public static MetaArray MetaArray(CountFunc countfunc, Construct subcon) { return new MetaArray(countfunc, subcon); } /** * An array (repeater) of a meta-count. The array will iterate exactly * `countfunc()` times. Will raise ArrayError if less elements are found. See * also Array, Range and RepeatUntil. 
* * Example: MetaArray(lambda ctx: 5, UBInt8("foo")) */ public static class MetaArray<T extends Construct> extends Subconstruct<T> { CountFunc countfunc; /** * Parameters: countfunc - a function that takes the context as a parameter * and returns the number of elements of the array (count) subcon - the * subcon to repeat `countfunc()` times * * @param length * @param name * @param subcon */ public MetaArray(CountFunc countfunc, T subcon) { super(subcon); this.countfunc = countfunc; _clear_flag(FLAG_COPY_CONTEXT); _set_flag(FLAG_DYNAMIC); } @Override public Object _parse(ByteBufferWrapper stream, Container context) { List obj = ListContainer(); int c = 0; int count = countfunc.count(context); try { if ((subcon.conflags & FLAG_COPY_CONTEXT) != 0) { while (c < count) { obj.add(subcon._parse(stream, context.clone())); c += 1; } } else { while (c < count) { obj.add(subcon._parse(stream, context)); c += 1; } } } catch (Exception e) { throw new ArrayError( this.toString() + ": expected " + count + ", found " + c, e ); } val = obj; return obj; } @Override public void _build(Object object, ByteArrayOutputStream stream, Container context) { List<Object> obj = (List<Object>) object; int count = countfunc.count(context); if (obj.size() != count) { throw new ArrayError("expected " + count + ", found " + obj.size(), null); } if ((subcon.conflags & FLAG_COPY_CONTEXT) != 0) { for (Object subobj : obj) { subcon._build(subobj, stream, context.clone()); } } else { for (Object subobj : obj) { subcon._build(subobj, stream, context); } } } @Override public int _sizeof(Container context) { return subcon._sizeof(context) * countfunc.count(context); } } public static <T extends Construct>Range Range(int mincount, int maxcount, T subcon) { return new Range<T>(mincount, maxcount, subcon); } /** * A range-array. The subcon will iterate between `mincount` to `maxcount` * times. If less than `mincount` elements are found, raises RangeError. See * also GreedyRange and OptionalGreedyRange. 
*
 * The general-case repeater. Repeats the given unit for at least mincount
 * times, and up to maxcount times. If an exception occurs (EOF, validation
 * error), the repeater exits. If less than mincount units have been
 * successfully parsed, a RangeError is raised.
 *
 * .. note:: This object requires a seekable stream for parsing.
 */
public static class Range<T extends Construct> extends Subconstruct<T> {

  /**
   * @param mincount the minimal count
   * @param maxcount the maximal count
   * @param subcon the subcon to repeat
   *
   * >>> c = Range(3, 7, UBInt8("foo"))
   * >>> c.parse("\\x01\\x02")                  RangeError: expected 3..7, found 2
   * >>> c.parse("\\x01\\x02\\x03")             [1, 2, 3]
   * >>> c.parse("\\x01\\x02\\x03\\x04\\x05\\x06")        [1, 2, 3, 4, 5, 6]
   * >>> c.parse("\\x01\\x02\\x03\\x04\\x05\\x06\\x07")   [1, 2, 3, 4, 5, 6, 7]
   * >>> c.parse("\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09")  [1, 2, 3, 4, 5, 6, 7]
   * >>> c.build([1,2])                         RangeError: expected 3..7, found 2
   * >>> c.build([1,2,3,4])                     '\\x01\\x02\\x03\\x04'
   * >>> c.build([1,2,3,4,5,6,7,8])             RangeError: expected 3..7, found 8
   */
  int mincount;
  int maxcout; // NOTE(review): "maxcout" is a historical typo; package-visible, so kept

  public Range(int mincount, int maxcount, T subcon) {
    super(subcon);
    this.mincount = mincount;
    this.maxcout = maxcount;
    _clear_flag(FLAG_COPY_CONTEXT);
    _set_flag(FLAG_DYNAMIC);
  }

  /** Returns the per-element subcon clones captured during the last parse. */
  @Override
  public List<T> get() {
    return (List<T>) val;
  }

  /** Intentional no-op: keeps callers from clobbering the clone list in val. */
  @Override
  public void set(Object val) {
  }

  @Override
  public Object _parse(ByteBufferWrapper stream, Container context) {
    // obj = ListContainer()
    List<Object> obj = ListContainer();
    val = ListContainer(); // fresh clone list exposed via get()
    int c = 0;
    int pos = stream.position();
    try {
      if ((subcon.conflags & FLAG_COPY_CONTEXT) != 0) {
        while (c < maxcout) {
          // each element is parsed by its own clone of the subcon
          T clone = (T) subcon.clone();
          pos = stream.position(); // rollback point for a failed element
          get().add(clone);
          obj.add(clone._parse(stream, context.clone()));
          c += 1;
        }
      } else {
        while (c < maxcout) {
          T clone = (T) subcon.clone();
          pos = stream.position();
          get().add(clone);
          obj.add(clone._parse(stream, context));
          c += 1;
        }
      }
    } catch (Exception e) {
      if (c < mincount) {
        throw new RangeError("expected " + mincount + " to " + maxcout
            + " found " + c + " " + e.getMessage());
      }
      // enough elements parsed: rewind to the start of the failed element
      stream.position(pos);
    }
    return obj;
  }

  @Override
  public void _build(Object object, ByteArrayOutputStream stream, Container context) {
    if (!(object instanceof List))
      throw new TypeError("Expected object array");
    List<Object> obj = (List<Object>) object;
    if (obj.size() < mincount || obj.size() > maxcout) {
      throw new RangeError("expected " + mincount + " to " + maxcout
          + " found " + obj.size());
    }
    int cnt = 0;
    try {
      if ((subcon.conflags & FLAG_COPY_CONTEXT) != 0) {
        for (Object subobj : obj) {
          subcon._build(subobj, stream, context.clone());
          cnt += 1;
        }
      } else {
        for (Object subobj : obj) {
          subcon._build(subobj, stream, context);
          cnt += 1;
        }
      }
    } catch (Exception e) {
      // NOTE(review): the original cause is discarded — only its message survives
      throw new RangeError(e.getMessage());
    }
  }

  @Override
  public int _sizeof(Container context) {
    throw new SizeofError("can't calculate size");
  }
}

/**
 * A sequence of named constructs, similar to structs in C.
The elements are * parsed and built in the order they are defined. See also Embedded. Example: * Struct("foo", UBInt8("first_element"), UBInt16("second_element"), * Padding(2), UBInt8("third_element"), ) */ static public Struct Struct(String name, Construct... subcons) { return new Struct(name, subcons); } static public Struct Struct(Construct... subcons) { return new Struct(null, subcons); } static public class Struct extends Construct { public boolean nested = true; public Construct[] subcons; /** * @param name * the name of the structure * @param subcons * a sequence of subconstructs that make up this structure. */ public Struct(String name, Construct... subcons) { super(name); this.subcons = subcons; _inherit_flags(subcons); _clear_flag(FLAG_EMBED); } @Override public Struct clone() throws CloneNotSupportedException { Struct clone = (Struct) super.clone(); clone.subcons = new Construct[subcons.length]; Field[] fields = getClass().getDeclaredFields(); int i = 0; for( Field f : fields ){ if (Construct.class.isAssignableFrom(f.getType())) try{ f.setAccessible(true); // clone field Construct fclone = ((Construct)f.get(this)).clone(); // set the field clone into the Struct clone f.set(clone, fclone); // also add the field clone to the subcons array clone.subcons[i++] = fclone; } catch( Exception e ){ throw new RuntimeException(e); } // Clone elements in the subcons array // Because we cater for both static and runtime Struct definitions, // we need to make sure subcons don't end up twice in the subcons array // This case has to handle only the runtime (old) definition // So if we already have stuff in the subcons array, carry on else if ( /*f.getType() == Construct[].class &&*/ f.getName().equals("subcons") && clone.subcons[0] == null ) try{ i = 0; for( Construct c : subcons ){ clone.subcons[i++] = c.clone(); } } catch( Exception e ){ throw new RuntimeException(e); } else continue; } return clone; } /** * This is a special constructor for typesafe Structs. 
* Instead of passing an array of Subcons at runtime, * this constructor inspects the public fields of type Construct for this Struct * and invokes each field's constructor by passing the field name. * It's assumed that all declared fields have a public constructor: Construct( String name ) * @param name */ public Struct(String name) { super(name); Constructor fctor; Field field = null; String fname; try { Field[] fields = getClass().getFields(); List<Construct> subconf = new ArrayList<Construct>(); for( int i = 0; i < fields.length; i++ ) { field = fields[i]; field.setAccessible(true); Class clazz = field.getType(); if (!Construct.class.isAssignableFrom(clazz)) continue; fname = field.getName(); fctor = clazz.getConstructors()[0]; fctor.setAccessible(true); Construct inst; Object enclosingInst; switch (fctor.getParameterTypes().length) { // TODO should check that the first instance is of the right type: enclosing type or String case 2: // inner classes try{ // static class case enclosingInst = getClass().getDeclaredField("this$0").get(this); } catch( NoSuchFieldException nsfe ){ // private nested class case enclosingInst = this; } inst = (Construct) fctor.newInstance(enclosingInst, fname); break; case 1: if( String.class.isAssignableFrom( fctor.getParameterTypes()[0] )){ inst = (Construct) fctor.newInstance(fname); } else { // no arguments constructor try{ // static class case enclosingInst = getClass().getDeclaredField("this$0").get(this); } catch( NoSuchFieldException nsfe ){ // private nested class case enclosingInst = this; } inst = (Construct) fctor.newInstance(enclosingInst); // now call name setter with fname inst.setName(fname); } break; case 0: inst = (Construct) fctor.newInstance(); break; default: throw new Exception("No default case: " + fctor); } field.set(this, inst); subconf.add(inst); } subcons = new Construct[subconf.size()]; subcons = subconf.toArray(subcons); _inherit_flags(subcons); _clear_flag(FLAG_EMBED); } catch (Exception e) { throw new 
RuntimeException("Error constructing field " + field + "\r\n" + e.toString(), e); } } public Struct() { this((String) null); } @Override public Object _parse(ByteBufferWrapper stream, Container context) { Container obj; if (context.contains("<obj>")) { obj = context.get("<obj>"); context.del("<obj>"); } else { obj = new Container(); if (nested) { context = Container("_", context); } } for (Construct sc : subcons) { if ((sc.conflags & FLAG_EMBED) != 0) { context.set("<obj>", obj); Object val = sc._parse(stream, context); sc.set( val ); } else { Object val = sc._parse(stream, context); sc.set( val ); if (sc.name != null) { obj.set(sc.name, val); context.set(sc.name, val); // System.out.println( " (" + sc.name + ") = " + val ); } } } return obj; } @Override public void _build(Object obj, ByteArrayOutputStream stream, Container context) { if (context.contains("<unnested>")) { context.del("<unnested>"); } else if (nested) { context = Container("_", context); } for (Construct sc : subcons) { Object subobj; if ((sc.conflags & FLAG_EMBED) != 0) { context.set("<unnested>", true); subobj = obj; } else if (sc.name == null) { subobj = null; } else if (obj instanceof Container) { Container container = (Container) obj; subobj = container.get(sc.name); if (subobj == null) throw new FieldError("No field found: " + sc.name + " in " + subobj); context.set(sc.name, subobj); } else continue; sc._build(subobj, stream, context); } } @Override public int _sizeof(Container context) { int sum = 0; // if( nested ) // context = Container( "_", context ); for (Construct sc : subcons) { sum += sc._sizeof(context); } return sum; } } /** * @param name * the name of the structure * @param subcons * a sequence of subconstructs that make up this structure. * @param nested * : a keyword-only argument that indicates whether this struct * creates a nested context. The default is True. This parameter is * considered "advanced usage", and may be removed in the future. 
* @return A sequence of unnamed constructs. The elements are parsed and built
 *         in the order they are defined. See also Embedded. Example:
 *         Sequence("foo", UBInt8("first_element"), UBInt16("second_element"),
 *         Padding(2), UBInt8("third_element"), )
 */
public static Sequence Sequence(String name, Construct... subcons) {
  return new Sequence(name, subcons);
}

/**
 * A Struct whose parse result is a list of the elements in definition order
 * rather than a name-to-value container; unnamed elements are skipped.
 */
public static class Sequence extends Struct {

  public Sequence(String name, Construct... subcons) {
    super(name, subcons);
  }

  @Override
  public Object _parse(ByteBufferWrapper stream, Container context) {
    List obj;
    if (context.contains("<obj>")) {
      // embedded: append into the enclosing sequence's list
      obj = context.get("<obj>");
      context.del("<obj>");
    } else {
      obj = ListContainer();
      if (nested) {
        context = Container("_", context);
      }
    }
    for (Construct sc : subcons) {
      if ((sc.conflags & FLAG_EMBED) != 0) {
        context.set("<obj>", obj);
        sc._parse(stream, context);
      } else {
        Object subobj = sc._parse(stream, context);
        if (sc.name != null) {
          obj.add(subobj);
          context.set(sc.name, subobj);
        }
      }
    }
    return obj;
  }

  @Override
  public void _build(Object obj, ByteArrayOutputStream stream, Container context) {
    if (context.contains("<unnested>")) {
      context.del("<unnested>");
    } else if (nested) {
      context = Container("_", context);
    }
    Object subobj;
    ListIterator objiter;
    if (obj instanceof List)
      objiter = ((List) obj).listIterator();
    else
      // embedded case: the parent passes its iterator straight through
      objiter = (ListIterator) obj;
    for (Construct sc : subcons) {
      if ((sc.conflags & FLAG_EMBED) != 0) {
        context.set("<unnested>", true);
        subobj = objiter;
      } else if (sc.name == null) {
        subobj = null;
      } else {
        subobj = objiter.next();
        context.set(sc.name, subobj);
      }
      sc._build(subobj, stream, context);
    }
  }
}

/** Sentinel used by Switch when no default case is supplied: always raises. */
public static Construct NoDefault = new Construct(null) {
  @Override
  public Object _parse(ByteBufferWrapper stream, Container context) {
    throw new SwitchError("no default case defined");
  }

  @Override
  public void _build(Object obj, ByteArrayOutputStream stream,
      com.sirtrack.construct.lib.Containers.Container context) {
    throw new SwitchError("no default case defined");
  }

  @Override
  public int _sizeof(com.sirtrack.construct.lib.Containers.Container context) {
    throw new SwitchError("no default case defined");
  }
};

/**
 * a function that takes the context and returns a key
 */
public abstract static class KeyFunc {

  public final String key;

  public KeyFunc(String key) {
    this.key = key;
  }

  public KeyFunc() {
    this.key = null;
  }

  public String key() {
    return key;
  }

  public abstract Object get(Container context);
}

/**
 * @param key a context key
 * @param val a value
 * @return A KeyFunc that evaluates ctx.get(key).equals(val)
 */
public static KeyFunc Equals(final String key, final Object val) {
  return new KeyFunc(key) {
    public Object get(Container ctx) {
      return ctx.get(key).equals(val);
    };
  };
}

/**
 * @param key a context key
 * @return ctx.get(key)
 */
public static KeyFunc KeyVal(final String key) {
  return new KeyFunc(key) {
    public Object get(Container ctx) {
      return ctx.get(key);
    };
  };
}

/**
 * A conditional branch. Switch will choose the case to follow based on the
 * return value of keyfunc. If no case is matched, and no default value is
 * given, SwitchError will be raised. See also Pass. Example: Struct("foo",
 * UBInt8("type"), Switch("value", lambda ctx: ctx.type, { 1 : UBInt8("spam"),
 * 2 : UBInt16("spam"), 3 : UBInt32("spam"), 4 : UBInt64("spam"), } ), )
 *
 * @param name the name of the construct
 * @param keyfunc a function that takes the context and returns a key, which
 *          will be used to choose the relevant case.
 * @param cases a dictionary mapping keys to constructs. the keys can be any
 *          values that may be returned by keyfunc.
 */
public static Switch Switch(String name, KeyFunc keyfunc, Object... cases) {
  return new Switch(name, keyfunc, Container(cases));
}

/**
 * A conditional branch. Switch will choose the case to follow based on the
 * return value of keyfunc. If no case is matched, and no default value is
 * given, SwitchError will be raised. See also Pass.
Example: Struct("foo", * UBInt8("type"), Switch("value", lambda ctx: ctx.type, { 1 : UBInt8("spam"), * 2 : UBInt16("spam"), 3 : UBInt32("spam"), 4 : UBInt64("spam"), } ), ) * * @param name * the name of the construct * @param keyfunc * a function that takes the context and returns a key, which will ne * used to choose the relevant case. * @param cases * a dictionary mapping keys to constructs. the keys can be any * values that may be returned by keyfunc. * @param defaultval * a default value to use when the key is not found in the cases. if * not supplied, an exception will be raised when the key is not * found. You can use the builtin construct Pass for 'do-nothing'. * @param include_key * whether or not to include the key in the return value of parsing. * defualt is False. */ public static Switch Switch(String name, KeyFunc keyfunc, Container cases, Construct defaultval, boolean include_key) { return new Switch(name, keyfunc, cases, defaultval, include_key); } /** * A conditional branch. Switch will choose the case to follow based on the * return value of keyfunc. If no case is matched, and no default value is * given, SwitchError will be raised. See also Pass. Example: Struct("foo", * UBInt8("type"), Switch("value", lambda ctx: ctx.type, { 1 : UBInt8("spam"), * 2 : UBInt16("spam"), 3 : UBInt32("spam"), 4 : UBInt64("spam"), } ), ) */ public static class Switch extends Construct { /** * a function that takes the context and returns a key, which will ne used * to choose the relevant case. */ public KeyFunc keyfunc; public Container cases; public Construct defaultval; public boolean include_key; /** * @param name * the name of the construct * @param keyfunc * a function that takes the context and returns a key, which will * ne used to choose the relevant case. * @param cases * a dictionary mapping keys to constructs. the keys can be any * values that may be returned by keyfunc. * @param defaultval * a default value to use when the key is not found in the cases. 
* if not supplied, an exception will be raised when the key is not * found. You can use the builtin construct Pass for 'do-nothing'. * @param include_key * whether or not to include the key in the return value of * parsing. defualt is False. */ public Switch(String name, KeyFunc keyfunc, Container cases, Construct defaultval, boolean include_key) { super(name); this.keyfunc = keyfunc; this.cases = cases; this.defaultval = defaultval; this.include_key = include_key; Construct[] ca = cases.values(Construct.class); this._inherit_flags(ca); this._set_flag(FLAG_DYNAMIC); } public Switch(String name, KeyFunc keyfunc, Container cases) { this(name, keyfunc, cases, NoDefault, false); } @Override public Object get() { return val; } @Override public void set(Object val) { // do nothing: prevent Structs from setting val to the parsed value // keep the Switch case construct as a value //this.val = val; } @Override public Switch clone() throws CloneNotSupportedException { Switch c = (Switch) super.clone(); c.cases = cases.clone(); // TODO check deep copy c.defaultval = defaultval.clone(); return c; } @Override public Object _parse(ByteBufferWrapper stream, Container context) { Object key = keyfunc.get(context); /* assign the case Construct as a value for Switch * users can then retrieve the case Construct with get()*/ val = cases.get(key, defaultval); Object res = ((Construct)val)._parse(stream, context); if (include_key) res = Container(key, res); return res; } @Override public void _build(Object obj, ByteArrayOutputStream stream, Container context) { Object key; if (include_key) { List list = (List) obj; key = list.get(0); obj = list.get(1); } else { key = keyfunc.get(context); } Construct casestruct = cases.get(key, defaultval); casestruct._build(obj, stream, context); /* * if self.include_key: key, obj = obj else: key = self.keyfunc(context) * case = self.cases.get(key, self.default) case._build(obj, stream, * context) */ } @Override public int _sizeof(Container context) { 
Construct casestruct = cases.get(keyfunc.get(context), defaultval); return casestruct._sizeof(context); } } /** * Creates an in-memory buffered stream, which can undergo encoding and * decoding prior to being passed on to the subconstruct. See also Bitwise. * * Note: Do not use pointers inside Buffered * * Example: Buffered(BitField("foo", 16), encoder = decode_bin, decoder = * encode_bin, resizer = lambda size: size / 8, ) */ static public class Buffered<T extends Construct> extends Subconstruct<T> { public Encoder encoder; public Decoder decoder; public Resizer resizer; /** * Creates an in-memory buffered stream, which can undergo encoding and * decoding prior to being passed on to the subconstruct. See also Bitwise.<br/> * <br/> * Note: Do not use pointers inside Buffered * * @param subcon * the subcon which will operate on the buffer * @param encoder * a function that takes a string and returns an encoded string * (used after building) * @param decoder * a function that takes a string and returns a decoded string * (used before parsing) * @param resizer * a function that takes the size of the subcon and "adjusts" or * "resizes" it according to the encoding/decoding process. 
*/ public Buffered(T subcon, Encoder encoder, Decoder decoder, Resizer resizer) { super(subcon); this.encoder = encoder; this.decoder = decoder; this.resizer = resizer; } // @Override // public T get(){ // return subcon; // @Override // public void set( Object val ){ // subcon.set(val); @Override public Object _parse(ByteBufferWrapper stream, Container context) { Boolean debug = context.get("debug"); byte[] data = _read_stream(stream, _sizeof(context)); if( debug != null && debug==true) { for( byte b : data ){ System.out.print( String.format("%02x ", b )); } System.out.print( ": " ); } byte[] stream2 = decoder.decode(data); // if( debug ){ // System.out.print( Arrays.toString(stream2) + ": "); Object val = subcon._parse(new ByteBufferWrapper().wrap(stream2), context); if( debug ){ System.out.println(val); } return val; } @Override public void _build(Object obj, ByteArrayOutputStream stream, Container context) { int size = _sizeof(context); ByteArrayOutputStream stream2 = new ByteArrayOutputStream(); subcon._build(obj, stream2, context); byte[] data = encoder.encode(stream2.toString()); if (data.length != size) throw new RuntimeException("Wrong data length: " + data.length); _write_stream(stream, size, data); } @Override public int _sizeof(Container context) { return resizer.resize( subcon._sizeof(context)); } } /** * Wraps the stream with a read-wrapper (for parsing) or a write-wrapper (for * building). The stream wrapper can buffer the data internally, reading it * from- or writing it to the underlying stream as needed. For example, * BitByteBufferWrapper reads whole bytes from the underlying stream, but * returns them as individual bits. See also Bitwise. * * When the parsing or building is done, the stream's close method will be * invoked. It can perform any finalization needed for the stream wrapper, but * it must not close the underlying stream. 
* * Note: Do not use pointers inside Restream * * Example: Restream(BitField("foo", 16), stream_reader = * BitByteBufferWrapper, stream_writer = BitStreamWriter, resizer = lambda * size: size / 8, ) */ public static class Restream extends Subconstruct { BitStreamReader stream_reader; BitStreamWriter stream_writer; Resizer resizer; /** * Wraps the stream with a read-wrapper (for parsing) or a write-wrapper * (for building). The stream wrapper can buffer the data internally, * reading it from- or writing it to the underlying stream as needed. For * example, BitByteBufferWrapper reads whole bytes from the underlying * stream, but returns them as individual bits. See also Bitwise.<br/> * <br/> * When the parsing or building is done, the stream's close method will be * invoked. It can perform any finalization needed for the stream wrapper, * but it must not close the underlying stream.<br/> * <br/> * Note: Do not use pointers inside Restream * * @param subcon * the subcon * @param stream_reader * the read-wrapper * @param stream_writer * the write wrapper * @param resizer * a function that takes the size of the subcon and "adjusts" or * "resizes" it according to the encoding/decoding process. 
*/ public Restream(Construct subcon, BitStreamReader stream_reader, BitStreamWriter stream_writer, Resizer resizer) { super(subcon); this.stream_reader = stream_reader; this.stream_writer = stream_writer; this.resizer = resizer; } @Override public Object _parse(ByteBufferWrapper stream, Container context) { stream_reader.init(stream); Object obj = subcon._parse(stream_reader, context); stream_reader.close(); return obj; } @Override public void _build(Object obj, ByteArrayOutputStream stream, Container context) { ByteArrayOutputStream stream2 = stream_writer.init(stream); subcon._build(obj, stream2, context); stream_writer.close(); } @Override public int _sizeof(Container context) { return resizer.resize(subcon._sizeof(context)); } } * class Pointer(Subconstruct): """ Changes the stream position to a given * offset, where the construction should take place, and restores the stream * position when finished. See also Anchor, OnDemand and OnDemandPointer. * * Notes: requires a seekable stream. * * Parameters: offsetfunc: a function that takes the context and returns an * absolute stream position, where the construction would take place subcon - * the subcon to use at `offsetfunc()` * * Example: Struct("foo", UBInt32("spam_pointer"), Pointer(lambda ctx: * ctx.spam_pointer, Array(5, UBInt8("spam")) ) ) """ __slots__ = * class Peek(Subconstruct): """ Peeks at the stream: parses without changing * the stream position. See also Union. If the end of the stream is reached * when peeking, returns None. * * Notes: requires a seekable stream. * * Parameters: subcon - the subcon to peek at perform_build - whether or not * to perform building. by default this parameter is set to False, meaning * building is a no-op. * * Example: Peek(UBInt8("foo")) """ __slots__ = ["perform_build"] def * class OnDemand(Subconstruct): """ Allows for on-demand (lazy) parsing. 
When * parsing, it will return a LazyContainer that represents a pointer to the * data, but does not actually parses it from stream until it's "demanded". By * accessing the 'value' property of LazyContainers, you will demand the data * from the stream. The data will be parsed and cached for later use. You can * use the 'has_value' property to know whether the data has already been * demanded. See also OnDemandPointer. * * Notes: requires a seekable stream. * * Parameters: subcon - advance_stream - whether or not to advance the stream * position. by default this is True, but if subcon is a pointer, this should * be False. force_build - whether or not to force build. If set to False, and * the LazyContainer has not been demaned, building is a no-op. * * Example: OnDemand(Array(10000, UBInt8("foo")) """ __slots__ = /* * ["offsetfunc"] def __init__(self, offsetfunc, subcon): * Subconstruct.__init__(self, subcon) self.offsetfunc = offsetfunc def * _parse(self, stream, context): newpos = self.offsetfunc(context) origpos = * stream.tell() stream.seek(newpos) obj = self.subcon._parse(stream, context) * stream.seek(origpos) return obj def _build(self, obj, stream, context): * newpos = self.offsetfunc(context) origpos = stream.tell() * stream.seek(newpos) self.subcon._build(obj, stream, context) * stream.seek(origpos) def _sizeof(self, context): return 0 * * __init__(self, subcon, perform_build = False): Subconstruct.__init__(self, * subcon) self.perform_build = perform_build def _parse(self, stream, * context): pos = stream.tell() try: return self.subcon._parse(stream, * context) except FieldError: pass finally: stream.seek(pos) def _build(self, * obj, stream, context): if self.perform_build: self.subcon._build(obj, * stream, context) def _sizeof(self, context): return 0 * * ["advance_stream", "force_build"] def __init__(self, subcon, advance_stream * = True, force_build = True): Subconstruct.__init__(self, subcon) * self.advance_stream = advance_stream self.force_build = 
force_build def * _parse(self, stream, context): obj = LazyContainer(self.subcon, stream, * stream.tell(), context) if self.advance_stream: * stream.seek(self.subcon._sizeof(context), 1) return obj def _build(self, * obj, stream, context): if not isinstance(obj, LazyContainer): * self.subcon._build(obj, stream, context) elif self.force_build or * obj.has_value: self.subcon._build(obj.value, stream, context) elif * self.advance_stream: stream.seek(self.subcon._sizeof(context), 1) */ /** * @param name * the new name * @param subcon * the subcon to reconfigure * @param setflags * the flags to set (default is 0) * @param clearflags * the flags to clear (default is 0) */ static public Reconfig Reconfig(String name, Construct subcon) { return new Reconfig(name, subcon); } /** * @param name * the new name * @param subcon * the subcon to reconfigure * @param setflags * the flags to set (default is 0) * @param clearflags * the flags to clear (default is 0) */ static public Reconfig Reconfig(String name, Construct subcon, int setflags, int clearflags) { return new Reconfig(name, subcon, setflags, clearflags); } /** * Reconfigures a subconstruct. Reconfig can be used to change the name and * set and clear flags of the inner subcon. 
Example: Reconfig("foo", * UBInt8("bar")) */ static public class Reconfig<T extends Construct> extends Subconstruct<T> { /** * @param name * the new name * @param subcon * the subcon to reconfigure * @param setflags * the flags to set (default is 0) * @param clearflags * the flags to clear (default is 0) */ public Reconfig(String name, T subcon, int setflags, int clearflags) { super(name, subcon); _set_flag(setflags); _clear_flag(clearflags); } public Reconfig(String name, T subcon) { this(name, subcon, 0, 0); } @Override public T get(){ return subcon; } // @Override // public void set( Object val ){ // subcon.set(val); } /** * a function that takes the context and return the computed value */ public static interface ValueFunc<T> { T get(Container ctx); } /** * A computed value. Example: Struct("foo", UBInt8("width"), UBInt8("height"), * Value("total_pixels", lambda ctx: ctx.width * ctx.height), ) * * @param name * the name of the value * @param func * a function that takes the context and return the computed value */ public static <T>Value Value(String name, ValueFunc<T> func) { return new Value<T>(name, func); }; public static class Value<T> extends Construct implements ValueFunc<T> { public ValueFunc<T> func; /** * Us this consstructor if a class extends Value and implements ValueFunc, * in its own constructor it needs to set super.func = this * * @param name */ public Value() { super(); this.func = this; _set_flag(FLAG_DYNAMIC); } /** * @param name * @param func overrides unimplemented ValueFunc<T> at runtime */ public Value(String name, ValueFunc<T> func) { super(name); this.func = func; _set_flag(FLAG_DYNAMIC); } public T get(Container ctx){ throw new RuntimeException("unimplemented"); } @Override public T get() { return (T)val; } @Override public Object _parse(ByteBufferWrapper stream, com.sirtrack.construct.lib.Containers.Container context) { return func.get(context); } @Override public void _build(Object obj, ByteArrayOutputStream stream, 
com.sirtrack.construct.lib.Containers.Container context) { context.set(name, func.get(context)); } @Override public int _sizeof(com.sirtrack.construct.lib.Containers.Container context) { return 0; } }
package com.wepay.net;

import java.io.*;
import java.net.*;
import java.nio.charset.StandardCharsets;

import javax.net.ssl.HttpsURLConnection;

import org.json.*;

import com.google.gson.*;
import com.wepay.WePay;
import com.wepay.exception.WePayException;
import com.wepay.model.data.deserialization.WepayExclusionStrategy;

/**
 * Base class for WePay API resources: holds the stage/production endpoint
 * configuration and performs authenticated HTTPS POST requests against the
 * WePay v2 API.
 */
public class WePayResource {

    /** Active API endpoint; set by {@link #initializeWePayResource(Boolean)}. */
    public static String apiEndpoint;

    /** Active UI endpoint; set by {@link #initializeWePayResource(Boolean)}. */
    public static String uiEndpoint;

    protected final static String STAGE_API_ENDPOINT = "https://stage.wepayapi.com/v2";
    protected final static String STAGE_UI_ENDPOINT = "https://stage.wepay.com/v2";
    protected final static String PRODUCTION_API_ENDPOINT = "https://wepayapi.com/v2";
    // NOTE(review): the original literal was truncated ("https:) and did not
    // compile; restored by analogy with STAGE_UI_ENDPOINT -- TODO confirm the
    // exact production UI URL against the WePay documentation.
    protected final static String PRODUCTION_UI_ENDPOINT = "https://www.wepay.com/v2";

    /** Shared Gson instance configured for WePay's snake_case JSON payloads. */
    public static final Gson gson = new GsonBuilder()
            .addDeserializationExclusionStrategy(new WepayExclusionStrategy())
            .setPrettyPrinting()
            .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
            .create();

    /**
     * Selects the stage or production endpoints for all subsequent calls.
     *
     * @param useStageEnv true to target the stage environment
     */
    public static void initializeWePayResource(Boolean useStageEnv) {
        if (useStageEnv) {
            apiEndpoint = STAGE_API_ENDPOINT;
            uiEndpoint = STAGE_UI_ENDPOINT;
        } else {
            apiEndpoint = PRODUCTION_API_ENDPOINT;
            uiEndpoint = PRODUCTION_UI_ENDPOINT;
        }
    }

    /**
     * Opens an HTTPS POST connection to {@code apiEndpoint + call} with the
     * standard WePay headers set.
     *
     * @param call        API path (e.g. "/account/find")
     * @param accessToken bearer token, or {@code null} for unauthenticated calls
     * @return the configured, not-yet-connected connection
     * @throws IOException if the connection cannot be opened
     */
    protected static javax.net.ssl.HttpsURLConnection httpsConnect(String call, String accessToken)
            throws IOException {
        URL url = new URL(apiEndpoint + call);
        HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
        connection.setConnectTimeout(30000); // 30 seconds
        connection.setReadTimeout(100000);   // 100 seconds
        connection.setDoOutput(true);
        connection.setDoInput(true);
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Content-Type", "application/json");
        connection.setRequestProperty("Api-Version", "2015-08-15");
        connection.setRequestProperty("User-Agent", "WePay Java SDK v2.0.2");
        if (accessToken != null) {
            connection.setRequestProperty("Authorization", "Bearer " + accessToken);
        }
        return connection;
    }

    /**
     * Sends {@code params} as a JSON POST body and returns the raw response
     * body as a string.
     *
     * @param call        API path
     * @param params      JSON request parameters
     * @param accessToken bearer token, or {@code null}
     * @return the response body for 2xx responses
     * @throws WePayException if the API returns a non-2xx response (parsed
     *                        from the JSON error body)
     * @throws IOException    on transport failure
     */
    public static String request(String call, JSONObject params, String accessToken)
            throws WePayException, IOException {
        HttpsURLConnection connection = httpsConnect(call, accessToken);

        // Write the body as UTF-8 bytes; the previous DataOutputStream.writeBytes
        // call silently dropped the high byte of any non-ASCII character.
        try (OutputStream out = connection.getOutputStream()) {
            out.write(params.toString().getBytes(StandardCharsets.UTF_8));
            out.flush();
        }

        int responseCode = connection.getResponseCode();
        boolean error = responseCode < 200 || responseCode >= 300;
        InputStream is = error ? connection.getErrorStream() : connection.getInputStream();

        StringBuilder response = new StringBuilder();
        if (is != null) { // getErrorStream() may legitimately return null
            try (BufferedReader rd =
                    new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
                String line;
                while ((line = rd.readLine()) != null) {
                    response.append(line);
                }
            }
        }
        String responseString = response.toString();

        if (error) {
            // Error bodies are JSON documents describing the failure.
            throw WePayResource.gson.fromJson(responseString, WePayException.class);
        }
        return responseString;
    }
}
package core.server.session;

import core.Settings;
import core.network.DisconnectReason;

import java.time.Instant;
import java.util.ArrayList;
import java.util.List;

/**
 * {@code Session} contains all information about an active session.
 *
 * @author Thibault Meyer
 * @since 1.0.0
 */
public class Session {

    /**
     * Network information.
     */
    public SessionNetwork network;

    /**
     * Random hash generated at connection.
     */
    public String hash;

    /**
     * The current stage level of this user session.
     */
    public SessionStageLevel stageLevel;

    /**
     * User information.
     */
    public SessionUser user;

    /**
     * Input buffer.
     */
    public List<String> inputBuffer;

    /**
     * Output buffer.
     */
    public List<String> outputBuffer;

    /**
     * Disconnect user with given reason. This variable must stay at null.
     */
    public DisconnectReason disconnectReason;

    /**
     * When the last ping was sent to this session.
     */
    public Instant lastPingSent;

    /**
     * Default constructor.
     */
    public Session() {
        this.network = new SessionNetwork();
        this.user = new SessionUser();
        this.stageLevel = SessionStageLevel.NOT_AUTHENTICATED;
        this.inputBuffer = new ArrayList<>();
        this.outputBuffer = new ArrayList<>();
        this.lastPingSent = Instant.now();
    }

    /**
     * Get the next complete command payload.
     *
     * @return The whitespace-split payload without the "\n" end of line, or
     *         {@code null} when the input buffer yielded no data
     */
    public String[] getNextPayload() {
        // Accumulate fragments in a StringBuilder instead of repeated String
        // concatenation (the buffer may hold many small fragments).
        final StringBuilder payload = new StringBuilder();
        while (!this.inputBuffer.isEmpty()) {
            final String data = this.inputBuffer.remove(0);
            final int eol = data.indexOf('\n');
            if (eol >= 0) {
                payload.append(data, 0, eol);
                if (data.length() != eol + 1) {
                    // Push whatever followed the newline back to the front of
                    // the buffer so the next call sees it first.
                    this.inputBuffer.add(0, data.substring(eol + 1));
                }
                break;
            }
            // NOTE(review): fragments without a terminating newline are still
            // consumed and returned even if no complete line ever arrived --
            // confirm this is the intended contract.
            payload.append(data);
        }
        return payload.length() == 0 ? null : payload.toString().trim().split("\\s+");
    }
}
package de.prob2.ui.menu;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.ResourceBundle;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.inject.Inject;
import com.google.inject.Injector;

import de.codecentric.centerdevice.MenuToolkit;
import de.codecentric.centerdevice.util.StageUtils;

import de.prob2.ui.MainController;
import de.prob2.ui.config.FileChooserManager;
import de.prob2.ui.history.HistoryView;
import de.prob2.ui.internal.StageManager;
import de.prob2.ui.operations.OperationsView;
import de.prob2.ui.persistence.UIState;
import de.prob2.ui.project.ProjectView;
import de.prob2.ui.stats.StatsView;
import de.prob2.ui.verifications.VerificationsView;

import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.control.Alert;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuItem;
import javafx.scene.control.SeparatorMenuItem;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.stage.WindowEvent;

/**
 * The application's "Window" menu: native window-management entries
 * (minimize, zoom, cycle, bring-all-to-front -- only added when a
 * {@link MenuToolkit} is available) plus perspective handling: preset
 * layouts, detached views, and user-supplied FXML perspectives.
 */
public class WindowMenu extends Menu {
	private static final Logger logger = LoggerFactory.getLogger(WindowMenu.class);

	private final Injector injector;
	private final StageManager stageManager;
	private final ResourceBundle bundle;
	private final FileChooserManager fileChooserManager;

	// Injected from windowMenu.fxml; hidden/disabled until
	// enablePerspectivesAndDetatched() is called.
	@FXML
	private Menu presetPerspectivesMenu;
	@FXML
	private MenuItem detatchedMenuItem;

	/**
	 * Builds the menu from windowMenu.fxml and, when a native menu toolkit is
	 * present, adds the standard window-management items.
	 *
	 * @param menuToolkit native menu toolkit; presumably only bound when a
	 *                    native (macOS-style) menu bar is in use, hence the
	 *                    null check below -- NOTE(review): confirm the binding.
	 */
	@Inject
	private WindowMenu(final StageManager stageManager, final Injector injector, final ResourceBundle bundle,
			final FileChooserManager fileChooserManager, MenuToolkit menuToolkit) {
		this.injector = injector;
		this.stageManager = stageManager;
		this.bundle = bundle;
		this.fileChooserManager = fileChooserManager;
		stageManager.loadFXML(this, "windowMenu.fxml");
		if (menuToolkit != null) {
			// Custom zoom behavior: toggle the focused stage between maximized
			// and its preferred scene size, re-centering when restoring.
			MenuItem zoomMenuItem = menuToolkit.createZoomMenuItem();
			zoomMenuItem.setOnAction(event -> StageUtils.getFocusedStage().ifPresent(stage -> {
				if(!stage.isMaximized()) {
					stage.setMaximized(true);
				} else {
					stage.sizeToScene();
					stage.setMaximized(false);
					stage.centerOnScreen();
				}
			}));
			// Window-management items go at the top of the menu; the
			// bring-all-to-front item and the auto-managed list of open
			// windows go at the bottom.
			this.getItems().addAll(0, Arrays.asList(menuToolkit.createMinimizeMenuItem(), zoomMenuItem,
					menuToolkit.createCycleWindowsItem(), new SeparatorMenuItem()));
			this.getItems().addAll(new SeparatorMenuItem(), menuToolkit.createBringAllToFrontItem(),
					new SeparatorMenuItem());
			menuToolkit.autoAddWindowMenuItems(this);
		}
	}

	/** Closes the currently focused stage by firing a close request on it. */
	@FXML
	private void handleCloseWindow() {
		final Stage stage = this.stageManager.getCurrent();
		if (stage != null) {
			stage.fireEvent(new WindowEvent(stage, WindowEvent.WINDOW_CLOSE_REQUEST));
		}
	}

	/** Switches to the default preset perspective. */
	@FXML
	private void handleLoadDefault() {
		reset();
		loadPreset("main.fxml");
	}

	/** Switches to the preset perspective with a separated history view. */
	@FXML
	private void handleLoadSeparated() {
		reset();
		loadPreset("separatedHistory.fxml");
	}

	/** Switches to the preset with separated history and statistics views. */
	@FXML
	private void handleLoadSeparated2() {
		reset();
		loadPreset("separatedHistoryAndStatistics.fxml");
	}

	/** Opens the dialog for detaching individual views into own windows. */
	@FXML
	private void handleLoadDetached() {
		injector.getInstance(DetachViewStageController.class).showAndWait();
	}

	/**
	 * Lets the user pick a custom perspective FXML file and installs it as
	 * the main scene root. Load failures are logged and shown in an alert.
	 */
	@FXML
	private void handleLoadPerspective() {
		FileChooser fileChooser = new FileChooser();
		fileChooser.setTitle(bundle.getString("common.fileChooser.open.title"));
		fileChooser.getExtensionFilters().addAll(
				new FileChooser.ExtensionFilter(bundle.getString("common.fileChooser.fileTypes.fxml"), "*.fxml"));
		File selectedFile = fileChooserManager.showOpenDialog(fileChooser, FileChooserManager.Kind.PERSPECTIVES,
				stageManager.getMainStage());
		if (selectedFile != null) {
			try {
				MainController main = injector.getInstance(MainController.class);
				FXMLLoader loader = injector.getInstance(FXMLLoader.class);
				loader.setLocation(selectedFile.toURI().toURL());
				// Persist the chosen custom perspective so it can be restored
				// on the next start.
				injector.getInstance(UIState.class)
						.setGuiState("custom " + selectedFile.toURI().toURL().toExternalForm());
				reset();
				loader.setRoot(main);
				loader.setController(main);
				Parent root = loader.load();
				stageManager.getMainStage().getScene().setRoot(root);
			} catch (IOException e) {
				logger.error("Loading fxml failed", e);
				stageManager
						.makeAlert(Alert.AlertType.ERROR,
								String.format(bundle.getString("common.menu.view.errors.couldNotOpen"), e))
						.showAndWait();
			}
		}
	}

	/**
	 * Resets the UI state before applying a perspective: clears detached
	 * stages and remembered expanded panes, resets the detach dialog, and
	 * makes all main views visible again.
	 */
	private void reset() {
		injector.getInstance(UIState.class).clearDetachedStages();
		injector.getInstance(UIState.class).getExpandedTitledPanes().clear();
		injector.getInstance(DetachViewStageController.class).resetCheckboxes();
		injector.getInstance(OperationsView.class).setVisible(true);
		injector.getInstance(HistoryView.class).setVisible(true);
		injector.getInstance(StatsView.class).setVisible(true);
		injector.getInstance(VerificationsView.class).setVisible(true);
		injector.getInstance(ProjectView.class).setVisible(true);
	}

	/**
	 * Installs the named preset perspective as the main scene root.
	 *
	 * @param location FXML resource name of the perspective
	 * @return the refreshed main controller that became the new scene root
	 */
	public Parent loadPreset(String location) {
		injector.getInstance(UIState.class).setGuiState(location);
		final MainController root = injector.getInstance(MainController.class);
		root.refresh();
		stageManager.getMainStage().getScene().setRoot(root);
		injector.getInstance(MenuController.class).setMacMenu();
		return root;
	}

	/** Enables and shows the perspectives menu and the detached-view item. */
	public void enablePerspectivesAndDetatched() {
		presetPerspectivesMenu.setDisable(false);
		presetPerspectivesMenu.setVisible(true);
		detatchedMenuItem.setDisable(false);
		detatchedMenuItem.setVisible(true);
	}
}
package de.rennspur.backend;

import java.util.Date;
import java.util.HashSet;
import java.util.List;

import javax.annotation.PostConstruct;
import javax.enterprise.context.RequestScoped;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Query;

import de.rennspur.model.Team;
import de.rennspur.model.TeamMember;
import de.rennspur.model.TeamPosition;

/**
 * Receives, sends and processes data from the database.
 *
 * @author e4_schlender
 */
@RequestScoped
public class Backend {

	/**
	 * All teams known to this backend instance.
	 */
	private HashSet<Team> teams = new HashSet<>();

	@Inject
	private EntityManagerFactory factory;

	public Backend() {
		super();
	}

	@PostConstruct
	public void init() {
	}

	/**
	 * Returns all members of a team.
	 *
	 * @param team ID of the team
	 * @return the members of the team with the given id
	 */
	public List<TeamMember> getMembers(int team) {
		EntityManager em = factory.createEntityManager();
		try {
			// NOTE(review): the query selects rows from "teams" but the result
			// is treated as TeamMember entities -- this looks inconsistent;
			// confirm the intended table/result mapping before relying on it.
			Query query = em.createNativeQuery("select * from teams where id=:id");
			query.setParameter("id", team);
			@SuppressWarnings("unchecked")
			List<TeamMember> members = query.getResultList();
			return members;
		} finally {
			// EntityManagers are not closed automatically; close to avoid
			// leaking the underlying connection.
			em.close();
		}
	}

	/**
	 * Returns a specific amount of the latest positions of a team.
	 *
	 * @param teamid         ID of the wanted team
	 * @param positionsCount how many of the latest positions to return
	 * @return the latest positions; currently unimplemented and always null
	 */
	public List<TeamPosition> getLatestMemberPositions(int teamid, Integer positionsCount) {
		// TODO: implement the query; this stub always returns null.
		List<TeamPosition> positions = null;
		return positions;
	}

	/**
	 * Saves a new position into the database.
	 *
	 * @param pos  the position
	 * @param key  token of the team
	 * @param date date of the position
	 */
	private void saveGPSPosition(TeamPosition pos, String key, Date date) {
		// TODO: not yet implemented.
	}

	/**
	 * Waits for new POST requests from the GPS.
	 */
	public void getPost() {
		// TODO: not yet implemented.
	}

	/**
	 * Sets the race number.
	 *
	 * @param raceNumber the chosen race number
	 */
	public void setRaceNumber(int raceNumber) {
		// TODO: not yet implemented.
	}

	/**
	 * Returns the known teams.
	 *
	 * @return the teams
	 */
	public HashSet<Team> getTeams() {
		return this.teams;
	}
}
package dk.itu.kelvin.model;

// General utilities
import java.util.Comparator;
import java.util.Map;

// I/O utilities
import java.io.Serializable;

// JavaFX scene utilities
import javafx.scene.Node;

// Utilities
import dk.itu.kelvin.util.StringPool;

// Fast utils
import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;

/**
 * Base class for renderable map elements carrying OSM-style key/value tags.
 *
 * @param <T> The JavaFX node type produced by {@link #render()}.
 */
public abstract class Element<T extends Node> implements Serializable {
  /**
   * UID for identifying serialized objects.
   */
  private static final long serialVersionUID = 42;

  /**
   * Internal string pool for elements.
   *
   * <p>
   * Tag keys and values are interned through this pool so equal strings are
   * shared between elements.
   */
  private static final StringPool STRING_POOL = new StringPool();

  /**
   * Comparator for comparing the drawing order and layer of two elements.
   */
  public static final Comparator<Element> COMPARATOR = Element::compare;

  /**
   * The initial capacity of the tables containing the element tags.
   *
   * <p>
   * The default initial capacity of hash tables is 16 slots. We rarely need
   * that many slots for tags so we lower the initial capacity substantially.
   */
  private static final int INITIAL_TAG_CAPACITY = 2;

  /**
   * A map of tags associated with the element.
   *
   * <p>
   * The map is initialized on-demand when first accessed to avoid allocating
   * memory to empty maps.
   */
  private Map<String, String> tags;

  /**
   * Add a tag to the element.
   *
   * <p>
   * Both key and value are trimmed before storage; null, empty, or
   * whitespace-only keys/values are rejected.
   *
   * @param key The key of the tag.
   * @param value The value of the tag.
   * @return The previous value of the key, if any.
   */
  public final String tag(final String key, final String value) {
    if (key == null || value == null) {
      return null;
    }

    String k = key.trim();
    String v = value.trim();

    if (k.isEmpty() || v.isEmpty()) {
      return null;
    }

    if (this.tags == null) {
      this.tags = new Object2ObjectOpenHashMap<>(INITIAL_TAG_CAPACITY);
    }

    return this.tags.put(STRING_POOL.get(k), STRING_POOL.get(v));
  }

  /**
   * Get the value of the specified tag.
   *
   * @param key The key of the tag to get.
   * @return The value of the specified tag, or null if absent.
   */
  public final String tag(final String key) {
    if (key == null || this.tags == null) {
      return null;
    }

    String k = key.trim();

    if (k.isEmpty()) {
      return null;
    }

    // Look up the trimmed key: tags are stored with trimmed keys, so the
    // original lookup with the raw key missed entries whenever the caller's
    // key carried surrounding whitespace.
    return this.tags.get(k);
  }

  /**
   * Get a map of tags for the element.
   *
   * @return A map of tags for the element.
   */
  public final Map<String, String> tags() {
    if (this.tags == null) {
      this.tags = new Object2ObjectOpenHashMap<>(INITIAL_TAG_CAPACITY);
    }

    return this.tags;
  }

  /**
   * Get the drawing order of the element.
   *
   * <p>
   * The order is derived from the element's tags: land below natural below
   * landuse below waterways and places, with highways layered by road class
   * on top. Elements without any recognized tag get order 0.
   *
   * @return The drawing order of the element.
   */
  public final int order() {
    String v;

    if ((v = this.tag("land")) != null) {
      switch (v) {
        default: return -1;
      }
    }

    if ((v = this.tag("natural")) != null) {
      switch (v) {
        default: return 1;
      }
    }

    if ((v = this.tag("landuse")) != null) {
      switch (v) {
        case "military": return 8;
        default: return 2;
      }
    }

    if ((v = this.tag("waterway")) != null) {
      switch (v) {
        default: return 3;
      }
    }

    if ((v = this.tag("place")) != null) {
      switch (v) {
        case "island": return 4;
        default: return 5;
      }
    }

    if ((v = this.tag("leisure")) != null) {
      switch (v) {
        default: return 6;
      }
    }

    if ((v = this.tag("building")) != null) {
      switch (v) {
        default: return 7;
      }
    }

    if ((v = this.tag("highway")) != null) {
      // Road classes are ordered so that more important roads draw on top.
      switch (v) {
        case "path":
        case "bridleway":
        case "footway":
        case "cycleway":
        case "steps":
        case "track": return 9;
        case "unclassified": return 11;
        case "living_street":
        case "road":
        case "pedestrian": return 12;
        case "service": return 13;
        case "residential": return 14;
        case "tertiary": return 15;
        case "secondary": return 16;
        case "primary": return 17;
        case "trunk": return 18;
        case "motorway": return 19;
        default: return 10;
      }
    }

    return 0;
  }

  /**
   * Get a JavaFX representation of the element.
   *
   * <p>
   * This method can be called from the JavaFX thread whenever it wants to
   * draw the element.
   *
   * @return A JavaFX representation of the element.
   */
  public abstract T render();

  /**
   * Compare two elements taking into account their drawing order and layer.
   *
   * <p>
   * Null elements sort first. Elements are compared by their "layer" tag
   * first and by {@link #order()} within the same layer.
   *
   * @param a The first element.
   * @param b The second element.
   * @return A negative integer, zero, or a positive integer as the first
   *         element is less than, equal to, or greater than the second
   *         element.
   */
  public static final int compare(final Element a, final Element b) {
    if (a == b) {
      return 0;
    }

    if (a == null) {
      return -1;
    }

    if (b == null) {
      return 1;
    }

    String als = a.tag("layer");
    String bls = b.tag("layer");

    // NOTE(review): a non-numeric "layer" tag value would make parseInt throw
    // NumberFormatException here -- confirm upstream data guarantees numeric
    // layers before hardening.
    int al = als != null ? Integer.parseInt(als) : 0;
    int bl = bls != null ? Integer.parseInt(bls) : 0;

    if (al == bl) {
      return Integer.compare(a.order(), b.order());
    }
    else {
      return Integer.compare(al, bl);
    }
  }
}
package fr.ritaly.svngraph;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;

import org.apache.commons.lang.StringUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

import fr.ritaly.graphml4j.EdgeStyle;
import fr.ritaly.graphml4j.GraphMLWriter;
import fr.ritaly.graphml4j.NodeStyle;

/**
 * Reads an SVN log dump ({@code svn log --xml --verbose}) and renders the
 * branch/copy history as a GraphML graph: one node per (branch, revision)
 * pair involved in a copy, one edge per copy plus chaining edges between
 * successive revisions of the same branch.
 */
public class SvnGraph {

    /**
     * Entry point.
     *
     * @param args {@code <input-file> <output-file>}: the SVN XML log to
     *             parse and the GraphML file to produce.
     * @throws Exception on any parse or I/O failure (command-line tool,
     *                   failures are fatal by design).
     */
    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            System.out.println(String.format("%s <input-file> <output-file>", SvnGraph.class.getSimpleName()));
            System.exit(1);
        }

        final File input = new File(args[0]);
        if (!input.exists()) {
            throw new IllegalArgumentException(String.format("The given file '%s' doesn't exist", input.getAbsolutePath()));
        }

        final File output = new File(args[1]);

        // NOTE(review): the XML comes from a local svn dump; if this ever
        // parses untrusted input, external entities should be disabled on
        // the DocumentBuilderFactory.
        final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(input);

        final XPath xpath = XPathFactory.newInstance().newXPath();

        NodeList nodes = (NodeList) xpath.evaluate("/log/logentry", document.getDocumentElement(), XPathConstants.NODESET);

        final List<Revision> revisions = new ArrayList<>();
        for (int i = 0; i < nodes.getLength(); i++) {
            revisions.add(new Revision((Element) nodes.item(i)));
        }

        System.out.println(String.format("Parsed %d revisions", revisions.size()));

        int count = 0;

        Writer fileWriter = null;
        GraphMLWriter graphWriter = null;

        try {
            // Bug fix: the original used new FileWriter(output), which writes
            // in the platform default charset; GraphML is XML and must be
            // written as UTF-8 to be portable.
            fileWriter = new OutputStreamWriter(new FileOutputStream(output), StandardCharsets.UTF_8);
            graphWriter = new GraphMLWriter(fileWriter);

            final NodeStyle nodeStyle = graphWriter.getNodeStyle();
            nodeStyle.setWidth(250.0f);
            graphWriter.setNodeStyle(nodeStyle);

            graphWriter.graph();

            // Map associating node labels to their corresponding node id in the graph
            final Map<String, String> nodeIdsPerLabel = new TreeMap<>();

            for (Revision revision : revisions) {
                if (revision.isSignificant()) {
                    System.out.println(revision.getNumber() + " - " + revision.getMessage());

                    // TODO Render also the deletion of branches

                    // there should be only 1 significant update per revision (the one with action ADD)
                    for (Update update : revision.getSignificantUpdates()) {
                        if (update.isCopy()) {
                            final RevisionPath source = update.getCopySource();

                            System.out.println(String.format("  > %s %s from %s@%d", update.getAction(), update.getPath(),
                                    source.getPath(), source.getRevision()));

                            final String sourceLabel = Utils.getRootName(source.getPath()) + "@" + source.getRevision();

                            // create a node for the source (path, revision)
                            final String sourceId;

                            if (nodeIdsPerLabel.containsKey(sourceLabel)) {
                                // retrieve the id of the existing node
                                sourceId = nodeIdsPerLabel.get(sourceLabel);
                            } else {
                                // create the new node
                                sourceId = graphWriter.node(sourceLabel);

                                nodeIdsPerLabel.put(sourceLabel, sourceId);
                            }

                            // and another for the newly created directory
                            final String targetLabel = Utils.getRootName(update.getPath()) + "@" + revision.getNumber();
                            final String targetId = graphWriter.node(targetLabel);

                            nodeIdsPerLabel.put(targetLabel, targetId);

                            // create an edge between the 2 nodes
                            graphWriter.edge(sourceId, targetId);
                        } else {
                            System.out.println(String.format("  > %s %s", update.getAction(), update.getPath()));
                        }
                    }

                    System.out.println();

                    count++;
                }
            }

            // Dispatch the revisions per corresponding branch
            final Map<String, Set<Long>> revisionsPerBranch = new TreeMap<>();

            for (String nodeLabel : nodeIdsPerLabel.keySet()) {
                if (nodeLabel.contains("@")) {
                    final String branchName = StringUtils.substringBefore(nodeLabel, "@");
                    final long revision = Long.parseLong(StringUtils.substringAfter(nodeLabel, "@"));

                    if (!revisionsPerBranch.containsKey(branchName)) {
                        revisionsPerBranch.put(branchName, new TreeSet<Long>());
                    }

                    revisionsPerBranch.get(branchName).add(revision);
                } else {
                    // Every node label is built as "<branch>@<revision>" above,
                    // so anything else indicates a programming error.
                    throw new IllegalStateException(nodeLabel);
                }
            }

            // Recreate the missing edges between revisions from a same branch
            for (String branchName : revisionsPerBranch.keySet()) {
                // TreeSet iteration order is ascending, so consecutive entries
                // are consecutive revisions of the branch.
                final List<Long> branchRevisions = new ArrayList<>(revisionsPerBranch.get(branchName));

                for (int i = 0; i < branchRevisions.size() - 1; i++) {
                    final String nodeLabel1 = String.format("%s@%d", branchName, branchRevisions.get(i));
                    final String nodeLabel2 = String.format("%s@%d", branchName, branchRevisions.get(i + 1));

                    graphWriter.edge(nodeIdsPerLabel.get(nodeLabel1), nodeIdsPerLabel.get(nodeLabel2));
                }
            }

            graphWriter.closeGraph();

            System.out.println(String.format("Found %d significant revisions", count));
        } finally {
            if (graphWriter != null) {
                graphWriter.close();
            }
            if (fileWriter != null) {
                fileWriter.close();
            }
        }

        System.out.println("Done");
    }
}
package hudson.remoting;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

import edu.umd.cs.findbugs.annotations.CheckForNull;

/**
 * Restricts what classes can be received through remoting.
 *
 * @author Kohsuke Kawaguchi
 * @since 2.53
 */
public abstract class ClassFilter {
    /**
     * Property to set to <b>override</b> the blacklist used by {@link #DEFAULT} with a different set.
     * The location should point to a file containing regular expressions (one per line) of classes to blacklist.
     * If this property is set but the file can not be read the default blacklist will be used.
     * @since 2.53.2
     */
    public static final String FILE_OVERRIDE_LOCATION_PROPERTY = "hudson.remoting.ClassFilter.DEFAULTS_OVERRIDE_LOCATION";

    private static final Logger LOGGER = Logger.getLogger(ClassFilter.class.getName());

    /** Subclass hook: return true to reject the named class. The default accepts everything. */
    protected boolean isBlacklisted(String name) {
        return false;
    }

    /** Subclass hook: return true to reject the class. The default accepts everything. */
    protected boolean isBlacklisted(Class c) {
        return false;
    }

    /**
     * Checks a class name against the filter.
     *
     * @param name fully-qualified class name about to be deserialized.
     * @return the name unchanged, for call chaining.
     * @throws SecurityException if the name is blacklisted.
     */
    public final String check(String name) {
        if (isBlacklisted(name))
            throw new SecurityException("Rejected: " +name);
        return name;
    }

    /**
     * Checks a class against the filter.
     *
     * @param c class about to be deserialized.
     * @return the class unchanged, for call chaining.
     * @throws SecurityException if the class is blacklisted.
     */
    public final Class check(Class c) {
        if (isBlacklisted(c))
            throw new SecurityException("Rejected: " +c.getName());
        return c;
    }

    // Packages known to carry deserialization gadget chains (see the
    // publicized attack payloads at the bottom of this file).
    private static final String[] DEFAULT_PATTERNS = {
        "^com[.]google[.]inject[.].*",
        "^com[.]sun[.]jndi[.].*",
        "^com[.]sun[.]corba[.].*",
        "^com[.]sun[.]javafx[.].*",
        "^com[.]sun[.]org[.]apache[.]regex[.]internal[.].*",
        "^java[.]awt[.].*",
        "^java[.]rmi[.].*",
        "^java[.]security[.].*",
        "^javax[.]management[.].*",
        "^javax[.]naming[.].*",
        "^javax[.]script[.].*",
        "^javax[.]swing[.].*",
        "^org[.]apache[.]commons[.]beanutils[.].*",
        "^org[.]apache[.]commons[.]collections[.]functors[.].*",
        ".*org[.]apache[.]xalan.*",
        "^org[.]codehaus[.]groovy[.]runtime[.].*",
        "^org[.]hibernate[.].*",
        "^org[.]springframework[.](?!(\\p{Alnum}+[.])*\\p{Alnum}*Exception$).*",
        "^sun[.]rmi[.].*"
    };

    /**
     * A set of sensible default filtering rules to apply,
     * unless the context guarantees the trust between two channels.
     */
    public static final ClassFilter DEFAULT = createDefaultInstance();

    /**
     * No filtering whatsoever.
     */
    public static final ClassFilter NONE = new ClassFilter() {
    };

    /**
     * The default filtering rules to apply, unless the context guarantees the trust between two channels. The defaults
     * values provide for user specified overrides - see {@link #FILE_OVERRIDE_LOCATION_PROPERTY}.
     */
    /*package*/ static ClassFilter createDefaultInstance() {
        try {
            List<String> patternOverride = loadPatternOverride();
            if (patternOverride != null) {
                LOGGER.log(Level.FINE, "Using user specified overrides for class blacklisting");
                return new RegExpClassFilter(patternOverride.toArray(new String[patternOverride.size()]));
            } else {
                LOGGER.log(Level.FINE, "Using default in built class blacklisting");
                return new RegExpClassFilter(DEFAULT_PATTERNS);
            }
        } catch (Error e) {
            // when being used by something like XStream the actual cause gets swallowed
            LOGGER.log(Level.SEVERE, "Failed to initialize the default class filter", e);
            throw e;
        }
    }

    /**
     * Loads the user-supplied blacklist patterns named by
     * {@link #FILE_OVERRIDE_LOCATION_PROPERTY}.
     *
     * @return the validated patterns, or {@code null} if the property is unset.
     * @throws Error if the file is missing, unreadable, or contains an invalid
     *         regular expression (failing closed is deliberate for a security
     *         filter).
     */
    @CheckForNull
    private static List<String> loadPatternOverride() {
        String prop = System.getProperty(FILE_OVERRIDE_LOCATION_PROPERTY);
        if (prop==null) {
            return null;
        }

        LOGGER.log(Level.FINE, "Attempting to load user provided overrides for ClassFiltering from ''{0}''.", prop);

        File f = new File(prop);
        if (!f.exists() || !f.canRead()) {
            // Bug fix (message): the original read "...from as <prop> does not
            // exist or is not readable.", which is ungrammatical.
            throw new Error("Could not load user provided overrides for ClassFiltering from " + prop
                            + " as it does not exist or is not readable.");
        }

        BufferedReader br = null;
        try {
            br = new BufferedReader(new InputStreamReader(new FileInputStream(prop), Charset.defaultCharset()));
            ArrayList<String> patterns = new ArrayList<String>();
            for (String line = br.readLine(); line != null; line = br.readLine()) {
                try {
                    // Validate each expression up front so a bad override fails
                    // at startup rather than on first use.
                    Pattern.compile(line);
                    patterns.add(line);
                } catch (PatternSyntaxException pex) {
                    throw new Error("Error compiling blacklist expressions - '" + line
                                    + "' is not a valid regular expression.", pex);
                }
            }
            return patterns;
        } catch (IOException ex) {
            // Bug fix (message): the original repeated the "does not exist or
            // is not readable" text here, which is wrong for a read failure —
            // existence and readability were already checked above.
            throw new Error("Could not load user provided overrides for ClassFiltering from " + prop
                            + " as an I/O error occurred while reading it.", ex);
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException ioEx) {
                    LOGGER.log(Level.WARNING, "Failed to cleanly close input stream", ioEx);
                }
            }
        }
    }

    /**
     * A class that uses a given set of regular expression patterns to determine if the class is blacklisted.
     */
    private static final class RegExpClassFilter extends ClassFilter {

        /**
         * Any regex that is {@code ^some[.]package[.]name[.].*} or {@code ^some\.package\.name\.*} is really just a
         * {@link String#startsWith(String)} test and we can reduce CPU usage by performing that test explicitly as
         * well as reduce GC pressure.
         */
        private static final Pattern OPTIMIZE1 = Pattern.compile(
                "^\\^(([\\p{L}_$][\\p{L}\\p{N}_$]*(\\.|\\[\\.\\])?)+)\\.\\*$");

        /**
         * Any regex that is {@code ^\Qsome.package.name\E.*} is really just a {@link String#startsWith(String)}
         * test and we can reduce CPU usage by performing that test explicitly as well as reduce GC pressure.
         */
        private static final Pattern OPTIMIZE2 = Pattern.compile("^\\^\\Q[^\\\\]+\\\\E\\.\\*$");

        // Heterogeneous by design: each entry is either a String (fast
        // startsWith prefix) or a compiled Pattern (general regex).
        private final Object[] blacklistPatterns;

        public RegExpClassFilter(List<Pattern> blacklistPatterns) {
            this.blacklistPatterns = blacklistPatterns.toArray(new Pattern[blacklistPatterns.size()]);
        }

        RegExpClassFilter(String[] patterns) {
            blacklistPatterns = new Object[patterns.length];
            for (int i = 0, patternsLength = patterns.length; i < patternsLength; i++) {
                if (OPTIMIZE1.matcher(patterns[i]).matches()) {
                    // this is a simple startsWith test, no need to slow things down with a regex
                    // strip the leading "^" and trailing ".*", then turn "[.]" back into "."
                    blacklistPatterns[i] = patterns[i].substring(1,patterns[i].length()-2).replace("[.]",".");
                } else if (OPTIMIZE2.matcher(patterns[i]).matches()) {
                    // this is a simple startsWith test, no need to slow things down with a regex
                    // strip the leading "^\Q" and trailing "\E.*"
                    blacklistPatterns[i] = patterns[i].substring(3,patterns[i].length()-4);
                } else {
                    blacklistPatterns[i] = Pattern.compile(patterns[i]);
                }
            }
        }

        @Override
        protected boolean isBlacklisted(String name) {
            for (int i = 0; i < blacklistPatterns.length; i++) {
                Object p = blacklistPatterns[i];
                if (p instanceof Pattern && ((Pattern)p).matcher(name).matches()) {
                    return true;
                } else if (p instanceof String && name.startsWith((String)p)) {
                    return true;
                }
            }
            return false;
        }

        /**
         * Report the patterns that it's using to help users verify the use of custom filtering rule
         * and inspect its content at runtime if necessary.
         */
        @Override
        public String toString() {
            return Arrays.toString(blacklistPatterns);
        }
    }
}

/*
    Publicized attack payload:

		ObjectInputStream.readObject()
			PriorityQueue.readObject()
				Comparator.compare() (Proxy)
					ConvertedClosure.invoke()
						MethodClosure.call()
							...
						  		Method.invoke()
									Runtime.exec()


		ObjectInputStream.readObject()
			AnnotationInvocationHandler.readObject()
				Map(Proxy).entrySet()
					AnnotationInvocationHandler.invoke()
						LazyMap.get()
							ChainedTransformer.transform()
								ConstantTransformer.transform()
								InvokerTransformer.transform()
									Method.invoke()
										Class.getMethod()
								InvokerTransformer.transform()
									Method.invoke()
										Runtime.getRuntime()
								InvokerTransformer.transform()
									Method.invoke()
										Runtime.exec()


		ObjectInputStream.readObject()
			PriorityQueue.readObject()
				TransformingComparator.compare()
					InvokerTransformer.transform()
						Method.invoke()
							Runtime.exec()


		ObjectInputStream.readObject()
			SerializableTypeWrapper.MethodInvokeTypeProvider.readObject()
				SerializableTypeWrapper.TypeProvider(Proxy).getType()
					AnnotationInvocationHandler.invoke()
						HashMap.get()
				ReflectionUtils.findMethod()
				SerializableTypeWrapper.TypeProvider(Proxy).getType()
					AnnotationInvocationHandler.invoke()
						HashMap.get()
				ReflectionUtils.invokeMethod()
					Method.invoke()
						Templates(Proxy).newTransformer()
							AutowireUtils.ObjectFactoryDelegatingInvocationHandler.invoke()
								ObjectFactory(Proxy).getObject()
									AnnotationInvocationHandler.invoke()
										HashMap.get()
								Method.invoke()
									TemplatesImpl.newTransformer()
										TemplatesImpl.getTransletInstance()
											TemplatesImpl.defineTransletClasses()
												TemplatesImpl.TransletClassLoader.defineClass()
													Pwner*(Javassist-generated).<static init>
														Runtime.exec()
 */
package innovimax.mixthem;

import innovimax.mixthem.exceptions.ArgumentException;
import java.io.File;
import java.util.ArrayList;
import java.util.List;

/**
 * <p>Mix-them command line arguments management.</p>
 * @author Innovimax
 * @version 1.0
 */
class Arguments {
    // Parsed mixing rule, or null until set.
    private Rule rule = null;
    // Rule-specific parameters, or null until set.
    private List<String> ruleParams = null;
    // First and second input files, or null until set.
    private File file1 = null;
    private File file2 = null;

    void setRule(Rule rule) {
        this.rule = rule;
    }

    Rule getRule() {
        return this.rule;
    }

    void setRuleParameters(List<String> ruleParams) {
        this.ruleParams = ruleParams;
    }

    List<String> getRuleParameters() {
        return this.ruleParams;
    }

    void setFirstFile(File file1) {
        this.file1 = file1;
    }

    File getFirstFile() {
        return this.file1;
    }

    void setSecondFile(File file2) {
        this.file2 = file2;
    }

    File getSecondFile() {
        return this.file2;
    }

    /**
     * Parses the rule argument (a token starting with '-') at the given position.
     *
     * @param args  the raw command-line arguments
     * @param index the position expected to hold the rule token
     * @param name  human-readable argument name used in error messages
     * @return the matched rule, or {@code null} if the token is absent or does
     *         not start with '-'
     * @throws ArgumentException if the token starts with '-' but names no known rule
     */
    private static Rule getRuleArgument(String[] args, int index, String name) throws ArgumentException {
        Rule rule = null;
        // Bug fix: the original tested args.length >= 1, which only guards
        // index 0; any larger index could throw ArrayIndexOutOfBoundsException.
        if (args.length > index) {
            final String ruleString = args[index];
            if (ruleString.startsWith("-")) {
                rule = Rule.findByName(ruleString);
                if (rule == null) {
                    throw new ArgumentException(name + " argument is incorrect: " + ruleString);
                }
            }
        }
        return rule;
    }

    /**
     * Collects the parameters following a rule token.
     *
     * @param args  the raw command-line arguments
     * @param index the position just after the rule token
     * @param rule  the rule whose parameters are expected
     * @return the (currently always empty) parameter list
     * @throws ArgumentException reserved for future parameter validation
     */
    private static List<String> getRuleParameters(String[] args, int index, Rule rule) throws ArgumentException {
        List<String> params = new ArrayList<>();
        // TODO parameter extraction not implemented yet
        return params;
    }

    /**
     * Resolves a mandatory file argument and verifies it exists and is readable.
     *
     * @param args  the raw command-line arguments
     * @param index the position expected to hold the file path
     * @param name  human-readable argument name used in error messages
     * @return the existing, readable file
     * @throws ArgumentException if the argument is missing, the file does not
     *         exist, or it cannot be read
     */
    private static File getFileArgument(String[] args, int index, String name) throws ArgumentException {
        File file = null;
        if (args.length < index + 1) {
            throw new ArgumentException(name + " argument missing.");
        } else {
            String filepath = args[index];
            file = new File(filepath);
            if (file.exists()) {
                if (!file.canRead()) {
                    throw new ArgumentException(name + " cannot be read: " + filepath);
                }
            } else {
                throw new ArgumentException(name + " not found: " + filepath);
            }
        }
        return file;
    }
}
package fr.obeo.baseliner;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;

import org.eclipse.osgi.framework.util.Headers;
import org.eclipse.osgi.util.ManifestElement;
import org.osgi.framework.BundleException;
import org.osgi.framework.Constants;
import org.osgi.framework.Version;

import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.io.Files;

/**
 * Parses an OSGi bundle MANIFEST.MF, exposes its exported packages with their
 * versions, and writes back an updated Export-Package header while preserving
 * any extra attributes/directives each package carried.
 */
public class ManifestHandler {

	// Raw Bundle-Version header value, or null if the manifest had none.
	private String bundleVersion;

	// Bundle-SymbolicName with any ";directive" suffix stripped; may be null.
	private String symbolicName;

	// Exported package name -> version, in manifest order.
	private Map<String, Version> exportedPackages = Maps.newLinkedHashMap();

	// Exported package name -> extra ";key=value" attribute/directive suffixes
	// (everything except the version attribute), to be re-emitted verbatim.
	private Multimap<String, String> extraExtensions = HashMultimap.create();

	public ManifestHandler() {
	}

	public Map<String, Version> getExportedPackages() {
		return exportedPackages;
	}

	/**
	 * Parses a manifest given as a string.
	 *
	 * @param in the full manifest content.
	 * @throws IOException if the content cannot be parsed as a bundle manifest.
	 */
	public void load(String in) throws IOException {
		// Bug fix: getBytes() used the platform default charset; manifests are
		// read back as UTF-8 elsewhere in this class (see update(File)).
		InputStream is = new ByteArrayInputStream(in.getBytes(Charsets.UTF_8));
		try {
			load(is);
		} finally {
			is.close();
		}
	}

	/**
	 * Parses a manifest from a stream, populating the bundle version, symbolic
	 * name, exported packages and their extra attributes/directives.
	 *
	 * @param in the manifest stream (not closed by this method).
	 * @throws IOException if reading fails or the manifest is malformed.
	 */
	public void load(InputStream in) throws IOException {
		Headers<String, String> headers = new Headers<String, String>(10);
		Map<String, String> mapHeaders;
		try {
			mapHeaders = ManifestElement.parseBundleManifest(in, headers);
			bundleVersion = mapHeaders.get(Constants.BUNDLE_VERSION);
			symbolicName = mapHeaders.get(Constants.BUNDLE_SYMBOLICNAME);
			// Bug fix: guard against a missing Bundle-SymbolicName header,
			// which used to throw NullPointerException here.
			if (symbolicName != null && symbolicName.indexOf(";") != -1) {
				// drop ";singleton:=true"-style directives from the name
				symbolicName = symbolicName.substring(0, symbolicName.indexOf(";"));
			}
			ManifestElement[] packages = ManifestElement.parseHeader(
					Constants.EXPORT_PACKAGE, mapHeaders.get(Constants.EXPORT_PACKAGE));
			if (packages != null) {
				for (ManifestElement manifestElement : packages) {
					processExportedPackage(manifestElement);
				}
			}
		} catch (BundleException e) {
			// Bug fix: the original swallowed the exception with an
			// auto-generated printStackTrace(), leaving the handler silently
			// half-initialized. Surface it through the declared IOException.
			throw new IOException("Invalid bundle manifest", e);
		}
	}

	/**
	 * Records one Export-Package entry: its version (explicit, or defaulting to
	 * the bundle version) and any other attributes/directives verbatim.
	 */
	private void processExportedPackage(ManifestElement manifestElement) {
		String ns = manifestElement.getValue();
		String manifestPackageVersion = bundleVersion;
		String version = manifestElement.getAttribute(Constants.VERSION_ATTRIBUTE);
		if (version != null) {
			manifestPackageVersion = version;
		}
		Enumeration<String> attrKeys = manifestElement.getKeys();
		Enumeration<String> directiveKeys = manifestElement.getDirectiveKeys();
		StringBuilder result = new StringBuilder();
		if (attrKeys != null) {
			while (attrKeys.hasMoreElements()) {
				String key = attrKeys.nextElement();
				if (Constants.VERSION_ATTRIBUTE.equals(key)) {
					manifestPackageVersion = manifestElement.getAttribute(key);
				} else {
					result.append(addValues(false, key, manifestElement.getAttributes(key)));
				}
			}
		}
		if (directiveKeys != null) {
			while (directiveKeys.hasMoreElements()) {
				String key = directiveKeys.nextElement();
				result.append(addValues(true, key, manifestElement.getDirectives(key)));
			}
		}
		if (result.length() > 0) {
			extraExtensions.put(ns, result.toString());
		}
		exportedPackages.put(ns, createVersion(manifestPackageVersion));
	}

	/**
	 * Renders ";key=\"value\"" (attribute) or ";key:=\"value\"" (directive)
	 * suffixes for each value.
	 */
	private String addValues(boolean directive, String key, String[] values) {
		StringBuilder result = new StringBuilder();
		for (int i = 0; i < values.length; i++) {
			result.append(';').append(key);
			if (directive)
				result.append(':');
			result.append("=\"").append(values[i]).append('\"'); //$NON-NLS-1$
		}
		return result.toString();
	}

	private Version createVersion(String manifestPackageVersion) {
		return new Version(manifestPackageVersion);
	}

	/**
	 * Overrides the version of an already-exported package; unknown packages
	 * are ignored.
	 */
	public void setPackageVersion(String ns, Version inferedVersion) {
		if (exportedPackages.get(ns) != null) {
			exportedPackages.put(ns, inferedVersion);
		}
	}

	/**
	 * Rewrites the Export-Package section of the given manifest file in place,
	 * only touching the file when the content actually changed.
	 *
	 * @param manifestFile the MANIFEST.MF to update.
	 * @throws IOException on read or write failure.
	 */
	public void update(File manifestFile) throws IOException {
		if (exportedPackages.keySet().size() > 0) {
			String originalContent = Files.toString(manifestFile, Charsets.UTF_8);
			String updatedFileContent = getMergedManifest(originalContent);
			if (!originalContent.equals(updatedFileContent)) {
				Files.write(updatedFileContent, manifestFile, Charsets.UTF_8);
			}
		}
	}

	/**
	 * Replaces the value of the Export-Package header in the original manifest
	 * text with the packages held by this handler, keeping every other header
	 * untouched.
	 *
	 * <p>The manifest is split on ": " (header/value separator); the chunk that
	 * follows "Export-Package" is its value plus the start of the next header
	 * on the chunk's last line, which must be preserved.
	 */
	public String getMergedManifest(String originalContent) {
		List<String> updatedContent = Lists.newArrayList();
		boolean isExportPackage = false;
		for (String part : Splitter.on(": ").split(originalContent)) {
			if (isExportPackage) {
				/*
				 * We retrieve the beginning of the last line which should be
				 * another directive.
				 */
				List<String> lines = Lists.newArrayList(Splitter.on("\n")
						.split(part));
				String startOfNextDirective = lines.get(lines.size() - 1);
				updatedContent.add(getExportPackageText() + "\n" + startOfNextDirective);
			} else {
				updatedContent.add(part);
			}
			isExportPackage = part.endsWith("Export-Package");
		}
		String updatedFileContent = Joiner.on(": ").join(updatedContent);
		return updatedFileContent;
	}

	/**
	 * Renders the Export-Package value: one "pkg;version=\"x.y.z\"" entry per
	 * package (qualifier stripped), followed by its preserved extra
	 * attributes/directives, joined with ",\n ".
	 */
	private String getExportPackageText() {
		String exportPackagesValues = "";
		if (exportedPackages.keySet().size() > 0) {
			List<String> exportedPackagesText = Lists.newArrayList();
			for (String ns : exportedPackages.keySet()) {
				// NOTE(review): mapping -SNAPSHOT to .qualifier and then
				// removing .qualifier strips both suffixes; presumably only the
				// numeric version should appear in the manifest — confirm.
				String version = exportedPackages.get(ns).toString()
						.replace("-SNAPSHOT", ".qualifier");
				version = version.replace(".qualifier", "");
				String extensions = ";version=\"" + version + "\"";
				List<String> allExtensions = Lists.newArrayList();
				allExtensions.add(extensions);
				allExtensions.addAll(extraExtensions.get(ns));
				String extensionsText = Joiner.on("").join(allExtensions);
				exportedPackagesText.add(ns + extensionsText);
			}
			exportPackagesValues = Joiner.on(",\n ").join(exportedPackagesText);
		}
		return exportPackagesValues;
	}

	public String getSymbolicName() {
		return symbolicName;
	}

	public String getBundleVersion() {
		return bundleVersion;
	}
}
package org.ofbiz.base.util; import org.apache.http.HttpRequestInterceptor; import org.apache.http.HttpResponseInterceptor; import org.apache.http.client.config.RequestConfig; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.HttpClientConnectionManager; import org.apache.http.conn.socket.ConnectionSocketFactory; import org.apache.http.conn.socket.PlainConnectionSocketFactory; import org.apache.http.conn.ssl.NoopHostnameVerifier; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.conn.ssl.TrustSelfSignedStrategy; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.apache.http.impl.nio.client.HttpAsyncClients; import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.nio.conn.NHttpClientConnectionManager; import org.apache.http.nio.reactor.ConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; import org.apache.http.ssl.SSLContexts; import javax.net.ssl.SSLContext; import java.io.Closeable; import java.io.IOException; import java.io.Serializable; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; import java.util.List; import java.util.Map; /** * Typical pooling-enabled, SSL-enabled Apache HTTP client config builder readable from properties (SCIPIO), * kinder than straight HttpClient; also provides a {@link ScipioHttpClient.Config} class which can be used standalone. * ScipioHttpClient itself is a thin wrapper for creating HttpClient on first demand and help close on finalize. 
* TODO?: This has no helper send methods, just use {@link #getHttpClient()} since well-known and too much wrap. */ public class ScipioHttpClient implements Closeable { private static final Debug.OfbizLogger module = Debug.getOfbizLogger(java.lang.invoke.MethodHandles.lookup().lookupClass()); protected final Config config; protected final boolean autoClose; protected volatile HttpClientConnectionManager connectionManager; protected volatile NHttpClientConnectionManager asyncConnectionManager; protected volatile CloseableHttpClient httpClient; protected volatile CloseableHttpAsyncClient asyncHttpClient; protected ScipioHttpClient(Config config, boolean autoClose) { this.config = config; this.autoClose = autoClose; this.connectionManager = null; this.asyncConnectionManager = null; this.httpClient = null; this.asyncHttpClient = null; } public static ScipioHttpClient fromConfig(Config config, boolean autoClose) { return config.getFactory().getClient(config, autoClose); } public static ScipioHttpClient fromConfig(Config config) { return fromConfig(config, true); } public Config getConfig() { return config; } /** Gets connection manager (normally PoolingHttpClientConnectionManager), creates if needed. */ public HttpClientConnectionManager getConnectionManager() { if (!config.useConnectionManager()) { return null; } HttpClientConnectionManager connectionManager = this.connectionManager; if (connectionManager == null) { synchronized(this) { connectionManager = this.connectionManager; if (connectionManager == null) { connectionManager = createConnectionManager(); this.connectionManager = connectionManager; } } } return connectionManager; } /** Build method for PoolingHttpClientConnectionManager mainly, always creates. */ public HttpClientConnectionManager createConnectionManager() { return config.createConnectionManager(); } /** Build method for HttpClient, always creates. 
*/ public CloseableHttpClient createHttpClient(HttpClientConnectionManager connectionManager) { return config.createHttpClient(connectionManager); } /** Build method for HttpClient: always creates HttpClient but reusing the current connection manager initializing as needed. */ public CloseableHttpClient createHttpClient() { return config.createHttpClient(getConnectionManager()); } public CloseableHttpClient getHttpClient() { CloseableHttpClient httpClient = this.httpClient; if (httpClient == null) { synchronized(this) { httpClient = this.httpClient; if (httpClient == null) { httpClient = createHttpClient(getConnectionManager()); this.httpClient = httpClient; } } } return httpClient; } /** SCIPIO: 2020-01-14: NEW ASYNC SUPPORT: Gets connection manager (normally PoolingNHttpClientConnectionManager), creates if needed. */ public NHttpClientConnectionManager getAsyncConnectionManager() { if (!config.useConnectionManager()) { return null; } NHttpClientConnectionManager connectionManager = this.asyncConnectionManager; if (connectionManager == null) { synchronized(this) { connectionManager = this.asyncConnectionManager; if (connectionManager == null) { connectionManager = createAsyncConnectionManager(); this.asyncConnectionManager = connectionManager; } } } return connectionManager; } /** SCIPIO: 2020-01-14: NEW ASYNC SUPPORT: Build method for NPoolingHttpClientConnectionManager mainly, always creates. */ public NHttpClientConnectionManager createAsyncConnectionManager() { return config.createAsyncConnectionManager(); } /** SCIPIO: 2020-01-14: NEW ASYNC SUPPORT: Build method for async HttpClient, always creates. */ public CloseableHttpAsyncClient createAsyncHttpClient(NHttpClientConnectionManager connectionManager) { return config.createAsyncHttpClient(connectionManager); } /** SCIPIO: 2020-01-14: NEW ASYNC SUPPORT: Build method for async HttpClient: always creates HttpClient but reusing the current connection manager initializing as needed. 
*/ public CloseableHttpAsyncClient createAsyncHttpClient() { return config.createAsyncHttpClient(getAsyncConnectionManager()); } /** SCIPIO: 2020-01-14: NEW ASYNC SUPPORT **/ public CloseableHttpAsyncClient getAsyncHttpClient() { CloseableHttpAsyncClient asyncHttpClient = this.asyncHttpClient; if (asyncHttpClient == null) { synchronized(this) { asyncHttpClient = this.asyncHttpClient; if (asyncHttpClient == null) { asyncHttpClient = createAsyncHttpClient(getAsyncConnectionManager()); this.asyncHttpClient = asyncHttpClient; } } } return asyncHttpClient; } /** If true, {@link #close()} is called in {@link #finalize()}. WARN: May not be sufficient for safe close. */ public boolean isAutoClose() { return autoClose; } @Override public void close() throws IOException { if (httpClient != null) { try { httpClient.close(); } catch(Exception e) { Debug.logWarning("Could not close HttpClient: " + e.toString(), module); } } try { if (connectionManager instanceof PoolingHttpClientConnectionManager) { ((PoolingHttpClientConnectionManager) connectionManager).close(); } else if (connectionManager instanceof PoolingNHttpClientConnectionManager) { ((PoolingNHttpClientConnectionManager) connectionManager).closeExpiredConnections(); } } catch(Exception e) { Debug.logWarning(e, "Could not close HttpClient connection manager: " + e.toString(), module); } } @Deprecated @Override protected void finalize() throws Throwable { // SCIPIO: TODO: alternative (finalize deprecated by java) if (isAutoClose()) { close(); } } /** * Generic HttpClient config/builder, can be used standalone without ScipioHttpClient instance. 
*/ public static class Config implements Serializable { public static final String DEFAULT_JKS_STORE_FILENAME = "component://base/config/ofbizssl.jks"; public static final String DEFAULT_JKS_STORE_PASSWORD = "changeit"; private final Factory factory; private final Boolean pooling; private final Integer maxConnections; private final Integer maxConnectionsPerHost; private final Integer connectTimeout; private final Integer socketTimeout; private final Integer connectionRequestTimeout; private final Boolean expectContinueEnabled; private final Boolean trustSelfCert; private final Boolean trustAnyHost; private final Boolean trustAllCerts; private final String jksStoreFileName; private final String jksStorePassword; protected Config(Map<String, ?> properties, Factory factory) { this.factory = factory; this.pooling = UtilProperties.asBoolean(properties.get("pooling"), true); this.maxConnections = UtilProperties.asInteger(properties.get("maxConnections"), null); this.maxConnectionsPerHost = UtilProperties.asInteger(properties.get("maxConnectionsPerHost"), null); this.connectTimeout = UtilProperties.asInteger(properties.get("connectTimeout"), null); this.socketTimeout = UtilProperties.asInteger(properties.get("socketTimeout"), null); this.connectionRequestTimeout = UtilProperties.asInteger(properties.get("connectionRequestTimeout"), null); this.expectContinueEnabled = UtilProperties.asBoolean(properties.get("expectContinueEnabled"), null); this.trustSelfCert = UtilProperties.asBoolean(properties.get("trustSelfCert"), null); this.trustAnyHost = UtilProperties.asBoolean(properties.get("trustAnyHost"), null); this.trustAllCerts = UtilProperties.asBoolean(properties.get("trustAllCerts"), null); String jksStoreFileName = (String) properties.get("jksStoreFileName"); this.jksStoreFileName = UtilValidate.isNotEmpty(jksStoreFileName) ? 
jksStoreFileName : DEFAULT_JKS_STORE_FILENAME; String jksStorePassword = (String) properties.get("jksStorePassword"); this.jksStorePassword = UtilValidate.isNotEmpty(jksStorePassword) ? jksStorePassword : DEFAULT_JKS_STORE_PASSWORD; } public static Config fromContext(Map<String, ?> properties) { Factory factory = getFactory(properties); return factory.getConfig(properties, factory); } protected static Factory getFactory(Map<String, ?> properties) { Factory factory = Factory.DEFAULT; String factoryClsName = (String) properties.get("factoryClass"); if (UtilValidate.isNotEmpty(factoryClsName)) { try { factory = (Factory) Thread.currentThread().getContextClassLoader().loadClass(factoryClsName).getConstructor().newInstance(); } catch (Exception e) { Debug.logError(e, "Could not load factoryClass [" + factoryClsName + "] for ScipioHttpClient config", module); } } return factory; } public static Config fromProperties(String resource, String prefix) { return fromContext(UtilProperties.getPropertiesWithPrefix(UtilProperties.getProperties(resource), prefix)); } /** SCIPIO: 2020-01-14: NEW ASYNC SUPPORT: Extracted from createHttpClient for reuse **/ protected RequestConfig buildRequestConfig() { RequestConfig.Builder config = RequestConfig.custom(); if (getConnectionRequestTimeout() != null) { config.setConnectionRequestTimeout(getConnectionRequestTimeout()); } if (getConnectTimeout() != null) { config.setConnectTimeout(getConnectTimeout()); } if (getSocketTimeout() != null) { config.setSocketTimeout(getSocketTimeout()); } if (getExpectContinueEnabled() != null) { config.setExpectContinueEnabled(true); } return config.build(); } /** Build method for PoolingHttpClientConnectionManager mainly. 
*/ public HttpClientConnectionManager createConnectionManager() { if (!useConnectionManager()) { return null; } PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager(RegistryBuilder.<ConnectionSocketFactory>create() .register("http", getPlainConnectionSocketFactory()) .register("https", getSSLConnectionSocketFactory()) .build()); if (getMaxConnections() != null) { cm.setMaxTotal(getMaxConnections()); } if (getMaxConnectionsPerHost() != null) { cm.setDefaultMaxPerRoute(getMaxConnectionsPerHost()); } return cm; } protected PlainConnectionSocketFactory getPlainConnectionSocketFactory() { return PlainConnectionSocketFactory.getSocketFactory(); } protected SSLContext getSSLContext() { if (Boolean.TRUE.equals(getTrustSelfCert())) { try { if(Boolean.TRUE.equals(getTrustAllCerts())){ return SSLContexts.custom().loadTrustMaterial(FileUtil.getFile(getJksStoreFileName()), getJksStorePassword().toCharArray(), new TrustSelfSignedStrategy() { @Override public boolean isTrusted(X509Certificate[] chain, String authType) throws CertificateException { return true; } }) .build(); } return SSLContexts.custom().loadTrustMaterial(FileUtil.getFile(getJksStoreFileName()), getJksStorePassword().toCharArray(), new TrustSelfSignedStrategy()).build(); } catch (Exception e) { Debug.logError(e, "Could not load self-cert trust SSLContext for HttpClient: " + e.toString(), module); //throw new RuntimeException(e); // TODO: REVIEW: will cause some classes to not load due to static instances } } return SSLContexts.createDefault(); } protected SSLConnectionSocketFactory getSSLConnectionSocketFactory(SSLContext sslContext) { if (Boolean.TRUE.equals(getTrustAnyHost())) { return new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE); } return new SSLConnectionSocketFactory(sslContext); } protected SSLConnectionSocketFactory getSSLConnectionSocketFactory() { return getSSLConnectionSocketFactory(getSSLContext()); } /** Build method for HttpClient. 
*/
    public CloseableHttpClient createHttpClient(HttpClientConnectionManager connectionManager) {
        return createHttpClient(connectionManager, null, null);
    }

    /** Build method for HttpClient; interceptors may be null/empty. */
    public CloseableHttpClient createHttpClient(HttpClientConnectionManager connectionManager, List<HttpRequestInterceptor> requestInterceptors, List<HttpResponseInterceptor> responseInterceptors) {
        return createHttpClientBuilder(connectionManager, requestInterceptors, responseInterceptors).build();
    }

    /** Build method for HttpClient builder without interceptors. */
    public HttpClientBuilder createHttpClientBuilder(HttpClientConnectionManager connectionManager) {
        return createHttpClientBuilder(connectionManager, null, null);
    }

    /**
     * Build method for HttpClient builder: applies the request config, registers the given
     * interceptors (in list order), and either installs the connection manager or, when none is
     * supplied, sets the SSL socket factory directly on the builder.
     */
    public HttpClientBuilder createHttpClientBuilder(HttpClientConnectionManager connectionManager, List<HttpRequestInterceptor> requestInterceptors, List<HttpResponseInterceptor> responseInterceptors) {
        HttpClientBuilder clientBuilder = HttpClients.custom().setDefaultRequestConfig(buildRequestConfig());
        if (UtilValidate.isNotEmpty(requestInterceptors)) {
            for (HttpRequestInterceptor reqInterceptor : requestInterceptors) {
                clientBuilder.addInterceptorLast(reqInterceptor);
            }
        }
        if (UtilValidate.isNotEmpty(responseInterceptors)) {
            for (HttpResponseInterceptor resInterceptor : responseInterceptors) {
                clientBuilder.addInterceptorLast(resInterceptor);
            }
        }
        if (connectionManager == null) {
            // No pooling manager: give the client its own SSL socket factory.
            clientBuilder.setSSLSocketFactory(getSSLConnectionSocketFactory());
        } else {
            clientBuilder.setConnectionManager(connectionManager);
        }
        return clientBuilder;
    }

    /** SCIPIO: 2020-01-14: NEW ASYNC SUPPORT: Build method for PoolingNHttpClientConnectionManager mainly.
*/
    public NHttpClientConnectionManager createAsyncConnectionManager() {
        // Pooling is opt-in, same gate as the synchronous connection manager.
        if (!useConnectionManager()) {
            return null;
        }
        PoolingNHttpClientConnectionManager cm = null;
        try {
            ConnectingIOReactor ioReactor = new DefaultConnectingIOReactor();
            cm = new PoolingNHttpClientConnectionManager(ioReactor);
            if (getMaxConnections() != null) {
                cm.setMaxTotal(getMaxConnections());
            }
            if (getMaxConnectionsPerHost() != null) {
                cm.setDefaultMaxPerRoute(getMaxConnectionsPerHost());
            }
        } catch (IOReactorException e) {
            // NOTE(review): on reactor failure this logs and returns null; callers must tolerate a null manager.
            Debug.logError(e, module);
        }
        return cm;
    }

    /**
     * Build method for HttpAsyncClient (no interceptors).
     * <p>SCIPIO: 2020-01-14: NEW ASYNC SUPPORT</p>
     */
    public CloseableHttpAsyncClient createAsyncHttpClient(NHttpClientConnectionManager connectionManager) {
        return createAsyncHttpClient(connectionManager, null, null);
    }

    /**
     * Build method for HttpAsyncClient.
     * <p>SCIPIO: 2020-01-14: NEW ASYNC SUPPORT</p>
     */
    public CloseableHttpAsyncClient createAsyncHttpClient(NHttpClientConnectionManager connectionManager, List<HttpRequestInterceptor> requestInterceptors, List<HttpResponseInterceptor> responseInterceptors) {
        CloseableHttpAsyncClient httpAsyncClient = createAsyncHttpClientBuilder(connectionManager, requestInterceptors, responseInterceptors).build();
        // Unlike the blocking client, async clients must be explicitly started before use.
        if (!httpAsyncClient.isRunning()) {
            httpAsyncClient.start();
        }
        return httpAsyncClient;
    }

    /**
     * Build method for HttpAsyncClient builder (no interceptors).
     * <p>SCIPIO: 2020-01-14: NEW ASYNC SUPPORT</p>
     */
    public HttpAsyncClientBuilder createAsyncHttpClientBuilder(NHttpClientConnectionManager connectionManager) {
        return createAsyncHttpClientBuilder(connectionManager, null, null);
    }

    /**
     * Build method for HttpAsyncClient.
* <p>NOTE: If using this method, make sure to call {@link CloseableHttpAsyncClient#start()} if not {@link CloseableHttpAsyncClient#isRunning()}.</p>
     * <p>SCIPIO: 2020-01-14: NEW ASYNC SUPPORT</p>
     */
    public HttpAsyncClientBuilder createAsyncHttpClientBuilder(NHttpClientConnectionManager connectionManager, List<HttpRequestInterceptor> requestInterceptors, List<HttpResponseInterceptor> responseInterceptors) {
        HttpAsyncClientBuilder builder = HttpAsyncClients.custom()
                .setDefaultRequestConfig(buildRequestConfig())
                .setConnectionManager(connectionManager);
        // Interceptors are registered in list order, requests first then responses.
        if (UtilValidate.isNotEmpty(requestInterceptors)) {
            for (HttpRequestInterceptor interceptor : requestInterceptors) {
                builder.addInterceptorLast(interceptor);
            }
        }
        if (UtilValidate.isNotEmpty(responseInterceptors)) {
            for (HttpResponseInterceptor interceptor : responseInterceptors) {
                builder.addInterceptorLast(interceptor);
            }
        }
        return builder;
    }

    // -- Simple configuration accessors (values captured from the properties map at construction) --

    public Factory getFactory() { return factory; }

    public Boolean getPooling() { return pooling; }

    /** True when pooling is enabled; gates creation of the (async) connection managers. */
    public boolean useConnectionManager() { return Boolean.TRUE.equals(getPooling()); }

    public Integer getMaxConnections() { return maxConnections; }

    public Integer getMaxConnectionsPerHost() { return maxConnectionsPerHost; }

    public Integer getConnectTimeout() { return connectTimeout; }

    public Integer getSocketTimeout() { return socketTimeout; }

    public Integer getConnectionRequestTimeout() { return connectionRequestTimeout; }

    public Boolean getExpectContinueEnabled() { return expectContinueEnabled; }

    /**
     * Returns true if should trust own certificate in jks keystore file, default false.
     */
    public Boolean getTrustSelfCert() { return trustSelfCert; }

    /**
     * Returns true if should validate any host, usually set in conjunction with trustSelfCert, default false.
     */
    public Boolean getTrustAnyHost() { return trustAnyHost; }

    /**
     * Returns true if should ignore certs.
*/
        public Boolean getTrustAllCerts() { return trustAllCerts; }

        public String getJksStoreFileName() { return jksStoreFileName; }

        public String getJksStorePassword() { return jksStorePassword; }
    }

    /**
     * Pluggable factory for client/config creation; override the defaults by naming a custom
     * implementation in the "factoryClass" property (see Config.getFactory).
     */
    public interface Factory {
        // Stateless default instance used when no factoryClass is configured.
        Factory DEFAULT = new Factory() {};

        default ScipioHttpClient getClient(Config config, boolean autoClose) {
            return new ScipioHttpClient(config, autoClose);
        }

        default Config getConfig(Map<String, ?> properties, Factory factory) {
            return new Config(properties, factory);
        }
    }
}
package io.scif.config;

import io.scif.Checker;
import io.scif.Groupable;
import io.scif.MetadataLevel;
import io.scif.Parser;
import io.scif.Writer;
import io.scif.codec.CodecOptions;
import io.scif.img.ImageRegion;
import io.scif.img.ImgFactoryHeuristic;
import io.scif.img.ImgOpener;
import io.scif.img.ImgSaver;
import io.scif.img.Range;
import io.scif.img.converters.PlaneConverter;

import java.awt.image.ColorModel;
import java.util.HashMap;

import net.imglib2.img.array.ArrayImgFactory;
import net.imglib2.img.cell.CellImgFactory;
import net.imglib2.img.planar.PlanarImgFactory;

import org.scijava.Context;

/**
 * Configuration class for all SCIFIO components. Similar to a {@link Context},
 * this class is effectively a container for state. However, its intended scope
 * is per method call stack, and not through a complete application. If any
 * object in a call stack has behavior that can be modified through this class,
 * a complete method chain accepting {@code SCIFIOConfig} instances should be
 * available - even if the intermediate classes do not require configuration
 * (the need for configuration is, effectively, contagious).
 * <p>
 * Note that each getter and setter method signature in this class is prefixed
 * by the component it affects.
 * </p>
 *
 * @author Mark Hiner
 * @see Checker
 * @see Parser
 * @see Writer
 * @see Groupable
 * @see ImgOpener
 * @see ImgSaver
 */
public class SCIFIOConfig extends HashMap<String, Object> {

	// -- Fields --

	// Checker
	// Whether Checkers may open the dataset itself during format checking (default true).
	private boolean openDataset = true;

	// Parser
	// Metadata parsing level, filtering, and original-metadata retention flags.
	private MetadataLevel level;

	private boolean filterMetadata;

	private boolean saveOriginalMetadata;

	// Writer
	// Sequential-write hint, color model, frame rate, compression and codec options.
	private boolean writeSequential = false;

	private ColorModel model = null;

	private int fps = 10;

	private String compression = null;

	private CodecOptions options = null;

	// Groupable
	/** Whether or not to group multi-file formats. */
	private boolean group = false;

	// ImgOpener

	/**
	 * Access type options for opening datasets.
* <ul>
	 * <li>
	 * {@link ImgMode#ARRAY} will attempt to use {@link ArrayImgFactory}</li>
	 * <li>
	 * {@link ImgMode#AUTO} allows the program to decide, e.g. based on
	 * available memory.</li>
	 * <li>
	 * {@link ImgMode#CELL} will attempt to use {@link CellImgFactory}</li>
	 * <li>
	 * {@link ImgMode#PLANAR} will attempt to use {@link PlanarImgFactory}</li>
	 * </ul>
	 *
	 * @author Mark Hiner
	 */
	public static enum ImgMode {
		ARRAY, AUTO, CELL, PLANAR;
	}

	// If true, planarEnabled returns true. If false, cellEnabled returns true.
	// If null, both planar/cell enabled will return false.
	private ImgMode[] imgModes = new ImgMode[] { ImgMode.AUTO };

	// Whether ImgOpeners should open all images
	private boolean openAll = false;

	// Image indices
	private Range range = new Range("0");

	// sub-region specification for opening portions of an image
	private ImageRegion region = null;

	// Whether or not to use a MinMaxFilter
	private boolean computeMinMax = false;

	// Custom plane converter
	private PlaneConverter planeConverter = null;

	// Custom heuristic for choosing an ImgFactory
	private ImgFactoryHeuristic imgFactoryHeuristic = null;

	// ImgSaver
	// Whether ImgSaver should composite channels when writing (default true).
	private boolean writeRGB = true;

	// -- Constructors --

	/**
	 * Zero-param constructor. Creates an empty configuration.
	 */
	public SCIFIOConfig() { /* no-op, empty configuration */}

	/**
	 * Copying constructor. Returns a copy of the given SCIFIOConfig.
	 *
	 * @param config Configuration to copy.
*/ public SCIFIOConfig(final SCIFIOConfig config) { super(config); openDataset = config.openDataset; level = config.level; filterMetadata = config.filterMetadata; saveOriginalMetadata = config.saveOriginalMetadata; writeSequential = config.writeSequential; model = config.model; fps = config.fps; compression = config.compression; options = config.options; group = config.group; imgModes = config.imgModes; range = config.range; region = config.region; computeMinMax = config.computeMinMax; planeConverter = config.planeConverter; imgFactoryHeuristic = config.imgFactoryHeuristic; writeRGB = config.writeRGB; } // -- Checker Methods -- public SCIFIOConfig checkerSetOpen(final boolean open) { openDataset = open; return this; } public boolean checkerIsOpen() { return openDataset; } // -- Parser methods -- /** * @return {@link MetadataLevel} desired for parsing. */ public MetadataLevel parserGetLevel() { return level; } /** * @param level Desired metadata level for parsing. * @return This SCIFIOConfig for method chaining. */ public SCIFIOConfig parserSetLevel(final MetadataLevel level) { this.level = level; return this; } /** * @return True if parsers should filter parsed metadata. */ public boolean parserIsFiltered() { return filterMetadata; } /** * @param filterMetadata Desired filtering behavior for parsing. * @return This SCIFIOConfig for method chaining. */ public SCIFIOConfig parserSetFiltered(final boolean filterMetadata) { this.filterMetadata = filterMetadata; return this; } /** * @return True if parsers should save original metadata. */ public boolean parserIsSaveOriginalMetadata() { return saveOriginalMetadata; } /** * @param saveOriginalMetadata Desired metadata saving behavior for parsing. * @return This SCIFIOConfig for method chaining. 
*/
	public SCIFIOConfig parserSetSaveOriginalMetadata(
		final boolean saveOriginalMetadata)
	{
		this.saveOriginalMetadata = saveOriginalMetadata;
		return this;
	}

	// -- Writer methods --

	/**
	 * Sets whether or not we know that planes will be written sequentially. If
	 * planes are written sequentially and this flag is set, then performance will
	 * be slightly improved.
	 *
	 * @param sequential Flag for writing sequential planes.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig writerSetSequential(final boolean sequential) {
		writeSequential = sequential;
		return this;
	}

	/**
	 * @return True if writers should write image planes sequentially.
	 */
	public boolean writerIsSequential() {
		return writeSequential;
	}

	/**
	 * @param cm ColorModel to use for writing.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig writerSetColorModel(final ColorModel cm) {
		model = cm;
		return this;
	}

	/**
	 * @return The ColorModel to use when writing.
	 */
	public ColorModel writerGetColorModel() {
		return model;
	}

	/**
	 * @param rate Desired frames per second to use when writing.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig writerSetFramesPerSecond(final int rate) {
		fps = rate;
		return this;
	}

	/**
	 * @return The number of frames per second to use when writing.
	 */
	public int writerGetFramesPerSecond() {
		return fps;
	}

	/**
	 * @param compress Desired compression type to use when writing.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig writerSetCompression(final String compress) {
		compression = compress;
		return this;
	}

	/**
	 * @return The compression type writers will use when writing.
	 */
	public String writerGetCompression() {
		return compression;
	}

	/**
	 * @param options Desired CodecOptions to use for writing.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig writerSetCodecOptions(final CodecOptions options) {
		this.options = options;
		return this;
	}

	/**
	 * @return The CodecOptions that writers will use when writing.
*/
	public CodecOptions writerGetCodecOptions() {
		return options;
	}

	// -- Groupable methods --

	/**
	 * @param groupFiles Desired behavior for grouping potential multi-file
	 *          datasets. If true, these will be grouped into one single dataset.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig groupableSetGroupFiles(final boolean groupFiles) {
		group = groupFiles;
		return this;
	}

	/**
	 * @return Whether or not Groupable classes should group similar files when
	 *         operating on them.
	 */
	public boolean groupableIsGroupFiles() {
		return group;
	}

	// -- ImgOpener methods --

	/**
	 * @return The access type to attempt to open the dataset with. Default:
	 *         imgMode.AUTO, which allows the calling program to decide.
	 */
	public ImgMode[] imgOpenerGetImgModes() {
		return imgModes;
	}

	/**
	 * @param imgModes A list of ImgMode access types. How these are interpreted
	 *          is up to the ImgFactoryHeuristic, but it is reasonable to expect
	 *          modes listed earlier to be preferred.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgOpenerSetImgModes(final ImgMode... imgModes) {
		this.imgModes = imgModes;
		return this;
	}

	/**
	 * @return True if the image should be scaled to its min and max intensities.
	 *         Default: false
	 */
	public boolean imgOpenerIsComputeMinMax() {
		return computeMinMax;
	}

	/**
	 * @param computeMinMax Whether or not images should be scaled to min/max
	 *          intensities.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgOpenerSetComputeMinMax(final boolean computeMinMax) {
		this.computeMinMax = computeMinMax;
		return this;
	}

	/**
	 * Returns a {@link ImageRegion} specifying dimension constraints. This may be
	 * of a different dimensionality than the underlying image, in which case the
	 * lengths are assume to be in the natural ordering of the image.
	 *
	 * @return A Subregion specifying dimension offsets and lengths.
Default: null
	 */
	public ImageRegion imgOpenerGetRegion() {
		return region;
	}

	/**
	 * @param region Region constraints for any image to open
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgOpenerSetRegion(final ImageRegion region) {
		this.region = region;
		return this;
	}

	/**
	 * @return A custom plane converter. Default: {@code null}
	 */
	public PlaneConverter imgOpenerGetPlaneConverter() {
		return planeConverter;
	}

	/**
	 * @param planeConverter Sets a PlaneConverter to use when opening datasets.
	 *          This is useful when using a custom Img type.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgOpenerSetPlaneConverter(
		final PlaneConverter planeConverter)
	{
		this.planeConverter = planeConverter;
		return this;
	}

	/**
	 * @return The ImgFactoryHeuristic to use when selecting an ImgFactory.
	 *         Default: {@code null}
	 */
	public ImgFactoryHeuristic imgOpenerGetImgFactoryHeuristic() {
		return imgFactoryHeuristic;
	}

	/**
	 * @param imgFactoryHeuristic Heuristic to use when selecting an ImgFactory.
	 *          Will not be used if an ImgFactory is provided to the ImgOpener.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgOpenerSetImgFactoryHeuristic(
		final ImgFactoryHeuristic imgFactoryHeuristic)
	{
		this.imgFactoryHeuristic = imgFactoryHeuristic;
		return this;
	}

	/**
	 * @return True if all available images should be opened. Useful if the actual
	 *         range of available images is not known.
	 */
	public boolean imgOpenerIsOpenAllImages() {
		return openAll;
	}

	/**
	 * @param openAll Whether or not all available images should be opened.
	 *          Default: false.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgOpenerSetOpenAllImages(final boolean openAll) {
		this.openAll = openAll;
		return this;
	}

	/**
	 * @return The image range to be opened. Default: [0]
	 */
	public Range imgOpenerGetRange() {
		return range;
	}

	/**
	 * @param index Image index within the dataset to open
	 * @return This SCIFIOConfig for method chaining.
*/
	public SCIFIOConfig imgOpenerSetIndex(final int index) {
		// FIX: Long.valueOf replaces the deprecated Long(long) boxing constructor; value is identical.
		return imgOpenerSetRange(new Range(Long.valueOf(index)));
	}

	/**
	 * @param range Image range string (e.g. "0-2") within the dataset to open.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgOpenerSetRange(final String range) {
		return imgOpenerSetRange(new Range(range));
	}

	/**
	 * @param range Image range within the dataset to open.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgOpenerSetRange(final Range range) {
		this.range = range;
		return this;
	}

	// -- ImgSaver methods --

	/**
	 * @return True if channels should be composited during ImgSaver operation.
	 */
	public boolean imgSaverGetWriteRGB() {
		return writeRGB;
	}

	/**
	 * @param rgb Whether or not the ImgSaver should composite channels when
	 *          writing.
	 * @return This SCIFIOConfig for method chaining.
	 */
	public SCIFIOConfig imgSaverSetWriteRGB(final boolean rgb) {
		writeRGB = rgb;
		return this;
	}

	// -- Clonable methods --

	@Override
	public SCIFIOConfig clone() {
		// The copy constructor copies all configuration fields; map entries are copied via super(config).
		return new SCIFIOConfig(this);
	}
}
package net.sf.jabref.oo; import net.sf.jabref.AuthorList; import net.sf.jabref.BibtexDatabase; import net.sf.jabref.BibtexEntry; import net.sf.jabref.Globals; import net.sf.jabref.export.layout.Layout; import net.sf.jabref.export.layout.LayoutFormatter; import net.sf.jabref.export.layout.LayoutHelper; import java.io.*; import java.util.*; import java.util.regex.Pattern; /** * This class embodies a bibliography formatting for OpenOffice, which is composed * of the following elements: * * 1) Each OO bib entry type must have a formatting. A formatting is an array of elements, each * of which is either a piece of constant text, an entry field value, or a tab. Each element has * a character format associated with it. * * 2) Many field values (e.g. author) need to be formatted before input to OpenOffice. The style * has the responsibility of formatting all field values. Formatting is handled by 0-n * JabRef LayoutFormatter classes. * * 3) If the entries are not numbered, a citation marker must be produced for each entry. This * operation is performed for each JabRef BibtexEntry. 
*/
class OOBibStyle implements Comparable<OOBibStyle> {

    // Marker emitted for citation numbers that could not be resolved.
    public static final String UNDEFINED_CITATION_MARKER = "??";

    private String name = null;
    private final SortedSet<String> journals = new TreeSet<String>();

    // Formatter to be run on fields before they are used as part of citation marker:
    private final LayoutFormatter fieldFormatter = new OOPreFormatter();

    private Layout defaultBibLayout;

    // reference layout mapped from entry type number:
    private final HashMap<String, Layout> bibLayout = new HashMap<String, Layout>();

    private final HashMap<String, Object> properties = new HashMap<String, Object>();
    private final HashMap<String, Object> citProperties = new HashMap<String, Object>();

    private final Pattern numPattern = Pattern.compile("-?\\d+");

    private boolean valid = false;

    // Parse modes for readFormatFile, switched by the section marker lines below:
    private final static int NONE = 0;
    private final static int LAYOUT = 1;
    private final static int PROPERTIES = 2;
    private final static int CITATION = 3;
    private final static int NAME = 4;
    private final static int JOURNALS = 5;

    private final static String LAYOUT_MRK = "LAYOUT";
    private final static String PROPERTIES_MARK = "PROPERTIES";
    private final static String CITATION_MARK = "CITATION";
    private final static String NAME_MARK = "NAME";
    private final static String JOURNALS_MARK = "JOURNALS";
    private final static String DEFAULT_MARK = "default";

    private File styleFile = null;
    // NOTE(review): static, so this timestamp is shared by ALL OOBibStyle instances; with more
    // than one style loaded, isUpToDate() compares against the last-loaded file - confirm intent.
    private static long styleFileModificationTime = Long.MIN_VALUE;

    //private Pattern quoted = Pattern.compile("\".*^\\\\\"");
    private final Pattern quoted = Pattern.compile("\".*\"");

    // Loads a style from a file and records its modification time for isUpToDate().
    public OOBibStyle(File styleFile) throws Exception {
        this(new FileReader(styleFile));
        this.styleFile = styleFile;
        OOBibStyle.styleFileModificationTime = (styleFile).lastModified();
    }

    // Loads a style from any Reader; defaults are installed first, then overridden by the file.
    public OOBibStyle(Reader in) throws Exception {
        // Set default property values:
        properties.put("Title", "Bibliography");
        properties.put("SortAlgorithm", "alphanumeric");
        properties.put("IsSortByPosition", Boolean.FALSE);
        properties.put("IsNumberEntries",
Boolean.FALSE);
        properties.put("BracketBefore", "[");
        properties.put("BracketAfter", "]");
        properties.put("ReferenceParagraphFormat", "Default");
        properties.put("ReferenceHeaderParagraphFormat", "Heading 1");

        // Set default properties for the citation marker:
        citProperties.put("AuthorField", "author/editor");
        citProperties.put("YearField", "year");
        citProperties.put("MaxAuthors", 3);
        citProperties.put("MaxAuthorsFirst", -1);
        citProperties.put("AuthorSeparator", ", ");
        citProperties.put("AuthorLastSeparator", " & ");
        citProperties.put("AuthorLastSeparatorInText", null);
        citProperties.put("EtAlString", " et al.");
        citProperties.put("YearSeparator", ", ");
        citProperties.put("InTextYearSeparator", " ");
        citProperties.put("BracketBefore", "(");
        citProperties.put("BracketAfter", ")");
        citProperties.put("CitationSeparator", "; ");
        citProperties.put("PageInfoSeparator", "; ");
        citProperties.put("GroupedNumbersSeparator", "-");
        citProperties.put("MinimumGroupingCount", 3);
        citProperties.put("FormatCitations", Boolean.FALSE);
        citProperties.put("CitationCharacterFormat", "Default");
        citProperties.put("ItalicCitations", Boolean.FALSE);
        citProperties.put("BoldCitations", Boolean.FALSE);
        citProperties.put("SuperscriptCitations", Boolean.FALSE);
        citProperties.put("SubscriptCitations", Boolean.FALSE);
        citProperties.put("MultiCiteChronological", Boolean.TRUE);
        citProperties.put("BibtexKeyCitations", Boolean.FALSE);
        citProperties.put("ItalicEtAl", Boolean.FALSE);

        // Now overlay the defaults with whatever the style definition specifies:
        initialize(in);
    }

    public String getName() {
        return name;
    }

    public File getFile() {
        return styleFile;
    }

    /** Returns an unmodifiable view of the journal names this style declares itself valid for. */
    public Set<String> getJournals() {
        return Collections.unmodifiableSet(journals);
    }

    // Resets the style name and (re)parses the style definition from the reader.
    private void initialize(Reader in) throws IOException {
        name = null;
        readFormatFile(in);
    }

    /**
     * If this style was initialized from a file on disk, reload the style
     * if the file has been modified since it was read.
* @throws Exception */ public void ensureUpToDate() throws Exception { if (!isUpToDate()) { reload(); } } /** * If this style was initialized from a file on disk, reload the style * information. * @throws Exception */ private void reload() throws Exception { if (styleFile != null) { OOBibStyle.styleFileModificationTime = (styleFile).lastModified(); initialize(new FileReader(styleFile)); } } /** * If this style was initialized from a file on disk, check whether the file * is unmodified since initialization. * @return true if the file has not been modified, false otherwise. */ private boolean isUpToDate() { if (styleFile != null) { return styleFile.lastModified() == OOBibStyle.styleFileModificationTime; } else { return true; } } private void readFormatFile(Reader in) throws IOException { // First read all the contents of the file: StringBuffer sb = new StringBuffer(); int c; while ((c = in.read()) != -1) { sb.append((char) c); } // Break into separate lines: String[] lines = sb.toString().split("\n"); int mode = OOBibStyle.NONE; for (String line1 : lines) { String line = line1; if ((line.length() > 0) && (line.charAt(line.length() - 1) == '\r')) { line = line.substring(0, line.length() - 1); } // Check for empty line or comment: if ((line.trim().length() == 0) || (line.charAt(0) == ' continue; } // Check if we should change mode: if (line.equals(OOBibStyle.NAME_MARK)) { mode = OOBibStyle.NAME; continue; } else if (line.equals(OOBibStyle.LAYOUT_MRK)) { mode = OOBibStyle.LAYOUT; continue; } else if (line.equals(OOBibStyle.PROPERTIES_MARK)) { mode = OOBibStyle.PROPERTIES; continue; } else if (line.equals(OOBibStyle.CITATION_MARK)) { mode = OOBibStyle.CITATION; continue; } else if (line.equals(OOBibStyle.JOURNALS_MARK)) { mode = OOBibStyle.JOURNALS; continue; } switch (mode) { case NAME: if (line.trim().length() > 0) { name = line.trim(); } case LAYOUT: handleStructureLine(line); break; case PROPERTIES: handlePropertiesLine(line, properties); break; case CITATION: 
handlePropertiesLine(line, citProperties); break; case JOURNALS: handleJournalsLine(line); } } // Set validity boolean based on whether we found anything interesting // in the file: if (mode != OOBibStyle.NONE) { valid = true; } } /** * After initalizing this style from a file, this method can be used to check * whether the file appeared to be a proper style file. * @return true if the file could be parsed as a style file, false otherwise. */ public boolean isValid() { return valid; } /** * Parse a line providing bibliography structure information for an entry type. * @param line The string containing the structure description. * @throws IOException */ private void handleStructureLine(String line) { int index = line.indexOf("="); if ((index > 0) && (index < (line.length() - 1))) { String formatString = line.substring(index + 1); //System.out.println("'"+line.substring(0, index)+"' : '"+formatString+"'"); boolean setDefault = line.substring(0, index).equals(OOBibStyle.DEFAULT_MARK); String type = line.substring(0, index); try { /*typeS = new Short(Short.parseShort(type)); OOBibFormatParser parser = new OOBibFormatParser(new StringReader(formatString)); PropertyValue[][] layout = parser.parse();*/ Layout layout = new LayoutHelper(new StringReader(formatString)). getLayoutFromText(Globals.FORMATTER_PACKAGE); if (setDefault) { defaultBibLayout = layout; } else { bibLayout.put(type.toLowerCase(), layout); } } catch (Exception ex) { ex.printStackTrace(); } } } /** * Parse a line providing a property name and value. * @param line The line containing the formatter names. 
* @throws IOException
     */
    private void handlePropertiesLine(String line, HashMap<String, Object> map) {
        int index = line.indexOf("=");
        if ((index > 0) && (index <= (line.length() - 1))) {
            String propertyName = line.substring(0, index).trim();
            String value = line.substring(index + 1);
            // Strip surrounding double quotes from quoted values:
            if ((value.trim().length() > 2) && quoted.matcher(value.trim()).matches()) {
                value = value.trim().substring(1, value.trim().length() - 1);
            }
            // Coerce integers and booleans; anything else is stored as a String:
            Object toSet = value;
            if (numPattern.matcher(value).matches()) {
                toSet = Integer.parseInt(value);
            } else if (value.toLowerCase().trim().equals("true")) {
                toSet = Boolean.TRUE;
            } else if (value.toLowerCase().trim().equals("false")) {
                toSet = Boolean.FALSE;
            }
            map.put(propertyName, toSet);
        }
    }

    /**
     * Parse a line providing a journal name for which this style is valid.
     * @param line
     * @throws IOException
     */
    private void handleJournalsLine(String line) {
        if (line.trim().length() > 0) {
            journals.add(line.trim());
        }
    }

    // Returns the layout for the given entry type, falling back to the default layout.
    public Layout getReferenceFormat(String type) {
        Layout l = bibLayout.get(type.toLowerCase());
        if (l != null) {
            return l;
        } else {
            return defaultBibLayout;
        }
    }

    /**
     * Get the array of elements composing the reference for a given entry type.
     * @param bibType The OO type number.
     * @return The format definition.
    public PropertyValue[][] getReferenceFormat(short bibType) {
        Object o = bibLayout.get(new Short(bibType));
        if (o != null)
            return (PropertyValue[][])o;
        else
            return defaultBibLayout;
    }*/

    /**
     * Format a number-based citation marker for the given number.
     * @param number The citation numbers.
     * @return The text for the citation.
*/
    public String getNumCitationMarker(int[] number, int minGroupingCount, boolean inList) {
        // Brackets may be overridden for the reference-list context via the *InList properties:
        String bracketBefore = (String) citProperties.get("BracketBefore");
        if (inList && (citProperties.get("BracketBeforeInList") != null)) {
            bracketBefore = (String) citProperties.get("BracketBeforeInList");
        }
        String bracketAfter = (String) citProperties.get("BracketAfter");
        if (inList && (citProperties.get("BracketAfterInList") != null)) {
            bracketAfter = (String) citProperties.get("BracketAfterInList");
        }
        // Sort the numbers:
        int[] lNum = new int[number.length];
        System.arraycopy(number, 0, lNum, 0, lNum.length);
        //Arrays.copyOf(number, number.length);
        Arrays.sort(lNum);
        StringBuilder sb = new StringBuilder(bracketBefore);
        // combineFrom marks the start of a consecutive run being collapsed (e.g. "2-5"); -1 = none.
        int combineFrom = -1, written = 0;
        for (int i = 0; i < lNum.length; i++) {
            int i1 = lNum[i];
            if (combineFrom < 0) {
                // Check if next entry is the next in the ref list:
                if ((i < (lNum.length - 1)) && (lNum[i + 1] == (i1 + 1))) {
                    combineFrom = i1;
                } else {
                    // Add single entry:
                    if (i > 0) {
                        sb.append((String) citProperties.get("CitationSeparator"));
                    }
                    // Non-positive numbers indicate unresolved citations:
                    sb.append(lNum[i] > 0 ? String.valueOf(lNum[i]) : OOBibStyle.UNDEFINED_CITATION_MARKER);
                    written++;
                }
            } else {
                // We are building a list of combined entries.
                // Check if it ends here:
                if ((i == (lNum.length - 1)) || (lNum[i + 1] != (i1 + 1))) {
                    if (written > 0) {
                        sb.append((String) citProperties.get("CitationSeparator"));
                    }
                    if ((minGroupingCount > 0) && (((i1 + 1) - combineFrom) >= minGroupingCount)) {
                        // Run is long enough to abbreviate, e.g. "2-5":
                        sb.append(combineFrom);
                        sb.append((String) citProperties.get("GroupedNumbersSeparator"));
                        sb.append(i1);
                        written++;
                    } else {
                        // Either we should never group, or there aren't enough
                        // entries in this case to group. Output all:
                        for (int jj = combineFrom; jj <= i1; jj++) {
                            sb.append(jj);
                            if (jj < i1) {
                                sb.append((String) citProperties.get("CitationSeparator"));
                            }
                            written++;
                        }
                    }
                    combineFrom = -1;
                }
                // If it doesn't end here, just keep iterating.
            }
        }
        sb.append(bracketAfter);
        return sb.toString();
    }

    /**
     * Format the marker for the in-text citation according to this bib style.
     *
     * @param entry The JabRef BibtexEntry providing the data.
     * @param inParenthesis Signals whether a parenthesized citation or an in-text citation is wanted.
     * @param uniquefier String to add behind the year in case it's needed to separate similar
     *   entries.
     * @return The formatted citation.
     */
    public String getCitationMarker(BibtexEntry entry, BibtexDatabase database, boolean inParenthesis,
            String uniquefier, int unlimAuthors) {
        // Delegates to the array-based overload with single-element arrays.
        return getCitationMarker(new BibtexEntry[] {entry}, database, inParenthesis,
                new String[] {uniquefier}, new int[] {unlimAuthors});
    }

    /**
     * Format the marker for the in-text citation according to this bib style. Uniquefier letters are added as
     * provided by the uniquefiers argument. If successive entries within the citation are uniquefied from each other,
     * this method will perform a grouping of these entries.
     *
     * @param entries The array of JabRef BibtexEntry providing the data.
     * @param inParenthesis Signals whether a parenthesized citation or an in-text citation is wanted.
     * @param uniquefiers Strings to add behind the year for each entry in case it's needed to separate similar
     *   entries.
     * @param unlimAuthors Boolean for each entry. If true, we should not use "et al" formatting regardless
     *   of the number of authors. Can be null to indicate that no entries should have unlimited names.
     * @return The formatted citation.
     */
    public String getCitationMarker(BibtexEntry[] entries, BibtexDatabase database, boolean inParenthesis,
            String[] uniquefiers, int[] unlimAuthors) {
        // Look for groups of uniquefied entries that should be combined in the output.
        // E.g. (Olsen, 2005a, b) should be output instead of (Olsen, 2005a; Olsen, 2005b).
int piv = -1; String tmpMarker = null; if (uniquefiers != null) { for (int i = 0; i < uniquefiers.length; i++) { if ((uniquefiers[i] != null) && (uniquefiers[i].length() > 0)) { String authorField = (String) citProperties.get("AuthorField"); int maxAuthors = (Integer) citProperties.get("MaxAuthors"); if (piv == -1) { piv = i; tmpMarker = getAuthorYearParenthesisMarker(new BibtexEntry[] {entries[i]}, database, authorField, (String) citProperties.get("YearField"), maxAuthors, (String) citProperties.get("AuthorSeparator"), (String) citProperties.get("AuthorLastSeparator"), (String) citProperties.get("EtAlString"), (String) citProperties.get("YearSeparator"), (String) citProperties.get("BracketBefore"), (String) citProperties.get("BracketAfter"), (String) citProperties.get("CitationSeparator"), null, unlimAuthors); //System.out.println("piv="+piv+" tmpMarker='"+tmpMarker+"'"); } else { // See if this entry can go into a group with the previous one: String thisMarker = getAuthorYearParenthesisMarker(new BibtexEntry[] {entries[i]}, database, authorField, (String) citProperties.get("YearField"), maxAuthors, (String) citProperties.get("AuthorSeparator"), (String) citProperties.get("AuthorLastSeparator"), (String) citProperties.get("EtAlString"), (String) citProperties.get("YearSeparator"), (String) citProperties.get("BracketBefore"), (String) citProperties.get("BracketAfter"), (String) citProperties.get("CitationSeparator"), null, unlimAuthors); String author = getCitationMarkerField(entries[i], database, authorField); AuthorList al = AuthorList.getAuthorList(author); //System.out.println("i="+i+" thisMarker='"+thisMarker+"'"); int prevALim = i > 0 ? unlimAuthors[i - 1] : unlimAuthors[0]; if (!thisMarker.equals(tmpMarker) || ((al.size() > maxAuthors) && (unlimAuthors[i] != prevALim))) { // No match. Update piv to exclude the previous entry. 
But first check if the // previous entry was part of a group: if ((piv > -1) && (i > (piv + 1))) { // Do the grouping: group(entries, uniquefiers, piv, i - 1, (String) citProperties.get("UniquefierSeparator")); } tmpMarker = thisMarker; piv = i; } } } else { // This entry has no uniquefier. // Check if we just passed a group of more than one entry with uniquefier: if ((piv > -1) && (i > (piv + 1))) { // Do the grouping: group(entries, uniquefiers, piv, i - 1, (String) citProperties.get("UniquefierSeparator")); } piv = -1; } } // Finished with the loop. See if the last entries form a group: if (piv >= 0) { // Do the grouping: group(entries, uniquefiers, piv, uniquefiers.length - 1, (String) citProperties.get("UniquefierSeparator")); } } if (inParenthesis) { return getAuthorYearParenthesisMarker(entries, database, (String) citProperties.get("AuthorField"), (String) citProperties.get("YearField"), (Integer) citProperties.get("MaxAuthors"), (String) citProperties.get("AuthorSeparator"), (String) citProperties.get("AuthorLastSeparator"), (String) citProperties.get("EtAlString"), (String) citProperties.get("YearSeparator"), (String) citProperties.get("BracketBefore"), (String) citProperties.get("BracketAfter"), (String) citProperties.get("CitationSeparator"), uniquefiers, unlimAuthors); } else { String authorLastSeparator = (String) citProperties.get("AuthorLastSeparator"); String alsInText = (String) citProperties.get("AuthorLastSeparatorInText"); if (alsInText != null) { authorLastSeparator = alsInText; } return getAuthorYearInTextMarker(entries, database, (String) citProperties.get("AuthorField"), (String) citProperties.get("YearField"), (Integer) citProperties.get("MaxAuthors"), (String) citProperties.get("AuthorSeparator"), authorLastSeparator, (String) citProperties.get("EtAlString"), (String) citProperties.get("InTextYearSeparator"), (String) citProperties.get("BracketBefore"), (String) citProperties.get("BracketAfter"), (String) 
citProperties.get("CitationSeparator"), uniquefiers, unlimAuthors); } } /** * Modify entry and uniqiefier arrays to facilitate a grouped presentation of uniqiefied entries. * @param entries The entry array. * @param uniquefiers The uniquefier array. * @param from The first index to group (inclusive) * @param to The last index to group (inclusive) * @param separator The separator for the uniquefier letters. */ private void group(BibtexEntry[] entries, String[] uniquefiers, int from, int to, String separator) { StringBuilder sb = new StringBuilder(uniquefiers[from]); for (int i = from + 1; i <= to; i++) { sb.append(separator); sb.append(uniquefiers[i]); entries[i] = null; } uniquefiers[from] = sb.toString(); } /** * This method produces (Author, year) style citation strings in many different forms. * * @param entries The array of BibtexEntry to get fields from. * @param authorField The bibtex field providing author names, e.g. "author" or "editor". * @param yearField The bibtex field providing the year, e.g. "year". * @param maxA The maximum number of authors to write out in full without using etal. Set to * -1 to always write out all authors. * @param authorSep The String to add between author names except the last two, e.g. ", ". * @param andString The String to add between the two last author names, e.g. " & ". * @param etAlString The String to represent authors that are not mentioned, e.g. " et al." * @param yearSep The String to separate authors from year, e.g. "; ". * @param startBrace The opening parenthesis. * @param endBrace The closing parenthesis. * @param citationSeparator The String to separate citations from each other. * @param uniquifiers Optional parameter to separate similar citations. Elements can be null if not needed. * @return The formatted citation. 
*/ private String getAuthorYearParenthesisMarker(BibtexEntry[] entries, BibtexDatabase database, String authorField, String yearField, int maxA, String authorSep, String andString, String etAlString, String yearSep, String startBrace, String endBrace, String citationSeparator, String[] uniquifiers, int[] unlimAuthors) { StringBuffer sb = new StringBuffer(startBrace); for (int j = 0; j < entries.length; j++) { int unlimA = (unlimAuthors != null ? unlimAuthors[j] : -1); int maxAuthors = unlimA > 0 ? unlimA : maxA; BibtexEntry entry = entries[j]; // Check if this entry has been nulled due to grouping with the previous entry(ies): if (entry == null) { continue; } if (j > 0) { sb.append(citationSeparator); } String author = getCitationMarkerField(entry, database, authorField); if (author != null) { AuthorList al = AuthorList.getAuthorList(author); sb.append(getAuthorLastName(al, 0)); if ((al.size() > 1) && ((al.size() <= maxAuthors) || (maxAuthors < 0))) { int i = 1; while (i < (al.size() - 1)) { sb.append(authorSep); sb.append(getAuthorLastName(al, i)); i++; } sb.append(andString); sb.append(getAuthorLastName(al, al.size() - 1)); } else if (al.size() > maxAuthors) { sb.append(etAlString); } sb.append(yearSep); } String year = getCitationMarkerField(entry, database, yearField); if (year != null) { sb.append(year); } if ((uniquifiers != null) && (uniquifiers[j] != null)) { sb.append(uniquifiers[j]); } } sb.append(endBrace); return sb.toString(); } /** * This method produces "Author (year)" style citation strings in many different forms. * * @param entries The array of BibtexEntry to get fields from. * @param authorField The bibtex field providing author names, e.g. "author" or "editor". * @param yearField The bibtex field providing the year, e.g. "year". * @param maxA The maximum number of authors to write out in full without using etal. Set to * -1 to always write out all authors. * @param authorSep The String to add between author names except the last two, e.g. ", ". 
* @param andString The String to add between the two last author names, e.g. " & ". * @param etAlString The String to represent authors that are not mentioned, e.g. " et al." * @param yearSep The String to separate authors from year, e.g. "; ". * @param startBrace The opening parenthesis. * @param endBrace The closing parenthesis. * @param uniquefiers Optional parameters to separate similar citations. Can be null if not needed. * @return The formatted citation. */ private String getAuthorYearInTextMarker(BibtexEntry[] entries, BibtexDatabase database, String authorField, String yearField, int maxA, String authorSep, String andString, String etAlString, String yearSep, String startBrace, String endBrace, String citationSeparator, String[] uniquefiers, int[] unlimAuthors) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < entries.length; i++) { int unlimA = (unlimAuthors != null ? unlimAuthors[i] : -1); int maxAuthors = unlimA > 0 ? unlimA : maxA; // Check if this entry has been nulled due to grouping with the previous entry(ies): if (entries[i] == null) { continue; } if (i > 0) { sb.append(citationSeparator); } String author = getCitationMarkerField(entries[i], database, authorField); if (author != null) { AuthorList al = AuthorList.getAuthorList(author); if (al.size() > 0) { sb.append(getAuthorLastName(al, 0)); } if ((al.size() > 1) && ((al.size() <= maxAuthors) || (maxAuthors < 0))) { int j = 1; while (j < (al.size() - 1)) { sb.append(authorSep); sb.append(getAuthorLastName(al, j)); j++; } sb.append(andString); sb.append(getAuthorLastName(al, al.size() - 1)); } else if (al.size() > maxAuthors) { sb.append(etAlString); } sb.append(yearSep); } sb.append(startBrace); String year = getCitationMarkerField(entries[i], database, yearField); if (year != null) { sb.append(year); } if ((uniquefiers != null) && (uniquefiers[i] != null)) { sb.append(uniquefiers[i]); } sb.append(endBrace); } return sb.toString(); } /** * This method looks up a field for en entry in a 
database. Any number of backup fields can be used * if the primary field is empty. * @param entry The entry. * @param database The database the entry belongs to. * @param field The field, or succession of fields, to look up. If backup fields are needed, separate * field names by /. E.g. to use "author" with "editor" as backup, specify "author/editor". * @return The resolved field content, or an empty string if the field(s) were empty. */ private String getCitationMarkerField(BibtexEntry entry, BibtexDatabase database, String field) { String[] fields = field.split("/"); for (String s : fields) { String content = BibtexDatabase.getResolvedField(s, entry, database); if ((content != null) && (content.trim().length() > 0)) { if (fieldFormatter != null) { content = fieldFormatter.format(content); } return content; } } // No luck? Return an empty string: return ""; } /** * Look up the nth author and return the proper last name for citation markers. * @param al The author list. * @param number The number of the author to return. * @return The author name, or an empty String if inapplicable. 
*/ private String getAuthorLastName(AuthorList al, int number) { StringBuilder sb = new StringBuilder(); if (al.size() > number) { AuthorList.Author a = al.getAuthor(number); if ((a.getVon() != null) && (a.getVon().length() > 0)) { String von = a.getVon(); sb.append(von); /*sb.append(von.substring(0, 1).toUpperCase()); if (von.length() > 1) sb.append(von.substring(1));*/ sb.append(' '); } sb.append(a.getLast()); } return sb.toString(); } /** * Take a finished citation and insert a string at the end (but inside the end bracket) * separated by "PageInfoSeparator" * @param citation * @param pageInfo * @return */ public String insertPageInfo(String citation, String pageInfo) { String bracketAfter = getStringCitProperty("BracketAfter"); if (citation.endsWith(bracketAfter)) { String first = citation.substring(0, citation.length() - bracketAfter.length()); return first + getStringCitProperty("PageInfoSeparator") + pageInfo + bracketAfter; } else { return citation + getStringCitProperty("PageInfoSeparator") + pageInfo; } } /** * Convenience method for checking the property for whether we use number citations or * author-year citations. * @return true if we use numbered citations, false otherwise. */ public boolean isNumberEntries() { return (Boolean) getProperty("IsNumberEntries"); } /** * Convenience method for checking the property for whether we sort the bibliography * according to their order of appearance in the text. * @return true to sort by appearance, false to sort alphabetically. */ public boolean isSortByPosition() { return (Boolean) getProperty("IsSortByPosition"); } /** * Convenience method for checking whether citation markers should be italicised. * Will only be relevant if isFormatCitations() returns true. * @return true to indicate that citations should be in italics. */ public boolean isItalicCitations() { return (Boolean) citProperties.get("ItalicCitations"); } /** * Convenience method for checking whether citation markers should be bold. 
* Will only be relevant if isFormatCitations() returns true. * @return true to indicate that citations should be in bold. */ public boolean isBoldCitations() { return (Boolean) citProperties.get("BoldCitations"); } /** * Convenience method for checking whether citation markers formatted * according to the results of the isItalicCitations() and * isBoldCitations() methods. * @return true to indicate that citations should be in italics. */ public boolean isFormatCitations() { return (Boolean) citProperties.get("FormatCitations"); } public boolean isBibtexKeyCiteMarkers() { return (Boolean) citProperties.get("BibtexKeyCitations"); } /** * Get boolean property. * @param key The property key * @return the value */ public boolean getBooleanCitProperty(String key) { return (Boolean) citProperties.get(key); } public int getIntCitProperty(String key) { return (Integer) citProperties.get(key); } public String getStringCitProperty(String key) { return (String) citProperties.get(key); } public String getCitationCharacterFormat() { return (String) citProperties.get("CitationCharacterFormat"); } /** * Get a style property. * @param name The property name. * @return The property value, or null if it doesn't exist. */ public Object getProperty(String name) { return properties.get(name); } @Override public int compareTo(OOBibStyle other) { return getName().compareTo(other.getName()); } @Override public boolean equals(Object o) { if(o != null) { return styleFile.equals(((OOBibStyle) o).styleFile); } else { return false; } } }
package org.amc.util;

import java.util.ArrayList;
import java.util.List;

/**
 * Default {@link Subject} implementation that keeps an ordered list of
 * registered observers and broadcasts update messages to them.
 *
 * @author Adrian McLaughlin
 * @version 1.1
 */
public class DefaultSubject implements Subject {

    /** Registered observers, notified in registration order. */
    private final List<Observer> observers;

    /**
     * Constructor for DefaultSubject.java
     */
    public DefaultSubject() {
        super();
        observers = new ArrayList<>();
    }

    /**
     * Registers an observer to receive future notifications.
     *
     * @see org.amc.util.Subject#attachObserver(org.amc.util.Observer)
     */
    public void attachObserver(Observer observer) {
        observers.add(observer);
    }

    /**
     * Notifies all currently registered observers.
     * <p>
     * Iterates over a snapshot copy so observers may attach or remove
     * observers from within {@link Observer#update} without causing a
     * {@code ConcurrentModificationException}. (The previous version also
     * cleared the snapshot afterwards, which was dead work on a local
     * variable and has been removed.)
     *
     * @param message payload passed to each observer; may be null
     * @see org.amc.util.Subject#notifyObservers(java.lang.Object)
     */
    public void notifyObservers(Object message) {
        List<Observer> snapshot = new ArrayList<>(observers);
        for (Observer observer : snapshot) {
            observer.update(this, message);
        }
    }

    /**
     * Unregisters a single observer; has no effect if it was never attached.
     *
     * @param observer the observer to remove
     */
    public void removeObserver(Observer observer) {
        observers.remove(observer);
    }

    /** Unregisters every observer. */
    @Override
    public void removeAllObservers() {
        observers.clear();
    }
}
package org.drpowell.vcf;

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.drpowell.util.CustomPercentEncoder;

/**
 * Representation of a single row of a VCF file.
 *
 * INFO flag fields will be set to the special value 'FLAG_INFO' if set.
 *
 * @author bpow
 */
public class VCFVariant {
    // INFO column parsed into an insertion-ordered key -> values map.
    private Map<String, String[]> info;
    // Raw QUAL column text (see constructor; parsed lazily by getQual()).
    private String qual;
    private String [] row;
    private int start; // fixme should this be final?
    private int end;
    private boolean urlEncode = true;
    // Lazily-computed genotype log-likelihoods; volatile for double-checked locking.
    private volatile double [][] logLikelihoods;
    // Sentinel (compared by identity) marking a valueless INFO flag.
    private static final String [] FLAG_INFO = new String[0];
    private static final CustomPercentEncoder INFO_ENCODER =
            CustomPercentEncoder.allowAsciiPrintable(true).recodeAdditionalCharacters(" ;=".toCharArray());

    public VCFVariant(String line) {
        // -1 keeps trailing empty columns.
        this(line.split("\t", -1));
    }

    public VCFVariant(String [] row) {
        this.row = row; // FIXME - should defensive copy?
        start = Integer.parseInt(row[VCFParser.VCFFixedColumns.POS.ordinal()]);
        end = start + getRef().length() - 1;
        // Bug fix: 'qual' was previously never assigned anywhere, so getQual()
        // always threw a NullPointerException. Initialize it from the QUAL column.
        // (Assumes VCFFixedColumns declares QUAL for the standard 6th VCF column,
        // as it does for the other fixed columns used here — TODO confirm.)
        qual = row[VCFParser.VCFFixedColumns.QUAL.ordinal()];
        info = splitInfoField(row[VCFParser.VCFFixedColumns.INFO.ordinal()]);
    }

    /**
     * Parses a raw INFO column into an ordered map; flag entries (no '=') map to FLAG_INFO.
     * Duplicated keys are logged (the later value wins), as the VCF spec forbids them.
     */
    public static Map<String, String[]> splitInfoField(String info) {
        Map<String, String[]> map = new LinkedHashMap<String, String[]>();
        if (".".equals(info)) {
            // "." is the VCF placeholder for an empty INFO column.
            return map;
        }
        String [] entries = info.split(";");
        for (String entry : entries) {
            String [] keyvalue = entry.split("=", 2);
            if (map.containsKey(keyvalue[0])) {
                String message = "VCF spec does not allow for duplicated keys [ " + keyvalue[0]
                        + " ] in the INFO field of a VCF:\n " + info;
                Logger.getLogger(VCFVariant.class.getName()).log(Level.WARNING, message);
                //throw new RuntimeException(message);
            }
            if (keyvalue.length == 1) {
                map.put(keyvalue[0], FLAG_INFO);
            } else {
                map.put(keyvalue[0], keyvalue[1].split(","));
            }
        }
        return map;
    }

    /** Serializes an INFO map back to the VCF column format ("." when empty). */
    public static String joinInfo(Map<String, String []> info) {
        if (info.size() == 0) {
            return ".";
        }
        StringBuilder sb = new StringBuilder();
        for (Entry<String, String[]> e : info.entrySet()) {
            if (e.getValue() == FLAG_INFO) {
                sb.append(e.getKey()).append(";");
            } else {
                sb.append(e.getKey()).append("=")
                  .append(join(",", decodeInfo(false, e.getValue()))).append(";");
            }
        }
        return sb.substring(0, sb.length() - 1); // no need for the last semicolon
    }

    /**
     * Add an item to the VCF variant.
     *
     * @param key - the ID of the data, this should be defined in the VCF header
     * @param values - one or more values (if null or empty, this entry will be treated as a Flag)
     * @return this VCFVariant, to facilitate chaining
     */
    public VCFVariant putInfo(String key, String... values) {
        // Bug fix: the length check must come before values[0], otherwise an empty
        // varargs call threw ArrayIndexOutOfBoundsException.
        if (null == values || values.length == 0 || null == values[0] || "".equals(values[0])) {
            values = FLAG_INFO;
        } else {
            values = encodeInfo(urlEncode, values);
        }
        info.put(key, values);
        return this;
    }

    /** Adds a valueless flag entry to the INFO map. */
    public VCFVariant putInfoFlag(String key) {
        info.put(key, FLAG_INFO);
        return this;
    }

    /**
     * @return the QUAL column parsed as a Double.
     * Note: throws NumberFormatException for the missing-value placeholder "." —
     * callers should check for it if missing QUALs are possible.
     */
    public Double getQual() {
        return Double.valueOf(qual);
    }

    /** Re-serializes the INFO map into the raw row before the row is rendered. */
    private void updateInfo() {
        row[VCFParser.VCFFixedColumns.INFO.ordinal()] = joinInfo(info);
    }

    public String toString() {
        updateInfo();
        StringBuilder sb = new StringBuilder(row[0]);
        for (int i = 1; i < row.length; i++) {
            sb.append("\t").append(row[i]);
        }
        return sb.toString();
    }

    /**
     * Returns the value of one of the fixed columns of a vcf file.
     * @see VCFParser.VCFFixedColumns
     */
    public String getFixedColumn(int i) {
        if (i >= VCFParser.VCFFixedColumns.SIZE) {
            throw new NoSuchElementException("Tried to access an invalid column in a VCF file");
        }
        return row[i];
    }

    /** @return the CHROM column. */
    public String getSequence() {
        return row[VCFParser.VCFFixedColumns.CHROM.ordinal()];
    }

    /** @return 1-based start position (POS column). */
    public int getStart() {
        return start;
    }

    /** @return inclusive end position, derived from POS and the REF allele length. */
    public int getEnd() {
        return end;
    }

    public String getID() {
        return row[VCFParser.VCFFixedColumns.ID.ordinal()];
    }

    public String getRef() {
        return row[VCFParser.VCFFixedColumns.REF.ordinal()];
    }

    public String getAlt() {
        return row[VCFParser.VCFFixedColumns.ALT.ordinal()];
    }

    public String getFilter() {
        return row[VCFParser.VCFFixedColumns.FILTER.ordinal()];
    }

    public String getFormat() {
        return row[VCFParser.VCFFixedColumns.FORMAT.ordinal()];
    }

    /** @return the index of {@code key} within the FORMAT column, or -1 if absent. */
    private final int findFormatItemIndex(String key) {
        String [] format = getFormat().split(":");
        for (int i = 0; i < format.length; i++) {
            if (key.equals(format[i])) return i;
        }
        return -1;
    }

    /**
     * Converts log10 genotype likelihoods (GL) into normalized phred-scaled
     * likelihoods (PL), capped at 255 with the minimum shifted to 0.
     */
    public static int[] PLfromGL(double [] GLs) {
        final int[] pls = new int[GLs.length];
        int min = 255;
        for (int i = 0; i < GLs.length; i++) {
            pls[i] = Math.min((int) Math.round(-10 * GLs[i]), 255);
            min = Math.min(pls[i], min);
        }
        for (int i = 0; i < GLs.length; i++) {
            pls[i] -= min;
        }
        return pls;
    }

    /**
     * Extracts per-sample likelihoods from the GL field or, failing that, the PL field
     * (converting PL back to the GL log10 scale). Returns an empty array when neither
     * field is present; samples lacking a call get a null sub-array.
     */
    private double [][] extractLikelihoods() {
        // i indexes sample, j indexes individual likelihood
        boolean foundGL = false;
        int index = findFormatItemIndex("GL");
        if (index >= 0) {
            foundGL = true;
        } else {
            index = findFormatItemIndex("PL");
            if (index < 0) {
                // didn't find GL or PL... but if we were to return 'null', someone might try again
                return new double[0][0];
            }
        }
        String [] calls = getCalls();
        double [][] res = new double[calls.length][];
        for (int i = 0; i < res.length; i++) {
            String [] callFields = calls[i].split(":");
            if (index >= callFields.length) {
                // no call for this sample
                res[i] = null;
            } else {
                res[i] = VCFUtils.parseDoubleList(callFields[index]);
                if (!foundGL) {
                    // PL = -10 * log10(L), so divide by -10 to recover the GL scale.
                    for (int j = 0; j < res[i].length; j++) {
                        res[i][j] /= -10.0;
                    }
                }
            }
        }
        return res;
    }

    /**
     * Lazily computes and caches the genotype likelihood matrix using
     * double-checked locking on the volatile {@code logLikelihoods} field.
     *
     * @return the matrix, or null if the variant has no GL/PL data.
     */
    public double [][] getGenotypeLikelihoods() {
        double [][] result = logLikelihoods;
        if (null == result) {
            synchronized (this) {
                result = logLikelihoods;
                if (null == result) {
                    result = logLikelihoods = extractLikelihoods();
                }
            }
        }
        if (result.length == 0) return null;
        return result;
    }

    /** @return an unmodifiable view of the raw row columns. */
    public List<String> getRow() {
        return Collections.unmodifiableList(Arrays.asList(row));
    }

    /** Sets the ID column unless it already holds a (different) non-"." value. */
    public VCFVariant mergeID(String newID) {
        int idcol = VCFParser.VCFFixedColumns.ID.ordinal();
        String oldID = row[idcol];
        if (!".".equals(oldID)) {
            if (oldID.equals(newID)) {
                return this;
            }
            // should probably log this -- changing a previously-written rsID
        }
        row[idcol] = newID;
        return this;
    }

    /** @return the per-sample columns following the fixed columns (possibly empty). */
    public String [] getCalls() {
        int num = row.length - VCFParser.VCFFixedColumns.SIZE;
        if (num <= 0) {
            return new String[0];
        } else {
            return Arrays.copyOfRange(row, VCFParser.VCFFixedColumns.SIZE, row.length);
        }
    }

    /** @return the GT portion of the given sample's call, or null if FORMAT lacks a leading GT. */
    public String getGenotype(int sampleIndex) {
        if (!getFormat().startsWith("GT")) return null; // FIXME log? exception?
        String call = row[sampleIndex + VCFParser.VCFFixedColumns.SIZE];
        int colon = call.indexOf(':');
        return colon < 0 ? call : call.substring(0, colon);
    }

    /**
     * Rewrites a single GT call with the requested phasing: phase &gt; 0 yields "a|b",
     * phase &lt; 0 yields "b|a", phase == 0 yields unphased "a/b" (alleles sorted
     * ascending). Non-numeric or delimiter-less calls are returned unchanged.
     */
    private String phaseCall(String oldCall, int phase) {
        int delim = oldCall.indexOf('/');
        if (delim < 0) delim = oldCall.indexOf('|');
        if (delim < 0) delim = oldCall.indexOf('\\');
        if (delim < 0) {
            Logger.getLogger(getClass().getName())
                    .fine("Unable to phase [" + oldCall + "] because I could not find a delimiter");
            return oldCall;
        }
        try {
            int a = Integer.parseInt(oldCall.substring(0, delim));
            int b = Integer.parseInt(oldCall.substring(delim + 1));
            if (b < a) {
                // make sure a is less than b
                int tmp = a;
                a = b;
                b = tmp;
            }
            String outDelim = phase == 0 ? "/" : "|";
            if (phase < 0) {
                return Integer.toString(b) + outDelim + Integer.toString(a);
            } else {
                return Integer.toString(a) + outDelim + Integer.toString(b);
            }
        } catch (NumberFormatException nfe) {
            Logger.getLogger(VCFVariant.class.getName())
                    .log(Level.FINE, "Tried to phase a non-numeric call: " + oldCall);
        }
        return oldCall;
    }

    /**
     * Sets the phasing of the GT field for the given samples.
     *
     * @param sampleIndices indices of the samples to re-phase
     * @param phases one phase value per sample (see phaseCall for semantics)
     * @return this VCFVariant (mutated in place)
     */
    public VCFVariant setPhases(int [] sampleIndices, int [] phases) {
        // TODO - decide if I really want this to be mutable or to return a new VCFVariant
        if (sampleIndices.length != phases.length) {
            throw new RuntimeException("attempted to set phases for samplenum != phasenum");
        }
        if (!getFormat().startsWith("GT:")) {
            throw new RuntimeException(
                    "GT must be the first element of VCF file per the spec (if present), unable to set phase as requested");
        }
        int offset = VCFParser.VCFFixedColumns.SIZE;
        for (int i = 0; i < phases.length; i++) {
            String sampleRecord = row[offset + sampleIndices[i]];
            int colonPos = sampleRecord.indexOf(':');
            if (colonPos < 0) colonPos = sampleRecord.length();
            String call = phaseCall(sampleRecord.substring(0, colonPos), phases[i]);
            row[offset + sampleIndices[i]] = call + sampleRecord.substring(colonPos);
        }
        return this;
    }

    /** @return the decoded INFO values for {@code key}, or null if the key is absent. */
    public String [] getInfoValues(boolean urlDecode, String key) {
        return decodeInfo(urlDecode, info.get(key));
    }

    public String getInfoValue(String key) {
        return getInfoValue(key, true);
    }

    /**
     * Return the value within the INFO dictionary for a given key, optionally performing
     * urlDecoding.
     *
     * @param key the INFO key
     * @return null if key not present, "" for flag fields, the (joined) value otherwise
     */
    public String getInfoValue(String key, boolean urlDecode) {
        String [] vals = info.get(key);
        if (vals == FLAG_INFO) return "";
        if (vals == null) return null;
        vals = decodeInfo(urlDecode, vals);
        return join(",", vals);
    }

    public boolean hasInfo(String key) {
        return info.containsKey(key);
    }

    /**
     * Percent-decodes the given values when requested; the input array is never mutated.
     * Bug fix: a null array (e.g. from a missing INFO key via getInfoValues) previously
     * threw a NullPointerException when urlDecode was true; it is now passed through.
     */
    public static final String [] decodeInfo(boolean urlDecode, String... values) {
        if (urlDecode && values != null) {
            String [] decoded = new String[values.length];
            for (int i = 0; i < values.length; i++) {
                decoded[i] = INFO_ENCODER.decode(values[i]);
            }
            values = decoded;
        }
        return values;
    }

    /** Percent-encodes the given values when requested; the input array is never mutated. */
    public static final String [] encodeInfo(boolean urlEncode, String... values) {
        if (urlEncode) {
            String [] encoded = new String[values.length];
            for (int i = 0; i < values.length; i++) {
                encoded[i] = INFO_ENCODER.encode(values[i]);
            }
            values = encoded;
        }
        return values;
    }

    /**
     * Joins strings with the given separator.
     * Bug fix: the previous implementation ignored the {@code sep} parameter and always
     * joined with "," (benign for the callers in this class, which all pass ",", but
     * wrong for any other separator).
     */
    private static String join(String sep, String... strings) {
        if (strings.length == 0) return "";
        if (strings.length == 1) return strings[0];
        StringBuilder sb = new StringBuilder(strings[0]);
        for (int i = 1; i < strings.length; i++) {
            sb.append(sep).append(strings[i]);
        }
        return sb.toString();
    }
}
package org.jtrfp.trcl.core; import java.awt.Color; import java.awt.image.BufferedImage; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.IntBuffer; import java.util.concurrent.Callable; import javax.imageio.ImageIO; import javax.media.opengl.GL3; import org.jtrfp.trcl.SpecialRAWDimensions; import org.jtrfp.trcl.core.VQCodebookManager.RasterRowWriter; import org.jtrfp.trcl.gpu.GPU; import org.jtrfp.trcl.img.vq.ByteBufferVectorList; import org.jtrfp.trcl.img.vq.PalettedVectorList; import org.jtrfp.trcl.img.vq.RGBA8888VectorList; import org.jtrfp.trcl.img.vq.RasterizedBlockVectorList; import org.jtrfp.trcl.img.vq.SubtextureVL; import org.jtrfp.trcl.img.vq.VectorList; import org.jtrfp.trcl.img.vq.VectorListRasterizer; import org.jtrfp.trcl.math.Misc; import org.jtrfp.trcl.mem.PagedByteBuffer; public class Texture implements TextureDescription { private final TR tr; private final GPU gpu; private final TextureManager tm ; private final VQCodebookManager cbm; private final TextureTOCWindow toc; private final SubTextureWindow stw; private Color averageColor; private final String debugName; private Integer tocIndex; private int[] subTextureIDs; private int[][] codebookStartOffsetsAbsolute; private ByteBuffer rgba; private final boolean uvWrapping; private volatile int texturePage; private int width; @Override public void finalize() throws Throwable{ System.out.println("Texture.finalize() "+debugName); //TOC ID if(tocIndex!=null) toc.free(tocIndex); //Subtexture IDs if(subTextureIDs!=null) for(int stID:subTextureIDs) stw.free(stID); //Codebook entries if(codebookStartOffsetsAbsolute!=null) for(int [] array:codebookStartOffsetsAbsolute){ for(int entry:array){ tm.vqCodebookManager.get().freeCodebook256(entry/256); }//end for(entries) }//end for(arrays) super.finalize(); }//end finalize() Texture(Color c, TR tr){ this(new PalettedVectorList(colorZeroRasterVL(), 
colorVL(c)),"SolidColor r="+c.getRed()+" g="+c.getGreen()+" b="+c.getBlue(),tr,false); }//end constructor private static VectorList colorZeroRasterVL(){ return new VectorList(){ @Override public int getNumVectors() { return 16; } @Override public int getNumComponentsPerVector() { return 1; } @Override public double componentAt(int vectorIndex, int componentIndex) { return 0; } @Override public void setComponentAt(int vectorIndex, int componentIndex, double value) { throw new RuntimeException("Cannot write to Texture.colorZeroRasterVL VectorList"); }}; }//end colorZeroRasterVL private static VectorList colorVL(Color c){ final double [] color = new double[]{ c.getRed()/255.,c.getGreen()/255.,c.getBlue()/255.,1.}; return new VectorList(){ @Override public int getNumVectors() { return 1; } @Override public int getNumComponentsPerVector() { return 4; } @Override public double componentAt(int vectorIndex, int componentIndex) { return color[componentIndex]; } @Override public void setComponentAt(int vectorIndex, int componentIndex, double value) { throw new RuntimeException("Static palette created by Texture(Color c, TR tr) cannot be written to."); }}; }//end colorVL(...) 
    /**
     * Internal base constructor: caches the GPU-side managers and windows this
     * texture needs. Dots in the debug name are replaced with underscores.
     */
    private Texture(TR tr, String debugName, boolean uvWrapping){
	this.tr=tr;
	this.gpu =tr.gpu.get();
	this.tm =gpu.textureManager.get();
	this.cbm =tm.vqCodebookManager.get();
	this.toc =tm.getTOCWindow();
	this.stw =tm.getSubTextureWindow();
	this.debugName =debugName.replace('.', '_');
	this.uvWrapping =uvWrapping;
    }//end constructor

    /**
     * Sub-texture constructor. NOTE(review): the uOff/vOff/uSize/vSize
     * arguments are currently ignored — only the debug name is derived from
     * the parent; the region parameters are never applied. TODO confirm intent.
     */
    private Texture(Texture parent, double uOff, double vOff, double uSize,
	    double vSize, TR tr, boolean uvWrapping) {
	this(tr,"subtexture: "+parent.debugName,uvWrapping);
    }//end constructor

    /**
     * Creates a non-wrapping sub-texture of this texture covering the given
     * UV region (see the region-parameter caveat on the private constructor).
     */
    public Texture subTexture(double uOff, double vOff, double uSize, double vSize){
	return new Texture(this,uOff,vOff,uSize,vSize,tr,false);
    }

    /** Builds a texture from a paletted (indexed) square image and VQ-compresses it. */
    Texture(PalettedVectorList vl, String debugName, TR tr, boolean uvWrapping){
	this(tr,debugName,uvWrapping);
	vqCompress(vl);
    }//end constructor

    /**
     * Builds a texture from a square RGBA8888 byte buffer and VQ-compresses it.
     * @throws IllegalArgumentException if the buffer is empty.
     */
    Texture(ByteBuffer imageRGBA8888, String debugName, TR tr, boolean uvWrapping) {
	this(tr,debugName,uvWrapping);
	if (imageRGBA8888.capacity() == 0) {
	    throw new IllegalArgumentException(
		    "Cannot create texture of zero size.");
	}//end if capacity==0
	imageRGBA8888.clear();//Doesn't erase, just resets the tracking vars
	vqCompress(imageRGBA8888);
    }// end constructor

    /**
     * Derives the square side length from the vector count and compresses.
     * Warns (does not fail) on non-power-of-two or non-square dimensions.
     */
    private void vqCompress(PalettedVectorList squareImageIndexed){
	final double fuzzySideLength = Math.sqrt(squareImageIndexed.getNumVectors());
	final int sideLength = (int)Math.floor(fuzzySideLength);
	if(!SpecialRAWDimensions.isPowerOfTwo(sideLength))
	    System.err.println("WARNING: Calculated dimensions are not power-of-two. Trouble ahead.");
	if(Math.abs(fuzzySideLength-sideLength)>.001)
	    System.err.println("WARNING: Calculated dimensions are not perfectly square. Trouble ahead.");
	vqCompress(squareImageIndexed,sideLength);
    }

    /**
     * Derives the square side length from the RGBA8888 buffer (4 bytes/pixel),
     * wraps it as an RGBA vector list, and compresses.
     */
    private void vqCompress(ByteBuffer imageRGBA8888){
	final double fuzzySideLength = Math.sqrt((imageRGBA8888.capacity() / 4));
	final int sideLength = (int)Math.floor(fuzzySideLength);
	if(!SpecialRAWDimensions.isPowerOfTwo(sideLength))
	    System.err.println("WARNING: Calculated dimensions are not power-of-two. Trouble ahead.");
	if(Math.abs(fuzzySideLength-sideLength)>.001)
	    System.err.println("WARNING: Calculated dimensions are not perfectly square. Trouble ahead.");
	// Break down into 4x4 blocks
	final ByteBufferVectorList bbvl = new ByteBufferVectorList(imageRGBA8888);
	final RGBA8888VectorList rgba8888vl = new RGBA8888VectorList(bbvl);
	vqCompress(rgba8888vl,sideLength);
    }

    /**
     * Core VQ compression. Allocates a TOC entry and subtexture/codebook
     * storage, writes the TOC and subtexture tables on the GPU-memory-access
     * thread, then pushes the texel data into the codebook pages.
     * Asynchronous: allocation and upload run on the thread pool; the nested
     * GPU-memory callable is awaited with .get() before texels are pushed.
     */
    private final void vqCompress(VectorList rgba8888vl, final int sideLength){
	width=sideLength;
	// Side length measured in VQ codes, clamped to at least 1.
	final int diameterInCodes = (int)Misc.clamp((double)sideLength/(double)VQCodebookManager.CODE_SIDE_LENGTH, 1, Integer.MAX_VALUE);
	final int diameterInSubtextures = (int)Math.ceil((double)diameterInCodes/(double)SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER);
	final RasterizedBlockVectorList rbvl = new RasterizedBlockVectorList(
		rgba8888vl, sideLength, 4);
	final VectorListRasterizer vlr = new VectorListRasterizer(rbvl, new int [] {diameterInCodes,diameterInCodes});
	// Calculate a rough average color by averaging random samples.
	calulateAverageColor(rbvl);
	// Get a TOC
	tocIndex = toc.create();
	setTexturePage((toc.getPhysicalAddressInBytes(tocIndex)/PagedByteBuffer.PAGE_SIZE_BYTES));
	// TOC entries must start exactly on a page boundary; anything else is a logic error.
	if(toc.getPhysicalAddressInBytes(tocIndex)%PagedByteBuffer.PAGE_SIZE_BYTES!=0)
	    throw new RuntimeException("Physical GPU address not perfectly aligned with page interval.");
	tr.getThreadManager().submitToThreadPool(new Callable<Void>(){
	    @Override
	    public Void call() throws Exception {
		// Create subtextures
		subTextureIDs = new int[diameterInSubtextures*diameterInSubtextures];
		codebookStartOffsetsAbsolute = new int[diameterInSubtextures*diameterInSubtextures][6];
		for(int i=0; i<subTextureIDs.length; i++){
		    //Create subtexture ID
		    subTextureIDs[i]=stw.create();
		    // Six 256-entry codebook pages per subtexture; offsets stored as absolute entry indices.
		    for(int off=0; off<6; off++){
			codebookStartOffsetsAbsolute[i][off] = tm.vqCodebookManager.get()
				.newCodebook256() * 256;}
		}//end for(subTextureIDs)
		tr.getThreadManager().submitToGPUMemAccess(new Callable<Void>() {
		    @Override
		    public final Void call() {
			//Set magic
			toc.magic.set(tocIndex, 1337);
			for(int i=0; i<subTextureIDs.length; i++){
			    final int id = subTextureIDs[i];
			    //Convert subtexture index to index of TOC
			    final int tocSubTexIndex = (i%diameterInSubtextures)+(i/diameterInSubtextures)*TextureTOCWindow.WIDTH_IN_SUBTEXTURES;
			    //Load subtexture ID into TOC
			    toc.subtextureAddrsVec4.setAt(tocIndex, tocSubTexIndex,stw.getPhysicalAddressInBytes(id)/GPU.BYTES_PER_VEC4);
			    //Render Flags (bit 0 = uv wrapping enabled)
			    toc.renderFlags.set(tocIndex,
				    (uvWrapping?0x1:0x0) );
			    //Fill the subtexture code start offsets
			    for(int off=0; off<6; off++)
				stw.codeStartOffsetTable.setAt(id, off,
					codebookStartOffsetsAbsolute[i][off]);
			}//end for(subTextureIDs)
			// Set the TOC vars
			toc.height	.set(tocIndex, sideLength);
			toc.width	.set(tocIndex, sideLength);
			setCodes(diameterInCodes, diameterInSubtextures);
			return null;
		    }// end run()

		    //REQUIRES GPU MEM ACCESS
		    // Writes, for every code cell in the texture, its index byte into the owning subtexture.
		    private final void setCodes(int diameterInCodes, int diameterInSubtextures){
			final int numCodes = diameterInCodes*diameterInCodes;
			for(int i = 0; i < numCodes; i++){
			    final int codeX = i % diameterInCodes;
			    final int codeY = i / diameterInCodes;
			    setCodeAt(codeX,codeY);
			}//end for(numCodes)
		    }//end setCodes()

		    //REQUIRES GPU MEM ACCESS
		    // Maps a global code coordinate to (subtexture, local code index) and stores the
		    // local index byte (mod 256, matching the 256-entry page layout) at that slot.
		    private final void setCodeAt(int codeX, int codeY){
			final int subtextureX     = codeX / SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int subtextureY     = codeY / SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int subtextureCodeX = codeX % SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int subtextureCodeY = codeY % SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int codeIdx         = subtextureCodeX + subtextureCodeY * SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int subTextureIdx   = subtextureX + subtextureY * diameterInSubtextures;
			final int subtextureID    = subTextureIDs[subTextureIdx];
			new SubtextureVL(stw, subtextureID).setComponentAt(codeIdx, 0, (byte)(codeIdx%256));//TODO: Could make a lot of garbage.
		    }//end setCodeAt()
		}).get();//end gpuMemThread
		// Push texels to codebook
		for(int codeY=0; codeY<diameterInCodes; codeY++){
		    for(int codeX=0; codeX<diameterInCodes; codeX++){
			final int subtextureX     = codeX / SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int subtextureY     = codeY / SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int subTextureIdx   = subtextureX + subtextureY * diameterInSubtextures;
			final int subtextureCodeX = codeX % SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int subtextureCodeY = codeY % SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			final int codeIdx         = subtextureCodeX + subtextureCodeY * SubTextureWindow.SIDE_LENGTH_CODES_WITH_BORDER;
			// codeIdx/256 selects which of the six pages; codeIdx%256 is the offset within it.
			final int globalCodeIndex = codeIdx%256 + codebookStartOffsetsAbsolute[subTextureIdx][codeIdx/256];
			setCodebookTexelsAt(codeX,codeY,diameterInCodes, globalCodeIndex);
		    }//end for(codeX)
		}//end for(codeY)
		return null;
	    }

	    // Rasterizes one 4x4 RGBA code block (16 bytes per row) into the codebook
	    // entry at globalCodeIndex. Components come from vlr scaled from 0..1 to 0..255.
	    private void setCodebookTexelsAt(int codeX, int codeY,
		    int diameterInCodes, int globalCodeIndex) {
		final int coord[] = new int[]{codeX,codeY};
		final RasterRowWriter rw = new RasterRowWriter() {
		    @Override
		    public void applyRow(int row, ByteBuffer dest) {
			int position = row * 16;
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
			dest.put((byte) (vlr
				.componentAt(coord, position++) * 255.));
		    }// end applyRow
		};
		try {
		    cbm.setRGBA(globalCodeIndex, rw);
		} catch (ArrayIndexOutOfBoundsException e) {
		    throw new RuntimeException("this=" + Texture.this.toString(), e);
		}//end catch(ArrayIndexOutOfBoundsException)
	    }// end setCodebookTexelsAt
	}).get();// end pool thread
    }//end vqCompress(...)

    /**
     * Estimates the average color from 10 random samples per channel.
     * Nondeterministic by design (Math.random()); not an exact average.
     * NOTE(review): each channel is sampled at independent random pixels.
     */
    private void calulateAverageColor(RasterizedBlockVectorList rbvl) {
	float redA=0,greenA=0,blueA=0;
	final double size = rbvl.getNumVectors();
	for(int i=0; i<10; i++){
	    redA+=rbvl.componentAt((int)(Math.random()*size), 0);
	    greenA+=rbvl.componentAt((int)(Math.random()*size), 1);
	    blueA+=rbvl.componentAt((int)(Math.random()*size), 2);
	}averageColor = new Color(redA/10f,greenA/10f,blueA/10f);
    }//end calculateAverageColor(...)

    /**
     * Builds a texture from a BufferedImage: converts to an RGBA8888 direct
     * buffer, computes the exact average color, then VQ-compresses.
     * NOTE(review): rgba is passed to vqCompress without a rewind; vqCompress
     * reads via capacity()/ByteBufferVectorList so position appears irrelevant — confirm.
     */
    Texture(BufferedImage img, String debugName, TR tr, boolean uvWrapping) {
	this(tr,debugName,uvWrapping);
	long redA = 0, greenA = 0, blueA = 0;
	rgba = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 4);
	for (int y = 0; y < img.getHeight(); y++) {
	    for (int x = 0; x < img.getWidth(); x++) {
		Color c = new Color(img.getRGB(x, y), true);
		rgba.put((byte) c.getRed());
		rgba.put((byte) c.getGreen());
		rgba.put((byte) c.getBlue());
		rgba.put((byte) c.getAlpha());
		redA += c.getRed();
		greenA += c.getGreen();
		blueA += c.getBlue();
	    }// end for(x)
	}// end for(y)
	final int div = rgba.capacity() / 4;
	averageColor = new Color((redA / div) / 255f,
		(greenA / div) / 255f, (blueA / div) / 255f);
	vqCompress(rgba);
    }//end constructor

    /**
     * Reads a PNG file into an RGBA8888 buffer.
     * Returns null (after printing the stack trace) if the file is missing.
     */
    public static ByteBuffer RGBA8FromPNG(File f) {
	try {
	    return RGBA8FromPNG(new FileInputStream(f));
	} catch (FileNotFoundException e) {
	    e.printStackTrace();
	    return null;
	}
    }

    /**
     * Reads a PNG stream into an RGBA8888 buffer covering the whole image.
     * Returns null (after printing the stack trace) on any decode failure.
     */
    public static ByteBuffer RGBA8FromPNG(InputStream is) {
	try {
	    BufferedImage bi = ImageIO.read(is);
	    return RGBA8FromPNG(bi, 0, 0, bi.getWidth(), bi.getHeight());
	} catch (Exception e) {
	    e.printStackTrace();
	}
	return null;
    }//end RGBA8FromPNG(...)

    /**
     * Packs the given region of an image into a direct RGBA8888 buffer
     * (R,G,B,A byte order), rewound and ready for reading.
     * NOTE(review): the buffer is sized by the full image, not the region — confirm.
     */
    public static ByteBuffer RGBA8FromPNG(BufferedImage image, int startX,
	    int startY, int sizeX, int sizeY) {
	int color;
	ByteBuffer buf = ByteBuffer.allocateDirect(image.getWidth()
		* image.getHeight() * 4);
	for (int y = startY; y < startY + sizeY; y++) {
	    for (int x = startX; x < startX + sizeX; x++) {
		color = image.getRGB(x, y);
		buf.put((byte) ((color & 0x00FF0000) >> 16));
		buf.put((byte) ((color & 0x0000FF00) >> 8));
		buf.put((byte) (color & 0x000000FF));
		buf.put((byte) ((color & 0xFF000000) >> 24));
	    }// end for(x)
	}// end for(y)
	buf.clear();// Rewind
	return buf;
    }// end RGB8FromPNG(...)

    /** 256-entry greyscale palette: GREYSCALE[i] == new Color(i,i,i). */
    public static final Color[] GREYSCALE;
    static {
	GREYSCALE = new Color[256];
	for (int i = 0; i < 256; i++) {
	    GREYSCALE[i] = new Color(i, i, i);
	}
    }// end static{}

    /**
     * Extracts one quadtree cell (x,y) at the given depth from a square
     * RGBA8888 buffer, row by row, into a new direct buffer.
     * Mutates input's position/limit while copying.
     */
    public static ByteBuffer fragmentRGBA(ByteBuffer input, int quadDepth,
	    int x, int y) {
	final int originalSideLen = (int) Math.sqrt(input.capacity() / 4);
	final int splitAmount = (int) Math.pow(2, quadDepth);
	final int newSideLen = originalSideLen / splitAmount;
	ByteBuffer result = ByteBuffer.allocateDirect((int) (Math.pow(
		newSideLen, 2) * 4));
	for (int row = y * newSideLen; row < (y + 1) * newSideLen; row++) {
	    input.clear();
	    input.limit((x + 1) * newSideLen * 4 + row * originalSideLen * 4);
	    input.position(x * newSideLen * 4 + row * originalSideLen * 4);
	    result.put(input);
	}
	return result;
    }// end fragmentRGBA(...)

    /**
     * Expands an indexed-color buffer to RGBA8888 using the given palette.
     * Consumes indexedPixels from its current position; the returned
     * (heap) buffer is rewound.
     */
    public static ByteBuffer indexed2RGBA8888(ByteBuffer indexedPixels,
	    Color[] palette) {
	Color color;
	ByteBuffer buf = ByteBuffer.allocate(indexedPixels.capacity() * 4);
	final int cap = indexedPixels.capacity();
	for (int i = 0; i < cap; i++) {
	    color = palette[(indexedPixels.get() & 0xFF)];
	    buf.put((byte) color.getRed());
	    buf.put((byte) color.getGreen());
	    buf.put((byte) color.getBlue());
	    buf.put((byte) color.getAlpha());
	}// end for(i)
	buf.clear();// Rewind
	return buf;
    }// end indexed2RGBA8888(...)
public static ByteBuffer[] indexed2RGBA8888(ByteBuffer[] indexedPixels, Color[] palette) { final int len = indexedPixels.length; ByteBuffer[] result = new ByteBuffer[len]; for (int i = 0; i < len; i++) { result[i] = indexed2RGBA8888(indexedPixels[i], palette); } return result; }// end indexed2RGBA8888(...) /** * @return the uvWrapping */ public boolean isUvWrapping() { return uvWrapping; } /** * @return the texturePage */ public int getTexturePage() { return texturePage; } /** * @param texturePage the texturePage to set */ public void setTexturePage(int texturePage) { this.texturePage = texturePage; } @Override public Color getAverageColor() { return averageColor; } public static final int createTextureID(GL3 gl) { IntBuffer ib = IntBuffer.allocate(1); gl.glGenTextures(1, ib); ib.clear(); return ib.get(); }//end createTextureID @Override public String toString(){ return "Texture debugName="+debugName+" width="+width; } }// end Texture
package org.lantern;

import java.io.File;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.codehaus.jackson.type.TypeReference;
import org.lantern.proxy.DefaultProxyTracker;
import org.lantern.proxy.FallbackProxy;
import org.lantern.util.HttpClientFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Optional;

/**
 * Health check for fallback proxies: loads the fallback list from the S3
 * config referenced by the given config folder, then tries to proxy a known
 * URL through each fallback in turn. If any fallback fails, an alert script
 * is invoked with the failing addresses. The process exits with the number
 * of failed fallbacks as its status code.
 */
public class FallbackChecker implements Runnable {

    private final DefaultProxyTracker proxyTracker;
    private List<FallbackProxy> fallbacks = new ArrayList<FallbackProxy>();
    /** Alert script run (with failing hosts as arguments) when any fallback fails. */
    private static final String ALERTCMD_PATH =
            "/home/lantern/alert_fallbacks_failing_to_proxy.py";
    // FIX(review): the original literal was truncated to "http:", which does not
    // compile. Reconstructed as Google's humans.txt, whose body begins with
    // TEST_RESULT_PREFIX below — confirm against deployment configuration.
    private static final String TEST_URL = "http://www.google.com/humans.txt";
    /** Expected prefix of the body fetched from TEST_URL when proxying works. */
    private static final String TEST_RESULT_PREFIX = "Google is built by";
    private static final Logger LOG = LoggerFactory
            .getLogger(FallbackChecker.class);

    /**
     * @param proxyTracker     tracker used to install each fallback under test
     * @param configFolderPath path to a readable file containing the S3 config URL
     * @throws IllegalArgumentException if the config file is missing/unreadable
     * @throws RuntimeException if the config URL/config cannot be fetched or parsed
     */
    public FallbackChecker(DefaultProxyTracker proxyTracker,
                           String configFolderPath) {
        this.proxyTracker = proxyTracker;
        populateFallbacks(configFolderPath);
    }

    /** Loads the fallback proxy list from the remote S3 config. */
    private void populateFallbacks(String configFolderPath) {
        final File file = new File(configFolderPath);
        if (!(file.exists() && file.canRead())) {
            throw new IllegalArgumentException(
                    "Cannot read file: " + configFolderPath);
        }

        Optional<String> url = S3ConfigFetcher.readUrlFromFile(file);
        if (!url.isPresent()) {
            throw new RuntimeException("url not present");
        }

        Optional<String> config = S3ConfigFetcher.fetchRemoteConfig(url.get());
        if (!config.isPresent()) {
            throw new RuntimeException("config not present");
        }

        try {
            fallbacks = JsonUtils.OBJECT_MAPPER.readValue(config.get(),
                    new TypeReference<List<FallbackProxy>>() {});
        } catch (final Exception e) {
            // Preserve the cause so the parse failure's stack trace survives.
            throw new RuntimeException(
                    "Could not parse json:\n" + config.get(), e);
        }
    }

    /**
     * Tests every fallback, alerts on failures, and terminates the JVM with
     * the failure count as exit status (0 == all fallbacks healthy).
     */
    @Override
    public void run() {
        List<String> failed = new ArrayList<String>();
        try {
            // sleep a bit to make sure everything's ready before we start
            Thread.sleep(20000);

            int nsucceeded = 0;
            proxyTracker.clear();
            for (FallbackProxy fb : fallbacks) {
                // Install only the fallback under test so the request must go through it.
                proxyTracker.addSingleFallbackProxy(fb);
                final String addr = fb.getWanHost();
                LOG.info("testing proxying through fallback: " + addr);
                boolean working = false;
                try {
                    working = canProxyThroughCurrentFallback();
                } catch (Exception e) {
                    LOG.warn("proxying through fallback " + addr + " failed:\n"
                            + e.toString());
                    failed.add(addr);
                }
                if (working) {
                    LOG.info("proxying through fallback " + addr + " succeeded");
                    ++nsucceeded;
                }
                proxyTracker.clear();
            }

            int nfailed = failed.size();
            LOG.info(String.format("Finished checking fallbacks:\n"
                            + "nsucceeded: %d\n"
                            + "nfailed: %d\n"
                            + "total: %d",
                    nsucceeded, nfailed, nsucceeded + nfailed));

            if (nfailed > 0) {
                // Alert script receives the failing hosts as its arguments.
                failed.add(0, ALERTCMD_PATH);
                new ProcessBuilder(failed).start();
            }
            System.exit(nfailed);
        } catch (Exception e) {
            // FIX(review): was e.printStackTrace(); route through the logger instead.
            LOG.error("fallback check aborted", e);
        }
    }

    /**
     * Fetches TEST_URL through the currently installed fallback.
     *
     * @return true if the response body starts with TEST_RESULT_PREFIX
     * @throws Exception if the request fails or the body does not match
     */
    private boolean canProxyThroughCurrentFallback() throws Exception {
        final HttpClient client = HttpClientFactory.newProxiedClient();
        final HttpGet get = new HttpGet(TEST_URL);
        InputStream is = null;
        try {
            final HttpResponse res = client.execute(get);
            is = res.getEntity().getContent();
            final String content = IOUtils.toString(is);
            if (StringUtils.startsWith(content, TEST_RESULT_PREFIX)) {
                return true;
            } else {
                throw new Exception(
                        "response for " + TEST_URL + " did not match expectation\n"
                        + "expected: " + TEST_RESULT_PREFIX + "\n"
                        + "observed: " + content);
            }
        } finally {
            IOUtils.closeQuietly(is);
            get.reset();
        }
    }
}
package org.recap; import org.apache.activemq.camel.component.ActiveMQComponent; import org.apache.camel.CamelContext; import org.apache.camel.builder.RouteBuilder; import org.recap.repository.XmlRecordRepository; import org.recap.route.JMSReportRouteBuilder; import org.recap.route.XmlRouteBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; @Component public class ReCAPCamelContext { Logger logger = LoggerFactory.getLogger(ReCAPCamelContext.class); CamelContext context; XmlRecordRepository xmlRecordRepository; private String xmlTagName; private String inputDirectoryPath; private Integer poolSize; private Integer maxPoolSize; private JMSReportRouteBuilder jmsReportRouteBuilder; private XmlRouteBuilder xmlRouteBuilder; @Autowired public ReCAPCamelContext(CamelContext context, XmlRecordRepository xmlRecordRepository, @Value("${etl.split.xml.tag.name}") String xmlTagName, @Value("${etl.load.directory}") String inputDirectoryPath, @Value("${etl.pool.size}") Integer poolSize, @Value("${etl.max.pool.size}") Integer maxPoolSize) { this.context = context; this.xmlRecordRepository = xmlRecordRepository; this.xmlTagName = xmlTagName; this.inputDirectoryPath = inputDirectoryPath; this.poolSize = poolSize; this.maxPoolSize = maxPoolSize; init(); } private void init() { try { addComponents(); addDefaultRoutes(); } catch (Exception e) { logger.error("Exception : " + e.getMessage()); } } public void addRoutes(RouteBuilder routeBuilder) throws Exception { context.addRoutes(routeBuilder); } public void addDefaultRoutes() throws Exception { addRoutes(getXmlRouteBuilder()); addRoutes(getJMSReportBuilder()); } private JMSReportRouteBuilder getJMSReportBuilder() { if (null == jmsReportRouteBuilder) { jmsReportRouteBuilder = new JMSReportRouteBuilder(); } return jmsReportRouteBuilder; } private void 
addComponents() { context.addComponent("activemq", ActiveMQComponent.activeMQComponent("vm://localhost?broker.persistent=false")); } public RouteBuilder getXmlRouteBuilder() { if (null == xmlRouteBuilder) { xmlRouteBuilder = new XmlRouteBuilder(); xmlRouteBuilder.setXmlTagName(xmlTagName); xmlRouteBuilder.setInputDirectoryPath(inputDirectoryPath); xmlRouteBuilder.setPoolSize(poolSize); xmlRouteBuilder.setMaxPoolSize(maxPoolSize); xmlRouteBuilder.setXmlRecordRepository(xmlRecordRepository); } return xmlRouteBuilder; } }
package org.smap; public interface SurveyConversation { public void answerCurrentQuestion(String answerText); public Object save(); public String getCurrentQuestion(); public String[] getAllQuestions(); public Boolean isComplete(); public String getAnswers(); }
package org.ggp.base.validator; import org.ggp.base.util.game.Game; import org.ggp.base.util.statemachine.MachineState; import org.ggp.base.util.statemachine.StateMachine; import org.ggp.base.util.statemachine.exceptions.GoalDefinitionException; import org.ggp.base.util.statemachine.exceptions.MoveDefinitionException; import org.ggp.base.util.statemachine.exceptions.TransitionDefinitionException; import org.ggp.base.util.statemachine.implementation.prover.ProverStateMachine; public final class SimulationValidator implements Validator { private final int maxDepth; private final int numSimulations; public SimulationValidator(int maxDepth, int numSimulations) { this.maxDepth = maxDepth; this.numSimulations = numSimulations; } @Override public void checkValidity(Game theGame) throws ValidatorException { for (int i = 0; i < numSimulations; i++) { StateMachine stateMachine = new ProverStateMachine(); stateMachine.initialize(theGame.getRules()); MachineState state = stateMachine.getInitialState(); for (int depth = 0; !stateMachine.isTerminal(state); depth++) { if (depth == maxDepth) { throw new ValidatorException("Hit max depth while simulating: " + maxDepth); } try { state = stateMachine.getRandomNextState(state); } catch (MoveDefinitionException mde) { throw new ValidatorException("Could not find legal moves while simulating: " + mde); } catch (TransitionDefinitionException tde) { throw new ValidatorException("Could not find transition definition while simulating: " + tde); } } try { stateMachine.getGoals(state); } catch (GoalDefinitionException gde) { throw new ValidatorException("Could not find goals while simulating: " + gde); } } } }
package ru.r2cloud.jradio.au02; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ru.r2cloud.jradio.BeaconSource; import ru.r2cloud.jradio.blocks.TaggedStreamToPdu; import ru.r2cloud.jradio.fec.Golay; import ru.r2cloud.jradio.fec.ccsds.Randomize; import ru.r2cloud.jradio.fec.ccsds.ReedSolomon; import ru.r2cloud.jradio.fec.ccsds.UncorrectableException; public class Au02 extends BeaconSource<Au02Beacon> { private static final Logger LOG = LoggerFactory.getLogger(Au02.class); private final Golay golay = new Golay(); public Au02(TaggedStreamToPdu input) { super(input); } @Override protected Au02Beacon parseBeacon(byte[] raw) { int lengthField = ((raw[0] & 0xFF) << 16) | ((raw[1] & 0xFF) << 8) | (raw[2] & 0xFF); try { lengthField = golay.decode(lengthField); int frameLength = lengthField & 0xFF; int viterbiFlag = lengthField & 0x100; int scramblerFlag = lengthField & 0x200; int rsFlag = lengthField & 0x400; if (LOG.isDebugEnabled()) { LOG.debug("golay decoded. frameLength: {} viterbiFlag: {}, scramblerFlag: {}, rsFlag: {}", frameLength, viterbiFlag, scramblerFlag, rsFlag); } if (frameLength + 3 > raw.length) { if (LOG.isDebugEnabled()) { LOG.debug("not enough data: {} expected: {}", raw.length, frameLength); } return null; } byte[] data = new byte[frameLength]; System.arraycopy(raw, 3, data, 0, frameLength); if (viterbiFlag > 0) { data = ru.r2cloud.jradio.fec.Viterbi.decode(data, (byte) 0x6d, (byte) 0x4f, false); } if (scramblerFlag > 0) { Randomize.shuffle(data); } if (rsFlag > 0) { data = ReedSolomon.decode(data); } Au02Beacon beacon = new Au02Beacon(); beacon.readExternal(data); return beacon; } catch (UncorrectableException e) { if (LOG.isDebugEnabled()) { LOG.debug("unable to decode reed solomon: {}", e.getMessage()); } return null; } catch (IOException e) { LOG.error("unable to parse beacon", e); return null; } } }
package seedu.address.ui;

import javafx.fxml.FXML;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.control.MenuItem;
import javafx.scene.input.KeyCombination;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import seedu.address.commons.core.Config;
import seedu.address.commons.core.GuiSettings;
import seedu.address.commons.events.ui.ExitAppRequestEvent;
import seedu.address.logic.Logic;
import seedu.address.model.UserPrefs;
import seedu.address.model.task.ReadOnlyTask;

/**
 * The Main Window. Provides the basic application layout containing
 * a menu bar and space where other JavaFX elements can be placed.
 *
 * NOTE(review): this class reads a {@code primaryStage} reference that is not
 * declared here — presumably inherited from {@code UiPart} and populated by
 * {@code UiPartLoader.loadUiPart(primaryStage, ...)} in {@link #load}. Confirm.
 */
public class MainWindow extends UiPart {

    private static final String ICON = "/images/address_book_32.png";
    private static final String FXML = "MainWindow.fxml";
    public static final int MIN_HEIGHT = 600;
    public static final int MIN_WIDTH = 450;

    private Logic logic;

    // Independent Ui parts residing in this Ui container
    private BrowserPanel browserPanel;
    private NavbarPanel navbarPanel;
    private TaskListPanel taskListPanel;
    private ResultDisplay resultDisplay;
    private StatusBarFooter statusBarFooter;
    private CommandBox commandBox;
    private Config config;
    private UserPrefs userPrefs;

    // Handles to elements of this Ui container
    private VBox rootLayout;
    private Scene scene;

    private String taskListName;

    // Placeholders injected from MainWindow.fxml by field name.
    @FXML
    private AnchorPane browserPlaceholder;

    @FXML
    private AnchorPane commandBoxPlaceholder;

    @FXML
    private MenuItem helpMenuItem;

    @FXML
    private AnchorPane navbarPanelPlaceholder;

    @FXML
    private AnchorPane taskListPanelPlaceholder;

    @FXML
    private AnchorPane resultDisplayPlaceholder;

    @FXML
    private AnchorPane statusbarPlaceholder;

    public MainWindow() {
        super();
    }

    /** Captures the FXML root node; the loader passes the VBox root here. */
    @Override
    public void setNode(Node node) {
        rootLayout = (VBox) node;
    }

    @Override
    public String getFxmlPath() {
        return FXML;
    }

    /**
     * Loads the FXML-backed main window onto the given stage and configures it
     * from the supplied config, preferences, and logic.
     */
    public static MainWindow load(Stage primaryStage, Config config, UserPrefs prefs, Logic logic) {
        MainWindow mainWindow = UiPartLoader.loadUiPart(primaryStage, new MainWindow());
        mainWindow.configure(config.getAppTitle(), config.getTaskListName(), config, prefs, logic);
        return mainWindow;
    }

    /** Stores dependencies, applies title/icon/size, and attaches the scene to the stage. */
    private void configure(String appTitle, String taskListName, Config config, UserPrefs prefs,
                           Logic logic) {
        //Set dependencies
        this.logic = logic;
        this.taskListName = taskListName;
        this.config = config;
        this.userPrefs = prefs;

        //Configure the UI
        setTitle(appTitle);
        setIcon(ICON);
        setWindowMinSize();
        setWindowDefaultSize(prefs);
        scene = new Scene(rootLayout);
        primaryStage.setScene(scene);

        setAccelerators();
    }

    /** Binds F1 to the Help menu item. */
    private void setAccelerators() {
        helpMenuItem.setAccelerator(KeyCombination.valueOf("F1"));
    }

    /** Instantiates each child UI part into its FXML placeholder. */
    void fillInnerParts() {
        browserPanel = BrowserPanel.load(browserPlaceholder);
        navbarPanel = NavbarPanel.load(primaryStage, getNavbarPlaceholder());
        taskListPanel = TaskListPanel.load(primaryStage, getTaskListPlaceholder(), logic.getFilteredTaskList());
        resultDisplay = ResultDisplay.load(primaryStage, getResultDisplayPlaceholder());
        statusBarFooter = StatusBarFooter.load(primaryStage, getStatusbarPlaceholder(), config.getTaskListFilePath());
        commandBox = CommandBox.load(primaryStage, getCommandBoxPlaceholder(), resultDisplay, logic);
    }

    private AnchorPane getCommandBoxPlaceholder() {
        return commandBoxPlaceholder;
    }

    private AnchorPane getStatusbarPlaceholder() {
        return statusbarPlaceholder;
    }

    private AnchorPane getResultDisplayPlaceholder() {
        return resultDisplayPlaceholder;
    }

    public AnchorPane getNavbarPlaceholder() {
        return navbarPanelPlaceholder;
    }

    public AnchorPane getTaskListPlaceholder() {
        return taskListPanelPlaceholder;
    }

    public void hide() {
        primaryStage.hide();
    }

    private void setTitle(String appTitle) {
        primaryStage.setTitle(appTitle);
    }

    /**
     * Sets the default size based on user preferences.
     */
    protected void setWindowDefaultSize(UserPrefs prefs) {
        primaryStage.setHeight(prefs.getGuiSettings().getWindowHeight());
        primaryStage.setWidth(prefs.getGuiSettings().getWindowWidth());
        // Coordinates may be absent on first launch; only restore when saved.
        if (prefs.getGuiSettings().getWindowCoordinates() != null) {
            primaryStage.setX(prefs.getGuiSettings().getWindowCoordinates().getX());
            primaryStage.setY(prefs.getGuiSettings().getWindowCoordinates().getY());
        }
    }

    private void setWindowMinSize() {
        primaryStage.setMinHeight(MIN_HEIGHT);
        primaryStage.setMinWidth(MIN_WIDTH);
    }

    /**
     * Returns the current size and the position of the main Window.
     */
    public GuiSettings getCurrentGuiSetting() {
        return new GuiSettings(primaryStage.getWidth(), primaryStage.getHeight(),
                (int) primaryStage.getX(), (int) primaryStage.getY());
    }

    /** Opens the help window. Invoked from the menu/F1 accelerator. */
    @FXML
    public void handleHelp() {
        HelpWindow helpWindow = HelpWindow.load(primaryStage);
        helpWindow.show();
    }

    public void show() {
        primaryStage.show();
    }

    /**
     * Closes the application.
     */
    @FXML
    private void handleExit() {
        raise(new ExitAppRequestEvent());
    }

    public NavbarPanel getNavbarPanel() {
        return this.navbarPanel;
    }

    public TaskListPanel getTaskListPanel() {
        return this.taskListPanel;
    }

    /** Shows the given task's page in the embedded browser panel. */
    public void loadTaskPage(ReadOnlyTask task) {
        browserPanel.loadTaskPage(task);
    }

    /** Frees browser resources; call before shutdown. */
    public void releaseResources() {
        browserPanel.freeResources();
    }
}
package seedu.geekeep.ui;

import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.scene.Scene;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TextInputControl;
import javafx.scene.input.KeyCombination;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Region;
import javafx.stage.Stage;
import seedu.geekeep.commons.core.Config;
import seedu.geekeep.commons.core.GuiSettings;
import seedu.geekeep.commons.events.ui.ExitAppRequestEvent;
import seedu.geekeep.commons.util.FxViewUtil;
import seedu.geekeep.logic.Logic;
import seedu.geekeep.model.UserPrefs;

/**
 * The Main Window. Provides the basic application layout containing
 * a menu bar and space where other JavaFX elements can be placed.
 */
public class MainWindow extends UiPart<Region> {

    private static final String ICON = "/images/geekeep_32.png";
    private static final String FXML = "MainWindow.fxml";
    private static final int MIN_HEIGHT = 600;
    private static final int MIN_WIDTH = 450;

    private Stage primaryStage;
    private Logic logic;

    // Independent Ui parts residing in this Ui container
    private TaskListPanel floatingTaskListPanel;
    private TaskListPanel eventListPanel;
    private TaskListPanel deadlineListPanel;
    private Config config;

    // Placeholders injected from MainWindow.fxml by field name.
    @FXML
    private AnchorPane commandBoxPlaceholder;

    @FXML
    private MenuItem helpMenuItem;

    @FXML
    private AnchorPane taskListPanelPlaceholder;

    @FXML
    private AnchorPane eventListPanelPlaceholder;

    @FXML
    private AnchorPane deadlineListPanelPlaceholder;

    @FXML
    private AnchorPane resultDisplayPlaceholder;

    @FXML
    private AnchorPane statusbarPlaceholder;

    /** Loads the FXML, stores dependencies, and configures stage title/icon/size/scene. */
    public MainWindow(Stage primaryStage, Config config, UserPrefs prefs, Logic logic) {
        super(FXML);

        // Set dependencies
        this.primaryStage = primaryStage;
        this.logic = logic;
        this.config = config;

        // Configure the UI
        setTitle(config.getAppTitle());
        setIcon(ICON);
        setWindowMinSize();
        setWindowDefaultSize(prefs);
        Scene scene = new Scene(getRoot());
        primaryStage.setScene(scene);

        setAccelerators();
    }

    public Stage getPrimaryStage() {
        return primaryStage;
    }

    private void setAccelerators() {
        setAccelerator(helpMenuItem, KeyCombination.valueOf("F1"));
    }

    /**
     * Sets the accelerator of a MenuItem.
     * @param menuItem the menu item to bind
     * @param keyCombination the KeyCombination value of the accelerator
     */
    private void setAccelerator(MenuItem menuItem, KeyCombination keyCombination) {
        menuItem.setAccelerator(keyCombination);
        // Workaround: accelerators are swallowed while a text input has focus,
        // so forward the key combination to the menu action manually.
        getRoot().addEventFilter(KeyEvent.KEY_PRESSED, event -> {
            if (event.getTarget() instanceof TextInputControl && keyCombination.match(event)) {
                menuItem.getOnAction().handle(new ActionEvent());
                event.consume();
            }
        });
    }

    /**
     * Instantiates each child UI part into its FXML placeholder.
     * NOTE(review): only eventListPanel is given the filtered task list; the
     * floating-task and deadline panels are constructed with a two-argument
     * overload and no backing list — confirm TaskListPanel provides both
     * constructors and that this is intentional (see to-do below).
     */
    void fillInnerParts() {
        //to-do: correctly display the lists
        floatingTaskListPanel = new TaskListPanel("floatingTask", getTaskListPlaceholder());
        eventListPanel = new TaskListPanel("event", getEventListPlaceholder(), logic.getFilteredTaskList());
        deadlineListPanel = new TaskListPanel("deadline", getDeadlineListPlaceholder());
        new ResultDisplay(getResultDisplayPlaceholder());
        new StatusBarFooter(getStatusbarPlaceholder(), config.getGeekeepFilePath());
        new CommandBox(getCommandBoxPlaceholder(), logic);
    }

    private AnchorPane getCommandBoxPlaceholder() {
        return commandBoxPlaceholder;
    }

    private AnchorPane getStatusbarPlaceholder() {
        return statusbarPlaceholder;
    }

    private AnchorPane getResultDisplayPlaceholder() {
        return resultDisplayPlaceholder;
    }

    private AnchorPane getTaskListPlaceholder() {
        return taskListPanelPlaceholder;
    }

    private AnchorPane getEventListPlaceholder() {
        return eventListPanelPlaceholder;
    }

    private AnchorPane getDeadlineListPlaceholder() {
        return deadlineListPanelPlaceholder;
    }

    void hide() {
        primaryStage.hide();
    }

    private void setTitle(String appTitle) {
        primaryStage.setTitle(appTitle);
    }

    /**
     * Sets the given image as the icon of the main window.
     * @param iconSource e.g. {@code "/images/help_icon.png"}
     */
    private void setIcon(String iconSource) {
        FxViewUtil.setStageIcon(primaryStage, iconSource);
    }

    /**
     * Sets the default size based on user preferences.
     */
    private void setWindowDefaultSize(UserPrefs prefs) {
        primaryStage.setHeight(prefs.getGuiSettings().getWindowHeight());
        primaryStage.setWidth(prefs.getGuiSettings().getWindowWidth());
        // Coordinates may be absent on first launch; only restore when saved.
        if (prefs.getGuiSettings().getWindowCoordinates() != null) {
            primaryStage.setX(prefs.getGuiSettings().getWindowCoordinates().getX());
            primaryStage.setY(prefs.getGuiSettings().getWindowCoordinates().getY());
        }
    }

    private void setWindowMinSize() {
        primaryStage.setMinHeight(MIN_HEIGHT);
        primaryStage.setMinWidth(MIN_WIDTH);
    }

    /**
     * Returns the current size and the position of the main Window.
     */
    GuiSettings getCurrentGuiSetting() {
        return new GuiSettings(primaryStage.getWidth(), primaryStage.getHeight(),
                (int) primaryStage.getX(), (int) primaryStage.getY());
    }

    /** Opens the help window. Invoked from the menu/F1 accelerator. */
    @FXML
    public void handleHelp() {
        HelpWindow helpWindow = new HelpWindow();
        helpWindow.show();
    }

    void show() {
        primaryStage.show();
    }

    /**
     * Closes the application.
     */
    @FXML
    private void handleExit() {
        raise(new ExitAppRequestEvent());
    }

    public TaskListPanel getTaskListPanel() {
        return this.floatingTaskListPanel;
    }

    public TaskListPanel getEventListPanel() {
        return this.eventListPanel;
    }

    public TaskListPanel getDeadlineListPanel() {
        return this.deadlineListPanel;
    }

}
package technology.tabula; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @SuppressWarnings("serial") public class TextChunk extends RectangularTextContainer<TextElement> implements HasText { public static final TextChunk EMPTY = new TextChunk(0,0,0,0); List<TextElement> textElements = new ArrayList<TextElement>(); public TextChunk(float top, float left, float width, float height) { super(top, left, width, height); } public TextChunk(TextElement textElement) { super(textElement.y, textElement.x, textElement.width, textElement.height); this.add(textElement); } public TextChunk(List<TextElement> textElements) { this(textElements.get(0)); for (int i = 1; i < textElements.size(); i++) { this.add(textElements.get(i)); } } public TextChunk merge(TextChunk other) { super.merge(other); return this; } public void add(TextElement textElement) { this.textElements.add(textElement); this.merge(textElement); } public void add(List<TextElement> textElements) { for (TextElement te: textElements) { this.add(te); } } public List<TextElement> getTextElements() { return textElements; } public String getText() { if (this.textElements.size() == 0) { return ""; } StringBuilder sb = new StringBuilder(); for (TextElement te: this.textElements) { sb.append(te.getText()); } return sb.toString(); } @Override public String getText(boolean useLineReturns) { // TODO Auto-generated method stub return null; } /** * @return true if text contained in this TextChunk is the same repeated character */ public boolean isSameChar(Character c) { return isSameChar(new Character[] { c }); } public boolean isSameChar(Character[] c) { String s = this.getText(); List<Character> chars = Arrays.asList(c); for (int i = 0; i < s.length(); i++) { if (!chars.contains(s.charAt(i))) { return false; } } return true; } /** Splits a TextChunk in two, at the position of the i-th TextElement * @param textChunk the TextChunk to split * @param i * @return Two TextChunks, contained in a TextChunk[] 
*/ public TextChunk[] splitAt(int i) { if (i < 1 || i >= this.getTextElements().size()) { throw new IllegalArgumentException(); } TextChunk[] rv = new TextChunk[] { new TextChunk(this.getTextElements().subList(0, i)), new TextChunk(this.getTextElements().subList(i, this.getTextElements().size())) }; return rv; } /** * Removes runs of identical TextElements in this TextChunk * For example, if the TextChunk contains this string of characters: "1234xxxxx56xx" * and c == 'x' and minRunLength == 4, this method will return a list of TextChunk * such that: ["1234", "56xx"] * * @param c the Character to remove * @param minRunLength minimum run length to consider. * @return */ public List<TextChunk> squeeze(Character c, int minRunLength) { Character currentChar, lastChar = null; int subSequenceLength = 0, subSequenceStart = 0; TextChunk[] t; List<TextChunk> rv = new ArrayList<TextChunk>(); for (int i = 0; i < this.getTextElements().size(); i++) { TextElement textElement = this.getTextElements().get(i); currentChar = textElement.getText().charAt(0); if (lastChar != null && currentChar.equals(c) && lastChar.equals(currentChar)) { subSequenceLength++; } else { if (((lastChar != null && !lastChar.equals(currentChar)) || i + 1 == this.getTextElements().size()) && subSequenceLength >= minRunLength) { if (subSequenceStart == 0 && subSequenceLength <= this.getTextElements().size() - 1) { t = this.splitAt(subSequenceLength); } else { t = this.splitAt(subSequenceStart); rv.add(t[0]); } rv.addAll(t[1].squeeze(c, minRunLength)); // Lo and behold, recursion. 
break; } subSequenceLength = 1; subSequenceStart = i; } lastChar = currentChar; } if (rv.isEmpty()) { // no splits occurred, hence this.squeeze() == [this] if (subSequenceLength >= minRunLength && subSequenceLength < this.textElements.size()) { TextChunk[] chunks = this.splitAt(subSequenceStart); rv.add(chunks[0]); } else { rv.add(this); } } return rv; } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = prime * result + ((textElements == null) ? 0 : textElements.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (!super.equals(obj)) return false; if (getClass() != obj.getClass()) return false; TextChunk other = (TextChunk) obj; if (textElements == null) { if (other.textElements != null) return false; } else if (!textElements.equals(other.textElements)) return false; return true; } public static boolean allSameChar(List<TextChunk> textChunks) { char first = textChunks.get(0).getText().charAt(0); for (TextChunk tc: textChunks) { if (!tc.isSameChar(first)) return false; } return true; } public static List<Line> groupByLines(List<TextChunk> textChunks) { List<Line> lines = new ArrayList<Line>(); if (textChunks.size() == 0) { return lines; } float bbwidth = Rectangle.boundingBoxOf(textChunks).width; Line l = new Line(); l.addTextChunk(textChunks.get(0)); textChunks.remove(0); lines.add(l); Line last = lines.get(lines.size() - 1); for (TextChunk te: textChunks) { if (last.verticalOverlapRatio(te) < 0.1) { if (last.width / bbwidth > 0.9 && TextChunk.allSameChar(last.getTextElements())) { lines.remove(lines.size() - 1); } lines.add(new Line()); last = lines.get(lines.size() - 1); } last.addTextChunk(te); } if (last.width / bbwidth > 0.9 && TextChunk.allSameChar(last.getTextElements())) { lines.remove(lines.size() - 1); } List<Line> rv = new ArrayList<Line>(lines.size()); for (Line line: lines) { rv.add(Line.removeRepeatedCharacters(line, ' ', 3)); } return rv; } }