answer
stringlengths
17
10.2M
package roart.database;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;

import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import roart.model.FileLocation;
import roart.model.IndexFiles;
import roart.service.ControlService;
import roart.util.ConfigConstants;
import roart.util.Constants;
import roart.util.MyLock;

/**
 * Static facade over the configured {@link IndexFilesAccess} backend
 * (Hibernate, HBase or DataNucleus), with an in-memory md5-keyed cache
 * and two "pending save" queues that are drained by {@link #commit()}.
 *
 * <p>All backend calls are serialized on the {@code IndexFilesDao.class}
 * monitor; the maps themselves are concurrent so queueing is lock-free.
 */
public class IndexFilesDao {
    private static Logger log = LoggerFactory.getLogger(IndexFilesDao.class);

    // Cache of every IndexFiles read or created through this DAO, keyed by md5.
    private static volatile ConcurrentMap<String, IndexFiles> all = new ConcurrentHashMap<String, IndexFiles>();
    // Entries queued for a durable save (lock released after saving) on commit().
    private static volatile ConcurrentMap<String, IndexFiles> dbi = new ConcurrentHashMap<String, IndexFiles>();
    // Entries queued for an interim save (lock kept) on commit().
    private static volatile ConcurrentMap<String, IndexFiles> dbitemp = new ConcurrentHashMap<String, IndexFiles>();

    // Backend chosen once by instance(); null until then.
    private static IndexFilesAccess indexFiles = null;

    /**
     * Selects the backend implementation once, based on the configured type.
     * Subsequent calls are no-ops.
     *
     * @param type one of the ConfigConstants backend identifiers
     */
    public static void instance(String type) {
        if (indexFiles == null) {
            if (type.equals(ConfigConstants.HIBERNATE)) {
                indexFiles = new HibernateIndexFilesAccess();
            }
            if (type.equals(ConfigConstants.HBASE)) {
                indexFiles = new HbaseIndexFilesAccess();
            }
            if (type.equals(ConfigConstants.DATANUCLEUS)) {
                indexFiles = new DataNucleusIndexFilesAccess();
            }
        }
    }

    // with zookeepersmall, lock must be held when entering here
    /**
     * Fetches the IndexFiles record for an md5, optionally creating a fresh
     * (unsaved) one when the backend has none.
     *
     * @param md5    content hash key; null returns null
     * @param create whether to create a new record on a backend miss
     * @return the record, or null if absent and create is false
     */
    public static IndexFiles getByMd5(String md5, boolean create) throws Exception {
        if (md5 == null) {
            return null;
        }
        // Cache short-circuit deliberately disabled (dead branch kept as a
        // record of the earlier strategy).
        if (false && !ControlService.zookeepersmall) {
            if (all.containsKey(md5)) {
                return all.get(md5);
            }
        }
        synchronized (IndexFilesDao.class) {
            IndexFiles i = indexFiles.getByMd5(md5);
            if (i == null && create) {
                i = new IndexFiles(md5);
            }
            if (i != null) {
                all.put(md5, i);
            }
            return i;
        }
    }

    /** As {@link #getByMd5(String, boolean)} with create = true. */
    public static IndexFiles getByMd5(String md5) throws Exception {
        return getByMd5(md5, true);
    }

    /** As {@link #getByMd5(String, boolean)} with create = false. */
    public static IndexFiles getExistingByMd5(String md5) throws Exception {
        return getByMd5(md5, false);
    }

    /**
     * Returns the file locations recorded for an md5, or null for a null key.
     */
    public static Set<FileLocation> getFilelocationsByMd5(String md5) throws Exception {
        if (md5 == null) {
            return null;
        }
        synchronized (IndexFilesDao.class) {
            return indexFiles.getFilelocationsByMd5(md5);
        }
    }

    /** Looks up by filename on this node. ("Not" suffix: legacy naming.) */
    public static IndexFiles getByFilenameNot(String filename) throws Exception {
        String nodename = ControlService.nodename;
        FileLocation fl = new FileLocation(nodename, filename);
        synchronized (IndexFilesDao.class) {
            return indexFiles.getByFilelocation(fl);
        }
    }

    /** Looks up by an explicit file location. */
    public static IndexFiles getByFilelocationNot(FileLocation fl) throws Exception {
        synchronized (IndexFilesDao.class) {
            return indexFiles.getByFilelocation(fl);
        }
    }

    /** Resolves the md5 recorded for a filename on this node. */
    public static String getMd5ByFilename(String filename) throws Exception {
        String nodename = ControlService.nodename;
        FileLocation fl = new FileLocation(nodename, filename);
        synchronized (IndexFilesDao.class) {
            return indexFiles.getMd5ByFilelocation(fl);
        }
    }

    /**
     * Loads every record from the backend and refreshes the cache with them.
     */
    public static List<IndexFiles> getAll() throws Exception {
        //all.clear();
        Set<String> allKeys = all.keySet();
        synchronized (IndexFilesDao.class) {
            List<IndexFiles> iAll = indexFiles.getAll();
            for (IndexFiles i : iAll) {
                if (allKeys.contains(i.getMd5())) {
                    //continue;
                }
                all.put(i.getMd5(), i);
            }
            return iAll;
        }
    }

    /** Returns all md5 keys known to the backend. */
    public static Set<String> getAllMd5() throws Exception {
        synchronized (IndexFilesDao.class) {
            Set<String> md5All = indexFiles.getAllMd5();
            return md5All;
        }
    }

    /** Returns the distinct languages recorded by the backend. */
    public static Set<String> getLanguages() throws Exception {
        synchronized (IndexFilesDao.class) {
            Set<String> languages = indexFiles.getLanguages();
            return languages;
        }
    }

    /*
    public static IndexFiles ensureExistence(String md5) throws Exception {
        IndexFiles fi = getByMd5(md5);
        if (fi == null) {
            indexFilesJpa.ensureExistence(md5);
        }
        return fi;
    }
    */

    /** Legacy stub; intentionally does nothing and returns null. */
    public static IndexFiles ensureExistenceNot(FileLocation filename) throws Exception {
        /*
        IndexFiles fi = getByMd5(md5);
        if (fi == null) {
            indexFilesJpa.ensureExistence(md5);
        }
        */
        return null;
    }

    /**
     * Persists a record if it is dirty; on success clears the dirty flag and
     * resets priority. Failures are logged, never thrown.
     */
    public static void save(IndexFiles i) {
        if (i.hasChanged()) {
            try {
                synchronized (IndexFilesDao.class) {
                    indexFiles.save(i);
                }
                log.info("saving pri " + i.getPriority() + " " + i.getMd5());
                i.setUnchanged();
                i.setPriority(0);
            } catch (Exception e) {
                log.info("failed saving " + i.getMd5());
                log.error(Constants.EXCEPTION, e);
            }
        } else {
            //log.info("not saving " + i.getMd5());
        }
    }

    /** Cache-only get-or-create, bypassing the backend. (Legacy naming.) */
    public static IndexFiles instanceNot(String md5) {
        IndexFiles i = all.get(md5);
        if (i == null) {
            i = new IndexFiles(md5);
            all.put(md5, i);
        }
        return i;
    }

    /** Queues a record for a durable save on the next commit(). */
    public static void add(IndexFiles i) {
        dbi.putIfAbsent(i.getMd5(), i);
    }

    /** Queues a record for an interim save on the next commit(). */
    public static void addTemp(IndexFiles i) {
        dbitemp.putIfAbsent(i.getMd5(), i);
    }

    /** Closes the backend, logging (not throwing) any failure. */
    public static void close() {
        try {
            synchronized (IndexFilesDao.class) {
                indexFiles.close();
            }
        } catch (Exception e) {
            log.error(Constants.EXCEPTION, e);
        }
    }

    /**
     * Drains both save queues: finished entries are saved and their lock is
     * handed back via the entry's lock queue; temporary entries are saved but
     * kept locked. Finally commits the backend.
     */
    public static void commit() {
        int[] pris = getPris();
        log.info("pris levels " + pris[0] + " " + pris[1]);
        if (pris[0] > 0) {
            log.info("saving finished");
        }
        for (String k : dbi.keySet()) {
            IndexFiles i = dbi.get(k);
            IndexFilesDao.save(i);
            MyLock lock = i.getLock();
            // Return the lock to the entry's queue so waiters can proceed.
            LinkedBlockingQueue lockqueue = (LinkedBlockingQueue) i.getLockqueue();
            lockqueue.offer(lock);
            dbi.remove(k);
            dbitemp.remove(k);
        }
        if (pris[1] > 0) {
            log.info("saving temporarily");
        }
        for (String k : dbitemp.keySet()) {
            IndexFiles i = dbitemp.get(k);
            IndexFilesDao.save(i);
            dbitemp.remove(k);
        }
        //all.clear();
        try {
            synchronized (IndexFilesDao.class) {
                indexFiles.commit();
            }
        } catch (Exception e) {
            log.error(Constants.EXCEPTION, e);
        }
    }

    /** Returns {durable-queue size, temp-queue size}. */
    private static int[] getPris() {
        int pris[] = { dbi.size(), dbitemp.size() };
        /*
        for (String k : all.keySet()) {
            IndexFiles i = all.get(k);
            int priority = i.getPriority();
            if (priority <= 1) {
                pris[priority]++;
            } else {
                log.error("priority " + priority);
            }
        }
        */
        return pris;
    }

    /** Flushes the backend, logging (not throwing) any failure. */
    public static void flush() {
        try {
            synchronized (IndexFilesDao.class) {
                indexFiles.flush();
            }
        } catch (Exception e) {
            log.error(Constants.EXCEPTION, e);
        }
    }

    /** One-line queue status for the web UI. */
    public static String webstat() {
        int[] pris = getPris();
        return "d " + pris[0] + " / " + pris[1];
    }

    /** Number of entries still awaiting a save. */
    public static int dirty() {
        int[] pris = getPris();
        if (true) return pris[0] + pris[1];
        int dirty1 = 0;
        for (String k : dbi.keySet()) {
            //log.info("save try " + Thread.currentThread().getId() + " " + k);
            IndexFiles i = dbi.get(k);
            if (i.hasChanged()) {
                dirty1++;
            }
        }
        return dirty1;
    }

    /**
     * Deletes a record from the backend and evicts it from the cache.
     */
    public static void delete(IndexFiles index) {
        try {
            synchronized (IndexFilesDao.class) {
                indexFiles.delete(index);
            }
            // Bug fix: the cache is keyed by md5 (String). The previous code
            // called all.remove(index) with the IndexFiles object, which never
            // matched a key and left a stale entry behind.
            all.remove(index.getMd5());
        } catch (Exception e) {
            log.error(Constants.EXCEPTION, e);
        }
    }
}
package org.scribe.model;

import java.io.*;
import java.net.*;
import java.nio.charset.*;
import java.util.*;
import java.util.concurrent.TimeUnit;

import org.scribe.exceptions.*;
import org.scribe.utils.*;

/**
 * Represents an HTTP Request object
 *
 * @author Pablo Fernandez
 */
class Request {
    private static final String CONTENT_LENGTH = "Content-Length";

    private String url;
    private Verb verb;
    private Map<String, String> querystringParams;
    private Map<String, String> bodyParams;
    private Map<String, String> headers;
    // Raw body payload; used instead of bodyParams when set.
    private String payload = null;
    private HttpURLConnection connection;
    private String charset;
    // Binary payload wins over both `payload` and bodyParams.
    private byte[] bytePayload = null;
    private boolean connectionKeepAlive = false;

    /**
     * Creates a new Http Request
     *
     * @param verb Http Verb (GET, POST, etc)
     * @param url  url with optional querystring parameters.
     */
    public Request(Verb verb, String url) {
        this.verb = verb;
        this.url = url;
        this.querystringParams = new HashMap<String, String>();
        this.bodyParams = new HashMap<String, String>();
        this.headers = new HashMap<String, String>();
    }

    /**
     * Execute the request and return a {@link Response}
     *
     * @return Http Response
     * @throws RuntimeException if the connection cannot be created.
     */
    public Response send() {
        try {
            createConnection();
            return doSend();
        } catch (IOException ioe) {
            throw new OAuthException("Problems while creating connection", ioe);
        }
    }

    private void createConnection() throws IOException {
        String effectiveUrl = URLUtils.appendParametersToQueryString(url, querystringParams);
        if (connection == null) {
            // keepAlive is a JVM-wide property; set it before opening.
            System.setProperty("http.keepAlive", connectionKeepAlive ? "true" : "false");
            connection = (HttpURLConnection) new URL(effectiveUrl).openConnection();
        }
    }

    Response doSend() throws IOException {
        connection.setRequestMethod(this.verb.name());
        addHeaders(connection);
        if (verb.equals(Verb.PUT) || verb.equals(Verb.POST)) {
            addBody(connection, getByteBodyContents());
        }
        return new Response(connection);
    }

    void addHeaders(HttpURLConnection conn) {
        for (Map.Entry<String, String> header : headers.entrySet()) {
            conn.setRequestProperty(header.getKey(), header.getValue());
        }
    }

    void addBody(HttpURLConnection conn, byte[] content) throws IOException {
        conn.setRequestProperty(CONTENT_LENGTH, String.valueOf(content.length));
        conn.setDoOutput(true);
        conn.getOutputStream().write(content);
    }

    /**
     * Add an HTTP Header to the Request
     *
     * @param key   the header name
     * @param value the header value
     */
    public void addHeader(String key, String value) {
        this.headers.put(key, value);
    }

    /**
     * Add a body Parameter (for POST/ PUT Requests)
     *
     * @param key   the parameter name
     * @param value the parameter value
     */
    public void addBodyParameter(String key, String value) {
        this.bodyParams.put(key, value);
    }

    /**
     * Add a QueryString parameter
     *
     * @param key   the parameter name
     * @param value the parameter value
     */
    public void addQuerystringParameter(String key, String value) {
        this.querystringParams.put(key, value);
    }

    /**
     * Add body payload.
     *
     * This method is used when the HTTP body is not a form-url-encoded string,
     * but another thing. Like for example XML.
     *
     * Note: The contents are not part of the OAuth signature
     *
     * @param payload the body of the request
     */
    public void addPayload(String payload) {
        this.payload = payload;
    }

    /**
     * Overloaded version for byte arrays
     *
     * @param payload
     */
    public void addPayload(byte[] payload) {
        this.bytePayload = payload;
    }

    /**
     * Get a {@link Map} of the query string parameters.
     *
     * @return a map containing the query string parameters
     * @throws OAuthException if the URL is not valid
     */
    public Map<String, String> getQueryStringParams() {
        try {
            Map<String, String> params = new HashMap<String, String>();
            String queryString = new URL(url).getQuery();
            params.putAll(URLUtils.queryStringToMap(queryString));
            params.putAll(this.querystringParams);
            return params;
        } catch (MalformedURLException mue) {
            throw new OAuthException("Malformed URL", mue);
        }
    }

    /**
     * Obtains a {@link Map} of the body parameters.
     *
     * @return a map containing the body parameters.
     */
    public Map<String, String> getBodyParams() {
        return bodyParams;
    }

    /**
     * Obtains the URL of the HTTP Request.
     *
     * @return the original URL of the HTTP Request
     */
    public String getUrl() {
        return url;
    }

    /**
     * Returns the URL without the port and the query string part.
     *
     * @return the OAuth-sanitized URL
     */
    public String getSanitizedUrl() {
        // Bug fix: the second call was String.replace(), which treats its
        // argument as a LITERAL, so the ":NNNN" port pattern was never
        // stripped. replaceAll() interprets it as the intended regex.
        return url.replaceAll("\\?.*", "").replaceAll("\\:\\d{4}", "");
    }

    /**
     * Returns the body of the request
     *
     * @return form encoded string
     * @throws OAuthException if the charset chosen is not supported
     */
    public String getBodyContents() {
        try {
            return new String(getByteBodyContents(), getCharset());
        } catch (UnsupportedEncodingException uee) {
            throw new OAuthException("Unsupported Charset: " + charset, uee);
        }
    }

    byte[] getByteBodyContents() {
        if (bytePayload != null) return bytePayload;
        String body = (payload != null) ? payload : URLUtils.formURLEncodeMap(bodyParams);
        try {
            return body.getBytes(getCharset());
        } catch (UnsupportedEncodingException uee) {
            throw new OAuthException("Unsupported Charset: " + getCharset(), uee);
        }
    }

    /**
     * Returns the HTTP Verb
     *
     * @return the verb
     */
    public Verb getVerb() {
        return verb;
    }

    /**
     * Returns the connection headers as a {@link Map}
     *
     * @return map of headers
     */
    public Map<String, String> getHeaders() {
        return headers;
    }

    /**
     * Returns the connection charset. Defaults to {@link Charset} defaultCharset if not set
     *
     * @return charset
     */
    public String getCharset() {
        return charset == null ? Charset.defaultCharset().name() : charset;
    }

    /**
     * Sets the connect timeout for the underlying {@link HttpURLConnection}
     *
     * @param duration duration of the timeout
     * @param unit     unit of time (milliseconds, seconds, etc)
     */
    public void setConnectTimeout(int duration, TimeUnit unit) {
        this.connection.setConnectTimeout((int) unit.toMillis(duration));
    }

    /**
     * Sets the read timeout for the underlying {@link HttpURLConnection}
     *
     * @param duration duration of the timeout
     * @param unit     unit of time (milliseconds, seconds, etc)
     */
    public void setReadTimeout(int duration, TimeUnit unit) {
        this.connection.setReadTimeout((int) unit.toMillis(duration));
    }

    /**
     * Set the charset of the body of the request
     *
     * @param charsetName name of the charset of the request
     */
    public void setCharset(String charsetName) {
        this.charset = charsetName;
    }

    public void setConnectionKeepAlive(boolean connectionKeepAlive) {
        this.connectionKeepAlive = connectionKeepAlive;
    }

    /*
     * We need this in order to stub the connection object for test cases
     */
    void setConnection(HttpURLConnection connection) {
        this.connection = connection;
    }

    @Override
    public String toString() {
        return String.format("@Request(%s %s)", getVerb(), getUrl());
    }
}
package storagecraft.block;

import net.minecraft.block.Block;
import net.minecraft.block.BlockPistonBase;
import net.minecraft.block.material.Material;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import storagecraft.StorageCraft;
import storagecraft.tile.TileBase;
import storagecraft.util.InventoryUtils;

// Common base for all StorageCraft blocks: rock material, shared creative tab,
// "storagecraft:<name>" texture, wrench rotation and inventory drop-on-break.
public abstract class BlockBase extends Block {
    // Registry/unlocalized-name suffix for this block.
    private String name;

    public BlockBase(String name) {
        super(Material.rock);
        this.name = name;
        setCreativeTab(StorageCraft.TAB);
        setBlockTextureName("storagecraft:" + name);
    }

    @Override
    public String getUnlocalizedName() {
        return "block." + StorageCraft.ID + ":" + name;
    }

    // Cycles the tile's facing to the next valid direction (wraps to 0) and
    // triggers a client resync. Returns false for tiles that are not TileBase.
    @Override
    public boolean rotateBlock(World world, int x, int y, int z, ForgeDirection axis) {
        TileEntity tile = world.getTileEntity(x, y, z);
        if (tile instanceof TileBase) {
            ForgeDirection dir = ((TileBase) tile).getDirection();
            int newDir = dir.ordinal() + 1;
            if (newDir > ForgeDirection.VALID_DIRECTIONS.length - 1) {
                newDir = 0;
            }
            ((TileBase) tile).setDirection(ForgeDirection.getOrientation(newDir));
            world.markBlockForUpdate(x, y, z);
            return true;
        }
        return false;
    }

    // On placement, orient the tile toward the placer (piston-style heuristic).
    @Override
    public void onBlockPlacedBy(World world, int x, int y, int z, EntityLivingBase entityLiving, ItemStack itemStack) {
        super.onBlockPlacedBy(world, x, y, z, entityLiving, itemStack);
        TileEntity tile = world.getTileEntity(x, y, z);
        if (tile instanceof TileBase) {
            ((TileBase) tile).setDirection(ForgeDirection.getOrientation(BlockPistonBase.determineOrientation(world, x, y, z, entityLiving)));
        }
    }

    // Spill any tile inventory into the world before the block is destroyed.
    @Override
    public void onBlockPreDestroy(World world, int x, int y, int z, int meta) {
        TileEntity tile = world.getTileEntity(x, y, z);
        if (tile instanceof IInventory) {
            InventoryUtils.dropInventory(world, (IInventory) tile, x, y, z);
        }
        super.onBlockPreDestroy(world, x, y, z, meta);
    }
}
package org.testng.internal;

import javax.xml.parsers.ParserConfigurationException;

import org.testng.xml.Parser;
import org.testng.xml.XmlClass;
import org.testng.xml.XmlInclude;
import org.testng.xml.XmlPackage;
import org.testng.xml.XmlSuite;
import org.testng.xml.XmlTest;
import org.xml.sax.SAXException;
import org.yaml.snakeyaml.Loader;
import org.yaml.snakeyaml.TypeDescription;
import org.yaml.snakeyaml.constructor.Constructor;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * YAML support for TestNG.
 *
 * @author cbeust
 */
public class Yaml {

    /** Copies a named list-of-maps section of the suite into {@code target} as string values. */
    private static void addToMap(Map suite, String name, Map target) {
        List<Map<String, String>> parameters = (List<Map<String, String>>) suite.get(name);
        if (parameters != null) {
            for (Map<String, String> parameter : parameters) {
                for (Map.Entry p : parameter.entrySet()) {
                    target.put(p.getKey(), p.getValue().toString());
                }
            }
        }
    }

    /** Copies the values of a named list-of-maps section of the suite into {@code target}. */
    private static void addToList(Map suite, String name, List target) {
        List<Map<String, String>> parameters = (List<Map<String, String>>) suite.get(name);
        if (parameters != null) {
            for (Map<String, String> parameter : parameters) {
                for (Map.Entry p : parameter.entrySet()) {
                    target.add(p.getValue().toString());
                }
            }
        }
    }

    /**
     * Parses a YAML file into an {@link XmlSuite}, wiring list/map property
     * types so SnakeYAML can build the TestNG XML model directly.
     *
     * @param filePath path to the YAML suite file
     * @throws FileNotFoundException if the file does not exist
     */
    public static XmlSuite parse(String filePath) throws FileNotFoundException {
        Constructor constructor = new Constructor(XmlSuite.class);
        {
            TypeDescription suiteDescription = new TypeDescription(XmlSuite.class);
            suiteDescription.putListPropertyType("packages", XmlPackage.class);
            suiteDescription.putListPropertyType("listeners", String.class);
            suiteDescription.putListPropertyType("tests", XmlTest.class);
            suiteDescription.putListPropertyType("method-selectors", XmlMethodSelector.class);
            constructor.addTypeDescription(suiteDescription);
        }
        {
            TypeDescription testDescription = new TypeDescription(XmlTest.class);
            testDescription.putListPropertyType("classes", XmlClass.class);
            testDescription.putMapPropertyType("metaGroups", String.class, List.class);
            constructor.addTypeDescription(testDescription);
        }
        Loader loader = new Loader(constructor);
        org.yaml.snakeyaml.Yaml y = new org.yaml.snakeyaml.Yaml(loader);
        XmlSuite result = (XmlSuite) y.load(new FileInputStream(new File(filePath)));

        // Adjust XmlTest parents
        for (XmlTest t : result.getTests()) {
            t.setSuite(result);
        }
        return result;
    }

    /** Reflectively invokes {@code methodName} on {@code xml} with the map value for {@code key}, if present. */
    private static void setField(Object xml, Map<?, ?> map, String key, String methodName, Class<?> parameter) {
        Object o = map.get(key);
        if (o != null) {
            Method m;
            try {
                m = xml.getClass().getMethod(methodName, parameter);
                m.invoke(xml, o);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /** Emits "key: value" with no indentation unless value equals its default. */
    private static void maybeAdd(StringBuilder sb, String key, Object value, Object def) {
        maybeAdd(sb, "", key, value, def);
    }

    /**
     * Emits "&lt;sp&gt;key: value\n" unless {@code value} is null or equal to
     * the default {@code def}.
     */
    private static void maybeAdd(StringBuilder sb, String sp, String key, Object value, Object def) {
        if (value != null && !value.equals(def)) {
            sb.append(sp).append(key).append(": ").append(value.toString()).append("\n");
        }
    }

    /**
     * Serializes a suite (and its tests) to YAML.
     */
    public static StringBuilder toYaml(XmlSuite suite) {
        StringBuilder result = new StringBuilder();
        maybeAdd(result, "name", suite.getName(), null);
        maybeAdd(result, "junit", suite.isJUnit(), XmlSuite.DEFAULT_JUNIT);
        maybeAdd(result, "verbose", suite.getVerbose(), XmlSuite.DEFAULT_VERBOSE);
        maybeAdd(result, "threadCount", suite.getThreadCount(), XmlSuite.DEFAULT_THREAD_COUNT);
        maybeAdd(result, "timeOut", suite.getTimeOut(), null);
        maybeAdd(result, "parallel", suite.getParallel(), XmlSuite.DEFAULT_PARALLEL);
        maybeAdd(result, "skipFailedInvocationCounts", suite.skipFailedInvocationCounts(),
            XmlSuite.DEFAULT_SKIP_FAILED_INVOCATION_COUNTS);

        toYaml(result, "parameters", "", suite.getParameters());

        if (suite.getPackages().size() > 0) {
            result.append("packages:\n");
            toYaml(result, suite.getPackages());
        }
        toYaml(result, "listeners", suite.getListeners());

        if (suite.getTests().size() > 0) {
            result.append("tests:\n");
            for (XmlTest t : suite.getTests()) {
                toYaml(result, "  ", t);
            }
        }
        // (test|method-selectors|suite-files)* >
        return result;
    }

    /** Serializes one test entry at indentation {@code sp}. */
    private static void toYaml(StringBuilder result, String sp, XmlTest t) {
        String sp2 = sp + "  ";
        result.append(sp).append("- name: ").append(t.getName()).append("\n");
        maybeAdd(result, sp2, "junit", t.isJUnit(), XmlSuite.DEFAULT_JUNIT);
        maybeAdd(result, sp2, "verbose", t.getVerbose(), XmlSuite.DEFAULT_VERBOSE);
        maybeAdd(result, sp2, "timeOut", t.getTimeOut(), null);
        maybeAdd(result, sp2, "parallel", t.getParallel(), XmlSuite.DEFAULT_PARALLEL);
        maybeAdd(result, sp2, "skipFailedInvocationCounts", t.skipFailedInvocationCounts(),
            XmlSuite.DEFAULT_SKIP_FAILED_INVOCATION_COUNTS);
        // Bug fix: the indent (sp2) and key ("preserveOrder") arguments were
        // swapped relative to maybeAdd(sb, sp, key, value, def), producing
        // "preserveOrder<indent>: value" instead of an indented key.
        maybeAdd(result, sp2, "preserveOrder", t.getPreserveOrder(), XmlTest.DEFAULT_PRESERVE_ORDER);

        toYaml(result, "parameters", sp2, t.getTestParameters());

        if (t.getIncludedGroups().size() > 0) {
            result.append(sp2).append("includedGroups: [ ")
                .append(Utils.join(t.getIncludedGroups(), ","))
                .append(" ]\n");
        }
        if (t.getExcludedGroups().size() > 0) {
            result.append(sp2).append("excludedGroups: [ ")
                .append(Utils.join(t.getExcludedGroups(), ","))
                .append(" ]\n");
        }

        Map<String, List<String>> mg = t.getMetaGroups();
        if (mg.size() > 0) {
            result.append(sp2).append("metaGroups: { ");
            boolean first = true;
            for (String group : mg.keySet()) {
                if (!first) result.append(", ");
                result.append(group).append(": [ ")
                    .append(Utils.join(mg.get(group), ",")).append(" ] ");
                first = false;
            }
            result.append(" }\n");
        }

        if (t.getXmlPackages().size() > 0) {
            result.append(sp2).append("xmlPackages:\n");
            for (XmlPackage xp : t.getXmlPackages()) {
                toYaml(result, sp2 + "  - ", xp);
            }
        }

        if (t.getXmlClasses().size() > 0) {
            result.append(sp2).append("classes:\n");
            for (XmlClass xc : t.getXmlClasses()) {
                toYaml(result, sp2 + "  ", xc);
            }
        }

        result.append("\n");
    }

    /** Serializes one class entry; "name:" only prefixed when method filters follow. */
    private static void toYaml(StringBuilder result, String sp2, XmlClass xc) {
        List<XmlInclude> im = xc.getIncludedMethods();
        List<String> em = xc.getExcludedMethods();
        String name = im.size() > 0 || em.size() > 0 ? "name: " : "";

        result.append(sp2).append("- " + name).append(xc.getName()).append("\n");
        if (im.size() > 0) {
            result.append(sp2 + "  includedMethods:\n");
            for (XmlInclude xi : im) {
                toYaml(result, sp2 + "    ", xi);
            }
        }

        if (em.size() > 0) {
            result.append(sp2 + "  excludedMethods:\n");
            toYaml(result, sp2 + "    ", em);
        }
    }

    private static void toYaml(StringBuilder result, String sp2, XmlInclude xi) {
        result.append(sp2 + "- " + xi.getName()).append("\n");
    }

    private static void toYaml(StringBuilder result, String sp, List<String> strings) {
        for (String l : strings) {
            result.append(sp).append("- ").append(l).append("\n");
        }
    }

    private static final String SP = "  ";

    private static void toYaml(StringBuilder sb, List<XmlPackage> packages) {
        for (XmlPackage p : packages) {
            toYaml(sb, "  ", p);
        }
    }

    private static void toYaml(StringBuilder sb, String sp, XmlPackage p) {
        sb.append(sp).append("name: ").append(p.getName()).append("\n");
        generateIncludeExclude(sb, sp, "includes", p.getInclude());
        generateIncludeExclude(sb, sp, "excludes", p.getExclude());
    }

    // NOTE(review): output looks malformed YAML — the key is emitted without a
    // trailing ':' and items without "- " or a newline. Left unchanged since
    // downstream consumers may depend on the current format; verify.
    private static void generateIncludeExclude(StringBuilder sb, String sp, String key, List<String> includes) {
        if (includes.size() > 0) {
            sb.append(sp).append("  " + key + "\n");
            for (String inc : includes) {
                sb.append(sp).append("    " + inc);
            }
        }
    }

    /** Renders a parameter map as an inline YAML mapping "{ k: v, ... }". */
    private static void mapToYaml(Map<String, String> map, StringBuilder out) {
        if (map.size() > 0) {
            out.append("{ ");
            boolean first = true;
            for (Map.Entry<String, String> e : map.entrySet()) {
                if (!first) out.append(", ");
                first = false;
                out.append(e.getKey() + ": " + e.getValue());
            }
            out.append(" }\n");
        }
    }

    private static void toYaml(StringBuilder sb, String key, String sp, Map<String, String> parameters) {
        if (parameters.size() > 0) {
            sb.append(sp).append(key).append(": ");
            mapToYaml(parameters, sb);
        }
    }

    /** CLI helper: converts the XML suite given as args[0] to YAML on stdout. */
    public static void main(String[] args)
        throws FileNotFoundException, ParserConfigurationException, SAXException, IOException {
        Collection<XmlSuite> s = new Parser(args[0]).parse();
        System.out.println(Yaml.toYaml(s.iterator().next()));
    }
}
package tbax.baxshops.items;

import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.DyeColor;
import org.bukkit.Material;
import org.bukkit.block.banner.Pattern;
import org.bukkit.block.banner.PatternType;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.BannerMeta;
import org.bukkit.inventory.meta.ItemMeta;
import org.jetbrains.annotations.NotNull;
import tbax.baxshops.*;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.util.*;

/**
 * Item-related helpers: localized item names via NMS reflection, alias
 * lookups in a shop, damageable/enchantment tables loaded from resources,
 * and banner/stack comparison.
 */
@SuppressWarnings("JavaDoc")
public final class ItemUtil {
    private static final String MINECRAFT_VERSION;
    // CraftItemStack.asNMSCopy(ItemStack) for the running server version.
    private static final Method AS_NMS_COPY;
    // net.minecraft.server.<ver>.ItemStack#getName
    private static final Method GET_NAME;

    static {
        // The server package name ends with the version token, e.g. v1_12_R1.
        String name = Bukkit.getServer().getClass().getPackage().getName();
        MINECRAFT_VERSION = name.substring(name.lastIndexOf('.') + 1);

        Method nmsCpyMthd = null;
        Method getNmMthd = null;
        try {
            Class<?> itemStackCls = Class.forName("net.minecraft.server." + MINECRAFT_VERSION + ".ItemStack");
            nmsCpyMthd = Class.forName("org.bukkit.craftbukkit." + MINECRAFT_VERSION + ".inventory.CraftItemStack")
                .getMethod("asNMSCopy", ItemStack.class);
            getNmMthd = itemStackCls.getMethod("getName");
        } catch (ReflectiveOperationException e) {
            e.printStackTrace();
        }
        AS_NMS_COPY = nmsCpyMthd;
        GET_NAME = getNmMthd;
    }

    /**
     * An array of items that can be damaged
     */
    private static final Map<Material, Short> damageable = new HashMap<>();
    /**
     * A list of enchantment names
     */
    private static final Map<Enchantment, Enchantable> enchants = new HashMap<>();

    private ItemUtil() {
    }

    /**
     * Finds the shop entries best matching an underscore-separated alias.
     * An exact (case-insensitive) name match wins outright; otherwise all
     * entries sharing the highest word-overlap count are returned.
     */
    public static List<BaxEntry> getItemFromAlias(String input, BaxShop shop) {
        String[] words = input.toUpperCase().split("_");
        String normalizedInput = input.replace('_', ' ').toUpperCase();
        int maxMatch = -1;
        List<BaxEntry> entries = new ArrayList<>();
        for (BaxEntry entry : shop) {
            String entryName = entry.getName().toUpperCase();
            if (Objects.equals(entryName, normalizedInput)) {
                return Collections.singletonList(entry); // 100% match
            } else {
                String[] entryWords = entryName.split(" ");
                int matches = getMatches(entryWords, words);
                if (matches == maxMatch) {
                    entries.add(entry);
                } else if (matches > maxMatch) {
                    entries.clear();
                    entries.add(entry);
                    maxMatch = matches;
                }
            }
        }
        return entries;
    }

    /** Counts pairwise equal words between the two arrays. */
    private static int getMatches(String[] array1, String[] array2) {
        int matches = 0;
        for (String word1 : array1) {
            for (String word2 : array2) {
                if (word1.equals(word2)) {
                    ++matches;
                }
            }
        }
        return matches;
    }

    /**
     * Gets the name of an item.
     *
     * @param entry the shop entry
     * @return the item's name
     */
    public static String getName(BaxEntry entry) {
        return ItemUtil.getName(entry.getItemStack());
    }

    /**
     * Gets the name of an item.
     *
     * @param item an item stack
     * @return the item's name
     */
    public static String getName(ItemStack item) {
        if (item.getType() == Material.ENCHANTED_BOOK) {
            Map<Enchantment, Integer> enchants = EnchantMap.getEnchants(item);
            if (enchants != null)
                return EnchantMap.fullListString(enchants);
        } else if (isOminousBanner(item)) {
            return ChatColor.GOLD + "Ominous Banner";
        }
        // Strip any custom display name so NMS reports the vanilla name.
        item = item.clone();
        ItemMeta meta = item.getItemMeta();
        meta.setDisplayName(null);
        item.setItemMeta(meta);
        try {
            Object nmsCopy = AS_NMS_COPY.invoke(null, item);
            Object txtObj = GET_NAME.invoke(nmsCopy);
            try {
                // Older versions return String directly...
                return (String) txtObj;
            } catch (ClassCastException e) {
                // ...newer versions return a chat component with getText().
                return (String) txtObj.getClass().getMethod("getText").invoke(txtObj);
            }
        } catch (ReflectiveOperationException | ClassCastException e) {
            ShopPlugin.logWarning("Could not get item name for " + item.getType());
            return item.getType().toString();
        }
    }

    /** True if the stack is a white banner carrying all eight ominous patterns. */
    public static boolean isOminousBanner(@NotNull ItemStack stack) {
        if (stack.getType() != Material.WHITE_BANNER)
            return false;
        BannerMeta bannerMeta = (BannerMeta) stack.getItemMeta();
        return bannerMeta.getPatterns().containsAll(ominousPatterns());
    }

    private static List<Pattern> ominousPatterns() {
        Pattern[] patterns = new Pattern[8];
        patterns[0] = new Pattern(DyeColor.CYAN, PatternType.RHOMBUS_MIDDLE);
        patterns[1] = new Pattern(DyeColor.LIGHT_GRAY, PatternType.STRIPE_BOTTOM);
        patterns[2] = new Pattern(DyeColor.GRAY, PatternType.STRIPE_CENTER);
        patterns[3] = new Pattern(DyeColor.LIGHT_GRAY, PatternType.BORDER);
        patterns[4] = new Pattern(DyeColor.BLACK, PatternType.STRIPE_MIDDLE);
        patterns[5] = new Pattern(DyeColor.LIGHT_GRAY, PatternType.HALF_HORIZONTAL);
        patterns[6] = new Pattern(DyeColor.LIGHT_GRAY, PatternType.CIRCLE_MIDDLE);
        patterns[7] = new Pattern(DyeColor.BLACK, PatternType.BORDER);
        return Arrays.asList(patterns);
    }

    /** Friendly display name for an enchantment, from enchants.yml or derived. */
    public static String getEnchantName(Enchantment enchant) {
        Enchantable enchantable = enchants.get(enchant);
        if (enchantable == null)
            return Format.toFriendlyName(enchant.toString());
        return enchantable.getName();
    }

    /**
     * Determines if a material can be damaged
     * @param item
     * @return
     */
    public static boolean isDamageable(Material item) {
        return damageable.containsKey(item);
    }

    /**
     * Gets the maximum damage for an item. This assumes damageability
     * has been confirmed with isDamageable()
     * @param item
     * @return
     */
    public static short getMaxDamage(Material item) {
        return damageable.get(item);
    }

    /**
     * Loads the damageable items list from the damageable.txt resource.
     * @param plugin
     */
    public static void loadDamageable(ShopPlugin plugin) {
        InputStream stream = plugin.getResource("damageable.txt");
        if (stream == null) {
            return;
        }
        int i = 1;
        // try-with-resources closes the reader (and the wrapped stream).
        try (BufferedReader br = new BufferedReader(new InputStreamReader(stream))) {
            String line;
            while ((line = br.readLine()) != null) {
                // Reconstructed from a corrupted source line: skip blank lines
                // and comment lines. Assumes '#' is the comment marker used in
                // damageable.txt — TODO confirm against the resource file.
                if (line.length() == 0 || line.charAt(0) == '#') {
                    continue;
                }
                Scanner scanner = new Scanner(line);
                Material material = Material.getMaterial(scanner.next());
                short maxDamage = scanner.nextShort();
                damageable.put(material, maxDamage);
                i++;
            }
        } catch (IOException e) {
            plugin.getLogger().warning("Failed to readFromDisk damageable: " + e.toString());
        } catch (NoSuchElementException e) {
            plugin.getLogger().info("loadDamageable broke at line: " + i);
            e.printStackTrace();
        }
    }

    /**
     * Loads the enchantment names in enchants.txt
     * @param plugin
     */
    public static void loadEnchants(ShopPlugin plugin) {
        try (InputStream stream = plugin.getResource("enchants.yml")) {
            YamlConfiguration enchantConfig = YamlConfiguration.loadConfiguration(new InputStreamReader(stream));
            List<Map<?, ?>> section = enchantConfig.getMapList("enchants");
            for (Map<?, ?> enchantMap : section) {
                Enchantment enchantment = Enchantment.getByName((String) enchantMap.get("enchantment"));
                String name = (String) enchantMap.get("name");
                boolean levels = (Boolean) enchantMap.get("levels");
                enchants.put(enchantment, new Enchantable(name, levels));
            }
        } catch (IOException e) {
            plugin.getLogger().warning("Failed to readFromDisk enchants: " + e.toString());
        }
    }

    public static boolean hasEnchantLevels(Enchantment enchantment) {
        return getEnchantable(enchantment).hasLevels();
    }

    /** Enchantable from the table, or a derived default that shows levels. */
    public static Enchantable getEnchantable(Enchantment enchantment) {
        Enchantable enchantable = enchants.get(enchantment);
        if (enchantable == null)
            return new Enchantable(Format.toFriendlyName(enchantment.toString()), true);
        return enchantable;
    }

    /** True if both stacks are banners of the same type with identical pattern lists. */
    public static boolean isSameBanner(ItemStack stack1, ItemStack stack2) {
        BannerMeta bannerMeta1, bannerMeta2;
        if (stack1.getItemMeta() instanceof BannerMeta) {
            bannerMeta1 = (BannerMeta) stack1.getItemMeta();
        } else {
            return false;
        }
        if (stack2.getItemMeta() instanceof BannerMeta) {
            bannerMeta2 = (BannerMeta) stack2.getItemMeta();
        } else {
            return false;
        }
        if (stack1.getType() != stack2.getType())
            return false;
        if (bannerMeta1.numberOfPatterns() != bannerMeta2.numberOfPatterns())
            return false;
        for (int i = 0; i < bannerMeta1.numberOfPatterns(); ++i) {
            if (!bannerMeta1.getPattern(i).equals(bannerMeta2.getPattern(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Similarity check; with smartStack, banners that differ only in meta
     * but share type and patterns still count as similar.
     */
    public static boolean isSimilar(ItemStack stack1, ItemStack stack2, boolean smartStack) {
        if (stack1 == stack2)
            return true;
        if (stack1 == null || stack2 == null)
            return false;
        if (!smartStack)
            return stack1.isSimilar(stack2);
        if (!stack1.isSimilar(stack2)) {
            return stack1.getType() == stack2.getType() && isSameBanner(stack1, stack2);
        }
        return true;
    }
}
package tk.teamfield3.test; import tk.teamfield3.jTTD.display.Material; import tk.teamfield3.jTTD.display.Mesh; import tk.teamfield3.jTTD.display.Texture; import tk.teamfield3.jTTD.util.math.Vector3f; public class TestFloor extends TestComponent { public TestFloor(Mesh mesh) { super(mesh, new Material(new Texture("test.png"), new Vector3f(1, 1, 1), 1, 8), new Vector3f(0, 0, 0)); } @Override public void input() { } @Override public void update() { } @Override public void render() { super.render(); } }
package processes;

import interface_objects.DatabaseHandler;
import managers.Logger;
import objects.DatabaseQuery;

import java.net.URI;
import java.net.URISyntaxException;
import java.sql.*;

/**
 * Long-running worker process that pulls queries from {@link DatabaseHandler},
 * executes them against the Postgres database named by the DATABASE_URL
 * environment variable, and posts the results back.
 */
public class DatabaseClient {

    /**
     * Entry point of the database client process: blocks until a query is
     * available, executes it, and hands the result to the handler.
     *
     * @param args unused
     */
    @SuppressWarnings("InfiniteLoopStatement")
    public static void main(String[] args) {
        DatabaseQuery query;
        while (true) {
            // Busy-wait for the next query from the shared handler.
            query = null;
            Logger.log("DatabaseClient", "waiting for query");
            while (query == null)
                query = DatabaseHandler.receiveQuery();
            Logger.log("DatabaseClient", "got query " + query.query);
            ResultSet rs = executeQuery(query);
            Logger.log("DatabaseClient", "query " + query.query + " got " + rs);
            DatabaseHandler.addResponse(query.id, rs);
        }
    }

    /**
     * Executes the given {@code dbQuery}.
     *
     * @param dbQuery the query to execute
     * @return
     * <p>
     * {@link ResultSet} if query is a select, null otherwise.
     * </p>
     * <p>
     * will not return the error if one occurred,
     * to check for errors check the {@code dbQuery} field {@link DatabaseQuery#error}
     * </p>
     */
    public static ResultSet executeQuery(DatabaseQuery dbQuery) {
        ResultSet rs = null;
        Connection connection = null;
        try {
            connection = getConnection();
            addSqlTypes(connection);
            Statement stmt = connection.createStatement();
            // Only statements literally starting with "SELECT" produce a result set.
            if (dbQuery.query.startsWith("SELECT"))
                rs = stmt.executeQuery(dbQuery.query);
            else
                stmt.executeUpdate(dbQuery.query);
        } catch (Exception e) {
            dbQuery.error = e.getMessage();
        } finally {
            if (connection != null) {
                try {
                    connection.close();
                } catch (SQLException e) {
                    // Append the close failure to any earlier error.
                    if (dbQuery.error == null)
                        dbQuery.error = "";
                    else
                        dbQuery.error += "\n";
                    dbQuery.error += e.getMessage();
                }
            }
        }
        // NOTE(review): the returned ResultSet belongs to a connection that was
        // closed above; per JDBC a closed connection invalidates its result
        // sets, so reading it here may fail — confirm how callers consume it.
        return rs;
    }

    /**
     * Adds the custom sql types to the connection.
     *
     * @param con the connection to add sql types to
     */
    public static void addSqlTypes(Connection con) {
        try {
            java.util.Map<String, Class<?>> map = con.getTypeMap();
            // FIX: resolve the class once and reuse it; the original called
            // Class.forName twice and left the first result in an unused local.
            Class<?> commandAccessType = Class.forName("database_objects.CommandAccessSqlType");
            map.put("public.command_access", commandAccessType);
            con.setTypeMap(map);
            Logger.log("DatabaseClient.addSqlType", "adding types succeeded");
        } catch (Exception e) {
            Logger.log("DatabaseClient.addSqlType", "adding types failed, " + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Gets a database connection built from the DATABASE_URL environment
     * variable (Heroku-style postgres URI, with optional user:password).
     *
     * @return a database connection
     * @throws URISyntaxException syntax exception
     * @throws SQLException sql exception
     */
    private static Connection getConnection() throws URISyntaxException, SQLException {
        URI dbUri = new URI(System.getenv("DATABASE_URL"));
        int port = dbUri.getPort();
        String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + ":" + port + dbUri.getPath();
        if (dbUri.getUserInfo() != null) {
            String username = dbUri.getUserInfo().split(":")[0];
            String password = dbUri.getUserInfo().split(":")[1];
            return DriverManager.getConnection(dbUrl, username, password);
        } else {
            return DriverManager.getConnection(dbUrl);
        }
    }
}
package visitors;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import javax.annotation.processing.ProcessingEnvironment;

import annotations.Morph;
import checkers.types.AnnotatedTypeFactory;

import com.sun.source.tree.Tree.Kind;
import com.sun.source.util.TreePath;
import com.sun.tools.javac.code.Scope;
import com.sun.tools.javac.comp.AttrContext;
import com.sun.tools.javac.comp.Enter;
import com.sun.tools.javac.comp.Env;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.code.Symbol.MethodSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Symtab;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.comp.Attr;
import com.sun.tools.javac.comp.MemberEnter;
import com.sun.tools.javac.comp.Resolve;
import com.sun.tools.javac.processing.JavacProcessingEnvironment;
import com.sun.tools.javac.tree.JCTree.*;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.JCBlock;
import com.sun.tools.javac.tree.JCTree.JCCompilationUnit;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCStatement;
import com.sun.tools.javac.tree.TreeInfo;
import com.sun.tools.javac.tree.TreeMaker;
import com.sun.tools.javac.tree.TreeTranslator;
import com.sun.tools.javac.util.Context;
import com.sun.tools.javac.util.List;
import com.sun.tools.javac.util.Log;
import com.sun.tools.javac.util.Name;
import com.sun.tools.javac.util.Names;

import static com.sun.tools.javac.code.Kinds.*;

/**
 * AST translator that rewrites declarations of @Morph-annotated types into
 * declarations of their synthetic replacement class (e.g. "__Logged$Stack"),
 * re-entering and re-attributing the new declaration so the compiler's
 * symbol tables stay consistent.
 */
public class ExpansionTranslator extends TreeTranslator {

    protected static final String SYN_PREFIX = "__";

    protected Context context;
    private Symtab syms;
    protected TreeMaker make;
    protected Names names;
    protected Enter enter;
    private Resolve rs;
    protected MemberEnter memberEnter;
    protected TreePath path;
    protected Attr attr;
    protected Log log;
    protected AnnotatedTypeFactory atypeFactory;

    public ExpansionTranslator(ProcessingEnvironment processingEnv, TreePath path) {
        context = ((JavacProcessingEnvironment) processingEnv).getContext();
        syms = Symtab.instance(context);
        make = TreeMaker.instance(context);
        names = Names.instance(context);
        enter = Enter.instance(context);
        rs = Resolve.instance(context);
        memberEnter = MemberEnter.instance(context);
        attr = Attr.instance(context);
        log = Log.instance(context);
        // FIX: the path parameter was accepted but never stored, leaving the
        // field null for any later use.
        this.path = path;
    }

    @Override
    public void visitBlock(JCBlock tree) {
        System.out.println("# visitBlock: \n" + tree);
        List<JCStatement> stats;
        // Walk the cons-list of statements; tail != null stops before the
        // terminating empty list.
        for (stats = tree.stats; stats.tail != null; stats = stats.tail) {
            JCStatement stat = stats.head;
            if (isMorphedVariableDeclaration(stat)) {
                System.out.println("# Found a morphed variable declaration: " + stat);
                JCVariableDecl varDecl = (JCVariableDecl) stat;
                printSymbolInfo(varDecl.sym);
                Env<AttrContext> env = enter.getEnv(varDecl.type.tsym);
                System.out.println("# old var decl: " + stat);
                // Build the synthetic replacement, splice it in after the
                // original, enter it into the scope, then attribute it.
                JCVariableDecl syntheticStat = replaceWithSynthetic((JCVariableDecl) stat);
                spliceNode(stats, stat, syntheticStat);
                enterMember(syntheticStat, env);
                System.out.println(" new var decl: " + stats.toString());
                attr.attribStat(syntheticStat, env);
                printSymbolInfo(syntheticStat.sym);
                // stats = stats.tail;
            }
        }
        System.out.println("# end");
        super.visitBlock(tree);
    }

    @Override
    public void visitVarDef(JCVariableDecl tree) {
        super.visitVarDef(tree);
        if (isMorphedVariableDeclaration(tree)) {
            replaceWithSynthetic(tree);
        }
    }

    /**
     * Invokes MemberEnter.memberEnter(JCTree, Env) reflectively (it is not
     * public API). Inspired by EnerJ.
     *
     * @param member the tree to enter
     * @param env    the attribution environment to enter it into
     */
    public void enterMember(JCTree member, Env<AttrContext> env) {
        Method meth;
        try {
            meth = MemberEnter.class.getDeclaredMethod("memberEnter", JCTree.class, Env.class);
        } catch (NoSuchMethodException e) {
            System.out.println("raised only if compiler internal api changes");
            // FIX: the original fell through and dereferenced the null
            // method handle below, turning the diagnostic into an NPE.
            return;
        }
        meth.setAccessible(true);
        Object[] args = {member, env};
        try {
            meth.invoke(memberEnter, args);
        } catch (IllegalAccessException | InvocationTargetException e) {
            System.out.println("raised only if compiler internal api changes");
        }
    }

    /**
     * Inserts newNode immediately after oldNode's cell in the statement list.
     * Note: the old node is NOT removed; callers rely on the new node simply
     * following it.
     */
    private void spliceNode(List<JCStatement> statementList, JCStatement oldNode, JCStatement newNode) {
        List<JCTree.JCStatement> newList = List.<JCTree.JCStatement>of(newNode);
        newList.tail = statementList.tail;
        statementList.tail = newList;
    }

    /** Debug dump of a symbol and its surroundings. */
    private void printSymbolInfo(Symbol sym) {
        System.out.println("# Symbol: " + sym);
        if (sym != null) {
            System.out.println("\tKind: " + sym.getKind());
            System.out.println("\tType: " + sym.type);
            System.out.println("\tMembers: " + sym.members());
            System.out.println("\tOwner: " + sym.owner);
            System.out.println("\tOwner Kind: " + sym.owner.getKind());
            System.out.println("\tLocation: " + sym.location());
            System.out.println("\tMembers " + sym.members());
            System.out.println("\tMembers of Owner: " + sym.owner.members());
        }
    }

    /**
     * Builds a replacement declaration whose type and initialiser refer to the
     * synthetic class ("__Logged$Stack"), reusing the original constructor
     * arguments, modifiers and name.
     */
    private JCVariableDecl replaceWithSynthetic(JCVariableDecl tree) {
        List<JCExpression> oldInitializerList = ((JCNewClass) tree.init).args;
        // NOTE(review): the synthetic class name is hard-coded here; it is
        // looked up among the members of the enclosing class.
        Name dummyName = names.fromString("__Logged$Stack");
        Type clazz = tree.sym.enclClass().members().lookup(dummyName).sym.type;
        JCNewClass newClassExpression =
                make.NewClass(null, null, make.QualIdent(clazz.tsym), oldInitializerList, null);
        JCVariableDecl newVarDef =
                make.VarDef(tree.mods, tree.name, make.QualIdent(clazz.tsym), newClassExpression);
        return newVarDef;
    }

    /** True iff the tree is a variable declaration whose type carries @Morph. */
    private boolean isMorphedVariableDeclaration(JCTree tree) {
        return tree.getKind() == Kind.VARIABLE
                && ((JCVariableDecl) tree).getType().type.tsym.getAnnotation(Morph.class) != null;
    }

    /** Turns a dotted name ("a.b.c") into a Select chain expression. */
    private JCExpression stringToExpression(String chain) {
        String[] symbols = chain.split("\\.");
        JCExpression node = make.Ident(names.fromString(symbols[0]));
        for (int i = 1; i < symbols.length; i++) {
            com.sun.tools.javac.util.Name nextName = names.fromString(symbols[i]);
            node = make.Select(node, nextName);
        }
        return node;
    }
}
//This software may be modified and distributed under the terms package wyjs.io; import java.io.*; import java.util.*; import wybs.lang.Build; import wybs.lang.NameID; import wybs.lang.NameResolver.ResolutionError; import wybs.lang.SyntacticElement; import static wybs.lang.SyntaxError.*; import wyfs.lang.Path; import static wyc.lang.WhileyFile.*; import wyc.lang.WhileyFile; import wyc.type.TypeSystem; import wyc.util.ErrorMessages; /** * Writes WYIL bytecodes in a textual from to a given file. * * <b>NOTE:</b> currently, this class is somewhat broken since it does not * provide any way to specify the output directory. Rather, it simply puts the * WYIL file in the same place as the Whiley file. * * @author David J. Pearce * */ public final class JavaScriptFileWriter { private final PrintWriter out; /** * The master project for identifying all resources available to the * builder. This includes all modules declared in the project being verified * and/or defined in external resources (e.g. jar files). */ private final Build.Project project; /** * The type system is useful for managing nominal types and converting them * into their underlying types. 
*/ protected final TypeSystem typeSystem; private boolean verbose = false; private boolean commentTypes = false; private boolean commentSpecifications = false; private WhileyFile wyilfile; public JavaScriptFileWriter(Build.Project project, TypeSystem typeSystem, PrintWriter writer) { this.project = project; this.typeSystem = typeSystem; this.out = writer; } public JavaScriptFileWriter(Build.Project project, TypeSystem typeSystem, OutputStream stream) { this.project = project; this.typeSystem = typeSystem; this.out = new PrintWriter(new OutputStreamWriter(stream)); } // Configuration Methods public void setVerbose(boolean flag) { this.verbose = flag; } // Apply Method public void apply(WhileyFile module) throws IOException { // FIXME: this is a hack this.wyilfile = module; out.println(); HashSet<Type> typeTests = new HashSet<>(); for(Decl d : module.getDeclarations()) { if(d instanceof Decl.StaticVariable) { write((Decl.StaticVariable) d, typeTests); } else if(d instanceof Decl.FunctionOrMethod) { write((Decl.FunctionOrMethod) d, typeTests); } else if(d instanceof Decl.Type) { write((Decl.Type) d, typeTests); } } writeTypeTests(typeTests, new HashSet<>()); out.flush(); } private void write(Decl.Type td, Set<Type> typeTests) { Decl.Variable vardecl = td.getVariableDeclaration(); out.print("function "); out.print(td.getName()); out.print("$("); out.print(vardecl.getName()); out.println(") {"); Tuple<Expr> invariant = td.getInvariant(); if(invariant.size() == 0) { tabIndent(1); out.println("return true;"); } else if(invariant.size() == 1) { tabIndent(1); out.print("return "); writeExpression(invariant.getOperand(0), typeTests); out.println(";"); } else { for(int i=0;i!=invariant.size();++i) { tabIndent(1); if(i == 0) { out.print("var result = ("); } else { out.print("result = result && ("); } writeExpression(invariant.getOperand(i), typeTests); out.println(");"); } tabIndent(1); out.println("return result;"); } out.println("}"); out.println(); } private void 
write(Decl.StaticVariable cd, Set<Type> typeTests) { out.print("var " + cd.getName()); if (cd.hasInitialiser()) { out.print(" = "); writeExpression(cd.getInitialiser(), typeTests); } out.println(";"); } private void write(Decl.FunctionOrMethod method, Set<Type> typeTests) { // FIXME: what to do with private methods? if (method.getModifiers().match(Modifier.Export.class) != null) { writeExportTrampoline(method); } out.print("function "); out.print(method.getName()); writeTypeMangle(method.getType()); writeParameters(method.getParameters()); if(commentTypes) { if (method.getReturns().size() > 0) { out.print(" writeParameters(method.getReturns()); out.println(); } else { out.println(); } } else { out.print(" "); } if(commentSpecifications) { for (Expr precondition : method.getRequires()) { out.print("// requires "); writeExpression(precondition,new HashSet<>()); } for (Expr postcondition : method.getEnsures()) { out.print("// ensures "); writeExpression(postcondition, new HashSet<>()); out.println(); } } if (method.getBody() != null) { out.println("{"); writeBlock(0, method.getBody(), typeTests); out.println("}"); } } private void writeParameters(Tuple<Decl.Variable> parameters) { out.print("("); for (int i = 0; i != parameters.size(); ++i) { if (i != 0) { out.print(", "); } Decl.Variable decl = parameters.getOperand(i); writeType(decl.getType()); out.print(decl.getName()); } out.print(")"); } /** * Create a trampoline for an exported function. This is simply a function * without a name mangle which redirects to the same function with the name * mangle. 
* * @param method */ private void writeExportTrampoline(Decl.FunctionOrMethod method) { Type.Callable ft = method.getType(); Tuple<Decl.Variable> params = method.getParameters(); Tuple<Decl.Variable> returns = method.getReturns(); if (params.size() > 0) { out.print("function "); out.print(method.getName()); writeParameters(params); out.println(" {"); tabIndent(1); if (returns.size() > 0) { out.print("return "); } out.print(method.getName()); writeTypeMangle(ft); writeTrampolineArguments(params); out.println("}"); out.println(); } } private void writeTrampolineArguments(Tuple<Decl.Variable> parameters) { out.print("("); for (int i = 0; i != parameters.size(); ++i) { if (i != 0) { out.print(", "); } Decl.Variable decl = parameters.getOperand(i); out.print(decl.getName()); } out.println(");"); } private void writeBlock(int indent, Stmt.Block block, Set<Type> typeTests) { for (int i = 0; i != block.size(); ++i) { writeStatement(indent, block.getOperand(i), typeTests); } } @SuppressWarnings("unchecked") private void writeStatement(int indent, Stmt stmt, Set<Type> typeTests) { tabIndent(indent+1); switch(stmt.getOpcode()) { case STMT_assert: writeAssert(indent, (Stmt.Assert) stmt, typeTests); break; case STMT_assume: writeAssume(indent, (Stmt.Assume) stmt, typeTests); break; case STMT_assign: writeAssign(indent, (Stmt.Assign) stmt, typeTests); break; case STMT_break: writeBreak(indent, (Stmt.Break) stmt, typeTests); break; case STMT_continue: writeContinue(indent, (Stmt.Continue) stmt, typeTests); break; case STMT_debug: writeDebug(indent, (Stmt.Debug) stmt, typeTests); break; case STMT_dowhile: writeDoWhile(indent, (Stmt.DoWhile) stmt, typeTests); break; case STMT_fail: writeFail(indent, (Stmt.Fail) stmt, typeTests); break; case STMT_if: case STMT_ifelse: writeIf(indent, (Stmt.IfElse) stmt, typeTests); break; case EXPR_indirectinvoke: writeIndirectInvoke((Expr.IndirectInvoke) stmt, typeTests); out.println(";"); break; case EXPR_invoke: writeInvoke((Expr.Invoke) stmt, 
typeTests); out.println(";"); break; case STMT_namedblock: writeNamedBlock(indent, (Stmt.NamedBlock) stmt, typeTests); break; case STMT_while: writeWhile(indent, (Stmt.While) stmt, typeTests); break; case STMT_return: writeReturn(indent, (Stmt.Return) stmt, typeTests); break; case STMT_skip: writeSkip(indent, (Stmt.Skip) stmt, typeTests); break; case STMT_switch: writeSwitch(indent, (Stmt.Switch) stmt, typeTests); break; case DECL_var: case DECL_varinit: writeVariableDeclaration(indent, (Decl.Variable) stmt, typeTests); break; default: throw new IllegalArgumentException("unknown statement encountered (" + stmt.getClass().getName() + ")"); } } private void writeAssert(int indent, Stmt.Assert c, Set<Type> typeTests) { out.print("Wy.assert("); writeExpression(c.getCondition(), typeTests); out.println(");"); } private void writeAssume(int indent, Stmt.Assume c, Set<Type> typeTests) { out.print("Wy.assert("); writeExpression(c.getCondition(), typeTests); out.println(");"); } private void writeAssign(int indent, Stmt.Assign stmt, Set<Type> typeTests) { Tuple<LVal> lhs = stmt.getLeftHandSide(); Tuple<Expr> rhs = stmt.getRightHandSide(); if (lhs.size() == 1) { // easy case writeLVal(lhs.getOperand(0), typeTests); out.print(" = "); writeExpression(rhs.getOperand(0), typeTests); out.println(";"); } else if (lhs.size() > 1) { // FIXME: this is broken when multiple rhs expressions out.print("var $ = "); // Translate right-hand sides writeExpression(rhs.getOperand(0), typeTests); out.println(";"); // Translate left-hand sides for (int i = 0; i != lhs.size(); ++i) { tabIndent(indent + 1); writeLVal(lhs.getOperand(i), typeTests); out.println(" = $[" + i + "];"); } } } private void writeBreak(int indent, Stmt.Break b, Set<Type> typeTests) { out.println("break;"); } private void writeContinue(int indent, Stmt.Continue b, Set<Type> typeTests) { out.println("continue;"); } private void writeDebug(int indent, Stmt.Debug b, Set<Type> typeTests) { } private void writeDoWhile(int indent, 
Stmt.DoWhile b, Set<Type> typeTests) { out.println("do {"); writeBlock(indent+1,b.getBody(), typeTests); tabIndent(indent+1); // FIXME: write loop invariant if DEBUG mode out.print("} while("); writeExpression(b.getCondition(), typeTests); out.println(");"); } private void writeFail(int indent, Stmt.Fail c, Set<Type> typeTests) { out.println("fail"); } private void writeIf(int indent, Stmt.IfElse b, Set<Type> typeTests) { out.print("if("); writeExpression(b.getCondition(), typeTests); out.println(") {"); writeBlock(indent+1,b.getTrueBranch(), typeTests); if(b.hasFalseBranch()) { tabIndent(indent+1); out.println("} else {"); writeBlock(indent+1,b.getFalseBranch(), typeTests); } tabIndent(indent+1); out.println("}"); } private void writeNamedBlock(int indent, Stmt.NamedBlock b, Set<Type> typeTests) { out.print(b.getName()); out.println(":"); writeBlock(indent + 1, b.getBlock(), typeTests); } private void writeWhile(int indent, Stmt.While b, Set<Type> typeTests) { out.print("while("); writeExpression(b.getCondition(), typeTests); out.println(") {"); writeBlock(indent+1,b.getBody(), typeTests); tabIndent(indent+1); out.println("}"); } private void writeReturn(int indent, Stmt.Return b, Set<Type> typeTests) { Tuple<Expr> operands = b.getReturns(); out.print("return"); if (operands.size() == 1) { // easy case out.print(" "); writeExpression(operands.getOperand(0), typeTests); } else if (operands.size() > 0) { // harder case out.print(" ["); for (int i = 0; i != operands.size(); ++i) { if (i != 0) { out.print(", "); } writeExpression(operands.getOperand(i), typeTests); } out.print("]"); } out.println(";"); } private void writeSkip(int indent, Stmt.Skip b, Set<Type> typeTests) { out.println("// skip"); } private void writeSwitch(int indent, Stmt.Switch b, Set<Type> typeTests) { out.print("switch("); writeExpression(b.getCondition(), typeTests); out.println(") {"); Tuple<Stmt.Case> cases = b.getCases(); for (int i = 0; i != cases.size(); ++i) { // FIXME: ugly Stmt.Case cAse 
= cases.getOperand(i); Tuple<Expr> values = cAse.getConditions(); if (values.size() == 0) { tabIndent(indent + 1); out.println("default:"); } else { for (int j = 0; j != values.size(); ++j) { tabIndent(indent + 1); out.print("case "); // FIXME: this needs to be fixed out.print(values.getOperand(j)); out.println(":"); } } writeBlock(indent + 1, cAse.getBlock(), typeTests); tabIndent(indent + 2); out.println("break;"); } tabIndent(indent + 1); out.println("}"); } private void writeVariableDeclaration(int indent, Decl.Variable decl, Set<Type> typeTests) { out.print("var "); writeType(decl.getType()); out.print(decl.getName()); if (decl.hasInitialiser()) { out.print(" = "); writeExpression(decl.getInitialiser(), typeTests); } out.println(";"); } /** * Write a bracketed operand if necessary. Any operand whose human-readable * representation can contain whitespace must have brackets around it. * * @param operand * @param enclosing * @param out */ private void writeBracketedExpression(Expr expr, Set<Type> typeTests) { boolean needsBrackets = needsBrackets(expr); if (needsBrackets) { out.print("("); } writeExpression(expr, typeTests); if (needsBrackets) { out.print(")"); } } @SuppressWarnings("unchecked") private void writeExpression(Expr expr, Set<Type> typeTests) { try { switch (expr.getOpcode()) { case EXPR_alen: writeArrayLength((Expr.ArrayLength) expr, typeTests); break; case EXPR_aread: writeArrayIndex((Expr.ArrayAccess) expr, typeTests); break; case EXPR_ainit: writeArrayInitialiser((Expr.ArrayInitialiser) expr, typeTests); break; case EXPR_agen: writeArrayGenerator((Expr.ArrayGenerator) expr, typeTests); break; case EXPR_bnot: writeInvertOperator((Expr.BitwiseComplement) expr, typeTests); break; case EXPR_cast: writeConvert((Expr.Cast) expr, typeTests); break; case EXPR_constant: writeConst((Expr.Constant) expr, typeTests); break; case EXPR_pread: writeDereference((Expr.Dereference) expr, typeTests); break; case EXPR_rread: writeFieldLoad((Expr.RecordAccess) expr, 
typeTests); break; case EXPR_indirectinvoke: writeIndirectInvoke((Expr.IndirectInvoke) expr, typeTests); break; case EXPR_invoke: writeInvoke((Expr.Invoke) expr, typeTests); break; case DECL_lambda: writeLambdaDeclaration((Decl.Lambda) expr, typeTests); break; case EXPR_lread: writeLambdaAccess((Expr.LambdaAccess) expr, typeTests); break; case EXPR_rinit: writeRecordConstructor((Expr.RecordInitialiser) expr, typeTests); break; case EXPR_pinit: writeNewObject((Expr.New) expr, typeTests); break; case EXPR_lnot: case EXPR_ineg: writePrefixLocations((Expr.Operator) expr, typeTests); break; case EXPR_lall: case EXPR_lsome: writeQuantifier((Expr.Quantifier) expr, typeTests); break; case EXPR_eq: case EXPR_neq: writeEqualityOperator((Expr.Operator) expr, typeTests); break; case EXPR_idiv: writeDivideOperator((Expr.Division) expr, typeTests); break; case EXPR_iadd: case EXPR_isub: case EXPR_imul: case EXPR_irem: case EXPR_ilt: case EXPR_ile: case EXPR_igt: case EXPR_igteq: case EXPR_land: case EXPR_lor: case EXPR_bor: case EXPR_bxor: case EXPR_band: writeInfixOperator((Expr.Operator) expr, typeTests); break; case EXPR_limplies: writeLogicalImplication((Expr.LogicalImplication) expr, typeTests); break; case EXPR_liff: writeLogicalIff((Expr.LogicalIff) expr, typeTests); break; case EXPR_bshl: case EXPR_bshr: writeShiftOperator((Expr.Operator) expr, typeTests); break; case EXPR_is: writeIsOperator((Expr.Is) expr, typeTests); break; case EXPR_staticvar: writeStaticVariableAccess((Expr.StaticVariableAccess) expr, typeTests); break; case EXPR_varmove: writeVariableMove((Expr.VariableAccess) expr, typeTests); break; case EXPR_varcopy: writeVariableCopy((Expr.VariableAccess) expr, typeTests); break; default: throw new IllegalArgumentException("unknown expresion encountered: " + expr.getClass().getName()); } } catch (ResolutionError e) { // FIXME: the latter is rather ugly throw new InternalFailure("resolution failure", ((WhileyFile) expr.getHeap()).getEntry(), expr, e); } } 
    // Emits "src.length" for an array-length expression.
    private void writeArrayLength(Expr.ArrayLength expr, Set<Type> typeTests) {
        writeExpression(expr.getSource(), typeTests);
        out.print(".length");
    }

    // Emits "src[index]" for an array access.
    private void writeArrayIndex(Expr.ArrayAccess expr, Set<Type> typeTests) {
        writeExpression(expr.getSource(), typeTests);
        out.print("[");
        writeExpression(expr.getSubscript(), typeTests);
        out.print("]");
    }

    // Emits "[e0, e1, ...]" for an array initialiser.
    private void writeArrayInitialiser(Expr.ArrayInitialiser expr, Set<Type> typeTests) {
        out.print("[");
        for (int i = 0; i != expr.size(); ++i) {
            if (i != 0) {
                out.print(", ");
            }
            writeExpression(expr.getOperand(i), typeTests);
        }
        out.print("]");
    }

    // Emits "Wy.array(value, length)" for an array generator.
    private void writeArrayGenerator(Expr.ArrayGenerator expr, Set<Type> typeTests) {
        out.print("Wy.array(");
        writeExpression(expr.getValue(), typeTests);
        out.print(", ");
        writeExpression(expr.getLength(), typeTests);
        out.print(")");
    }

    // Casts are erased: only the casted expression is emitted.
    private void writeConvert(Expr.Cast expr, Set<Type> typeTests) {
        writeExpression(expr.getCastedExpr(), typeTests);
    }

    private void writeConst(Expr.Constant expr, Set<Type> typeTests) {
        writeConstant(expr.getValue());
    }

    // Emits "src.field" for a record access.
    private void writeFieldLoad(Expr.RecordAccess expr, Set<Type> typeTests) {
        writeBracketedExpression(expr.getSource(), typeTests);
        out.print("." + expr.getField());
    }

    // Emits "src(arg0, arg1, ...)" for an indirect (first-class) invocation.
    private void writeIndirectInvoke(Expr.IndirectInvoke expr, Set<Type> typeTests) {
        writeExpression(expr.getSource(), typeTests);
        Tuple<Expr> arguments = expr.getArguments();
        out.print("(");
        for(int i=0;i!=arguments.size();++i) {
            if(i!=0) {
                out.print(", ");
            }
            writeExpression(arguments.getOperand(i), typeTests);
        }
        out.print(")");
    }

    // Emits "name$mangle(arg0, ...)" for a direct invocation.
    private void writeInvoke(Expr.Invoke expr, Set<Type> typeTests) {
        Name name = expr.getName();
        // FIXME: this doesn't work for imported function symbols!
        out.print(name);
        writeTypeMangle(expr.getSignature());
        out.print("(");
        Tuple<Expr> args = expr.getArguments();
        for (int i = 0; i != args.size(); ++i) {
            if (i != 0) {
                out.print(", ");
            }
            writeExpression(args.getOperand(i), typeTests);
        }
        out.print(")");
    }

    // Emits a lambda as an anonymous JS function returning its body.
    private void writeLambdaDeclaration(Decl.Lambda expr, Set<Type> typeTests) {
        out.print("function(");
        Tuple<Decl.Variable> parameters = expr.getParameters();
        for (int i = 0; i != parameters.size(); ++i) {
            Decl.Variable var = parameters.getOperand(i);
            if (i != 0) {
                out.print(", ");
            }
            writeType(var.getType());
            out.print(var.getName());
        }
        out.print(") { ");
        out.print("return ");
        writeExpression(expr.getBody(), typeTests);
        out.print("; }");
    }

    private void writeLambdaAccess(Expr.LambdaAccess expr, Set<Type> typeTests) {
        // NOTE: the reason we use a function declaration here (i.e. instead of
        // just assigning the name) is that it protects against potential name
        // clashes with local variables.
        Type.Callable ft = expr.getSignature();
        Tuple<Type> params = ft.getParameters();
        // Wrap the named function in a forwarding closure over p0..pN.
        out.print("function(");
        for(int i=0;i!=params.size();++i) {
            if(i!=0) {
                out.print(",");
            }
            out.print("p" + i);
        }
        out.print(") { return ");
        out.print(expr.getName());
        writeTypeMangle(ft);
        out.print("(");
        for(int i=0;i!=params.size();++i) {
            if(i!=0) {
                out.print(",");
            }
            out.print("p" + i);
        }
        out.print("); }");
    }

    // Emits "Wy.record({f0: e0, ...})" for a record initialiser.
    private void writeRecordConstructor(Expr.RecordInitialiser expr, Set<Type> typeTests) {
        out.print("Wy.record({");
        for (int i = 0; i != expr.size(); ++i) {
            Pair<Identifier,Expr> field = expr.getOperand(i);
            if (i != 0) {
                out.print(", ");
            }
            out.print(field.getFirst());
            out.print(": ");
            writeExpression(field.getSecond(), typeTests);
        }
        out.print("})");
    }

    // Heap allocation maps onto the Wy.Ref wrapper object.
    private void writeNewObject(Expr.New expr, Set<Type> typeTests) {
        out.print("new Wy.Ref(");
        writeExpression(expr.getValue(), typeTests);
        out.print(")");
    }

    private void writeDereference(Expr.Dereference expr, Set<Type> typeTests) {
        out.print("Wy.deref(");
        writeExpression(expr.getOperand(), typeTests);
        out.print(")");
    }

    private void writePrefixLocations(Expr.Operator expr, Set<Type> typeTests) {
        // Prefix operators
        out.print(opcode(expr.getOpcode()));
        writeBracketedExpression(expr.getOperand(0), typeTests);
    }

    // Bitwise complement, masked to a byte.
    private void writeInvertOperator(Expr.BitwiseComplement expr, Set<Type> typeTests) {
        // Prefix operators
        out.print("((~");
        writeBracketedExpression(expr.getOperand(0), typeTests);
        out.print(") & 0xFF)");
    }

    // Equality: use native ==/!= for copyable values, Wy.equals otherwise.
    private void writeEqualityOperator(Expr.Operator expr, Set<Type> typeTests) throws ResolutionError {
        Expr lhs = expr.getOperand(0);
        Expr rhs = expr.getOperand(1);
        // FIXME: put this back
        Type lhsT = typeSystem.inferType(lhs);
        Type rhsT = typeSystem.inferType(rhs);
        if(isCopyable(lhsT,lhs) && isCopyable(rhsT,rhs)) {
            writeInfixOperator(expr, typeTests);
        } else {
            if (expr instanceof Expr.NotEqual) {
                out.print("!");
            }
            out.print("Wy.equals(");
            writeExpression(lhs, typeTests);
            out.print(", ");
            writeExpression(rhs, typeTests);
            out.print(")");
        }
    }

    // Integer division: JS '/' is floating point, so floor the result.
    private void writeDivideOperator(Expr.Division expr, Set<Type> typeTests) {
        out.print("Math.floor(");
        writeBracketedExpression(expr.getOperand(0), typeTests);
        out.print(" / ");
        writeBracketedExpression(expr.getOperand(1), typeTests);
        out.print(")");
    }

    private void writeInfixOperator(Expr.Operator expr, Set<Type> typeTests) {
        writeBracketedExpression(expr.getOperand(0), typeTests);
        out.print(" ");
        out.print(opcode(expr.getOpcode()));
        out.print(" ");
        writeBracketedExpression(expr.getOperand(1), typeTests);
    }

    // a ==> b translated as !a || b.
    private void writeLogicalImplication(Expr.LogicalImplication expr, Set<Type> typeTests) {
        out.print("!");
        writeBracketedExpression(expr.getOperand(0), typeTests);
        out.print("||");
        writeBracketedExpression(expr.getOperand(1), typeTests);
    }

    // a <==> b translated as a == b.
    private void writeLogicalIff(Expr.LogicalIff expr, Set<Type> typeTests) {
        writeBracketedExpression(expr.getOperand(0), typeTests);
        out.print("==");
        writeBracketedExpression(expr.getOperand(1), typeTests);
    }

    // Shift, masked back to a byte.
    private void writeShiftOperator(Expr.Operator expr, Set<Type> typeTests) {
        out.print("((");
        writeBracketedExpression(expr.getOperand(0), typeTests);
        out.print(" ");
        out.print(opcode(expr.getOpcode()));
        out.print(" ");
        writeBracketedExpression(expr.getOperand(1), typeTests);
        out.print(") & 0xFF)");
    }

    // Runtime type test: inline for null/int/bool, otherwise emit a call to a
    // generated "is$<mangle>" helper and record the type for later generation.
    private void writeIsOperator(Expr.Is expr, Set<Type> typeTests) {
        Type t = expr.getTestType();
        // Handle all non-trivial cases directly
        if(t instanceof Type.Null) {
            writeExpression(expr.getTestExpr(), typeTests);
            out.print(" === null");
        } else if(t instanceof Type.Int) {
            // FIXME: this will need to be updated when unbounded arithmetic is
            // supported
            out.print("typeof ");
            writeExpression(expr.getTestExpr(), typeTests);
            out.print(" === \"number\"");
        } else if(t instanceof Type.Bool) {
            out.print("typeof ");
            writeExpression(expr.getTestExpr(), typeTests);
            out.print(" === \"boolean\"");
        } else {
            // Fall back case
            out.print("is$");
            writeTypeMangle(t);
            out.print("(");
            writeExpression(expr.getTestExpr(), typeTests);
            out.print(")");
            // Register this type test to be written out as an appropriately
            // named function.
            typeTests.add(t);
        }
    }

    // Quantifiers map onto Wy.all / Wy.some over an array range.
    @SuppressWarnings("unchecked")
    private void writeQuantifier(Expr.Quantifier expr, Set<Type> typeTests) {
        out.print("Wy.");
        out.print((expr instanceof Expr.UniversalQuantifier) ? "all" : "some");
        out.print("(");
        Tuple<Decl.Variable> params = expr.getParameters();
        for (int i = 0; i != params.size(); ++i) {
            Decl.Variable param = params.getOperand(i);
            if(i > 0) {
                throw new RuntimeException("Need to support multiple operand groups");
            }
            // FIXME: for now assume initialiser must be an array range.
            Expr.ArrayRange range = (Expr.ArrayRange) param.getInitialiser();
            writeExpression(range.getStart(), typeTests);
            out.print(",");
            writeExpression(range.getEnd(), typeTests);
        }
        out.print(",function(");
        for (int i = 0; i != params.size(); ++i) {
            Decl.Variable param = params.getOperand(i);
            out.print(param.getName());
        }
        out.print("){return ");
        writeExpression(expr.getBody(), typeTests);
        out.print(";})");
    }

    private void writeStaticVariableAccess(Expr.StaticVariableAccess expr, Set<Type> typeTests) {
        // FIXME: this is horrendously broken
        out.print("Wy.copy(" + expr.getName() + ")");
    }

    // A "move" transfers ownership, so no defensive copy is needed.
    private void writeVariableMove(Expr.VariableAccess expr, Set<Type> typeTests) {
        Decl.Variable vd = expr.getVariableDeclaration();
        out.print(vd.getName());
    }

    // A "copy" clones non-copyable (reference-like) values via Wy.copy.
    private void writeVariableCopy(Expr.VariableAccess expr, Set<Type> typeTests) {
        Decl.Variable vd = expr.getVariableDeclaration();
        if (isCopyable(vd.getType(), expr)) {
            out.print(vd.getName());
        } else {
            out.print("Wy.copy(" + vd.getName() + ")");
        }
    }

    // Dispatches an assignable location to its emitter by opcode.
    private void writeLVal(LVal lval, Set<Type> typeTests) {
        switch (lval.getOpcode()) {
        case EXPR_aread:
            writeArrayIndexLVal((Expr.ArrayAccess) lval, typeTests);
            break;
        case EXPR_pread:
            writeDereferenceLVal((Expr.Dereference) lval, typeTests);
            break;
        case EXPR_rread:
            writeFieldLoadLVal((Expr.RecordAccess) lval, typeTests);
            break;
        case EXPR_varcopy:
        case EXPR_varmove:
            writeVariableAccessLVal((Expr.VariableAccess) lval, typeTests);
            break;
        default:
            throw new IllegalArgumentException("invalid lval: " + lval);
        }
    }

    // Writing through a reference assigns its ".$ref" slot.
    private void writeDereferenceLVal(Expr.Dereference expr, Set<Type> typeTests) {
        writeLVal((LVal) expr.getOperand(), typeTests);
        out.print(".$ref");
    }

    private void writeArrayIndexLVal(Expr.ArrayAccess expr, Set<Type> typeTests) {
        writeLVal((LVal) expr.getSource(), typeTests);
        out.print("[");
        writeExpression(expr.getSubscript(), typeTests);
        out.print("]");
    }

    private void writeFieldLoadLVal(Expr.RecordAccess expr, Set<Type> typeTests) {
        writeLVal((LVal) expr.getSource(), typeTests);
        out.print("." + expr.getField());
    }

    private void writeVariableAccessLVal(Expr.VariableAccess expr, Set<Type> typeTests) {
        Decl.Variable vd = expr.getVariableDeclaration();
        out.print(vd.getName());
    }

    // Emits a constant value: bytes as parseInt of a binary string, UTF8
    // strings as arrays of byte values, everything else via toString.
    private void writeConstant(Value c) {
        if(c instanceof Value.Byte) {
            Value.Byte b = (Value.Byte) c;
            // FIXME: support es6 binary literals
            // out.print("0b");
            out.print("parseInt('");
            out.print(Integer.toBinaryString(b.get() & 0xFF));
            out.print("',2)");
        } else if(c instanceof Value.UTF8) {
            Value.UTF8 s = (Value.UTF8) c;
            byte[] bytes = s.get();
            out.print("[");
            for(int i=0;i!=bytes.length;++i) {
                if(i != 0) {
                    out.print(", ");
                }
                out.print(bytes[i]);
            }
            out.print("]");
        } else {
            out.print(c);
        }
    }

    // Emits an "is$<mangle>" function per requested type test, recursing for
    // any further types those tests depend on (allTests prevents repeats).
    private void writeTypeTests(Set<Type> typeTests, Set<Type> allTests) {
        HashSet<Type> deps = new HashSet<>();
        for(Type type : typeTests) {
            out.print("function is$");
            writeTypeMangle(type);
            out.print("(val) {");
            writeTypeTest(type, deps);
            out.println("}");
            out.println();
        }
        deps.removeAll(allTests);
        allTests.addAll(deps);
        if(deps.size() > 0) {
            writeTypeTests(deps,allTests);
        }
    }

    // Dispatches a type-test body to the appropriate emitter by type class.
    private void writeTypeTest(Type test, Set<Type> deps) {
        if(test instanceof Type.Any) {
            writeTypeTestAny((Type.Primitive) test,deps);
        } else if(test instanceof Type.Null) {
            writeTypeTestNull((Type.Primitive) test,deps);
        } else if(test instanceof Type.Bool) {
            writeTypeTestBool((Type.Primitive) test,deps);
        } else if(test instanceof Type.Byte) {
            // FIXME: This is clear incorrect. However, there is no better
            // alternative. The good news is that the byte type is slated to be
            // removed in future versions of Whiley and, hence, this problem
            // will go away.
            writeTypeTestInt((Type.Primitive) test,deps);
        } else if(test instanceof Type.Int) {
            writeTypeTestInt((Type.Primitive) test,deps);
        } else if(test instanceof Type.Nominal) {
            writeTypeTestNominal((Type.Nominal) test,deps);
        } else if(test instanceof Type.Array) {
            writeTypeTestArray((Type.Array) test,deps);
        } else if(test instanceof Type.Reference) {
            writeTypeTestReference((Type.Reference) test,deps);
        } else if(test instanceof Type.Record) {
            writeTypeTestRecord((Type.Record) test,deps);
        } else if(test instanceof Type.Callable) {
            writeTypeTestFunctionOrMethod((Type.Callable) test,deps);
        } else if(test instanceof Type.Negation) {
            writeTypeTestNegation((Type.Negation) test,deps);
        } else if(test instanceof Type.Union) {
            writeTypeTestUnion((Type.Union) test,deps);
        } else if(test instanceof Type.Intersection) {
            writeTypeTestIntersection((Type.Intersection) test,deps);
        } else {
            throw new RuntimeException("unknown type encountered: " + test);
        }
    }

    private void writeTypeTestAny(Type.Primitive test, Set<Type> deps) {
        out.print(" return true; ");
    }

    private void writeTypeTestNull(Type.Primitive test, Set<Type> deps) {
        out.print(" return val === null; ");
    }

    private void writeTypeTestBool(Type.Primitive test, Set<Type> deps) {
        out.print(" return typeof val === \"boolean\"; ");
    }

    private void writeTypeTestInt(Type.Primitive test, Set<Type> deps) {
        out.print(" return typeof val === \"number\"; ");
    }

    // Nominal test: delegate to the underlying type's test AND the generated
    // "Name$" invariant predicate; the underlying type becomes a dependency.
    private void writeTypeTestNominal(Type.Nominal test, Set<Type> deps) {
        // FIXME: this is so horrendously broken
        Name name = test.getName();
        WhileyFile wyilFile = this.wyilfile;
        Decl.Type td = wyilFile.getDeclaration(name.getLast(), null, Decl.Type.class);
        if (td == null) {
            throw new RuntimeException("undefined nominal type encountered: " + name);
        }
        out.print(" return is$");
        writeTypeMangle(td.getVariableDeclaration().getType());
        out.print("(val) && " + name.getLast() + "$(val); ");
        deps.add(td.getVariableDeclaration().getType());
    }

    // Counter used to generate fresh variable names in type tests.
    private static int variableIndex = 0;

    private void 
writeTypeTestArray(Type.Array test, Set<Type> deps) { out.println(); tabIndent(1); out.println("if(val != null && val.constructor === Array) {"); tabIndent(2); // FIXME: could optimise this in the case of element "any" String var = "i" + (variableIndex++); out.println("for(var x=0;x!=val.length;++x) {".replaceAll("x", var)); tabIndent(3); out.print("if(!is$"); writeTypeMangle(test.getElement()); out.println("(val[" + var +"])) {"); tabIndent(4); out.println("return false;"); tabIndent(3); out.println("}"); tabIndent(2); out.println("}"); tabIndent(2); out.println("return true;"); tabIndent(1); out.println("}"); tabIndent(1); out.println("return false;"); // Add a follow-on dependency deps.add(test.getElement()); } private void writeTypeTestReference(Type.Reference test, Set<Type> deps) { out.println(); tabIndent(1); out.println("if(val != null && val.constructor === Wy.Ref) {"); tabIndent(2); out.print(" return is$"); writeTypeMangle(test.getElement()); out.println("(Wy.deref(val));"); tabIndent(1); out.println("}"); tabIndent(1); out.println("return false;"); deps.add(test.getElement()); } private void writeTypeTestRecord(Type.Record test, Set<Type> deps) { out.println(); tabIndent(1); out.print("if(val != null && typeof val === \"object\""); Tuple<Decl.Variable> fields = test.getFields(); if (!test.isOpen()) { out.print(" && Object.keys(val).length === " + fields.size()); } out.println(") {"); for (int i = 0; i != fields.size(); ++i) { Decl.Variable field = fields.getOperand(i); tabIndent(2); out.print("if(val." + field.getName() + " === \"undefined\" || !is$"); writeTypeMangle(field.getType()); out.println("(val." + field.getName() + ")) { return false; }"); deps.add(field.getType()); } tabIndent(2); out.println("return true;"); tabIndent(1); out.println("}"); tabIndent(1); out.println("return false;"); } /** * Perform a runtime type test looking for a function or method of a given * type. 
This is tricky in JavaScript since there is insufficient type * information available. Specifically, <code>typeof f</code> (for some * function f) returns only <code>"function"</code>. * * @param test * @param deps */ private void writeTypeTestFunctionOrMethod(Type.Callable test, Set<Type> deps) { out.println(); tabIndent(1); out.println("if(val != null && typeof val === \"function\") {"); // FIXME: we need to do more here to distinguish functions. We could, // for example, try to embed their signature string. tabIndent(2); out.println("return true;"); tabIndent(1); out.println("}"); tabIndent(1); out.println("return false;"); } private void writeTypeTestNegation(Type.Negation test, Set<Type> deps) { out.print(" return !(is$"); writeTypeMangle(test.getElement()); out.print("(val)); "); deps.add(test.getElement()); } private void writeTypeTestUnion(Type.Union test, Set<Type> deps) { out.println(); for(int i=0;i!=test.size();++i) { Type bound = test.getOperand(i); tabIndent(1); out.print("if(is$"); writeTypeMangle(bound); out.println("(val)) { return true; }"); deps.add(bound); } tabIndent(1); out.print("return false;"); } private void writeTypeTestIntersection(Type.Intersection test, Set<Type> deps) { out.println(); for(int i=0;i!=test.size();++i) { Type bound = test.getOperand(i); tabIndent(1); out.print("if(!is$"); writeTypeMangle(bound); out.println("(val)) { return false; }"); deps.add(bound); } tabIndent(1); out.print("return true;"); } private void writeTypeMangle(Type.Callable fmt) { Tuple<Type> params = fmt.getParameters(); for (int i = 0; i != params.size(); ++i) { if (i == 0) { out.print("_"); } writeTypeMangle(params.getOperand(i)); } } private void writeTypeMangle(Type t) { if (t instanceof Type.Any) { out.print("T"); } else if (t instanceof Type.Null) { out.print("N"); } else if (t instanceof Type.Bool) { out.print("B"); } else if (t instanceof Type.Byte) { out.print("U"); } else if (t instanceof Type.Int) { out.print("I"); } else if (t instanceof 
Type.Array) { writeTypeMangleArray((Type.Array) t); } else if (t instanceof Type.Reference) { writeTypeMangleReference((Type.Reference) t); } else if (t instanceof Type.Record) { writeTypeMangleRecord((Type.Record) t); } else if (t instanceof Type.Nominal) { writeTypeMangleNominal((Type.Nominal) t); } else if (t instanceof Type.Callable) { writeTypeMangleFunctionOrMethod((Type.Callable) t); } else if (t instanceof Type.Negation) { writeTypeMangleNegation((Type.Negation) t); } else if (t instanceof Type.Union) { writeTypeMangleUnion((Type.Union) t); } else if (t instanceof Type.Intersection) { writeTypeMangleIntersection((Type.Intersection) t); } else { throw new IllegalArgumentException("unknown type encountered: " + t); } } private void writeTypeMangleArray(Type.Array t) { out.print("a"); writeTypeMangle(t.getElement()); } private void writeTypeMangleReference(Type.Reference t) { out.print("p"); if (t.hasLifetime()) { String lifetime = t.getLifetime().get(); if(lifetime.equals("*")) { out.print("_"); } else { out.print(lifetime.length()); out.print(lifetime); } } else { out.print("0"); } writeTypeMangle(t.getElement()); } private void writeTypeMangleRecord(Type.Record rt) { out.print("r"); Tuple<Decl.Variable> fields = rt.getFields(); out.print(fields.size()); for (int i = 0; i != fields.size(); ++i) { Decl.Variable field = fields.getOperand(i); writeTypeMangle(field.getType()); String fieldName = field.getName().get(); out.print(fieldName.length()); out.print(fieldName); } } private void writeTypeMangleNominal(Type.Nominal t) { out.print("n"); // FIXME: need to figure out package String name = t.getName().getLast().get(); out.print(name.length()); out.print(name); } private void writeTypeMangleFunctionOrMethod(Type.Callable t) { if (t instanceof Type.Function) { out.print("f"); } else { out.print("m"); } Tuple<Type> params = t.getParameters(); out.print(params.size()); for (int i = 0; i != params.size(); ++i) { writeTypeMangle(params.getOperand(i)); } Tuple<Type> 
returns = t.getReturns(); out.print(returns.size()); for (int i = 0; i != returns.size(); ++i) { writeTypeMangle(returns.getOperand(i)); } out.print("e"); } private void writeTypeMangleNegation(Type.Negation t) { out.print("n"); writeTypeMangle(t.getElement()); } private void writeTypeMangleUnion(Type.Union t) { out.print("u"); out.print(t.size()); for(int i=0;i!=t.size();++i) { writeTypeMangle(t.getOperand(i)); } } private void writeTypeMangleIntersection(Type.Intersection t) { out.print("c"); out.print(t.size()); for(int i=0;i!=t.size();++i) { writeTypeMangle(t.getOperand(i)); } } private void writeType(Type t) { if(commentTypes) { out.print("/*"); out.print(t); out.print("*/"); } } /** * Return true if the type in question can be copied directly. More * specifically, if a bitwise copy of the value is sufficient to fully copy * it. In general, this is true for primitive data types in JavaScript. But, * for array types or general class types, it is not true (since these are * references into the heap). As an exception, class types which are known * to be immutable can be safely considered as copyable. 
* * @param type * @return */ private boolean isCopyable(Type type, SyntacticElement context) { if (type instanceof Type.Primitive) { return true; } else if (type instanceof Type.Callable) { return true; } else if (type instanceof Type.Reference) { return true; } else if (type instanceof Type.Nominal) { Type.Nominal tn = (Type.Nominal) type; Name nid = tn.getName(); // FIXME: following line is a temporary hack WhileyFile wyilFile = this.wyilfile; Decl.Type td = wyilFile.getDeclaration(nid.getLast(), null, Decl.Type.class); if (td == null) { throw new RuntimeException("undefined nominal type encountered: " + nid); } return isCopyable(td.getType(), context); } else { return false; } } private boolean needsBrackets(Expr e) { switch(e.getOpcode()) { case EXPR_cast: case EXPR_iadd: case EXPR_isub: case EXPR_imul: case EXPR_idiv: case EXPR_irem: case EXPR_eq: case EXPR_neq: case EXPR_ilt: case EXPR_ile: case EXPR_igt: case EXPR_igteq: case EXPR_land: case EXPR_lor: case EXPR_bor: case EXPR_bxor: case EXPR_band: case EXPR_bshl: case EXPR_bshr: case EXPR_is: case EXPR_pinit: return true; } return false; } private static String opcode(int k) { switch(k) { case EXPR_ineg: return "-"; case EXPR_lnot: return "!"; case EXPR_bnot: return "~"; case EXPR_pread: return "*"; // Binary case EXPR_iadd: return "+"; case EXPR_isub: return "-"; case EXPR_imul: return "*"; case EXPR_idiv: return "/"; case EXPR_irem: return "%"; case EXPR_eq: return "=="; case EXPR_neq: return "!="; case EXPR_ilt: return "<"; case EXPR_ile: return "<="; case EXPR_igt: return ">"; case EXPR_igteq: return ">="; case EXPR_land: return "&&"; case EXPR_lor: return "||"; case EXPR_bor: return "|"; case EXPR_bxor: return "^"; case EXPR_band: return "&"; case EXPR_bshl: return "<<"; case EXPR_bshr: return ">>"; case EXPR_is: return "is"; case EXPR_pinit: return "new"; default: throw new IllegalArgumentException("unknown operator kind : " + k); } } private void tabIndent(int indent) { indent = indent * 4; for(int 
i=0;i<indent;++i) { out.print(" "); } } }
package seedu.doit.ui;

import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;

import org.reflections.Reflections;

import javafx.beans.property.SimpleStringProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.Scene;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Region;
import javafx.stage.Stage;
import seedu.doit.commons.core.LogsCenter;
import seedu.doit.commons.util.FxViewUtil;
import seedu.doit.logic.commands.Command;

//@@author A0160076L
/**
 * Controller for a help page. Presents every {@link Command} subclass found
 * via reflection as a row in a four-column table (command, parameter,
 * result, example).
 */
public class HelpWindow extends UiPart<Region> {

    private static final Logger logger = LogsCenter.getLogger(HelpWindow.class);
    private static final String ICON = "/images/help_icon.png";
    private static final String FXML = "HelpWindow.fxml";
    private static final String TITLE = "Help";
    private final Stage dialogStage;
    private static final double COMMAND_COLUMN_WIDTH = 0.15;
    private static final double PARAMETER_COLUMN_WIDTH = 0.25;
    private static final double RESULT_COLUMN_WIDTH = 0.3;
    private static final double EXAMPLE_COLUMN_WIDTH = 0.3;
    // Shared across all HelpWindow instances; populated lazily by loadHelpList().
    private static ObservableList<Map<CommandColumns, String>> commandList = FXCollections.observableArrayList();

    private enum CommandColumns {
        COMMAND, PARAMETER, RESULT, EXAMPLE
    }

    @FXML
    private AnchorPane helpWindowRoot;
    @FXML
    private TableView<Map<CommandColumns, String>> commandTable;
    @FXML
    private TableColumn<Map<CommandColumns, String>, String> commandColumn;
    @FXML
    private TableColumn<Map<CommandColumns, String>, String> parameterColumn;
    @FXML
    private TableColumn<Map<CommandColumns, String>, String> resultColumn;
    @FXML
    private TableColumn<Map<CommandColumns, String>, String> exampleColumn;

    /**
     * Initializes the controller class. This method is automatically called
     * after the fxml file has been loaded.
     */
    @FXML
    private void initialize() {
        this.commandColumn.setCellValueFactory(cellData ->
            new SimpleStringProperty(cellData.getValue().get(CommandColumns.COMMAND)));
        this.parameterColumn.setCellValueFactory(cellData ->
            new SimpleStringProperty(cellData.getValue().get(CommandColumns.PARAMETER)));
        this.resultColumn.setCellValueFactory(cellData ->
            new SimpleStringProperty(cellData.getValue().get(CommandColumns.RESULT)));
        this.exampleColumn.setCellValueFactory(cellData ->
            new SimpleStringProperty(cellData.getValue().get(CommandColumns.EXAMPLE)));
        this.commandTable.setItems(commandList);
        this.commandTable.setEditable(false);
    }

    public HelpWindow() {
        super(FXML);
        Scene scene = new Scene(getRoot(), 1020, 350);
        //Null passed as the parent stage to make it non-modal.
        this.dialogStage = createDialogStage(TITLE, null, scene);
        FxViewUtil.setStageIcon(this.dialogStage, ICON);
    }

    /** Binds the column widths to the table width and populates the rows. */
    public void configure() {
        this.commandColumn.prefWidthProperty().bind(this.commandTable.widthProperty().multiply(COMMAND_COLUMN_WIDTH));
        this.parameterColumn.prefWidthProperty().bind(this.commandTable.widthProperty().multiply(PARAMETER_COLUMN_WIDTH));
        this.resultColumn.prefWidthProperty().bind(this.commandTable.widthProperty().multiply(RESULT_COLUMN_WIDTH));
        this.exampleColumn.prefWidthProperty().bind(this.commandTable.widthProperty().multiply(EXAMPLE_COLUMN_WIDTH));
        loadHelpList();
    }

    public void show() {
        logger.fine("Showing help page about the application.");
        this.dialogStage.showAndWait();
    }

    /**
     * Uses Java reflection followed by Java stream.map() to retrieve all commands for listing on the Help
     * window dynamically.
     */
    private void loadHelpList() {
        // BUG FIX: commandList is static, so repeated configure() calls (or a
        // second HelpWindow) previously appended duplicate rows. Populate once.
        if (!commandList.isEmpty()) {
            return;
        }
        new Reflections("seedu.doit").getSubTypesOf(Command.class)
            .stream()
            .map(s -> {
                try {
                    Map<CommandColumns, String> map = new HashMap<>();
                    map.put(CommandColumns.COMMAND, s.getMethod("getName").invoke(null).toString());
                    map.put(CommandColumns.PARAMETER, s.getMethod("getParameter").invoke(null).toString());
                    map.put(CommandColumns.RESULT, s.getMethod("getResult").invoke(null).toString());
                    map.put(CommandColumns.EXAMPLE, s.getMethod("getExample").invoke(null).toString());
                    return map;
                } catch (NullPointerException e) {
                    return null; // Suppress this exception as we expect some Commands to not conform to these methods
                } catch (Exception e) {
                    logger.severe("Java reflection for Command class failed");
                    // BUG FIX: preserve the original cause instead of throwing a bare RuntimeException.
                    throw new RuntimeException("Java reflection for Command class failed", e);
                }
            })
            .filter(p -> p != null) // remove nulls
            .sorted((lhs, rhs) -> lhs.get(CommandColumns.COMMAND).compareTo(rhs.get(CommandColumns.COMMAND)))
            .forEach(m -> commandList.add(m));
    }
}
package tars.logic.parser;

import tars.commons.exceptions.IllegalValueException;
import tars.commons.util.StringUtil;
import tars.logic.commands.AddCommand;
import tars.logic.commands.ClearCommand;
import tars.logic.commands.Command;
import tars.logic.commands.DeleteCommand;
import tars.logic.commands.ExitCommand;
import tars.logic.commands.FindCommand;
import tars.logic.commands.HelpCommand;
import tars.logic.commands.IncorrectCommand;
import tars.logic.commands.ListCommand;
import tars.logic.commands.SelectCommand;

import static tars.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static tars.commons.core.Messages.MESSAGE_UNKNOWN_COMMAND;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Parses user input.
 */
public class Parser {

    /**
     * Used for initial separation of command word and args.
     */
    private static final Pattern BASIC_COMMAND_FORMAT = Pattern.compile("(?<commandWord>\\S+)(?<arguments>.*)");

    // Captures the whole trimmed argument string as a (1-based) target index.
    private static final Pattern PERSON_INDEX_ARGS_FORMAT = Pattern.compile("(?<targetIndex>.+)");

    private static final Pattern KEYWORDS_ARGS_FORMAT =
            Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace

    // Matches: <name> -dt d/m/yyyy hhmm [to d/m/yyyy hhmm] -p [hml] [-t tag]...
    // The two datetime alternatives cover a single deadline vs. a start/end range.
    // NOTE(review): the character class [//] contains only '/' (duplicated);
    // presumably '[/]' or a bare '/' was intended — behavior is identical.
    private static final Pattern TASK_DATA_ARGS_FORMAT = // '/' forward slashes are reserved for delimiter prefixes
            Pattern.compile("(?<name>[^/]+) (?<datetime>(-dt (0?[1-9]|[12][0-9]|3[01])[//](0?[1-9]|1[012])[//]\\d{4} ([01]\\d|2[0-3])?[0-5]\\d)"
                    + "|(-dt (0?[1-9]|[12][0-9]|3[01])[//](0?[1-9]|1[012])[//]\\d{4} ([01]\\d|2[0-3])?[0-5]\\d "
                    + "to (0?[1-9]|[12][0-9]|3[01])[//](0?[1-9]|1[012])[//]\\d{4} ([01]\\d|2[0-3])?[0-5]\\d)) "
                    + "(?<priority>-p [hml])"
                    + "(?<tagArguments>(?: -t [^/]+)*)"); // variable number of tags

    public Parser() {}

    /**
     * Parses user input into command for execution.
     *
     * @param userInput full user input string
     * @return the command based on the user input
     */
    public Command parseCommand(String userInput) {
        final Matcher matcher = BASIC_COMMAND_FORMAT.matcher(userInput.trim());
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
        }

        final String commandWord = matcher.group("commandWord");
        final String arguments = matcher.group("arguments");
        switch (commandWord) {

        case AddCommand.COMMAND_WORD:
            return prepareAdd(arguments);

        case SelectCommand.COMMAND_WORD:
            return prepareSelect(arguments);

        case DeleteCommand.COMMAND_WORD:
            return prepareDelete(arguments);

        case ClearCommand.COMMAND_WORD:
            return new ClearCommand();

        case FindCommand.COMMAND_WORD:
            return prepareFind(arguments);

        case ListCommand.COMMAND_WORD:
            return new ListCommand();

        case ExitCommand.COMMAND_WORD:
            return new ExitCommand();

        case HelpCommand.COMMAND_WORD:
            return new HelpCommand();

        default:
            return new IncorrectCommand(MESSAGE_UNKNOWN_COMMAND);
        }
    }

    /**
     * Parses arguments in the context of the add task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareAdd(String args){
        final Matcher matcher = TASK_DATA_ARGS_FORMAT.matcher(args.trim());
        // Validate arg string format
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE));
        }
        try {
            return new AddCommand(
                    matcher.group("name"),
                    // Strip the "-dt " flag before splitting into start/end.
                    getDateTimeFromArgs(matcher.group("datetime").replace("-dt ", "")),
                    // Strip the "-p " flag, leaving just h, m or l.
                    matcher.group("priority").replace("-p ", ""),
                    getTagsFromArgs(matcher.group("tagArguments")));
        } catch (IllegalValueException ive) {
            return new IncorrectCommand(ive.getMessage());
        }
    }

    /**
     * Extracts the new task's datetime from the add command's task arguments string.
     * Returns a two-element array {start, end} for ranged datetimes, otherwise a
     * single-element array. The "to" separator is guaranteed by
     * TASK_DATA_ARGS_FORMAT, whose date/time tokens contain only digits and '/'.
     */
    private static String[] getDateTimeFromArgs(String taskArguments) {
        if (taskArguments.contains("to")) {
            int toIndex = taskArguments.indexOf("to");
            String startDateTime = taskArguments.substring(0, toIndex).trim();
            String endDateTime = taskArguments.substring(toIndex+2).trim();
            return new String[] {startDateTime, endDateTime};
        } else {
            return new String[] {taskArguments};
        }
    }

    /**
     * Extracts the new task's tags from the add command's tag arguments string.
     * Merges duplicate tag strings.
     */
    private static Set<String> getTagsFromArgs(String tagArguments) throws IllegalValueException {
        // no tags
        if (tagArguments.isEmpty()) {
            return Collections.emptySet();
        }
        // replace first delimiter prefix, then split
        final Collection<String> tagStrings = Arrays.asList(tagArguments.replaceFirst(" -t ", "").split(" -t "));
        return new HashSet<>(tagStrings);
    }

    /**
     * Parses arguments in the context of the delete task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareDelete(String args) {
        Optional<Integer> index = parseIndex(args);
        if(!index.isPresent()){
            return new IncorrectCommand(
                    String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE));
        }
        return new DeleteCommand(index.get());
    }

    /**
     * Parses arguments in the context of the select task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareSelect(String args) {
        Optional<Integer> index = parseIndex(args);
        if(!index.isPresent()){
            return new IncorrectCommand(
                    String.format(MESSAGE_INVALID_COMMAND_FORMAT, SelectCommand.MESSAGE_USAGE));
        }
        return new SelectCommand(index.get());
    }

    /**
     * Returns the specified index in the {@code command} IF a positive unsigned integer is given as the index.
     * Returns an {@code Optional.empty()} otherwise.
     */
    private Optional<Integer> parseIndex(String command) {
        final Matcher matcher = PERSON_INDEX_ARGS_FORMAT.matcher(command.trim());
        if (!matcher.matches()) {
            return Optional.empty();
        }

        String index = matcher.group("targetIndex");
        if(!StringUtil.isUnsignedInteger(index)){
            return Optional.empty();
        }
        return Optional.of(Integer.parseInt(index));
    }

    /**
     * Parses arguments in the context of the find task command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private Command prepareFind(String args) {
        final Matcher matcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim());
        if (!matcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT,
                    FindCommand.MESSAGE_USAGE));
        }

        // keywords delimited by whitespace
        final String[] keywords = matcher.group("keywords").split("\\s+");
        final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
        return new FindCommand(keywordSet);
    }
}
package tars.model.task;

import tars.commons.exceptions.IllegalValueException;

/**
 * Represents a Task's priority in tars. A priority is one of the three
 * levels "h" (high), "m" (medium) or "l" (low).
 */
public class Priority {

    public static final String MESSAGE_PRIORITY_CONSTRAINTS = "Task priority should be h / m / l";
    // BUG FIX: the previous pattern "[\\p{Lower} ]+" accepted ANY lower-case
    // string (e.g. "abc"), contradicting MESSAGE_PRIORITY_CONSTRAINTS and the
    // command parser's "-p [hml]" format. Only the three documented levels
    // are valid.
    public static final String PRIORITY_VALIDATION_REGEX = "[hml]";

    // Current priority level; one of "h", "m" or "l" when set via the
    // validating constructor.
    public String priorityLevel;

    /**
     * Creates a validated priority.
     *
     * @param priorityLevel the level string; must not be null
     * @throws IllegalValueException if the given string is not a valid level
     */
    public Priority(String priorityLevel) throws IllegalValueException {
        assert priorityLevel != null;
        priorityLevel = priorityLevel.trim();
        if (!isValidPriorityLevel(priorityLevel)) {
            throw new IllegalValueException(MESSAGE_PRIORITY_CONSTRAINTS);
        }
        this.priorityLevel = priorityLevel;
    }

    /**
     * Returns true if a given string is a valid task priority level.
     */
    public static boolean isValidPriorityLevel(String level) {
        return level.matches(PRIORITY_VALIDATION_REGEX);
    }

    @Override
    public String toString() {
        return priorityLevel;
    }

    // NOTE(review): this setter bypasses validation; callers are presumably
    // trusted to pass h/m/l — consider validating here too.
    public void setLevel(String priorityLevel) {
        this.priorityLevel = priorityLevel;
    }
}
package mondrian.rolap;

import mondrian.olap.*;
import mondrian.olap.DimensionType;
import mondrian.olap.LevelType;
import mondrian.olap.fun.*;
import mondrian.olap.type.*;
import mondrian.rolap.sql.SqlQuery;
import mondrian.resource.MondrianResource;
import mondrian.mdx.*;
import mondrian.calc.*;
import mondrian.calc.impl.*;

import org.apache.log4j.Logger;

import java.util.List;
import java.io.PrintWriter;

/**
 * <code>RolapHierarchy</code> implements {@link Hierarchy} for a ROLAP database.
 *
 * @author jhyde
 * @since 10 August, 2001
 * @version $Id$
 */
public class RolapHierarchy extends HierarchyBase {

    private static final Logger LOGGER = Logger.getLogger(RolapHierarchy.class);

    /**
     * The raw member reader. For a member reader which incorporates access
     * control and deals with hidden members (if the hierarchy is ragged), use
     * {@link #createMemberReader(Role)}.
     */
    private MemberReader memberReader;
    protected MondrianDef.Hierarchy xmlHierarchy;
    private String memberReaderClass;
    protected MondrianDef.RelationOrJoin relation;
    private Member defaultMember;
    private String defaultMemberName;
    private RolapNullMember nullMember;

    // Unique name of the source hierarchy when this hierarchy is shared;
    // see getSharedHierarchyName() below.
    private String sharedHierarchyName;

    private Exp aggregateChildrenExpression;

    /**
     * Type for members of this hierarchy. Set once to avoid excessive newing.
     */
    final Type memberType = MemberType.forHierarchy(this);

    /**
     * The level that the null member belongs too.
     */
    protected final RolapLevel nullLevel;

    /**
     * The 'all' member of this hierarchy. This exists even if the hierarchy
     * does not officially have an 'all' member.
     */
    private RolapMember allMember;
    private static final String ALL_LEVEL_CARDINALITY = "1";

    RolapHierarchy(RolapDimension dimension, String subName, boolean hasAll) {
        super(dimension, subName, hasAll);
        this.allLevelName = "(All)";
        this.allMemberName =
            subName != null
            && (MondrianProperties.instance().SsasCompatibleNaming.get()
                || name.equals(subName + "." + subName))
                ? "All " + subName + "s"
                : "All " + name + "s";
        if (hasAll) {
            this.levels = new RolapLevel[1];
            this.levels[0] =
                new RolapLevel(
                    this, 0, this.allLevelName, null, null, null, null,
                    null, null, null, RolapProperty.emptyArray,
                    RolapLevel.FLAG_ALL | RolapLevel.FLAG_UNIQUE,
                    null,
                    RolapLevel.HideMemberCondition.Never,
                    LevelType.Regular, "");
        } else {
            this.levels = new RolapLevel[0];
        }

        // The null member belongs to a level with very similar properties to
        // the 'all' level.
        this.nullLevel =
            new RolapLevel(
                this, 0, this.allLevelName, null, null, null, null,
                null, null, null, RolapProperty.emptyArray,
                RolapLevel.FLAG_ALL | RolapLevel.FLAG_UNIQUE,
                null,
                RolapLevel.HideMemberCondition.Never,
                LevelType.Null, "");
    }

    /**
     * Creates a <code>RolapHierarchy</code>.
     *
     * @param dimension the dimension this hierarchy belongs to
     * @param xmlHierarchy the xml object defining this hierarchy
     * @param xmlCubeDimension the xml object defining the cube
     *   dimension for this object
     */
    RolapHierarchy(
        RolapDimension dimension,
        MondrianDef.Hierarchy xmlHierarchy,
        MondrianDef.CubeDimension xmlCubeDimension)
    {
        this(dimension, xmlHierarchy.name, xmlHierarchy.hasAll);

        assert !(this instanceof RolapCubeHierarchy);

        this.xmlHierarchy = xmlHierarchy;
        this.relation = xmlHierarchy.relation;
        // Inline tables are normalized into an ordinary relation up front.
        if (xmlHierarchy.relation instanceof MondrianDef.InlineTable) {
            this.relation =
                RolapUtil.convertInlineTableToRelation(
                    (MondrianDef.InlineTable) xmlHierarchy.relation,
                    getRolapSchema().getDialect());
        }
        this.memberReaderClass = xmlHierarchy.memberReaderClass;

        // Create an 'all' level even if the hierarchy does not officially
        // have one.
        if (xmlHierarchy.allMemberName != null) {
            this.allMemberName = xmlHierarchy.allMemberName;
        }
        if (xmlHierarchy.allLevelName != null) {
            this.allLevelName = xmlHierarchy.allLevelName;
        }
        RolapLevel allLevel =
            new RolapLevel(
                this, 0, this.allLevelName, null, null, null, null,
                null, null, null, RolapProperty.emptyArray,
                RolapLevel.FLAG_ALL | RolapLevel.FLAG_UNIQUE,
                null,
                RolapLevel.HideMemberCondition.Never,
                LevelType.Regular, ALL_LEVEL_CARDINALITY);
        allLevel.init(xmlCubeDimension);
        this.allMember = new RolapMember(
            null, allLevel, null, allMemberName, Member.MemberType.ALL);
        // assign "all member" caption
        if (xmlHierarchy.allMemberCaption != null
            && xmlHierarchy.allMemberCaption.length() > 0)
        {
            this.allMember.setCaption(xmlHierarchy.allMemberCaption);
        }
        this.allMember.setOrdinal(0);

        // If the hierarchy has an 'all' member, the 'all' level is level 0.
        if (hasAll) {
            this.levels = new RolapLevel[xmlHierarchy.levels.length + 1];
            this.levels[0] = allLevel;
            for (int i = 0; i < xmlHierarchy.levels.length; i++) {
                final MondrianDef.Level xmlLevel = xmlHierarchy.levels[i];
                if (xmlLevel.getKeyExp() == null
                    && xmlHierarchy.memberReaderClass == null)
                {
                    throw MondrianResource.instance()
                        .LevelMustHaveNameExpression.ex(xmlLevel.name);
                }
                levels[i + 1] = new RolapLevel(this, i + 1, xmlLevel);
            }
        } else {
            this.levels = new RolapLevel[xmlHierarchy.levels.length];
            for (int i = 0; i < xmlHierarchy.levels.length; i++) {
                levels[i] = new RolapLevel(this, i, xmlHierarchy.levels[i]);
            }
        }

        if (xmlCubeDimension instanceof MondrianDef.DimensionUsage) {
            String sharedDimensionName =
                ((MondrianDef.DimensionUsage) xmlCubeDimension).source;
            this.sharedHierarchyName = sharedDimensionName;
            if (subName != null) {
                this.sharedHierarchyName += "." + subName; // e.g. "Time.Weekly"
            }
        } else {
            this.sharedHierarchyName = null;
        }
        // A hierarchy reads its members either from a relation or from a
        // custom member reader, never both.
        if (xmlHierarchy.relation != null
            && xmlHierarchy.memberReaderClass != null)
        {
            throw MondrianResource.instance()
                .HierarchyMustNotHaveMoreThanOneSource.ex(getUniqueName());
        }
        if (!Util.isEmpty(xmlHierarchy.caption)) {
            setCaption(xmlHierarchy.caption);
        }
        defaultMemberName = xmlHierarchy.defaultMember;
    }

    protected Logger getLogger() {
        return LOGGER;
    }

    // Equality is based on the shared-hierarchy name plus unique name;
    // hierarchies that are not shared are never equal to each other.
    // NOTE(review): no hashCode() override here — presumably HierarchyBase
    // implements hashCode() via computeHashCode() below; confirm.
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof RolapHierarchy)) {
            return false;
        }
        RolapHierarchy that = (RolapHierarchy)o;
        if (sharedHierarchyName == null || that.sharedHierarchyName == null) {
            return false;
        } else {
            return sharedHierarchyName.equals(that.sharedHierarchyName)
                && getUniqueName().equals(that.getUniqueName());
        }
    }

    protected int computeHashCode() {
        return super.computeHashCode()
            ^ (sharedHierarchyName == null ? 0 : sharedHierarchyName.hashCode());
    }

    /**
     * Initializes a hierarchy within the context of a cube.
     */
    void init(MondrianDef.CubeDimension xmlDimension) {
        // first create memberReader
        if (this.memberReader == null) {
            this.memberReader =
                getRolapSchema().createMemberReader(
                    sharedHierarchyName, this, memberReaderClass);
        }
        for (Level level : levels) {
            ((RolapLevel) level).init(xmlDimension);
        }
        if (defaultMemberName != null) {
            List<Id.Segment> uniqueNameParts =
                Util.parseIdentifier(defaultMemberName);

            // First look up from within this hierarchy. Works for unqualified
            // names, e.g. [USA].[CA].
            defaultMember =
                (Member) Util.lookupCompound(
                    getRolapSchema().getSchemaReader(),
                    this,
                    uniqueNameParts,
                    false,
                    Category.Member,
                    MatchType.EXACT);

            // Next look up within global context. Works for qualified names,
            // e.g. [Store].[USA].[CA] or [Time].[Weekly].[1997].[Q2].
            if (defaultMember == null) {
                defaultMember =
                    (Member) Util.lookupCompound(
                        getRolapSchema().getSchemaReader(),
                        new DummyElement(),
                        uniqueNameParts,
                        false,
                        Category.Member,
                        MatchType.EXACT);
            }
            if (defaultMember == null) {
                throw Util.newInternal(
                    "Can not find Default Member with name \""
                    + defaultMemberName + "\" in Hierarchy \""
                    + getName() + "\"");
            }
        }
    }

    void setMemberReader(MemberReader memberReader) {
        this.memberReader = memberReader;
    }

    MemberReader getMemberReader() {
        return memberReader;
    }

    /** Creates and appends the special level holding measures. */
    RolapLevel newMeasuresLevel() {
        RolapLevel level =
            new RolapLevel(
                this, this.levels.length,
                "MeasuresLevel", null, null, null, null,
                null, null, null, RolapProperty.emptyArray, 0,
                null,
                RolapLevel.HideMemberCondition.Never,
                LevelType.Regular, "");
        this.levels = RolapUtil.addElement(this.levels, level);
        return level;
    }

    /**
     * If this hierarchy has precisely one table, returns that table;
     * if this hierarchy has no table, return the cube's fact-table;
     * otherwise, returns null.
     */
    MondrianDef.Relation getUniqueTable() {
        if (relation instanceof MondrianDef.Relation) {
            return (MondrianDef.Relation) relation;
        } else if (relation instanceof MondrianDef.Join) {
            return null;
        } else {
            throw Util.newInternal(
                "hierarchy's relation is a " + relation.getClass());
        }
    }

    boolean tableExists(String tableName) {
        return (relation != null) && tableExists(tableName, relation);
    }

    // Recursively searches a relation/join tree for a table with the alias.
    private static boolean tableExists(
        String tableName,
        MondrianDef.RelationOrJoin relationOrJoin)
    {
        if (relationOrJoin instanceof MondrianDef.Relation) {
            MondrianDef.Relation relation =
                (MondrianDef.Relation) relationOrJoin;
            return relation.getAlias().equals(tableName);
        } else {
            MondrianDef.Join join = (MondrianDef.Join) relationOrJoin;
            return tableExists(tableName, join.left)
                || tableExists(tableName, join.right);
        }
    }

    public RolapSchema getRolapSchema() {
        return (RolapSchema) dimension.getSchema();
    }

    public MondrianDef.RelationOrJoin getRelation() {
        return relation;
    }

    public MondrianDef.Hierarchy getXmlHierarchy() {
        return xmlHierarchy;
    }

    public Member getDefaultMember() {
        // use lazy initialization to get around bootstrap issues
        if (defaultMember == null) {
            // NOTE(review): raw List — presumably List<RolapMember>; confirm
            // against MemberReader.getRootMembers().
            List rootMembers = memberReader.getRootMembers();
            if (rootMembers.size() == 0) {
                throw MondrianResource.instance()
                    .InvalidHierarchyCondition.ex(this.getUniqueName());
            }
            defaultMember = (RolapMember) rootMembers.get(0);
        }
        return defaultMember;
    }

    public Member getNullMember() {
        // use lazy initialization to get around bootstrap issues
        if (nullMember == null) {
            nullMember = new RolapNullMember(nullLevel);
        }
        return nullMember;
    }

    /**
     * Returns the 'all' member.
     */
    public RolapMember getAllMember() {
        return allMember;
    }

    // Plain members have no formula; calculated members in the Measures
    // dimension become calculated measures, otherwise calculated members.
    public Member createMember(
        Member parent,
        Level level,
        String name,
        Formula formula)
    {
        if (formula == null) {
            return new RolapMember(
                (RolapMember) parent, (RolapLevel) level, name);
        } else if (level.getDimension().isMeasures()) {
            return new RolapCalculatedMeasure(
                (RolapMember) parent, (RolapLevel) level, name, formula);
        } else {
            return new RolapCalculatedMember(
                (RolapMember) parent, (RolapLevel) level, name, formula);
        }
    }

    String getAlias() {
        return getName();
    }

    /**
     * Returns the name of the source hierarchy, if this hierarchy is shared,
     * otherwise null.
     *
     * <p>If this hierarchy is a public -- that is, it belongs to a dimension
     * which is a usage of a shared dimension -- then
     * <code>sharedHierarchyName</code> holds the unique name of the shared
     * hierarchy; otherwise it is null.
     *
     * <p> Suppose this hierarchy is "Weekly" in the dimension "Order Date" of
     * cube "Sales", and that "Order Date" is a usage of the "Time"
     * dimension. Then <code>sharedHierarchyName</code> will be
     * "[Time].[Weekly]".
     */
    public String getSharedHierarchyName() {
        return sharedHierarchyName;
    }

    /**
     * Adds to the FROM clause of the query the tables necessary to access the
     * members of this hierarchy. If <code>expression</code> is not null, adds
     * the tables necessary to compute that expression.
     *
     * <p> This method is idempotent: if you call it more than once, it only
     * adds the table(s) to the FROM clause once.
     *
     * @param query Query to add the hierarchy to
     * @param expression Level to qualify up to; if null, qualifies up to the
     *   topmost ('all') expression, which may require more columns and more joins
     */
    void addToFrom(SqlQuery query, MondrianDef.Expression expression) {
        if (relation == null) {
            throw Util.newError(
                "cannot add hierarchy " + getUniqueName()
                + " to query: it does not have a <Table>, <View> or <Join>");
        }
        final boolean failIfExists = false;
        MondrianDef.RelationOrJoin subRelation = relation;
        if (relation instanceof MondrianDef.Join) {
            if (expression != null) {
                // Suppose relation is
                //   (((A join B) join C) join D)
                // and the fact table is
                // and our expression uses C. We want to make the expression
                //   F left join ((A join B) join C).
                // Search for the smallest subset of the relation which
                // uses C.
                subRelation =
                    relationSubset(relation, expression.getTableAlias());
            }
        }
        query.addFrom(subRelation, null, failIfExists);
    }

    /**
     * Adds a table to the FROM clause of the query.
     * If <code>table</code> is not null, adds the table. Otherwise, add the
     * relation on which this hierarchy is based on.
     *
     * <p> This method is idempotent: if you call it more than once, it only
     * adds the table(s) to the FROM clause once.
     *
     * @param query Query to add the hierarchy to
     * @param table table to add to the query
     */
    void addToFrom(SqlQuery query, RolapStar.Table table) {
        if (getRelation() == null) {
            throw Util.newError(
                "cannot add hierarchy " + getUniqueName()
                + " to query: it does not have a <Table>, <View> or <Join>");
        }
        final boolean failIfExists = false;
        MondrianDef.RelationOrJoin subRelation = null;
        if (table != null) {
            // Suppose relation is
            //   (((A join B) join C) join D)
            // and the fact table is
            // and the table to add is C. We want to make the expression
            //   F left join ((A join B) join C).
// Search for the smallest subset of the relation which // joins with C. subRelation = lookupRelationSubset(getRelation(), table); } if (subRelation == null) { // If no table is found or specified, add the entire base relation. subRelation = getRelation(); } query.addFrom(subRelation, null, failIfExists); } /** * Returns the smallest subset of <code>relation</code> which contains * the relation <code>alias</code>, or null if these is no relation with * such an alias. * @param relation the relation in which to look for table by its alias * @param alias table alias to search for * @return the smallest containing relation or null if no matching table * is found in <code>relation</code> */ private static MondrianDef.RelationOrJoin relationSubset( MondrianDef.RelationOrJoin relation, String alias) { if (relation instanceof MondrianDef.Relation) { MondrianDef.Relation table = (MondrianDef.Relation) relation; return table.getAlias().equals(alias) ? relation : null; } else if (relation instanceof MondrianDef.Join) { MondrianDef.Join join = (MondrianDef.Join) relation; MondrianDef.RelationOrJoin rightRelation = relationSubset(join.right, alias); return (rightRelation == null) ? relationSubset(join.left, alias) : join; } else { throw Util.newInternal("bad relation type " + relation); } } /** * Returns the smallest subset of <code>relation</code> which contains * the table <code>targetTable</code>, or null if the targetTable is not * one of the joining table in <code>relation</code>. 
* * @param relation the relation in which to look for targetTable * @param targetTable table to add to the query * @return the smallest containing relation or null if no matching table * is found in <code>relation</code> */ private static MondrianDef.RelationOrJoin lookupRelationSubset( MondrianDef.RelationOrJoin relation, RolapStar.Table targetTable) { if (relation instanceof MondrianDef.Table) { MondrianDef.Table table = (MondrianDef.Table) relation; if (table.name.equals(targetTable.getTableName())) { return relation; } else { // Not the same table if table names are different return null; } } else if (relation instanceof MondrianDef.Join) { // Search inside relation, starting from the rightmost table, // and move left along the join chain. MondrianDef.Join join = (MondrianDef.Join) relation; MondrianDef.RelationOrJoin rightRelation = lookupRelationSubset(join.right, targetTable); if (rightRelation == null) { // Keep searching left. return lookupRelationSubset( join.left, targetTable); } else { // Found a match. return join; } } return null; } /** * Creates a member reader which enforces the access-control profile of * <code>role</code>. * * <p>This method may not be efficient, so the caller should take care * not to call it too often. A cache is a good idea. * * @param role Role * @return Member reader that implements access control * * @pre role != null * @post return != null */ MemberReader createMemberReader(Role role) { final Access access = role.getAccess(this); switch (access) { case NONE: role.getAccess(this); // todo: remove throw Util.newInternal("Illegal access to members of hierarchy " + this); case ALL: return (isRagged()) ? 
new RestrictedMemberReader(getMemberReader(), role) : getMemberReader(); case CUSTOM: final Role.HierarchyAccess hierarchyAccess = role.getAccessDetails(this); final Role.RollupPolicy rollupPolicy = hierarchyAccess.getRollupPolicy(); final NumericType returnType = new NumericType(); switch (rollupPolicy) { case FULL: return new RestrictedMemberReader(getMemberReader(), role); case PARTIAL: Type memberType1 = new mondrian.olap.type.MemberType( getDimension(), getHierarchy(), null, null); SetType setType = new SetType(memberType1); ListCalc listCalc = new AbstractMemberListCalc( new DummyExp(setType), new Calc[0]) { public List<Member> evaluateMemberList( Evaluator evaluator) { return FunUtil.getNonEmptyMemberChildren( evaluator, ((RolapEvaluator) evaluator).getExpanding()); } public boolean dependsOn(Dimension dimension) { return true; } }; final Calc partialCalc = new LimitedRollupAggregateCalc(returnType, listCalc); final Exp partialExp = new ResolvedFunCall( new FunDefBase("$x", "x", "In") { public Calc compileCall( ResolvedFunCall call, ExpCompiler compiler) { return partialCalc; } public void unparse(Exp[] args, PrintWriter pw) { pw.print("$RollupAccessibleChildren()"); } }, new Exp[0], returnType); return new LimitedRollupSubstitutingMemberReader( getMemberReader(), role, hierarchyAccess, partialExp); case HIDDEN: Exp hiddenExp = new ResolvedFunCall( new FunDefBase("$x", "x", "In") { public Calc compileCall( ResolvedFunCall call, ExpCompiler compiler) { return new ConstantCalc(returnType, null); } public void unparse(Exp[] args, PrintWriter pw) { pw.print("$RollupAccessibleChildren()"); } }, new Exp[0], returnType); return new LimitedRollupSubstitutingMemberReader( getMemberReader(), role, hierarchyAccess, hiddenExp); default: throw Util.unexpected(rollupPolicy); } default: throw Util.badValue(access); } } /** * A hierarchy is ragged if it contains one or more levels with hidden * members. 
*/ public boolean isRagged() { for (Level level : levels) { if (((RolapLevel) level).getHideMemberCondition() != RolapLevel.HideMemberCondition.Never) { return true; } } return false; } /** * Returns an expression which will compute a member's value by aggregating * its children. * * <p>It is efficient to share one expression between all calculated members in * a parent-child hierarchy, so we only need need to validate the expression * once. */ synchronized Exp getAggregateChildrenExpression() { if (aggregateChildrenExpression == null) { UnresolvedFunCall fc = new UnresolvedFunCall( "$AggregateChildren", Syntax.Internal, new Exp[] {new HierarchyExpr(this)}); Validator validator = Util.createSimpleValidator(BuiltinFunTable.instance()); aggregateChildrenExpression = fc.accept(validator); } return aggregateChildrenExpression; } /** * Builds a dimension which maps onto a table holding the transitive * closure of the relationship for this parent-child level. * * <p>This method is triggered by the * {@link mondrian.olap.MondrianDef.Closure} element * in a schema, and is only meaningful for a parent-child hierarchy. * * <p>When a Schema contains a parent-child Hierarchy that has an * associated closure table, Mondrian creates a parallel internal * Hierarchy, called a "closed peer", that refers to the closure table. * This is indicated in the schema at the level of a Level, by including a * Closure element. The closure table represents * the transitive closure of the parent-child relationship. * * <p>The peer dimension, with its single hierarchy, and 3 levels (all, * closure, item) really 'belong to' the parent-child level. If a single * hierarchy had two parent-child levels (however unlikely this might be) * then each level would have its own auxiliary dimension. 
* * <p>For example, in the demo schema the [HR].[Employee] dimension * contains a parent-child hierarchy: * * <pre> * &lt;Dimension name="Employees" foreignKey="employee_id"&gt; * &lt;Hierarchy hasAll="true" allMemberName="All Employees" * primaryKey="employee_id"&gt; * &lt;Table name="employee"/&gt; * &lt;Level name="Employee Id" type="Numeric" uniqueMembers="true" * column="employee_id" parentColumn="supervisor_id" * nameColumn="full_name" nullParentValue="0"&gt; * &lt;Closure parentColumn="supervisor_id" childColumn="employee_id"&gt; * &lt;Table name="employee_closure"/&gt; * &lt;/Closure&gt; * ... * </pre> * The internal closed peer Hierarchy has this structure: * <pre> * &lt;Dimension name="Employees" foreignKey="employee_id"&gt; * ... * &lt;Hierarchy name="Employees$Closure" * hasAll="true" allMemberName="All Employees" * primaryKey="employee_id" primaryKeyTable="employee_closure"&gt; * &lt;Join leftKey="supervisor_id" rightKey="employee_id"&gt; * &lt;Table name="employee_closure"/&gt; * &lt;Table name="employee"/&gt; * &lt;/Join&gt; * &lt;Level name="Closure" type="Numeric" uniqueMembers="false" * table="employee_closure" column="supervisor_id"/&gt; * &lt;Level name="Employee" type="Numeric" uniqueMembers="true" * table="employee_closure" column="employee_id"/&gt; * &lt;/Hierarchy&gt; * </pre> * * <p>Note that the original Level with the Closure produces two Levels in * the closed peer Hierarchy: a simple peer (Employee) and a closed peer * (Closure). * * @param src a parent-child Level that has a Closure clause * @param clos a Closure clause * @return the closed peer Level in the closed peer Hierarchy */ RolapDimension createClosedPeerDimension( RolapLevel src, MondrianDef.Closure clos, MondrianDef.CubeDimension xmlDimension) { // REVIEW (mb): What about attribute primaryKeyTable? // Create a peer dimension. 
RolapDimension peerDimension = new RolapDimension( dimension.getSchema(), dimension.getName() + "$Closure", DimensionType.StandardDimension, dimension.isHighCardinality()); // Create a peer hierarchy. RolapHierarchy peerHier = peerDimension.newHierarchy(null, true); peerHier.allMemberName = getAllMemberName(); peerHier.allMember = getAllMember(); peerHier.allLevelName = getAllLevelName(); peerHier.sharedHierarchyName = getSharedHierarchyName(); MondrianDef.Join join = new MondrianDef.Join(); peerHier.relation = join; join.left = clos.table; // the closure table join.leftKey = clos.parentColumn; join.right = relation; // the unclosed base table join.rightKey = clos.childColumn; // Create the upper level. // This represents all groups of descendants. For example, in the // Employee closure hierarchy, this level has a row for every employee. int index = peerHier.levels.length; int flags = src.getFlags() &~ RolapLevel.FLAG_UNIQUE; MondrianDef.Expression keyExp = new MondrianDef.Column(clos.table.name, clos.parentColumn); RolapLevel level = new RolapLevel(peerHier, index++, "Closure", keyExp, null, null, null, null, null, // no longer a parent-child hierarchy null, RolapProperty.emptyArray, flags, src.getDatatype(), src.getHideMemberCondition(), src.getLevelType(), ""); peerHier.levels = RolapUtil.addElement(peerHier.levels, level); // Create lower level. // This represents individual items. For example, in the Employee // closure hierarchy, this level has a row for every direct and // indirect report of every employee (which is more than the number // of employees). 
flags = src.getFlags() | RolapLevel.FLAG_UNIQUE; keyExp = new MondrianDef.Column(clos.table.name, clos.childColumn); RolapLevel sublevel = new RolapLevel( peerHier, index++, "Item", keyExp, null, null, null, null, null, // no longer a parent-child hierarchy null, RolapProperty.emptyArray, flags, src.getDatatype(), src.getHideMemberCondition(), src.getLevelType(), ""); peerHier.levels = RolapUtil.addElement(peerHier.levels, sublevel); return peerDimension; } /** * Sets default member of this Hierarchy. * * @param defaultMember Default member */ public void setDefaultMember(Member defaultMember) { if (defaultMember != null) { this.defaultMember = defaultMember; } } /** * A <code>RolapNullMember</code> is the null member of its hierarchy. * Every hierarchy has precisely one. They are yielded by operations such as * <code>[Gender].[All].ParentMember</code>. Null members are usually * omitted from sets (in particular, in the set constructor operator "{ ... * }". */ static class RolapNullMember extends RolapMember { RolapNullMember(final RolapLevel level) { super(null, level, null, RolapUtil.mdxNullLiteral, MemberType.NULL); assert level != null; } } /** * Calculated member which is also a measure (that is, a member of the * [Measures] dimension). 
*/ protected static class RolapCalculatedMeasure extends RolapCalculatedMember implements RolapMeasure { private CellFormatter cellFormatter; public RolapCalculatedMeasure( RolapMember parent, RolapLevel level, String name, Formula formula) { super(parent, level, name, formula); } public synchronized void setProperty(String name, Object value) { if (name.equals(Property.CELL_FORMATTER.getName())) { String cellFormatterClass = (String) value; try { this.cellFormatter = RolapCube.getCellFormatter(cellFormatterClass); } catch (Exception e) { throw MondrianResource.instance().CellFormatterLoadFailed.ex( cellFormatterClass, getUniqueName(), e); } } super.setProperty(name, value); } public CellFormatter getFormatter() { return cellFormatter; } } /** * Substitute for a member in a hierarchy whose rollup policy is 'partial' * or 'hidden'. The member is calculated using an expression which * aggregates only visible descendants. * * <p>Note that this class extends RolapCubeMember only because other code * expects that all members in a RolapCubeHierarchy are RolapCubeMembers. 
* * @see mondrian.olap.Role.RollupPolicy */ public static class LimitedRollupMember extends RolapCubeMember { public final RolapMember member; private final Exp exp; LimitedRollupMember( RolapCubeMember member, Exp exp) { super( member.getParentMember(), member.getRolapMember(), member.getLevel(), member.getCube()); assert !(member instanceof LimitedRollupMember); this.member = member; this.exp = exp; } public boolean equals(Object o) { return o instanceof LimitedRollupMember && ((LimitedRollupMember) o).member.equals(member); } public int hashCode() { return member.hashCode(); } public Exp getExpression() { return exp; } protected boolean computeCalculated(final MemberType memberType) { return true; } public boolean isCalculated() { return false; } public boolean isEvaluated() { return true; } } /** * Member reader which wraps a hierarchy's member reader, and if the * role has limited access to the hierarchy, replaces members with * dummy members which evaluate to the sum of only the accessible children. */ private static class LimitedRollupSubstitutingMemberReader extends SubstitutingMemberReader { private final Role.HierarchyAccess hierarchyAccess; private final Exp exp; /** * Creates a LimitedRollupSubstitutingMemberReader. * * @param memberReader Underlying member reader * @param role Role to enforce * @param hierarchyAccess Access this role has to the hierarchy * @param exp Expression for hidden member */ public LimitedRollupSubstitutingMemberReader( MemberReader memberReader, Role role, Role.HierarchyAccess hierarchyAccess, Exp exp) { super( new RestrictedMemberReader( memberReader, role)); this.hierarchyAccess = hierarchyAccess; this.exp = exp; } @Override public RolapMember substitute(final RolapMember member) { if (member != null && (hierarchyAccess.getAccess(member) == Access.CUSTOM || hierarchyAccess.hasInaccessibleDescendants(member))) { // Member is visible, but at least one of its // descendants is not. 
return new LimitedRollupMember((RolapCubeMember)member, exp); } else { // No need to substitute. Member and all of its // descendants are accessible. Total for member // is same as for FULL policy. return member; } } @Override public RolapMember desubstitute(RolapMember member) { if (member instanceof LimitedRollupMember) { return ((LimitedRollupMember) member).member; } else { return member; } } } /** * Compiled expression that computes rollup over a set of visible children. * The {@code listCalc} expression determines that list of children. */ private static class LimitedRollupAggregateCalc extends AggregateFunDef.AggregateCalc { public LimitedRollupAggregateCalc(Type returnType, ListCalc listCalc) { super( new DummyExp(returnType), listCalc, new ValueCalc(new DummyExp(returnType))); } } /** * Dummy element that acts as a namespace for resolving member names within * shared hierarchies. Acts like a cube that has a single child, the * hierarchy in question. */ private class DummyElement implements OlapElement { public String getUniqueName() { throw new UnsupportedOperationException(); } public String getName() { return "$"; } public String getDescription() { throw new UnsupportedOperationException(); } public OlapElement lookupChild( SchemaReader schemaReader, Id.Segment s, MatchType matchType) { if (Util.equalName(s.name, dimension.getName())) { return dimension; } // Archaic form <dimension>.<hierarchy>, e.g. [Time.Weekly].[1997] if (Util.equalName(s.name, dimension.getName() + "." + subName)) { return RolapHierarchy.this; } return null; } public String getQualifiedName() { throw new UnsupportedOperationException(); } public String getCaption() { throw new UnsupportedOperationException(); } public Hierarchy getHierarchy() { throw new UnsupportedOperationException(); } public Dimension getDimension() { throw new UnsupportedOperationException(); } } } // End RolapHierarchy.java
package soottocfg.soot.visitors; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import com.google.common.base.Preconditions; import com.google.common.base.Verify; import soot.ArrayType; import soot.Body; import soot.Local; import soot.PatchingChain; import soot.RefType; import soot.IntType; import soot.ByteType; import soot.CharType; import soot.ShortType; import soot.LongType; import soot.BooleanType; import soot.SootClass; import soot.SootMethod; import soot.Unit; import soot.Value; import soot.Type; import soot.jimple.Jimple; import soot.jimple.AnyNewExpr; import soot.jimple.AssignStmt; import soot.jimple.BreakpointStmt; import soot.jimple.DefinitionStmt; import soot.jimple.DynamicInvokeExpr; import soot.jimple.EnterMonitorStmt; import soot.jimple.ExitMonitorStmt; import soot.jimple.FieldRef; import soot.jimple.GotoStmt; import soot.jimple.IdentityStmt; import soot.jimple.IfStmt; import soot.jimple.InstanceInvokeExpr; import soot.jimple.InvokeExpr; import soot.jimple.InvokeStmt; import soot.jimple.LengthExpr; import soot.jimple.LookupSwitchStmt; import soot.jimple.NopStmt; import soot.jimple.RetStmt; import soot.jimple.ReturnStmt; import soot.jimple.ReturnVoidStmt; import soot.jimple.SpecialInvokeExpr; import soot.jimple.StaticInvokeExpr; import soot.jimple.Stmt; import soot.jimple.StmtSwitch; import soot.jimple.TableSwitchStmt; import soot.jimple.ThrowStmt; import soot.toolkits.graph.CompleteUnitGraph; import soottocfg.cfg.SourceLocation; import soottocfg.cfg.expression.BinaryExpression; import soottocfg.cfg.expression.BinaryExpression.BinaryOperator; import soottocfg.cfg.expression.Expression; import soottocfg.cfg.expression.IdentifierExpression; import soottocfg.cfg.expression.TupleAccessExpression; import soottocfg.cfg.expression.UnaryExpression; import soottocfg.cfg.expression.UnaryExpression.UnaryOperator; import soottocfg.cfg.expression.literal.IntegerLiteral; import soottocfg.cfg.method.CfgBlock; import 
soottocfg.cfg.method.Method; import soottocfg.cfg.statement.AssertStatement; import soottocfg.cfg.statement.AssumeStatement; import soottocfg.cfg.statement.AssignStatement; import soottocfg.cfg.statement.CallStatement; import soottocfg.cfg.statement.HavocStatement; import soottocfg.cfg.statement.NewStatement; import soottocfg.cfg.statement.Statement; import soottocfg.cfg.type.ReferenceType; import soottocfg.cfg.variable.ClassVariable; import soottocfg.cfg.variable.Variable; import soottocfg.soot.util.MethodInfo; import soottocfg.soot.util.SootTranslationHelpers; /** * @author schaef */ public class SootStmtSwitch implements StmtSwitch { private final SootMethod sootMethod; private final Body sootBody; private final MethodInfo methodInfo; private final SootValueSwitch valueSwitch; private final PatchingChain<Unit> units; private final CompleteUnitGraph unitGraph; private CfgBlock currentBlock, entryBlock, exitBlock; private boolean insideMonitor = false; private Stmt currentStmt; protected SourceLocation loc; public SootStmtSwitch(Body body, MethodInfo mi) { this.methodInfo = mi; this.sootBody = body; this.sootMethod = sootBody.getMethod(); this.valueSwitch = new SootValueSwitch(this); units = body.getUnits(); Unit head = units.getFirst(); unitGraph = new CompleteUnitGraph(sootBody); // check if the block is empty. if (head != null) { this.entryBlock = methodInfo.lookupCfgBlock(head); this.currentBlock = this.entryBlock; Iterator<Unit> iterator = units.iterator(); while (iterator.hasNext()) { Unit u = iterator.next(); u.apply(this); } } else { if (methodInfo.getMethod().getSource()==null) { methodInfo.getMethod().setSource(new CfgBlock(methodInfo.getMethod())); } // this.entryBlock = new CfgBlock(methodInfo.getMethod()); this.entryBlock = methodInfo.getMethod().getSource(); this.currentBlock = this.entryBlock; } if (this.currentBlock != null) { this.exitBlock = this.currentBlock; } else { this.exitBlock = null; } // TODO: connect stuff to exit. 
} public CfgBlock getEntryBlock() { return this.entryBlock; } public CfgBlock getExitBlock() { return this.exitBlock; } public MethodInfo getMethodInfo() { return this.methodInfo; } public SootMethod getMethod() { return this.sootMethod; } public Stmt getCurrentStmt() { return this.currentStmt; } public SourceLocation getCurrentLoc() { return this.loc; } /** * Checks if the current statement is synchronized or inside a monitor * * @return True if the current statement is inside a monitor or synchronized * and false, otherwise. */ public boolean isSynchronizedOrInsideMonitor() { return this.insideMonitor || this.sootMethod.isSynchronized(); } public void push(Statement stmt) { this.currentBlock.addStatement(stmt); } private void connectBlocks(CfgBlock from, CfgBlock to) { Preconditions.checkArgument(!methodInfo.getMethod().containsEdge(from, to)); this.methodInfo.getMethod().addEdge(from, to); } private void connectBlocks(CfgBlock from, CfgBlock to, Expression label) { Preconditions.checkArgument(!methodInfo.getMethod().containsEdge(from, to)); this.methodInfo.getMethod().addEdge(from, to).setLabel(label); } private void precheck(Stmt st) { this.currentStmt = st; loc = SootTranslationHelpers.v().getSourceLocation(currentStmt); if (currentBlock != null) { // first check if we already created a block // for this statement. CfgBlock block = methodInfo.findBlock(st); if (block != null) { if (block != currentBlock) { connectBlocks(currentBlock, block); currentBlock = block; } else { // do nothing. } } else { if (unitGraph.getPredsOf(st).size() > 1) { // then this statement might be reachable via a back edge // and we have to create a new block for it. CfgBlock newBlock = methodInfo.lookupCfgBlock(st); connectBlocks(currentBlock, newBlock); currentBlock = newBlock; } else { // do nothing. } } } else { // If not, and we currently don't have a block, // create a new one. 
currentBlock = methodInfo.lookupCfgBlock(st); } } /* * Below follow the visitor methods from StmtSwitch * */ @Override public void caseAssignStmt(AssignStmt arg0) { precheck(arg0); translateDefinitionStmt(arg0); } @Override public void caseBreakpointStmt(BreakpointStmt arg0) { precheck(arg0); } @Override public void caseEnterMonitorStmt(EnterMonitorStmt arg0) { precheck(arg0); arg0.getOp().apply(this.valueSwitch); this.valueSwitch.popExpression(); this.insideMonitor = true; // TODO Havoc stuff } @Override public void caseExitMonitorStmt(ExitMonitorStmt arg0) { precheck(arg0); arg0.getOp().apply(this.valueSwitch); this.valueSwitch.popExpression(); this.insideMonitor = false; // TODO: } @Override public void caseGotoStmt(GotoStmt arg0) { precheck(arg0); CfgBlock target = this.methodInfo.lookupCfgBlock(arg0.getTarget()); connectBlocks(currentBlock, target); this.currentBlock = null; } @Override public void caseIdentityStmt(IdentityStmt arg0) { precheck(arg0); translateDefinitionStmt(arg0); } @Override public void caseIfStmt(IfStmt arg0) { precheck(arg0); arg0.getCondition().apply(valueSwitch); Expression cond = valueSwitch.popExpression(); // apply the switch twice. Otherwise the conditional and its negation // are aliased. arg0.getCondition().apply(valueSwitch); Expression negCond = new UnaryExpression(loc, UnaryOperator.LNot, valueSwitch.popExpression()); // create a new (empty) block for the fan out // CfgBlock block = methodInfo.lookupCfgBlock(arg0); // if (currentBlock!=null) { // connectBlocks(currentBlock, block); // currentBlock = block; /* * In jimple, conditionals are of the form if (x) goto y; So we end the * current block and create two new blocks for then and else branch. The * new currenBlock becomes the else branch. */ Unit next = units.getSuccOf(arg0); /* * In rare cases of empty If- and Else- blocks, next and * arg0.getTraget() are the same. 
For these cases, we do not generate an * If statement, but still translate the conditional in case it may * throw an exception. */ if (next == arg0.getTarget()) { // ignore the IfStmt. return; } CfgBlock thenBlock = methodInfo.lookupCfgBlock(arg0.getTarget()); connectBlocks(currentBlock, thenBlock, cond); if (next != null) { CfgBlock elseBlock = methodInfo.lookupCfgBlock(next); connectBlocks(currentBlock, elseBlock, negCond); this.currentBlock = elseBlock; } else { connectBlocks(currentBlock, methodInfo.getSink(), negCond); this.currentBlock = null; } } @Override public void caseInvokeStmt(InvokeStmt arg0) { precheck(arg0); translateMethodInvokation(arg0, null, arg0.getInvokeExpr()); } @Override public void caseLookupSwitchStmt(LookupSwitchStmt arg0) { throw new RuntimeException("Should have been eliminated by SwitchStatementRemover"); } @Override public void caseNopStmt(NopStmt arg0) { precheck(arg0); } @Override public void caseRetStmt(RetStmt arg0) { throw new RuntimeException("Not implemented " + arg0); } @Override public void caseReturnStmt(ReturnStmt arg0) { precheck(arg0); arg0.getOp().apply(valueSwitch); Expression returnValue = valueSwitch.popExpression(); currentBlock.addStatement(new AssignStatement(SootTranslationHelpers.v().getSourceLocation(arg0), methodInfo.getReturnVariable().mkExp(loc), returnValue)); connectBlocks(currentBlock, methodInfo.getSink()); currentBlock = null; } @Override public void caseReturnVoidStmt(ReturnVoidStmt arg0) { precheck(arg0); // if (sootMethod.isConstructor()) { // SourceLocation loc = getCurrentLoc(); // SootClass currentClass = // SootTranslationHelpers.v().getCurrentMethod().getDeclaringClass(); // List<SootField> fields = // SootTranslationHelpers.findFieldsRecursively(currentClass); // JimpleBody jb = (JimpleBody)this.sootMethod.getActiveBody(); // for (int i=1; i<methodInfo.getOutVariables().size();i++) { // Variable outVar = methodInfo.getOutVariables().get(i); // Variable tmp = 
methodInfo.createFreshLocal("afdafd", // outVar.getType(), false, false); // AssignStatement as = new AssignStatement(loc, // new IdentifierExpression(loc, outVar), // new IdentifierExpression(loc, tmp)); // currentBlock.addStatement(as); connectBlocks(currentBlock, methodInfo.getSink()); currentBlock = null; } @Override public void caseTableSwitchStmt(TableSwitchStmt arg0) { throw new RuntimeException("Should have been eliminated by SwitchStatementRemover"); } @Override public void caseThrowStmt(ThrowStmt arg0) { precheck(arg0); throw new RuntimeException("Apply the ExceptionRemover first."); // arg0.getOp().apply(valueSwitch); // Expression exception = valueSwitch.popExpression(); // currentBlock.addStatement(new // AssignStatement(SootTranslationHelpers.v().getSourceLocation(arg0), // methodInfo.getExceptionVariable(), exception)); // connectBlocks(currentBlock, methodInfo.getSink()); // currentBlock = null; } @Override public void defaultCase(Object arg0) { throw new RuntimeException("Case not implemented"); } /** * Translate method invokation. This assumes that exceptions and virtual * calls have already been removed. * * @param u * @param optionalLhs * @param call */ private void translateMethodInvokation(Unit u, Value optionalLhs, InvokeExpr call) { if (isHandledAsSpecialCase(u, optionalLhs, call)) { return; } // translate the expressions in the arguments first. LinkedList<Expression> args = new LinkedList<Expression>(); for (int i = 0; i < call.getArgs().size(); i++) { call.getArg(i).apply(valueSwitch); args.add(valueSwitch.popExpression()); } Expression baseExpression = null; // List of possible virtual methods that can be called at this point. // Order matters here. 
if (call instanceof InstanceInvokeExpr) { InstanceInvokeExpr iivk = (InstanceInvokeExpr) call; iivk.getBase().apply(valueSwitch); baseExpression = valueSwitch.popExpression(); // add the "this" variable to the list of args args.addFirst(baseExpression); // this include Interface-, Virtual, and SpecialInvokeExpr } else if (call instanceof StaticInvokeExpr) { // no need to handle the base. } else if (call instanceof DynamicInvokeExpr) { // DynamicInvokeExpr divk = (DynamicInvokeExpr) call; System.err.println("Dynamic invoke translation is only a stub. Will be unsound!"); } else { throw new RuntimeException("Cannot compute instance for " + call.getClass().toString()); } List<Expression> receiver = new LinkedList<Expression>(); receiver.add(methodInfo.getExceptionVariable()); if (optionalLhs != null) { optionalLhs.apply(valueSwitch); Expression lhs = valueSwitch.popExpression(); receiver.add(lhs); } // System.err.println(call); if (call.getMethod().isConstructor() && call instanceof SpecialInvokeExpr) { /* * For our new memory model, we need special treatment of * constructor invoke */ SootTranslationHelpers.v().getMemoryModel().mkConstructorCall(u, call.getMethod(), args); } else { Method method = SootTranslationHelpers.v().lookupOrCreateMethod(call.getMethod()); if (method.getReturnType().size()>1 && optionalLhs==null) { for (int i=1; i<method.getReturnType().size(); i++) { final Variable dummyVar = methodInfo.createFreshLocal("dummy_ret", method.getReturnType().get(i), false, false); receiver.add(new IdentifierExpression(loc, dummyVar)); } } CallStatement stmt = new CallStatement(SootTranslationHelpers.v().getSourceLocation(u), method, args, receiver); this.currentBlock.addStatement(stmt); } } /** * Check if the call is a special case such as System.exit. If so, translate * it and return true. Otherwise, ignore it and return false. 
 *
 * @param u
 * @param optionalLhs
 * @param call
 * @return true, if it is a special method that is handled by the procedure,
 *         and false, otherwise.
 */
private boolean isHandledAsSpecialCase(Unit u, Value optionalLhs, InvokeExpr call) {
    // The internal assert method: translate directly to an AssertStatement.
    if (call.getMethod().getSignature().equals(SootTranslationHelpers.v().getAssertMethod().getSignature())) {
        Verify.verify(optionalLhs == null);
        Verify.verify(call.getArgCount() == 1);
        call.getArg(0).apply(valueSwitch);
        currentBlock.addStatement(
                new AssertStatement(SootTranslationHelpers.v().getSourceLocation(u), valueSwitch.popExpression()));
        return true;
    }
    // String.length() is modeled directly by the memory model.
    if (call.getMethod().getSignature().contains("<java.lang.String: int length()>")) {
        assert (call instanceof InstanceInvokeExpr);
        Expression rhs = SootTranslationHelpers.v().getMemoryModel()
                .mkStringLengthExpr(((InstanceInvokeExpr) call).getBase());
        if (optionalLhs != null) {
            optionalLhs.apply(valueSwitch);
            Expression lhs = valueSwitch.popExpression();
            currentBlock
                    .addStatement(new AssignStatement(SootTranslationHelpers.v().getSourceLocation(u), lhs, rhs));
        }
        return true;
    }
    // Program termination: cut the control flow by dropping the current block.
    if (call.getMethod().getSignature().contains("<java.lang.System: void exit(int)>")
            || call.getMethod().getSignature().contains("<java.lang.Runtime: void halt(int)>")) {
        // TODO: this is not sufficient for interprocedural analysis.
        currentBlock = null;
        return true;
    }
    if (call.getMethod().getDeclaringClass().getName().contains("org.junit.Assert")) {
        // TODO: this should not be hard coded!
        // JUnit's Assert.fail() is modeled as an immediate (default) return.
        if (call.getMethod().getName().equals("fail")) {
            Stmt ret = SootTranslationHelpers.v().getDefaultReturnStatement(sootMethod.getReturnType(), currentStmt);
            ret.apply(this);
            return true;
        }
    }
    // Guava Preconditions.checkArgument(boolean) is modeled as an assertion.
    if (call.getMethod().getDeclaringClass().getName().contains("com.google.common.base.")) {
        if (call.getMethod().getSignature().contains("void checkArgument(boolean)")) {
            Preconditions.checkArgument(optionalLhs == null);
            call.getArg(0).apply(valueSwitch);
            Expression guard = valueSwitch.popExpression();
            currentBlock.addStatement(new AssertStatement(SootTranslationHelpers.v().getSourceLocation(u), guard));
            return true;
        }
    }
    // Class.isAssignableFrom: translated to an instance-of expression over the
    // receiver's ClassVariable. Only relevant if the result is actually used.
    if (call.getMethod().getSignature().equals("<java.lang.Class: boolean isAssignableFrom(java.lang.Class)>")) {
        InstanceInvokeExpr iivk = (InstanceInvokeExpr) call;
        Verify.verify(call.getArgCount() == 1);
        if (optionalLhs != null) {
            optionalLhs.apply(valueSwitch);
            Expression lhs = valueSwitch.popExpression();
            iivk.getBase().apply(valueSwitch);
            Expression binOpRhs = valueSwitch.popExpression();
            Verify.verify(binOpRhs instanceof IdentifierExpression);
            Variable rhsVar = ((IdentifierExpression) binOpRhs).getVariable();
            Verify.verify(rhsVar instanceof ClassVariable);
            call.getArg(0).apply(valueSwitch);
            IdentifierExpression binOpLhs = (IdentifierExpression) valueSwitch.popExpression();
            Expression instOf = SootTranslationHelpers.createInstanceOfExpression(getCurrentLoc(), binOpLhs.getVariable(), (ClassVariable) rhsVar);
            currentBlock.addStatement(
                    new AssignStatement(SootTranslationHelpers.v().getSourceLocation(u), lhs, instOf));
            return true;
        }
        // otherwise ignore.
    } else if (call.getMethod().getSignature().equals("<java.lang.Object: java.lang.Class getClass()>")) {
        // Object.getClass(): read the dynamic-type field of the receiver tuple.
        InstanceInvokeExpr iivk = (InstanceInvokeExpr) call;
        Verify.verify(call.getArgCount() == 0);
        if (optionalLhs != null) {
            Value objectToGetClassFrom = iivk.getBase();
            soot.Type t = objectToGetClassFrom.getType();
            // SootField typeField = null;
            if (t instanceof RefType) {
                // first make a heap-read of the type field.
                // typeField = SootTranslationHelpers.getTypeField(((RefType) t).getSootClass());
                // // now get the dynamic type
                // SootTranslationHelpers.v().getMemoryModel().mkHeapReadStatement(getCurrentStmt(),
                // Jimple.v().newInstanceFieldRef(objectToGetClassFrom, typeField.makeRef()), optionalLhs);
                objectToGetClassFrom.apply(valueSwitch);
                IdentifierExpression base = (IdentifierExpression) valueSwitch.popExpression();
                optionalLhs.apply(valueSwitch);
                Expression left = valueSwitch.popExpression();
                currentBlock.addStatement(new AssignStatement(loc, left, new TupleAccessExpression(loc, base.getVariable(), ReferenceType.TypeFieldName)));
            } else if (t instanceof ArrayType) {
                // typeField = SootTranslationHelpers.getTypeField(Scene.v().getSootClass("java.lang.Object"));
                throw new RuntimeException("Arrays should be removed first.");
            } else {
                throw new RuntimeException("Not implemented. " + t + ", " + t.getClass());
            }
            return true;
        }
    } else if (call.getMethod().getSignature()
            .equals("<java.lang.Class: java.lang.Object cast(java.lang.Object)>")) {
        // TODO: we have to check if we have to throw an exception or add
        // E.g, String.<java.lang.Class: java.lang.Object
        // cast(java.lang.Object)>(x); means (String)x
        // Class.cast(x): assert the subtype relation (PoLeq), then assign x.
        InstanceInvokeExpr iivk = (InstanceInvokeExpr) call;
        Verify.verify(call.getArgCount() == 1);
        if (optionalLhs != null) {
            // TODO
            optionalLhs.apply(valueSwitch);
            Expression lhs = valueSwitch.popExpression();
            iivk.getBase().apply(valueSwitch);
            Expression binOpRhs = valueSwitch.popExpression();
            call.getArg(0).apply(valueSwitch);
            Expression binOpLhs = valueSwitch.popExpression();
            Expression instOf = new BinaryExpression(this.getCurrentLoc(), BinaryOperator.PoLeq, binOpLhs, binOpRhs);
            currentBlock.addStatement(new AssertStatement(SootTranslationHelpers.v().getSourceLocation(u), instOf));
            call.getArg(0).apply(valueSwitch);
            Expression asgnRhs = valueSwitch.popExpression();
            currentBlock.addStatement(
                    new AssignStatement(SootTranslationHelpers.v().getSourceLocation(u), lhs, asgnRhs));
            return true;
        }
    } else if (call.getMethod().getSignature().equals("<java.lang.Class: boolean isInstance(java.lang.Object)>")) {
        /*
         * E.g,
         * $r2 = class "java/lang/String";
         * $z0 = virtualinvoke $r2.<java.lang.Class: boolean
         * isInstance(java.lang.Object)>(r1);
         * checks if r1 instanceof String
         */
        InstanceInvokeExpr iivk = (InstanceInvokeExpr) call;
        Verify.verify(call.getArgCount() == 1);
        if (optionalLhs != null) {
            // TODO
            optionalLhs.apply(valueSwitch);
            Expression lhs = valueSwitch.popExpression();
            iivk.getBase().apply(valueSwitch);
            Expression binOpRhs = valueSwitch.popExpression();
            call.getArg(0).apply(valueSwitch);
            Expression binOpLhs = valueSwitch.popExpression();
            Expression instOf = new BinaryExpression(this.getCurrentLoc(), BinaryOperator.PoLeq, binOpLhs, binOpRhs);
            currentBlock.addStatement(
                    new AssignStatement(SootTranslationHelpers.v().getSourceLocation(u), lhs, instOf));
            return true;
        }
    // SV-COMP Verifier.nondet* intrinsics: each becomes a havoc of the lhs,
    // with range assumptions for the bounded integer types.
    } else if (call.getMethod().getSignature().equals("<org.sosy_lab.sv_benchmarks.Verifier: boolean nondetBoolean()>")) {
        translateVerifierNondet(BooleanType.v(), optionalLhs, call, false, 0, 0);
        return true;
    } else if (call.getMethod().getSignature().equals("<org.sosy_lab.sv_benchmarks.Verifier: byte nondetByte()>")) {
        translateVerifierNondet(ByteType.v(), optionalLhs, call, true, Byte.MIN_VALUE, Byte.MAX_VALUE);
        return true;
    } else if (call.getMethod().getSignature().equals("<org.sosy_lab.sv_benchmarks.Verifier: char nondetChar()>")) {
        translateVerifierNondet(CharType.v(), optionalLhs, call, true, Character.MIN_VALUE, Character.MAX_VALUE);
        return true;
    } else if (call.getMethod().getSignature().equals("<org.sosy_lab.sv_benchmarks.Verifier: short nondetShort()>")) {
        translateVerifierNondet(ShortType.v(), optionalLhs, call, true, Short.MIN_VALUE, Short.MAX_VALUE);
        return true;
    } else if (call.getMethod().getSignature().equals("<org.sosy_lab.sv_benchmarks.Verifier: int nondetInt()>")) {
        translateVerifierNondet(IntType.v(), optionalLhs, call, true, Integer.MIN_VALUE, Integer.MAX_VALUE);
        return true;
    } else if (call.getMethod().getSignature().equals("<org.sosy_lab.sv_benchmarks.Verifier: long nondetLong()>")) {
        translateVerifierNondet(LongType.v(), optionalLhs, call, true, Long.MIN_VALUE, Long.MAX_VALUE);
        return true;
    } else if (call.getMethod().getSignature().equals("<org.sosy_lab.sv_benchmarks.Verifier: void assume(boolean)>")) {
        // Verifier.assume(cond) maps directly to an AssumeStatement.
        Verify.verify(optionalLhs == null);
        Verify.verify(call.getArgCount() == 1);
        call.getArg(0).apply(valueSwitch);
        Expression cond = valueSwitch.popExpression();
        currentBlock.addStatement(
                new AssumeStatement(SootTranslationHelpers.v().getSourceLocation(u), cond));
        return true;
    }
    // System.out.println(call.getMethod().getSignature());
    return false;
}

/**
 * The nondet*() methods of the SV-COMP Verifier class are used to formulate
 * verification problems. Each such call is replaced with a simple havoc of
 * the left-hand side.
 *
 * @param t           the Soot type of the nondeterministic value (currently unused
 *                    in the body; kept for the callers' dispatch)
 * @param optionalLhs the variable receiving the nondet value, or null if ignored
 * @param call        the original invoke expression (must have no arguments)
 * @param addBounds   whether to constrain the havoced value to [lower, upper]
 * @param lower       inclusive lower bound, used only when addBounds is true
 * @param upper       inclusive upper bound, used only when addBounds is true
 */
private void translateVerifierNondet(Type t, Value optionalLhs, InvokeExpr call, boolean addBounds, long lower, long upper) {
    Verify.verify(call.getArgCount() == 0);
    // If the result is discarded, the call has no observable effect at all.
    if (optionalLhs != null) {
        optionalLhs.apply(valueSwitch);
        Expression lhs = valueSwitch.popExpression();
        Verify.verify(lhs instanceof IdentifierExpression, "do not know how to havoc " + lhs);
        IdentifierExpression idLhs = (IdentifierExpression) lhs;
        final SourceLocation loc = lhs.getSourceLocation();
        currentBlock.addStatement(new HavocStatement(loc, idLhs));
        // assume(lower <= idLhs && idLhs <= upper) for bounded integer types.
        if (addBounds)
            currentBlock.addStatement(
                    new AssumeStatement(loc,
                            new BinaryExpression(loc, BinaryOperator.And,
                                    new BinaryExpression(loc, BinaryOperator.Le, new IntegerLiteral(loc, lower), idLhs),
                                    new BinaryExpression(loc, BinaryOperator.Le, idLhs, new IntegerLiteral(loc, upper)))));
    }
}

/**
 * Translate a Jimple definition (assignment/identity) statement. Calls are
 * delegated to translateMethodInvokation; field accesses go through the
 * memory model; array accesses must have been removed by earlier passes.
 *
 * @param def the definition statement to translate
 */
private void translateDefinitionStmt(DefinitionStmt def) {
    if (def.containsInvokeExpr()) {
        translateMethodInvokation(def, def.getLeftOp(), def.getInvokeExpr());
        return;
    }
    Value lhs = def.getLeftOp();
    Value rhs = def.getRightOp();
    if (def.containsFieldRef()) {
        Verify.verify(lhs instanceof FieldRef || rhs instanceof FieldRef);
        if (def.getFieldRef().getField().equals(SootTranslationHelpers.v().getExceptionGlobal())) {
            // Special treatment of the exception global.
            if (lhs instanceof FieldRef) {
                // Writing the exception global: target the per-method exception variable.
                IdentifierExpression left = methodInfo.getExceptionVariable();
                if (rhs instanceof AnyNewExpr) {
                    SootClass sc = ((RefType) ((AnyNewExpr) rhs).getType()).getSootClass();
                    currentBlock.addStatement(
                            new NewStatement(loc, left, SootTranslationHelpers.v().getClassVariable(sc)));
                } else {
                    rhs.apply(valueSwitch);
                    Expression right = valueSwitch.popExpression();
                    currentBlock.addStatement(
                            new AssignStatement(SootTranslationHelpers.v().getSourceLocation(def), left, right));
                }
            } else /* if (rhs instanceof FieldRef) */ {
                // Reading the exception global into a local.
                lhs.apply(valueSwitch);
                Expression left = valueSwitch.popExpression();
                Expression right = methodInfo.getExceptionVariable();
                currentBlock.addStatement(
                        new AssignStatement(SootTranslationHelpers.v().getSourceLocation(def), left, right));
            }
        } else {
            // Ordinary field access: delegate to the memory model.
            if (lhs instanceof FieldRef) {
                Verify.verify(!(rhs instanceof AnyNewExpr));
                SootTranslationHelpers.v().getMemoryModel().mkHeapWriteStatement(def, def.getFieldRef(), rhs);
            } else /* if (rhs instanceof FieldRef) */ {
                SootTranslationHelpers.v().getMemoryModel().mkHeapReadStatement(def, def.getFieldRef(), lhs);
            }
        }
    } else if (def.containsArrayRef()) {
        throw new RuntimeException("Remove Arrays first.");
    } else if (rhs instanceof LengthExpr) {
        throw new RuntimeException("Remove Arrays first.");
    } else {
        // first tell memory model to copy all fields
        if (lhs instanceof Local && rhs instanceof Local)
            SootTranslationHelpers.v().getMemoryModel().mkCopy((Local) lhs, (Local) rhs);
        // local to local assignment.
        lhs.apply(valueSwitch);
        Expression left = valueSwitch.popExpression();
        if (rhs instanceof AnyNewExpr) {
            // Allocation becomes a NewStatement tagged with the class variable.
            SootClass sc = ((RefType) ((AnyNewExpr) rhs).getType()).getSootClass();
            currentBlock.addStatement(new NewStatement(loc, (IdentifierExpression) left,
                    SootTranslationHelpers.v().getClassVariable(sc)));
        } else {
            rhs.apply(valueSwitch);
            Expression right = valueSwitch.popExpression();
            currentBlock.addStatement(
                    new AssignStatement(SootTranslationHelpers.v().getSourceLocation(def), left, right));
        }
    }
    // TODO: assume non-null is not needed because we have a NewStatement now.
    // if (rhs instanceof AnyNewExpr) {
    // // add an assume that lhs is not null.
    // lhs.apply(valueSwitch);
    // Expression left = valueSwitch.popExpression();
    // currentBlock.addStatement(new AssumeStatement(getCurrentLoc(), new BinaryExpression(getCurrentLoc(),
    // BinaryOperator.Ne, left, SootTranslationHelpers.v().getMemoryModel().mkNullConstant())));
}
}
package edu.wustl.cab2b.client.ui;

import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.GradientPaint;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.Vector;

import javax.swing.BorderFactory;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextArea;
import javax.swing.border.EmptyBorder;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableCellRenderer;

import org.jdesktop.swingx.JXPanel;
import org.jdesktop.swingx.JXTitledPanel;
import org.jdesktop.swingx.painter.gradient.BasicGradientPainter;

import edu.common.dynamicextensions.domaininterface.AttributeInterface;
import edu.common.dynamicextensions.domaininterface.EntityInterface;
import edu.wustl.cab2b.client.ui.controls.Cab2bButton;
import edu.wustl.cab2b.client.ui.controls.Cab2bHyperlink;
import edu.wustl.cab2b.client.ui.controls.Cab2bPanel;
import edu.wustl.cab2b.client.ui.controls.Cab2bTable;
import edu.wustl.cab2b.client.ui.controls.Cab2bTitledPanel;
import edu.wustl.cab2b.client.ui.main.AbstractTypePanel;
import edu.wustl.cab2b.client.ui.main.ParseXMLFile;
import edu.wustl.cab2b.client.ui.main.SwingUIManager;
import edu.wustl.cab2b.client.ui.mainframe.Cab2bContentPanel;
import edu.wustl.cab2b.client.ui.mainframe.NewWelcomePanel;
import edu.wustl.cab2b.client.ui.pagination.JPageElement;
import edu.wustl.cab2b.client.ui.pagination.JPagination;
import edu.wustl.cab2b.client.ui.pagination.NumericPager;
import edu.wustl.cab2b.client.ui.pagination.PageElement;
import edu.wustl.cab2b.client.ui.pagination.PageElementImpl;
import edu.wustl.cab2b.client.ui.query.ClientQueryBuilder;
import edu.wustl.cab2b.client.ui.query.IClientQueryBuilderInterface;
import edu.wustl.cab2b.client.ui.util.CDEDetails;
import edu.wustl.cab2b.client.ui.util.CommonUtils;
import edu.wustl.cab2b.common.exception.CheckedException;
import edu.wustl.cab2b.common.queryengine.Cab2bQueryObjectFactory;
import edu.wustl.cab2b.common.util.AttributeInterfaceComparator;
import edu.wustl.cab2b.common.util.Constants;
import edu.wustl.cab2b.common.util.Utility;
import edu.wustl.common.querysuite.queryobject.ICondition;
import edu.wustl.common.querysuite.queryobject.IExpression;
import edu.wustl.common.querysuite.queryobject.IExpressionId;
import edu.wustl.common.querysuite.queryobject.IRule;

/**
 * The class that contains commonalities required for displaying results from
 * the 'AddLimit' and 'choose category' section from the main search dialog.
 *
 * @author mahesh_iyer/chetan_bh/gautam_shetty/Deepak_Shingan
 */
public class SearchResultPanel extends Cab2bPanel implements ActionListener {

    private static final long serialVersionUID = 1L;

    /** Panel holding the pagination component that shows the search results. */
    private Cab2bPanel resultPanel;

    // Button that adds a new limit (rule) to the query.
    private Cab2bButton addLimitButton;

    // Button that applies edits to an existing limit (rule).
    private Cab2bButton editLimitButton;

    // Hyperlink that opens the "CDE Details" dialog for the selected entity.
    private Cab2bHyperlink attributeDetailsLink;

    // Panel combining the add/edit button with the CDE-details link.
    private Cab2bPanel constraintButtonPanel;

    // Entity corresponding to the most recently clicked result hyperlink.
    private EntityInterface entityForSelectedLink;

    /**
     * Saved reference to the content searchPanel that needs to be refreshed for
     * appropriate events.
     */
    protected ContentPanel contentPanel;

    /**
     * Constructor
     *
     * @param contentPanel
     *            Reference to the parent content searchPanel that needs
     *            refreshing.
     * @param result
     *            The collection of entities.
     */
    public SearchResultPanel(ContentPanel contentPanel, Set<EntityInterface> result) {
        this.contentPanel = contentPanel;
        initGUI(result);
    }

    /**
     * Method initializes the searchPanel by appropriately laying out child
     * components.
     *
     * @param resultSet
     *            The collection of entities; may be null, in which case
     *            nothing is rendered.
     */
    private void initGUI(Set<EntityInterface> resultSet) {
        // Register this panel with the AddLimit panel so it can be reused there.
        if (contentPanel instanceof AddLimitPanel) {
            ((AddLimitPanel) contentPanel).setSearchResultPanel(this);
        }
        Vector<PageElement> pageElementCollection = new Vector<PageElement>();
        if (resultSet != null) {
            List<EntityInterface> resultList = new ArrayList<EntityInterface>(resultSet);
            // Collections.sort(resultList, new EntityInterfaceComparator());
            for (EntityInterface entity : resultList) {
                // Create an instance of the PageElement. Initialize with the
                // appropriate data
                PageElement pageElement = new PageElementImpl();
                String className = Utility.getDisplayName(entity);
                pageElement.setDisplayName(className);
                String description = entity.getDescription();
                if (description == null || description.equals(""))
                    description = "*No description available. ";
                pageElement.setDescription(description);
                pageElement.setUserObject(entity);
                pageElementCollection.add(pageElement);
            }
            NumericPager numericPager = new NumericPager(pageElementCollection, getPageSize());
            /* Initialize the pagination component. */
            JPagination resultsPage = new JPagination(pageElementCollection, numericPager, this, true);
            resultsPage.setSelectableEnabled(false);
            resultsPage.setGroupActionEnabled(false);
            resultsPage.addPageElementActionListener(this);
            resultPanel = new Cab2bPanel();
            resultPanel.add("hfill vfill ", resultsPage);
            JXTitledPanel titledSearchResultsPanel = displaySearchSummary(resultList.size());
            titledSearchResultsPanel.setContentContainer(resultPanel);
            add("hfill vfill", titledSearchResultsPanel);
        }
    }

    /**
     * Sets result panel: clears the current contents and embeds the given panel.
     *
     * @param resulPanel the panel to show inside the results area
     */
    public void setResultPanel(Cab2bPanel resulPanel) {
        resultPanel.removeAll();
        resultPanel.add("hfill vfill ", resulPanel);
    }

    /** @return the entity of the most recently clicked result hyperlink */
    public EntityInterface getEntityForSelectedLink() {
        return entityForSelectedLink;
    }

    /**
     * Removing result panel
     */
    public void removeResultPanel() {
        if (resultPanel != null)
            resultPanel.removeAll();
    }

    /**
     * Initializing/Adding Add Limit button.
     *
     * @param panelsToAdd the attribute panels whose conditions the button submits
     * @param entity      the entity the limit applies to
     */
    public void initializeAddLimitButton(final JXPanel[] panelsToAdd, final EntityInterface entity) {
        addLimitButton = new Cab2bButton("Add Limit");
        addLimitButton.setPreferredSize(new Dimension(95, 22));
        addLimitButton.addActionListener(new AddLimitButtonListner(panelsToAdd, entity));
    }

    /**
     * Initializing/Adding Edit Limit button.
     *
     * @param panelsToAdd the attribute panels whose conditions the button submits
     * @param expression  the query expression being edited
     */
    private void initializeEditLimitButtons(final JXPanel[] panelsToAdd, final IExpression expression) {
        editLimitButton = new Cab2bButton("Edit Limit");
        editLimitButton.addActionListener(new EditLimitButtonListner(panelsToAdd, expression));
        editLimitButton.setPreferredSize(new Dimension(95, 22));
    }

    /**
     * This method creates a hyperlink which will display certain
     * details of all the attributes of the given entity.
     *
     * @param entity the entity whose CDE details should be shown
     */
    private void initializeAttributeDetailLink(final EntityInterface entity) {
        attributeDetailsLink = new Cab2bHyperlink();
        attributeDetailsLink.setText("CDE Details");
        attributeDetailsLink.addActionListener(new AttributeDetailsLinkListener(entity));
    }

    /**
     * Builds the panel that shows the given button next to the CDE-details link.
     *
     * @param cab2bButton the add/edit limit button to embed
     * @param entity      entity used to (lazily) create the details link
     * @return the assembled button panel
     */
    private Cab2bPanel getConstraintButtonPanel(Cab2bButton cab2bButton, EntityInterface entity) {
        constraintButtonPanel = new Cab2bPanel(new RiverLayout(5, 0));
        constraintButtonPanel.add(cab2bButton);
        constraintButtonPanel.add("tab", new JLabel(" | "));
        // Lazily create the link if a caller has not initialized it already.
        if (attributeDetailsLink == null) {
            initializeAttributeDetailLink(entity);
        }
        constraintButtonPanel.add("tab", attributeDetailsLink);
        constraintButtonPanel.setOpaque(false);
        return constraintButtonPanel;
    }

    /**
     * Method to create the Add Limit UI for an entity.
     *
     * @param entity the entity to build attribute panels for
     * @return the two-element panel array (buttons panel, attributes panel)
     */
    protected JXPanel[] createAddLimitPanels(final EntityInterface entity) {
        // NOTE(review): getAttributeComponentPanels may return null (e.g. after a
        // CheckedException); initializePanelsForAddConstraints would then NPE on
        // componentPanel.length — TODO confirm whether callers guarantee non-null.
        final JXPanel[] componentPanel = getAttributeComponentPanels(entity);
        final JXPanel[] finalPanelToadd = initializePanelsForAddConstraints(componentPanel);
        initializeAddLimitButton(componentPanel, entity);
        finalPanelToadd[0].add(getConstraintButtonPanel(addLimitButton, entity));
        GradientPaint gp1 = new GradientPaint(new Point2D.Double(.09d, 0), Color.LIGHT_GRAY, new Point2D.Double(
                .95d, 0), Color.WHITE);
        finalPanelToadd[0].setBackgroundPainter(new BasicGradientPainter(gp1));
        finalPanelToadd[0].setBorder(BorderFactory.createLineBorder(Color.BLACK));
        return finalPanelToadd;
    }

    /**
     * Get panels array to be displayed in the edit-limit searchPanel.
     *
     * @param expression the expression whose entity and conditions are edited
     * @return the two-element panel array (buttons panel, attributes panel)
     */
    public JXPanel[] createEditLimitPanels(final IExpression expression) {
        /* This is the EntityInterface instance. */
        final EntityInterface entity = expression.getQueryEntity().getDynamicExtensionsEntity();
        final JXPanel[] componentPanel = getAttributeComponentPanels(entity);
        final JXPanel[] finalPanelToadd = initializePanelsForAddConstraints(componentPanel);
        initializeEditLimitButtons(componentPanel, expression);
        finalPanelToadd[0].add(getConstraintButtonPanel(editLimitButton, entity));
        return finalPanelToadd;
    }

    /**
     * The action listener for the individual page elements.
     *
     * @param actionEvent
     *            The event that contains details of the click on the individual
     *            page elements.
     */
    public void actionPerformed(ActionEvent actionEvent) {
        Cab2bHyperlink<JPageElement> link = (Cab2bHyperlink<JPageElement>) (actionEvent.getSource());
        JPageElement jPageElement = link.getUserObject();
        jPageElement.resetLabel();
        // Swap the visual selection from the previously selected element to this one.
        JPagination pagination = jPageElement.getPagination();
        JPageElement selectedPageElement = pagination.getSelectedJPageElement();
        if (selectedPageElement != null) {
            selectedPageElement.resetHyperLink();
        }
        pagination.setSelectedJPageElement(jPageElement);
        PageElement pageElement = jPageElement.getPageElement();
        entityForSelectedLink = (EntityInterface) pageElement.getUserObject();
        if (contentPanel instanceof Cab2bContentPanel) {
            SearchPanel searchPanel = (SearchPanel) this.getParent();
            (searchPanel.getAttributeSelectCDCPanel()).setEntityInterface(entityForSelectedLink);
            (searchPanel.getAttributeSelectCDCPanel()).generatePanel();
            (searchPanel.getAttributeSelectCDCPanel()).setTestDAG(searchPanel.getTestDAG());
        }
        initializeAttributeDetailLink(entityForSelectedLink);
        // NOTE(review): the null check below happens AFTER createAddLimitPanels has
        // already consumed the (possibly null) panels — looks like the check is
        // ineffective; verify intended ordering.
        final JXPanel[] panelsToAdd = createAddLimitPanels(entityForSelectedLink);
        if (getAttributeComponentPanels(entityForSelectedLink) != null) {
            // pass the appropriate class name for display
            performAction(panelsToAdd, Utility.getDisplayName(entityForSelectedLink));
        }
        updateUI();
    }

    /**
     * Builds one AbstractTypePanel per attribute of the entity, sorted by
     * attribute name, each initialized with its operator controls.
     *
     * @param entity the entity whose attributes are rendered
     * @return the attribute panels, or null if the entity has no attribute
     *         collection (or a CheckedException occurred before assignment)
     */
    private JXPanel[] getAttributeComponentPanels(final EntityInterface entity) {
        final Collection<AttributeInterface> attributeCollection = entity.getAttributeCollection();
        AbstractTypePanel[] componentPanels = null;
        if (attributeCollection != null) {
            try {
                List<AttributeInterface> attributeList = new ArrayList<AttributeInterface>(attributeCollection);
                Collections.sort(attributeList, new AttributeInterfaceComparator());
                componentPanels = new AbstractTypePanel[attributeList.size()];
                ParseXMLFile parseFile = ParseXMLFile.getInstance();
                // Uniform label width so the attribute rows line up.
                Dimension maxLabelDimension = CommonUtils.getMaximumLabelDimension(attributeList);
                int i = 0;
                for (AttributeInterface attribute : attributeList) {
                    componentPanels[i++] = (AbstractTypePanel) SwingUIManager.generateUIPanel(parseFile, attribute,
                                                                                             maxLabelDimension);
                    componentPanels[i - 1].createPanelWithOperator(attribute);
                }
            } catch (CheckedException checkedException) {
                CommonUtils.handleException(checkedException, this, true, true, false, false);
            }
        }
        return componentPanels;
    }

    /**
     * Lays out the attribute panels in a scroll pane and returns the pair
     * (button strip panel, scrollable attributes panel).
     *
     * @param componentPanel the attribute panels to lay out
     * @return two-element array: [0] button strip, [1] scrollable attribute area
     */
    public JXPanel[] initializePanelsForAddConstraints(JXPanel[] componentPanel) {
        Cab2bPanel cab2bPanel = new Cab2bPanel(new RiverLayout(5, 5));
        for (int j = 0; j < componentPanel.length; j++) {
            cab2bPanel.add("br", componentPanel[j]);
        }
        // NOTE(review): this uses JScrollPane(int, int) — the two ints are the
        // vertical/horizontal scrollbar policies; the component is added to the
        // viewport explicitly below.
        JScrollPane pane = new JScrollPane(JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
                JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
        pane.getViewport().setBackground(Color.WHITE);
        pane.getViewport().add(cab2bPanel);
        pane.getViewport().setBorder(null);
        pane.setBorder(null);
        JXPanel[] finalPanelsToAdd = new Cab2bPanel[2];
        FlowLayout flowLayout = new FlowLayout(2, 0, 3);
        finalPanelsToAdd[0] = new Cab2bPanel(flowLayout);
        finalPanelsToAdd[1] = new Cab2bPanel();
        finalPanelsToAdd[1].add("hfill vfill ", pane);
        return finalPanelsToAdd;
    }

    /**
     * Method to handle 'Add Limit' button click event. Collects the valid
     * conditions from the attribute panels and adds them as a rule to the
     * query object.
     *
     * @param componentPanel the attribute panels to read conditions from
     * @param entity         the entity the limit applies to
     */
    public void performAddLimitAction(JXPanel[] componentPanel, EntityInterface entity) {
        List<AttributeInterface> attributes = new ArrayList<AttributeInterface>();
        List<String> conditions = new ArrayList<String>();
        List<List<String>> values = new ArrayList<List<String>>();
        for (int j = 0; j < componentPanel.length; j++) {
            AbstractTypePanel panel = (AbstractTypePanel) componentPanel[j];
            // <0 => invalid input, abort; ==0 => a condition was entered; other
            // values are skipped (empty panels).
            int conditionStatus = panel.isConditionValid(contentPanel);
            if (conditionStatus < 0)
                return;
            else if (conditionStatus == 0) {
                attributes.add(panel.getAttributeEntity());
                conditions.add(panel.getConditionItem());
                values.add(panel.getValues());
            }
        }
        if (attributes.isEmpty()) {
            JOptionPane.showMessageDialog(contentPanel, "Please add condition(s) before proceeding",
                    "Add Limit Warning", JOptionPane.WARNING_MESSAGE);
        } else {
            MainSearchPanel mainSearchPanel = (MainSearchPanel) ((JXPanel) contentPanel).getParent().getParent();
            // Lazily create the query object on first added rule.
            if (mainSearchPanel.getQueryObject() == null) {
                IClientQueryBuilderInterface query = new ClientQueryBuilder();
                mainSearchPanel.setQueryObject(query);
                mainSearchPanel.getCenterPanel().getAddLimitPanel().setQueryObject(query);
            }
            IExpressionId expressionId = mainSearchPanel.getQueryObject().addRule(attributes, conditions, values);
            if (expressionId == null) {
                JOptionPane.showMessageDialog(
                        mainSearchPanel.getParent(),
                        "This rule cannot be added as it is not associated with the added rules.", "Error",
                        JOptionPane.ERROR_MESSAGE);
            } else {
                mainSearchPanel.getCenterPanel().getAddLimitPanel().refreshBottomCenterPanel(expressionId);
            }
        }
    }

    /**
     * Returns the AttributeInterface matching the given name from the
     * collection parameter (trimmed, case-sensitive comparison).
     *
     * @param attributeCollection attributes to search
     * @param attributeName       the name to look for
     * @return the matching attribute, or null if no attribute matches
     */
    private AttributeInterface getAttribute(Collection<AttributeInterface> attributeCollection, String attributeName) {
        AttributeInterface requriedAttribute = null;
        for (AttributeInterface attribute : attributeCollection) {
            if (attributeName.trim().equals(attribute.getName().trim())) {
                requriedAttribute = attribute;
                break;
            }
        }
        return requriedAttribute;
    }

    /**
     * Method to perform edit limit action: rebuilds the condition list from
     * the attribute panels and replaces the conditions of the expression's
     * first rule.
     *
     * @param componentPanel the attribute panels to read conditions from
     * @param expression     the expression whose rule is rewritten
     */
    public void performEditLimitAction(JXPanel[] componentPanel, IExpression expression) {
        List<ICondition> conditionList = new ArrayList<ICondition>();
        for (int j = 0; j < componentPanel.length; j++) {
            AbstractTypePanel panel = (AbstractTypePanel) componentPanel[j];
            String conditionString = panel.getConditionItem();
            ArrayList<String> values = panel.getValues();
            // "Between" needs exactly two values; reject a single value early.
            if (0 == conditionString.compareToIgnoreCase("Between") && (values.size() == 1)) {
                JOptionPane.showInternalMessageDialog((this.contentPanel).getParent().getParent().getParent(),
                        "Please enter both the values for between operator.", "Error", JOptionPane.ERROR_MESSAGE);
                return;
            }
            // Null-checks need no values; everything else needs at least one.
            if ((conditionString.equals("Is Null")) || conditionString.equals("Is Not Null")
                    || (values.size() != 0)) {
                ICondition condition = Cab2bQueryObjectFactory.createCondition();
                final AttributeInterface attribute = panel.getAttributeEntity();
                condition.setAttribute(attribute);
                condition.setRelationalOperator(edu.wustl.cab2b.client.ui.query.Utility.getRelationalOperator(conditionString));
                for (int i = 0; i < values.size(); i++) {
                    condition.addValue(values.get(i));
                }
                conditionList.add(condition);
            }
        }
        if (conditionList.isEmpty()) {
            MainSearchPanel mainSearchPanel = (MainSearchPanel) ((JXPanel) contentPanel).getParent().getParent();
            JOptionPane.showInternalMessageDialog(
                    mainSearchPanel.getParent(),
                    "This rule cannot be added as it is not associated with the added rules.", "Error",
                    JOptionPane.ERROR_MESSAGE);
        } else {
            // Replace all conditions of the rule with the freshly collected set.
            IRule rule = (IRule) expression.getOperand(0);
            rule.removeAllConditions();
            for (int i = 0; i < conditionList.size(); i++) {
                rule.addCondition(conditionList.get(i));
            }
        }
    }

    /**
     * This method generates the search summary searchPanel.
     *
     * @param numberOfResults
     *            number of results obtained
     * @return summary searchPanel
     */
    public JXTitledPanel displaySearchSummary(int numberOfResults) {
        String message = (numberOfResults == 0) ? "No result found."
                : "Search Results :- Total results ( " + numberOfResults + " )";
        JXTitledPanel titledSearchResultsPanel = new Cab2bTitledPanel(message);
        GradientPaint gp = new GradientPaint(new Point2D.Double(.05d, 0), new Color(185, 211, 238),
                new Point2D.Double(.95d, 0), Color.WHITE);
        titledSearchResultsPanel.setTitlePainter(new BasicGradientPainter(gp));
        titledSearchResultsPanel.setBorder(new EmptyBorder(0, 0, 0, 0));
        titledSearchResultsPanel.setTitleFont(new Font("SansSerif", Font.BOLD, 11));
        titledSearchResultsPanel.setTitleForeground(Color.BLACK);
        return titledSearchResultsPanel;
    }

    /**
     * The method that needs to handle any refresh related activities.
     *
     * @param attributeComponentPanel
     *            This is the array of panels that forms the dynamically
     *            generated criterion pages. Each searchPanel corresponds to one
     *            attribute from the class/category. Method to select appropriate
     *            searchPanel and refresh the addLimit page.
     * @param className
     *            The class/category name.
     */
    private void performAction(JXPanel[] attributeComponentPanel, String className) {
        Container container = ((JXPanel) (contentPanel)).getParent();
        if (container instanceof SearchCenterPanel) {
            SearchCenterPanel searchCenterPanel = (SearchCenterPanel) container;
            /*
             * Use the parent reference to in turn get a reference to the
             * navigation searchPanel, and cause it to move to the next card.
             */
            MainSearchPanel mainSearchPanel = (MainSearchPanel) (searchCenterPanel.getParent());
            mainSearchPanel.getNavigationPanel().enableButtons();
            /*
             * Get the searchPanel corresponding to the currently selected card
             * and refresh it.
             */
            AddLimitPanel addLimitPanel = searchCenterPanel.getAddLimitPanel();
            addLimitPanel.refresh(attributeComponentPanel, className);
            // set search-result searchPanel in AddLimit searchPanel and move to
            // next tab
            if (searchCenterPanel.getSelectedCardIndex() == 0) {
                ChooseCategoryPanel chooseCategoryPanel = searchCenterPanel.getChooseCategoryPanel();
                addLimitPanel.addSearchPanel(chooseCategoryPanel.getSearchPanel());
                SearchResultPanel searchResultPanel = chooseCategoryPanel.getSearchResultPanel();
                if (searchResultPanel != null) {
                    addLimitPanel.addResultsPanel(searchResultPanel);
                    searchCenterPanel.setAddLimitPanel(addLimitPanel);
                }
                mainSearchPanel.getNavigationPanel().showCard(true);
            }
        }
    }

    /**
     * Returns the number of elements to be displayed per page.
     *
     * @return int Value represents the number of elements/page.
     */
    public int getPageSize() {
        return 3;
    };

    /**
     * @return the addLimitButtonTop
     */
    public Cab2bButton getAddLimitButton() {
        return addLimitButton;
    }

    /**
     * Action Listener class for Add Limit buttons
     *
     * @author Deepak_Shingan
     */
    class AddLimitButtonListner implements ActionListener {
        private JXPanel[] panelsToAdd;

        private EntityInterface entity;

        public AddLimitButtonListner(final JXPanel[] panelsToAdd, final EntityInterface entity) {
            this.panelsToAdd = panelsToAdd;
            this.entity = entity;
        }

        public void actionPerformed(ActionEvent event) {
            performAddLimitAction(panelsToAdd, entity);
            // Reposition the AddLimit divider after the rule is added.
            AddLimitPanel.m_innerPane.setDividerLocation(242);
        }
    }

    /**
     * Action Listener class for Edit Limit buttons
     *
     * @author Deepak_Shingan
     */
    class EditLimitButtonListner implements ActionListener {
        private JXPanel[] panelsToAdd;

        private IExpression expression;

        public EditLimitButtonListner(final JXPanel[] panelsToAdd, final IExpression expression) {
            this.panelsToAdd = panelsToAdd;
            this.expression = expression;
        }

        public void actionPerformed(ActionEvent event) {
            performEditLimitAction(this.panelsToAdd, this.expression);
        }
    }

    /**
     * Listener that opens a modal dialog showing CDE details for the entity's
     * attributes in a four-column table.
     */
    class AttributeDetailsLinkListener implements ActionListener {
        private EntityInterface entity;

        public AttributeDetailsLinkListener(EntityInterface entity) {
            this.entity = entity;
        }

        public void actionPerformed(ActionEvent event) {
            Cab2bTable cab2bTable = new Cab2bTable(new CDETableModel(entity));
            cab2bTable.setBorder(null);
            cab2bTable.setRowHeightEnabled(true);
            cab2bTable.setShowGrid(false);
            cab2bTable.getColumnModel().getColumn(0).setPreferredWidth(50);
            cab2bTable.getColumnModel().getColumn(1).setPreferredWidth(10);
            cab2bTable.getColumnModel().getColumn(2).setPreferredWidth(30);
            cab2bTable.getColumnModel().getColumn(3).setPreferredWidth(320);
            cab2bTable.setRowSelectionAllowed(false);
            // Wrap text in every column using the shared renderer.
            for (int j = 0; j < 4; j++) {
                cab2bTable.getColumnModel().getColumn(j).setCellRenderer(new MyCellRenderer());
            }
            cab2bTable.getTableHeader().setFont(new Font("Arial", Font.BOLD, 14));
            JScrollPane jScrollPane = new JScrollPane(cab2bTable, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
                    JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
            jScrollPane.setBorder(null);
            /*
             * WindowUtilities.showInDialog(NewWelcomePanel.mainFrame,
             * jScrollPane, "CDE Details", Constants.WIZARD_SIZE2_DIMENSION,
             * true, false);
             */
            WindowUtilities.showInDialog(NewWelcomePanel.mainFrame, jScrollPane, "CDE Details",
                    Constants.WIZARD_SIZE2_DIMENSION, true, false);
        }
    }

    /**
     * @return the constraintButtonPanel
     */
    public Cab2bPanel getConstraintButtonPanel() {
        return constraintButtonPanel;
    }

    /**
     * Word-wrapping cell renderer that grows row heights to fit the wrapped
     * text of the widest cell in the row.
     */
    class MyCellRenderer extends JTextArea implements TableCellRenderer {
        public MyCellRenderer() {
            setLineWrap(true);
            setWrapStyleWord(true);
        }

        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
                                                       boolean hasFocus, int row, int column) {
            if (value != null) {
                setText(value.toString());
            }
            // Constrain to the column width so preferred height reflects wrapping.
            setSize(table.getColumnModel().getColumn(column).getWidth(), getPreferredSize().height);
            if (table.getRowHeight(row) != getPreferredSize().height) {
                table.setRowHeight(row, getPreferredSize().height);
            }
            return this;
        }
    }

    /** Table model that delegates all queries to a CDEDetails snapshot. */
    private class CDETableModel extends AbstractTableModel {
        private CDEDetails cdeDetails;

        private CDETableModel(EntityInterface entity) {
            super();
            this.cdeDetails = new CDEDetails(entity);
        }

        public int getRowCount() {
            return cdeDetails.getRowCount();
        }

        public int getColumnCount() {
            return cdeDetails.getColumnCount();
        }

        public Object getValueAt(int row, int column) {
            return cdeDetails.getValueAt(row, column);
        }

        public String getColumnName(int column) {
            return cdeDetails.getColumnName(column);
        }
    }
}
package com.intellij.openapi.roots.ui.configuration;

import com.intellij.ide.DataManager;
import com.intellij.ide.util.projectWizard.ProjectJdkListRenderer;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ui.configuration.projectRoot.JdkListConfigurable;
import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectJdksModel;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Condition;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.ScreenUtil;
import com.intellij.util.Consumer;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;

/**
 * Combo box listing the JDKs known to a {@link ProjectJdksModel}, plus a few
 * synthetic entries: the project JDK ("inherited"), a "none" entry, and an
 * "invalid" entry for a JDK name that no longer resolves. Items are wrapped in
 * {@link JdkComboBoxItem} and its subclasses; the underlying model is
 * {@link JdkComboBoxModel}.
 */
class JdkComboBox extends JComboBox{
    // "Edit" button wired up lazily in appendEditButton(); its enabled state
    // tracks the current combo selection (see the listener added there).
    private final JButton myEditButton = new JButton(ApplicationBundle.message("button.edit"));

    public JdkComboBox(final ProjectJdksModel jdksModel) {
        super(new JdkComboBoxModel(jdksModel));
        // Custom renderer: invalid entries in error style, the project-JDK entry
        // shows the inherited JDK's icon and name, everything else delegates to
        // the platform ProjectJdkListRenderer with the wrapped Sdk.
        setRenderer(new ProjectJdkListRenderer() {
            protected void customizeCellRenderer(JList list, Object value, int index, boolean selected, boolean hasFocus) {
                // When the combo is disabled nothing is appended at all —
                // presumably the disabled look is handled by the parent renderer.
                if (JdkComboBox.this.isEnabled()) {
                    if (value instanceof InvalidJdkComboBoxItem) {
                        final String str = value.toString();
                        append(str, SimpleTextAttributes.ERROR_ATTRIBUTES);
                    }
                    else if (value instanceof ProjectJdkComboBoxItem){
                        final ProjectJdkComboBoxItem item = (ProjectJdkComboBoxItem)value;
                        final String str = item.toString();
                        // The "project JDK" entry reflects whatever the model
                        // currently designates as the project JDK; if none is
                        // set, the entry is rendered as an error.
                        final Sdk jdk = jdksModel.getProjectJdk();
                        if (jdk != null){
                            setIcon(jdk.getSdkType().getIcon());
                            append(ProjectBundle.message("project.roots.project.jdk.inherited"),
                                   SimpleTextAttributes.REGULAR_ATTRIBUTES);
                            append(" (" + jdk.getName() + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
                        }
                        else {
                            append(str, SimpleTextAttributes.ERROR_ATTRIBUTES);
                        }
                    }
                    else {
                        // Ordinary entry: hand the wrapped Sdk (or a NoneJdkComboBoxItem
                        // placeholder for a null cell) to the platform renderer.
                        super.customizeCellRenderer(list,
                                                    value != null ? ((JdkComboBoxItem)value).getJdk() : new NoneJdkComboBoxItem(),
                                                    index, selected, hasFocus);
                    }
                }
            }
        });
    }

    @Override
    public Dimension getPreferredSize() {
        // Cap the preferred width at a quarter of the (primary) screen so a JDK
        // with a very long name/path cannot blow up the dialog layout.
        final Rectangle rec = ScreenUtil.getScreenRectangle(0, 0);
        final Dimension size = super.getPreferredSize();
        final int maxWidth = rec.width / 4;
        if (size.width > maxWidth) {
            size.width = maxWidth;
        }
        return size;
    }

    @Override
    public Dimension getMinimumSize() {
        // Keep the minimum width no larger than the (already capped) preferred width.
        final Dimension minSize = super.getMinimumSize();
        final Dimension prefSize = getPreferredSize();
        if (minSize.width > prefSize.width) {
            minSize.width = prefSize.width;
        }
        return minSize;
    }

    /** Convenience overload: no extra setup callback, not a module-level JDK. */
    public JButton createSetupButton(final Project project, final ProjectJdksModel jdksModel, final JdkComboBoxItem firstItem) {
        return createSetupButton(project, jdksModel, firstItem, null, false);
    }

    /**
     * Creates the "New..." button that pops up the add-JDK action group.
     * After a JDK is added the model is reloaded and the new JDK selected;
     * {@code additionalSetup} may veto that selection (returning {@code true}
     * restores {@code firstItem}'s JDK instead).
     */
    public JButton createSetupButton(final Project project,
                                     final ProjectJdksModel jdksModel,
                                     final JdkComboBoxItem firstItem,
                                     final Condition<Sdk> additionalSetup,
                                     final boolean moduleJdkSetup) {
        final JButton setUpButton = new JButton(ApplicationBundle.message("button.new"));
        setUpButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                final JdkListConfigurable configurable = JdkListConfigurable.getInstance(project);
                DefaultActionGroup group = new DefaultActionGroup();
                jdksModel.createAddActions(group, JdkComboBox.this, new Consumer<Sdk>() {
                    public void consume(final Sdk jdk) {
                        configurable.addJdkNode(jdk, false);
                        reloadModel(firstItem, project);
                        setSelectedJdk(jdk); //restore selection
                        if (additionalSetup != null) {
                            if (additionalSetup.value(jdk)) { //leave old selection
                                setSelectedJdk(firstItem.getJdk());
                            }
                        }
                    }
                });
                // The 1/2 argument feeds the title's message format — presumably a
                // ChoiceFormat selecting module vs. project wording; verify in the bundle.
                JBPopupFactory.getInstance()
                    .createActionGroupPopup(ProjectBundle.message("project.roots.set.up.jdk.title", moduleJdkSetup ? 1 : 2),
                                            group,
                                            DataManager.getInstance().getDataContext(),
                                            JBPopupFactory.ActionSelectionAid.MNEMONICS,
                                            false)
                    .showUnderneathOf(setUpButton);
            }
        });
        return setUpButton;
    }

    /**
     * Adds the shared "Edit" button to {@code panel} and keeps its enabled
     * state in sync with the combo selection: enabled for the project-JDK
     * entry only when a project JDK exists, disabled for invalid/none entries.
     */
    public void appendEditButton(final Project project, final JPanel panel, GridBagConstraints gc, final Computable<Sdk> retrieveJDK){
        myEditButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                final Sdk projectJdk = retrieveJDK.compute();
                ProjectStructureConfigurable.getInstance(project).select(projectJdk, true);
            }
        });
        addActionListener(new ActionListener(){
            public void actionPerformed(ActionEvent e) {
                final JdkComboBoxItem selectedItem = getSelectedItem();
                if (selectedItem instanceof ProjectJdkComboBoxItem) {
                    myEditButton.setEnabled(ProjectStructureConfigurable.getInstance(project).getProjectJdksModel().getProjectJdk() != null);
                }
                else {
                    // Note: a null selectedItem is safe here — `null instanceof X`
                    // is false, and && short-circuits before getJdk() is reached.
                    myEditButton.setEnabled(!(selectedItem instanceof InvalidJdkComboBoxItem) &&
                                            selectedItem != null && selectedItem.getJdk() != null);
                }
            }
        });
        panel.add(myEditButton, gc);
    }

    /** Covariant override: every element in the model is a JdkComboBoxItem. */
    public JdkComboBoxItem getSelectedItem() {
        return (JdkComboBoxItem)super.getSelectedItem();
    }

    /** Returns the Sdk wrapped by the selection, or null for no selection / synthetic entries. */
    public Sdk getSelectedJdk() {
        final JdkComboBoxItem selectedItem = (JdkComboBoxItem)super.getSelectedItem();
        return selectedItem != null? selectedItem.getJdk() : null;
    }

    /** Selects the item wrapping {@code jdk}; leaves the selection unchanged if absent. */
    public void setSelectedJdk(Sdk jdk) {
        final int index = indexOf(jdk);
        if (index >= 0) {
            setSelectedIndex(index);
        }
    }

    /** Replaces any previous invalid entry with one for {@code name} and selects it. */
    public void setInvalidJdk(String name) {
        removeInvalidElement();
        addItem(new InvalidJdkComboBoxItem(name));
        setSelectedIndex(getModel().getSize() - 1);
    }

    // Linear search. For a null jdk the first "none" OR "project" entry wins —
    // whichever appears first in the model.
    private int indexOf(Sdk jdk) {
        final JdkComboBoxModel model = (JdkComboBoxModel)getModel();
        final int count = model.getSize();
        for (int idx = 0; idx < count; idx++) {
            final JdkComboBoxItem elementAt = model.getElementAt(idx);
            if (jdk == null) {
                if (elementAt instanceof NoneJdkComboBoxItem) {
                    return idx;
                }
                else if (elementAt instanceof ProjectJdkComboBoxItem){
                    return idx;
                }
            }
            else {
                if (jdk.equals(elementAt.getJdk())) {
                    return idx;
                }
            }
        }
        return -1;
    }

    // Removes at most one InvalidJdkComboBoxItem (the model is expected to hold
    // at most one — setInvalidJdk maintains that invariant).
    private void removeInvalidElement() {
        final JdkComboBoxModel model = (JdkComboBoxModel)getModel();
        final int count = model.getSize();
        for (int idx = 0; idx < count; idx++) {
            final JdkComboBoxItem elementAt = model.getElementAt(idx);
            if (elementAt instanceof InvalidJdkComboBoxItem) {
                removeItemAt(idx);
                break;
            }
        }
    }

    /**
     * Rebuilds the model: {@code firstItem} first, then all JDKs from the
     * project-level {@link ProjectJdksModel} sorted case-insensitively by name.
     */
    public void reloadModel(JdkComboBoxItem firstItem, Project project) {
        final DefaultComboBoxModel model = ((DefaultComboBoxModel)getModel());
        model.removeAllElements();
        model.addElement(firstItem);
        final ArrayList<Sdk> projectJdks = new ArrayList<Sdk>(ProjectJdksModel.getInstance(project).getProjectJdks().values());
        Collections.sort(projectJdks, new Comparator<Sdk>() {
            public int compare(final Sdk o1, final Sdk o2) {
                return o1.getName().compareToIgnoreCase(o2.getName());
            }
        });
        for (Sdk projectJdk : projectJdks) {
            model.addElement(new JdkComboBox.JdkComboBoxItem(projectJdk));
        }
    }

    /** Model pre-populated with the configured SDKs, sorted case-insensitively by name. */
    private static class JdkComboBoxModel extends DefaultComboBoxModel {
        public JdkComboBoxModel(final ProjectJdksModel jdksModel) {
            super();
            final Sdk[] jdks = jdksModel.getSdks();
            Arrays.sort(jdks, new Comparator<Sdk>() {
                public int compare(final Sdk s1, final Sdk s2) {
                    return s1.getName().compareToIgnoreCase(s2.getName());
                }
            });
            for (Sdk jdk : jdks) {
                addElement(new JdkComboBoxItem(jdk));
            }
        }

        // implements javax.swing.ListModel
        public JdkComboBoxItem getElementAt(int index) {
            return (JdkComboBoxItem)super.getElementAt(index);
        }
    }

    /** Plain wrapper around an Sdk; toString() shows the SDK name. */
    public static class JdkComboBoxItem {
        private final Sdk myJdk;

        public JdkComboBoxItem(Sdk jdk) {
            myJdk = jdk;
        }

        public Sdk getJdk() {
            return myJdk;
        }

        public String toString() {
            return myJdk.getName();
        }
    }

    /** Synthetic "use the project JDK" entry (wraps no Sdk of its own). */
    public static class ProjectJdkComboBoxItem extends JdkComboBoxItem {
        public ProjectJdkComboBoxItem() {
            super(null);
        }

        public String toString() {
            return ProjectBundle.message("jdk.combo.box.project.item");
        }
    }

    /** Synthetic "no JDK" entry. */
    public static class NoneJdkComboBoxItem extends JdkComboBoxItem {
        public NoneJdkComboBoxItem() {
            super(null);
        }

        public String toString() {
            return ProjectBundle.message("jdk.combo.box.none.item");
        }
    }

    /** Entry for a JDK name that no longer resolves to a configured SDK. */
    private static class InvalidJdkComboBoxItem extends JdkComboBoxItem {
        private final String myName;

        public InvalidJdkComboBoxItem(String name) {
            super(null);
            myName = ProjectBundle.message("jdk.combo.box.invalid.item", name);
        }

        public String toString() {
            return myName;
        }
    }
}
package org.jfree.chart.renderer.category; import java.awt.Color; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.Shape; import java.awt.Stroke; import java.awt.geom.Ellipse2D; import java.awt.geom.Line2D; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.jfree.chart.LegendItem; import org.jfree.chart.axis.CategoryAxis; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.entity.EntityCollection; import org.jfree.chart.event.RendererChangeEvent; import org.jfree.chart.plot.CategoryPlot; import org.jfree.chart.plot.PlotOrientation; import org.jfree.chart.plot.PlotRenderingInfo; import org.jfree.chart.renderer.Outlier; import org.jfree.chart.renderer.OutlierList; import org.jfree.chart.renderer.OutlierListCollection; import org.jfree.chart.util.PaintUtilities; import org.jfree.chart.util.PublicCloneable; import org.jfree.chart.util.RectangleEdge; import org.jfree.chart.util.SerialUtilities; import org.jfree.data.category.CategoryDataset; import org.jfree.data.statistics.BoxAndWhiskerCategoryDataset; /** * A box-and-whisker renderer. This renderer requires a * {@link BoxAndWhiskerCategoryDataset} and is for use with the * {@link CategoryPlot} class. */ public class BoxAndWhiskerRenderer extends AbstractCategoryItemRenderer implements Cloneable, PublicCloneable, Serializable { /** For serialization. */ private static final long serialVersionUID = 632027470694481177L; /** The color used to paint the median line and average marker. */ private transient Paint artifactPaint; /** A flag that controls whether or not the box is filled. */ private boolean fillBox; /** The margin between items (boxes) within a category. 
*/ private double itemMargin; /** * The maximum bar width as percentage of the available space in the plot, * where 0.05 is five percent. */ private double maximumBarWidth; /** * Default constructor. */ public BoxAndWhiskerRenderer() { this.artifactPaint = Color.black; this.fillBox = true; this.itemMargin = 0.20; this.maximumBarWidth = 1.0; setBaseLegendShape(new Rectangle2D.Double(-4.0, -4.0, 8.0, 8.0)); } /** * Returns the paint used to color the median and average markers. * * @return The paint used to draw the median and average markers (never * <code>null</code>). * * @see #setArtifactPaint(Paint) */ public Paint getArtifactPaint() { return this.artifactPaint; } /** * Sets the paint used to color the median and average markers and sends * a {@link RendererChangeEvent} to all registered listeners. * * @param paint the paint (<code>null</code> not permitted). * * @see #getArtifactPaint() */ public void setArtifactPaint(Paint paint) { if (paint == null) { throw new IllegalArgumentException("Null 'paint' argument."); } this.artifactPaint = paint; fireChangeEvent(); } /** * Returns the flag that controls whether or not the box is filled. * * @return A boolean. * * @see #setFillBox(boolean) */ public boolean getFillBox() { return this.fillBox; } /** * Sets the flag that controls whether or not the box is filled and sends a * {@link RendererChangeEvent} to all registered listeners. * * @param flag the flag. * * @see #getFillBox() */ public void setFillBox(boolean flag) { this.fillBox = flag; fireChangeEvent(); } /** * Returns the item margin. This is a percentage of the available space * that is allocated to the space between items in the chart. * * @return The margin. * * @see #setItemMargin(double) */ public double getItemMargin() { return this.itemMargin; } /** * Sets the item margin and sends a {@link RendererChangeEvent} to all * registered listeners. * * @param margin the margin (a percentage). 
* * @see #getItemMargin() */ public void setItemMargin(double margin) { this.itemMargin = margin; fireChangeEvent(); } /** * Returns the maximum bar width as a percentage of the available drawing * space. * * @return The maximum bar width. * * @see #setMaximumBarWidth(double) * * @since 1.0.10 */ public double getMaximumBarWidth() { return this.maximumBarWidth; } /** * Sets the maximum bar width, which is specified as a percentage of the * available space for all bars, and sends a {@link RendererChangeEvent} * to all registered listeners. * * @param percent the maximum Bar Width (a percentage). * * @see #getMaximumBarWidth() * * @since 1.0.10 */ public void setMaximumBarWidth(double percent) { this.maximumBarWidth = percent; fireChangeEvent(); } /** * Returns a legend item for a series. * * @param datasetIndex the dataset index (zero-based). * @param series the series index (zero-based). * * @return The legend item (possibly <code>null</code>). */ public LegendItem getLegendItem(int datasetIndex, int series) { CategoryPlot cp = getPlot(); if (cp == null) { return null; } // check that a legend item needs to be displayed... 
if (!isSeriesVisible(series) || !isSeriesVisibleInLegend(series)) { return null; } CategoryDataset dataset = cp.getDataset(datasetIndex); String label = getLegendItemLabelGenerator().generateLabel(dataset, series); String description = label; String toolTipText = null; if (getLegendItemToolTipGenerator() != null) { toolTipText = getLegendItemToolTipGenerator().generateLabel( dataset, series); } String urlText = null; if (getLegendItemURLGenerator() != null) { urlText = getLegendItemURLGenerator().generateLabel(dataset, series); } Shape shape = lookupLegendShape(series); Paint paint = lookupSeriesPaint(series); Paint outlinePaint = lookupSeriesOutlinePaint(series); Stroke outlineStroke = lookupSeriesOutlineStroke(series); LegendItem result = new LegendItem(label, description, toolTipText, urlText, shape, paint, outlineStroke, outlinePaint); result.setLabelFont(lookupLegendTextFont(series)); Paint labelPaint = lookupLegendTextPaint(series); if (labelPaint != null) { result.setLabelPaint(labelPaint); } result.setDataset(dataset); result.setDatasetIndex(datasetIndex); result.setSeriesKey(dataset.getRowKey(series)); result.setSeriesIndex(series); return result; } /** * Initialises the renderer. This method gets called once at the start of * the process of drawing a chart. * * @param g2 the graphics device. * @param dataArea the area in which the data is to be plotted. * @param plot the plot. * @param rendererIndex the renderer index. * @param info collects chart rendering information for return to caller. * * @return The renderer state. 
*/ public CategoryItemRendererState initialise(Graphics2D g2, Rectangle2D dataArea, CategoryPlot plot, int rendererIndex, PlotRenderingInfo info) { CategoryItemRendererState state = super.initialise(g2, dataArea, plot, rendererIndex, info); // calculate the box width CategoryAxis domainAxis = getDomainAxis(plot, rendererIndex); CategoryDataset dataset = plot.getDataset(rendererIndex); if (dataset != null) { int columns = dataset.getColumnCount(); int rows = dataset.getRowCount(); double space = 0.0; PlotOrientation orientation = plot.getOrientation(); if (orientation == PlotOrientation.HORIZONTAL) { space = dataArea.getHeight(); } else if (orientation == PlotOrientation.VERTICAL) { space = dataArea.getWidth(); } double maxWidth = space * getMaximumBarWidth(); double categoryMargin = 0.0; double currentItemMargin = 0.0; if (columns > 1) { categoryMargin = domainAxis.getCategoryMargin(); } if (rows > 1) { currentItemMargin = getItemMargin(); } double used = space * (1 - domainAxis.getLowerMargin() - domainAxis.getUpperMargin() - categoryMargin - currentItemMargin); if ((rows * columns) > 0) { state.setBarWidth(Math.min(used / (dataset.getColumnCount() * dataset.getRowCount()), maxWidth)); } else { state.setBarWidth(Math.min(used, maxWidth)); } } return state; } /** * Draw a single data item. * * @param g2 the graphics device. * @param state the renderer state. * @param dataArea the area in which the data is drawn. * @param plot the plot. * @param domainAxis the domain axis. * @param rangeAxis the range axis. * @param dataset the data (must be an instance of * {@link BoxAndWhiskerCategoryDataset}). * @param row the row index (zero-based). * @param column the column index (zero-based). * @param pass the pass index. 
*/ public void drawItem(Graphics2D g2, CategoryItemRendererState state, Rectangle2D dataArea, CategoryPlot plot, CategoryAxis domainAxis, ValueAxis rangeAxis, CategoryDataset dataset, int row, int column, int pass) { // do nothing if item is not visible if (!getItemVisible(row, column)) { return; } if (!(dataset instanceof BoxAndWhiskerCategoryDataset)) { throw new IllegalArgumentException( "BoxAndWhiskerRenderer.drawItem() : the data should be " + "of type BoxAndWhiskerCategoryDataset only."); } PlotOrientation orientation = plot.getOrientation(); if (orientation == PlotOrientation.HORIZONTAL) { drawHorizontalItem(g2, state, dataArea, plot, domainAxis, rangeAxis, dataset, row, column); } else if (orientation == PlotOrientation.VERTICAL) { drawVerticalItem(g2, state, dataArea, plot, domainAxis, rangeAxis, dataset, row, column); } } /** * Draws the visual representation of a single data item when the plot has * a horizontal orientation. * * @param g2 the graphics device. * @param state the renderer state. * @param dataArea the area within which the plot is being drawn. * @param plot the plot (can be used to obtain standard color * information etc). * @param domainAxis the domain axis. * @param rangeAxis the range axis. * @param dataset the dataset (must be an instance of * {@link BoxAndWhiskerCategoryDataset}). * @param row the row index (zero-based). * @param column the column index (zero-based). 
*/ public void drawHorizontalItem(Graphics2D g2, CategoryItemRendererState state, Rectangle2D dataArea, CategoryPlot plot, CategoryAxis domainAxis, ValueAxis rangeAxis, CategoryDataset dataset, int row, int column) { BoxAndWhiskerCategoryDataset bawDataset = (BoxAndWhiskerCategoryDataset) dataset; double categoryEnd = domainAxis.getCategoryEnd(column, getColumnCount(), dataArea, plot.getDomainAxisEdge()); double categoryStart = domainAxis.getCategoryStart(column, getColumnCount(), dataArea, plot.getDomainAxisEdge()); double categoryWidth = Math.abs(categoryEnd - categoryStart); double yy = categoryStart; int seriesCount = getRowCount(); int categoryCount = getColumnCount(); if (seriesCount > 1) { double seriesGap = dataArea.getHeight() * getItemMargin() / (categoryCount * (seriesCount - 1)); double usedWidth = (state.getBarWidth() * seriesCount) + (seriesGap * (seriesCount - 1)); // offset the start of the boxes if the total width used is smaller // than the category width double offset = (categoryWidth - usedWidth) / 2; yy = yy + offset + (row * (state.getBarWidth() + seriesGap)); } else { // offset the start of the box if the box width is smaller than // the category width double offset = (categoryWidth - state.getBarWidth()) / 2; yy = yy + offset; } g2.setPaint(getItemPaint(row, column)); Stroke s = getItemStroke(row, column); g2.setStroke(s); RectangleEdge location = plot.getRangeAxisEdge(); Number xQ1 = bawDataset.getQ1Value(row, column); Number xQ3 = bawDataset.getQ3Value(row, column); Number xMax = bawDataset.getMaxRegularValue(row, column); Number xMin = bawDataset.getMinRegularValue(row, column); Shape box = null; if (xQ1 != null && xQ3 != null && xMax != null && xMin != null) { double xxQ1 = rangeAxis.valueToJava2D(xQ1.doubleValue(), dataArea, location); double xxQ3 = rangeAxis.valueToJava2D(xQ3.doubleValue(), dataArea, location); double xxMax = rangeAxis.valueToJava2D(xMax.doubleValue(), dataArea, location); double xxMin = 
rangeAxis.valueToJava2D(xMin.doubleValue(), dataArea, location); double yymid = yy + state.getBarWidth() / 2.0; // draw the upper shadow... g2.draw(new Line2D.Double(xxMax, yymid, xxQ3, yymid)); g2.draw(new Line2D.Double(xxMax, yy, xxMax, yy + state.getBarWidth())); // draw the lower shadow... g2.draw(new Line2D.Double(xxMin, yymid, xxQ1, yymid)); g2.draw(new Line2D.Double(xxMin, yy, xxMin, yy + state.getBarWidth())); // draw the box... box = new Rectangle2D.Double(Math.min(xxQ1, xxQ3), yy, Math.abs(xxQ1 - xxQ3), state.getBarWidth()); if (this.fillBox) { g2.fill(box); } g2.setStroke(getItemOutlineStroke(row, column)); g2.setPaint(getItemOutlinePaint(row, column)); g2.draw(box); } g2.setPaint(this.artifactPaint); double aRadius = 0; // average radius // draw mean - SPECIAL AIMS REQUIREMENT... Number xMean = bawDataset.getMeanValue(row, column); if (xMean != null) { double xxMean = rangeAxis.valueToJava2D(xMean.doubleValue(), dataArea, location); aRadius = state.getBarWidth() / 4; // here we check that the average marker will in fact be visible // before drawing it... if ((xxMean > (dataArea.getMinX() - aRadius)) && (xxMean < (dataArea.getMaxX() + aRadius))) { Ellipse2D.Double avgEllipse = new Ellipse2D.Double(xxMean - aRadius, yy + aRadius, aRadius * 2, aRadius * 2); g2.fill(avgEllipse); g2.draw(avgEllipse); } } // draw median... Number xMedian = bawDataset.getMedianValue(row, column); if (xMedian != null) { double xxMedian = rangeAxis.valueToJava2D(xMedian.doubleValue(), dataArea, location); g2.draw(new Line2D.Double(xxMedian, yy, xxMedian, yy + state.getBarWidth())); } // collect entity and tool tip information... if (state.getInfo() != null && box != null) { EntityCollection entities = state.getEntityCollection(); if (entities != null) { addItemEntity(entities, dataset, row, column, box); } } } /** * Draws the visual representation of a single data item when the plot has * a vertical orientation. * * @param g2 the graphics device. 
* @param state the renderer state. * @param dataArea the area within which the plot is being drawn. * @param plot the plot (can be used to obtain standard color information * etc). * @param domainAxis the domain axis. * @param rangeAxis the range axis. * @param dataset the dataset (must be an instance of * {@link BoxAndWhiskerCategoryDataset}). * @param row the row index (zero-based). * @param column the column index (zero-based). */ public void drawVerticalItem(Graphics2D g2, CategoryItemRendererState state, Rectangle2D dataArea, CategoryPlot plot, CategoryAxis domainAxis, ValueAxis rangeAxis, CategoryDataset dataset, int row, int column) { BoxAndWhiskerCategoryDataset bawDataset = (BoxAndWhiskerCategoryDataset) dataset; double categoryEnd = domainAxis.getCategoryEnd(column, getColumnCount(), dataArea, plot.getDomainAxisEdge()); double categoryStart = domainAxis.getCategoryStart(column, getColumnCount(), dataArea, plot.getDomainAxisEdge()); double categoryWidth = categoryEnd - categoryStart; double xx = categoryStart; int seriesCount = getRowCount(); int categoryCount = getColumnCount(); if (seriesCount > 1) { double seriesGap = dataArea.getWidth() * getItemMargin() / (categoryCount * (seriesCount - 1)); double usedWidth = (state.getBarWidth() * seriesCount) + (seriesGap * (seriesCount - 1)); // offset the start of the boxes if the total width used is smaller // than the category width double offset = (categoryWidth - usedWidth) / 2; xx = xx + offset + (row * (state.getBarWidth() + seriesGap)); } else { // offset the start of the box if the box width is smaller than the // category width double offset = (categoryWidth - state.getBarWidth()) / 2; xx = xx + offset; } double yyAverage = 0.0; double yyOutlier; Paint itemPaint = getItemPaint(row, column); g2.setPaint(itemPaint); Stroke s = getItemStroke(row, column); g2.setStroke(s); double aRadius = 0; // average radius RectangleEdge location = plot.getRangeAxisEdge(); Number yQ1 = bawDataset.getQ1Value(row, column); 
Number yQ3 = bawDataset.getQ3Value(row, column); Number yMax = bawDataset.getMaxRegularValue(row, column); Number yMin = bawDataset.getMinRegularValue(row, column); Shape box = null; if (yQ1 != null && yQ3 != null && yMax != null && yMin != null) { double yyQ1 = rangeAxis.valueToJava2D(yQ1.doubleValue(), dataArea, location); double yyQ3 = rangeAxis.valueToJava2D(yQ3.doubleValue(), dataArea, location); double yyMax = rangeAxis.valueToJava2D(yMax.doubleValue(), dataArea, location); double yyMin = rangeAxis.valueToJava2D(yMin.doubleValue(), dataArea, location); double xxmid = xx + state.getBarWidth() / 2.0; // draw the upper shadow... g2.draw(new Line2D.Double(xxmid, yyMax, xxmid, yyQ3)); g2.draw(new Line2D.Double(xx, yyMax, xx + state.getBarWidth(), yyMax)); // draw the lower shadow... g2.draw(new Line2D.Double(xxmid, yyMin, xxmid, yyQ1)); g2.draw(new Line2D.Double(xx, yyMin, xx + state.getBarWidth(), yyMin)); // draw the body... box = new Rectangle2D.Double(xx, Math.min(yyQ1, yyQ3), state.getBarWidth(), Math.abs(yyQ1 - yyQ3)); if (this.fillBox) { g2.fill(box); } g2.setStroke(getItemOutlineStroke(row, column)); g2.setPaint(getItemOutlinePaint(row, column)); g2.draw(box); } g2.setPaint(this.artifactPaint); // draw mean - SPECIAL AIMS REQUIREMENT... Number yMean = bawDataset.getMeanValue(row, column); if (yMean != null) { yyAverage = rangeAxis.valueToJava2D(yMean.doubleValue(), dataArea, location); aRadius = state.getBarWidth() / 4; // here we check that the average marker will in fact be visible // before drawing it... if ((yyAverage > (dataArea.getMinY() - aRadius)) && (yyAverage < (dataArea.getMaxY() + aRadius))) { Ellipse2D.Double avgEllipse = new Ellipse2D.Double(xx + aRadius, yyAverage - aRadius, aRadius * 2, aRadius * 2); g2.fill(avgEllipse); g2.draw(avgEllipse); } } // draw median... 
Number yMedian = bawDataset.getMedianValue(row, column); if (yMedian != null) { double yyMedian = rangeAxis.valueToJava2D(yMedian.doubleValue(), dataArea, location); g2.draw(new Line2D.Double(xx, yyMedian, xx + state.getBarWidth(), yyMedian)); } // draw yOutliers... double maxAxisValue = rangeAxis.valueToJava2D( rangeAxis.getUpperBound(), dataArea, location) + aRadius; double minAxisValue = rangeAxis.valueToJava2D( rangeAxis.getLowerBound(), dataArea, location) - aRadius; g2.setPaint(itemPaint); // draw outliers double oRadius = state.getBarWidth() / 3; // outlier radius List outliers = new ArrayList(); OutlierListCollection outlierListCollection = new OutlierListCollection(); // From outlier array sort out which are outliers and put these into a // list If there are any farouts, set the flag on the // OutlierListCollection List yOutliers = bawDataset.getOutliers(row, column); if (yOutliers != null) { for (int i = 0; i < yOutliers.size(); i++) { double outlier = ((Number) yOutliers.get(i)).doubleValue(); Number minOutlier = bawDataset.getMinOutlier(row, column); Number maxOutlier = bawDataset.getMaxOutlier(row, column); Number minRegular = bawDataset.getMinRegularValue(row, column); Number maxRegular = bawDataset.getMaxRegularValue(row, column); if (outlier > maxOutlier.doubleValue()) { outlierListCollection.setHighFarOut(true); } else if (outlier < minOutlier.doubleValue()) { outlierListCollection.setLowFarOut(true); } else if (outlier > maxRegular.doubleValue()) { yyOutlier = rangeAxis.valueToJava2D(outlier, dataArea, location); outliers.add(new Outlier(xx + state.getBarWidth() / 2.0, yyOutlier, oRadius)); } else if (outlier < minRegular.doubleValue()) { yyOutlier = rangeAxis.valueToJava2D(outlier, dataArea, location); outliers.add(new Outlier(xx + state.getBarWidth() / 2.0, yyOutlier, oRadius)); } Collections.sort(outliers); } // Process outliers. 
Each outlier is either added to the // appropriate outlier list or a new outlier list is made for (Iterator iterator = outliers.iterator(); iterator.hasNext();) { Outlier outlier = (Outlier) iterator.next(); outlierListCollection.add(outlier); } for (Iterator iterator = outlierListCollection.iterator(); iterator.hasNext();) { OutlierList list = (OutlierList) iterator.next(); Outlier outlier = list.getAveragedOutlier(); Point2D point = outlier.getPoint(); if (list.isMultiple()) { drawMultipleEllipse(point, state.getBarWidth(), oRadius, g2); } else { drawEllipse(point, oRadius, g2); } } // draw farout indicators if (outlierListCollection.isHighFarOut()) { drawHighFarOut(aRadius / 2.0, g2, xx + state.getBarWidth() / 2.0, maxAxisValue); } if (outlierListCollection.isLowFarOut()) { drawLowFarOut(aRadius / 2.0, g2, xx + state.getBarWidth() / 2.0, minAxisValue); } } // collect entity and tool tip information... if (state.getInfo() != null && box != null) { EntityCollection entities = state.getEntityCollection(); if (entities != null) { addItemEntity(entities, dataset, row, column, box); } } } /** * Draws a dot to represent an outlier. * * @param point the location. * @param oRadius the radius. * @param g2 the graphics device. */ private void drawEllipse(Point2D point, double oRadius, Graphics2D g2) { Ellipse2D dot = new Ellipse2D.Double(point.getX() + oRadius / 2, point.getY(), oRadius, oRadius); g2.draw(dot); } /** * Draws two dots to represent the average value of more than one outlier. * * @param point the location * @param boxWidth the box width. * @param oRadius the radius. * @param g2 the graphics device. 
*/ private void drawMultipleEllipse(Point2D point, double boxWidth, double oRadius, Graphics2D g2) { Ellipse2D dot1 = new Ellipse2D.Double(point.getX() - (boxWidth / 2) + oRadius, point.getY(), oRadius, oRadius); Ellipse2D dot2 = new Ellipse2D.Double(point.getX() + (boxWidth / 2), point.getY(), oRadius, oRadius); g2.draw(dot1); g2.draw(dot2); } /** * Draws a triangle to indicate the presence of far-out values. * * @param aRadius the radius. * @param g2 the graphics device. * @param xx the x coordinate. * @param m the y coordinate. */ private void drawHighFarOut(double aRadius, Graphics2D g2, double xx, double m) { double side = aRadius * 2; g2.draw(new Line2D.Double(xx - side, m + side, xx + side, m + side)); g2.draw(new Line2D.Double(xx - side, m + side, xx, m)); g2.draw(new Line2D.Double(xx + side, m + side, xx, m)); } /** * Draws a triangle to indicate the presence of far-out values. * * @param aRadius the radius. * @param g2 the graphics device. * @param xx the x coordinate. * @param m the y coordinate. */ private void drawLowFarOut(double aRadius, Graphics2D g2, double xx, double m) { double side = aRadius * 2; g2.draw(new Line2D.Double(xx - side, m - side, xx + side, m - side)); g2.draw(new Line2D.Double(xx - side, m - side, xx, m)); g2.draw(new Line2D.Double(xx + side, m - side, xx, m)); } /** * Tests this renderer for equality with an arbitrary object. * * @param obj the object (<code>null</code> permitted). * * @return <code>true</code> or <code>false</code>. 
*/ public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof BoxAndWhiskerRenderer)) { return false; } if (!super.equals(obj)) { return false; } BoxAndWhiskerRenderer that = (BoxAndWhiskerRenderer) obj; if (!PaintUtilities.equal(this.artifactPaint, that.artifactPaint)) { return false; } if (this.fillBox != that.fillBox) { return false; } if (this.itemMargin != that.itemMargin) { return false; } if (this.maximumBarWidth != that.maximumBarWidth) { return false; } return true; } /** * Provides serialization support. * * @param stream the output stream. * * @throws IOException if there is an I/O error. */ private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); SerialUtilities.writePaint(this.artifactPaint, stream); } /** * Provides serialization support. * * @param stream the input stream. * * @throws IOException if there is an I/O error. * @throws ClassNotFoundException if there is a classpath problem. */ private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); this.artifactPaint = SerialUtilities.readPaint(stream); } }
package language;

import java.io.InputStream;
import java.util.AbstractCollection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.TreeMap;
import utilities.functions.Utilities;

/**
 * The {@code Dictionary} class retrieves all parseable tokens from a given file
 * and stores them in a dictionary structure, bucketed by word difficulty.
 * Various operations can be performed on this map, and items may be added or
 * removed at any time.
 *
 * <p> Any tokens with alphabetical characters contained in the
 * {@code Dictionary} are converted to lowercase.
 *
 * @author Oliver Abdulrahim
 */
public class Dictionary extends AbstractCollection<Word> {

    /**
     * Stores the resource location of the default dictionary.
     *
     * <p> NOTE(review): an {@code InputStream} can be consumed only once, so
     * only the first {@code Dictionary} constructed from this shared constant
     * will see any words — confirm whether constructing multiple no-arg
     * dictionaries is expected.
     */
    public static final InputStream DEFAULT_STREAM = Dictionary.class
            .getResourceAsStream("/resources/dictionary.txt");

    /**
     * Contains this object's word dictionary. This map is keyed and sorted by
     * word difficulty.
     */
    private final Map<Difficulty, List<Word>> words;

    /**
     * Stores the difficulty of the last word that was successfully retrieved
     * from this object, or {@link Difficulty#DEFAULT} if there has not been a
     * word previously retrieved from this object or if there was an error in
     * attempting to do so.
     */
    private Difficulty difficultyCache;

    /**
     * Instantiates a new, empty {@code Dictionary} using the default word
     * repository.
     *
     * @see #DEFAULT_STREAM The <code>InputStream</code> containing the word
     *      repository.
     */
    public Dictionary() {
        this(DEFAULT_STREAM);
    }

    /**
     * Instantiates a new {@code Dictionary} with the specified {@code String}
     * path.
     *
     * @param path The {@code String} path for the {@code File} to read into
     *        this object's map.
     */
    public Dictionary(String path) {
        this(Dictionary.class.getResourceAsStream(path));
    }

    /**
     * Instantiates a new {@code Dictionary} and fills this object's map with
     * the contents of the specified stream, parsed line by line.  A
     * {@code null} stream yields an empty dictionary.
     *
     * @param target The stream to read into this object's map.
     */
    public Dictionary(InputStream target) {
        words = new TreeMap<>();
        difficultyCache = Difficulty.DEFAULT;
        if (target != null) {
            constructDictionary(target);
        }
    }

    /**
     * Each token in the given {@code InputStream} is parsed and placed into
     * the map in a line-by-line basis using a {@code Scanner} implementation.
     *
     * @param target The stream to read into this object's map.
     */
    private void constructDictionary(InputStream target) {
        try (Scanner input = new Scanner(target)) {
            List<Word> easyWords = new ArrayList<>();
            List<Word> mediumWords = new ArrayList<>();
            List<Word> hardWords = new ArrayList<>();
            while (input.hasNext()) {
                Word w = new Word(input.nextLine());
                switch (judgeDifficulty(w)) {
                    case EASY:
                        easyWords.add(w);
                        break;
                    case MEDIUM:
                        mediumWords.add(w);
                        break;
                    case HARD:
                        hardWords.add(w);
                        break;
                    // No default case needed: judgeDifficulty(Word) always
                    // returns one of the difficulties handled above.
                }
            }
            words.put(Difficulty.EASY, easyWords);
            words.put(Difficulty.MEDIUM, mediumWords);
            words.put(Difficulty.HARD, hardWords);
        }
    }

    /**
     * Returns an enumerated property depending on the difficulty of the given
     * {@code Word}.
     *
     * @param w The word to judge the difficulty of.
     * @return An enumerated property depending on the difficulty of the given
     *         {@code Word}.
     */
    public static Difficulty judgeDifficulty(Word w) {
        if (isEasyWord(w)) {
            return Difficulty.EASY;
        }
        else if (isMediumWord(w)) {
            return Difficulty.MEDIUM;
        }
        else {
            return Difficulty.HARD;
        }
    }

// Collection operations

    /**
     * Returns {@code true} if this dictionary contains no words, {@code false}
     * otherwise.
     *
     * @return {@code true} if this object contains no words, {@code false}
     *         otherwise.
     */
    @Override
    public boolean isEmpty() {
        return size() == 0;
    }

    /**
     * Returns {@code true} if this dictionary contains the given word,
     * {@code false} otherwise.
     *
     * <p> Equality is determined with {@code equals()}, per the
     * {@link java.util.Collection#contains(Object)} contract.  (The previous
     * implementation compared by reference identity, so logically equal words
     * were reported absent.  If {@code Word} does not override
     * {@code equals()}, this still degrades gracefully to identity.)
     *
     * @param o The object to search for in this object.
     * @return {@code true} if this object contains the given word,
     *         {@code false} otherwise.
     */
    @Override
    public boolean contains(Object o) {
        if (o instanceof Word) {
            final Word w = (Word) o;
            return getAllWords().stream().anyMatch(w::equals);
        }
        return false;
    }

    /**
     * Returns an iterator over the words contained within this dictionary,
     * ordered by word difficulty.
     *
     * <p> The iterator operates on an unmodifiable snapshot of this
     * dictionary's contents: it does not support {@code remove()} and does
     * not reflect later changes to the dictionary.
     *
     * @return An iterator over the words contained within this object.
     */
    @Override
    public Iterator<Word> iterator() {
        return getAllWords().iterator();
    }

    /**
     * Inserts into this dictionary the given {@code Word}, taking into account
     * its difficulty, returning {@code true} if the given word was inserted
     * into this object, {@code false} otherwise.
     *
     * <p> Implementation note: this mutates the mutable backing list directly.
     * The previous implementation called {@link #getListOf(Difficulty)}, which
     * (a) returns an unmodifiable view, so the subsequent {@code add} always
     * threw {@code UnsupportedOperationException}, and (b) throws
     * {@code NoSuchWordException} for an empty bucket, making it impossible to
     * add the first word of a given difficulty.
     *
     * @param w The {@code Word} to add.
     * @return {@code true} if the given word was inserted into this object,
     *         {@code false} otherwise.
     */
    @Override
    public boolean add(Word w) {
        List<Word> peers = words.computeIfAbsent(judgeDifficulty(w),
                d -> new ArrayList<>());
        return peers.add(w);
    }

    /**
     * Removes a given word from this dictionary, returning {@code true} if the
     * word was removed, {@code false} otherwise (i.e. the given word does not
     * exist in this dictionary).
     *
     * <p> Implementation note: this operates on the mutable backing list, not
     * the unmodifiable view returned by {@link #getListOf(Difficulty)} (which
     * made removal always throw {@code UnsupportedOperationException}), and it
     * returns {@code false} rather than throwing when no bucket exists, per
     * the {@link java.util.Collection#remove(Object)} contract.
     *
     * @param o The object to remove from this dictionary.
     * @return {@code true} if the word was removed, {@code false} otherwise.
     */
    @Override
    public boolean remove(Object o) {
        if (o instanceof Word) {
            final Word w = (Word) o;
            List<Word> peers = words.get(judgeDifficulty(w));
            return peers != null && peers.remove(w);
        }
        return false;
    }

    /**
     * Returns the amount of words currently contained within this object.
     *
     * @return The amount of words currently contained within this object.
     */
    @Override
    public int size() {
        return words.values()
                    .stream()
                    .mapToInt(List::size)
                    .sum();
    }

    /**
     * Returns an unmodifiable list containing all words of the given
     * difficulty that are contained within this dictionary.
     *
     * @param difficulty The difficulty of list to retrieve.
     * @return An unmodifiable list containing all words of the given
     *         difficulty.
     * @throws NoSuchWordException If this dictionary has no words of the given
     *         difficulty.
     */
    public List<Word> getListOf(Difficulty difficulty) {
        if (!hasWordOf(difficulty)) {
            difficultyCache = Difficulty.DEFAULT;
            throw new NoSuchWordException("Could not retrieve word.", difficulty);
        }
        return Collections.unmodifiableList(words.get(difficulty));
    }

    /**
     * Returns a list containing all words of the previously used difficulty.
     *
     * @return A cache of the previously used difficulty list.
     */
    public List<Word> cacheList() {
        // getListOf() already returns an unmodifiable view; no need to wrap
        // it a second time.
        return getListOf(difficultyCache);
    }

    /**
     * Returns an unmodifiable list containing all the words stored within this
     * dictionary.
     *
     * @return A list containing all the words of this dictionary.
     */
    public List<Word> getAllWords() {
        List<Word> allWords = new ArrayList<>();
        words.values().forEach(allWords::addAll);
        return Collections.unmodifiableList(allWords);
    }

// Difficulty tuning variables

    /**
     * Minimum amount of vowels for an easy word.
     */
    private static final int EASY_VOWEL_THRESHOLD = 4;

    /**
     * Minimum length for an easy word.
     */
    private static final int EASY_LENGTH_THRESHOLD = 7;

    /**
     * Minimum amount of vowels for a medium word. This value is always less
     * than the {@link #EASY_VOWEL_THRESHOLD}.
     */
    private static final int MEDIUM_VOWEL_THRESHOLD = 3;

    /**
     * Minimum length for a medium word. This value is always less than the
     * {@link #EASY_LENGTH_THRESHOLD}.
     */
    private static final int MEDIUM_LENGTH_THRESHOLD = 5;

    /**
     * Determines if a given word is "easy" in difficulty. Words that are longer
     * in length and have more vowels are generally considered easy words.
     *
     * @param w The {@code Word} to test for difficulty.
     * @return {@code true} if the given word is considered to be "easy" in
     *         difficulty.
     */
    public static boolean isEasyWord(Word w) {
        return w.vowelCount() >= EASY_VOWEL_THRESHOLD
                && w.length() >= EASY_LENGTH_THRESHOLD;
    }

    /**
     * Determines if a given word is "medium" in difficulty.
     *
     * @param w The {@code Word} to test for difficulty.
     * @return {@code true} if the given word is considered to be "medium" in
     *         difficulty.
     */
    public static boolean isMediumWord(Word w) {
        return w.vowelCount() >= MEDIUM_VOWEL_THRESHOLD
                && w.length() >= MEDIUM_LENGTH_THRESHOLD;
    }

    /**
     * Determines if a given word is "hard" in difficulty. Words that are short
     * in length and contain mostly consonants are generally considered hard
     * words.
     *
     * @param w The {@code Word} to test for difficulty.
     * @return {@code true} if the given word is considered to be "hard" in
     *         difficulty.
     */
    public static boolean isHardWord(Word w) {
        return !isEasyWord(w) && !isMediumWord(w);
    }

    /**
     * Returns a randomly selected word of easy difficulty mapped to this
     * object.
     *
     * @return A randomly selected word of easy difficulty.
     */
    public Word getEasyWord() {
        return getWordOf(Difficulty.EASY);
    }

    /**
     * Returns a randomly selected word of medium difficulty mapped to this
     * object.
     *
     * @return A randomly selected word of medium difficulty.
     */
    public Word getMediumWord() {
        return getWordOf(Difficulty.MEDIUM);
    }

    /**
     * Returns a randomly selected word of hard difficulty mapped to this
     * object.
     *
     * @return A randomly selected word of hard difficulty.
     */
    public Word getHardWord() {
        return getWordOf(Difficulty.HARD);
    }

    /**
     * Selects and returns a {@code Word} from this object.
     *
     * <p> NOTE(review): {@code findAny()} on a sequential stream is not
     * random — in practice it yields the first element of
     * {@code Difficulty.ALL} — and the unchecked {@code get()} throws if that
     * collection is empty.  Confirm whether truly random difficulty selection
     * is intended here before changing the behavior.
     *
     * @return A {@code Word} from this object.
     */
    public Word getAnyWord() {
        Difficulty d = Difficulty.ALL
                .stream()
                .findAny()
                .get();
        difficultyCache = d;
        return getWordOf(d);
    }

    /**
     * Returns {@code true} if this object contains at least one word of the
     * given difficulty, {@code false} otherwise.
     *
     * @param difficulty The difficulty to test.
     * @return {@code true} if this object contains at least one word of the
     *         given difficulty, {@code false} otherwise.
     */
    private boolean hasWordOf(Difficulty difficulty) {
        return words.containsKey(difficulty) && !words.get(difficulty).isEmpty();
    }

    /**
     * Returns a randomly selected word of the given difficulty.
     *
     * <p> In the case that there are no elements of the specified difficulty
     * contained within this object, this method throws a
     * {@code NoSuchWordException}.
     *
     * @param difficulty The difficulty of the word to return.
     * @return A randomly selected word of the given difficulty.
     * @throws NoSuchWordException If there are no words of the given
     *         difficulty.
     */
    protected Word getWordOf(Difficulty difficulty) {
        if (hasWordOf(difficulty)) {
            List<Word> wordsOf = getListOf(difficulty);
            difficultyCache = difficulty;
            return wordsOf.get(Utilities.r.nextInt(wordsOf.size()));
        }
        throw new NoSuchWordException("Could not retrieve word.", difficulty);
    }

// Utility methods

    /**
     * Returns a formatted {@code String} containing this object's map of
     * words.
     *
     * @return A formatted {@code String} containing this object's map.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("Dictionary {\n");
        for (Difficulty d : Difficulty.ALL) {
            if (words.containsKey(d)) {
                sb.append("    ")
                  .append(d)
                  .append(" = ")
                  .append(words.get(d))
                  .append('\n');
            }
        }
        sb.append('}');
        return sb.toString();
    }

}
package magpie.data; import java.io.BufferedReader; import java.io.FileReader; import magpie.data.utilities.DatasetHelper; import weka.core.*; import java.util.*; import javax.naming.OperationNotSupportedException; import magpie.attributes.evaluators.BaseAttributeEvaluator; import magpie.attributes.expansion.BaseAttributeExpander; import magpie.attributes.generators.BaseAttributeGenerator; import magpie.data.utilities.DatasetOutput; import magpie.data.utilities.filters.BaseDatasetFilter; import magpie.data.utilities.generators.BaseEntryGenerator; import magpie.data.utilities.modifiers.BaseDatasetModifier; import magpie.optimization.rankers.BaseEntryRanker; import static magpie.user.CommandHandler.instantiateClass; import static magpie.user.CommandHandler.printImplmentingClasses; import magpie.utility.UtilityOperations; import magpie.utility.interfaces.Commandable; import magpie.utility.interfaces.Options; import magpie.utility.interfaces.Printable; import magpie.utility.interfaces.Savable; import org.apache.commons.collections.Predicate; import weka.core.converters.ArffLoader; /** * Provides a basic storage container for data-mining tasks. Must be filled with * entries that are subclasses of BaseEntry * * <p> * To implement a new Dataset, you first need to create an extension of * {@linkplain BaseEntry} that represents the new kind of data. Then, you need * to overload the following operations: * * <ul> * <li>{@linkplain #getEntry(int) } - Might be useful to overload with an * operation that returns entry type associated with this model * <li>{@linkplain #addEntry(java.lang.String) } - Call the constructor to the * associated entry type * <li>{@linkplain #calculateAttributes() } - Compute any new attributes for for * class * <li>{@linkplain #emptyClone() } - Create clone of dataset. Cloning entries * is handled in {@linkplain #clone() }, which does not need to be modified. 
* </ul> * * <usage><p> * <b>Usage</b>: *No options to set*</usage> * * <p> * <b><u>Implemented Commands:</u></b> * * <command><p> * <b>&lt;output> = clone [-empty]</b> - Create a copy of this dataset * <br><pr><i>-empty</i>: Do not copy entries from dataset into clone * </command> * * <command><p><b>combine $&lt;dataset&gt;</b> - Add all entries from another dataset * <br><pr><i>dataset</i>: Dataset to merge with this one. It will remain unchanged. * </command> * * <command><p> * <b>filter &lt;include|exclude> &lt;method> [&lt;options...>]</b> - Run * dataset through a filter * <br><pr><i>include|exclude</i>: Whether to include/exclude only entries that * pass the filter * <br><pr><i>method</i>: Filtering method. Name of a * {@linkplain BaseDatasetFilter} ("?" to print available methods) * <br><pr><i>options...</i>: Options for the filter</command> * * <command><p> * <b>generate &lt;method&gt; [&gt;options&lt;]</b> - Generate new entries * <br><pr><i>method</i>: Name of a {@linkplain BaseEntryGenerator}. ("?" for * options) * <br><pr><i>options</i>: Any options for the entry generator</command> * * <command><p> * <b>modify &lt;method> [&lt;options>]</b> - Modify the dataset * <br><pr><i>method</i>: How to modify dataset. Name of a * {@linkplain BaseDatasetModifier}. ("?" 
to print available methods) * <br><pr><i>options</i>: Any options for the dataset</command> * * <command><p> * <b>import &lt;filename> [&lt;options...>]</b> - Import data by reading a file * <br><pr><i>filename</i>: Name of file to import data from * <br><pr><i>options...</i>: Any options used when parsing this dataset * (specific to type of Dataset)</command> * * <command><p> * <b>rank &lt;number> &lt;maximum|minimum> &lt;measured|predicted> &lt;method> * [&lt;options>]</b> - Print the top ranked entries based by some measure * <br><pr><i>number</i>: Number of top entries to print * <br><pr><i>maximum|minimum</i>: Whether to print entries with the largest or * smallest objection function * <br><pr><i>measured|predicted</i>: Whether to use the measured or predicted * values when calculation * <br><pr><i>method</i>: Object function used to rank entries. Name of a * {@link BaseEntryRanker} ("?" for available methods) * <br><pr><i>options...</i>: Any options for the objective function</command> * * <command><p> * <b>&lt;output> = split &lt;number|fraction></b> - Randomly select and remove * entries from dataset * <br><pr><i>number|fraction</i>: Either the fraction or number of entries to * be removed * <br><pr><i>output</i>: New dataset containing randomly selected entries that * were in this dataset</command> * * <command><p> * <b>&lt;output> = subset &lt;number|fraction></b> - Generate a random subset * from this dataset * <br><pr><i>number|fraction</i>: Either the fraction or number of entries to * select * <br><pr><i>output</i>: New dataset containing random selection from this * dataset</command> * * <command><p> * <b>attributes</b> - Print all attributes</command> * * <command><p> * <b>attributes expander add &lt;method> [&lt;options...>]</b> - Add an * attribute expander to be run after generating attributes * <br><pr><i>method</i>: How to expand attributes. Name of a * {@linkplain BaseAttributeExpander} ("?" 
to print available methods) * <br><pr><i>options...</i>: Any options for the expansion method These * expanders are designed to create new attributes based on existing * ones.</command> * * <command><p> * <b>attributes expander clear</b> - Clear the current list of attribute * expanders</command> * * <command><p> * <b>attributes expander run</b> - Run the currently-defined list of attribute * expanders</command> * * <command><p> * <b>attributes generators add &lt;method> [&lt;options...>]</b> - Add an * attribute generators to create additional attributes * <br><pr><i>method</i>: New generation method. Name of a * {@linkplain BaseAttributeGenerator} ("?" to print available methods) * <br><pr><i>options...</i>: Any options for the generator method These * expanders are designed to create new attributes tailored for a specific * application.</command> * * <command><p> * <b>attributes generators run</b> - Run the currently-defined list of * attribute expanders</command> * * <command><p> * <b>attributes generators clear</b> - Clear the current list of attribute * generators</command> * * <command><p> * <b>attributes generate</b> - Generate attributes for each entry</command> * * <command><p> * <b>attributes rank &lt;number> &lt;method> [&lt;options...&gt;]</b> - Rank * attributes based on predictive power * <br><pr><i>number</i>: Number of top attributes to print * <br><pr><i>method</i>: Method used to rank attributes. Name of a * {@linkplain BaseAttributeEvaluator} ("?" to print available methods) * <br><pr><i>options...</i>: Options for the evaluation method.</command> * * <p> * <b><u>Implemented Print Commands:</u></b> * * <print><p> * <b>details</b> - Print details about this class</print> * * <print><p> * <b>dist</b> - Print distribution of entries between known classes</print> * * <p> * <b><u>Implemented Save Formats:</u></b> TBD * * <save><p> * <b>csv</b> - Comma-separated value format. 
* <br>The value of each attribute and the measured class variable, if * defined.</save> * * <save><p> * <b>arff</b> - Weka's ARFF format. * <br>Requires that a measured value is available for the class variable of * each entry.</save> * * <save><p> * <b>stats</b> - Writes predicted and measured class variables. * <br>This is intended to allow an external program to evaluate model * performance.</save> * * @author Logan Ward * @version 0.1 */ public class Dataset extends java.lang.Object implements java.io.Serializable, java.lang.Cloneable, Printable, Savable, Options, Commandable { /** * Names of attributes that describe each entry */ protected ArrayList<String> AttributeName; /** * Names of the class(s) of each entry */ private String[] ClassName; /** * Internal array that stores entries */ protected ArrayList<BaseEntry> Entries; /** * Tools to generate new attributes based on existing ones */ protected List<BaseAttributeExpander> Expanders = new LinkedList<>(); /** * Tools to generate special-purpose attributes */ protected List<BaseAttributeGenerator> Generators = new LinkedList<>(); /** * Read the state from file using serialization * * @param filename Filename for input * @return Dataset stored in that file * @throws java.lang.Exception */ public static Dataset loadState(String filename) throws Exception { return (Dataset) UtilityOperations.loadState(filename); } /** * Generate a blank dataset */ public Dataset() { this.ClassName = new String[]{"Class"}; this.AttributeName = new ArrayList<>(); this.Entries = new ArrayList<>(); } ; /** Create a Dataset that containing the same entries as another * @param AttributeName Attribute names to use * @param ClassName Name(s) of class variable * @param Entries Entries to be stored */ public Dataset(ArrayList<String> AttributeName, String[] ClassName, ArrayList<BaseEntry> Entries) { this.AttributeName = AttributeName; this.ClassName = ClassName.clone(); this.Entries = new ArrayList<>(Entries); } /** * Create an empty 
dataset with the same attributes names as another * * @param AttributeName Attribute names * @param ClassName Name(s) of class variable */ public Dataset(ArrayList<String> AttributeName, String[] ClassName) { this.AttributeName = AttributeName; this.ClassName = ClassName; this.Entries = new ArrayList<>(); } @Override public void setOptions(List<Object> Options) throws Exception { /* Nothing to do */ } @Override public String printUsage() { return "Usage: *No Options*"; } /** * Creates a new instance of this dataset, and clones of each entry. * @return Clone of this dataset */ @Override @SuppressWarnings("CloneDeclaresCloneNotSupported") public Dataset clone() { Dataset copy = emptyClone(); // Make unique copies of the entries copy.Entries = new ArrayList<>(NEntries()); Iterator<BaseEntry> iter = Entries.iterator(); while (iter.hasNext()) { copy.addEntry(iter.next().clone()); } return copy; } /** * Creates a new instance with the same class and attribute names, but * without any entries. * * @return Dataset with same properties, no entries */ public Dataset emptyClone() { Dataset copy; try { copy = (Dataset) super.clone(); } catch (CloneNotSupportedException c) { throw new Error(c); } copy.AttributeName = new ArrayList<>(AttributeName); copy.ClassName = ClassName.clone(); copy.Expanders = new LinkedList<>(Expanders); copy.Generators = new LinkedList<>(Generators); // Make unique copies of the entries copy.Entries = new ArrayList<>(); return copy; } /** * Get a dataset that only contains entries with a measured class variable. * * @return Dataset with all entries that have a measured class */ public Dataset getTrainingExamples() { Dataset output = emptyClone(); for (BaseEntry entry : Entries) { if (entry.hasMeasurement()) { output.addEntry(entry); } } return output; } /** * Clear out all entries */ public void clearData() { this.Entries.clear(); } /** * Add a new tool to expand the number of attributes for this dataset. 
* * @param expander New expander */ public void addAttribueExpander(BaseAttributeExpander expander) { Expanders.add(expander); } /** * Reset the list of attribute expanders */ public void clearAttributeExpanders() { Expanders.clear(); } /** * Get a copy of the list of currently-employed attribute expanders. * * @return List of attribute expanders * @see * #addAttribueExpander(magpie.attributes.expansion.BaseAttributeExpander) */ public List<BaseAttributeExpander> getAttributeExpanders() { return new LinkedList<>(Expanders); } /** * Expand the list of attributes using the currently-set list of attribute * expanders. * * @see #getAttributeExpanders() */ public void runAttributeExpanders() { for (BaseAttributeExpander expander : Expanders) { expander.expand(this); } } /** * Add a new tool to generate additional attributes * * @param generator New generator */ public void addAttribueGenerator(BaseAttributeGenerator generator) { Generators.add(generator); } /** * Reset the list of attribute generators */ public void clearAttributeGenerators() { Generators.clear(); } /** * Get a copy of the list of currently-employed attribute generators. * * @return List of attribute expanders * @see * #addAttribueExpander(magpie.attributes.expansion.BaseAttributeExpander) */ public List<BaseAttributeGenerator> getAttributeGenerators() { return new LinkedList<>(Generators); } /** * Expand the list of attributes using the currently-set list of attribute * generators. 
* * @throws java.lang.Exception * @see #getAttributeGenerators() */ public void runAttributeGenerators() throws Exception { for (BaseAttributeGenerator generator : Generators) { generator.addAttributes(this); } } /** * Generate attributes for this dataset * * @throws java.lang.Exception If any error is encountered */ final public void generateAttributes() throws Exception { // First things first, clear out old data AttributeName.clear(); for (BaseEntry e : Entries) { e.clearAttributes(); } // Now compute attributes calculateAttributes(); // Run generators runAttributeGenerators(); // Run expanders runAttributeExpanders(); // Reduce memory footprint, where possible finalizeGeneration(); } /** * Perform attribute calculation. Should also store names in * {@linkplain #AttributeName}. * * @throws Exception */ protected void calculateAttributes() throws Exception { throw new OperationNotSupportedException("Dataset does not support attribute generation"); } /** * @return Names of all attributes */ public String[] getAttributeNames() { return AttributeName.toArray(new String[0]); } /** * Get name of a specific attribute * * @param index Attribute number * @return Name of that attribute */ public String getAttributeName(int index) { return AttributeName.get(index); } /** * Set the names of each attributes. * * <p> * NOTE: This will not effect the number of attributes of each entry. Make * sure to update those if needed! * * @param attributeNames */ public void setAttributeNames(List<String> attributeNames) { AttributeName.clear(); AttributeName.addAll(attributeNames); } /** * Get index of a certain attribute * * @param Name Name of desired attribute * @return Index of that attribute (-1 if it does not exist) */ public int getAttributeIndex(String Name) { return AttributeName.indexOf(Name); } /** * Imports data from a text file. 
Expected format for file: * <p> * Attribute1Name, Attribute2Name, ..., AttributeNName, Class<br> * Attribute1, Attribute2, ..., AttributeN, ClassVariable * * @param filename Path to data file * @param options Any options used to control import * @throws java.lang.Exception If text import fails */ public void importText(String filename, Object[] options) throws Exception { // Open the file BufferedReader fp = new BufferedReader(new FileReader(filename)); // Clear out old data AttributeName.clear(); Entries.clear(); // If the file is a Weka arff if (filename.toLowerCase().contains("arff")) { // Import an ARFF file Instances arff = new ArffLoader.ArffReader(fp).getData(); // Determine which attribute is the class index. If none is // specified in the ARFF, assume it is the last one int classIndex = arff.classIndex(); if (classIndex == -1) { classIndex = arff.numAttributes() - 1; arff.setClassIndex(classIndex); } // Get possible values of the class index if (arff.attribute(classIndex).enumerateValues() == null) { ClassName = new String[]{arff.attribute(classIndex).name()}; } else { List<String> classNames = new LinkedList<>(); Enumeration enums = arff.attribute(classIndex).enumerateValues(); while (enums.hasMoreElements()) { classNames.add(enums.nextElement().toString()); } ClassName = classNames.toArray(new String[0]); } // Read in attributes (only get the numeric ones) Set<Integer> excludedAttributes = new TreeSet<>(); for (int i = 0; i < arff.numAttributes(); i++) { if (i == classIndex) { continue; } if (arff.attribute(i).isNumeric()) { AttributeName.add(arff.attribute(i).name()); } else if (i != classIndex) { excludedAttributes.add(i); } } // Read in data Entries.ensureCapacity(arff.numInstances()); for (Instance inst : arff) { double[] attr = new double[NAttributes()]; // Read in attributes int counter = 0; for (int a = 0; a < inst.numAttributes(); a++) { if (!(a == classIndex || excludedAttributes.contains(a))) { attr[counter++] = inst.value(a); } } // Store 
values BaseEntry entry = new BaseEntry(); entry.setAttributes(attr); // Set class variable try { entry.setMeasuredClass(inst.classValue()); } catch (Exception e) { // do nothing } addEntry(entry); } return; } // Process header String Line = fp.readLine(); String[] Words = Line.split("[, \t]"); AttributeName.addAll(Arrays.asList(Arrays.copyOfRange(Words, 0, Words.length - 1))); ClassName = new String[]{Words[Words.length - 1]}; // Add in entries while (true) { // Read line Line = fp.readLine(); if (Line == null) { break; } Words = Line.split("[, \t]"); if (Words.length == 0) { break; } // Read in data double[] attributes = new double[NAttributes()]; double cValue; try { for (int i = 0; i < AttributeName.size(); i++) { attributes[i] = Double.parseDouble(Words[i]); } cValue = Double.parseDouble(Words[Words.length - 1]); } catch (NumberFormatException e) { // If a problem reading numbers, just continue continue; } // Add entry BaseEntry E = new BaseEntry(); E.setAttributes(attributes); E.setMeasuredClass(cValue); E.reduceMemoryFootprint(); addEntry(E); } } /** * Set name of class variable (or possible classes) * * @param newClassNames New name(s) to use */ public void setClassNames(String[] newClassNames) { ClassName = newClassNames.clone(); } /** * @return Names of possible classes for class variable */ public String[] getClassNames() { return ClassName; } /** * Get the name of a certain class (for data with multiple possible * classficiations) * * @param value Value of class variable * @return Name of that class */ public String getClassName(int value) { return ClassName[value]; } /** * Add in a new attribute. 
Places at end of list * * @param name Name to be added * @param values Value of attribute for each entry */ public void addAttribute(String name, double[] values) { if (AttributeName.contains(name)) { throw new Error("Dataset already contains attribute: " + name); } AttributeName.add(name); for (int i = 0; i < NEntries(); i++) { BaseEntry E = getEntry(i); E.addAttribute(values[i]); E.reduceMemoryFootprint(); } } /** * Add new attributes. If you use this operation, you must add attributes to * each new entry manually. * @param names Names of new attributes * @see BaseEntry#addAttributes(double[]) */ public void addAttributes(List<String> names) { for (String name : names) { if (AttributeName.contains(name)) { throw new Error("Dataset already contains attribute: " + name); } } AttributeName.addAll(names); } /** * Remove an attribute * * @param index Index of attribute to be removed */ public void removeAttribute(int index) { System.err.println("WARNING: This does not currently remove attribute from entries. LW 4Apr14"); AttributeName.remove(index); } /** * Remove an attribute * * @param name Name of attribute to be removed */ public void removeAttribute(String name) { System.err.println("WARNING: This does not currently remove attribute from entries. LW 4Apr14"); AttributeName.remove(name); } /** * @return Number of features describing each entry */ public int NAttributes() { return AttributeName.size(); } /** * @return Number of possible (discrete) values for class variable. 1 means * variable is continuous */ public int NClasses() { return ClassName.length; } /** * @return Number of entries in Dataset */ public int NEntries() { return Entries.size(); } /** * Add an entry. You may need to run {@linkplain #generateAttributes(java.lang.Object[]) * }. * * @param e Entry to be added */ public void addEntry(BaseEntry e) { Entries.add(e); } /** * A new entry by parsing an input string. After using this operation, it * may be necessary to recalculate attributes. 
* * @param input String describing the entry * @return Entry representing the parsed string. * @throws Exception If conversion fails */ public BaseEntry addEntry(String input) throws Exception { BaseEntry toAdd = new BaseEntry(input); addEntry(toAdd); return toAdd; } /** * Add many entries to a the data set * * @param entries Any collection type of entries */ public void addEntries(Collection<? extends BaseEntry> entries) { Entries.addAll(entries); } /** * Remove all duplicate entries */ public void removeDuplicates() { Set Filter = new HashSet<>(Entries); Entries.clear(); Entries.addAll(Filter); } /** * Determine whether a dataset contains a certain entry * * @param Entry Entry to be tested * @return Whether the dataset contains <code>Entry</code> */ public boolean containsEntry(BaseEntry Entry) { return Entries.contains(Entry); } /** * Combine the data structure with another. Does not alter the other entry * * @param d Dataset to be added */ public void combine(Dataset d) { if (d.NAttributes() != NAttributes()) { throw new Error("Data set has wrong number of features"); } if (d.NClasses() != NClasses()) { throw new Error("Data set has wrong number of classes"); } Entries.addAll(d.Entries); } /** * Combine the data structure with an array of other Datasets. Leaves all of * the others all unaltered. * * @param d Array of DataStructures */ public void combine(Dataset[] d) { for (Dataset data : d) { combine(data); } } /** * Combine the data structure with a collection of other data structures. 
* Leaves other datasets unaltered * * @param d Collection of Datasets */ public void combine(Collection<Dataset> d) { for (Dataset data : d) { combine(data); } } /** * Remove all entries that are in another dataset from this dataset * * @param Data Second dataset */ public void subtract(Dataset Data) { TreeSet<BaseEntry> TempSet = new TreeSet<>(Entries); TempSet.removeAll(Data.Entries); Entries = new ArrayList(TempSet); } /** * Retrieve a single entry from the dataset * * @param index Index of entry * @return Specified entry */ public BaseEntry getEntry(int index) { return Entries.get(index); } /** * Retrieve the internal collection of entries * * @return Collection of entries (probably an ArrayList) */ public List<BaseEntry> getEntries() { return this.Entries; } /** * Given a list of labels, separate Dataset into multiple subsets * * @param labels Label defining in which subset to label an entry * @return Array of subsets of length <code>max(label) + 1</code>, where * each member, i, contains entries with <code>label[i] == i</code>. */ public Dataset[] partition(int[] labels) { int maxLabel = -1; for (int i = 0; i < labels.length; i++) { if (labels[i] > maxLabel) { maxLabel = labels[i]; } } return partition(labels, (int) maxLabel + 1); } /** * Given a list of labels, separate Dataset into multiple subsets (some may * be empty) * * @param labels Label defining in which subset to label an entry * @param number Number subsets to produce, must be greater than max(labels) * @return Array of subsets of length max(label), where each member, i, * contains entries with label[i] == i. 
*/ public Dataset[] partition(int[] labels, int number) { if (labels.length != this.NEntries()) { throw new Error("Number of labels != Number of entries!"); } Dataset[] subsets = new Dataset[number]; for (int i = 0; i < number; i++) { subsets[i] = this.emptyClone(); } int toSubset; for (int i = 0; i < labels.length; i++) { toSubset = labels[i]; if (toSubset >= number) { throw new Error("number < max(labels)"); } subsets[toSubset].addEntry(this.getEntry(i)); } return subsets; } /** * Get a specific list of entries from the dataset. These entries are not * removed from the original dataset * * @param indicies List of entry IDs to be removed * @return A new dataset containing only the specified entries */ public Dataset getSubset(int[] indicies) { Dataset output = emptyClone(); for (int i = 0; i < indicies.length; i++) { output.addEntry(getEntry(indicies[i])); } return output; } /** * Split off a certain number of entries into a separate dataset. Deletes * those entries from the original set * * @param number Number of entries in new set * @return Dataset containing a subset of entries */ public Dataset randomSplit(int number) { if (number < 0 || number > NEntries()) { throw new Error("Number must be positive, and less than the size of the set"); } // Create a list of which entries to move over Boolean[] to_switch = new Boolean[NEntries()]; Arrays.fill(to_switch, 0, number, true); Arrays.fill(to_switch, number, NEntries(), false); Collections.shuffle(Arrays.asList(to_switch)); // Delete or switch, as suggested Dataset out = emptyClone(); int id = 0; Iterator<BaseEntry> iter = Entries.iterator(); ArrayList<BaseEntry> new_set = new ArrayList<>(number); while (iter.hasNext()) { BaseEntry e = iter.next(); if (to_switch[id]) { new_set.add(e); iter.remove(); } id++; } out.addEntries(new_set); return out; } /** * Split off a certain number of entries into a separate dataset. 
Deletes * those entries from the original set * * @param fraction Fraction of entries of original set to move to new set * @return Dataset containing a subset of entries */ public Dataset randomSplit(double fraction) { if (fraction > 1 || fraction < 0) { throw new Error("Fraction must be between 0 and 1"); } int to_new = (int) Math.floor((double) NEntries() * fraction); return randomSplit(to_new); } /** * Generate a random subset of the original data, which is left intact * * @param number Number of entries to move over * @return Dataset containing a subset of entries */ public Dataset getRandomSubset(int number) { /** * Grab a random subset from the original data, leave this intact */ if (number < 0 || number > NEntries()) { throw new Error("Number must be positive, and less than the size of the set"); } // Create a list of which entries to move over Boolean[] to_switch = new Boolean[NEntries()]; Arrays.fill(to_switch, 0, number, true); Arrays.fill(to_switch, number, NEntries(), false); Collections.shuffle(Arrays.asList(to_switch)); // Add to subset if desired Dataset out = emptyClone(); int id = 0; Iterator<BaseEntry> iter = Entries.iterator(); ArrayList<BaseEntry> new_set = new ArrayList<>(number); // Faster than adding to set while (iter.hasNext()) { BaseEntry e = iter.next(); if (to_switch[id]) { new_set.add(e); } id++; } out.addEntries(new_set); return out; } /** * Generate a random subset of the original data, which is left intact * * @param fraction Fraction of entries used in new set * @return Dataset containing a subset of entries */ public Dataset getRandomSubset(double fraction) { if (fraction > 1 || fraction < 0) { throw new Error("Fraction must be between 0 and 1"); } int to_new = (int) Math.floor((double) NEntries() * fraction); return getRandomSubset(to_new); } /** * Split for threading purposes. Does not worry about randomization. 
 *
 * @param NThreads Number of subsets to create
 * @return Array of equally-sized Dataset objects
 */
public Dataset[] splitForThreading(int NThreads) {
    Dataset[] output = new Dataset[NThreads];
    // Number to split per thread (integer division; remainder goes to the last thread)
    int to_split = NEntries() / NThreads;
    Iterator<BaseEntry> iter = Entries.iterator();
    // Fill in each thread
    for (int i = 0; i < NThreads - 1; i++) {
        output[i] = emptyClone();
        output[i].Entries.ensureCapacity(to_split);
        for (int j = 0; j < to_split; j++) {
            output[i].Entries.add(iter.next());
        }
    }
    // Fill in the last thread with whatever remains
    output[NThreads - 1] = emptyClone();
    output[NThreads - 1].Entries.ensureCapacity(to_split + 1);
    while (iter.hasNext()) {
        output[NThreads - 1].Entries.add(iter.next());
    }
    return output;
}

/**
 * Split the dataset into multiple folds for cross-validation, empties the
 * original test set
 *
 * @param folds Number of folds
 * @return Vector of independent test sets
 */
public Dataset[] splitIntoFolds(int folds) {
    // Generate the output array
    Dataset[] output = new Dataset[folds];
    for (int i = 0; i < folds; i++) {
        output[i] = emptyClone();
    }
    if (NClasses() == 1) {
        // Continuous class: shuffle fold labels and deal entries out directly.
        // Generate list of entries to shuffle
        int to_split = (int) Math.floor((double) NEntries() / (double) folds);
        Integer[] to_switch = new Integer[NEntries()];
        int count = 0;
        for (int i = 1; i < folds; i++) {
            Arrays.fill(to_switch, count, count + to_split, i);
            count += to_split;
        }
        // Remainder (and fold 0's share) all get label 0
        Arrays.fill(to_switch, count, NEntries(), 0);
        Collections.shuffle(Arrays.asList(to_switch));
        // Assign them to the appropriate array, emptying this dataset as we go
        int id = 0;
        Iterator<BaseEntry> iter = Entries.iterator();
        while (iter.hasNext()) {
            BaseEntry e = iter.next();
            iter.remove(); // Remove from old set
            output[to_switch[id]].addEntry(e);
            id++;
        }
    } else {
        // Discrete class: stratified split — fold each class separately so
        // class proportions are preserved, then merge per-fold.
        for (int i = 0; i < NClasses(); i++) {
            final int cls = i;
            // Get the entries that are in class # cls
            // NOTE(review): raw Predicate — presumably org.apache.commons.collections; confirm import
            Predicate splitter = new Predicate() {
                @Override
                public boolean evaluate(Object input) {
                    BaseEntry input_obj = (BaseEntry) input;
                    return input_obj.getMeasuredClass() == cls;
                }
            };
            Dataset split = DatasetHelper.split(this, splitter);
            // Split them into folds for cross-validation; temporarily mark the
            // subset as single-class so the recursive call takes the continuous branch
            split.setClassNames(new String[]{"Class"});
            Dataset[] split_folds = split.splitIntoFolds(folds);
            for (Dataset S : split_folds) {
                S.setClassNames(getClassNames());
            }
            // Add them to the output structure
            for (int j = 0; j < folds; j++) {
                output[j].combine(split_folds[j]);
            }
        }
    }
    return output;
}

/**
 * Convert the Dataset to a Weka Instances object. Treats the class variable
 * as continuous
 *
 * @return Object in the Weka Instances format with DenseInstance entries
 * @throws java.lang.Exception
 */
public Instances convertToWeka() throws Exception {
    return convertToWeka(true, false); // Generate object with continuous class data
}

/**
 * Convert the Dataset to a Weka Instances object for classifier data.
 *
 * @param discrete_class Whether the class is treated as a discrete variable
 * @return Object in the Weka Instances format with DenseInstance entries
 * @throws java.lang.Exception
 */
public Instances convertToWeka(boolean discrete_class) throws Exception {
    return convertToWeka(true, discrete_class);
}

/**
 * Convert to Weka Instances object. User can decide whether to output
 * class variable in the Instances object or, if so, whether to treat it as
 * discrete.
 *
 * @param useClass Whether to output class data
 * @param useDiscreteClass Whether to treat class variable as discrete
 * @return Dataset in Weka format
 */
public Instances convertToWeka(boolean useClass, boolean useDiscreteClass) {
    // Create an array of attribute names
    ArrayList<Attribute> Attributes = new ArrayList<>();
    for (int i = 0; i < NAttributes(); i++) {
        Attribute att = new Attribute(AttributeName.get(i));
        Attributes.add(att);
    }
    // Optionally append the class attribute (nominal if discrete, numeric otherwise)
    if (!useClass) {
        // Do nothing
    } else if (useDiscreteClass) {
        Attributes.add(new Attribute("Class", Arrays.asList(getClassNames())));
    } else {
        Attributes.add(new Attribute("Class"));
    }
    Instances weka_out = new Instances("Output", Attributes, NEntries());
    int j;
    for (int i = 0; i < NEntries(); i++) {
        BaseEntry entry = Entries.get(i);
        DenseInstance inst = new DenseInstance(Attributes.size());
        inst.setDataset(weka_out);
        for (j = 0; j < NAttributes(); j++) {
            inst.setValue(j, entry.getAttribute(j));
        }
        // j == NAttributes() here: the class slot, if present
        if (!useClass) {
        }// Do nothing
        else if (useDiscreteClass) {
            inst.setValue(j, getClassNames()[(int) entry.getMeasuredClass()]);
        } else {
            inst.setValue(j, entry.getMeasuredClass());
        }
        weka_out.add(inst);
    }
    if (useClass) {
        weka_out.setClassIndex(NAttributes());
    }
    return weka_out;
}

/**
 * Convert to Weka Instances object, delete attribute information in each
 * entry. This can be used to conserve memory when using Weka.
 *
 * @param useClass Whether to output class data. Note: If there is no measured
 * class data and useDiscreteClass is true, value will be set to Zero. This
 * allows the Instances to contain information about how many classes are available
 * regardless of whether this Dataset contains any measurements.
 * @param useDiscreteClass Whether to treat class variable as discrete
 * @return Dataset in Weka format
 * @see Dataset#restoreAttributes(weka.core.Instances)
 */
public Instances transferToWeka(boolean useClass, boolean useDiscreteClass) {
    // Create an array of attribute names
    ArrayList<Attribute> Attributes = new ArrayList<>();
    for (int i = 0; i < NAttributes(); i++) {
        Attribute att = new Attribute(AttributeName.get(i));
        Attributes.add(att);
    }
    // Optionally append the class attribute (nominal if discrete, numeric otherwise)
    if (!useClass) {
        // Do nothing
    } else if (useDiscreteClass) {
        Attributes.add(new Attribute("Class", Arrays.asList(getClassNames())));
    } else {
        Attributes.add(new Attribute("Class"));
    }
    Instances weka_out = new Instances("Output", Attributes, NEntries());
    int j;
    for (int i = 0; i < NEntries(); i++) {
        BaseEntry entry = Entries.get(i);
        DenseInstance inst = new DenseInstance(Attributes.size());
        inst.setDataset(weka_out);
        for (j = 0; j < NAttributes(); j++) {
            inst.setValue(j, entry.getAttribute(j));
        }
        // j == NAttributes() here: the class slot, if present
        if (!useClass) {
        }// Do nothing
        else if (useDiscreteClass) {
            // Unmeasured entries fall back to class 0 (see javadoc note above)
            if (entry.hasMeasurement()) {
                inst.setValue(j, getClassName((int) entry.getMeasuredClass()));
            } else {
                inst.setValue(j, getClassName(0));
            }
        } else {
            inst.setValue(j, entry.getMeasuredClass());
        }
        weka_out.add(inst);
        // "transfer": free the attribute storage of each entry once copied
        entry.clearAttributes();
    }
    if (useClass) {
        weka_out.setClassIndex(NAttributes());
    }
    return weka_out;
}

/**
 * Restore attribute data to each entry.
 *
 * @param weka Weka object containing attribute information.
 Assumes last
 * variable has class variable
 * @throws java.lang.Exception
 * @see Dataset#transferToWeka(boolean, boolean)
 */
public void restoreAttributes(Instances weka) throws Exception {
    // Check input
    if (weka.numInstances() != NEntries()) {
        throw new Exception("Wrong number of entries");
    }
    boolean hasClass = weka.classIndex() >= 0;
    if ((weka.numAttributes() != NAttributes() && !hasClass)
            || (weka.numAttributes() - 1 != NAttributes() && hasClass)) {
        throw new Exception("Wrong number of attributes");
    }
    // Transfer data; instances are removed from `weka` as they are copied back
    Iterator<Instance> iter = weka.iterator();
    for (int i = 0; i < this.NEntries(); i++) {
        Entries.get(i).clearAttributes();
        Instance inst = iter.next();
        double[] attr = inst.toDoubleArray();
        iter.remove();
        if (!hasClass) {
            Entries.get(i).addAttributes(attr);
        } else {
            // Drop the trailing class column before restoring
            Entries.get(i).addAttributes(Arrays.copyOf(attr, NAttributes()));
        }
    }
}

/**
 * Output the attributes and class of each entry
 *
 * @return Array where the last column is the measured class variable (0 if
 * no measured)
 */
public double[][] getEntryArray() {
    double[][] output = new double[NEntries()][NAttributes() + 1];
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        System.arraycopy(e.getAttributes(), 0, output[id], 0, NAttributes());
        output[id][NAttributes()] = e.hasMeasurement() ? e.getMeasuredClass() : 0;
        id++;
    }
    return output;
}

/**
 * Output the attributes of each entry into an array
 *
 * @return Array of attributes
 */
public double[][] getAttributeArray() {
    double[][] output = new double[NEntries()][NAttributes()];
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        System.arraycopy(e.getAttributes(), 0, output[id], 0, NAttributes());
        id++;
    }
    return output;
}

/**
 * Output a single attribute for each entry
 *
 * @param Attribute Which Attribute to output
 * @return Array of attribute values
 */
public double[] getSingleAttributeArray(int Attribute) {
    double[] output = new double[NEntries()];
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        output[id] = e.getAttribute(Attribute);
        id++;
    }
    return output;
}

/**
 * Output an array of the measured classes for each entry
 *
 * @return 1D double array containing measured classes
 */
public double[] getMeasuredClassArray() {
    // NOTE(review): throws NoSuchElementException (not Error) on an empty dataset
    if (!Entries.iterator().next().hasMeasurement()) {
        throw new Error("Entries have no measured class");
    }
    double[] output = new double[NEntries()];
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        if (e.hasMeasurement()) {
            output[id] = e.getMeasuredClass();
        } else {
            throw new Error("Entry " + id + " does not have a measured class variable");
        }
        id++;
    }
    return output;
}

/**
 * Get the predicted class for each entry
 *
 * @return 1D double array containing measured classes
 */
public double[] getPredictedClassArray() {
    // NOTE(review): throws NoSuchElementException (not Error) on an empty dataset
    if (!Entries.iterator().next().hasPrediction()) {
        throw new Error("Entries have no predicted class");
    }
    double[] output = new double[NEntries()];
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        if (e.hasPrediction()) {
            output[id] = e.getPredictedClass();
        } else {
            throw new Error("Entry " + id + " does not have a predicted class variable");
        }
        id++;
    }
    return output;
}

/**
 * Get an array of class probabilities
 *
 * @return Probabilities of each entry being in each class
 */
public double[][] getClassProbabilityArray() {
    if (!Entries.iterator().next().hasPrediction()) {
        throw new Error("Entries have no predicted class");
    }
    double[][] output = new double[NEntries()][NClasses()];
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        output[id] = e.getClassProbilities();
        id++;
    }
    return output;
}

/**
 * Set predicted class for each entry, given an array of predictions
 *
 * @param predictions Predictions in the same order as generated by
 * getFeatures
 */
public void setPredictedClasses(double[] predictions) {
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        e.setPredictedClass(predictions[id]);
        id++;
    }
}

/**
 * Set measured class for each entry, given an array of measurements
 *
 * @param measurements Measurements in the same order as generated by
 * getFeatures
 */
public void setMeasuredClasses(double[] measurements) {
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        e.setMeasuredClass(measurements[id]);
        id++;
    }
}

/**
 * Set class probabilities for each entry
 *
 * @param predictions Probabilities in the same order as generated by
 * getFeatures
 */
public void setClassProbabilities(double[][] predictions) {
    int id = 0;
    Iterator<BaseEntry> iter = Entries.iterator();
    while (iter.hasNext()) {
        BaseEntry e = iter.next();
        e.setClassProbabilities(predictions[id]);
        id++;
    }
}

@Override
public String about() {
    String output = "Number of entries: " + NEntries();
    output += " - Number of features: " + NAttributes();
    return output;
}

/**
 * Print out name of dataset and what attributes are generated.
 * @param htmlFormat Whether to print in HTML format
 * @return String describing this dataset
 */
@Override
public String printDescription(boolean htmlFormat) {
    // Print out class name
    String output = "";
    if (htmlFormat) {
        output += "<label>";
    }
    output += "Dataset Type";
    if (htmlFormat) {
        output += "</label> ";
    } else {
        output += ": ";
    }
    output += getClass().getName() + "\n";
    // Print out entry description
    if (htmlFormat) {
        output += "<br><label>";
    }
    output += "Entry Description";
    if (htmlFormat) {
        output += "</label> ";
    } else {
        output += ": ";
    }
    output += printEntryDescription(htmlFormat) + "\n";
    // Print out what attributes are generated by default
    String defaultAttr = printAttributeDescription(htmlFormat);
    if (defaultAttr.length() > 1) {
        // Print out header
        if (htmlFormat) {
            output += "<h3>";
        }
        output += "Default Attributes";
        if (htmlFormat) {
            output += "</h3>";
        }
        output += "\n";
        // Print out description
        if (htmlFormat) {
            output += "<p>";
        }
        output += defaultAttr + "\n";
    }
    // Print out attribute generators
    if (Generators.size() > 0) {
        // Print out header
        if (htmlFormat) {
            output += "<h4>";
        }
        output += "Attribute Generators";
        if (htmlFormat) {
            output += "</h4>";
        }
        output += "\n";
        // Print out start of HTML list
        if (htmlFormat) {
            output += "<ol>\n";
        }
    }
    for (int i = 0; i < Generators.size(); i++) {
        // Print out description of the generator
        if (htmlFormat) {
            output += "<li> ";
        } else {
            output += i + ". ";
        }
        output += Generators.get(i).printDescription(htmlFormat);
        if (htmlFormat) {
            output += "</li>";
        }
        output += "\n";
    }
    if (Generators.size() > 0 && htmlFormat) {
        output += "</ol>\n";
    }
    // Print out attribute expanders
    return output;
}

/**
 * Print out description of attributes.
 *
 * <p><b>Implementation Guide</b>
 * <p>Subclasses should describe what kind of attributes are generated
 * <i>by default</i>. If it uses a separate generator class, those are captured
 * by the {@linkplain #printDescription(boolean) } section.
 *
 * @param htmlFormat Whether to print in HTML format
 * @return Description of the attributes or an empty string ("") if there
 * are no default attributes.
 */
public String printAttributeDescription(boolean htmlFormat) {
    return "";
}

/**
 * Print out what the entries to this dataset are.
 *
 * @param htmlFormat Whether to print in HTML format
 * @return Description of each entry (i.e., what kind of data is this).
 */
public String printEntryDescription(boolean htmlFormat) {
    return "A list of ordinary numbers";
}

@Override
public String toString() {
    String output = "Number of entries: " + NEntries();
    output += "\nNumber of features: " + NAttributes();
    return output;
}

/**
 * Get the distribution of entries between known classes
 *
 * @return Number of entries of each class
 */
public int[] getDistributionCount() {
    int[] output = new int[NClasses()];
    // Continuous class: everything is one bucket
    if (NClasses() == 1) {
        output[0] = NEntries();
        return output;
    }
    for (BaseEntry Entry : Entries) {
        output[(int) Entry.getMeasuredClass()]++;
    }
    return output;
}

/**
 * Get the distribution of entries between known classes
 *
 * @return Fraction of entries of each class
 */
public double[] getDistribution() {
    double[] output = new double[NClasses()];
    // Continuous class: everything is one bucket
    if (NClasses() == 1) {
        output[0] = 1.0;
        return output;
    }
    int[] count = new int[NClasses()];
    for (BaseEntry Entry : Entries) {
        count[(int) Entry.getMeasuredClass()]++;
    }
    for (int i = 0; i < NClasses(); i++) {
        output[i] = (double) count[i] / (double) NEntries();
    }
    return output;
}

/**
 * Print out the distribution of entries in the known classes
 *
 * @return Distribution as a String
 */
public String printDistribution() {
    if (ClassName.length == 1) {
        return "All entries in single class: " + ClassName[0];
    }
    String output = "";
    double[] dist = getDistribution();
    for (int i = 0; i < NClasses(); i++) {
        output += String.format("%s (%.2f%%) ", ClassName[i], dist[i] * 100.0);
    }
    return output;
}

/**
 * Print out data regarding a list of entries.
Format:<br> * <center>ID, Entry, Measured Class, Predicted Class, Class * Probabilities</center> * * @param list ID numbers of entries to be printed. * @return Desired information as a String */ public String printEntries(int[] list) { String output = ""; // Print out a header output += "ID\tEntry\tMeasuredClass\tPredictedClass"; if (NClasses() > 1) { output += "\tClassProbabilities"; } output += "\n"; // Print out each entry for (int i = 0; i < list.length; i++) { output += String.format("%d\t%s\t", list[i], Entries.get(list[i])); if (Entries.get(list[i]).hasMeasurement()) { output += String.format("%.3f\t", Entries.get(list[i]).getMeasuredClass()); } else { output += "None\t"; } if (Entries.get(list[i]).hasPrediction()) { output += String.format("%.3f\t", Entries.get(list[i]).getPredictedClass()); } else { output += "None\t"; } if (NClasses() > 1) { double[] probs = Entries.get(i).getClassProbilities(); output += String.format("(%.3f", probs[0]); for (int j = 1; j < NClasses(); j++) { output += String.format(",%.3f", probs[j]); } output += ")"; } output += "\n"; } return output; } /** * Save the state of this object using serialization * * @param filename Filename for output */ public void saveState(String filename) { UtilityOperations.saveState(this, filename); } @Override public String printCommand(List<String> Command) throws Exception { switch (Command.get(0).toLowerCase()) { case "details": return this.toString(); case "dist": return this.printDistribution(); default: throw new Exception("ERROR: Print command \"" + Command.get(0) + "\" not recognized"); } } @Override public String saveCommand(String Basename, String Command) throws Exception { switch (Command) { case "csv": // Save as CSV file DatasetOutput.saveDelimited(this, Basename + ".csv", ","); return Basename + ".csv"; case "arff": // Save as an ARFF DatasetOutput.saveARFF(this, Basename + ".arff"); return Basename + ".arff"; case "stats": // Save for statistics (only: name, predicted, measured) 
            DatasetOutput.printForStatistics(this, Basename + ".csv");
            return Basename + ".csv";
        default:
            throw new Exception("ERROR: Save command \"" + Command + "\" not recognized");
    }
}

@Override
public Object runCommand(List<Object> Command) throws Exception {
    // Bare "dataset" with no subcommand: just print the summary
    if (Command.isEmpty()) {
        System.out.println(about());
        return null;
    }
    String Action = Command.get(0).toString();
    switch (Action.toLowerCase()) {
        case "attributes":
        case "attr":
            return runAttributeCommand(Command.subList(1, Command.size()));
        case "clone":
            // Usage: <output> = clone [-empty]
            if (Command.size() == 1) {
                return clone();
            } else if (Command.get(1).toString().equalsIgnoreCase("-empty")) {
                return emptyClone();
            } else {
                throw new Exception("Usage: clone [-empty]");
            }
        case "combine": {
            try {
                if (Command.size() != 2) {
                    throw new Exception();
                }
                Dataset other = (Dataset) Command.get(1);
                combine(other);
                System.out.format("\tAdded %d entries. New size: %d\n", other.NEntries(), NEntries());
            } catch (Exception e) {
                throw new Exception("Usage: combine $<other dataset>");
            }
        }
        break;
        case "filter": {
            // Usage: <include|exclude> <method> [<options...>]
            String Method;
            List<Object> Options;
            boolean Exclude;
            try {
                if (Command.get(1).toString().toLowerCase().startsWith("ex")) {
                    Exclude = true;
                } else if (Command.get(1).toString().toLowerCase().startsWith("in")) {
                    Exclude = false;
                } else {
                    throw new Exception();
                }
                Method = Command.get(2).toString();
                // "?" lists the available filter implementations
                if (Method.equals("?")) {
                    System.out.println(printImplmentingClasses(BaseDatasetFilter.class, false));
                    return null;
                }
                Options = Command.subList(3, Command.size());
            } catch (Exception e) {
                throw new Exception("Usage: <dataset> filter <exclude|include> <method> <options...>");
            }
            BaseDatasetFilter Filter = (BaseDatasetFilter) instantiateClass("data.utilities.filters." + Method, Options);
            Filter.setExclude(Exclude);
            Filter.filter(this);
            System.out.println("\tFiltered using a " + Method + ". New size: " + NEntries());
        }
        break;
        case "generate": {
            // Generate new entries. Usage: generate <method> [<options...>]
            String Method = "";
            List<Object> MethodOptions;
            try {
                Method = Command.get(1).toString();
                if (Method.equalsIgnoreCase("?")) {
                    System.out.println("Available Entry Generators");
                    System.out.println(printImplmentingClasses(BaseEntryGenerator.class, false));
                    return null;
                }
                MethodOptions = Command.subList(2, Command.size());
            } catch (Exception e) {
                throw new Exception("Usage: generate <method> [<options...>]");
            }
            BaseEntryGenerator generator = (BaseEntryGenerator) instantiateClass("data.utilities.generators." + Method, MethodOptions);
            int initialCount = this.NEntries();
            generator.addEntriesToDataset(this);
            System.out.println(String.format("\tGenerated %d new entries with a %s. Total Count: %s", NEntries() - initialCount, Method, NEntries()));
        }
        break;
        case "import": {
            // Usage: import <filename> [<options...>]
            String filename = Command.get(1).toString();
            Object[] options = Command.subList(2, Command.size()).toArray();
            importText(filename, options);
            System.out.println("\tImported " + NEntries() + " entries");
        }
        break;
        case "modify": {
            if (Command.size() < 2) {
                throw new Exception("Usage: <dataset> modify <method> <options>");
            }
            // Get command
            String Method = Command.get(1).toString();
            if (Method.equals("?")) {
                System.out.println(printImplmentingClasses(BaseDatasetModifier.class, false));
                return null;
            }
            // Get options
            List<Object> Options = Command.subList(2, Command.size());
            // Modify the Dataset
            BaseDatasetModifier Mdfr = (BaseDatasetModifier) instantiateClass("data.utilities.modifiers." + Method, Options);
            Mdfr.transform(this);
            System.out.println("\tModified dataset using a " + Method);
        }
        break;
        case "rank": {
            // Usage: <number> <max|min> <meas|pred> <method> <options...>
            boolean measured = true;
            boolean maximize = true;
            int numberToPrint = -1;
            String Method;
            List<Object> Options;
            try {
                numberToPrint = Integer.parseInt(Command.get(1).toString());
                if (Command.get(2).toString().toLowerCase().startsWith("max")) {
                    maximize = true;
                } else if (Command.get(2).toString().toLowerCase().contains("min")) {
                    maximize = false;
                } else {
                    throw new Exception();
                }
                if (Command.get(3).toString().toLowerCase().startsWith("mea")) {
                    measured = true;
                } else if (Command.get(3).toString().toLowerCase().startsWith("pre")) {
                    measured = false;
                } else {
                    throw new Exception();
                }
                // Get Method and its options
                Method = Command.get(4).toString();
                if (Method.equalsIgnoreCase("?")) {
                    System.out.println("Available EntryRankers:");
                    System.out.println(printImplmentingClasses(BaseEntryRanker.class, false));
                    return null;
                }
                Options = Command.subList(5, Command.size());
            } catch (Exception e) {
                throw new Exception("Usage: <dataset> rank <number> <maximum|minimum> <measured|predicted> <method> [<options>]");
            }
            BaseEntryRanker ranker = (BaseEntryRanker) instantiateClass("optimization.rankers." + Method, Options);
            ranker.setMaximizeFunction(maximize);
            ranker.setUseMeasured(measured);
            ranker.train(this);
            System.out.println(DatasetOutput.printTopEntries(this, ranker, numberToPrint));
        }
        break;
        case "subset":
        case "split": {
            // Usage: split|subset <fraction|number> = <output>
            double size;
            try {
                size = Double.parseDouble(Command.get(1).toString());
            } catch (Exception e) {
                throw new Exception("Usage: " + Action + " <fraction|number> = <output>");
            }
            Dataset output;
            // A value >= 1 is an absolute count; < 1 is interpreted as a fraction
            if (Action.toLowerCase().startsWith("sub")) {
                output = size >= 1 ? getRandomSubset((int) size) : getRandomSubset(size);
                System.out.println("\tGenerated a subset containing " + output.NEntries() + " entries.");
            } else {
                output = size >= 1 ? randomSplit((int) size) : randomSplit(size);
                System.out.println("\tSplit off " + output.NEntries() + " entries from dataset");
            }
            return output;
        }
        default:
            throw new Exception("ERROR: Dataset command not recognized: " + Action);
    }
    return null;
}

/**
 * Run commands related to attributes of each entry. Starts with the action
 * to perform on the attributes
 *
 * @param Command Operation to be run on/about attributes
 * @return Any output (null if nothing is created)
 * @throws Exception On any error
 */
protected Object runAttributeCommand(List<Object> Command) throws Exception {
    // No subcommand: print attribute names, two per line
    if (Command.isEmpty()) {
        System.out.print("Attributes contained within dataset:\n");
        for (int i = 0; i < NAttributes(); i++) {
            System.out.format("%32s", AttributeName.get(i));
            if (i % 2 == 1) {
                System.out.println();
            }
        }
        if (NAttributes() % 2 == 1) {
            System.out.println();
        }
        return null;
    }
    String Action = Command.get(0).toString();
    switch (Action.toLowerCase()) {
        case "expanders": {
            runAttributeExpansionCommand(Command.subList(1, Command.size()));
        }
        break;
        case "generators": {
            runAttributeGeneratorCommand(Command.subList(1, Command.size()));
        }
        break;
        case "generate":
            // Usage: generate
            if (Command.size() > 1) {
                throw new Exception("Usage: <dataset> generate");
            }
            generateAttributes();
            System.out.println("\tGenerated " + NAttributes() + " attributes.");
            break;
        case "rank": {
            // Usage: <number>
            String Method;
            int NumToPrint;
            List<Object> MethodOptions;
            try {
                if (Command.get(1) instanceof Integer) {
                    NumToPrint = (Integer) Command.get(1);
                } else {
                    NumToPrint = Integer.parseInt(Command.get(1).toString());
                }
                Method = Command.get(2).toString();
                if (Method.equals("?")) {
                    System.out.println(printImplmentingClasses(BaseAttributeEvaluator.class, false));
                    return null;
                }
                MethodOptions = Command.subList(3, Command.size());
            } catch (Exception e) {
                throw new Exception("Usage: <dataset> attributes rank <number> <method> [<method options...>]");
            }
            BaseAttributeEvaluator Evaluator = (BaseAttributeEvaluator) instantiateClass("attributes.evaluators." + Method, MethodOptions);
            System.out.print(Evaluator.printRankings(this, NumToPrint));
        }
        break;
        default:
            throw new Exception("ERROR: Dataset attribute command not recognized" + Action);
    }
    return null;
}

/**
 * Run commands relating to expanding the attribute pool.
 *
 * @param Command Attribute expansion command (e.g., "run")
 * @throws Exception
 */
protected void runAttributeExpansionCommand(List<Object> Command) throws Exception {
    if (Command.isEmpty()) {
        throw new Exception("Available attribute expansion commands: run, add, clear");
    }
    String action = Command.get(0).toString().toLowerCase();
    switch (action) {
        case "add":
            // Usage: add <method> <options...>
            String Method;
            List<Object> Options;
            try {
                Method = Command.get(1).toString();
                if (Method.equals("?")) {
                    System.out.println(printImplmentingClasses(BaseAttributeExpander.class, false));
                    return;
                }
                Options = Command.subList(2, Command.size());
            } catch (Exception e) {
                throw new Exception("Usage: <dataset> expand <method> <options...>");
            }
            BaseAttributeExpander expander = (BaseAttributeExpander) instantiateClass("attributes.expansion." + Method, Options);
            addAttribueExpander(expander);
            System.out.println("\tAdded a " + Method + " to list of attribute expanders");
            break;
        case "clear":
            clearAttributeExpanders();
            System.out.println("\tCleared list of attribute expanders.");
            break;
        case "run":
            runAttributeExpanders();
            System.out.println("\tExpanded number of attributes to " + NAttributes());
            break;
        default:
            throw new Exception("Attribute expansion command not recognized: " + action);
    }
}

/**
 * Run commands relating to generating new attributes.
 *
 * @param Command Attribute generator command (e.g., "run")
 * @throws Exception
 */
protected void runAttributeGeneratorCommand(List<Object> Command) throws Exception {
    if (Command.isEmpty()) {
        throw new Exception("Available attribute generator commands: run, add, clear");
    }
    String action = Command.get(0).toString().toLowerCase();
    switch (action) {
        case "add":
            // Usage: add <method> <options...>
            String Method;
            List<Object> Options;
            try {
                Method = Command.get(1).toString();
                // "?" lists the available generator implementations
                if (Method.equals("?")) {
                    System.out.println(printImplmentingClasses(BaseAttributeGenerator.class, false));
                    return;
                }
                Options = Command.subList(2, Command.size());
            } catch (Exception e) {
                throw new Exception("Usage: <dataset> expand <method> <options...>");
            }
            BaseAttributeGenerator generator = (BaseAttributeGenerator) instantiateClass("attributes.generators." + Method, Options);
            addAttribueGenerator(generator);
            System.out.println("\tAdded a " + Method + " to list of attribute generators");
            break;
        case "clear":
            clearAttributeGenerators();
            System.out.println("\tCleared list of attribute generators.");
            break;
        case "run":
            int oldCount = NAttributes();
            runAttributeGenerators();
            System.out.format("\tGenerated %d new attributes\n", NAttributes() - oldCount);
            break;
        default:
            throw new Exception("Attribute generator command not recognized: " + action);
    }
}

/**
 * Run after generating attributes. Performs some operations to reduce the
 * amount of memory used.
 */
protected void finalizeGeneration() {
    for (int i = 0; i < NEntries(); i++) {
        getEntry(i).reduceMemoryFootprint();
    }
    // Suggest a GC pass now that per-entry scratch data has been released
    System.gc();
}
}
import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.nio.file.FileSystemException; import com.sleepycat.db.Database; import com.sleepycat.db.DatabaseType; public class DatabaseApp { private String tmpDir; DatabaseType mode; public static void main(String[] args) { DatabaseApp app = new DatabaseApp(); try { app.setup(args); app.run(); } catch (FileSystemException e) { System.err.println(e.getMessage()); } finally { app.cleanup(); } } public void setup(String[] args) throws FileSystemException { if (System.getProperty("os.name").startsWith("Windows")) { tmpDir = "C:\\tmp\\sajust_db"; } else { tmpDir = "/tmp/sajust_dir"; } /** Select the appropriate mode based on commandline arguments */ try{ if (args[0].equals("btree")) this.mode = DatabaseType.BTREE; if (args[0].equals("hash")) this.mode = DatabaseType.HASH; if (args[0].equals("indexfile")) this.mode = DatabaseType.UNKNOWN; } catch(ArrayIndexOutOfBoundsException e) { System.err.println("Please enter in a commandline argument."); System.err.println("Acceptable options are: btree, hash, indexfile"); System.exit(1); } File tDirFile = new File(tmpDir); if (tDirFile.exists()) tDirFile.delete(); if (!(new File(tmpDir)).mkdirs()) { throw new FileSystemException("Failed to create temp folder"); } } public void run() { /** Display the main menu, prompt the user for which db type is being used * */ while (true) { System.out.println("CMPUT 291 Project 2"); System.out.println(" System.out.println("Select Option"); System.out.println("1) Create and populate the database"); System.out.println("2) Retrieve records with a given key"); System.out.println("3) Retrieve records with a given data"); System.out.println("4) Retrieve records with a given range of key values"); System.out.println("5) Destroy the database"); System.out.println("6) Quit"); BufferedReader br = null; br = new BufferedReader(new InputStreamReader(System.in)); Integer inputnumber = 0; try { 
String input = br.readLine(); inputnumber = Integer.parseInt(input); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (NumberFormatException e) { System.out.println("Invalid Entry, please try again"); continue; } switch(inputnumber) { case 1: Database db = DbHelper.create(tmpDir + File.separator + "table", mode); DbHelper.populateTable(db, 10000); break; case 2: break; case 3: break; case 4: break; case 5: File dbFile = new File(tmpDir + File.separator + "table"); if (dbFile.exists()) dbFile.delete(); System.out.println("Deleted database file"); break; case 6: break; } } } public void cleanup() { File tDirFile = new File(tmpDir); File dbFile = new File(tmpDir + File.separator + "table"); if (dbFile.exists() || tDirFile.exists()){ dbFile.delete(); tDirFile.delete(); } } }
package bisq.asset.coins;

import bisq.asset.Coin;
import bisq.asset.RegexAddressValidator;

/**
 * Asset definition for Croat (ticker CROAT), validated with a simple
 * regular-expression address check.
 */
public class Croat extends Coin {

    public Croat() {
        // Addresses: a literal 'C' followed by 94 alphanumeric characters
        // (95 chars total).
        // NOTE(review): the class [1-9A-Za-z] also admits 'I', 'O' and 'l',
        // which CryptoNote-style base58 alphabets exclude, and the pattern
        // has no trailing '$' — confirm RegexAddressValidator uses
        // whole-string matching (e.g. String.matches semantics) before
        // relying on the missing anchor.
        super("Croat", "CROAT", new RegexAddressValidator("^C[1-9A-Za-z]{94}"));
    }
}
package cgeo.geocaching;

import cgeo.geocaching.concurrent.BlockingThreadPool;
import cgeo.geocaching.files.LocalStorage;
import cgeo.geocaching.geopoint.GeopointFormatter.Format;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.utils.Log;

import ch.boye.httpclientandroidlib.HttpResponse;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.DisplayMetrics;
import android.view.Display;
import android.view.WindowManager;

import java.io.File;
import java.util.concurrent.TimeUnit;

/**
 * Downloads Google static-map images for caches and their waypoints at several
 * zoom levels, stores them as local files, and serves them back as bitmaps.
 * Downloads run either synchronously or on a single background worker thread.
 */
public class StaticMapsProvider {
    private static final String PREFIX_PREVIEW = "preview";
    private static final String GOOGLE_STATICMAP_URL = "http://maps.google.com/maps/api/staticmap";
    private static final String SATELLITE = "satellite";
    private static final String ROADMAP = "roadmap";
    private static final String WAYPOINT_PREFIX = "wp";
    private static final String MAP_FILENAME_PREFIX = "map_";
    private static final String MARKERS_URL = "http://status.cgeo.org/assets/markers/";
    /** We assume there is no real usable image with less than 1k */
    private static final int MIN_MAP_IMAGE_BYTES = 1000;
    /** ThreadPool restricting this to 1 Thread. **/
    private static final BlockingThreadPool pool = new BlockingThreadPool(1, Thread.MIN_PRIORITY);

    /**
     * Resolve the local storage file for a map image.
     *
     * @param geocode    cache the map belongs to
     * @param prefix     filename discriminator (zoom level / waypoint / preview)
     * @param createDirs whether missing parent directories should be created
     */
    private static File getMapFile(final String geocode, String prefix, final boolean createDirs) {
        return LocalStorage.getStorageFile(geocode, MAP_FILENAME_PREFIX + prefix, false, createDirs);
    }

    /**
     * Download the five standard map levels (1 = closest satellite view,
     * 5 = widest roadmap view) for one coordinate. The level number is
     * appended to the prefix to form the filename.
     */
    private static void downloadDifferentZooms(final String geocode, String markerUrl, String prefix, String latlonMap, int edge, final Parameters waypoints) {
        downloadMap(geocode, 20, SATELLITE, markerUrl, prefix + '1', "", latlonMap, edge, edge, waypoints);
        downloadMap(geocode, 18, SATELLITE, markerUrl, prefix + '2', "", latlonMap, edge, edge, waypoints);
        downloadMap(geocode, 16, ROADMAP, markerUrl, prefix + '3', "", latlonMap, edge, edge, waypoints);
        downloadMap(geocode, 14, ROADMAP, markerUrl, prefix + '4', "", latlonMap, edge, edge, waypoints);
        downloadMap(geocode, 11, ROADMAP, markerUrl, prefix + '5', "", latlonMap, edge, edge, waypoints);
    }

    /**
     * Fetch a single static map from the Google Static Maps API and save it
     * under the given prefix. Responses that are not HTTP 200, or images
     * smaller than {@link #MIN_MAP_IMAGE_BYTES} (assumed unusable), are
     * discarded. Runs synchronously on the calling thread.
     */
    private static void downloadMap(String geocode, int zoom, String mapType, String markerUrl, String prefix, String shadow, String latlonMap, int width, int height, final Parameters waypoints) {
        final Parameters params = new Parameters(
                "center", latlonMap,
                "zoom", String.valueOf(zoom),
                "size", String.valueOf(width) + 'x' + String.valueOf(height),
                "maptype", mapType,
                "markers", "icon:" + markerUrl + '|' + shadow + latlonMap,
                "sensor", "false");
        if (waypoints != null) {
            // Additional marker parameters for the cache's waypoints.
            params.addAll(waypoints);
        }
        final HttpResponse httpResponse = Network.getRequest(GOOGLE_STATICMAP_URL, params);
        if (httpResponse != null) {
            if (httpResponse.getStatusLine().getStatusCode() == 200) {
                final File file = getMapFile(geocode, prefix, true);
                if (LocalStorage.saveEntityToFile(httpResponse, file)) {
                    // Delete image if it has no contents
                    final long fileSize = file.length();
                    if (fileSize < MIN_MAP_IMAGE_BYTES) {
                        file.delete();
                    }
                }
            } else {
                Log.d("StaticMapsProvider.downloadMap: httpResponseCode = " + httpResponse.getStatusLine().getStatusCode());
            }
        } else {
            Log.e("StaticMapsProvider.downloadMap: httpResponse is null");
        }
    }

    /**
     * Download all offline maps for a cache using the default display
     * dimensions of the application context.
     */
    public static void downloadMaps(cgCache cache) {
        final Display display = ((WindowManager) cgeoapplication.getInstance().getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        downloadMaps(cache, display);
    }

    /**
     * Download offline maps for the cache itself (plus a preview) and/or its
     * waypoints, depending on the user's offline-map settings. No-op when the
     * cache is null, has no geocode, or both settings are disabled.
     */
    private static void downloadMaps(cgCache cache, Display display) {
        if (cache == null) {
            Log.e("downloadMaps - missing input parameter cache");
            return;
        }
        if ((!Settings.isStoreOfflineMaps() && !Settings.isStoreOfflineWpMaps()) || StringUtils.isBlank(cache.getGeocode())) {
            return;
        }
        // Square map images sized to the larger display side.
        int edge = guessMaxDisplaySide(display);
        if (Settings.isStoreOfflineMaps() && cache.getCoords() != null) {
            storeCachePreviewMap(cache);
            storeCacheStaticMap(cache, edge, false);
        }
        // download static map for current waypoints
        if (Settings.isStoreOfflineWpMaps() && CollectionUtils.isNotEmpty(cache.getWaypoints())) {
            // remove all waypoint static map files due to origin cache waypoint id changed on saveCache
            LocalStorage.deleteFilesWithPrefix(cache.getGeocode(), MAP_FILENAME_PREFIX + WAYPOINT_PREFIX);
            for (cgWaypoint waypoint : cache.getWaypoints()) {
                storeWaypointStaticMap(cache.getGeocode(), edge, waypoint, false);
            }
        }
    }

    /**
     * Store the static maps for a single waypoint, sized for the given
     * activity's display.
     *
     * @param waitForResult when true, download synchronously on this thread
     */
    public static void storeWaypointStaticMap(cgCache cache, Activity activity, cgWaypoint waypoint, boolean waitForResult) {
        int edge = StaticMapsProvider.guessMaxDisplaySide(activity);
        storeWaypointStaticMap(cache.getGeocode(), edge, waypoint, waitForResult);
    }

    /**
     * Store the static maps for a single waypoint. No-op when the geocode,
     * the waypoint, or its coordinates are missing.
     */
    private static void storeWaypointStaticMap(final String geocode, int edge, cgWaypoint waypoint, final boolean waitForResult) {
        if (geocode == null) {
            Log.e("storeWaypointStaticMap - missing input parameter geocode");
            return;
        }
        if (waypoint == null) {
            Log.e("storeWaypointStaticMap - missing input parameter waypoint");
            return;
        }
        if (waypoint.getCoords() == null) {
            return;
        }
        String wpLatlonMap = waypoint.getCoords().format(Format.LAT_LON_DECDEGREE_COMMA);
        String wpMarkerUrl = getWpMarkerUrl(waypoint);
        // download map images in separate background thread for higher performance
        downloadMaps(geocode, wpMarkerUrl, WAYPOINT_PREFIX + waypoint.getId() + '_', wpLatlonMap, edge, null, waitForResult);
    }

    /**
     * Store the static maps for a cache, sized for the given activity's
     * display.
     *
     * @param waitForResult when true, download synchronously on this thread
     */
    public static void storeCacheStaticMap(cgCache cache, Activity activity, final boolean waitForResult) {
        int edge = guessMaxDisplaySide(activity);
        storeCacheStaticMap(cache, edge, waitForResult);
    }

    /**
     * Store the static maps for a cache, including markers for every waypoint
     * that has coordinates.
     * NOTE(review): unlike the waypoint variant this does not null-check the
     * cache or its coordinates — callers are expected to have done so.
     */
    private static void storeCacheStaticMap(final cgCache cache, final int edge, final boolean waitForResult) {
        final String latlonMap = cache.getCoords().format(Format.LAT_LON_DECDEGREE_COMMA);
        final Parameters waypoints = new Parameters();
        for (final cgWaypoint waypoint : cache.getWaypoints()) {
            if (waypoint.getCoords() == null) {
                continue;
            }
            final String wpMarkerUrl = getWpMarkerUrl(waypoint);
            waypoints.put("markers", "icon:" + wpMarkerUrl + '|' + waypoint.getCoords().format(Format.LAT_LON_DECDEGREE_COMMA));
        }
        // download map images in separate background thread for higher performance
        final String cacheMarkerUrl = getCacheMarkerUrl(cache);
        downloadMaps(cache.getGeocode(), cacheMarkerUrl, "", latlonMap, edge, waypoints, waitForResult);
    }

    /**
     * Download a single wide preview map (full display width, fixed height)
     * for the cache, synchronously on the calling thread.
     */
    public static void storeCachePreviewMap(final cgCache cache) {
        if (cache == null) {
            Log.e("storeCachePreviewMap - missing input parameter cache");
            return;
        }
        final String latlonMap = cache.getCoords().format(Format.LAT_LON_DECDEGREE_COMMA);
        final String markerUrl = MARKERS_URL + "my_location_mdpi.png";
        final Display display = ((WindowManager) cgeoapplication.getInstance().getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        DisplayMetrics metrics = new DisplayMetrics();
        display.getMetrics(metrics);
        final int width = metrics.widthPixels;
        // Fixed 110dp preview height scaled by screen density.
        final int height = (int) (110 * metrics.density);
        downloadMap(cache.getGeocode(), 15, ROADMAP, markerUrl, PREFIX_PREVIEW, "shadow:false|", latlonMap, width, height, null);
    }

    /**
     * Return the larger display side minus a 25px margin; used as the edge
     * length for square map images.
     */
    private static int guessMaxDisplaySide(Display display) {
        final int maxWidth = display.getWidth() - 25;
        final int maxHeight = display.getHeight() - 25;
        if (maxWidth > maxHeight) {
            return maxWidth;
        }
        return maxHeight;
    }

    /** Convenience overload taking the display from an activity. */
    private static int guessMaxDisplaySide(Activity activity) {
        return guessMaxDisplaySide(((WindowManager) activity.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay());
    }

    /**
     * Download all zoom levels either synchronously (waitForResult) or queued
     * on the single-thread pool; enqueueing waits up to 20 seconds for a free
     * slot and logs (drops the task) on interruption.
     */
    private static void downloadMaps(final String geocode, final String markerUrl, final String prefix, final String latlonMap, final int edge,
            final Parameters waypoints, boolean waitForResult) {
        if (waitForResult) {
            downloadDifferentZooms(geocode, markerUrl, prefix, latlonMap, edge, waypoints);
        } else {
            final Runnable currentTask = new Runnable() {
                @Override
                public void run() {
                    downloadDifferentZooms(geocode, markerUrl, prefix, latlonMap, edge, waypoints);
                }
            };
            try {
                pool.add(currentTask, 20, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Log.e("StaticMapsProvider.downloadMaps error adding task: " + e.toString());
            }
        }
    }

    /**
     * Build the marker icon URL for a cache: type id plus an optional
     * "_found" or "_disabled" suffix.
     */
    private static String getCacheMarkerUrl(final cgCache cache) {
        StringBuilder url = new StringBuilder(MARKERS_URL);
        url.append("marker_cache_").append(cache.getType().id);
        if (cache.isFound()) {
            url.append("_found");
        } else if (cache.isDisabled()) {
            url.append("_disabled");
        }
        url.append(".png");
        return url.toString();
    }

    /**
     * Build the marker icon URL for a waypoint.
     * NOTE(review): a waypoint without a type yields the literal string
     * "marker_waypoint_null.png" — presumably unintended; verify against the
     * marker assets.
     */
    private static String getWpMarkerUrl(final cgWaypoint waypoint) {
        String type = waypoint.getWaypointType() != null ? waypoint.getWaypointType().id : null;
        return MARKERS_URL + "marker_waypoint_" + type + ".png";
    }

    /**
     * Delete the five stored map files of one waypoint. No-op for ids <= 0.
     */
    public static void removeWpStaticMaps(int wp_id, final String geocode) {
        if (wp_id <= 0) {
            return;
        }
        for (int level = 1; level <= 5; level++) {
            try {
                StaticMapsProvider.getMapFile(geocode, WAYPOINT_PREFIX + wp_id + '_' + level, false).delete();
            } catch (Exception e) {
                Log.e("StaticMapsProvider.removeWpStaticMaps: " + e.toString());
            }
        }
    }

    /**
     * Check if at least one map file exists for the given cache.
     *
     * @param cache the cache whose stored map levels are checked
     * @return <code>true</code> if at least one mapfile exists; <code>false</code> otherwise
     */
    public static boolean hasStaticMap(final cgCache cache) {
        if (cache == null) {
            return false;
        }
        final String geocode = cache.getGeocode();
        if (StringUtils.isBlank(geocode)) {
            return false;
        }
        for (int level = 1; level <= 5; level++) {
            File mapFile = StaticMapsProvider.getMapFile(geocode, String.valueOf(level), false);
            if (mapFile != null && mapFile.exists()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks if at least one map file exists for the given geocode and waypoint ID.
     *
     * @param geocode    cache the waypoint belongs to
     * @param waypointId id of the waypoint
     * @return <code>true</code> if at least one mapfile exists; <code>false</code> otherwise
     */
    public static boolean hasStaticMapForWaypoint(String geocode, int waypointId) {
        for (int level = 1; level <= 5; level++) {
            File mapFile = StaticMapsProvider.getMapFile(geocode, WAYPOINT_PREFIX + waypointId + "_" + level, false);
            if (mapFile != null && mapFile.exists()) {
                return true;
            }
        }
        return false;
    }

    /** Load the stored preview map of a cache, or null if absent. */
    public static Bitmap getPreviewMap(final String geocode) {
        return decodeFile(StaticMapsProvider.getMapFile(geocode, PREFIX_PREVIEW, false));
    }

    /** Load one stored zoom level of a waypoint map, or null if absent. */
    public static Bitmap getWaypointMap(final String geocode, int waypoint_id, int level) {
        return decodeFile(StaticMapsProvider.getMapFile(geocode, WAYPOINT_PREFIX + waypoint_id + "_" + level, false));
    }

    /** Load one stored zoom level of a cache map, or null if absent. */
    public static Bitmap getCacheMap(final String geocode, int level) {
        return decodeFile(StaticMapsProvider.getMapFile(geocode, String.valueOf(level), false));
    }

    /** Decode a stored map file into a bitmap, or return null if missing. */
    private static Bitmap decodeFile(final File mapFile) {
        // avoid exception in system log, if we got nothing back from Google.
        if (mapFile.exists()) {
            return BitmapFactory.decodeFile(mapFile.getPath());
        }
        return null;
    }
}
package edu.mit.simile.butterfly; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; import java.io.StringWriter; import java.net.URL; import java.net.URLConnection; import java.security.AccessControlException; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TimeZone; import java.util.Timer; import java.util.TimerTask; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.collections.ExtendedProperties; import org.apache.log4j.PropertyConfigurator; import org.apache.velocity.app.VelocityEngine; import org.apache.velocity.runtime.RuntimeConstants; import org.mozilla.javascript.Context; import org.mozilla.javascript.ContextFactory; import org.mozilla.javascript.Script; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import edu.mit.simile.butterfly.velocity.ButterflyResourceLoader; import edu.mit.simile.butterfly.velocity.Super; /** * This is the Butterfly servlet and the main entry point * for a Butterfly-powered web application. This servlet is * responsible for loading, configuring and wire together * the various modules that compose your webapp and then * manages the dispatching of requests to the modules that * are supposed to handle them. 
*/ public class Butterfly extends HttpServlet { public static final String HOST_HEADER = "X-Forwarded-Host"; public static final String CONTEXT_HEADER = "X-Context-Path"; private static final long serialVersionUID = 1938797827088619577L; private static final long watcherDelay = 1000; public static final String NAME = "butterfly.name"; public static final String APPENGINE = "butterfly.appengine"; public static final String AUTORELOAD = "butterfly.autoreload"; public static final String HOME = "butterfly.home"; public static final String ZONE = "butterfly.zone"; public static final String BASE_URL = "butterfly.url"; public static final String DEFAULT_ZONE = "butterfly.default.zone"; public static final String DEFAULT_MOUNTPOINT = "butterfly.default.mountpoint"; public static final String MODULES_IGNORE = "butterfly.modules.ignore"; public static final String MODULES_PATH = "butterfly.modules.path"; public static final String MAIN_ZONE = "main"; final static List<String> CONTROLLER; static { CONTROLLER = new ArrayList<String>(); CONTROLLER.add("controller.js"); } public static String getTrueHost(HttpServletRequest request) { String host = request.getHeader(HOST_HEADER); if (host != null) { String[] hosts = host.split(","); host = hosts[hosts.length - 1]; } return host; } public static String getTrueContextPath(HttpServletRequest request, boolean absolute) { String context = request.getHeader(CONTEXT_HEADER); if (context != null) { if (context.charAt(context.length() - 1) == '/') context = context.substring(0, context.length() - 1); } else { context = request.getContextPath(); } if (absolute) { return getFullHost(request) + context; } else { return context; } } public static String getTrueRequestURI(HttpServletRequest request, boolean absolute) { return getTrueContextPath(request,absolute) + request.getPathInfo(); } public static String getFullHost(HttpServletRequest request) { StringBuffer prefix = new StringBuffer(); String protocol = request.getScheme(); 
prefix.append(protocol); prefix.append(": String proxy = getTrueHost(request); if (proxy != null) { prefix.append(proxy); } else { prefix.append(request.getServerName()); int port = request.getServerPort(); if (!((protocol.equals("http") && port == 80) || (protocol.equals("https") && port == 443))) { prefix.append(':'); prefix.append(port); } } return prefix.toString(); } public static boolean isGAE(ServletConfig config) { return (config.getServletContext().getServerInfo().indexOf("Google App Engine") != -1); } transient private Logger _logger; private boolean _autoreload; private boolean _appengine; private String _name; private String _default_mountpoint; private int _routingCookieMaxAge; private String[] _ignores; transient protected Timer _timer; transient protected ButterflyClassLoader _classLoader; transient protected ButterflyScriptWatcher _scriptWatcher; transient protected ServletConfig _config; transient protected ServletContext _context; transient protected ButterflyMounter _mounter; protected ExtendedProperties _properties; protected File _contextDir; protected File _homeDir; protected File _webInfDir; protected Exception _configurationException; protected boolean _configured = false; protected ContextFactory contextFactory; class ButterflyContextFactory extends ContextFactory { protected void onContextCreated(Context cx) { cx.setOptimizationLevel(9); super.onContextCreated(cx); } } @Override public void init(ServletConfig config) throws ServletException { super.init(config); _config = config; _appengine = isGAE(config); _name = System.getProperty(NAME, "butterfly"); _context = config.getServletContext(); _context.setAttribute(NAME, _name); _context.setAttribute(APPENGINE, _appengine); _contextDir = new File(_context.getRealPath("/")); _webInfDir = new File(_contextDir, "WEB-INF"); _properties = new ExtendedProperties(); _mounter = new ButterflyMounter(); // Load the butterfly properties String props = System.getProperty("butterfly.properties"); File 
butterflyProperties = (props == null) ? new File(_webInfDir, "butterfly.properties") : new File(props); BufferedInputStream is = null; try { is = new BufferedInputStream(new FileInputStream(butterflyProperties)); _properties.load(is); } catch (FileNotFoundException e) { throw new ServletException("Could not find butterfly properties file",e); } catch (IOException e) { throw new ServletException("Could not read butterfly properties file",e); } finally { try { is.close(); } catch (Exception e) { // ignore } } // Process eventual properties includes String includes = _properties.getString("butterfly.includes"); if (includes != null) { for (String prop : includes.split(",")) { File prop_file = (prop.startsWith("/")) ? new File(prop) : new File(_webInfDir, prop); try { is = new BufferedInputStream(new FileInputStream(prop_file)); ExtendedProperties p = new ExtendedProperties(); p.load(is); _properties.combine(p); } catch (Exception e) { // ignore } finally { try { is.close(); } catch (Exception e) { // ignore } } } } // Overload with properties set from the command line // using the -Dkey=value parameters to the JVM Properties systemProperties = System.getProperties(); for (Iterator<Object> i = systemProperties.keySet().iterator(); i.hasNext(); ) { String key = (String) i.next(); String value = systemProperties.getProperty(key); _properties.setProperty(key, value); } _default_mountpoint = _properties.getString(DEFAULT_MOUNTPOINT, "/modules"); _ignores = _properties.getString(MODULES_IGNORE, "").split(","); _autoreload = _properties.getBoolean(AUTORELOAD, false); if (!_appengine) { String log4j = System.getProperty("butterfly.log4j"); File logProperties = (log4j == null) ? 
new File(_webInfDir, "log4j.properties") : new File(log4j); if (logProperties.exists()) { if (_autoreload) { PropertyConfigurator.configureAndWatch(logProperties.getAbsolutePath(), watcherDelay); } else { PropertyConfigurator.configure(logProperties.getAbsolutePath()); } } } _logger = LoggerFactory.getLogger(_name); _logger.info("Starting {} ...", _name); _logger.info("Properties loaded from {}", butterflyProperties); if (_autoreload) _logger.info("Autoreloading is enabled"); if (_appengine) _logger.info("Running in Google App Engine"); _logger.debug("> init"); _logger.debug("> initialize classloader"); try { _classLoader = AccessController.doPrivileged ( new PrivilegedAction<ButterflyClassLoader>() { public ButterflyClassLoader run() { return new ButterflyClassLoader(this.getClass().getClassLoader()); } } ); Thread.currentThread().setContextClassLoader(_classLoader); _classLoader.watch(butterflyProperties); // reload if the butterfly properties change contextFactory = new ButterflyContextFactory(); contextFactory.initApplicationClassLoader(_classLoader); // tell rhino to use this classloader as well ContextFactory.initGlobal(contextFactory); if (_autoreload && !_appengine) { _timer = new Timer(true); TimerTask classloaderWatcher = _classLoader.getClassLoaderWatcher(new Trigger(_contextDir)); _timer.schedule(classloaderWatcher, watcherDelay, watcherDelay); } } catch (Exception e) { throw new ServletException("Failed to load butterfly classloader", e); } _logger.debug("< initialize classloader"); if (_autoreload && !_appengine) { _logger.debug("> initialize script watcher"); _scriptWatcher = new ButterflyScriptWatcher(); _timer.schedule(_scriptWatcher, watcherDelay, watcherDelay); _logger.debug("< initialize script watcher"); } this.configure(); _logger.debug("< init"); } @Override public void destroy() { _logger.info("Stopping Butterfly..."); for (ButterflyModule m : _modulesByName.values()) { try { _logger.debug("> destroying {}", m); m.destroy(); _logger.debug("< 
destroying {}", m); } catch (Exception e) { _logger.error("Exception caught while destroying '" + m + "'", e); } } if (_timer != null) { _timer.cancel(); } _logger.info("done."); } @SuppressWarnings("unchecked") public void configure() { _logger.debug("> configure"); _logger.info("> process properties"); try { String homePath = _properties.getString(HOME); if (homePath == null) { _homeDir = _contextDir; } else { _homeDir = new File(homePath); } _logger.info("Butterfly home: {}", _homeDir); Iterator<String> i = _properties.getKeys(ZONE); while (i.hasNext()) { String zone = i.next(); String path = _properties.getString(zone); zone = zone.substring(ZONE.length() + 1); _logger.info("Zone path: [{}] -> {}", zone, path); _mounter.registerZone(zone, path); } String defaultZone = _properties.getString(DEFAULT_ZONE); if (defaultZone != null) { _logger.info("Default zone is: '{}'", defaultZone); _mounter.setDefaultZone(defaultZone); } else { String baseURL = _properties.getString(BASE_URL,"/"); _mounter.registerZone(MAIN_ZONE, baseURL); _mounter.setDefaultZone(MAIN_ZONE); } String language = _properties.getString("butterfly.locale.language"); String country = _properties.getString("butterfly.locale.country"); String variant = _properties.getString("butterfly.locale.variant"); if (language != null) { if (country != null) { if (variant != null) { Locale.setDefault(new Locale(language, country, variant)); } else { Locale.setDefault(new Locale(language, country)); } } else { Locale.setDefault(new Locale(language)); } } String timeZone = _properties.getString("butterfly.timeZone"); if (timeZone != null) { TimeZone.setDefault(TimeZone.getTimeZone(timeZone)); } _routingCookieMaxAge = _properties.getInt("butterfly.routing.cookie.maxage",-1); } catch (Exception e) { _configurationException = new Exception("Failed to load butterfly properties", e); } _logger.info("< process properties"); _logger.info("> load modules"); // load modules from the properties found in the 
butterfly.properties List<String> paths = _properties.getList(MODULES_PATH); for (String path : paths) { findModulesIn(absolutize(_homeDir, path.trim())); } // load modules from the path found in the servlet init properties String servlet_paths = this._config.getInitParameter(MODULES_PATH); if (servlet_paths != null) { for (String path : servlet_paths.split(",")) { findModulesIn(absolutize(_homeDir, path.trim())); } } _logger.info("< load modules"); _logger.info("> create modules"); for (String name : _moduleProperties.keySet()) { createModule(name); } _logger.info("< create modules"); _logger.info("> load module wirings"); ExtendedProperties wirings = new ExtendedProperties(); try { // Load the wiring properties File moduleWirings = absolutize(_homeDir, _properties.getString("butterfly.modules.wirings","WEB-INF/modules.properties")); _logger.info("Loaded module wirings from: {}", moduleWirings); _classLoader.watch(moduleWirings); // reload if the module wirings change FileInputStream fis = new FileInputStream(moduleWirings); wirings.load(fis); fis.close(); } catch (Exception e) { _configurationException = new Exception("Failed to load module wirings", e); } _logger.info("< load module wirings"); _logger.info("> wire modules"); try { wireModules(wirings); } catch (Exception e) { _configurationException = new Exception("Failed to wire modules", e); } _logger.info("< wire modules"); _logger.info("> configure modules"); try { configureModules(); } catch (Exception e) { _configurationException = new Exception("Failed to configure modules", e); } _logger.info("< configure modules"); _logger.info("> initialize modules"); Set<String> initialized = new HashSet<String>(); Set<String> initializing = new HashSet<String>(); for (String name : _modulesByName.keySet()) { initializeModule(name, initialized, initializing); } _logger.info("< initialize modules"); _configured = true; _logger.debug("< configure"); } protected void initializeModule(String name, Set<String> 
initialized, Set<String> initializing) { ButterflyModule m = _modulesByName.get(name); if (m != null && !initialized.contains(name)) { _logger.debug("> initialize " + m.getName()); if (initializing.contains(name)) { _logger.warn("Circular dependencies detected involving module " + m); } else { initializing.add(name); for (String depends : m.getDependencies().keySet()) { initializeModule(depends, initialized, initializing); } initializing.remove(name); } try { m.init(getServletConfig()); } catch (Exception e) { _configurationException = new Exception("Failed to initialize module " + m, e); } _logger.debug("< initialize " + m.getName()); initialized.add(name); } } @Override @SuppressWarnings("unchecked") public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String method = request.getMethod(); String path = request.getPathInfo(); String urlQuery = request.getQueryString(); if (_mounter != null) { Zone zone = _mounter.getZone(request); if (_logger.isDebugEnabled()) { _logger.debug("> " + method + " [" + ((zone != null) ? zone.getName() : "") + "] " + path + ((urlQuery != null) ? "?" + urlQuery : "")); Enumeration<String> en = request.getHeaderNames(); while (en.hasMoreElements()) { String header = en.nextElement(); _logger.trace("{}: {}", header, request.getHeader(header)); } } else if (_logger.isInfoEnabled()) { String zoneName = (zone != null) ? 
zone.getName() : ""; _logger.info("{} {} [{}]", new String[] { method,path,zoneName }); } setRoutingCookie(request, response); try { if (_configured) { if (_configurationException == null) { ButterflyModule module = _mounter.getModule(path,zone); _logger.debug("Module '{}' will handle the request", module.getName()); String localPath = module.getRelativePath(request); if (!module.process(localPath, request, response)) { response.sendError(HttpServletResponse.SC_NOT_FOUND); } } else { error(response, "Butterfly Error", "Butterfly incurred in the following errors while initializing:", _configurationException); } } else { delay(response, "Butterfly is still initializing..."); } } catch (FileNotFoundException e) { response.sendError(HttpServletResponse.SC_NOT_FOUND); } catch (Exception e) { error(response, "Butterfly Error", "Butterfly caught the following error while processing the request:", e); } response.flushBuffer(); if (_logger.isDebugEnabled()) _logger.debug("< " + method + " [" + ((zone != null) ? zone.getName() : "") + "] " + path + ((urlQuery != null) ? "?" + urlQuery : "")); } else { response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE); } } final static private String dependencyPrefix = "requires"; final static private String implementsProperty = "implements"; final static private String extendsProperty = "extends"; protected Map<String,ButterflyModule> _modulesByName = new HashMap<String,ButterflyModule>(); protected Map<String,Map<String,ButterflyModule>> _modulesByInterface = new HashMap<String,Map<String,ButterflyModule>>(); protected Map<String,ExtendedProperties> _moduleProperties = new HashMap<String,ExtendedProperties>(); protected Map<String,Boolean> _created = new HashMap<String,Boolean>(); final static private String routingCookie = "host"; /* * This method adds a cookie to the response that will be used by mod_proxy_balancer * to know what server is supposed to be handling all the requests of this user agent. 
*/ protected void setRoutingCookie(HttpServletRequest request, HttpServletResponse response) { Cookie[] cookies = request.getCookies(); if (cookies != null) { for (Cookie cookie : cookies) { if (routingCookie.equals(cookie.getName())) { return; } } } Cookie cookie = new Cookie(routingCookie, "." + _name); // IMPORTANT: the initial dot is required by mod_proxy_balancer! cookie.setMaxAge(_routingCookieMaxAge); // delete at end of browser session cookie.setPath("/"); response.addCookie(cookie); } protected File absolutize(File base, String location) { if (location == null || location.length() == 0) { // we got an empty location return base; } else if (location.indexOf(':') > 0) { // we got an absolute windows location (ie c:\blah) return new File(location); } else if (location.charAt(0) == '/' || location.charAt(0) == '\\') { // we got an absolute location return new File(location); } else { // we got a relative location return new File(base, location); } } protected static final String PATH_PROP = "__path__"; protected void findModulesIn(File f) { _logger.debug("look for modules in {}", f); File modFile = new File(f,"MOD-INF"); if (modFile.exists()) { _logger.trace("> findModulesIn({})", f); try { String name = f.getName(); ExtendedProperties p = new ExtendedProperties(); File propFile = new File(modFile,"module.properties"); if (propFile.exists()) { _classLoader.watch(propFile); // reload if the the module properties change BufferedInputStream stream = new BufferedInputStream(new FileInputStream(propFile)); p.load(stream); stream.close(); } p.addProperty(PATH_PROP, f.getAbsolutePath()); if (p.containsKey("name")) { name = p.getString("name"); } boolean load = true; for (String s : _ignores) { if (name.matches(s)) { load = false; break; } } if (load) { _moduleProperties.put(name, p); } } catch (Exception e) { _logger.error("Error finding module wirings", e); } _logger.trace("< findModulesIn({})", f); } else { File[] files = f.listFiles(); if (files != null) { for 
(int i = 0; i < files.length; i++) { File file = files[i]; try { if (file.isDirectory()) { findModulesIn(file); } } catch (AccessControlException e) { // skip // NOTE: this is needed for Google App Engine that doesn't like us snooping around the internal file system } } } } } protected ButterflyModule createModule(String name) { _logger.trace("> Creating module: {}", name); if (_modulesByName.containsKey(name)) { _logger.trace("< Module '{}' already exists", name); return _modulesByName.get(name); } ExtendedProperties p = _moduleProperties.get(name); File path = new File(p.getString(PATH_PROP)); _logger.debug("Module path: {}", path); File classes = new File(path,"MOD-INF/classes"); if (classes.exists()) { _classLoader.addRepository(classes); } File libs = new File(path,"MOD-INF/lib"); if (libs.exists()) { _classLoader.addRepository(libs); } ButterflyModule m = new ButterflyModuleImpl(); // process module's controller String manager = p.getString("module-impl"); if (manager != null && !manager.equals(m.getClass().getName())) { try { Class<?> c = _classLoader.loadClass(manager); m = (ButterflyModule) c.newInstance(); } catch (Exception e) { _logger.error("Error loading special module manager", e); } } m.setName(name); m.setPath(path); m.setModules(_modulesByName); m.setMounter(_mounter); m.setClassLoader(_classLoader); m.setTimer(_timer); _modulesByName.put(name,m); // process inheritance ButterflyModule parentModule = null; String parentName = p.getString(extendsProperty); if (parentName != null) { if (_moduleProperties.containsKey(parentName)) { if (_modulesByName.containsKey(parentName)) { parentModule = _modulesByName.get(parentName); } else { parentModule = createModule(parentName); } } else { throw new RuntimeException("Cannot wire module '" + name + "' because the extended module '" + parentName + "' is not defined."); } } if (parentModule != null) { m.setExtended(parentModule); parentModule.addExtendedBy(m); } _logger.trace("< Creating module: {}", name); 
return m; } @SuppressWarnings("unchecked") protected void wireModules(ExtendedProperties wirings) { _logger.trace("> wireModules()"); _logger.info("mounting modules"); for (String name : _moduleProperties.keySet()) { _logger.trace("> Mounting module: {}", name); ButterflyModule m = _modulesByName.get(name); String mountPointStr = wirings.getString(m.getName()); if (mountPointStr == null) { String moduleName = m.getName(); String mountPoint = _default_mountpoint + "/" + m.getName(); _logger.info("No mount point defined for module '" + moduleName + "', mounting to '" + mountPoint + "'"); mountPointStr = mountPoint; } MountPoint mountPoint = new MountPoint(mountPointStr); if (_mounter.isRegistered(mountPoint)) { throw new RuntimeException("Cannot have two different modules with the same mount point '" + mountPoint + "'."); } else { _mounter.register(mountPoint, m); } _logger.trace("< Mounting module: {}", name); } for (String name : _moduleProperties.keySet()) { _logger.trace("> Expanding properties for module: {}", name); ButterflyModule m = _modulesByName.get(name); ExtendedProperties p = _moduleProperties.get(name); ButterflyModule extended = m.getExtendedModule(); while (extended != null) { _logger.trace("> Merging properties from extended module: {}", name); ExtendedProperties temp = p; p = _moduleProperties.get(extended.getName()); p.combine(temp); _logger.trace("< Merging properties from extended module: {} -> {}", name, p); extended = extended.getExtendedModule(); } _moduleProperties.put(name,p); List<String> implementations = p.getList(implementsProperty); if (implementations != null) { for (String i : implementations) { Map<String, ButterflyModule> map = _modulesByInterface.get(i); if (map == null) { map = new HashMap<String,ButterflyModule>(); _modulesByInterface.put(i, map); } map.put(name, m); m.setImplementation(i); } } _logger.trace("< Expanding properties for module: {}", name); } for (String name : _moduleProperties.keySet()) { _logger.trace("> Inject 
dependencies in module: {}", name); ExtendedProperties p = _moduleProperties.get(name); ButterflyModule m = _modulesByName.get(name); for (Object o : p.keySet()) { String s = (String) o; if (s.equals(dependencyPrefix)) { for (Object oo : p.getList(s)) { String dep = (String) oo; _logger.trace("> Processing dependency: {}", dep); dep = dep.trim(); Map<String,ButterflyModule> modules = _modulesByInterface.get(dep); if (modules != null) { if (modules.size() == 1) { // if there's only one module implementing that interface, wiring is automatic setDependency(m, dep, modules.values().iterator().next()); } else { ButterflyModule parent = m.getExtendedModule(); do { String wiredDependency = wirings.getString(name + "." + dep); if (wiredDependency != null) { setDependency(m, dep, _modulesByName.get(wiredDependency)); break; } else { if (parent != null) { name = parent.getName(); } } } while (parent != null); } } else { throw new RuntimeException("Cannot wire module '" + name + "' because no module implements the required interface '" + dep + "'"); } _logger.trace("< Processing dependency: {}", dep); } } } _logger.trace("< Inject dependencies in module: {}", name); } ButterflyModule rootModule = _mounter.getRootModule(); // in case nothing defined the root mount point use the default one if (rootModule == null) { rootModule = _modulesByName.get("main"); } // in case not even the 'main' module is available, give up if (rootModule == null) { throw new RuntimeException("Cannot initialize the modules because I can't guess which module to mount to '/'"); } _logger.trace("< wireModules()"); } @SuppressWarnings("unchecked") protected void configureModules() { _logger.trace("> configureModules()"); for (String name : _moduleProperties.keySet()) { _logger.trace("> Configuring module: {}", name); ExtendedProperties p = _moduleProperties.get(name); ButterflyModule m = _modulesByName.get(name); // make the system properties accessible to the modules m.setProperties(_properties); try { 
if (p.getBoolean("templating", Boolean.TRUE)) { _logger.trace("> enabling templating"); // load the default velocity properties Properties properties = new Properties(); File velocityProperties = new File(_webInfDir, "velocity.properties"); _classLoader.watch(velocityProperties); // reload if the velocity properties change FileInputStream fis = new FileInputStream(velocityProperties); properties.load(fis); fis.close(); // set properties for resource loading properties.setProperty("resource.loader", "butterfly"); properties.setProperty("butterfly.resource.loader.class", ButterflyResourceLoader.class.getName()); properties.setProperty("butterfly.resource.loader.cache", "true"); properties.setProperty("butterfly.resource.loader.modificationCheckInterval", "1"); properties.setProperty("butterfly.resource.loader.description", "Butterfly Resource Loader"); // set properties for macros properties.setProperty("velocimacro.library", p.getString("templating.macros", "")); // Set our special parent injection directive properties.setProperty("userdirective", Super.class.getName()); // Set logging properties if (_appengine) { properties.setProperty(RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.JdkLogChute"); } else { properties.setProperty(RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.Log4JLogChute"); properties.setProperty("runtime.log.logsystem.log4j.logger", "velocity"); } // create a module-specific velocity engine VelocityEngine velocity = new VelocityEngine(); velocity.setApplicationAttribute("module", m); // this is how we pass the module to the resource loader velocity.init(properties); // inject the template engine in the module m.setTemplateEngine(velocity); _logger.trace("< enabling templating"); } List<String> scriptables = p.getList("scriptables"); if (scriptables.size() > 0) { Context context = Context.enter(); BufferedReader initializerReader = null; for (String scriptable : scriptables) { if 
(!scriptable.equals("")) { try { _logger.trace("> adding scriptable object: {}", scriptable); @SuppressWarnings("rawtypes") Class c = _classLoader.loadClass(scriptable); ButterflyScriptableObject o = (ButterflyScriptableObject) c.newInstance(); setScriptable(m, o); URL initializer = c.getResource("init.js"); if (initializer != null) { initializerReader = new BufferedReader(new InputStreamReader(initializer.openStream())); setScript(m, initializer, context.compileReader(initializerReader, "init.js", 1, null)); _scriptWatcher.watch(initializer,m); _logger.trace("Parsed scriptable javascript initializer successfully"); } _logger.trace("< adding scriptable object: {}", scriptable); } catch (Exception e) { _logger.trace("Error initializing scriptable object '{}': {}", scriptable, e); } finally { if (initializerReader != null) initializerReader.close(); } } } Context.exit(); } List<String> controllers = p.getList("controller", CONTROLLER); Set<URL> controllerURLs = new HashSet<URL>(controllers.size()); for (String controller : controllers) { URL controllerURL = m.getResource("MOD-INF/" + controller); if (controllerURL != null) { controllerURLs.add(controllerURL); } } if (controllerURLs.size() > 0) { _logger.trace("> enabling javascript control"); Context context = Context.enter(); BufferedReader initializerReader = null; try { URL initializer = this.getClass().getClassLoader().getResource("edu/mit/simile/butterfly/Butterfly.js"); initializerReader = new BufferedReader(new InputStreamReader(initializer.openStream())); setScript(m, initializer, context.compileReader(initializerReader, "Butterfly.js", 1, null)); watch(initializer,m); _logger.trace("Parsed javascript initializer successfully"); } finally { if (initializerReader != null) initializerReader.close(); } BufferedReader controllerReader = null; for (URL controllerURL : controllerURLs) { try{ controllerReader = new BufferedReader(new InputStreamReader(controllerURL.openStream())); setScript(m, controllerURL, 
context.compileReader(controllerReader, controllerURL.toString(), 1, null)); watch(controllerURL,m); _logger.trace("Parsed javascript controller successfully: {}", controllerURL); } finally { if (controllerReader != null) controllerReader.close(); } } Context.exit(); _logger.trace("< enabling javascript control"); } } catch (Exception e) { _logger.error("Error enabling javascript control",e); } _logger.trace("< Configuring module: {}", name); } _logger.trace("< configureModules()"); } protected void setDependency(ButterflyModule subj, String dep, ButterflyModule obj) { subj.setDependency(dep, obj); ButterflyModule extended = subj.getExtendedModule(); if (extended != null) { setDependency(extended, dep, obj); } } protected void setScriptable(ButterflyModule mod, ButterflyScriptableObject scriptable) { mod.setScriptable(scriptable); ButterflyModule extended = mod.getExtendedModule(); if (extended != null) { setScriptable(extended, scriptable); } } protected void watch(URL script, ButterflyModule module) throws IOException { if (_scriptWatcher != null) { _scriptWatcher.watch(script, module); } } /* * NOTE(SM): I'm fully aware that these embedded HTML snippets are really ugly, but I don't * want to depend on velocity for error reporting as that would prevent us from reporting * errors about velocity's dependency itself. */ String header = "<html>" + " <head>" + " </head>" + " <body>"; String footer = "</body></html>"; protected void delay(HttpServletResponse response, String title) throws IOException { response.setContentType("text/html"); response.setCharacterEncoding("UTF-8"); PrintWriter writer = response.getWriter(); writer.println(header); writer.println("<h1>" + title + "</h1>"); writer.println("<script>setTimeout(function() { window.location = '.' 
}, 3000);</script>"); writer.println(footer); writer.close(); } protected void error(HttpServletResponse response, String title, String msg, Exception e) throws IOException { StringWriter stringWriter = new StringWriter(); PrintWriter writer = new PrintWriter(stringWriter); writer.println(title); writer.println(msg); if (e != null) { e.printStackTrace(writer); } writer.close(); response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, stringWriter.toString()); } static protected void setScript(ButterflyModule mod, URL location, Script script) { mod.setScript(location, script); ButterflyModule extended = mod.getExtendedModule(); if (extended != null) { setScript(extended, location, script); } } /* * This is the trigger invoked by the butterfly classloader if any of the observed classes or files * has changed. This trigger attempts to find the Butterfly.class on disk and changes its lastModified * time if found. This has no effect in some servlet containers, but in others (for example the Jetty * plugin for Maven) this triggers a context autoreload. * NOTE: this is only invoked when files that were found when the application started are modified * Adding new files to the classpath does not trigger a restart! 
*/ private static class Trigger implements Runnable { final static private Logger _logger = LoggerFactory.getLogger("butterfly.trigger"); private List<File> tries = new ArrayList<File>(); Trigger(File context) { File web_inf = new File(context, "WEB-INF"); File classes = new File(web_inf, "classes"); if (classes.exists()) { tries.add(findFile(classes, ".class")); } File libs = new File(web_inf, "lib"); if (libs.exists()) { tries.add(findFile(libs, ".jar")); } } public void run() { _logger.info("classloader changed trigger invoked"); for (File f : tries) { _logger.debug("trying: " + f.getAbsolutePath()); if (f.exists()) { f.setLastModified((new Date()).getTime()); _logger.debug(" touched!!"); return; } } _logger.warn("could not find anything to touch"); } private File findFile(File start, String extension) { for (File f : start.listFiles()) { if (f.isDirectory()) { return findFile(f, extension); } else { if (f.getName().endsWith(extension)) { return f; } } } return null; } } } class ButterflyScriptWatcher extends TimerTask { final static private Logger _logger = LoggerFactory.getLogger("butterfly.script_watcher"); private Map<URL,ButterflyModule> scripts = new HashMap<URL,ButterflyModule>(); private Map<URL,Long> lastModifieds = new HashMap<URL,Long>(); protected void watch(URL script, ButterflyModule module) throws IOException { _logger.debug("Watching {}", script); this.lastModifieds.put(script, script.openConnection().getLastModified()); this.scripts.put(script, module); } public void run() { for (URL url : this.scripts.keySet()) { try { URLConnection connection = url.openConnection(); long lastModified = connection.getLastModified(); if (lastModified > this.lastModifieds.get(url)) { _logger.debug("{} has changed, reparsing...", url); this.lastModifieds.put(url, lastModified); ButterflyModule module = this.scripts.get(url); BufferedReader reader = null; try { Context context = Context.enter(); reader = new BufferedReader(new InputStreamReader(url.openStream())); 
Butterfly.setScript(module, url, context.compileReader(reader, url.getFile(), 1, null)); _logger.info("{} reloaded", url); Context.exit(); } finally { if (reader != null) reader.close(); } } connection.getInputStream().close(); // NOTE(SM): this avoids leaking file descriptions in some JVMs } catch (Exception e) { _logger.error("", e); } } } }
import org.junit.Test;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.rules.ExpectedException;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

/**
 * Exercises {@code Prime.nth(int)}: the n-th prime number (1-based) and the
 * rejection of non-positive indices. All but the first case are ignored,
 * TDD-style, until the implementation catches up.
 */
public class PrimeTest {

    /** Captures the IllegalArgumentException expected for n &lt; 1. */
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @Test
    public void testFirstPrime() {
        final int firstPrime = Prime.nth(1);
        assertThat(firstPrime, is(2));
    }

    @Ignore
    @Test
    public void testSecondPrime() {
        final int secondPrime = Prime.nth(2);
        assertThat(secondPrime, is(3));
    }

    @Ignore
    @Test
    public void testSixthPrime() {
        final int sixthPrime = Prime.nth(6);
        assertThat(sixthPrime, is(13));
    }

    @Ignore
    @Test
    public void testBigPrime() {
        final int prime10001 = Prime.nth(10001);
        assertThat(prime10001, is(104743));
    }

    @Ignore
    @Test
    public void testUndefinedPrime() {
        // Register the expectation first; the call below must then throw.
        thrown.expect(IllegalArgumentException.class);
        Prime.nth(0);
    }
}
package cubicchunks.server;

import static cubicchunks.util.ReflectionUtil.getFieldGetterHandle;
import static cubicchunks.util.ReflectionUtil.getFieldSetterHandle;

import com.google.common.base.Throwables;
import cubicchunks.CubicChunks;
import cubicchunks.network.PacketColumn;
import cubicchunks.network.PacketDispatcher;
import cubicchunks.network.PacketHeightMapUpdate;
import cubicchunks.network.PacketUnloadColumn;
import cubicchunks.server.chunkio.async.forge.AsyncWorldIOExecutor;
import cubicchunks.util.AddressTools;
import cubicchunks.util.CubePos;
import cubicchunks.util.XZAddressable;
import cubicchunks.world.column.IColumn;
import gnu.trove.list.TByteList;
import gnu.trove.list.array.TByteArrayList;
import mcp.MethodsReturnNonnullByDefault;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.server.management.PlayerChunkMapEntry;
import net.minecraft.util.math.ChunkPos;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.world.ChunkWatchEvent;

import java.lang.invoke.MethodHandle;
import java.util.List;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;

/**
 * Per-column (2D chunk position) watcher used by {@link PlayerCubeMap}.
 * It tracks which players see this column, sends column/height-map packets to
 * them, and batches height-map changes until {@link #update()} runs.
 * <p>
 * Several private members of the vanilla/Forge superclass
 * {@link PlayerChunkMapEntry} are reached via {@link MethodHandle}s because
 * they are not accessible through the public API.
 */
@ParametersAreNonnullByDefault
@MethodsReturnNonnullByDefault
class ColumnWatcher extends PlayerChunkMapEntry implements XZAddressable {

    @Nonnull private PlayerCubeMap playerCubeMap;

    // Handles onto private superclass fields; the field_* strings are the SRG
    // (obfuscation-stable) names of PlayerChunkMapEntry members.
    private static MethodHandle getPlayers = getFieldGetterHandle(PlayerChunkMapEntry.class, "field_187283_c");
    private static MethodHandle setLastUpdateInhabitedTime = getFieldSetterHandle(PlayerChunkMapEntry.class, "field_187289_i");
    private static MethodHandle setSentToPlayers = getFieldSetterHandle(PlayerChunkMapEntry.class, "field_187290_j");
    private static MethodHandle isLoading = getFieldGetterHandle(PlayerChunkMapEntry.class, "loading");//forge field, no srg name
    private static MethodHandle getLoadedRunnable = getFieldGetterHandle(PlayerChunkMapEntry.class, "loadedRunnable");//forge field, no srg name

    // The superclass' "column finished loading" callback, captured in the
    // constructor so the queued async load can be completed in removePlayer().
    @Nonnull private final Runnable loadedRunnable;

    // Packed local x/z addresses of height-map entries changed since the last
    // update(); an empty list also means "not queued for update" (heightChanged()).
    @Nonnull private final TByteList dirtyColumns = new TByteArrayList(64);

    ColumnWatcher(PlayerCubeMap playerCubeMap, ChunkPos pos) {
        super(playerCubeMap, pos.x, pos.z);
        this.playerCubeMap = playerCubeMap;
        try {
            // Grab the private Forge runnable installed by the super constructor.
            this.loadedRunnable = (Runnable) getLoadedRunnable.invoke(this);
        } catch (Throwable throwable) {
            throw new RuntimeException(throwable);
        }
    }

    /**
     * Registers a player as a viewer of this column and, if the column has
     * already been sent out, immediately sends it to the new player.
     */
    // CHECKED: 1.10.2-12.18.1.2092
    public void addPlayer(EntityPlayerMP player) {
        if (this.getPlayers().contains(player)) {
            CubicChunks.LOGGER.debug("Failed to add player. {} already is in chunk {}, {}", player, this.getPos().x, this.getPos().z);
            return;
        }
        if (this.getPlayers().isEmpty()) {
            // First viewer: stamp the inhabited-time baseline.
            this.setLastUpdateInhabitedTime(playerCubeMap.getWorldServer().getTotalWorldTime());
        }
        this.getPlayers().add(player);

        //always sent to players, no need to check it
        if (this.isSentToPlayers()) {
            PacketColumn message = new PacketColumn(this.getColumn());
            PacketDispatcher.sendTo(message, player);
            //this.sendNearbySpecialEntities - done by cube entry
            MinecraftForge.EVENT_BUS.post(new ChunkWatchEvent.Watch(this.getPos(), player));
        }
    }

    /**
     * Removes a viewer; if this was the last one, the watcher deregisters
     * itself and cancels a still-queued async column load.
     */
    // CHECKED: 1.10.2-12.18.1.2092//TODO: remove it, the only different line is sending packet
    public void removePlayer(EntityPlayerMP player) {
        if (!this.getPlayers().contains(player)) {
            return;
        }
        if (this.getColumn() == null) {
            // Column not loaded yet: just drop the player and, if nobody is
            // left, abandon the pending load.
            this.getPlayers().remove(player);
            if (this.getPlayers().isEmpty()) {
                if (isLoading()) {
                    AsyncWorldIOExecutor.dropQueuedColumnLoad(
                            playerCubeMap.getWorld(), getPos().x, getPos().z, (c) -> loadedRunnable.run());
                }
                this.playerCubeMap.removeEntry(this);
            }
            return;
        }
        if (this.isSentToPlayers()) {
            PacketDispatcher.sendTo(new PacketUnloadColumn(getPos()), player);
        }
        this.getPlayers().remove(player);
        MinecraftForge.EVENT_BUS.post(new ChunkWatchEvent.UnWatch(this.getPos(), player));
        if (this.getPlayers().isEmpty()) {
            playerCubeMap.removeEntry(this);
        }
    }

    // Reflective read of the superclass' private player list (returned live,
    // so callers mutate the real list).
    private List<EntityPlayerMP> getPlayers() {
        try {
            return (List<EntityPlayerMP>) getPlayers.invoke(this);
        } catch (Throwable throwable) {
            throw Throwables.propagate(throwable);
        }
    }

    // Reflective write of the superclass' private lastUpdateInhabitedTime field.
    private void setLastUpdateInhabitedTime(long time) {
        try {
            setLastUpdateInhabitedTime.invoke(this, time);
        } catch (Throwable throwable) {
            throw Throwables.propagate(throwable);
        }
    }

    //providePlayerChunk - ok

    /**
     * Sends the column to all current viewers (once) and flips the
     * superclass' private sentToPlayers flag.
     *
     * @return true once the column has been sent, false while it is not loaded
     */
    // CHECKED: 1.10.2-12.18.1.2092
    @Override public boolean sendToPlayers() {
        if (this.isSentToPlayers()) {
            return true;
        }
        if (getColumn() == null) {
            return false;
        }
        try {
            PacketColumn message = new PacketColumn(this.getColumn());
            for (EntityPlayerMP player : this.getPlayers()) {
                PacketDispatcher.sendTo(message, player);
            }
            setSentToPlayers.invoke(this, true);
        } catch (Throwable throwable) {
            throw new RuntimeException(throwable);
        }
        return true;
    }

    /** No-op here: individual sends are handled per-cube by the cube watcher. */
    @Override
    @Deprecated
    public void sendToPlayer(EntityPlayerMP player) {
        //done by cube watcher
    }

    //updateChunkInhabitedTime - ok

    /** Delegates block-change notification to the cube watcher owning (x, y, z). */
    @Override
    @Deprecated
    public void blockChanged(int x, int y, int z) {
        CubeWatcher watcher = playerCubeMap.getCubeWatcher(CubePos.fromBlockCoords(x, y, z));
        if (watcher != null) {
            watcher.blockChanged(x, y, z);
        }
    }

    /**
     * Flushes batched height-map changes to every viewer and clears the
     * dirty list. Does nothing until the column has been sent.
     */
    @Override
    public void update() {
        if (!this.isSentToPlayers() || this.dirtyColumns.isEmpty()) {
            return;
        }
        IColumn column = getColumn();
        assert column != null;
        for (EntityPlayerMP player : this.getPlayers()) {
            PacketDispatcher.sendTo(new PacketHeightMapUpdate(getPos(), dirtyColumns, column.getOpacityIndex()), player);
        }
        this.dirtyColumns.clear();
    }

    //containsPlayer, hasPlayerMatching, hasPlayerMatchingInRange, isAddedToChunkUpdateQueue, getChunk, getClosestPlayerDistance - ok

    /** The column backing this watcher, or null while it is still loading. */
    @Nullable
    public IColumn getColumn() {
        return (IColumn) this.getChunk();
    }

    // Reflective read of Forge's private "loading" flag.
    private boolean isLoading() {
        try {
            return (boolean) isLoading.invoke(this);
        } catch (Throwable throwable) {
            throw new RuntimeException(throwable);
        }
    }

    @Override
    public int getX() {
        return this.getPos().x;
    }

    @Override
    public int getZ() {
        return this.getPos().z;
    }

    /**
     * Records a height-map change at the given column-local coordinates; the
     * first dirty entry also enqueues this watcher for the next update() pass.
     */
    void heightChanged(int localX, int localZ) {
        if (!isSentToPlayers()) {
            return;
        }
        if (this.dirtyColumns.isEmpty()) {
            playerCubeMap.addToUpdateEntry(this);
        }
        this.dirtyColumns.add(AddressTools.getLocalAddress(localX, localZ));
    }
}
package de.slackspace.alfa; import java.util.ArrayList; import java.util.List; import org.elasticsearch.client.Client; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.microsoft.windowsazure.services.core.Configuration; import com.microsoft.windowsazure.services.table.TableConfiguration; import com.microsoft.windowsazure.services.table.TableContract; import com.microsoft.windowsazure.services.table.TableService; import de.slackspace.alfa.azure.AzureService; import de.slackspace.alfa.azure.LogFetcher; import de.slackspace.alfa.elasticsearch.LogForwarder; import de.slackspace.alfa.exception.ConfigurationException; import de.slackspace.alfa.properties.PropertyHandler; import de.slackspace.alfa.properties.PropertyHandlerFactory; public class ObjectFactory { private static final Logger LOGGER = LoggerFactory.getLogger(ObjectFactory.class); private ObjectFactory() {} public static List<LogFetcher> constructLogFetcher(String configFile, Client client) { PropertyHandler propertyHandler = PropertyHandlerFactory.createPropertyHandler(configFile); LogForwarder logForwarder = new LogForwarder(client); List<LogFetcher> list = new ArrayList<>(); if(LOGGER.isDebugEnabled()) { LOGGER.debug("Starting alfa with these accounts:"); } for (int i = 1; i < propertyHandler.getNumberOfAccounts() + 1; i++) { AzureService azureService = createAzureService(propertyHandler, i); list.add(new LogFetcher(propertyHandler, logForwarder, azureService, i)); } if(list.size() == 0) { throw new ConfigurationException(String.format("The properties file is missing a configured azure account. 
Please provide at least one property with name %s_1", PropertyHandler.ACCOUNT_URL)); } return list; } private static AzureService createAzureService(PropertyHandler propertyHandler, int currentInstance) { String accountName = propertyHandler.getProperty(PropertyHandler.ACCOUNT_NAME, currentInstance); String accountKey = propertyHandler.getProperty(PropertyHandler.ACCOUNT_KEY, currentInstance); String accountUrl = propertyHandler.getProperty(PropertyHandler.ACCOUNT_URL, currentInstance); String maxLogDays = propertyHandler.getProperty(PropertyHandler.MAX_LOG_DAYS, currentInstance); if(accountName == null || accountName.isEmpty()) { throw new ConfigurationException(String.format("The properties file is missing the %s_%s property.", PropertyHandler.ACCOUNT_NAME, currentInstance)); } if(accountKey == null || accountKey.isEmpty()) { throw new ConfigurationException(String.format("The properties file is missing the %s_%s property.", PropertyHandler.ACCOUNT_KEY, currentInstance)); } if(accountUrl == null || accountUrl.isEmpty()) { throw new ConfigurationException(String.format("The properties file is missing the %s_%s property.", PropertyHandler.ACCOUNT_URL, currentInstance)); } int maxLogDaysAsInteger = 10; if(maxLogDays != null && !maxLogDays.isEmpty()) { try { maxLogDaysAsInteger = Integer.parseInt(maxLogDays); } catch(NumberFormatException e) { throw new ConfigurationException(String.format("The property %s_%s is provided but not as integer. Please provide an integer value.", PropertyHandler.MAX_LOG_DAYS, currentInstance)); } } if(LOGGER.isDebugEnabled()) { LOGGER.debug(String.format(" %s. 
Accountname: %s", currentInstance, accountName)); LOGGER.debug(String.format(" MaxLogDays: %s", maxLogDaysAsInteger)); } Configuration config = Configuration.getInstance(); config.setProperty(TableConfiguration.ACCOUNT_NAME, accountName); config.setProperty(TableConfiguration.ACCOUNT_KEY, accountKey); config.setProperty(TableConfiguration.URI, accountUrl); TableContract contract = TableService.create(config); return new AzureService(contract, maxLogDaysAsInteger); } }
package de.spinscale.dropwizard.jobs; import com.yammer.metrics.Metrics; import com.yammer.metrics.core.Counter; import com.yammer.metrics.core.Timer; import com.yammer.metrics.core.TimerContext; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException; public abstract class Job implements org.quartz.Job { private final Timer timer; public Job() { timer = Metrics.defaultRegistry().newTimer(getClass(), getClass().getName()); } @Override public void execute(JobExecutionContext context) throws JobExecutionException { TimerContext timerContext = timer.time(); try { doJob(); } finally { timerContext.stop(); } } public abstract void doJob(); }
package de.team33.libs.reflect.v4; import java.lang.reflect.Field; import java.util.Map; import java.util.TreeMap; import java.util.function.Function; import java.util.function.IntPredicate; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.unmodifiableMap; import static java.util.stream.Collectors.toMap; /** * Utility for dealing with fields. */ public class Fields { /** * Streams all {@link Field}s straightly declared by a given {@link Class} */ public static Stream<Field> flat(final Class<?> subject) { return fieldsOf(Classes.optional(subject)); } /** * Streams all {@link Field}s declared by a given {@link Class} or any of its superclasses. */ public static Stream<Field> deep(final Class<?> subject) { return fieldsOf(Classes.deep(subject)); } /** * Streams all {@link Field}s declared by a given {@link Class}, any of its superclasses or any of its * superinterfaces. */ public static Stream<Field> wide(final Class<?> subject) { return fieldsOf(Classes.wide(subject)); } /** * Determines a canonical, fully qualified name for a given field. */ public static String canonicalName(final Field field) { return field.getDeclaringClass().getCanonicalName() + "." + field.getName(); } private static Stream<Field> fieldsOf(final Stream<Class<?>> classes) { return classes.map(Class::getDeclaredFields) .map(Stream::of) .reduce(Stream::concat) .orElseGet(Stream::empty); } /** * <p>Returns a {@link Mapping.Builder Builder} for {@link Mapping Mapping}s.</p> * <p>A mapping is a {@link Function} that returns a {@link Map Map&lt;String, Field&gt;} * from a given {@link Class}</p> * <p>The key values ({@link String}) of such a {@link Map} are the logical names of the associated * {@link Field}s.</p> */ public static Mapping.Builder mapping() { return new Mapping.Builder(); } /** * Provides some predefined {@linkplain Predicate filters} for {@link Field Fields}. 
*/ public enum Filter implements Predicate<Field> { /** * Defines a filter accepting all fields (including static fields). */ ANY(Modifiers.Predicate.TRUE), /** * Defines a filter accepting all public fields. */ PUBLIC(Modifiers.Predicate.PUBLIC), /** * Defines a filter accepting all private fields. */ PRIVATE(Modifiers.Predicate.PRIVATE), /** * Defines a filter accepting all protected fields. */ PROTECTED(Modifiers.Predicate.PROTECTED), /** * Defines a filter accepting all static fields. */ STATIC(Modifiers.Predicate.STATIC), /** * Defines a filter accepting all final fields. */ FINAL(Modifiers.Predicate.FINAL), /** * Defines a filter accepting all transient fields. */ TRANSIENT(Modifiers.Predicate.TRANSIENT), /** * Defines a filter accepting all instance-fields (non-static fields). */ INSTANCE(Modifiers.Predicate.STATIC.negate()), /** * Defines a filter accepting all but static or transient fields. * Those fields should be significant for a type with value semantics. */ SIGNIFICANT(Modifiers.Predicate.STATIC.or(Modifiers.Predicate.TRANSIENT).negate()); private final IntPredicate filter; Filter(final IntPredicate filter) { this.filter = filter; } @Override public final boolean test(final Field field) { return filter.test(field.getModifiers()); } } /** * Defines some typical {@link Function}s that serve to find a name for a {@link Field}. */ public interface Naming extends Function<Field, String> { /** * A {@link Function} that simply returns the plain {@linkplain Field#getName() name} of a given {@link Field}. */ Naming SIMPLE = Field::getName; /** * A {@link Function} that returns a canonical, full qualified name for a given {@link Field}. */ Naming CANONICAL = Fields::canonicalName; /** * Defines some typical {@link Function}s that serve to find a name for a {@link Field} * that is as unique as possible in the context of a particular class. 
*/ interface ContextSensitive extends Function<Class<?>, Function<Field, String>> { /** * A {@link Function} that simply returns the plain {@linkplain Field#getName() name} of the given * {@link Field} if inquired in the context of the Field's declaring class. Otherwise it returns a * canonical, full qualified name. */ ContextSensitive QUALIFIED = context -> field -> context.equals(field.getDeclaringClass()) ? field.getName() : canonicalName(field); /** * A {@link Function} that returns the plane {@linkplain Field#getName() name} of the given {@link Field}, * preceded by a corresponding number of points (".") depending on the distance of the context to the * declaring class of the field. */ ContextSensitive COMPACT = context -> field -> Stream.generate(() -> ".") .limit(Classes.distance(context, field.getDeclaringClass())) .collect(Collectors.joining("", "", field.getName())); } } /** * Defines some typical {@link Function}s that serve to stream {@link Field}s of a {@link Class}. */ public interface Streaming extends Function<Class<?>, Stream<Field>> { /** * Streams all {@link Field}s straightly declared by a given {@link Class} */ Streaming FLAT = Fields::flat; /** * Streams all {@link Field}s declared by a given {@link Class} or any of its superclasses. */ Streaming DEEP = Fields::deep; /** * Streams all {@link Field}s declared by a given {@link Class}, any of its superclasses or any of * its superinterfaces. */ Streaming WIDE = Fields::wide; /** * Streams all non-static {@link Field}s declared by a given {@link Class} or any of its * superclasses. */ Streaming INSTANCE = context -> deep(context).filter(Filter.INSTANCE); /** * Streams all non-static/non-transient {@link Field}s straightly declared by a given {@link Class}. * Those fields should be significant for a simple type with value semantics. 
*/ Streaming SIGNIFICANT_FLAT = context -> flat(context).filter(Filter.SIGNIFICANT); /** * Streams all non-static/non-transient {@link Field}s declared by a given {@link Class} or any of * its superclasses. Those fields should be significant for a type with value semantics. */ Streaming SIGNIFICANT_DEEP = context -> deep(context).filter(Filter.SIGNIFICANT); } /** * <p>A Mapping is a {@link Function} that returns a {@link Map Map&lt;String, Field&gt;} * from a given {@link Class}</p> * <p>The key values ({@link String}) of such a {@link Map} are the logical names of the associated * {@link Field}s.</p> */ @FunctionalInterface public interface Mapping extends Function<Class<?>, Map<String, Field>> { /** * Defines a {@link Mapping} that only considers the fields straightly declared by the underlying class, * which are neither static nor transient. */ Mapping SIGNIFICANT_FLAT = mapping() .setToFieldStream(Streaming.SIGNIFICANT_FLAT) .setToName(Naming.SIMPLE) .build(); /** * Defines a {@link Mapping} that considers the fields declared by the underlying class or one of its * superclasses, which are neither static nor transient. */ Mapping SIGNIFICANT_DEEP = mapping().build(); /** * A for {@link Mapping} instances. 
*/ class Builder { private Function<Class<?>, Stream<Field>> toFieldStream = Streaming.SIGNIFICANT_DEEP; private Function<Class<?>, Function<Field, String>> toNaming = Naming.ContextSensitive.COMPACT; private Builder() { } /** * <p>Specifies how to get a {@link Stream} of {@link Field}s from a given {@link Class}.</p> * <p>Default is {@link Streaming#SIGNIFICANT_DEEP}.</p> * */ public final Builder setToFieldStream(final Function<Class<?>, Stream<Field>> toFieldStream) { this.toFieldStream = toFieldStream; return this; } /** * <p>Specifies how a name results from a given {@link Field} in the context of a given {@link Class}.</p> * <p>Default is {@link Naming.ContextSensitive#COMPACT}.</p> */ public final Builder setToNaming(final Function<Class<?>, Function<Field, String>> toNaming) { this.toNaming = toNaming; return this; } /** * Specifies how a name results from a given {@link Field}. */ public final Builder setToName(final Function<Field, String> toName) { return setToNaming(ignored -> toName); } /** * Retrieves a new {@link Mapping} that uses the specified methods. */ public final Mapping build() { return new Mapper(this); } } } /** * A tool to create a {@link Map} to {@link Fields} from their logical names. */ private static class Mapper implements Mapping { private Function<Class<?>, Stream<Field>> toFieldStream; private Function<Class<?>, Function<Field, String>> toNaming; private Mapper(final Mapping.Builder builder) { toFieldStream = builder.toFieldStream; toNaming = builder.toNaming; } /** * Retrieves a {@link Map} to {@link Fields} from their logical names. */ @Override public final Map<String, Field> apply(final Class<?> subject) { final Function<Field, String> toName = toNaming.apply(subject); return unmodifiableMap(toFieldStream.apply(subject) .peek(field -> field.setAccessible(true)) .collect(toMap(toName, field -> field, (a, b) -> b, TreeMap::new))); } } }
package edu.hm.cs.vss.remote;

import edu.hm.cs.vss.*;
import edu.hm.cs.vss.log.Logger;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.rmi.RemoteException;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.util.Observable;
import java.util.stream.Stream;

/**
 * Client-side proxy for a {@link Table} hosted in another JVM and reached via RMI.
 * <p>
 * Each mutating {@link Table} operation is forwarded to the remote {@link RmiTable} stub;
 * read operations that are not meaningful on a proxy throw
 * {@link UnsupportedOperationException}. When any remote call fails with a
 * {@link RemoteException}, observers are notified (via {@link Observable}) so they can
 * react to the lost connection.
 */
public class RemoteTable extends Observable implements Table, Philosopher.OnStandUpListener {
    /** Host name/address of the remote table; also used as this table's name. */
    private final String host;
    private final Logger logger;
    /** RMI stub looked up from the remote registry. */
    private final RmiTable table;
    private final BackupService backupService;

    /**
     * Looks up the remote table's RMI stub.
     *
     * @param host   host of the remote registry (port {@code NETWORK_PORT}).
     * @param logger sink for diagnostic messages.
     * @throws Exception if the registry lookup fails.
     */
    public RemoteTable(final String host, Logger logger) throws Exception {
        this.host = host;
        this.logger = logger;
        this.backupService = BackupService.create(this);
        final Registry registry = LocateRegistry.getRegistry(host, NETWORK_PORT);
        table = (RmiTable) registry.lookup(Table.class.getSimpleName());
    }

    @Override
    public String getName() {
        return host;
    }

    @Override
    public void connectToTable(final String tableHost) {
        try {
            logger.log("Requesting the remote table " + host + " to add the table " + tableHost);
            table.addTable(tableHost);
        } catch (RemoteException e) {
            handleRemoteTableDisconnected(e);
        }
    }

    @Override
    public void disconnectFromTable(final String tableHost) {
        try {
            logger.log("Requesting the remote table " + host + " to delete the table " + tableHost);
            table.removeTable(tableHost);
        } catch (RemoteException e) {
            handleRemoteTableDisconnected(e);
        }
    }

    /** Not supported on a remote proxy. */
    @Override
    public Stream<Table> getTables() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void addPhilosopher(Philosopher philosopher) {
        try {
            table.addPhilosopher(getLocalHost(), philosopher.getName(), philosopher.isHungry());
        } catch (RemoteException e) {
            handleRemoteTableDisconnected(e);
        }
    }

    @Override
    public void removePhilosopher(Philosopher philosopher) {
        try {
            table.removePhilosopher(getLocalHost(), philosopher.getName());
        } catch (RemoteException e) {
            handleRemoteTableDisconnected(e);
        }
    }

    /** Not supported on a remote proxy. */
    @Override
    public Stream<Philosopher> getPhilosophers() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void addChair(Chair chair) {
        try {
            table.addChair(getLocalHost(), chair.toString());
        } catch (RemoteException e) {
            handleRemoteTableDisconnected(e);
        }
    }

    @Override
    public void removeChair(Chair chair) {
        try {
            table.removeChair(getLocalHost(), chair.toString());
        } catch (RemoteException e) {
            handleRemoteTableDisconnected(e);
        }
    }

    /** Chairs are answered from the local backup, not over the wire. */
    @Override
    public Stream<Chair> getChairs() {
        return backupService.getChairs();
    }

    /** Not supported on a remote proxy. */
    @Override
    public Chair getNeighbourChair(Chair chair) {
        throw new UnsupportedOperationException();
    }

    /**
     * Table master derived from backup data: a philosopher may eat as long as its meal
     * count is within {@code MAX_DEVIATION} of the minimum meal count of all philosophers.
     */
    @Override
    public TableMaster getTableMaster() {
        return mealCount -> mealCount <= getBackupService().getPhilosophers()
                .mapToInt(Philosopher::getMealCount).min().orElse(0) + TableMaster.MAX_DEVIATION;
    }

    @Override
    public BackupService getBackupService() {
        return backupService;
    }

    /** Not supported on a remote proxy. */
    @Override
    public void setTableMaster(TableMaster tableMaster) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void onStandUp(Philosopher philosopher) {
        try {
            table.onStandUp(getLocalHost(), philosopher.getName());
        } catch (RemoteException e) {
            handleRemoteTableDisconnected(e);
        }
    }

    protected RmiTable getRmi() {
        return table;
    }

    /**
     * Reacts to a failed remote call: records the failure and notifies observers so the
     * surrounding infrastructure can drop/replace this table.
     */
    protected void handleRemoteTableDisconnected(final RemoteException e) {
        // Previously this swallowed the exception silently; at least record why
        // the remote table is considered gone before notifying observers.
        logger.log("Lost connection to remote table " + host + ": " + e.getMessage());
        setChanged();
        notifyObservers(RemoteTable.this);
    }

    /**
     * Determines this JVM's own address to identify itself towards the remote table.
     * Falls back to the loopback address if the local host cannot be resolved.
     */
    private String getLocalHost() {
        try {
            return InetAddress.getLocalHost().getHostAddress();
        } catch (UnknownHostException e) {
            logger.log("Could not determine local host address: " + e.getMessage());
            return "127.0.0.1"; // TODO This is a potential bug if the host can not be detected automatically
        }
    }
}
package edu.pdx.spi.verticles;

import com.fasterxml.jackson.databind.ObjectMapper;
import edu.pdx.spi.fakedata.models.Patient;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.eventbus.EventBus;
import io.vertx.core.eventbus.impl.MessageImpl;
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonObject;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.stream.DoubleStream;
import java.util.stream.IntStream;

/**
 * In-memory stand-in for a patient store, pre-populated with four fake patients.
 */
final class Patients {
  Map<Integer, Patient> patients = new HashMap<>();

  public Patients() {
    patients.put(1, new Patient(1, 100, "Jane", "Doe"));
    patients.put(2, new Patient(2, 101, "John", "Doe"));
    patients.put(3, new Patient(3, 102, "Lego", "Man"));
    patients.put(4, new Patient(4, 103, "Suzy", "Doe"));
  }

  /** Returns the patient with the given id, or {@code null} if unknown. */
  public Patient getPatient(int id) {
    return patients.get(id);
  }

  public Map<Integer, Patient> getAllPatients() {
    return patients;
  }
}

/**
 * Verticle that serves fake patient data and periodically publishes random
 * vital-sign samples on demand.
 * <p>
 * Event-bus contract:
 * <ul>
 *   <li>{@code "patients"} — empty body returns all patients as JSON; a numeric
 *       body returns that patient, or the string {@code "Invalid patient"}.</li>
 *   <li>{@code "numericalrequest"} / {@code "waveformrequest"} — JSON body with
 *       {@code type} and {@code id}; replies with the channel name
 *       {@code type + "." + id} on which samples are then published every second.</li>
 * </ul>
 */
public final class DataSource extends AbstractVerticle {
  Patients patientData = new Patients();
  ObjectMapper om = new ObjectMapper();
  Random rn;
  EventBus eb;

  public void start() {
    rn = new Random();
    eb = vertx.eventBus();

    eb.consumer("patients", m -> {
      final String body = (String) m.body();
      if (body.isEmpty()) {
        // No id given: return the whole map.
        try {
          m.reply(om.writeValueAsString(patientData.getAllPatients()));
        } catch (Exception e) {
          m.reply("error parsing patient data");
        }
      } else {
        // Guard the parse: a malformed id must not blow up the handler.
        final Patient p;
        try {
          p = patientData.getPatient(Integer.parseInt(body));
        } catch (NumberFormatException e) {
          m.reply("Invalid patient");
          return;
        }
        if (p != null) {
          try {
            m.reply(om.writeValueAsString(p));
          } catch (Exception e) {
            m.reply("error parsing patient data");
          }
        } else {
          m.reply("Invalid patient");
        }
      }
    });

    eb.consumer("numericalrequest", m -> {
      JsonObject js = new JsonObject((String) m.body());
      String type = js.getString("type");
      String id = js.getString("id");
      String responseChannel = type + "." + id;
      startPeriodicNumericalQuery(type, responseChannel);
      m.reply(responseChannel);
    });

    eb.consumer("waveformrequest", m -> {
      JsonObject js = new JsonObject((String) m.body());
      String type = js.getString("type");
      String id = js.getString("id");
      String responseChannel = type + "." + id;
      startPeriodicWaveformQuery(type, responseChannel);
      m.reply(responseChannel);
    });
  }

  /** Starts a 1 Hz waveform feed on {@code responseChannel} for the given query type. */
  private void startPeriodicWaveformQuery(String queryType, String responseChannel) {
    startRandomSampleFeed(queryType, responseChannel);
  }

  /** Starts a 1 Hz numerical feed on {@code responseChannel} for the given query type. */
  private void startPeriodicNumericalQuery(String queryType, String responseChannel) {
    startRandomSampleFeed(queryType, responseChannel);
  }

  /**
   * Shared implementation for all sample feeds: both supported query types
   * ({@code "hr"} and {@code "bp"}) currently emit the same fake data — a
   * {x: epoch-seconds, y: random [0,200)} point every second. Unknown types
   * start no feed (same as before the refactor).
   */
  private void startRandomSampleFeed(String queryType, String responseChannel) {
    switch (queryType) {
      case "hr":
      case "bp":
        vertx.setPeriodic(1000, t -> {
          JsonObject json = new JsonObject();
          json.put("x", System.currentTimeMillis() / 1000L);
          json.put("y", rn.nextInt(200));
          eb.send(responseChannel, json.encode());
        });
        break;
    }
  }
}
package org.hbase.async; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import static java.util.concurrent.TimeUnit.MILLISECONDS; import com.google.common.cache.LoadingCache; import org.apache.zookeeper.AsyncCallback; import org.apache.zookeeper.KeeperException.Code; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.data.Stat; import org.jboss.netty.channel.ChannelEvent; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.DefaultChannelPipeline; import org.jboss.netty.channel.socket.ClientSocketChannelFactory; import org.jboss.netty.channel.socket.SocketChannel; import org.jboss.netty.channel.socket.SocketChannelConfig; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import org.jboss.netty.util.HashedWheelTimer; import org.jboss.netty.util.Timeout; import org.jboss.netty.util.Timer; import org.jboss.netty.util.TimerTask; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.stumbleupon.async.Callback; import com.stumbleupon.async.Deferred; public final class HBaseClient { /* * TODO(tsuna): Address the following. * * - Properly handle disconnects. * - Attempt to reconnect a couple of times, see if it was a transient * network blip. * - If the -ROOT- region is unavailable when we start, we should * put a watch in ZK instead of polling it every second. * - Handling RPC timeouts. * - Stats: * - QPS per RPC type. 
 * - Latency histogram per RPC type (requires open-sourcing the SU Java
 *   stats classes that I wrote in a separate package).
 * - Cache hit rate in the local META cache.
 * - RPC errors and retries.
 * - Typical batch size when flushing edits (is that useful?).
 * - Write unit tests and benchmarks!
 */
private static final Logger LOG = LoggerFactory.getLogger(HBaseClient.class);

/**
 * An empty byte array you can use. This can be useful for instance with
 * {@link Scanner#setStartKey} and {@link Scanner#setStopKey}.
 */
public static final byte[] EMPTY_ARRAY = new byte[0];

// Well-known HBase table/qualifier names, spelled out as byte arrays to avoid
// any charset dependency.
private static final byte[] ROOT = new byte[] { '-', 'R', 'O', 'O', 'T', '-' };
private static final byte[] ROOT_REGION = new byte[] { '-', 'R', 'O', 'O', 'T', '-', ',', ',', '0' };
private static final byte[] META = new byte[] { '.', 'M', 'E', 'T', 'A', '.' };
private static final byte[] INFO = new byte[] { 'i', 'n', 'f', 'o' };
private static final byte[] REGIONINFO = new byte[] { 'r', 'e', 'g', 'i', 'o', 'n', 'i', 'n', 'f', 'o' };
private static final byte[] SERVER = new byte[] { 's', 'e', 'r', 'v', 'e', 'r' };

/**
 * Timer we use to handle all our timeouts.
 * <p>
 * This is package-private so that this timer can easily be shared with the
 * other classes in this package.
 * TODO(tsuna): Get it through the ctor to share it with others.
 */
final HashedWheelTimer timer = new HashedWheelTimer(20, MILLISECONDS);

/** Up to how many milliseconds can we buffer an edit on the client side. */
private volatile short flush_interval = 1000;

/**
 * How many different counters do we want to keep in memory for buffering.
 * Each entry requires storing the table name, row key, family name and
 * column qualifier, plus 4 small objects.
 *
 * Assuming an average table name of 10 bytes, average key of 20 bytes,
 * average family name of 10 bytes and average qualifier of 8 bytes, this
 * would require 65535 * (10 + 20 + 10 + 8 + 4 * 32) / 1024 / 1024 = 11MB
 * of RAM, which isn't too excessive for a default value. Of course this
 * might bite people with large keys or qualifiers, but then it's normal
 * to expect they'd tune this value to cater to their unusual requirements.
 */
private volatile int increment_buffer_size = 65535;

/**
 * Factory through which we will create all its channels / sockets.
 */
private final ClientSocketChannelFactory channel_factory;

/** Watcher to keep track of the -ROOT- region in ZooKeeper. */
private final ZKClient zkclient;

/**
 * The client currently connected to the -ROOT- region.
 * If this is {@code null} then we currently don't know where the -ROOT-
 * region is and we're waiting for a notification from ZooKeeper to tell
 * us where it is.
 */
private volatile RegionClient rootregion;

/**
 * Maps {@code (table, start_key)} pairs to the {@link RegionInfo} that
 * serves this key range for this table.
 * <p>
 * The keys in this map are region names.
 * @see #createRegionSearchKey
 * Because it's a sorted map, we can efficiently find a region given an
 * arbitrary key.
 * @see #getRegion
 * <p>
 * This map and the next 2 maps contain the same data, but indexed
 * differently. There is no consistency guarantee across the maps.
 * They are not updated all at the same time atomically. This map
 * is always the first to be updated, because that's the map from
 * which all the lookups are done in the fast-path of the requests
 * that need to locate a region. The second map to be updated is
 * {@link region2client}, because it comes second in the fast-path
 * of every requests that need to locate a region. The third map
 * is only used to handle RegionServer disconnections gracefully.
 * <p>
 * Note: before using the {@link RegionInfo} you pull out of this map,
 * you <b>must</b> ensure that {@link RegionInfo#table} doesn't return
 * {@link #EMPTY_ARRAY}. If it does, it means you got a special entry
 * used to indicate that this region is known to be unavailable right
 * now due to an NSRE. You must not use this {@link RegionInfo} as
 * if it was a normal entry.
 * @see #handleNSRE
 */
private final ConcurrentSkipListMap<byte[], RegionInfo> regions_cache =
    new ConcurrentSkipListMap<byte[], RegionInfo>(RegionInfo.REGION_NAME_CMP);

/**
 * Maps a {@link RegionInfo} to the client currently connected to the
 * RegionServer that serves this region.
 * <p>
 * The opposite mapping is stored in {@link #client2regions}.
 * There's no consistency guarantee with that other map.
 * See the javadoc for {@link #regions_cache} regarding consistency.
 */
private final ConcurrentHashMap<RegionInfo, RegionClient> region2client =
    new ConcurrentHashMap<RegionInfo, RegionClient>();

/**
 * Maps a client connected to a RegionServer to the list of regions we know
 * it's serving so far.
 * <p>
 * The opposite mapping is stored in {@link #region2client}.
 * There's no consistency guarantee with that other map.
 * See the javadoc for {@link #regions_cache} regarding consistency.
 * <p>
 * Each list in the map is protected by its own monitor lock.
 */
private final ConcurrentHashMap<RegionClient, ArrayList<RegionInfo>> client2regions =
    new ConcurrentHashMap<RegionClient, ArrayList<RegionInfo>>();

/**
 * Cache that maps a RegionServer address ("ip:port") to the client
 * connected to it.
 * <p>
 * Access to this map must be synchronized by locking its monitor.
 * Lock ordering: when locking both this map and a RegionClient, the
 * RegionClient must always be locked first to avoid deadlocks. Logging
 * the contents of this map (or calling toString) requires copying it first.
 * <p>
 * This isn't a {@link ConcurrentHashMap} because we don't use it frequently
 * (just when connecting to / disconnecting from RegionServers) and when we
 * add something to it, we want to do an atomic get-and-put, but
 * {@code putIfAbsent} isn't a good fit for us since it requires to create
 * an object that may be "wasted" in case another thread wins the insertion
 * race, and we don't want to create unnecessary connections.
 * <p>
 * Upon disconnection, clients are automatically removed from this map.
 * We don't use a {@code ChannelGroup} because a {@code ChannelGroup} does
 * the clean-up on the {@code channelClosed} event, which is actually the
 * 3rd and last event to be fired when a channel gets disconnected. The
 * first one to get fired is, {@code channelDisconnected}. This matters to
 * us because we want to purge disconnected clients from the cache as
 * quickly as possible after the disconnection, to avoid handing out clients
 * that are going to cause unnecessary errors.
 * @see RegionClientPipeline#handleDisconnect
 */
private final HashMap<String, RegionClient> ip2client =
    new HashMap<String, RegionClient>();

/**
 * Map of region name to list of pending RPCs for this region.
 * <p>
 * The array-list isn't expected to be empty, except during rare race
 * conditions. When the list is non-empty, the first element in the
 * list should be a special "probe" RPC we build to detect when the
 * region NSRE'd is back online.
 * <p>
 * For more details on how this map is used, please refer to the
 * documentation of {@link #handleNSRE}.
 * <p>
 * Each list in the map is protected by its own monitor lock.
 */
private final ConcurrentSkipListMap<byte[], ArrayList<HBaseRpc>> got_nsre =
    new ConcurrentSkipListMap<byte[], ArrayList<HBaseRpc>>(RegionInfo.REGION_NAME_CMP);

/**
 * Buffer for atomic increment coalescing.
 * This buffer starts out null, and remains so until the first time we need
 * to buffer an increment. Once lazily initialized, this buffer will never
 * become null again.
 * <p>
 * We do this so that we can lazily schedule the flush timer only if we ever
 * have buffered increments. Applications without buffered increments don't
 * need to pay any memory for the buffer or any CPU time for a useless timer.
 * @see #setupIncrementCoalescing
 */
private volatile LoadingCache<BufferedIncrement, BufferedIncrement.Amount> increment_buffer;

// Client usage statistics. //
private final Counter num_connections_created = new Counter();
/** How many {@code -ROOT-} lookups were made. */
private final Counter root_lookups = new Counter();
/** How many {@code .META.} lookups were made (with a permit). */
private final Counter meta_lookups_with_permit = new Counter();
/** How many {@code .META.} lookups were made (without a permit). */
private final Counter meta_lookups_wo_permit = new Counter();
/** Number of calls to {@link #flush}. */
private final Counter num_flushes = new Counter();
/** Number of NSREs handled by {@link #handleNSRE}. */
private final Counter num_nsres = new Counter();
/** Number of RPCs delayed by {@link #handleNSRE}. */
private final Counter num_nsre_rpcs = new Counter();
/** Number of {@link MultiAction} sent to the network. */
final Counter num_multi_rpcs = new Counter();
/** Number of calls to {@link #get}. */
private final Counter num_gets = new Counter();
/** Number of calls to {@link #openScanner}. */
private final Counter num_scanners_opened = new Counter();
/** Number of calls to {@link #scanNextRows}. */
private final Counter num_scans = new Counter();
/** Number of calls to {@link #put}. */
private final Counter num_puts = new Counter();
/** Number of calls to {@link #lockRow}. */
private final Counter num_row_locks = new Counter();
/** Number of calls to {@link #delete}. */
private final Counter num_deletes = new Counter();
/** Number of {@link AtomicIncrementRequest} sent. */
private final Counter num_atomic_increments = new Counter();

/**
 * Constructor.
 * @param quorum_spec The specification of the quorum, e.g.
 * {@code "host1,host2,host3"}.
 */
public HBaseClient(final String quorum_spec) {
  // "/hbase" is the default znode base path used by HBase deployments.
  this(quorum_spec, "/hbase");
}

/**
 * Constructor.
 * @param quorum_spec The specification of the quorum, e.g.
 * {@code "host1,host2,host3"}.
 * @param base_path The base path under which is the znode for the
 * -ROOT- region.
 */
public HBaseClient(final String quorum_spec, final String base_path) {
  this(quorum_spec, base_path, defaultChannelFactory());
}

/** Creates a default channel factory in case we haven't been given one.
 */
private static NioClientSocketChannelFactory defaultChannelFactory() {
  // One cached thread pool serves as both the "boss" and "worker" executor.
  final Executor executor = Executors.newCachedThreadPool();
  return new NioClientSocketChannelFactory(executor, executor);
}

/**
 * Constructor for advanced users with special needs.
 * <p>
 * <strong>NOTE:</strong> Only advanced users who really know what they're
 * doing should use this constructor. Passing an inappropriate thread
 * pool, or blocking its threads will prevent this {@code HBaseClient}
 * from working properly or lead to poor performance.
 * @param quorum_spec The specification of the quorum, e.g.
 * {@code "host1,host2,host3"}.
 * @param base_path The base path under which is the znode for the
 * -ROOT- region.
 * @param executor The executor from which to obtain threads for NIO
 * operations. It is <strong>strongly</strong> encouraged to use a
 * {@link Executors#newCachedThreadPool} or something equivalent unless
 * you're sure to understand how Netty creates and uses threads.
 * Using a fixed-size thread pool will not work the way you expect.
 * <p>
 * Note that calling {@link #shutdown} on this client will <b>NOT</b>
 * shut down the executor.
 * @see NioClientSocketChannelFactory
 * @since 1.2
 */
public HBaseClient(final String quorum_spec, final String base_path,
                   final Executor executor) {
  this(quorum_spec, base_path, new CustomChannelFactory(executor));
}

/** A custom channel factory that doesn't shutdown its executor. */
private static final class CustomChannelFactory extends NioClientSocketChannelFactory {
  CustomChannelFactory(final Executor executor) {
    super(executor, executor);
  }
  @Override
  public void releaseExternalResources() {
    // Do nothing, we don't want to shut down the executor.
  }
}

/**
 * Constructor for advanced users with special needs.
 * <p>
 * Most users don't need to use this constructor.
 * @param quorum_spec The specification of the quorum, e.g.
 * {@code "host1,host2,host3"}.
 * @param base_path The base path under which is the znode for the
 * -ROOT- region.
 * @param channel_factory A custom factory to use to create sockets.
 * <p>
 * Note that calling {@link #shutdown} on this client will also cause the
 * shutdown and release of the factory and its underlying thread pool.
 * @since 1.2
 */
public HBaseClient(final String quorum_spec, final String base_path,
                   final ClientSocketChannelFactory channel_factory) {
  this.channel_factory = channel_factory;
  zkclient = new ZKClient(quorum_spec, base_path);
}

/**
 * Returns a snapshot of usage statistics for this client.
 * @since 1.3
 */
public ClientStats stats() {
  // Single volatile-read of the lazily-created increment buffer.
  final LoadingCache<BufferedIncrement, BufferedIncrement.Amount> cache =
      increment_buffer;
  return new ClientStats(
    num_connections_created.get(),
    root_lookups.get(),
    meta_lookups_with_permit.get(),
    meta_lookups_wo_permit.get(),
    num_flushes.get(),
    num_nsres.get(),
    num_nsre_rpcs.get(),
    num_multi_rpcs.get(),
    num_gets.get(),
    num_scanners_opened.get(),
    num_scans.get(),
    num_puts.get(),
    num_row_locks.get(),
    num_deletes.get(),
    num_atomic_increments.get(),
    cache != null ? cache.stats() : BufferedIncrement.ZERO_STATS
  );
}

/**
 * Flushes to HBase any buffered client-side write operation.
 * <p>
 * @return A {@link Deferred}, whose callback chain will be invoked when
 * everything that was buffered at the time of the call has been flushed.
 * <p>
 * Note that this doesn't guarantee that <b>ALL</b> outstanding RPCs have
 * completed. This doesn't introduce any sort of global sync point. All
 * it does really is it sends any buffered RPCs to HBase.
 */
public Deferred<Object> flush() {
  num_flushes.increment();
  {
    final LoadingCache<BufferedIncrement, BufferedIncrement.Amount> buf =
        increment_buffer;  // Single volatile-read.
    if (buf != null) {
      flushBufferedIncrements(buf);
    }
  }
  final ArrayList<Deferred<Object>> d =
    new ArrayList<Deferred<Object>>(client2regions.size() + got_nsre.size() * 8);
  // Bear in mind that we're traversing a ConcurrentHashMap, so we may get
  // clients that have been removed from the map since we started iterating.
  for (final RegionClient client : client2regions.keySet()) {
    d.add(client.flush());
  }
  for (final ArrayList<HBaseRpc> nsred : got_nsre.values()) {
    synchronized (nsred) {
      for (final HBaseRpc rpc : nsred) {
        // TODO(tsuna): This is brittle, need to remember to edit this when
        // adding new RPCs that change data in HBase. Not good.
        if (rpc instanceof PutRequest
            || rpc instanceof AtomicIncrementRequest
            || rpc instanceof DeleteRequest
            || rpc instanceof CompareAndSetRequest) {
          d.add(rpc.getDeferred());
        }
      }
    }
  }
  @SuppressWarnings("unchecked")
  final Deferred<Object> flushed = (Deferred) Deferred.group(d);
  return flushed;
}

/**
 * Sets the maximum time (in milliseconds) for which edits can be buffered
 * on the client side and returns the previous setting. A negative value is
 * rejected with an {@link IllegalArgumentException}.
 */
public short setFlushInterval(final short flush_interval) {
  // Note: if we have buffered increments, they'll pick up the new flush
  // interval next time the current timer fires.
  if (flush_interval < 0) {
    throw new IllegalArgumentException("Negative: " + flush_interval);
  }
  final short prev = this.flush_interval;
  this.flush_interval = flush_interval;
  return prev;
}

/**
 * Resizes the increment-coalescing buffer and returns the previous capacity.
 * If the buffer was already created, it is rebuilt at the new size and the
 * old buffered increments are flushed out. A negative value is rejected.
 */
public int setIncrementBufferSize(final int increment_buffer_size) {
  if (increment_buffer_size < 0) {
    throw new IllegalArgumentException("Negative: " + increment_buffer_size);
  }
  final int current = this.increment_buffer_size;
  if (current == increment_buffer_size) {
    return current;
  }
  this.increment_buffer_size = increment_buffer_size;
  final LoadingCache<BufferedIncrement, BufferedIncrement.Amount> prev =
      increment_buffer;  // Volatile-read.
  if (prev != null) {
    // Need to resize.
    makeIncrementBuffer();  // Volatile-write.
    flushBufferedIncrements(prev);
  }
  return current;
}

/**
 * Returns the timer used by this client.
 * <p>
 * All timeouts, retries and other things that need to "sleep
 * asynchronously" use this timer. This method is provided so
 * that you can also schedule your own timeouts using this timer,
 * if you wish to share this client's timer instead of creating
 * your own.
 * <p>
 * The precision of this timer is implementation-defined but is
 * guaranteed to be no greater than 20ms.
 * @since 1.2
 */
public Timer getTimer() {
  return timer;
}

/**
 * Returns the maximum time (in milliseconds) for which edits can be buffered.
 * <p>
 * The default value is an unspecified and implementation dependant, but is
 * guaranteed to be non-zero.
 * <p>
 * A return value of 0 indicates that edits are sent directly to HBase
 * without being buffered.
 * @see #setFlushInterval
 */
public short getFlushInterval() {
  return flush_interval;
}

/**
 * Returns the capacity of the increment buffer.
 * <p>
 * Note this returns the <em>capacity</em> of the buffer, not the number of
 * items currently in it. There is currently no API to get the current
 * number of items in it.
 * @since 1.3
 */
public int getIncrementBufferSize() {
  return increment_buffer_size;
}

/**
 * Performs a graceful shutdown of this instance.
 * <p>
 * <ul>
 * <li>{@link #flush Flushes} all buffered edits.</li>
 * <li>Completes all outstanding requests.</li>
 * <li>Terminates all connections.</li>
 * <li>Releases all other resources.</li>
 * </ul>
 * <strong>Not calling this method before losing the last reference to this
 * instance may result in data loss and other unwanted side effects</strong>
 * @return A {@link Deferred}, whose callback chain will be invoked once all
 * of the above have been done. If this callback chain doesn't fail, then
 * the clean shutdown will be successful, and all the data will be safe on
 * the HBase side (provided that you use <a href="#durability">durable</a>
 * edits). In case of a failure (the "errback" is invoked) you may want to
 * retry the shutdown to avoid losing data, depending on the nature of the
 * failure. TODO(tsuna): Document possible / common failure scenarios.
 */
public Deferred<Object> shutdown() {
  // This is part of step 3. We need to execute this in its own thread
  // because Netty gets stuck in an infinite loop if you try to shut it
  // down from within a thread of its own thread pool. They don't want
  // to fix this so as a workaround we always shut Netty's thread pool
  // down from another thread.
  final class ShutdownThread extends Thread {
    ShutdownThread() {
      super("HBaseClient@" + HBaseClient.super.hashCode() + " shutdown");
    }
    public void run() {
      // This terminates the Executor.
      channel_factory.releaseExternalResources();
    }
  };

  // 3. Release all other resources.
  final class ReleaseResourcesCB implements Callback<Object, Object> {
    public Object call(final Object arg) {
      LOG.debug("Releasing all remaining resources");
      timer.stop();
      new ShutdownThread().start();
      return arg;
    }
    public String toString() {
      return "release resources callback";
    }
  }

  // 2. Terminate all connections.
  final class DisconnectCB implements Callback<Object, Object> {
    public Object call(final Object arg) {
      return disconnectEverything().addCallback(new ReleaseResourcesCB());
    }
    public String toString() {
      return "disconnect callback";
    }
  }

  // If some RPCs are waiting for -ROOT- to be discovered, we too must wait
  // because some of those RPCs could be edits that we must not lose.
  final Deferred<Object> d = zkclient.getDeferredRootIfBeingLookedUp();
  if (d != null) {
    final class RetryShutdown implements Callback<Object, Object> {
      public Object call(final Object arg) {
        return shutdown();
      }
      public String toString() {
        return "retry shutdown";
      }
    }
    return d.addBoth(new RetryShutdown());
  }

  // 1. Flush everything.
  return flush().addCallback(new DisconnectCB());
}

/**
 * Closes every socket, which will also flush all internal region caches.
 */
private Deferred<Object> disconnectEverything() {
  HashMap<String, RegionClient> ip2client_copy;
  synchronized (ip2client) {
    // Make a local copy so we can shutdown every Region Server client
    // without hold the lock while we iterate over the data structure.
    ip2client_copy = new HashMap<String, RegionClient>(ip2client);
  }
  final ArrayList<Deferred<Object>> d =
    new ArrayList<Deferred<Object>>(ip2client_copy.values().size() + 1);
  // Shut down all client connections, clear cache.
  for (final RegionClient client : ip2client_copy.values()) {
    d.add(client.shutdown());
  }
  if (rootregion != null && rootregion.isAlive()) {
    // It's OK if we already did that in the loop above.
    d.add(rootregion.shutdown());
  }
  ip2client_copy = null;

  final int size = d.size();
  return Deferred.group(d).addCallback(
    new Callback<Object, ArrayList<Object>>() {
      public Object call(final ArrayList<Object> arg) {
        // Normally, now that we've shutdown() every client, all our caches should
        // be empty since each shutdown() generates a DISCONNECTED event, which
        // causes RegionClientPipeline to call removeClientFromCache().
        HashMap<String, RegionClient> logme = null;
        synchronized (ip2client) {
          if (!ip2client.isEmpty()) {
            logme = new HashMap<String, RegionClient>(ip2client);
          }
        }
        if (logme != null) {
          // Putting this logging statement inside the synchronized block
          // can lead to a deadlock, since HashMap.toString() is going to
          // call RegionClient.toString() on each entry, and this locks the
          // client briefly. Other parts of the code lock clients first and
          // the ip2client HashMap second, so this can easily deadlock.
          LOG.error("Some clients are left in the client cache and haven't"
                    + " been cleaned up: " + logme);
          logme = null;
          return disconnectEverything();  // Try again.
        }
        zkclient.disconnectZK();
        return arg;
      }
      public String toString() {
        return "wait " + size + " RegionClient.shutdown()";
      }
    });
}

/**
 * Ensures that a given table/family pair really exists.
 * <p>
 * It's recommended to call this method in the startup code of your
 * application if you know ahead of time which tables / families you're
 * going to need, because it'll allow you to "fail fast" if they're missing.
 * <p>
 * Both strings are assumed to use the platform's default charset.
 * @param table The name of the table you intend to use.
 * @param family The column family you intend to use in that table.
 * @return A deferred object that indicates the completion of the request.
 * The {@link Object} has no special meaning and can be {@code null}
 * (think of it as {@code Deferred<Void>}).  But you probably want to attach
 * at least an errback to this {@code Deferred} to handle failures.
 * @throws TableNotFoundException (deferred) if the table doesn't exist.
 * @throws NoSuchColumnFamilyException (deferred) if the family doesn't exist.
 */
public Deferred<Object> ensureTableFamilyExists(final String table,
                                                final String family) {
  // Both strings use the platform's default charset, per the javadoc above.
  return ensureTableFamilyExists(table.getBytes(), family.getBytes());
}

/**
 * Ensures that a given table/family pair really exists.
 * <p>
 * It's recommended to call this method in the startup code of your
 * application if you know ahead of time which tables / families you're
 * going to need, because it'll allow you to "fail fast" if they're missing.
 * <p>
 * @param table The name of the table you intend to use.
 * @param family The column family you intend to use in that table.
 * @return A deferred object that indicates the completion of the request.
 * The {@link Object} has no special meaning and can be {@code null}
 * (think of it as {@code Deferred<Void>}).  But you probably want to attach
 * at least an errback to this {@code Deferred} to handle failures.
 * @throws TableNotFoundException (deferred) if the table doesn't exist.
 * @throws NoSuchColumnFamilyException (deferred) if the family doesn't exist.
 */
public Deferred<Object> ensureTableFamilyExists(final byte[] table,
                                                final byte[] family) {
  // Just "fault in" the first region of the table.  Not the most optimal or
  // useful thing to do but gets the job done for now.  TODO(tsuna): Improve.
  final HBaseRpc dummy;
  if (family == EMPTY_ARRAY) {
    // Identity comparison against the shared empty-array sentinel: no
    // family given, so only probe that the table itself exists.
    dummy = GetRequest.exists(table, EMPTY_ARRAY);
  } else {
    // Including the family makes the RegionServer validate it too, so a
    // missing family surfaces as a (deferred) NoSuchColumnFamilyException.
    dummy = GetRequest.exists(table, EMPTY_ARRAY, family);
  }
  @SuppressWarnings("unchecked")
  final Deferred<Object> d = (Deferred) sendRpcToRegion(dummy);
  return d;
}

/**
 * Ensures that a given table really exists.
 * <p>
 * It's recommended to call this method in the startup code of your
 * application if you know ahead of time which tables / families you're
 * going to need, because it'll allow you to "fail fast" if they're missing.
 * <p>
 * @param table The name of the table you intend to use.
 * The string is assumed to use the platform's default charset.
 * @return A deferred object that indicates the completion of the request.
 * The {@link Object} has no special meaning and can be {@code null}
 * (think of it as {@code Deferred<Void>}).  But you probably want to attach
 * at least an errback to this {@code Deferred} to handle failures.
 * @throws TableNotFoundException (deferred) if the table doesn't exist.
 */
public Deferred<Object> ensureTableExists(final String table) {
  // EMPTY_ARRAY as the family means "table-only check" (see overload above).
  return ensureTableFamilyExists(table.getBytes(), EMPTY_ARRAY);
}

/**
 * Ensures that a given table really exists.
 * <p>
 * It's recommended to call this method in the startup code of your
 * application if you know ahead of time which tables / families you're
 * going to need, because it'll allow you to "fail fast" if they're missing.
 * <p>
 * @param table The name of the table you intend to use.
 * @return A deferred object that indicates the completion of the request.
 * The {@link Object} has no special meaning and can be {@code null}
 * (think of it as {@code Deferred<Void>}).  But you probably want to attach
 * at least an errback to this {@code Deferred} to handle failures.
 * @throws TableNotFoundException (deferred) if the table doesn't exist.
 */
public Deferred<Object> ensureTableExists(final byte[] table) {
  return ensureTableFamilyExists(table, EMPTY_ARRAY);
}

/**
 * Retrieves data from HBase.
 * @param request The {@code get} request.
 * @return A deferred list of key-values that matched the get request.
 */
public Deferred<ArrayList<KeyValue>> get(final GetRequest request) {
  num_gets.increment();  // Bump the "gets" counter before dispatching.
  return sendRpcToRegion(request).addCallbacks(got, Callback.PASSTHROUGH);
}

/** Singleton callback to handle responses of "get" RPCs. */
private static final Callback<ArrayList<KeyValue>, Object> got =
  new Callback<ArrayList<KeyValue>, Object>() {
    public ArrayList<KeyValue> call(final Object response) {
      // The RPC layer hands back a plain Object; type-check before casting
      // so a mis-typed response fails loudly instead of with a CCE later.
      if (response instanceof ArrayList) {
        @SuppressWarnings("unchecked")
        final ArrayList<KeyValue> row = (ArrayList<KeyValue>) response;
        return row;
      } else {
        throw new InvalidResponseException(ArrayList.class, response);
      }
    }
    public String toString() {
      return "type get response";
    }
  };

/**
 * Creates a new {@link Scanner} for a particular table.
 * @param table The name of the table you intend to scan.
 * @return A new scanner for this table.
 */
public Scanner newScanner(final byte[] table) {
  return new Scanner(this, table);
}

/**
 * Creates a new {@link Scanner} for a particular table.
 * @param table The name of the table you intend to scan.
 * The string is assumed to use the platform's default charset.
 * @return A new scanner for this table.
 */
public Scanner newScanner(final String table) {
  return new Scanner(this, table.getBytes());
}

/**
 * Package-private access point for {@link Scanner}s to open themselves.
 * @param scanner The scanner to open.
 * @return A deferred scanner ID.
 */
Deferred<Long> openScanner(final Scanner scanner) {
  num_scanners_opened.increment();
  return sendRpcToRegion(scanner.getOpenRequest()).addCallbacks(
    scanner_opened,
    new Callback<Object, Object>() {
      public Object call(final Object error) {
        // Don't let the scanner think it's opened on this region.
        scanner.invalidate();
        return error;  // Let the error propagate.
      }
      public String toString() {
        return "openScanner errback";
      }
    });
}

/** Singleton callback to handle responses of "openScanner" RPCs.
 */
private static final Callback<Long, Object> scanner_opened =
  new Callback<Long, Object>() {
    public Long call(final Object response) {
      // The scanner ID comes back as a Long; anything else is a protocol bug.
      if (response instanceof Long) {
        return (Long) response;
      } else {
        throw new InvalidResponseException(Long.class, response);
      }
    }
    public String toString() {
      return "type openScanner response";
    }
  };

/**
 * Package-private access point for {@link Scanner}s to scan more rows.
 * @param scanner The scanner to use.
 * @param nrows The maximum number of rows to retrieve.
 * @return A deferred row.
 */
Deferred<Object> scanNextRows(final Scanner scanner) {
  final RegionInfo region = scanner.currentRegion();
  final RegionClient client = (region == null ? null
                               : region2client.get(region));
  if (client == null) {
    // Oops, we no longer know anything about this client or region.  Our
    // cache was probably invalidated while the client was scanning.  This
    // means that we lost the connection to that RegionServer, so we have to
    // re-open this scanner if we wanna keep scanning.
    scanner.invalidate();  // Invalidate the scanner so that ...
    @SuppressWarnings("unchecked")
    final Deferred<Object> d = (Deferred) scanner.nextRows();  // ... this re-opens it.
    return d;
  }
  num_scans.increment();
  final HBaseRpc next_request = scanner.getNextRowsRequest();
  final Deferred<Object> d = next_request.getDeferred();
  client.sendRpc(next_request);
  return d;
}

/**
 * Package-private access point for {@link Scanner}s to close themselves.
 * @param scanner The scanner to close.
 * @return A deferred object that indicates the completion of the request.
 * The {@link Object} has no special meaning and can be {@code null}.
 */
Deferred<Object> closeScanner(final Scanner scanner) {
  final RegionInfo region = scanner.currentRegion();
  final RegionClient client = (region == null ? null
                               : region2client.get(region));
  if (client == null) {
    // Oops, we no longer know anything about this client or region.  Our
    // cache was probably invalidated while the client was scanning.  So
    // we can't close this scanner properly.
    LOG.warn("Cannot close " + scanner + " properly, no connection open for "
             + Bytes.pretty(region == null ? null : region.name()));
    // Best effort: the server will eventually time the scanner out anyway.
    return Deferred.fromResult(null);
  }
  final HBaseRpc close_request = scanner.getCloseRequest();
  final Deferred<Object> d = close_request.getDeferred();
  client.sendRpc(close_request);
  return d;
}

/**
 * Atomically and durably increments a value in HBase.
 * <p>
 * This is equivalent to
 * {@link #atomicIncrement(AtomicIncrementRequest, boolean) atomicIncrement}
 * {@code (request, true)}
 * @param request The increment request.
 * @return The deferred {@code long} value that results from the increment.
 */
public Deferred<Long> atomicIncrement(final AtomicIncrementRequest request) {
  num_atomic_increments.increment();
  return sendRpcToRegion(request).addCallbacks(icv_done,
                                               Callback.PASSTHROUGH);
}

/**
 * Buffers an atomic increment so it can be coalesced with others.
 * <p>
 * Increments whose amount fits in {@code [0, Short.MAX_VALUE)} are merged
 * client-side into a per-(table, key, family, qualifier) accumulator that
 * is periodically flushed; larger amounts, or a disabled client-side
 * buffer ({@code flush_interval == 0}), go straight to
 * {@link #atomicIncrement(AtomicIncrementRequest)}.
 * @param request The increment request.
 * @return The deferred {@code long} value that results from the increment.
 * @throws IllegalArgumentException if the amount is negative.
 */
public Deferred<Long> bufferAtomicIncrement(final AtomicIncrementRequest request) {
  final long value = request.getAmount();
  if (value < 0) {
    throw new IllegalArgumentException("Cannot buffer atomic increment with"
                                       + " negative amount: " + request);
  } else if (value >= Short.MAX_VALUE  // Value too large to safely coalesce.
             || flush_interval == 0) { // Client-side buffer disabled.
    return atomicIncrement(request);
  }
  final BufferedIncrement incr =
    new BufferedIncrement(request.table(), request.key(),
                          request.family(), request.qualifier());
  final short delta = (short) value;

  do {
    BufferedIncrement.Amount amount;
    // Semi-evil: the very first time we get here, `increment_buffer' will
    // still be null (we don't initialize it in our constructor) so we catch
    // the NPE that ensues to allocate the buffer and kick off a timer to
    // regularly flush it.
    try {
      amount = increment_buffer.getUnchecked(incr);
    } catch (NullPointerException e) {
      setupIncrementCoalescing();  // Lazily create the buffer + flush timer.
      amount = increment_buffer.getUnchecked(incr);
    }
    if (amount.addAndGet(delta) < 0) {  // Race condition.  We got something
      // out of the buffer, but in the mean time another thread picked it up
      // and decided to send it to HBase.  So we need to retry, which will
      // create a new entry in the buffer.
      amount.addAndGet(-delta);  // Undo our previous addAndGet.
      // Loop again to retry.
    } else {
      // Our delta was folded into the buffered amount; hand the caller a
      // Deferred chained off the shared one for this accumulator.
      final Deferred<Long> deferred = new Deferred<Long>();
      amount.deferred.chain(deferred);
      return deferred;
    }
  } while(true);
}

/**
 * Called the first time we get a buffered increment.
 * Lazily creates the increment buffer and sets up a timer to regularly
 * flush buffered increments.
 */
private synchronized void setupIncrementCoalescing() {
  // If multiple threads attempt to setup coalescing at the same time, the
  // first one to get here will make `increment_buffer' non-null, and thus
  // subsequent ones will return immediately.  This is important to avoid
  // creating more than one FlushBufferedIncrementsTimer below.
  if (increment_buffer != null) {
    return;
  }
  makeIncrementBuffer();  // Volatile-write.

  // Start periodic buffered increment flushes.
  final class FlushBufferedIncrementsTimer implements TimerTask {
    public void run(final Timeout timeout) {
      try {
        flushBufferedIncrements(increment_buffer);
      } finally {
        final short interval = flush_interval;  // Volatile-read.
        // Even if we paused or disabled the client side buffer by calling
        // setFlushInterval(0), we will continue to schedule this timer
        // forever instead of pausing it.  Pausing it is troublesome because
        // we don't keep a reference to this timer, so we can't cancel it or
        // tell if it's running or not.  So let's just KISS and assume that
        // if we need the timer once, we'll need it forever.  If it's truly
        // not needed anymore, we'll just cause a bit of extra work to the
        // timer thread every 100ms, no big deal.
        timer.newTimeout(new FlushBufferedIncrementsTimer(),
                         interval > 0 ? interval : 100, MILLISECONDS);
      }
    }
  }
  final short interval = flush_interval;  // Volatile-read.
  // Handle the extremely unlikely yet possible racy case where:
  //   flush_interval was > 0
  //   A buffered increment came in
  //   It was the first one ever so we landed here
  //   Meanwhile setFlushInterval(0) to disable buffering
  // In which case we just flush whatever we have in 1ms.
  timer.newTimeout(new FlushBufferedIncrementsTimer(),
                   interval > 0 ? interval : 1, MILLISECONDS);
}

/**
 * Flushes all buffered increments.
 * @param increment_buffer The buffer to flush.
 */
private static void flushBufferedIncrements(  // JAVA Y U NO HAVE TYPEDEF? F U!
  final LoadingCache<BufferedIncrement, BufferedIncrement.Amount> increment_buffer) {
  // Calling this method to clean up before shutting down works solely
  // because `invalidateAll()' will *synchronously* remove everything.
  // The Guava documentation says "Discards all entries in the cache,
  // possibly asynchronously" but in practice the code in `LocalCache'
  // works as follows:
  //   for each segment:
  //     segment.clear
  // Where clearing a segment consists in:
  //   lock the segment
  //     for each active entry:
  //       add entry to removal queue
  //     null out the hash table
  //   unlock the segment
  //   for each entry in removal queue:
  //     call the removal listener on that entry
  // So by the time the call to `invalidateAll()' returns, every single
  // buffered increment will have been dealt with, and it is thus safe
  // to shutdown the rest of the client to let it complete all outstanding
  // operations.
  if (LOG.isDebugEnabled()) {
    LOG.debug("Flushing " + increment_buffer.size() + " buffered increments");
  }
  increment_buffer.invalidateAll();
}

/**
 * Creates the increment buffer according to current configuration.
 */
private void makeIncrementBuffer() {
  final int size = increment_buffer_size;
  increment_buffer = BufferedIncrement.newCache(this, size);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created increment buffer of " + size + " entries");
  }
}

/** Singleton callback to handle responses of incrementColumnValue RPCs.
 */
private static final Callback<Long, Object> icv_done =
  new Callback<Long, Object>() {
    public Long call(final Object response) {
      // The incremented value comes back as a Long; anything else is a bug.
      if (response instanceof Long) {
        return (Long) response;
      } else {
        throw new InvalidResponseException(Long.class, response);
      }
    }
    public String toString() {
      return "type incrementColumnValue response";
    }
  };

/**
 * Atomically increments a value in HBase.
 * @param request The increment request.
 * @param durable If {@code true}, the success of this RPC guarantees that
 * HBase has stored the edit in a <a href="#durability">durable</a> fashion.
 * When in doubt, use {@link #atomicIncrement(AtomicIncrementRequest)}.
 * @return The deferred {@code long} value that results from the increment.
 */
public Deferred<Long> atomicIncrement(final AtomicIncrementRequest request,
                                      final boolean durable) {
  request.setDurable(durable);
  return atomicIncrement(request);
}

/**
 * Stores data in HBase.
 * <p>
 * Note that this provides no guarantee as to the order in which subsequent
 * {@code put} requests are going to be applied to the backend.  If you need
 * ordering, you must enforce it manually yourself by starting the next
 * {@code put} once the {@link Deferred} of this one completes successfully.
 * @param request The {@code put} request.
 * @return A deferred object that indicates the completion of the request.
 * The {@link Object} has no special meaning and can be {@code null}
 * (think of it as {@code Deferred<Void>}).  But you probably want to attach
 * at least an errback to this {@code Deferred} to handle failures.
 * TODO(tsuna): Document failures clients are expected to handle themselves.
 */
public Deferred<Object> put(final PutRequest request) {
  num_puts.increment();
  return sendRpcToRegion(request);
}

/**
 * Atomic Compare-And-Set (CAS) on a single cell.
 * <p>
 * Note that edits sent through this method <b>cannot be batched</b>, and
 * won't be subject to the {@link #setFlushInterval flush interval}.  This
 * entails that write throughput will be lower with this method as edits
 * have to be sent out to the wire one by one.
 * <p>
 * This request enables you to atomically update the value of an existing
 * cell in HBase using a CAS operation.  It's like a {@link PutRequest}
 * except that you also pass an expected value.  If the last version of the
 * cell identified by your {@code PutRequest} matches the expected value,
 * HBase will atomically update it to the new value.
 * <p>
 * If the expected value is the empty byte array, HBase will atomically
 * create the cell provided that it doesn't exist already.  This can be used
 * to ensure that your RPC doesn't overwrite an existing value.  Note
 * however that this trick cannot be used the other way around to delete
 * an expected value atomically.
 * @param edit The new value to write.
 * @param expected The expected value of the cell to compare against.
 * <strong>This byte array will NOT be copied.</strong>
 * @return A deferred boolean, if {@code true} the CAS succeeded, otherwise
 * the CAS failed because the value in HBase didn't match the expected value
 * of the CAS request.
 * @since 1.3
 */
public Deferred<Boolean> compareAndSet(final PutRequest edit,
                                       final byte[] expected) {
  return sendRpcToRegion(new CompareAndSetRequest(edit, expected))
    .addCallback(CAS_CB);
}

/**
 * Atomic Compare-And-Set (CAS) on a single cell.
 * <p>
 * Note that edits sent through this method <b>cannot be batched</b>.
 * @see #compareAndSet(PutRequest, byte[])
 * @param edit The new value to write.
 * @param expected The expected value of the cell to compare against.
 * This string is assumed to use the platform's default charset.
 * @return A deferred boolean, if {@code true} the CAS succeeded, otherwise
 * the CAS failed because the value in HBase didn't match the expected value
 * of the CAS request.
 * @since 1.3
 */
public Deferred<Boolean> compareAndSet(final PutRequest edit,
                                       final String expected) {
  return compareAndSet(edit, expected.getBytes());
}

/**
 * Atomically insert a new cell in HBase.
 * <p>
 * Note that edits sent through this method <b>cannot be batched</b>.
 * <p>
 * This is equivalent to calling
 * {@link #compareAndSet(PutRequest, byte[]) compareAndSet}{@code (edit,
 * EMPTY_ARRAY)}
 * @see #compareAndSet(PutRequest, byte[])
 * @param edit The new value to insert.
 * @return A deferred boolean, {@code true} if the edit got atomically
 * inserted in HBase, {@code false} if there was already a value in the
 * given cell.
 * @since 1.3
 */
public Deferred<Boolean> atomicCreate(final PutRequest edit) {
  // An empty expected value means "only write if the cell doesn't exist".
  return compareAndSet(edit, EMPTY_ARRAY);
}

/** Callback to type-check responses of {@link CompareAndSetRequest}. */
private static final class CompareAndSetCB implements Callback<Boolean, Object> {
  public Boolean call(final Object response) {
    if (response instanceof Boolean) {
      return (Boolean)response;
    } else {
      throw new InvalidResponseException(Boolean.class, response);
    }
  }
  public String toString() {
    return "type compareAndSet response";
  }
}

/** Singleton callback for responses of {@link CompareAndSetRequest}. */
private static final CompareAndSetCB CAS_CB = new CompareAndSetCB();

/**
 * Acquires an explicit row lock.
 * <p>
 * For a description of what row locks are, see {@link RowLock}.
 * @param request The request specifying which row to lock.
 * @return a deferred {@link RowLock}.
 * @see #unlockRow
 */
public Deferred<RowLock> lockRow(final RowLockRequest request) {
  num_row_locks.increment();
  return sendRpcToRegion(request).addCallbacks(
    new Callback<RowLock, Object>() {
      public RowLock call(final Object response) {
        // The lock ID comes back as a Long; wrap it with the region name.
        if (response instanceof Long) {
          return new RowLock(request.getRegion().name(), (Long) response);
        } else {
          throw new InvalidResponseException(Long.class, response);
        }
      }
      public String toString() {
        return "type lockRow response";
      }
    }, Callback.PASSTHROUGH);
}

/**
 * Releases an explicit row lock.
 * <p>
 * For a description of what row locks are, see {@link RowLock}.
 * @param lock The lock to release.
 * @return A deferred object that indicates the completion of the request.
 * The {@link Object} has no special meaning and can be {@code null}
 * (think of it as {@code Deferred<Void>}).
 */
public Deferred<Object> unlockRow(final RowLock lock) {
  final byte[] region_name = lock.region();
  final RegionInfo region = regions_cache.get(region_name);
  if (knownToBeNSREd(region)) {
    // If this region has been NSRE'd, we can't possibly still hold a lock
    // on one of its rows, as this would have prevented it from splitting.
    // So let's just pretend the row has been unlocked.
    return Deferred.fromResult(null);
  }
  final RegionClient client = (region == null ? null
                               : region2client.get(region));
  if (client == null) {
    // Oops, we no longer know anything about this client or region.  Our
    // cache was probably invalidated while the client was holding the lock.
    LOG.warn("Cannot release " + lock + ", no connection open for "
             + Bytes.pretty(region_name));
    return Deferred.fromResult(null);
  }
  final HBaseRpc release = new RowLockRequest.ReleaseRequest(lock, region);
  release.setRegion(region);
  final Deferred<Object> d = release.getDeferred();
  client.sendRpc(release);
  return d;
}

/**
 * Deletes data from HBase.
 * @param request The {@code delete} request.
 * @return A deferred object that indicates the completion of the request.
 * The {@link Object} has no special meaning and can be {@code null}
 * (think of it as {@code Deferred<Void>}).  But you probably want to attach
 * at least an errback to this {@code Deferred} to handle failures.
 */
public Deferred<Object> delete(final DeleteRequest request) {
  num_deletes.increment();
  return sendRpcToRegion(request);
}

/**
 * Sends an RPC targeted at a particular region to the right RegionServer.
 * <p>
 * This method is package-private so that the low-level {@link RegionClient}
 * can retry RPCs when handling a {@link NotServingRegionException}.
 * @param request The RPC to send.  This RPC <b>must</b> specify a single
 * specific table and row key.
 * @return The deferred result of the RPC (whatever object or exception was
 * de-serialized back from the network).
 */
Deferred<Object> sendRpcToRegion(final HBaseRpc request) {
  if (cannotRetryRequest(request)) {
    return tooManyAttempts(request, null);
  }
  request.attempt++;
  final byte[] table = request.table;
  final byte[] key = request.key;
  final RegionInfo region = getRegion(table, key);

  // Re-entrant callback: after a region lookup (or NSRE) resolves, try to
  // send the RPC again from the top, unless the failure is fatal.
  final class RetryRpc implements Callback<Deferred<Object>, Object> {
    public Deferred<Object> call(final Object arg) {
      if (arg instanceof NonRecoverableException) {
        return Deferred.fromError((NonRecoverableException) arg);
      }
      return sendRpcToRegion(request);
    }
    public String toString() {
      return "retry RPC";
    }
  }

  if (region != null) {
    if (knownToBeNSREd(region)) {
      final NotServingRegionException nsre =
        new NotServingRegionException("Region known to be unavailable",
                                      request);
      final Deferred<Object> d = request.getDeferred()
        .addBothDeferring(new RetryRpc());
      handleNSRE(request, region.name(), nsre);
      return d;
    }
    // -ROOT- is served by a dedicated client; everything else is looked up
    // in the region -> client cache.
    final RegionClient client = (Bytes.equals(region.table(), ROOT)
                                 ? rootregion : region2client.get(region));
    if (client != null && client.isAlive()) {
      request.setRegion(region);
      final Deferred<Object> d = request.getDeferred();
      client.sendRpc(request);
      return d;
    }
  }
  // Cache miss (or dead client): locate the region first, then retry.
  return locateRegion(table, key).addBothDeferring(new RetryRpc());
}

/**
 * Returns how many lookups in {@code -ROOT-} were performed.
 * <p>
 * This number should remain low.  It will be 1 after the first access to
 * HBase, and will increase by 1 each time the {@code .META.} region moves
 * to another server, which should seldom happen.
 * <p>
 * This isn't to be confused with the number of times we looked up where
 * the {@code -ROOT-} region itself is located.  This happens even more
 * rarely and a message is logged at the INFO whenever it does.
 * @since 1.1
 * @deprecated This method will be removed in release 2.0.  Use
 * {@link #stats}{@code .}{@link ClientStats#rootLookups rootLookups()}
 * instead.
 */
@Deprecated
public long rootLookupCount() {
  return root_lookups.get();
}

/**
 * Returns how many lookups in {@code .META.} were performed (uncontended).
 * <p>
 * This number indicates how many times we had to lookup in {@code .META.}
 * where a key was located.  This only counts "uncontended" lookups, where
 * the thread was able to acquire a "permit" to do a {@code .META.} lookup.
 * The majority of the {@code .META.} lookups should fall in this category.
 * @since 1.1
 * @deprecated This method will be removed in release 2.0.  Use
 * {@link #stats}{@code
 * .}{@link ClientStats#uncontendedMetaLookups uncontendedMetaLookups()}
 * instead.
 */
@Deprecated
public long uncontendedMetaLookupCount() {
  return meta_lookups_with_permit.get();
}

/**
 * Returns how many lookups in {@code .META.} were performed (contended).
 * <p>
 * This number indicates how many times we had to lookup in {@code .META.}
 * where a key was located.
 * This only counts "contended" lookups, where the
 * thread was unable to acquire a "permit" to do a {@code .META.} lookup,
 * because there were already too many {@code .META.} lookups in flight.
 * In this case, the thread was delayed a bit in order to apply a bit of
 * back-pressure on the caller, to avoid creating {@code .META.} storms.
 * The minority of the {@code .META.} lookups should fall in this category.
 * @since 1.1
 * @deprecated This method will be removed in release 2.0.  Use
 * {@link #stats}{@code
 * .}{@link ClientStats#contendedMetaLookups contendedMetaLookups()}
 * instead.
 */
@Deprecated
public long contendedMetaLookupCount() {
  return meta_lookups_wo_permit.get();
}

/**
 * Checks whether or not an RPC can be retried once more.
 * @param rpc The RPC we're going to attempt to execute.
 * @return {@code true} if this RPC already had too many attempts,
 * {@code false} otherwise (in which case it's OK to retry once more).
 * @throws NonRecoverableException if the request has had too many attempts
 * already.
 */
static boolean cannotRetryRequest(final HBaseRpc rpc) {
  return rpc.attempt > 10;  // XXX Don't hardcode.
}

/**
 * Returns a {@link Deferred} containing an exception when an RPC couldn't
 * succeed after too many attempts.
 * @param request The RPC that was retried too many times.
 * @param cause What was cause of the last failed attempt, if known.
 * You can pass {@code null} if the cause is unknown.
 */
static Deferred<Object> tooManyAttempts(final HBaseRpc request,
                                        final HBaseException cause) {
  // TODO(tsuna): At this point, it's possible that we have to deal with
  // a broken META table where there's a hole.  For the sake of good error
  // reporting, at this point we should try to getClosestRowBefore + scan
  // META in order to verify whether there's indeed a hole, and if there's
  // one, throw a BrokenMetaException explaining where the hole is.
  final Exception e = new NonRecoverableException("Too many attempts: "
                                                  + request, cause);
  request.callback(e);  // Fail the RPC's own Deferred too.
  return Deferred.fromError(e);
}

// ---------------------------------------------------- //
// Code that find regions (in our cache or using RPCs)  //
// ---------------------------------------------------- //

/**
 * Locates the region in which the given row key for the given table is.
 * <p>
 * This does a lookup in the .META. / -ROOT- table(s), no cache is used.
 * If you want to use a cache, call {@link #getRegion} instead.
 * @param table The table to which the row belongs.
 * @param key The row key for which we want to locate the region.
 * @return A deferred called back when the lookup completes.  The deferred
 * carries an unspecified result.
 * @see #discoverRegion
 */
private Deferred<Object> locateRegion(final byte[] table, final byte[] key) {
  final boolean is_meta = Bytes.equals(table, META);
  final boolean is_root = !is_meta && Bytes.equals(table, ROOT);
  // We don't know in which region this row key is.  Let's look it up.
  // First, see if we already know where to look in .META.
  // Except, obviously, we don't wanna search in META for META or ROOT.
  final byte[] meta_key = is_root ? null : createRegionSearchKey(table, key);
  final RegionInfo meta_region = (is_meta || is_root
                                  ? null : getRegion(META, meta_key));

  if (meta_region != null) {
    // Lookup in .META. which region server has the region we want.
    final RegionClient client = region2client.get(meta_region);
    if (client != null && client.isAlive()) {
      final boolean has_permit = client.acquireMetaLookupPermit();
      if (!has_permit) {
        // If we failed to acquire a permit, it's worth checking if someone
        // looked up the region we're interested in.  Every once in a while
        // this will save us a META lookup.
        if (getRegion(table, key) != null) {
          return Deferred.fromResult(null);  // Looks like no lookup needed.
        }
      }
      final Deferred<Object> d =
        client.getClosestRowBefore(meta_region, META, meta_key, INFO)
        .addCallback(meta_lookup_done);
      if (has_permit) {
        // Make sure the permit is returned no matter how the lookup ends.
        final class ReleaseMetaLookupPermit implements Callback<Object, Object> {
          public Object call(final Object arg) {
            client.releaseMetaLookupPermit();
            return arg;
          }
          public String toString() {
            return "release .META. lookup permit";
          }
        };
        d.addBoth(new ReleaseMetaLookupPermit());
        meta_lookups_with_permit.increment();
      } else {
        meta_lookups_wo_permit.increment();
      }
      // This errback needs to run *after* the callback above.
      return d.addErrback(newLocateRegionErrback(table, key));
    }
  }

  // Make a local copy to avoid race conditions where we test the reference
  // to be non-null but then it becomes null before the next statement.
  final RegionClient rootregion = this.rootregion;
  if (rootregion == null || !rootregion.isAlive()) {
    return zkclient.getDeferredRoot();
  } else if (is_root) {  // Don't search ROOT in ROOT.
    return Deferred.fromResult(null);  // We already got ROOT (w00t).
  }
  // Alright so we don't even know where to look in .META.
  // Let's lookup the right .META. entry in -ROOT-.
  final byte[] root_key = createRegionSearchKey(META, meta_key);
  final RegionInfo root_region = new RegionInfo(ROOT, ROOT_REGION,
                                                EMPTY_ARRAY);
  root_lookups.increment();
  return rootregion.getClosestRowBefore(root_region, ROOT, root_key, INFO)
    .addCallback(root_lookup_done)
    // This errback needs to run *after* the callback above.
    .addErrback(newLocateRegionErrback(table, key));
}

/** Callback executed when a lookup in META completes. */
private final class MetaCB implements Callback<Object, ArrayList<KeyValue>> {
  public Object call(final ArrayList<KeyValue> arg) {
    return discoverRegion(arg);
  }
  public String toString() {
    return "locateRegion in META";
  }
};
private final MetaCB meta_lookup_done = new MetaCB();

/** Callback executed when a lookup in -ROOT- completes.
 */
private final class RootCB implements Callback<Object, ArrayList<KeyValue>> {
  public Object call(final ArrayList<KeyValue> arg) {
    return discoverRegion(arg);
  }
  public String toString() {
    return "locateRegion in ROOT";
  }
};
private final RootCB root_lookup_done = new RootCB();

/**
 * Creates a new callback that handles errors during META lookups.
 * <p>
 * This errback should be added *after* adding the callback that invokes
 * {@link #discoverRegion} so it can properly fill in the table name when
 * a {@link TableNotFoundException} is thrown (because the low-level code
 * doesn't know about tables, it only knows about regions, but for proper
 * error reporting users need the name of the table that wasn't found).
 * @param table The table to which the row belongs.
 * @param key The row key for which we want to locate the region.
 */
private Callback<Object, Exception> newLocateRegionErrback(final byte[] table,
                                                           final byte[] key) {
  return new Callback<Object, Exception>() {
    public Object call(final Exception e) {
      if (e instanceof TableNotFoundException) {
        return new TableNotFoundException(table);  // Populate the name.
      } else if (e instanceof RecoverableException) {
        // Retry to locate the region.  TODO(tsuna): exponential backoff?
        // XXX this can cause an endless retry loop (particularly if the
        // address of -ROOT- in ZK is stale when we start, this code is
        // going to retry in an almost-tight loop until the znode is
        // updated).
        return locateRegion(table, key);
      }
      return e;  // Unknown error: let it propagate.
    }
    public String toString() {
      return "locateRegion errback";
    }
  };
}

/**
 * Creates the META key to search for in order to locate the given key.
 * @param table The table the row belongs to.
 * @param key The key to search for in META.
 * @return A row key to search for in the META table, that will help us
 * locate the region serving the given {@code (table, key)}.
 */
private static byte[] createRegionSearchKey(final byte[] table,
                                            final byte[] key) {
  // Rows in .META. look like this:
  //   tablename,startkey,timestamp
  final byte[] meta_key = new byte[table.length + key.length + 3];
  System.arraycopy(table, 0, meta_key, 0, table.length);
  meta_key[table.length] = ',';
  System.arraycopy(key, 0, meta_key, table.length + 1, key.length);
  meta_key[meta_key.length - 2] = ',';
  // ':' is the first byte greater than '9'.  We always want to find the
  // entry with the greatest timestamp, so by looking right before ':'
  // we'll find it.
  meta_key[meta_key.length - 1] = ':';
  return meta_key;
}

/**
 * Searches in the regions cache for the region hosting the given row.
 * @param table The table to which the row belongs.
 * @param key The row key for which we want to find the region.
 * @return {@code null} if our cache doesn't know which region is currently
 * serving that key, in which case you'd have to look that information up
 * using {@link #locateRegion}.  Otherwise returns the cached region
 * information in which we currently believe that the given row ought to be.
 */
private RegionInfo getRegion(final byte[] table, final byte[] key) {
  if (Bytes.equals(table, ROOT)) {
    // -ROOT- is special-cased: it's always at a known pseudo-region.
    return new RegionInfo(ROOT, ROOT_REGION, EMPTY_ARRAY);
  }

  byte[] region_name = createRegionSearchKey(table, key);
  Map.Entry<byte[], RegionInfo> entry = regions_cache.floorEntry(region_name);
  if (entry == null) {
    //if (LOG.isDebugEnabled()) {
    //  LOG.debug("getRegion(table=" + Bytes.pretty(table) + ", key="
    //            + Bytes.pretty(key) + "): cache miss (nothing found).");
    //}
    return null;
  }

  if (!isCacheKeyForTable(table, entry.getKey())) {
    //if (LOG.isDebugEnabled()) {
    //  LOG.debug("getRegion(table=" + Bytes.pretty(table) + ", key="
    //            + Bytes.pretty(key) + "): cache miss (diff table):"
    //            + " region=" + entry.getValue());
    //}
    return null;
  }

  region_name = null;
  final RegionInfo region = entry.getValue();
  entry = null;

  final byte[] stop_key = region.stopKey();
  if (stop_key != EMPTY_ARRAY
      // If the stop key is an empty byte array, it means this region is the
      // last region for this table and this key ought to be in that region.
      && Bytes.memcmp(key, stop_key) >= 0) {
    //if (LOG.isDebugEnabled()) {
    //  LOG.debug("getRegion(table=" + Bytes.pretty(table) + ", key="
    //            + Bytes.pretty(key) + "): miss (key beyond stop_key):"
    //            + " region=" + region);
    //}
    return null;
  }

  //if (LOG.isDebugEnabled()) {
  //  LOG.debug("getRegion(table=" + Bytes.pretty(table) + ", key="
  //            + Bytes.pretty(key) + "): cache hit, found: " + region);
  //}
  return region;
}

/**
 * Checks whether or not the given cache key is for the given table.
 * @param table The table for which we want the cache key to be.
 * @param cache_key The cache key to check.
 * @return {@code true} if the given cache key is for the given table,
 * {@code false} otherwise.
 */
private static boolean isCacheKeyForTable(final byte[] table,
                                          final byte[] cache_key) {
  // Check we found an entry that's really for the requested table.
  for (int i = 0; i < table.length; i++) {
    if (table[i] != cache_key[i]) {  // This table isn't in the map, we found
      return false;                  // a key which is for another table.
    }
  }
  // Make sure we didn't find another key that's for another table
  // whose name is a prefix of the table name we were given.
  return cache_key[table.length] == ',';
}

/**
 * Adds a new region to our regions cache.
 * @param meta_row The (parsed) result of the
 * {@link RegionClient#getClosestRowBefore} request sent to the
 * .META. (or -ROOT-) table.
 * @return The client serving the region we discovered, or {@code null} if
 * this region isn't being served right now (and we marked it as NSRE'd).
 */
private RegionClient discoverRegion(final ArrayList<KeyValue> meta_row) {
  if (meta_row.isEmpty()) {
    throw new TableNotFoundException();
  }
  String host = null;
  int port = -42;
  RegionInfo region = null;
  byte[] start_key = null;
  for (final KeyValue kv : meta_row) {
    final byte[] qualifier = kv.qualifier();
    if (Arrays.equals(REGIONINFO, qualifier)) {
      final byte[][] tmp = new byte[1][];  // Yes, this is ugly.
        region = RegionInfo.fromKeyValue(kv, tmp);
        if (knownToBeNSREd(region)) {
          // This region is marked as NSRE'd in our cache: fail the lookup
          // fast instead of handing out a client for an offline region.
          invalidateRegionCache(region.name(), true, "has marked it as split.");
          return null;
        }
        start_key = tmp[0];
      } else if (Arrays.equals(SERVER, qualifier)
                 && kv.value() != EMPTY_ARRAY) {  // Empty during NSRE.
        final byte[] hostport = kv.value();
        // Scan backwards for the last ':' so IPv6-ish host parts with
        // embedded colons don't confuse the host/port split.
        int colon = hostport.length - 1;
        for (; colon > 0 ; colon--) {
          if (hostport[colon] == ':') {
            break;
          }
        }
        if (colon == 0) {
          throw BrokenMetaException.badKV(region, "an `info:server' cell"
            + " doesn't contain `:' to separate the `host:port'"
            + Bytes.pretty(hostport), kv);
        }
        host = getIP(new String(hostport, 0, colon));
        try {
          port = parsePortNumber(new String(hostport, colon + 1,
                                            hostport.length - colon - 1));
        } catch (NumberFormatException e) {
          throw BrokenMetaException.badKV(region, "an `info:server' cell"
            + " contains an invalid port: " + e.getMessage() + " in "
            + Bytes.pretty(hostport), kv);
        }
      }
      // TODO(tsuna): If this is the parent of a split region, there are two
      // other KVs that could be useful: `info:splitA' and `info:splitB'.
      // Need to investigate whether we can use those as a hint to update our
      // regions_cache with the daughter regions of the split.
    }
    if (start_key == null) {
      throw new BrokenMetaException(null, "It didn't contain any"
        + " `info:regioninfo' cell: " + meta_row);
    }

    final byte[] region_name = region.name();
    if (host == null) {
      // When there's no `info:server' cell, it typically means that the
      // location of this region is about to be updated in META, so we
      // consider this as an NSRE.
      invalidateRegionCache(region_name, true, "no longer has it assigned.");
      return null;
    }

    // 1. Record the region -> client mapping.
    // This won't be "discoverable" until another map points to it, because
    // at this stage no one knows about this region yet, so another thread
    // may be looking up that region again while we're in the process of
    // publishing our findings.
    final RegionClient client = newClient(host, port);
    final RegionClient oldclient = region2client.put(region, client);
    if (client == oldclient) {  // We were racing with another thread to
      return client;            // discover this region, we lost the race.
    }
    RegionInfo oldregion;
    int nregions;
    // If we get a ConnectException immediately when trying to connect to the
    // RegionServer, Netty delivers a CLOSED ChannelStateEvent from a "boss"
    // thread while we may still be handling the OPEN event in an NIO thread.
    // Locking the client prevents it from being able to buffer requests when
    // this happens.  After we release the lock, it will find it's dead.
    synchronized (client) {
      // Don't put any code between here and the next put (see next comment).

      // 2. Store the region in the sorted map.
      // This will effectively "publish" the result of our work to other
      // threads.  The window between when the previous `put' becomes visible
      // to all other threads and when we're done updating the sorted map is
      // when we may unnecessarily re-lookup the same region again.  It's an
      // acceptable trade-off.  We avoid extra synchronization complexity in
      // exchange of occasional duplicate work (which should be rare anyway).
      oldregion = regions_cache.put(region_name, region);

      // 3. Update the reverse mapping created in step 1.
      // This is done last because it's only used to gracefully handle
      // disconnections and isn't used for serving.
      ArrayList<RegionInfo> regions = client2regions.get(client);
      if (regions == null) {
        final ArrayList<RegionInfo> newlist = new ArrayList<RegionInfo>();
        regions = client2regions.putIfAbsent(client, newlist);
        if (regions == null) {  // We've just put `newlist'.
          regions = newlist;
        }
      }
      synchronized (regions) {
        regions.add(region);
        nregions = regions.size();
      }
    }

    // Don't interleave logging with the operations above, in order to attempt
    // to reduce the duration of the race windows.
    LOG.info((oldclient == null ? "Added" : "Replaced") + " client for"
             + " region " + region + ", which was "
             + (oldregion == null ? "added to" : "updated in") + " the"
             + " regions cache. Now we know that " + client + " is hosting "
             + nregions + " region" + (nregions > 1 ? 's' : "") + '.');
    return client;
  }

  /**
   * Invalidates any cached knowledge about the given region.
   * <p>
   * This is typically used when a region migrates because of a split
   * or a migration done by the region load balancer, which causes a
   * {@link NotServingRegionException}.
   * <p>
   * This is package-private so that the low-level {@link RegionClient} can do
   * the invalidation itself when it gets a {@link NotServingRegionException}
   * back from a RegionServer.
   * @param region_name The name of the region to invalidate in our caches.
   * @param mark_as_nsred If {@code true}, after removing everything we know
   * about this region, we'll store a special marker in our META cache to mark
   * this region as "known to be NSRE'd", so that subsequent requests to this
   * region will "fail-fast".
   * @param reason If not {@code null}, will be used to log an INFO message
   * about the cache invalidation done.
   */
  private void invalidateRegionCache(final byte[] region_name,
                                     final boolean mark_as_nsred,
                                     final String reason) {
    if (region_name == ROOT_REGION) {
      // -ROOT- isn't in regions_cache; its location is a dedicated field.
      if (reason != null) {
        LOG.info("Invalidated cache for -ROOT- as " + rootregion
                 + ' ' + reason);
      }
      rootregion = null;
      return;
    }
    // Either replace the entry with the special NSRE marker (empty table
    // name, see knownToBeNSREd) or drop it entirely.
    final RegionInfo oldregion = mark_as_nsred
      ? regions_cache.put(region_name,
                          new RegionInfo(EMPTY_ARRAY, region_name,
                                         EMPTY_ARRAY))
      : regions_cache.remove(region_name);
    final RegionInfo region = (oldregion != null ? oldregion
                               : new RegionInfo(EMPTY_ARRAY, region_name,
                                                EMPTY_ARRAY));
    final RegionClient client = region2client.remove(region);

    if (oldregion != null && !Bytes.equals(oldregion.name(), region_name)) {
      // XXX do we want to just re-add oldregion back?  This exposes another
      // race condition (we re-add it and overwrite yet another region change).
      LOG.warn("Oops, invalidated the wrong regions cache entry."
               + " Meant to remove " + Bytes.pretty(region_name)
               + " but instead removed " + oldregion);
    }

    if (client == null) {
      return;  // This region had no known client; nothing left to clean up.
    }
    final ArrayList<RegionInfo> regions = client2regions.get(client);
    if (regions != null) {
      // `remove()' on an ArrayList causes an array copy.  Should we switch
      // to a LinkedList instead?
      synchronized (regions) {
        regions.remove(region);
      }
    }
    if (reason != null) {
      LOG.info("Invalidated cache for " + region + " as " + client
               + ' ' + reason);
    }
  }

  /**
   * Returns true if this region is known to be NSRE'd and shouldn't be used.
   * @see #handleNSRE
   */
  private static boolean knownToBeNSREd(final RegionInfo region) {
    // NSRE'd regions are cached with a marker RegionInfo belonging to the
    // (otherwise impossible) table with an empty name.
    return region.table() == EMPTY_ARRAY;
  }

  /**
   * Low and high watermarks when buffering RPCs due to an NSRE.
   * @see #handleNSRE
   * XXX TODO(tsuna): Don't hardcode.
   */
  private static final short NSRE_LOW_WATERMARK = 1000;
  private static final short NSRE_HIGH_WATERMARK = 10000;

  /** Log a message for every N RPCs we buffer due to an NSRE. */
  private static final short NSRE_LOG_EVERY = 500;

  /**
   * Handles the {@link NotServingRegionException} for the given RPC.
   * <p>
   * This code will take ownership of the RPC in the sense that it will become
   * responsible for re-scheduling the RPC later once the NSRE situation gets
   * resolved by HBase.
   *
   * <h1>NSRE handling logic</h1>
   * Whenever we get an NSRE for the first time for a particular region, we
   * will add an entry for this region in the {@link #got_nsre} map.  We also
   * replace the entry for this region in {@link #regions_cache} with a
   * special entry that indicates that this region is known to be unavailable
   * for now, due to the NSRE.  This entry is said to be special because it
   * belongs to the table with an empty name (which is otherwise impossible).
   * This way,
   * new RPCs that are sent out can still hit our local cache instead of
   * requiring a META lookup and be directly sent to this method so they can
   * be queued to wait until the NSRE situation is resolved by HBase.
   * <p>
   * When we first get an NSRE, we also create a "probe" RPC, the goal of
   * which is to periodically poke HBase and check whether the NSRE situation
   * was resolved.  The way we poke HBase is to send an "exists" RPC (which
   * is actually just a "get" RPC that returns true or false instead of
   * returning any data) for the table / key of the first RPC to trigger the
   * NSRE.  As soon as the probe returns successfully, we know HBase resolved
   * the NSRE situation and the region is back online.  Note that it doesn't
   * matter what the result of the probe is, the only thing that matters is
   * that the probe doesn't get NSRE'd.
   * <p>
   * Once the probe RPC succeeds, we flush out all the RPCs that are pending
   * for the region that got NSRE'd.  When the probe fails, it's periodically
   * re-scheduled with an exponential-ish backoff.
   * <p>
   * We put a cap on the number of RPCs we'll keep on hold while we wait for
   * the NSRE to be resolved.  Say you have a high throughput application
   * that's producing 100k write operations per second.  Even if it takes
   * HBase just a second to bring the region back online, the application
   * will have generated over 100k RPCs before we realize we're good to go.
   * This means the application can easily run itself out of memory if we let
   * the queue grow unbounded.  To prevent that from happening, the code has
   * a low watermark and a high watermark on the number of pending RPCs for
   * a particular region.  Once the low watermark is hit, one RPC will be
   * failed with a {@link PleaseThrottleException}.  This is an advisory
   * warning that HBase isn't keeping up and that the application should
   * slow down its HBase usage momentarily.
After hitting the low watermark, * further RPCs that are still getting NSRE'd on the same region will get * buffered again until we hit the high watermark. Once the high watermark * is hit, all subsequent RPCs that get NSRE'd will immediately fail with a * {@link PleaseThrottleException} (and they will fail-fast). * @param rpc The RPC that failed or is going to fail with an NSRE. * @param region_name The name of the region this RPC is going to. * Obviously, this method cannot be used for RPCs that aren't targeted * at a particular region. * @param e The exception that caused (or may cause) this RPC to fail. */ void handleNSRE(HBaseRpc rpc, final byte[] region_name, final NotServingRegionException e) { num_nsre_rpcs.increment(); final boolean can_retry_rpc = !cannotRetryRequest(rpc); boolean known_nsre = true; // We already aware of an NSRE for this region? ArrayList<HBaseRpc> nsred_rpcs = got_nsre.get(region_name); HBaseRpc exists_rpc = null; // Our "probe" RPC. if (nsred_rpcs == null) { // Looks like this could be a new NSRE... final ArrayList<HBaseRpc> newlist = new ArrayList<HBaseRpc>(64); exists_rpc = GetRequest.exists(rpc.table, rpc.key); newlist.add(exists_rpc); if (can_retry_rpc) { newlist.add(rpc); } nsred_rpcs = got_nsre.putIfAbsent(region_name, newlist); if (nsred_rpcs == null) { // We've just put `newlist'. nsred_rpcs = newlist; // => We're the first thread to get known_nsre = false; // the NSRE for this region. } } if (known_nsre) { // Some RPCs seem to already be pending due to this NSRE boolean reject = true; // Should we reject this RPC (too many pending)? int size; // How many RPCs are already pending? 
synchronized (nsred_rpcs) { size = nsred_rpcs.size(); // If nsred_rpcs is empty, there was a race with another thread which // is executing RetryNSREd.call and that just cleared this array and // removed nsred_rpcs from got_nsre right after we got the reference, // so we need to add it back there, unless another thread already // did it (in which case we're really unlucky and lost 2 races). if (size == 0) { final ArrayList<HBaseRpc> added = got_nsre.putIfAbsent(region_name, nsred_rpcs); if (added == null) { // We've just put `nsred_rpcs'. exists_rpc = GetRequest.exists(rpc.table, rpc.key); nsred_rpcs.add(exists_rpc); // We hold the lock on nsred_rpcs if (can_retry_rpc) { nsred_rpcs.add(rpc); // so we can safely add those 2. } known_nsre = false; // We mistakenly believed it was known. } else { // We lost the second race. // Here we synchronize on two different references without any // apparent ordering guarantee, which can typically lead to // deadlocks. In this case though we're fine, as any other thread // that still has a reference to `nsred_rpcs' is gonna go through // this very same code path and will lock `nsred_rpcs' first // before finding that it too lost 2 races, so it'll lock `added' // second. So there's actually a very implicit ordering. if (can_retry_rpc) { synchronized (added) { // Won't deadlock (explanation above). if (added.isEmpty()) { LOG.error("WTF? Shouldn't happen! Lost 2 races and found" + " an empty list of NSRE'd RPCs (" + added + ") for " + Bytes.pretty(region_name)); exists_rpc = GetRequest.exists(rpc.table, rpc.key); added.add(exists_rpc); } else { exists_rpc = added.get(0); } if (can_retry_rpc) { added.add(rpc); // Add ourselves in the existing array... } } } nsred_rpcs = added; // ... and update our reference. } } // If `rpc' is the first element in nsred_rpcs, it's our "probe" RPC, // in which case we must not add it to the array again. 
else if ((exists_rpc = nsred_rpcs.get(0)) != rpc) { if (size < NSRE_HIGH_WATERMARK) { if (size == NSRE_LOW_WATERMARK) { nsred_rpcs.add(null); // "Skip" one slot. } else if (can_retry_rpc) { reject = false; if (nsred_rpcs.contains(rpc)) { // XXX O(n) check... :-/ LOG.error("WTF? Trying to add " + rpc + " twice to NSREd RPC" + " on " + Bytes.pretty(region_name)); } else { nsred_rpcs.add(rpc); } } } } else { // This is our probe RPC. reject = false; // So don't reject it. } } // end of the synchronized block. // Stop here if this is a known NSRE and `rpc' is not our probe RPC. if (known_nsre && exists_rpc != rpc) { if (size != NSRE_HIGH_WATERMARK && size % NSRE_LOG_EVERY == 0) { final String msg = "There are now " + size + " RPCs pending due to NSRE on " + Bytes.pretty(region_name); if (size + NSRE_LOG_EVERY < NSRE_HIGH_WATERMARK) { LOG.info(msg); // First message logged at INFO level. } else { LOG.warn(msg); // Last message logged with increased severity. } } if (reject) { rpc.callback(new PleaseThrottleException(size + " RPCs waiting on " + Bytes.pretty(region_name) + " to come back online", e, rpc, exists_rpc.getDeferred())); } return; // This NSRE is already known and being handled. } } num_nsres.increment(); // Mark this region as being NSRE'd in our regions_cache. invalidateRegionCache(region_name, true, (known_nsre ? "still " : "") + "seems to be splitting or closing it."); // Need a `final' variable to access from within the inner class below. final ArrayList<HBaseRpc> rpcs = nsred_rpcs; // Guaranteed non-null. final HBaseRpc probe = exists_rpc; // Guaranteed non-null. nsred_rpcs = null; exists_rpc = null; if (known_nsre && probe.attempt > 1) { // Our probe is almost guaranteed to cause a META lookup, so virtually // every time we retry it its attempt count will be incremented twice // (once for a META lookup, once to send the actual probe). 
Here we // decrement the attempt count to "de-penalize" the probe from causing // META lookups, because that's what we want it to do. If the probe // is lucky and doesn't trigger a META lookup (rare) it'll get a free // extra attempt, no big deal. probe.attempt } else if (!can_retry_rpc) { // `rpc' isn't a probe RPC and can't be retried, make it fail-fast now. rpc.callback(tooManyAttempts(rpc, e)); } rpc = null; // No longer need this reference. // Callback we're going to add on our probe RPC. When this callback gets // invoked, it means that our probe RPC completed, so NSRE situation seems // resolved and we can retry all the RPCs that were waiting on that region. // We also use this callback as an errback to avoid leaking RPCs in case // of an unexpected failure of the probe RPC (e.g. a RegionServer dying // while it's splitting a region, which would cause a connection reset). final class RetryNSREd implements Callback<Object, Object> { public Object call(final Object arg) { if (arg instanceof Exception) { LOG.warn("Probe " + probe + " failed", (Exception) arg); } ArrayList<HBaseRpc> removed = got_nsre.remove(region_name); if (removed != rpcs && removed != null) { // Should never happen. synchronized (removed) { // But just in case... synchronized (rpcs) { LOG.error("WTF? Impossible! Removed the wrong list of RPCs" + " from got_nsre. Was expecting list@" + System.identityHashCode(rpcs) + " (size=" + rpcs.size() + "), got list@" + System.identityHashCode(removed) + " (size=" + removed.size() + ')'); } for (final HBaseRpc r : removed) { if (r != null && r != probe) { sendRpcToRegion(r); // We screwed up but let's not lose RPCs. } } removed.clear(); } } removed = null; synchronized (rpcs) { if (LOG.isDebugEnabled()) { LOG.debug("Retrying " + rpcs.size() + " RPCs now that the NSRE on " + Bytes.pretty(region_name) + " seems to have cleared"); } final Iterator<HBaseRpc> i = rpcs.iterator(); if (i.hasNext()) { HBaseRpc r = i.next(); if (r != probe) { LOG.error("WTF? 
Impossible! Expected first == probe but first=" + r + " and probe=" + probe); sendRpcToRegion(r); } while (i.hasNext()) { if ((r = i.next()) != null) { sendRpcToRegion(r); } } } else { LOG.error("WTF? Impossible! Empty rpcs array=" + rpcs + " found by " + this); } rpcs.clear(); } return arg; } public String toString() { return "retry other RPCs NSRE'd on " + Bytes.pretty(region_name); } }; // It'll take a short while for HBase to clear the NSRE. If a // region is being split, we should be able to use it again pretty // quickly, but if a META entry is stale (e.g. due to RegionServer // failures / restarts), it may take up to several seconds. final class NSRETimer implements TimerTask { public void run(final Timeout timeout) { if (probe.attempt == 0) { // Brand new probe. probe.getDeferred().addBoth(new RetryNSREd()); } if (LOG.isDebugEnabled()) { LOG.debug("Done waiting after NSRE on " + Bytes.pretty(region_name) + ", retrying " + probe); } // Make sure the probe will cause a META lookup. invalidateRegionCache(region_name, false, null); sendRpcToRegion(probe); // Restart the RPC. } public String toString() { return "probe NSRE " + probe; } }; // Linear backoff followed by exponential backoff. Some NSREs can be // resolved in a second or so, some seem to easily take ~6 seconds, // sometimes more when a RegionServer has failed and the master is slowly // splitting its logs and re-assigning its regions. final int wait_ms = probe.attempt < 4 ? 200 * (probe.attempt + 2) // 400, 600, 800, 1000 : 1000 + (1 << probe.attempt); // 1016, 1032, 1064, 1128, 1256, 1512, .. timer.newTimeout(new NSRETimer(), wait_ms, MILLISECONDS); } // Code that manages connection and disconnection to Region Servers. // /** * Returns a client to communicate with a Region Server. * <p> * Note that this method is synchronized, so only one client at a time can * be created. In practice this shouldn't be a problem as this method is * not expected to be frequently called. 
   * @param host The normalized <strong>IP address</strong> of the region
   * server.  Passing a hostname or a denormalized IP address will work
   * silently and will result in unnecessary extra connections (clients are
   * cached, which is why always using the normalized IP address will result
   * in fewer connections).
   * @param port The port on which the region server is serving.
   * @return A client for this region server.
   */
  private RegionClient newClient(final String host, final int port) {
    // This big synchronized block is required because using a
    // ConcurrentHashMap wouldn't be sufficient.  We could still have 2
    // threads attempt to create the same client at the same time, and they
    // could both test the map at the same time and create 2 instances.
    final String hostport = host + ':' + port;

    RegionClient client;
    SocketChannel chan = null;
    synchronized (ip2client) {
      client = ip2client.get(hostport);
      if (client != null && client.isAlive()) {
        return client;  // Reuse the existing, live connection.
      }

      // We don't use Netty's ClientBootstrap class because it makes it
      // unnecessarily complicated to have control over which ChannelPipeline
      // exactly will be given to the channel.  It's over-designed.
      final RegionClientPipeline pipeline = new RegionClientPipeline();
      client = pipeline.init();
      chan = channel_factory.newChannel(pipeline);
      ip2client.put(hostport, client);  // This is guaranteed to return null.
    }
    num_connections_created.increment();
    // Configure and connect the channel without locking ip2client.
    final SocketChannelConfig config = chan.getConfig();
    config.setConnectTimeoutMillis(5000);
    config.setTcpNoDelay(true);
    config.setKeepAlive(true);  // TODO(tsuna): Is this really needed?
    chan.connect(new InetSocketAddress(host, port));  // Won't block.
    return client;
  }

  /**
   * A {@link DefaultChannelPipeline} that gives us a chance to deal with
   * certain events before any handler runs.
   * <p>
   * We hook a couple of methods in order to report disconnection events to
   * the {@link HBaseClient} so that it can clean up its connection caches
   * ASAP to avoid using disconnected (or soon to be disconnected) sockets.
   * <p>
   * Doing it this way is simpler than having a first handler just to handle
   * disconnection events, to which we'd need to pass a callback to invoke
   * to report the event back to the {@link HBaseClient}.
   */
  private final class RegionClientPipeline extends DefaultChannelPipeline {

    /**
     * Have we already disconnected?.
     * We use this to avoid doing the cleanup work for the same client more
     * than once, even if we get multiple events indicating that the client
     * is no longer connected to the RegionServer (e.g. DISCONNECTED, CLOSED).
     * No synchronization needed as this is always accessed from only one
     * thread at a time (equivalent to a non-shared state in a Netty handler).
     */
    private boolean disconnected = false;

    RegionClientPipeline() {
    }

    /**
     * Initializes this pipeline.
     * This method <strong>MUST</strong> be called on each new instance
     * before it's used as a pipeline for a channel.
     */
    RegionClient init() {
      final RegionClient client = new RegionClient(HBaseClient.this);
      super.addLast("handler", client);
      return client;
    }

    @Override
    public void sendDownstream(final ChannelEvent event) {
      //LoggerFactory.getLogger(RegionClientPipeline.class)
      //  .debug("hooked sendDownstream " + event);
      if (event instanceof ChannelStateEvent) {
        handleDisconnect((ChannelStateEvent) event);
      }
      super.sendDownstream(event);
    }

    @Override
    public void sendUpstream(final ChannelEvent event) {
      //LoggerFactory.getLogger(RegionClientPipeline.class)
      //  .debug("hooked sendUpstream " + event);
      if (event instanceof ChannelStateEvent) {
        handleDisconnect((ChannelStateEvent) event);
      }
      super.sendUpstream(event);
    }

    // Detects CLOSED / DISCONNECTED state transitions and evicts the
    // corresponding client from all caches exactly once.
    private void handleDisconnect(final ChannelStateEvent state_event) {
      if (disconnected) {
        return;
      }
      switch (state_event.getState()) {
        case OPEN:
          if (state_event.getValue() == Boolean.FALSE) {
            break;  // CLOSED
          }
          return;
        case CONNECTED:
          if (state_event.getValue() == null) {
            break;  // DISCONNECTED
          }
          return;
        default:
          return;  // Not an event we're interested in, ignore it.
      }

      disconnected = true;  // So we don't clean up the same client twice.
      try {
        final RegionClient client = super.get(RegionClient.class);
        SocketAddress remote = super.getChannel().getRemoteAddress();
        // At this point Netty gives us no easy way to access the
        // SocketAddress of the peer we tried to connect to, so we need to
        // find which entry in the map was used for the rootregion.  This
        // kinda sucks but I couldn't find an easier way.
        if (remote == null) {
          remote = slowSearchClientIP(client);
        }

        // Prevent the client from buffering requests while we invalidate
        // everything we have about it.
        synchronized (client) {
          removeClientFromCache(client, remote);
        }
      } catch (Exception e) {
        LoggerFactory.getLogger(RegionClientPipeline.class)
          .error("Uncaught exception when handling a disconnection of "
                 + getChannel(), e);
      }
    }

  }

  /**
   * Performs a slow search of the IP used by the given client.
   * <p>
   * This is needed when we're trying to find the IP of the client before its
   * channel has successfully connected, because Netty's API offers no way of
   * retrieving the IP of the remote peer until we're connected to it.
   * @param client The client we want the IP of.
   * @return The IP of the client, or {@code null} if we couldn't find it.
   */
  private InetSocketAddress slowSearchClientIP(final RegionClient client) {
    String hostport = null;
    // Linear scan: ip2client is keyed by "host:port", not by client.
    synchronized (ip2client) {
      for (final Map.Entry<String, RegionClient> e : ip2client.entrySet()) {
        if (e.getValue() == client) {
          hostport = e.getKey();
          break;
        }
      }
    }

    if (hostport == null) {
      HashMap<String, RegionClient> copy;
      synchronized (ip2client) {
        copy = new HashMap<String, RegionClient>(ip2client);
      }
      LOG.error("WTF? Should never happen! Couldn't find " + client
                + " in " + copy);
      return null;
    }
    LOG.warn("Couldn't connect to the RegionServer @ " + hostport);
    final int colon = hostport.indexOf(':', 1);
    if (colon < 1) {
      LOG.error("WTF? Should never happen! No `:' found in " + hostport);
      return null;
    }
    final String host = getIP(hostport.substring(0, colon));
    int port;
    try {
      port = parsePortNumber(hostport.substring(colon + 1,
                                                hostport.length()));
    } catch (NumberFormatException e) {
      LOG.error("WTF? Should never happen! Bad port in " + hostport, e);
      return null;
    }
    return new InetSocketAddress(host, port);
  }

  /**
   * Removes all the cache entries referred to the given client.
   * @param client The client for which we must invalidate everything.
   * @param remote The address of the remote peer, if known, or null.
   */
  private void removeClientFromCache(final RegionClient client,
                                     final SocketAddress remote) {
    if (client == rootregion) {
      LOG.info("Lost connection with the -ROOT- region");
      rootregion = null;
    }

    ArrayList<RegionInfo> regions = client2regions.remove(client);
    if (regions != null) {
      // Make a copy so we don't need to synchronize on it while iterating.
      RegionInfo[] regions_copy;
      synchronized (regions) {
        regions_copy = regions.toArray(new RegionInfo[regions.size()]);
        regions = null;
        // If any other thread still has a reference to `regions', their
        // updates will be lost (and we don't care).
      }

      for (final RegionInfo region : regions_copy) {
        final byte[] table = region.table();
        final byte[] stop_key = region.stopKey();
        // If stop_key is the empty array:
        //   This region is the last region for this table.  In order to
        //   find the start key of the last region, we append a '\0' byte
        //   at the end of the table name and search for the entry with a
        //   key right before it.
        // Otherwise:
        //   Search for the entry with a key right before the stop_key.
        final byte[] search_key =
          createRegionSearchKey(stop_key.length == 0
                                ? Arrays.copyOf(table, table.length + 1)
                                : table, stop_key);
        final Map.Entry<byte[], RegionInfo> entry =
          regions_cache.lowerEntry(search_key);
        if (entry != null && entry.getValue() == region) {
          // Invalidate the regions cache first, as it's the most damaging
          // one if it contains stale data.
          regions_cache.remove(entry.getKey());
          LOG.debug("Removed from regions cache: {}", region);
        }
        final RegionClient oldclient = region2client.remove(region);
        if (client == oldclient) {
          LOG.debug("Association removed: {} -> {}", region, client);
        } else if (oldclient != null) {  // Didn't remove what we expected?!
          LOG.warn("When handling disconnection of " + client
                   + " and removing " + region + " from region2client"
                   + ", it was found that " + oldclient + " was in fact"
                   + " serving this region");
        }
      }
    }

    if (remote == null) {
      return;  // Can't continue without knowing the remote address.
    }

    String hostport = null;
    if (remote instanceof InetSocketAddress) {
      final InetSocketAddress sock = (InetSocketAddress) remote;
      final InetAddress addr = sock.getAddress();
      if (addr == null) {
        LOG.error("WTF? Unresolved IP for " + remote
                  + ". This shouldn't happen.");
        return;
      } else {
        hostport = addr.getHostAddress() + ':' + sock.getPort();
      }
    } else {
      LOG.error("WTF? Found a non-InetSocketAddress remote: " + remote
                + ". This shouldn't happen.");
      return;
    }

    RegionClient old;
    synchronized (ip2client) {
      old = ip2client.remove(hostport);
    }
    LOG.debug("Removed from IP cache: {} -> {}", hostport, client);
    if (old == null) {
      LOG.warn("When expiring " + client
               + " from the client cache (host:port=" + hostport
               + "), it was found that there was no entry"
               + " corresponding to " + remote + ". This shouldn't happen.");
    }
  }

  // ZooKeeper stuff. //

  /**
   * Helper to locate the -ROOT- region through ZooKeeper.
   * <p>
   * We don't watch the file of the -ROOT- region.  We just asynchronously
   * read it once to find -ROOT-, then we close our ZooKeeper session.
   * There are a few reasons for this.  First of all, the -ROOT- region
   * doesn't move often.  When it does, and when we need to use it, we'll
   * realize that -ROOT- is no longer where we though it was and we'll find
   * it again.  Secondly, maintaining a session open just to watch the
   * -ROOT- region is a waste of resources both on our side and on ZK's side.
   * ZK is chatty, it will frequently send us heart beats that will keep
   * waking its event thread, etc.  Third, if the application we're part of
   * already needs to maintain a session with ZooKeeper anyway, we can't
   * easily share it with them anyway, because of ZooKeeper's API.  Indeed,
   * unfortunately the ZooKeeper API requires that the {@link ZooKeeper}
   * object be re-created when the session is invalidated (due to a
   * disconnection or a timeout), which means that it's impossible to
   * share the {@link ZooKeeper} object.  Ideally in an application there
   * should be only one instance, but their poor API makes it impractical,
   * since the instance must be re-created when the session is invalidated,
   * which entails that one entity should own the reconnection process and
   * have a way of giving everyone else the new instance.
   * This is extremely
   * cumbersome so I don't expect anyone to do this, which is why we manage
   * our own instance.
   */
  private final class ZKClient implements Watcher {

    /**
     * HBASE-3065 (r1151751) prepends meta-data in ZooKeeper files.
     * The meta-data always starts with this magic byte.
     */
    private static final byte MAGIC = (byte) 0xFF;

    /** The specification of the quorum, e.g. "host1,host2,host3" */
    private final String quorum_spec;

    /** The base path under which is the znode for the -ROOT- region. */
    private final String base_path;

    /**
     * Our ZooKeeper instance.
     * Must grab this' monitor before accessing.
     */
    private ZooKeeper zk;

    /**
     * When we're not connected to ZK, users who are trying to access the
     * -ROOT- region can queue up here to be called back when it's available.
     * Must grab this' monitor before accessing.
     */
    private ArrayList<Deferred<Object>> deferred_rootregion;

    /**
     * Constructor.
     * @param quorum_spec The specification of the quorum, e.g.
     * {@code "host1,host2,host3"}.
     * @param base_path The base path under which is the znode for the
     * -ROOT- region.
     */
    public ZKClient(final String quorum_spec, final String base_path) {
      this.quorum_spec = quorum_spec;
      this.base_path = base_path;
    }

    /**
     * Returns a deferred that will be called back once we found -ROOT-.
     * @return A deferred which will be invoked with an unspecified argument
     * once we know where -ROOT- is.  Note that by the time you get called
     * back, we may have lost the connection to the -ROOT- region again.
     */
    public Deferred<Object> getDeferredRoot() {
      final Deferred<Object> d = new Deferred<Object>();
      synchronized (this) {
        try {
          connectZK();  // Kick off a connection if needed.
          if (deferred_rootregion == null) {
            LOG.info("Need to find the -ROOT- region");
            deferred_rootregion = new ArrayList<Deferred<Object>>();
          }
          deferred_rootregion.add(d);
        } catch (NonRecoverableException e) {
          LOG.error(e.getMessage(), e.getCause());
          d.callback(e);
        }
      }
      return d;
    }

    /**
     * Like {@link getDeferredRoot} but returns null if we're not already
     * trying to find -ROOT-.
     * In other words calling this method doesn't trigger a -ROOT- lookup
     * unless there's already one in flight.
     * @return {@code null} if -ROOT- isn't being looked up right now,
     * otherwise a deferred which will be invoked with an unspecified argument
     * once we know where -ROOT- is.  Note that by the time you get called
     * back, we may have lost the connection to the -ROOT- region again.
     */
    Deferred<Object> getDeferredRootIfBeingLookedUp() {
      synchronized (this) {
        if (deferred_rootregion == null) {
          return null;
        }
        final Deferred<Object> d = new Deferred<Object>();
        deferred_rootregion.add(d);
        return d;
      }
    }

    /**
     * Atomically returns and {@code null}s out the current list of
     * Deferreds waiting for the -ROOT- region.
     */
    private ArrayList<Deferred<Object>> atomicGetAndRemoveWaiters() {
      synchronized (this) {
        try {
          return deferred_rootregion;
        } finally {
          deferred_rootregion = null;
        }
      }
    }

    /**
     * Processes a ZooKeeper event.
     * <p>
     * This method is called back by {@link ZooKeeper} from its main event
     * thread.  So make sure you don't block.
     * @param event The event to process.
     */
    public void process(final WatchedEvent event) {
      LOG.debug("Got ZooKeeper event: {}", event);
      try {
        switch (event.getState()) {
          case SyncConnected:
            getRootRegion();
            break;
          default:
            disconnectZK();
            // Reconnect only if we're still trying to locate -ROOT-.
            synchronized (this) {
              if (deferred_rootregion != null) {
                LOG.warn("No longer connected to ZooKeeper, event=" + event);
                connectZK();
              }
            }
            return;
        }
      } catch (Exception e) {
        LOG.error("Uncaught exception when handling event " + event, e);
        return;
      }
      LOG.debug("Done handling ZooKeeper event: {}", event);
    }

    /**
     * Connects to ZooKeeper.
     * @throws NonRecoverableException if something from which we can't
     * recover happened -- e.g. us being unable to resolve the hostname
     * of any of the zookeeper servers.
     */
    private void connectZK() {
      try {
        // Session establishment is asynchronous, so this won't block.
        synchronized (this) {
          if (zk != null) {  // Already connected.
            return;
          }
          zk = new ZooKeeper(quorum_spec, 5000, this);
        }
      } catch (UnknownHostException e) {
        // No need to retry, we usually cannot recover from this.
        throw new NonRecoverableException("Cannot connect to ZooKeeper,"
          + " is the quorum specification valid? " + quorum_spec, e);
      } catch (IOException e) {
        LOG.error("Failed to connect to ZooKeeper", e);
        // XXX don't retry recursively, create a timer with an exponential
        // backoff and schedule the reconnection attempt for later.
        connectZK();
      }
    }

    /**
     * Disconnects from ZooKeeper.
     * <p>
     * <strong>This method is blocking.</strong>  Unfortunately, ZooKeeper
     * doesn't offer an asynchronous API to close a session at this time.
     * It waits until the server responds to the {@code closeSession} RPC.
     */
    public void disconnectZK() {
      synchronized (this) {
        if (zk == null) {
          return;
        }
        try {
          // I'm not sure but I think both the client and the server race to
          // close the socket, which often causes the DEBUG spam:
          //   java.net.SocketException: Socket is not connected
          // When the client attempts to close its socket after its OS and
          // JVM are done processing the TCP FIN and it's already closed.
LOG.debug("Ignore any DEBUG exception from ZooKeeper"); final long start = System.nanoTime(); zk.close(); LOG.debug("ZooKeeper#close completed in {}ns", System.nanoTime() - start); } catch (InterruptedException e) { // The signature of the method pretends that it can throw an // InterruptedException, but this is a lie, the code of that // method will never throw this type of exception. LOG.error("Should never happen", e); } zk = null; } } /** Schedule a timer to retry {@link #getRootRegion} after some time. */ private void retryGetRootRegionLater(final AsyncCallback.DataCallback cb) { timer.newTimeout(new TimerTask() { public void run(final Timeout timeout) { if (zk != null) { LOG.debug("Retrying to find the -ROOT- region in ZooKeeper"); zk.getData(base_path + "/root-region-server", ZKClient.this, cb, null); } else { connectZK(); } } }, 1000, MILLISECONDS); } /** * Puts a watch in ZooKeeper to monitor the file of the -ROOT- region. * This method just registers an asynchronous callback. */ private void getRootRegion() { final AsyncCallback.DataCallback cb = new AsyncCallback.DataCallback() { @SuppressWarnings("fallthrough") public void processResult(final int rc, final String path, final Object ctx, final byte[] data, final Stat stat) { if (rc == Code.NONODE.intValue()) { LOG.error("The znode for the -ROOT- region doesn't exist!"); retryGetRootRegionLater(this); return; } else if (rc != Code.OK.intValue()) { LOG.error("Looks like our ZK session expired or is broken, rc=" + rc + ": " + Code.get(rc)); disconnectZK(); connectZK(); return; } if (data == null || data.length == 0 || data.length > Short.MAX_VALUE) { LOG.error("The location of the -ROOT- region in ZooKeeper is " + (data == null || data.length == 0 ? "empty" : "too large (" + data.length + " bytes!)")); retryGetRootRegionLater(this); return; // TODO(tsuna): Add a watch to wait until the file changes. } // There are 3 cases. 
Older versions of HBase encode the location // of the root region as "host:port", 0.91 uses "host,port,startcode" // and newer versions of 0.91 use "<metadata>host,port,startcode" // where the <metadata> starts with MAGIC, then a 4 byte integer, // then that many bytes of meta data. boolean newstyle; // True if we expect a 0.91 style location. final short offset; // Bytes to skip at the beginning of data. short firstsep = -1; // Index of the first separator (':' or ','). if (data[0] == MAGIC) { newstyle = true; final int metadata_length = Bytes.getInt(data, 1); if (metadata_length < 1 || metadata_length > 65000) { LOG.error("Malformed meta-data in " + Bytes.pretty(data) + ", invalid metadata length=" + metadata_length); retryGetRootRegionLater(this); return; // TODO(tsuna): Add a watch to wait until the file changes. } offset = (short) (1 + 4 + metadata_length); } else { newstyle = false; // Maybe true, the loop below will tell us. offset = 0; } final short n = (short) data.length; // Look for the first separator. Skip the offset, and skip the // first byte, because we know the separate can only come after // at least one byte. loop: for (short i = (short) (offset + 1); i < n; i++) { switch (data[i]) { case ',': newstyle = true; /* fall through */ case ':': firstsep = i; break loop; } } if (firstsep == -1) { LOG.error("-ROOT- location doesn't contain a separator" + " (':' or ','): " + Bytes.pretty(data)); retryGetRootRegionLater(this); return; // TODO(tsuna): Add a watch to wait until the file changes. } final String host; final short portend; // Index past where the port number ends. if (newstyle) { host = new String(data, offset, firstsep - offset); short i; for (i = (short) (firstsep + 2); i < n; i++) { if (data[i] == ',') { break; } } portend = i; // Port ends on the comma. } else { host = new String(data, 0, firstsep); portend = n; // Port ends at the end of the array. 
} final int port = parsePortNumber(new String(data, firstsep + 1, portend - firstsep - 1)); final String ip = getIP(host); if (ip == null) { LOG.error("Couldn't resolve the IP of the -ROOT- region from " + host + " in \"" + Bytes.pretty(data) + '"'); retryGetRootRegionLater(this); return; // TODO(tsuna): Add a watch to wait until the file changes. } LOG.info("Connecting to -ROOT- region @ " + ip + ':' + port); final RegionClient client = rootregion = newClient(ip, port); final ArrayList<Deferred<Object>> ds = atomicGetAndRemoveWaiters(); if (ds != null) { for (final Deferred<Object> d : ds) { d.callback(client); } } disconnectZK(); // By the time we're done, we may need to find -ROOT- again. So // check to see if there are people waiting to find it again, and if // there are, re-open a new session with ZK. // TODO(tsuna): This typically happens when the address of -ROOT- in // ZK is stale. In this case, we should setup a watch to get // notified once the znode gets updated, instead of continuously // polling ZK and creating new sessions. synchronized (ZKClient.this) { if (deferred_rootregion != null) { connectZK(); } } } }; synchronized (this) { if (zk != null) { LOG.debug("Finding the -ROOT- region in ZooKeeper"); zk.getData(base_path + "/root-region-server", this, cb, null); } } } } // Little helpers. // /** * Gets a hostname or an IP address and returns the textual representation * of the IP address. * <p> * <strong>This method can block</strong> as there is no API for * asynchronous DNS resolution in the JDK. * @param host The hostname to resolve. * @return The IP address associated with the given hostname, * or {@code null} if the address couldn't be resolved. 
*/ private static String getIP(final String host) { final long start = System.nanoTime(); try { final String ip = InetAddress.getByName(host).getHostAddress(); final long latency = System.nanoTime() - start; if (latency > 500000 && LOG.isDebugEnabled()) { LOG.debug("Resolved IP of `" + host + "' to " + ip + " in " + latency + "ns"); } else if (latency >= 3000000) { LOG.warn("Slow DNS lookup! Resolved IP of `" + host + "' to " + ip + " in " + latency + "ns"); } return ip; } catch (UnknownHostException e) { LOG.error("Failed to resolve the IP of `" + host + "' in " + (System.nanoTime() - start) + "ns"); return null; } } /** * Parses a TCP port number from a string. * @param portnum The string to parse. * @return A strictly positive, validated port number. * @throws NumberFormatException if the string couldn't be parsed as an * integer or if the value was outside of the range allowed for TCP ports. */ private static int parsePortNumber(final String portnum) throws NumberFormatException { final int port = Integer.parseInt(portnum); if (port <= 0 || port > 65535) { throw new NumberFormatException(port == 0 ? "port is zero" : (port < 0 ? "port is negative: " : "port is too large: ") + port); } return port; } }
package edu.ucsf.mousedatabase;

import java.sql.*;
import java.sql.Date;
import java.util.*;

import javax.naming.Context;
import javax.naming.InitialContext;
import javax.sql.DataSource;

import edu.ucsf.mousedatabase.beans.MouseSubmission;
import edu.ucsf.mousedatabase.beans.UserData;
import edu.ucsf.mousedatabase.dataimport.ImportHandler;
import edu.ucsf.mousedatabase.dataimport.ImportHandler.ImportObjectType;
import edu.ucsf.mousedatabase.objects.*;
import edu.ucsf.mousedatabase.objects.ChangeRequest.Action;
import edu.ucsf.mousedatabase.servlets.ReportServlet;

import org.apache.commons.lang3.StringUtils;

/**
 * Database access layer for the mouse inventory application.
 * Holds shared SQL query-header constants and static query/report helpers.
 * Connections are obtained per-call from the container-managed JNDI
 * DataSource (see {@link #connect()}).
 */
public class DBConnect {

  //set this to true for debugging
  private static final boolean logQueries = false;

  // Column list shared by the mouse-record SELECT queries below.
  private static final String mouseRecordTableColumns =
      "mouse.id, name, mousetype, modification_type,"
      + "transgenictype.transgenictype,regulatory_element_comment as 'regulatory element',"
      +"expressedsequence.expressedsequence, reporter_comment as 'reporter', strain, "
      + "general_comment, source, mta_required, repository_id, repository.repository, "
      + "repository_catalog_number,gensat,other_comment, gene.mgi as 'gene MGI', "
      + "gene.symbol as 'gene symbol', gene.fullname as 'gene name',cryopreserved,"
      + "status,endangered,submittedmouse_id, targetgenes.mgi as 'target gene MGI',"
      + "targetgenes.symbol as 'target gene symbol', targetgenes.fullname as 'target gene name', official_name\r\n";

  // Joins that resolve the lookup-table foreign keys on the mouse table.
  private static final String mouseRecordTableJoins =
      " left join mousetype on mouse.mousetype_id=mousetype.id\r\n"
      +" left join gene on mouse.gene_id=gene.id\r\n"
      +" left join gene as targetgenes on mouse.target_gene_id=targetgenes.id\r\n"
      +" left join transgenictype on mouse.transgenictype_id=transgenictype.id\r\n"
      +" left join expressedsequence on mouse.expressedsequence_id=expressedsequence.id\r\n"
      +" left join repository on mouse.repository_id=repository.id\r\n ";

  private static final String mouseRecordQueryHeader =
      "SELECT " + mouseRecordTableColumns
      +" FROM mouse\r\n"
      + mouseRecordTableJoins;

  private static final String mouseRecordQueryCountHeader =
      "SELECT count(*) as count"
      +" FROM mouse\r\n"
      + mouseRecordTableJoins;

  private static final String mouseSubmissionQueryHeader =
      "SELECT submittedmouse.* , mouse.id as mouseRecordID\r\n"
      + " FROM submittedmouse left join mouse on submittedmouse.id=mouse.submittedmouse_id\r\n ";

  private static final String changeRequestQueryHeader =
      "SELECT changerequest.*, mouse.name, holder.firstname, holder.lastname, holder.email, facility.facility\r\n"
      + " FROM changerequest left join mouse on changerequest.mouse_id=mouse.id\r\n"
      + " left join holder on changerequest.holder_id=holder.id\r\n"
      + " left join facility on changerequest.facility_id=facility.id";

  // Per-holder counts of live mice, split by the covert flag.
  private static final String holderQueryHeader =
      "SELECT holder.*, (select count(*) \r\n"
      + " FROM mouse_holder_facility left join mouse on mouse_holder_facility.mouse_id=mouse.id\r\n"
      + " WHERE holder_id=holder.id and covert=0 and mouse.status='live') as 'mice held',\r\n"
      + "(select count(*) \r\n"
      + " FROM mouse_holder_facility left join mouse on mouse_holder_facility.mouse_id=mouse.id\r\n"
      + " WHERE holder_id=holder.id and covert=1 and mouse.status='live') as 'covert mice held'\r\n"
      + " FROM holder\r\n";

  private static final String facilityQueryHeader =
      "SELECT id, facility, description, code"
      + ", (select count(*) from mouse_holder_facility where facility_id=facility.id) as 'mice held'\r\n"
      + " FROM facility\r\n ";

  private static final String mouseHolderQueryHeader =
      "SELECT holder_id, facility_id, covert, cryo_live_status, firstname, lastname, "
      + "department, email, alternate_email, alternate_name, tel, facility"
      + "\r\n FROM mouse_holder_facility t1 left join holder on t1.holder_id=holder.id "
      + "left join facility on t1.facility_id=facility.id \r\n";

  private static final String geneQueryHeader =
      "SELECT id,fullname,symbol,mgi \r\n FROM gene\r\n ";

  // Matches search input made only of '#<digits>' mouse-ID terms,
  // optionally comma/whitespace separated, e.g. "#12, #345".
  private static final String mouseIDSearchTermsRegex = "^(#[0-9]+,?\\s*)+$";

  /**
   * Obtains a JDBC connection from the container's JNDI DataSource
   * {@code jdbc/mouse_inventory}. Callers are responsible for closing it.
   * @throws Exception if the JNDI lookup or connection acquisition fails;
   * the error is logged before being rethrown.
   */
  private static Connection connect() throws Exception {
    try {
      Context initCtx = new InitialContext();
      Context envCtx = (Context) initCtx.lookup("java:comp/env");
      DataSource ds = (DataSource) envCtx.lookup("jdbc/mouse_inventory");
      return ds.getConnection();
    } catch (Exception e) {
      Log.Error("Problem connecting",e);
      throw e;
    }
  }

  /*Comparator<Sub> comparator = new Comparator<Sub>(){
    public int compare(Sub a, Sub b) {
      return HTMLGeneration.emptyIfNull(a.PIName)
          .compareTo(HTMLGeneration.emptyIfNull(b.PIName));
    }
  };
  Collections.sort(linesByRecipientPI,comparator);
  */
  // NOTE(review): the statements below appear to be the tail of a report
  // method whose header lies outside this chunk of the file -- they are not
  // valid at class level as shown here. Confirm against the full source.
  for (Sub sub : linesByRecipientPI) {
    //TODO why don't we want the PIName as well??
    result.append(sub.Line);
  }
  return result.toString();
  }

  /**
   * Builds a CSV report (one row per recipient holder) for submissions
   * created by the given import report.
   * @param reportId id of the import report whose new submissions to list.
   * @return CSV text with a fixed header row, or a plain message when the
   * report produced no submissions.
   */
  private static String runImportOtherInstitutionsSubmissionReport(int reportId) {
    ArrayList<Integer> submissionIds = getImportNewObjectIds(reportId);
    if (submissionIds.size() <= 0) {
      return "No submissions were created for report id " + reportId;
    }
    String additionalJoins = "left join import_new_objects on submittedmouse.id=import_new_objects.object_id";
    ArrayList<String> whereTerms = new ArrayList<String>();
    // reportId is an int, so concatenating it into SQL cannot inject,
    // but it diverges from parameterized-query style used elsewhere.
    whereTerms.add("import_new_objects.import_report_id=" + reportId);
    StringBuilder result = new StringBuilder();
    result.append("Message Sent,Response,Submission Number,PI Recipient,Mouse Name,MGI ID,Source Institution,Recipient,Email Recipient 1, Email Recipient 2");
    result.append("\r\n");
    List<SubmittedMouse> submissions = getSubmissions(additionalJoins, whereTerms,null);
    List<Sub> linesByRecipientPI = new ArrayList<Sub>();
    for (SubmittedMouse submission : submissions) {
      Properties props = submission.getProperties();
      if (props.containsKey("holderCount")) {
        // Multi-holder submission: emit one CSV row per holder, using the
        // "-<index>" suffixed property keys.
        int holderCount = Integer.parseInt(props.getProperty("holderCount"));
        for(int i =0; i< holderCount;i++) {
          StringBuilder line = new StringBuilder();
          String piName = HTMLGeneration.emptyIfNull(props.getProperty("Recipient PI Name-"+i ));
          // Two leading empty cells for the "Message Sent" and "Response"
          // columns of the header row.
          line.append(",");
          line.append(",");
          line.append(submission.getSubmissionID());
          line.append(",\"");
          line.append(piName);
          line.append("\",\"");
          line.append(HTMLGeneration.emptyIfNull(props.getProperty("NewMouseName")));
          line.append("\",\"");
          line.append(HTMLGeneration.emptyIfNull(props.getProperty("MouseMGIID")));
          line.append("\",\"");
          line.append(HTMLGeneration.emptyIfNull(props.getProperty("Sender institution-"+i)));
          line.append("\",\"");
          line.append(HTMLGeneration.emptyIfNull(props.getProperty("Recipient-"+i)));
          line.append("\",\"");
          line.append(HTMLGeneration.emptyIfNull(props.getProperty("New Holder Email-"+i)));
          line.append("\",\"");
          line.append(HTMLGeneration.emptyIfNull(props.getProperty("Recipient Email-"+i)));
          line.append("\"");
          line.append("\r\n");
          linesByRecipientPI.add(new Sub(piName, line.toString()));
        }
      } else {
        // Single-holder submission: same columns, un-suffixed property keys.
        // NOTE(review): unlike the branch above, this row does NOT start
        // with the two empty "Message Sent"/"Response" cells, so its columns
        // are shifted left by two relative to the header -- confirm whether
        // that is intentional.
        StringBuilder line = new StringBuilder();
        String piName = HTMLGeneration.emptyIfNull(props.getProperty("Recipient PI Name" ));
        line.append(submission.getSubmissionID());
        line.append(",\"");
        line.append(piName);
        line.append("\",\"");
        line.append(HTMLGeneration.emptyIfNull(props.getProperty("NewMouseName")));
        line.append("\",\"");
        line.append(HTMLGeneration.emptyIfNull(props.getProperty("MouseMGIID")));
        line.append("\",\"");
        line.append(HTMLGeneration.emptyIfNull(props.getProperty("Sender institution")));
        line.append("\",\"");
        line.append(HTMLGeneration.emptyIfNull(props.getProperty("Recipient")));
        line.append("\",\"");
        line.append(HTMLGeneration.emptyIfNull(props.getProperty("New Holder Email")));
        line.append("\",\"");
        line.append(HTMLGeneration.emptyIfNull(props.getProperty("Recipient Email")));
        line.append("\"");
        line.append("\r\n");
        linesByRecipientPI.add(new Sub(piName, line.toString()));
      }
    }
    /*Comparator<Sub> comparator = new Comparator<Sub>(){
      public int compare(Sub a, Sub b) {
        return HTMLGeneration.emptyIfNull(a.PIName)
            .compareTo(HTMLGeneration.emptyIfNull(b.PIName));
      }
    };
    Collections.sort(linesByRecipientPI,comparator);
    */
    for (Sub sub : linesByRecipientPI) {
      //TODO why don't we want the PIName as well??
      result.append(sub.Line);
    }
    return result.toString();
  }
package shadow.build.closure;

import com.google.javascript.jscomp.*;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;

/**
 * util pass to transform ESM rewritten export const foo from const to var
 * since with ES6+ it'll remain as const but fail eval-based loading since const scope
 * is too strict.
 */
public class GlobalsAsVar implements NodeTraversal.Callback, CompilerPass {

    /** Compiler owning the AST being rewritten. */
    private final AbstractCompiler compiler;

    public GlobalsAsVar(AbstractCompiler compiler) {
        this.compiler = compiler;
    }

    /** CompilerPass entry point: traverse the whole program root. */
    @Override
    public void process(Node externs, Node root) {
        NodeTraversal.traverse(compiler, root, this);
    }

    /** Only rewrite declarations in the global scope; skip function bodies. */
    @Override
    public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
        return t.inGlobalScope();
    }

    /**
     * Replaces a global LET/CONST declaration node with an equivalent VAR.
     * Declarations directly under an EXPORT are left alone, as are
     * destructuring forms (first child is not a plain NAME).
     */
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
        if (n.isLet() || n.isConst()) {
            if (!parent.isExport()) {
                Node lhs = n.getFirstChild();
                if (lhs.isName()) {
                    // For `const foo = 1` the NAME node's first child is the
                    // initializer; it is null for `let noInit;`.
                    Node rhs = lhs.getFirstChild();

                    Node replacement;
                    if (rhs != null) {
                        replacement = IR.var(lhs.detach(), rhs.detach());
                    } else {
                        replacement = IR.var(lhs.detach());
                    }
                    // NOTE(review): only the first declarator is carried over;
                    // a multi-name declaration like `let a = 1, b = 2` would
                    // appear to lose `b` when the whole LET node is replaced.
                    // Presumably the upstream ESM rewrite never emits these --
                    // confirm.
                    // NOTE(review): no change is reported to the compiler here
                    // (e.g. reportChangeToEnclosingScope) -- confirm whether
                    // later passes depend on change tracking.
                    n.replaceWith(replacement);
                }
            }
        }
    }

    /** Ad-hoc manual test harness; prints the transformed source. */
    public static void main(String[] args) {
        CompilerOptions co = new CompilerOptions();
        ShadowCompiler cc = new ShadowCompiler();
        cc.initOptions(co);

        SourceFile srcFile = SourceFile.fromCode("test.js", "let noInit; let [a, ...rest] = [1,2,3,4]; const foo = 1; function foo() { const bar = 2; }");
        JsAst ast = new JsAst(srcFile);

        Node node = ast.getAstRoot(cc);

        GlobalsAsVar pass = new GlobalsAsVar(cc);
        NodeTraversal.traverse(cc, node, pass);

        System.out.println(cc.toSource(node));
    }
}
package eu.freme.broker.tools; import java.io.IOException; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletResponse; import org.springframework.stereotype.Component; /** * Filter that allows CORS for all all domains on all API endpoints. * * @author Jan Nehring - jan.nehring@dfki.de */ @Component public class CORSFilter implements Filter { public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException { HttpServletResponse response = (HttpServletResponse) res; response.setHeader("Access-Control-Allow-Origin", "*"); response.setHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, DELETE"); response.setHeader("Access-Control-Max-Age", "3600"); response.setHeader("Access-Control-Allow-Headers", "x-requested-with, content-type"); chain.doFilter(req, res); } public void init(FilterConfig filterConfig) {} public void destroy() {} }
package gaul.cacofonix.store; import gaul.cacofonix.DataPoint; import gaul.cacofonix.Metric; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Timer; import java.util.TimerTask; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; /** * * @author ashish */ public class H2Datastore implements Datastore { private static final Logger logger = LogManager.getLogger("cacofonix.datastore"); private final Timer timer; private final Connection conn; public H2Datastore(String dbUrl) { org.h2.Driver.load(); try { conn = DriverManager.getConnection(dbUrl); conn.setAutoCommit(true); timer = new Timer("Database Cleaner", true); init(); } catch (IOException | SQLException err) { throw new RuntimeException("Error setting up datastore at " + dbUrl, err); } logger.info("Started data store at " + dbUrl); } private void init() throws SQLException, IOException { String sql = load("store/createh2.sql"); try (Statement stmt = conn.createStatement()) { stmt.execute(sql); } timer.scheduleAtFixedRate(new Cleaner(), 43200_000, 4200_000); } private String load(String resource) throws IOException { ClassLoader loader = this.getClass().getClassLoader(); InputStream inputStream = loader.getResourceAsStream(resource); if (inputStream == null) { throw new FileNotFoundException("Unable to load " + resource); } try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"))) { StringBuilder builder = new StringBuilder(); String line; while ((line = reader.readLine()) != null) { builder.append(line).append('\n'); } return builder.toString(); } } private int 
getMetricId(String metric) throws SQLException { String query = "select id from metric where metric_name = ?"; try (PreparedStatement stmt = conn.prepareStatement(query)) { stmt.setString(1, metric); ResultSet result = stmt.executeQuery(); if (result.next()) { return result.getInt(1); } return -1; } } @Override public List<Metric> getMetrics() throws DatastoreException { try { try (Statement stmt = conn.createStatement()) { ResultSet result = stmt.executeQuery( "select id, metric_name, retention, frequency from metric order by metric_name"); List<Metric> metrics = new LinkedList<>(); while (result.next()) { Metric m = new PersistedMetric(result.getInt(1), result.getString(2), result.getInt(4), result.getInt(3)); metrics.add(m); } return metrics; } } catch (SQLException err) { throw new DatastoreException("Unable to get list of metrics. " + err.getMessage(), err); } } private void saveMetric(String metric) throws SQLException { String query = "insert into metric (metric_name) values (?)"; try (PreparedStatement stmt = conn.prepareStatement(query)) { stmt.setString(1, metric); stmt.executeUpdate(); } } @Override public void save(String metric, DataPoint dp) throws DatastoreException { logger.info("{}, {}, {}", metric, dp.getTimestamp(), dp.getValue()); try { int id = getMetricId(metric); if (id == -1) { saveMetric(metric); id = getMetricId(metric); } String query = "insert into datapoint values (?, ?, ?)"; try (PreparedStatement stmt = conn.prepareStatement(query)) { stmt.setInt(1, id); stmt.setLong(2, dp.getTimestamp()); stmt.setDouble(3, dp.getValue()); stmt.executeUpdate(); } } catch (SQLException err) { throw new DatastoreException("Unable to save metric. 
" + err.getMessage(), err); } } @Override public List<DataPoint> query(final String metric, long start, long end) throws DatastoreException { try { int metricId = getMetricId(metric); if (metricId == -1) { return Collections.emptyList(); } try (Statement stmt = conn.createStatement()) { String query = String.format("select tstamp, value from datapoint " + "where metric_id = %d and tstamp >= %d and tstamp <= %d " + "order by tstamp", metricId, start, end); ResultSet result = stmt.executeQuery(query); List<DataPoint> points = new LinkedList<>(); while (result.next()) { points.add(new DataPoint(result.getLong(1), result.getDouble(2))); } return points; } } catch (SQLException err) { String msg = String.format("Unable to get metric for %s. %s", metric, err.getMessage()); throw new DatastoreException(msg, err); } } @Override public void close() { timer.cancel(); try { conn.close(); } catch (SQLException ex) { } } private class Cleaner extends TimerTask { @Override public void run() { try { long now = System.currentTimeMillis(); PreparedStatement stmt = conn.prepareStatement("delete from datapoint where metric_id = ? and tstamp < ?"); for (Metric metric : getMetrics()) { if (metric.getRetention() > 0) { PersistedMetric pm = (PersistedMetric)metric; stmt.setInt(1, pm.getId()); stmt.setLong(2, now - (pm.getRetention() * 1000L)); int rows = stmt.executeUpdate(); logger.debug("Deleted {} rows for {}", rows, metric.getName()); } } } catch (SQLException | DatastoreException err) { logger.warn("Error running cleaner.", err); } } } public class PersistedMetric extends Metric { private final int id; public PersistedMetric(int id, String name, int interval, int retention) { super(name, interval, retention); this.id = id; } public int getId() { return id; } } }
package graphql.execution;

import graphql.Assert;
import graphql.Internal;
import graphql.language.Argument;
import graphql.language.ArrayValue;
import graphql.language.NullValue;
import graphql.language.ObjectField;
import graphql.language.ObjectValue;
import graphql.language.Value;
import graphql.language.VariableDefinition;
import graphql.language.VariableReference;
import graphql.schema.Coercing;
import graphql.schema.CoercingParseValueException;
import graphql.schema.GraphQLArgument;
import graphql.schema.GraphQLCodeRegistry;
import graphql.schema.GraphQLEnumType;
import graphql.schema.GraphQLInputObjectField;
import graphql.schema.GraphQLInputObjectType;
import graphql.schema.GraphQLInputType;
import graphql.schema.GraphQLList;
import graphql.schema.GraphQLScalarType;
import graphql.schema.GraphQLSchema;
import graphql.schema.GraphQLType;
import graphql.schema.visibility.GraphqlFieldVisibility;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import static graphql.Assert.assertShouldNeverHappen;
import static graphql.collect.ImmutableKit.map;
import static graphql.schema.GraphQLTypeUtil.isList;
import static graphql.schema.GraphQLTypeUtil.isNonNull;
import static graphql.schema.GraphQLTypeUtil.unwrapOne;
import static graphql.schema.visibility.DefaultGraphqlFieldVisibility.DEFAULT_FIELD_VISIBILITY;

@SuppressWarnings("rawtypes")
@Internal
public class ValuesResolver {

    /**
     * This method coerces the "raw" variables values provided to the engine. The coerced values will be used to
     * provide arguments to {@link graphql.schema.DataFetchingEnvironment}
     *
     * The coercing is ultimately done via {@link Coercing}.
     *
     * @param schema the schema
     * @param variableDefinitions the variable definitions
     * @param variableValues the supplied variables
     *
     * @return coerced variable values as a map
     */
    public Map<String, Object> coerceVariableValues(GraphQLSchema schema,
                                                    List<VariableDefinition> variableDefinitions,
                                                    Map<String, Object> variableValues) {
        GraphqlFieldVisibility fieldVisibility = schema.getCodeRegistry().getFieldVisibility();
        Map<String, Object> coercedValues = new LinkedHashMap<>();
        for (VariableDefinition variableDefinition : variableDefinitions) {
            String variableName = variableDefinition.getName();
            // Path of input names accumulated for error reporting; fresh per
            // variable so each error path starts at the variable itself.
            List<Object> nameStack = new ArrayList<>();
            GraphQLType variableType = TypeFromAST.getTypeFromAST(schema, variableDefinition.getType());
            Assert.assertTrue(variableType instanceof GraphQLInputType);
            // can be NullValue
            Value defaultValue = variableDefinition.getDefaultValue();
            // "absent" and "explicitly null" are distinguished deliberately.
            boolean hasValue = variableValues.containsKey(variableName);
            Object value = variableValues.get(variableName);
            if (!hasValue && defaultValue != null) {
                // Absent variable with a declared default: coerce the default
                // AST literal instead.
                Object coercedDefaultValue = coerceValueAst(fieldVisibility, variableType, defaultValue, null);
                coercedValues.put(variableName, coercedDefaultValue);
            } else if (isNonNull(variableType) && (!hasValue || value == null)) {
                // Non-null variable missing or explicitly null: hard error.
                throw new NonNullableValueCoercedAsNullException(variableDefinition, variableType);
            } else if (hasValue) {
                if (value == null) {
                    // Explicit null for a nullable variable is preserved.
                    coercedValues.put(variableName, null);
                } else {
                    Object coercedValue = coerceValue(fieldVisibility, variableDefinition,
                            variableDefinition.getName(), variableType, value, nameStack);
                    coercedValues.put(variableName, coercedValue);
                }
            } else {
                // hasValue = false && defaultValue == null for a nullable type
                // meaning no value was provided for variableName
            }
        }

        return coercedValues;
    }

    /**
     * Resolves field argument values using the default field visibility.
     */
    public Map<String, Object> getArgumentValues(List<GraphQLArgument> argumentTypes,
                                                 List<Argument> arguments,
                                                 Map<String, Object> variables) {
        GraphQLCodeRegistry codeRegistry = GraphQLCodeRegistry.newCodeRegistry()
                .fieldVisibility(DEFAULT_FIELD_VISIBILITY)
                .build();
        return getArgumentValuesImpl(codeRegistry, argumentTypes, arguments, variables);
    }

    /**
     * Resolves field argument values using the visibility configured on the
     * given code registry.
     */
    public Map<String, Object> getArgumentValues(GraphQLCodeRegistry codeRegistry,
                                                 List<GraphQLArgument> argumentTypes,
                                                 List<Argument> arguments,
                                                 Map<String, Object> variables) {
        return getArgumentValuesImpl(codeRegistry, argumentTypes, arguments, variables);
    }

    // Core argument resolution: merges AST argument literals, already-coerced
    // variable values and argument defaults into a name->value map.
    private Map<String, Object> getArgumentValuesImpl(GraphQLCodeRegistry codeRegistry,
                                                      List<GraphQLArgument> argumentTypes,
                                                      List<Argument> arguments,
                                                      Map<String, Object> coercedVariableValues) {
        if (argumentTypes.isEmpty()) {
            return Collections.emptyMap();
        }

        Map<String, Object> coercedValues = new LinkedHashMap<>();
        Map<String, Argument> argumentMap = argumentMap(arguments);
        for (GraphQLArgument argumentDefinition : argumentTypes) {
            GraphQLInputType argumentType = argumentDefinition.getType();
            String argumentName = argumentDefinition.getName();
            Argument argument = argumentMap.get(argumentName);
            Object defaultValue = argumentDefinition.getDefaultValue();
            boolean hasValue = argument != null;
            Object value;
            Value argumentValue = argument != null ? argument.getValue() : null;
            if (argumentValue instanceof VariableReference) {
                // Variable references take their (already coerced) value from
                // coercedVariableValues; "has a value" follows the variable.
                String variableName = ((VariableReference) argumentValue).getName();
                hasValue = coercedVariableValues.containsKey(variableName);
                value = coercedVariableValues.get(variableName);
            } else {
                value = argumentValue;
            }
            if (!hasValue && argumentDefinition.hasSetDefaultValue()) {
                //TODO: default value needs to be coerced
                coercedValues.put(argumentName, defaultValue);
            } else if (isNonNull(argumentType) && (!hasValue || value == null)) {
                // NOTE(review): a bare RuntimeException with no message or
                // context -- unlike the variable path, which throws
                // NonNullableValueCoercedAsNullException. Looks like a gap;
                // confirm before relying on this error shape.
                throw new RuntimeException();
            } else if (hasValue) {
                if (value == null) {
                    coercedValues.put(argumentName, null);
                } else if (argumentValue instanceof VariableReference) {
                    // Already coerced during variable processing; use as-is.
                    coercedValues.put(argumentName, value);
                } else {
                    value = coerceValueAst(codeRegistry.getFieldVisibility(), argumentType,
                            argument.getValue(), coercedVariableValues);
                    coercedValues.put(argumentName, value);
                }
            } else {
                // nullable type && hasValue == false && hasDefaultValue == false
                // meaning no value was provided for argumentName
            }
        }
        return coercedValues;
    }

    // Index AST arguments by name; later duplicates overwrite earlier ones.
    private Map<String, Argument> argumentMap(List<Argument> arguments) {
        Map<String, Argument> result = new LinkedHashMap<>(arguments.size());
        for (Argument argument : arguments) {
            result.put(argument.getName(), argument);
        }
        return result;
    }

    // Recursive external-value coercion, dispatching on the unwrapped input
    // type. nameStack records the path for error messages.
    @SuppressWarnings("unchecked")
    private Object coerceValue(GraphqlFieldVisibility fieldVisibility,
                               VariableDefinition variableDefinition,
                               String inputName,
                               GraphQLType graphQLType,
                               Object value,
                               List<Object> nameStack) {
        try {
            nameStack.add(inputName);

            if (isNonNull(graphQLType)) {
                // Unwrap the non-null and re-check; a null result from the
                // inner coercion violates the non-null contract.
                Object returnValue = coerceValue(fieldVisibility, variableDefinition,
                        inputName, unwrapOne(graphQLType), value, nameStack);
                if (returnValue == null) {
                    throw new NonNullableValueCoercedAsNullException(variableDefinition,
                            inputName, nameStack, graphQLType);
                }
                return returnValue;
            }

            if (value == null) {
                return null;
            }

            if (graphQLType instanceof GraphQLScalarType) {
                return coerceValueForScalar((GraphQLScalarType) graphQLType, value);
            } else if (graphQLType instanceof GraphQLEnumType) {
                return coerceValueForEnum((GraphQLEnumType) graphQLType, value);
            } else if (graphQLType instanceof GraphQLList) {
                return coerceValueForList(fieldVisibility, variableDefinition, inputName,
                        (GraphQLList) graphQLType, value, nameStack);
            } else if (graphQLType instanceof GraphQLInputObjectType) {
                if (value instanceof Map) {
                    return coerceValueForInputObjectType(fieldVisibility, variableDefinition,
                            (GraphQLInputObjectType) graphQLType, (Map<String, Object>) value, nameStack);
                } else {
                    throw CoercingParseValueException.newCoercingParseValueException()
                            .message("Expected type 'Map' but was '" + value.getClass().getSimpleName() +
                                    "'. Variables for input objects must be an instance of type 'Map'.")
                            .path(nameStack)
                            .build();
                }
            } else {
                return assertShouldNeverHappen("unhandled type %s", graphQLType);
            }
        } catch (CoercingParseValueException e) {
            // Re-wrap only location-less exceptions so the path and source
            // location get attached exactly once.
            if (e.getLocations() != null) {
                throw e;
            }
            throw CoercingParseValueException.newCoercingParseValueException()
                    .message("Variable '" + inputName + "' has an invalid value : " + e.getMessage())
                    .extensions(e.getExtensions())
                    .cause(e.getCause())
                    .sourceLocation(variableDefinition.getSourceLocation())
                    .path(nameStack)
                    .build();
        }
    }

    // Coerces each visible field of an input object; rejects extraneous keys.
    // (Method continues beyond this chunk of the file.)
    private Object coerceValueForInputObjectType(GraphqlFieldVisibility fieldVisibility,
                                                 VariableDefinition variableDefinition,
                                                 GraphQLInputObjectType inputObjectType,
                                                 Map<String, Object> inputMap,
                                                 List<Object> nameStack) {
        Map<String, Object> result = new LinkedHashMap<>();
        List<GraphQLInputObjectField> fields = fieldVisibility.getFieldDefinitions(inputObjectType);
        List<String> fieldNames = map(fields, GraphQLInputObjectField::getName);
        for (String inputFieldName : inputMap.keySet()) {
            if (!fieldNames.contains(inputFieldName)) {
                throw new InputMapDefinesTooManyFieldsException(inputObjectType, inputFieldName);
            }
        }

        for (GraphQLInputObjectField inputField : fields) {
            if (inputMap.containsKey(inputField.getName()) || alwaysHasValue(inputField)) {
                // getOrDefault will
return a null value if its present in the map as null // defaulting only applies if the key is missing - we want this Object inputValue = inputMap.getOrDefault(inputField.getName(), inputField.getDefaultValue()); Object coerceValue = coerceValue(fieldVisibility, variableDefinition, inputField.getName(), inputField.getType(), inputValue, nameStack); result.put(inputField.getName(), coerceValue == null ? inputField.getDefaultValue() : coerceValue); } } return result; } private boolean alwaysHasValue(GraphQLInputObjectField inputField) { return inputField.getDefaultValue() != null || isNonNull(inputField.getType()); } private Object coerceValueForScalar(GraphQLScalarType graphQLScalarType, Object value) { return graphQLScalarType.getCoercing().parseValue(value); } private Object coerceValueForEnum(GraphQLEnumType graphQLEnumType, Object value) { return graphQLEnumType.parseValue(value); } private List coerceValueForList(GraphqlFieldVisibility fieldVisibility, VariableDefinition variableDefinition, String inputName, GraphQLList graphQLList, Object value, List<Object> nameStack) { if (value instanceof Iterable) { List<Object> result = new ArrayList<>(); for (Object val : (Iterable) value) { result.add(coerceValue(fieldVisibility, variableDefinition, inputName, graphQLList.getWrappedType(), val, nameStack)); } return result; } else { return Collections.singletonList(coerceValue(fieldVisibility, variableDefinition, inputName, graphQLList.getWrappedType(), value, nameStack)); } } private Object coerceValueAst(GraphqlFieldVisibility fieldVisibility, GraphQLType type, Value inputValue, Map<String, Object> variables) { if (inputValue instanceof VariableReference) { return variables.get(((VariableReference) inputValue).getName()); } if (inputValue instanceof NullValue) { return null; } if (type instanceof GraphQLScalarType) { return parseLiteral(inputValue, ((GraphQLScalarType) type).getCoercing(), variables); } if (isNonNull(type)) { return coerceValueAst(fieldVisibility, 
unwrapOne(type), inputValue, variables); } if (type instanceof GraphQLInputObjectType) { return coerceValueAstForInputObject(fieldVisibility, (GraphQLInputObjectType) type, (ObjectValue) inputValue, variables); } if (type instanceof GraphQLEnumType) { return ((GraphQLEnumType) type).parseLiteral(inputValue); } if (isList(type)) { return coerceValueAstForList(fieldVisibility, (GraphQLList) type, inputValue, variables); } return null; } private Object parseLiteral(Value inputValue, Coercing coercing, Map<String, Object> variables) { // the CoercingParseLiteralException exception that could happen here has been validated earlier via ValidationUtil return coercing.parseLiteral(inputValue, variables); } private Object coerceValueAstForList(GraphqlFieldVisibility fieldVisibility, GraphQLList graphQLList, Value value, Map<String, Object> variables) { if (value instanceof ArrayValue) { ArrayValue arrayValue = (ArrayValue) value; List<Object> result = new ArrayList<>(); for (Value singleValue : arrayValue.getValues()) { result.add(coerceValueAst(fieldVisibility, graphQLList.getWrappedType(), singleValue, variables)); } return result; } else { return Collections.singletonList(coerceValueAst(fieldVisibility, graphQLList.getWrappedType(), value, variables)); } } private Object coerceValueAstForInputObject(GraphqlFieldVisibility fieldVisibility, GraphQLInputObjectType type, ObjectValue inputValue, Map<String, Object> coercedVariableValues) { Map<String, Object> coercedValues = new LinkedHashMap<>(); Map<String, ObjectField> inputFieldsByName = mapObjectValueFieldsByName(inputValue); List<GraphQLInputObjectField> inputFieldTypes = fieldVisibility.getFieldDefinitions(type); for (GraphQLInputObjectField inputFieldType : inputFieldTypes) { // Object defaultValue = inputFieldType.getDefaultValue(); // String fieldName = inputFieldType.getName(); GraphQLInputType fieldType = inputFieldType.getType(); String fieldName = inputFieldType.getName(); ObjectField field = 
inputFieldsByName.get(fieldName); Object defaultValue = inputFieldType.getDefaultValue(); boolean hasValue = field != null; Object value; Value fieldValue = field != null ? field.getValue() : null; if (fieldValue instanceof VariableReference) { String variableName = ((VariableReference) fieldValue).getName(); hasValue = coercedVariableValues.containsKey(variableName); value = coercedVariableValues.get(variableName); } else { value = fieldValue; } if (!hasValue && inputFieldType.getDefaultValue() != null /*should be hasSetDefaultValue */) { //TODO: default value should be coerced coercedValues.put(fieldName, defaultValue); } else if (isNonNull(fieldType) && (!hasValue || value == null)) { throw new NonNullableValueCoercedAsNullException(inputFieldType); } else if (hasValue) { if (value == null) { coercedValues.put(fieldName, null); } else if (fieldValue instanceof VariableReference) { coercedValues.put(fieldName, value); } else { value = coerceValueAst(fieldVisibility, fieldType, fieldValue, coercedVariableValues); coercedValues.put(fieldName, value); } } else { // nullable type && hasValue == false && hasDefaultValue == false // meaning no value was provided for this field } } return coercedValues; } private void assertNonNullInputField(GraphQLInputObjectField inputTypeField) { if (isNonNull(inputTypeField.getType())) { throw new NonNullableValueCoercedAsNullException(inputTypeField); } } private Map<String, ObjectField> mapObjectValueFieldsByName(ObjectValue inputValue) { Map<String, ObjectField> inputValueFieldsByName = new LinkedHashMap<>(); for (ObjectField objectField : inputValue.getObjectFields()) { inputValueFieldsByName.put(objectField.getName(), objectField); } return inputValueFieldsByName; } }
import net.miginfocom.swing.MigLayout; import javax.imageio.ImageIO; import javax.swing.*; import javax.swing.filechooser.FileFilter; import javax.swing.filechooser.FileNameExtensionFilter; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.image.BufferedImage; import java.io.File; public class ImageWindow { private final static int WIDTH = 500; private final static int HEIGHT = 500; private final static int IMAGE_WIDTH = 300; private final static int IMAGE_HEIGHT = 300; private JFrame window; private JButton load; private JButton save; private JLabel loadedImage; private JLabel mixedImage; private JButton mixButton; private JRadioButton[] buttons; private ButtonGroup red; private ButtonGroup green; private ButtonGroup blue; private BufferedImage original; private BufferedImage mixed; public ImageWindow() { initItems(); addItems(); initActions(); } private void initItems() { window = new JFrame("RGB Mixer"); window.setSize(WIDTH, HEIGHT); load = new JButton("Choose Any Image"); loadedImage = new JLabel(); loadedImage.setSize(new Dimension(IMAGE_WIDTH, IMAGE_HEIGHT)); loadedImage.setPreferredSize(new Dimension(IMAGE_WIDTH, IMAGE_HEIGHT)); loadedImage.setMinimumSize(new Dimension(IMAGE_WIDTH, IMAGE_HEIGHT)); loadedImage.setMaximumSize(new Dimension(IMAGE_WIDTH, IMAGE_HEIGHT)); save = new JButton("Saved Mixed Image"); mixedImage = new JLabel(); mixedImage.setSize(new Dimension(IMAGE_WIDTH, IMAGE_HEIGHT)); mixedImage.setPreferredSize(new Dimension(IMAGE_WIDTH, IMAGE_HEIGHT)); mixedImage.setMinimumSize(new Dimension(IMAGE_WIDTH, IMAGE_HEIGHT)); mixedImage.setMaximumSize(new Dimension(IMAGE_WIDTH, IMAGE_HEIGHT)); buttons = new JRadioButton[9]; buttons[0] = new JRadioButton("Red"); buttons[1] = new JRadioButton("Red"); buttons[2] = new JRadioButton("Red"); buttons[3] = new JRadioButton("Green"); buttons[4] = new JRadioButton("Green"); buttons[5] = new JRadioButton("Green"); buttons[6] = new JRadioButton("Blue"); 
buttons[7] = new JRadioButton("Blue"); buttons[8] = new JRadioButton("Blue"); buttons[0].setSelected(true); buttons[4].setSelected(true); buttons[8].setSelected(true); mixButton = new JButton("Mix..."); red = new ButtonGroup(); red.add(buttons[0]); red.add(buttons[3]); red.add(buttons[6]); green = new ButtonGroup(); green.add(buttons[1]); green.add(buttons[4]); green.add(buttons[7]); blue = new ButtonGroup(); blue.add(buttons[2]); blue.add(buttons[5]); blue.add(buttons[8]); } private void addItems() { window.setLayout(new MigLayout("wrap 5")); window.add(load); window.add(new JSeparator(SwingConstants.VERTICAL), "span 1 16, height 350"); window.add(new JLabel("Red")); window.add(new JSeparator(SwingConstants.VERTICAL), "span 1 16, height 350"); window.add(mixedImage, "span 1 15"); window.add(loadedImage, "span 1 15"); window.add(buttons[0]); window.add(buttons[3]); window.add(buttons[6]); window.add(new JSeparator(SwingConstants.HORIZONTAL), "width 75"); window.add(new JLabel("Green")); window.add(buttons[1]); window.add(buttons[4]); window.add(buttons[7]); window.add(new JSeparator(SwingConstants.HORIZONTAL), "width 75"); window.add(new JLabel("Blue")); window.add(buttons[2]); window.add(buttons[5]); window.add(buttons[8]); window.add(new JSeparator(SwingConstants.HORIZONTAL), "width 75"); window.add(mixButton); window.add(save); window.setVisible(true); window.pack(); } private void initActions() { window.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); load.addActionListener(new ActionListener() { private JFileChooser imageChooser = null;//new JFileChooser(); private File file = null; @Override public void actionPerformed(ActionEvent e) { if (imageChooser == null) { imageChooser = new JFileChooser(); imageChooser.setMultiSelectionEnabled(false); imageChooser.setAcceptAllFileFilterUsed(false); // Get array of available formats String[] suffices = ImageIO.getReaderFileSuffixes(); // Add a file filter for each one for (String suffice : suffices) { 
FileFilter filter = new FileNameExtensionFilter(suffice + " files", suffice); imageChooser.addChoosableFileFilter(filter); } } if (file != null) { imageChooser.setSelectedFile(file); } if (imageChooser.showOpenDialog(window) == JFileChooser.APPROVE_OPTION) { file = imageChooser.getSelectedFile(); load.setText(file.getName()); try { original = ImageIO.read(file); mixed = ImageIO.read(file); loadedImage.setIcon(new ImageIcon(original)); mixedImage.setIcon(new ImageIcon(mixed)); } catch (Exception exp) { } } } }); } }
package io.rappid.webtest.common;

import com.google.common.base.Objects;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.ITestContext;
import org.testng.ITestResult;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeSuite;
import org.testng.xml.XmlTest;

import java.util.concurrent.TimeUnit;

/**
 * Base class for TestNG web tests driven by a Selenium grid.
 * <p>
 * Lifecycle: {@link #beforeSuite} captures the suite's {@link ITestContext};
 * {@link #beforeMethod} creates a fresh {@link WebDriverBackend} per test method
 * and stores it (and this test instance) in thread-locals so that parallel test
 * methods each see their own driver; {@link #afterMethod} closes the backend,
 * logging rather than propagating any teardown failure.
 * <p>
 * Grid host/port and browser are read from the suite XML parameters
 * {@code grid.host}, {@code grid.port} and {@code browser}, with defaults
 * localhost:4444 / chrome.
 */
public abstract class WebTest extends WebTestBase {
    // Attribute key under which the driver is exposed on the ITestResult (for listeners/reporters).
    public final static String DRIVER = "driver";
    // Per-thread backend/test-instance so parallel TestNG methods do not share a driver.
    protected final static ThreadLocal<WebDriverBackend> backend = new ThreadLocal<WebDriverBackend>();
    protected final static ThreadLocal<WebTest> testBackend = new ThreadLocal<WebTest>();
    protected final Logger log = LoggerFactory.getLogger(this.getClass());
    // NOTE(review): static and set once per suite — shared across all subclasses/threads.
    private static ITestContext testContext;

    /** @return the backend bound to the current thread (null outside a running test method). */
    public static WebDriverBackend getWebDriverBackend() {
        return backend.get();
    }

    /** @return the test instance bound to the current thread. */
    public static WebTest getWebTest() {
        return testBackend.get();
    }

    @Override
    public WebDriver driver() {
        return getWebDriverBackend().driver();
    }

    @Override
    public JavascriptExecutor jsExecutor() {
        // RemoteWebDriver implements JavascriptExecutor, so the same driver serves both roles.
        return getWebDriverBackend().driver();
    }

    @Override
    public WebTest webTest() {
        return testBackend.get();
    }

    /** Implicit-wait timeout in seconds; subclasses may override. */
    protected int implicitlyWaitTimeout() {
        return 5;
    }

    @BeforeSuite(alwaysRun = true)
    public void beforeSuite(ITestContext context) {
        testContext = context;
    }

    /** Reads a parameter from the current suite XML; null when the parameter is absent. */
    protected synchronized String getParameter(String key) {
        return testContext.getCurrentXmlTest().getParameter(key);
    }

    /**
     * Per-method setup: binds this test and a fresh grid-backed driver to the
     * current thread, publishes the driver on the test result, and enables the
     * default implicit wait.
     */
    @BeforeMethod(alwaysRun = true)
    public synchronized void beforeMethod(ITestResult testResult) throws Exception {
        testBackend.set(this);
        String host = Objects.firstNonNull(getParameter("grid.host"), "localhost");
        int port = Integer.parseInt(Objects.firstNonNull(getParameter("grid.port"), "4444"));
        String browser = Objects.firstNonNull(getParameter("browser"), "chrome");
        WebDriverBackend webDriverBackend = new WebDriverBackend(host, port, browser);
        backend.set(webDriverBackend);
        log.info("Using grid: " + webDriverBackend.toString());
        RemoteWebDriver driver = webDriverBackend.driver();
        testResult.setAttribute(DRIVER, driver);
        enableImplicitlyWait();
    }

    /** Turns the implicit wait on at {@link #implicitlyWaitTimeout()} seconds. */
    public void enableImplicitlyWait() {
        getWebTest().driver().manage().timeouts().implicitlyWait(implicitlyWaitTimeout(), TimeUnit.SECONDS);
    }

    /** Turns the implicit wait off (0 seconds) — useful around explicit waits. */
    public void disableImplicitlyWait() {
        getWebTest().driver().manage().timeouts().implicitlyWait(0, TimeUnit.SECONDS);
    }

    /**
     * Per-method teardown: closes this thread's backend. Failures are logged and
     * swallowed deliberately so teardown never masks the test's own result.
     */
    @AfterMethod(alwaysRun = true)
    public synchronized void afterMethod() {
        try {
            WebDriverBackend webDriverBackend = backend.get();
            if (webDriverBackend != null) {
                webDriverBackend.close();
            }
        } catch (Throwable e) {
            log.error("Ignoring error in afterMethod: ", e);
        }
    }

    /** The URL a concrete test suite starts from. */
    public abstract String getStartUrl();
}
package is.ru.tictactoe;

import java.util.Scanner;

/**
 * Console front end for a two-player TicTacToe game.
 * <p>
 * Prompts for two player names, then runs rounds until a player declines
 * to continue. Board/game rules live in {@code TicTacToe}, {@code Board}
 * and {@code Player}.
 */
public class TicTacToeConsole {

    /** Entry point: reads player names from stdin and starts the game loop. */
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        System.out.println();
        System.out.print("Please enter a name for player1: ");
        String p1Name = getName(scanner);
        Player player1 = new Player(p1Name, Symbol.CROSS);
        System.out.println();
        System.out.print("Please enter a name for player2: ");
        String p2Name = getName(scanner);
        Player player2 = new Player(p2Name, Symbol.CIRCLE);
        play(scanner, player1, player2);
        scanner.close();
    }

    /** Runs rounds until the user answers something other than Y/y. */
    private static void play(Scanner scanner, Player player1, Player player2) {
        boolean continueGame = true;
        TicTacToe game = new TicTacToe(player1, player2);
        // NOTE(review): player1 is passed to every round; turn alternation is
        // presumably handled inside gameIteration/checkWinner — confirm in TicTacToe.
        Player player = player1;
        while (continueGame) {
            gameIteration(player, game, scanner);
            printBoard(game.getBoard());
            Player winner = game.restartGame();
            continueGame = printResults(winner, scanner);
        }
    }

    /**
     * Announces the round result and asks whether to play again.
     *
     * @return true iff the user answered 'Y' (case-insensitive)
     */
    private static boolean printResults(Player winner, Scanner scanner) {
        if (winner != null) {
            System.out.println(winner.getName() + ", you won this round");
        } else {
            System.out.println("Well this one was a draw, hopefully someone will win next time");
        }
        System.out.println("Do you want to play another round (Y/N): ");
        char cont = scanner.next().charAt(0);
        // FIX: replaced the redundant `? true : false` ternary with the boolean expression itself.
        return Character.toUpperCase(cont) == 'Y';
    }

    /**
     * Plays up to SIZE*SIZE moves of one round. Returns early when
     * checkWinner leaves the current player unchanged (round over).
     */
    private static void gameIteration(Player player, TicTacToe game, Scanner scanner) {
        final int SIZE = game.getSize();
        for (int i = 0; i < (SIZE * SIZE); i++) {
            printBoard(game.getBoard());
            System.out.print(player.getName() + ", its your move: ");
            int input = getInput(scanner, game.getBoard());
            Player temp = player;
            // checkWinner applies the move and returns the next player to move;
            // an unchanged player signals the round has ended.
            player = game.checkWinner(player, input);
            if (temp == player) {
                return;
            }
        }
    }

    /** Reads an int from stdin, re-prompting until it names a free, valid square. */
    private static int getInput(Scanner scanner, Board board) {
        int input;
        do {
            while (!scanner.hasNextInt()) {
                System.out.print("Please enter a valid input: ");
                scanner.next(); // discard the non-integer token
            }
            input = scanner.nextInt();
            if (board.isTaken(input)) {
                System.out.print("This spot is taken, please choose another one: ");
            } else if (!board.checkValidMove(input)) {
                System.out.print("Please enter a number between 1 and 9: ");
            }
        } while (board.isTaken(input) || !board.checkValidMove(input));
        return input;
    }

    /** Prints the board framed by blank lines. */
    private static void printBoard(Board board) {
        System.out.println();
        board.print();
        System.out.println();
    }

    /** Reads one line as a player name. */
    private static String getName(Scanner scanner) {
        return scanner.nextLine();
    }
}
package istc.bigdawg.signature;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;

import convenience.RTED;
import istc.bigdawg.packages.QueryContainerForCommonDatabase;
import istc.bigdawg.plan.operators.Operator;
import istc.bigdawg.signature.builder.ArraySignatureBuilder;
import istc.bigdawg.signature.builder.RelationalSignatureBuilder;
import istc.bigdawg.utils.IslandsAndCast.Scope;
import istc.bigdawg.utils.sqlutil.SQLExpressionUtils;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.Parenthesis;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import net.sf.jsqlparser.schema.Column;

/**
 * A query signature with four parts: sig1 captures the query-plan tree structure,
 * sig2 the object references, sig3 the literal constants, and sig4k one tree
 * expression per cross-database container. Signatures can be serialized with
 * {@link #toRecoverableString()} and recovered via {@link #Signature(String)},
 * and compared with {@link #compare(Signature)} (lower result = more similar).
 * <p>
 * Review fixes: {@code compare} previously discarded a best match at container
 * index 0 ({@code holder > 0} instead of {@code holder >= 0}); removed redundant
 * {@code new String(...)} copies and a {@code toString()} call on a String.
 */
public class Signature {

    private static Logger logger = Logger.getLogger(Signature.class.getName());

    // Separators used by toRecoverableString(); the *Rest variants are the regex forms.
    private static String fieldSeparator = "|||||";
    private static String fieldSeparatorRest = "[|][|][|][|][|]";
    private static String elementSeparator = "&&&&&";
    private static String elementSeparatorRest = "[&][&][&][&][&]";

    private Scope island;
    private String sig1;          // tree-structure signature
    private List<String> sig2;    // object references
    private List<String> sig3;    // literal constants
    private String query;
    private List<String> sig4k;   // one tree expression per container query
    private List<Map<String, Set<String>>> objectExpressionMapping = null;

    /**
     * Builds a signature from a parsed query.
     *
     * @param query     the original query text
     * @param island    the island the query targets (RELATIONAL or ARRAY)
     * @param root      root operator of the query plan
     * @param container cross-database container queries, keyed by name
     * @throws Exception if the island is unsupported or a builder fails
     */
    public Signature(String query, Scope island, Operator root, Map<String, QueryContainerForCommonDatabase> container) throws Exception {
        if (island.equals(Scope.RELATIONAL)) {
            setSig2(RelationalSignatureBuilder.sig2(query));
            setSig3(RelationalSignatureBuilder.sig3(query));
        } else if (island.equals(Scope.ARRAY)) {
            setSig2(ArraySignatureBuilder.sig2(query));
            setSig3(ArraySignatureBuilder.sig3(query));
        } else {
            throw new Exception("Invalid Signature island input: " + island);
        }

        objectExpressionMapping = new ArrayList<>();
        if (container.isEmpty()) {
            // No containers: take the mapping straight from the plan root.
            Map<String, Set<String>> mapping = root.getObjectToExpressionMappingForSignature();
            root.removeCTEEntriesFromObjectToExpressionMapping(mapping);
            objectExpressionMapping.add(mapping);
        }
        List<String> cs = new ArrayList<>();
        for (String s : container.keySet()) {
            cs.add(container.get(s).generateTreeExpression());
            objectExpressionMapping.add(container.get(s).generateObjectToExpressionMapping());
        }
        setSig4k(cs);
        setSig1(root.getTreeRepresentation(true));
        this.setQuery(query);
        this.setIsland(island);
    }

    /**
     * Recovers a signature from the string form produced by {@link #toRecoverableString()}.
     *
     * @throws Exception if the string does not have 5 or 6 separated fields
     */
    public Signature(String s) throws Exception {
        List<String> parsed = Arrays.asList(s.split(fieldSeparatorRest));
        if (parsed.size() != 5 && parsed.size() != 6) {
            throw new Exception("Ill-formed input string; cannot recover signature; String: " + s);
        }
        try {
            this.island = Scope.valueOf(parsed.get(0));
            this.sig1 = parsed.get(1);   // was new String(...): redundant copy
            this.sig2 = Arrays.asList(parsed.get(2).split(elementSeparatorRest));
            this.sig3 = Arrays.asList(parsed.get(3).split(elementSeparatorRest));
            this.query = parsed.get(4);  // was new String(...): redundant copy
            if (parsed.size() == 5)
                this.sig4k = new ArrayList<>();
            else
                this.sig4k = Arrays.asList(parsed.get(5).split(elementSeparatorRest));
        } catch (Exception e) {
            e.printStackTrace();
            throw new Exception("Ill-formed input string; cannot recover signature; String: " + s);
        }
    }

    /** Tree edit distance between two serialized plan trees (delegates to RTED). */
    public static double getTreeEditDistance(String s1, String s2) {
        return RTED.computeDistance(s1, s2);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("Signature:\n");
        sb.append("Island       : ").append(island.toString()).append('\n');
        sb.append("Signature 1  : ").append(sig1).append('\n');
        sb.append("Signature 2  : ").append(sig2.toString()).append('\n');
        sb.append("Signature 3  : ").append(sig3.toString()).append('\n');
        sb.append("Query        : ").append(query).append('\n');
        sb.append("Signature 4-k: ").append(sig4k.toString()).append('\n');
        return sb.toString();
    }

    public void print() {
        System.out.println(this.toString());
    }

    public String getSig1() {
        return sig1;
    }

    public void setSig1(String sig1) {
        this.sig1 = sig1;
    }

    public List<String> getSig2() {
        return sig2;
    }

    public void setSig2(List<String> sig2) {
        this.sig2 = sig2;
    }

    public List<String> getSig3() {
        return sig3;
    }

    public void setSig3(List<String> sig3) {
        this.sig3 = sig3;
    }

    /** Returns the query wrapped in its island's dispatch clause (bdrel/bdarray). */
    public String getQuery() {
        if (this.getIsland() == Scope.RELATIONAL) {
            return String.format("bdrel(%s);", query);
        } else if (this.getIsland() == Scope.ARRAY) {
            return String.format("bdarray(%s);", query);
        } else {
            // TODO make a clause for each type of query
            return query;
        }
    }

    public void setQuery(String query) {
        this.query = query;
    }

    public Scope getIsland() {
        return island;
    }

    private void setIsland(Scope island) {
        this.island = island;
    }

    public List<String> getSig4k() {
        return sig4k;
    }

    public void setSig4k(List<String> sig4k) {
        this.sig4k = sig4k;
    }

    public List<Map<String, Set<String>>> getObjectToExpressionMapping() {
        return objectExpressionMapping;
    }

    /** Re-parses each mapped expression into its tree form. */
    public List<Map<String, Set<String>>> getTreesOfObjectToExpressionMapping() throws JSQLParserException {
        List<Map<String, Set<String>>> ret = new ArrayList<>();
        for (Map<String, Set<String>> mapping : objectExpressionMapping) {
            Map<String, Set<String>> addition = new HashMap<>();
            for (String obj : mapping.keySet()) {
                Set<String> expr = new HashSet<>();
                for (String exp : mapping.get(obj)) {
                    expr.add(SQLExpressionUtils.parseCondForTree(CCJSqlParserUtil.parseCondExpression(exp)));
                }
                addition.put(obj, expr);
            }
            ret.add(addition);
        }
        return ret;
    }

    /**
     * Like {@link #getTreesOfObjectToExpressionMapping()} but strips each expression
     * down for signature purposes first; bare parenthesized columns are dropped.
     */
    public List<Map<String, Set<String>>> getTreesOfStrippedDownObjectToExpressionMapping() throws JSQLParserException {
        List<Map<String, Set<String>>> ret = new ArrayList<>();
        for (Map<String, Set<String>> mapping : objectExpressionMapping) {
            Map<String, Set<String>> addition = new HashMap<>();
            for (String obj : mapping.keySet()) {
                Set<String> expr = new HashSet<>();
                for (String exp : mapping.get(obj)) {
                    Expression e = SQLExpressionUtils.stripDownExpressionForSignature(CCJSqlParserUtil.parseCondExpression(exp));
                    while (e instanceof Parenthesis) e = ((Parenthesis) e).getExpression();
                    if (e instanceof Column) continue;
                    expr.add(SQLExpressionUtils.parseCondForTree(e));
                }
                addition.put(obj, expr);
            }
            ret.add(addition);
        }
        return ret;
    }

    /** Debug helper: prints an operator's object-to-expression mapping as trees. */
    public static void printO2EMapping(Operator o) throws Exception {
        Map<String, Set<String>> m = o.getObjectToExpressionMappingForSignature();
        o.removeCTEEntriesFromObjectToExpressionMapping(m);
        System.out.println("Mapping: ");
        for (String s : m.keySet()) {
            System.out.printf("-- %s:\n", s);
            for (String s2 : m.get(s)) {
                String e;
                try {
                    e = SQLExpressionUtils.parseCondForTree(CCJSqlParserUtil.parseCondExpression(s2));
                } catch (JSQLParserException ex) {
                    // Fall back to plain-expression parsing when it's not a condition.
                    e = SQLExpressionUtils.parseCondForTree(CCJSqlParserUtil.parseExpression(s2));
                }
                System.out.printf("   - %s\n", e);
            }
        }
    }

    /** Debug helper: prints the stripped-down version of an operator's mapping. */
    public static void printStrippedO2EMapping(Operator o) throws Exception {
        Map<String, Set<String>> m = o.getObjectToExpressionMappingForSignature();
        o.removeCTEEntriesFromObjectToExpressionMapping(m);
        System.out.println("Stripped down function: ");
        for (String s : m.keySet()) {
            System.out.printf("-- %s:\n", s);
            for (String s2 : m.get(s)) {
                Expression e;
                try {
                    e = SQLExpressionUtils.stripDownExpressionForSignature(CCJSqlParserUtil.parseCondExpression(s2));
                } catch (JSQLParserException ex) {
                    e = SQLExpressionUtils.stripDownExpressionForSignature(CCJSqlParserUtil.parseExpression(s2));
                }
                while (e instanceof Parenthesis) e = ((Parenthesis) e).getExpression();
                if (e instanceof Column) continue;
                System.out.printf("   - %s\n", SQLExpressionUtils.parseCondForTree(e));
            }
        }
    }

    /** Serializes this signature into the form accepted by {@link #Signature(String)}. */
    public String toRecoverableString() {
        StringBuilder sb = new StringBuilder();
        sb.append(island.toString());
        sb.append(fieldSeparator).append(sig1);
        sb.append(fieldSeparator).append(String.join(elementSeparator, sig2));
        sb.append(fieldSeparator).append(String.join(elementSeparator, sig3));
        sb.append(fieldSeparator).append(query);
        if (sig4k.size() > 0)
            sb.append(fieldSeparator).append(String.join(elementSeparator, sig4k));
        return sb.toString();
    }

    /**
     * Computes a dissimilarity score between this signature and another
     * (0 = identical structure; larger = more different).
     */
    public double compare(Signature sig) {
        logger.debug("SIGNATURE 1: " + this.toRecoverableString());
        logger.debug("SIGNATURE 2: " + sig.toRecoverableString());

        double dist = 0;
        List<String> l2;
        List<String> l4k2 = new ArrayList<>(sig.sig4k);
        int size;

        // sig1: tree edit distance between the two plan structures.
        dist = getTreeEditDistance(sig1, sig.sig1);
        logger.debug("SIGNATURE sig1 dist: " + dist);

        // sig2: scale by the fraction of shared object references.
        if (sig2.size() > sig.sig2.size()) {
            l2 = new ArrayList<>(sig2);
            l2.retainAll(sig.sig2);
            size = sig2.size();
        } else {
            l2 = new ArrayList<>(sig.sig2);
            l2.retainAll(sig2);
            size = sig.sig2.size();
        }
        double sig2Dist = ((double) l2.size()) / size;
        dist *= sig2Dist;
        logger.debug("SIGNATURE sig2 dist: " + sig2Dist);

        // sig3: penalize by the difference in constant counts.
        double sig3Dist = (sig3.size() > sig.sig3.size()) ? sig3.size() - sig.sig3.size() : sig.sig3.size() - sig3.size();
        logger.debug("SIGNATURE sig3 dist: " + sig3Dist);
        dist += sig3Dist;

        // sig4k: greedily match each container tree to its closest unmatched counterpart.
        double sig4kDist = sig4k.size() < sig.sig4k.size() ? sig.sig4k.size() - sig4k.size() : sig4k.size() - sig.sig4k.size();
        for (int i = 0; i < sig4k.size(); i++) {
            double result = Double.MAX_VALUE;
            int j = 0;
            int holder = -1;
            while (!l4k2.isEmpty() && j < l4k2.size()) {
                double temp = getTreeEditDistance(sig4k.get(i), l4k2.get(j));
                if (temp < result) {
                    result = temp;
                    holder = j;
                }
                j++;
            }
            // FIX: was `holder > 0`, which discarded a best match found at index 0
            // and terminated the matching loop prematurely.
            if (holder >= 0) {
                l4k2.remove(holder);
                sig4kDist += result;
            } else break;
        }
        logger.debug("SIGNATURE sig4k dist: " + sig4kDist);
        dist += sig4kDist;

        logger.debug("SIGNATURE final dist: " + dist);
        return dist;
    }
}
package javaslang.collection;

import javaslang.Tuple2;
import javaslang.control.Option;

import java.util.Comparator;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.UnaryOperator;

/**
 * An interface for inherently recursive data structures. The order of elements is determined by
 * {@link java.lang.Iterable#iterator()}, which may vary each time it is called.
 * <p>
 * Implementations of {@code Traversable} should calculate the {@code hashCode} via {@link #hash(java.lang.Iterable)}.
 * <p>
 * Additionally to {@link TraversableOnce} this interface has the following methods:
 *
 * <ul>
 * <li>{@link #hasDefiniteSize()}</li>
 * <li>{@link #isTraversableAgain()}</li>
 * </ul>
 *
 * @param <T> Component type
 * @since 1.1.0
 */
public interface Traversable<T> extends TraversableOnce<T> {

    /**
     * Used by collections to compute the hashCode only once.
     * <p>
     * Idiom: cache the result in a {@code transient Lazy<Integer>} field and return it from
     * {@code hashCode()}:
     * <pre>
     * <code>
     * class MyCollection implements Serializable {
     *
     *     // Not allowed to be serialized!
     *     private final transient Lazy&lt;Integer&gt; hashCode = Lazy.of(() -&gt; Traversable.hash(this));
     *
     *     &#64;Override
     *     public int hashCode() {
     *         return hashCode.get();
     *     }
     * }
     * </code>
     * </pre>
     *
     * <strong>Note:</strong> In the case of an empty collection, such as {@code Nil} it is recommended to
     * directly return {@code Traversable.hash(this)} instead of asking a {@code Lazy} value:
     * <pre>
     * <code>
     * interface List&lt;T&gt; {
     *
     *     class Nil&lt;T&gt; {
     *
     *         &#64;Override
     *         public int hashCode() {
     *             return Traversable.hash(this);
     *         }
     *     }
     * }
     * </code>
     * </pre>
     *
     * @param <T>     Component type
     * @param objects An java.lang.Iterable
     * @return The hashCode of the given java.lang.Iterable, computed with the same
     *         31-based accumulation used by {@link java.util.List#hashCode()}
     * @throws NullPointerException if objects is null
     */
    static <T> int hash(java.lang.Iterable<? extends T> objects) {
        int hashCode = 1;
        for (Object o : objects) {
            // Null elements are permitted: Objects.hashCode(null) == 0.
            hashCode = 31 * hashCode + Objects.hashCode(o);
        }
        return hashCode;
    }

    // -- Adjusted return types of TraversableOnce
    // Each override narrows the TraversableOnce return type to Traversable so that
    // chained calls on a Traversable stay re-traversable.

    @Override
    Traversable<T> clear();

    @Override
    Traversable<T> distinct();

    @Override
    Traversable<T> distinctBy(Comparator<? super T> comparator);

    @Override
    <U> Traversable<T> distinctBy(Function<? super T, ? extends U> keyExtractor);

    @Override
    Traversable<T> drop(int n);

    @Override
    Traversable<T> dropRight(int n);

    @Override
    Traversable<T> dropWhile(Predicate<? super T> predicate);

    @Override
    Traversable<T> filter(Predicate<? super T> predicate);

    @Override
    <U> Traversable<U> flatMap(Function<? super T, ? extends java.lang.Iterable<? extends U>> mapper);

    @Override
    Traversable<Object> flatten();

    @Override
    <C> Map<C, ? extends Traversable<T>> groupBy(Function<? super T, ? extends C> classifier);

    @Override
    Traversable<T> init();

    @Override
    Option<? extends Traversable<T>> initOption();

    @Override
    <U> Traversable<U> map(Function<? super T, ? extends U> mapper);

    @Override
    Tuple2<? extends Traversable<T>, ? extends Traversable<T>> partition(Predicate<? super T> predicate);

    @Override
    Traversable<T> peek(Consumer<? super T> action);

    @Override
    Traversable<T> replace(T currentElement, T newElement);

    @Override
    Traversable<T> replaceAll(T currentElement, T newElement);

    @Override
    Traversable<T> replaceAll(UnaryOperator<T> operator);

    @Override
    Traversable<T> retainAll(java.lang.Iterable<? extends T> elements);

    @Override
    Tuple2<? extends Traversable<T>, ? extends Traversable<T>> span(Predicate<? super T> predicate);

    @Override
    Traversable<T> tail();

    @Override
    Option<? extends Traversable<T>> tailOption();

    @Override
    Traversable<T> take(int n);

    @Override
    Traversable<T> takeRight(int n);

    @Override
    Traversable<T> takeWhile(Predicate<? super T> predicate);
}
package joshng.util.collect;

import joshng.util.ByteSerializable;
import joshng.util.StringUtils;

import java.util.Arrays;

/**
 * An immutable-by-convention wrapper that makes a {@code byte[]} usable as a map key:
 * the hash is computed once at construction, and equality compares array contents.
 * <p>
 * NOTE(review): the wrapped array is stored without a defensive copy — callers must
 * not mutate it after construction, or the cached hash becomes stale.
 */
public class ByteArrayKey implements ByteSerializable<byte[]> {
    private final byte[] bytes;
    private final int cachedHash;

    public ByteArrayKey(byte[] bytes) {
        this.cachedHash = Arrays.hashCode(bytes);
        this.bytes = bytes;
    }

    /** Parses a hex string into a key. */
    public static ByteArrayKey valueOf(String hex) {
        return new ByteArrayKey(StringUtils.bytesFromHexString(hex));
    }

    /** The wrapped array itself (not a copy). */
    public byte[] bytes() {
        return bytes;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        ByteArrayKey other = (ByteArrayKey) o;
        // Cheap hash comparison first; fall through to full content comparison.
        return other.cachedHash == cachedHash && Arrays.equals(bytes, other.bytes);
    }

    @Override
    public int hashCode() {
        return cachedHash;
    }

    @Override
    public String toString() {
        return StringUtils.toHexStringTruncatedWithEllipsis(bytes, 0, 32);
    }

    @Override
    public byte[] getSerializableValue() {
        return bytes;
    }

    public int getLength() {
        return bytes.length;
    }
}
package link.webarata3.poi;

import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.IntStream;

/**
 * Convenience helpers around Apache POI: opening workbooks, translating
 * between (x, y) indices and "A1"-style cell labels, and typed cell reads.
 *
 * @author webarata3
 */
public class BenrippoiUtil {

    // Compiled once (Pattern is thread-safe); matches labels like "A1" or "ab12".
    private static final Pattern CELL_LABEL_PATTERN = Pattern.compile("([a-zA-Z]+)([0-9]+)");

    /**
     * Opens an Excel workbook from a file path.
     *
     * @param fileName path of the Excel file
     * @return the opened {@link Workbook}
     * @throws IOException            if the file cannot be read
     * @throws InvalidFormatException if the content is not a valid workbook
     */
    public static Workbook open(String fileName) throws IOException, InvalidFormatException {
        InputStream is = Files.newInputStream(Paths.get(fileName));
        return open(is);
    }

    /**
     * Opens an Excel workbook from a stream.
     *
     * @param is stream with workbook content
     * @return the opened {@link Workbook}
     * @throws IOException            if the stream cannot be read
     * @throws InvalidFormatException if the content is not a valid workbook
     */
    public static Workbook open(InputStream is) throws IOException, InvalidFormatException {
        return WorkbookFactory.create(is);
    }

    /**
     * Converts 0-based (x, y) coordinates to an Excel label such as "A1" or "AB2".
     *
     * @param x 0-based column index
     * @param y 0-based row index
     * @return the cell label (column letters followed by 1-based row number)
     */
    public static String cellIndexToCellLabel(int x, int y) {
        String cellName = dec26(x, 0);
        return cellName + (y + 1);
    }

    // Converts a 0-based column index to bijective base-26 letters (A..Z, AA..).
    // 'first' is 0 only for the least significant digit of the recursion.
    private static String dec26(int num, int first) {
        return (num > 25 ? dec26(num / 26, 1) : "") + String.valueOf((char) ('A' + (num - first) % 26));
    }

    /**
     * Returns the row at {@code y}, creating it if the sheet has none yet.
     *
     * @param sheet target sheet
     * @param y     0-based row index
     * @return an existing or freshly created {@link Row}
     */
    public static Row getRow(Sheet sheet, int y) {
        Row row = sheet.getRow(y);
        if (row != null) {
            return row;
        }
        return sheet.createRow(y);
    }

    /**
     * Returns the cell at (x, y), creating the row and/or a BLANK cell if missing.
     *
     * @param sheet target sheet
     * @param x     0-based column index
     * @param y     0-based row index
     * @return an existing or freshly created {@link Cell}
     */
    public static Cell getCell(Sheet sheet, int x, int y) {
        // Fix: go through getRow() so a missing row is created instead of
        // causing a NullPointerException on row.getCell(x).
        Row row = getRow(sheet, y);
        Cell cell = row.getCell(x);
        if (cell != null) {
            return cell;
        }
        return row.createCell(x, CellType.BLANK);
    }

    /**
     * Returns the cell addressed by an "A1"-style label.
     *
     * @param sheet     target sheet
     * @param cellLabel label such as "A1" or "AB12" (case-insensitive letters)
     * @return the addressed {@link Cell}, created if missing
     * @throws IllegalArgumentException if the label does not contain letters followed by digits
     */
    public static Cell getCell(Sheet sheet, String cellLabel) {
        Matcher matcher = CELL_LABEL_PATTERN.matcher(cellLabel);
        // Fix: the match result used to be ignored, so a malformed label
        // surfaced as a confusing IllegalStateException from group().
        if (!matcher.find()) {
            throw new IllegalArgumentException("Invalid cell label: " + cellLabel);
        }
        // Reverse the letters so index i corresponds to the 26^i digit
        // (e.g. "AB" -> "BA": B is the units digit, A the 26s digit).
        String reverseString = new StringBuilder(matcher.group(1).toUpperCase()).reverse().toString();
        // Bijective base-26: each letter contributes (letter value) * 26^i;
        // the reduce seed of -1 converts the 1-based total to a 0-based index.
        int x = IntStream.range(0, reverseString.length()).map((i) -> {
            int delta = reverseString.charAt(i) - 'A' + 1;
            return delta * (int) Math.pow(26.0, (double) i);
        }).reduce(-1, (v1, v2) -> v1 + v2);
        return getCell(sheet, x, Integer.parseInt(matcher.group(2)) - 1);
    }

    /**
     * Reads a cell as a String.
     *
     * @param cell source cell
     * @return string value
     */
    public static String cellToString(Cell cell) {
        CellProxy cellProxy = new CellProxy(cell);
        return cellProxy.toStr();
    }

    /**
     * Reads a cell as an int.
     *
     * @param cell source cell
     * @return int value
     */
    public static int cellToInt(Cell cell) {
        CellProxy cellProxy = new CellProxy(cell);
        return cellProxy.toInt();
    }

    /**
     * Reads a cell as a double.
     *
     * @param cell source cell
     * @return double value
     */
    public static double cellToDouble(Cell cell) {
        CellProxy cellProxy = new CellProxy(cell);
        return cellProxy.toDouble();
    }

    /**
     * Reads a cell as a boolean.
     *
     * @param cell source cell
     * @return boolean value
     */
    public static boolean cellToBoolean(Cell cell) {
        CellProxy cellProxy = new CellProxy(cell);
        return cellProxy.toBoolean();
    }

    /**
     * Reads a cell as a Date.
     *
     * @param cell source cell
     * @return date value
     */
    public static Date cellToDate(Cell cell) {
        CellProxy cellProxy = new CellProxy(cell);
        return cellProxy.toDate();
    }
}
package mcjty.rftools;

import mcjty.lib.varia.GlobalCoordinate;
import mcjty.lib.varia.Logging;
import mcjty.lib.varia.WrenchChecker;
import mcjty.rftools.blocks.blockprotector.BlockProtectorTileEntity;
import mcjty.rftools.blocks.blockprotector.BlockProtectors;
import mcjty.rftools.blocks.environmental.NoTeleportAreaManager;
import mcjty.rftools.blocks.environmental.PeacefulAreaManager;
import mcjty.rftools.blocks.screens.ScreenSetup;
import mcjty.rftools.playerprops.BuffProperties;
import mcjty.rftools.playerprops.PlayerExtendedProperties;
import mcjty.rftools.playerprops.PorterProperties;
import mcjty.rftools.playerprops.PropertiesDispatcher;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.entity.monster.IMob;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.Explosion;
import net.minecraft.world.World;
import net.minecraftforge.event.AttachCapabilitiesEvent;
import net.minecraftforge.event.entity.living.EnderTeleportEvent;
import net.minecraftforge.event.entity.living.LivingSpawnEvent;
import net.minecraftforge.event.entity.player.PlayerInteractEvent;
import net.minecraftforge.event.world.BlockEvent;
import net.minecraftforge.event.world.ExplosionEvent;
import net.minecraftforge.fml.common.eventhandler.Event;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.gameevent.TickEvent;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Forge event-bus subscribers for RFTools. Handles per-player ticking of
 * porter/buff properties, capability attachment, block-protector enforcement
 * (wrenching, breaking, explosions), teleport blocking and peaceful areas.
 */
public class ForgeEventHandlers {

    /**
     * Server-side start-of-tick hook: advances the player's teleport (porter)
     * countdown and active buffs, when those property objects are attached.
     */
    @SubscribeEvent
    public void onPlayerTickEvent(TickEvent.PlayerTickEvent event) {
        // Only on the START phase and only server-side (worldObj.isRemote == client).
        if (event.phase == TickEvent.Phase.START && !event.player.worldObj.isRemote) {
            PorterProperties porterProperties = PlayerExtendedProperties.getPorterProperties(event.player);
            if (porterProperties != null) {
                porterProperties.tickTeleport(event.player);
            }
            BuffProperties buffProperties = PlayerExtendedProperties.getBuffProperties(event.player);
            if (buffProperties != null) {
                // Cast appears safe here because we are server-side — TODO confirm.
                buffProperties.tickBuffs((EntityPlayerMP) event.player);
            }
        }
    }

    /**
     * Attaches the RFTools properties capability to every player entity that
     * does not already carry it.
     */
    @SubscribeEvent
    public void onEntityConstructing(AttachCapabilitiesEvent.Entity event) {
        if (event.getEntity() instanceof EntityPlayer) {
            if (!event.getEntity().hasCapability(PlayerExtendedProperties.PORTER_CAPABILITY, null)) {
                event.addCapability(new ResourceLocation(RFTools.MODID, "Properties"), new PropertiesDispatcher());
            }
        }
    }

    /**
     * Finds block-protector positions whose range covers (x, y, z) in the
     * given world; returns an empty list when no protector registry exists.
     */
    private Collection<GlobalCoordinate> getProtectors(World world, int x, int y, int z) {
        Collection<GlobalCoordinate> protectors;
        BlockProtectors blockProtectors = BlockProtectors.getProtectors(world);
        if (blockProtectors == null) {
            protectors = Collections.emptyList();
        } else {
            int id = world.provider.getDimension();
            // Radius argument is 2 — presumably a fixed search margin; TODO confirm semantics.
            protectors = blockProtectors.findProtectors(x, y, z, id, 2);
        }
        return protectors;
    }

    /**
     * Blocks sneak-wrenching of protected blocks by cancelling the interact
     * event when a protector covers the clicked position.
     */
    @SubscribeEvent
    public void onPlayerInteractEvent(PlayerInteractEvent event) {
        ItemStack heldItem = event.getEntityPlayer().getHeldItem(event.getHand());
        if (heldItem == null || heldItem.getItem() == null) {
            return;
        }
        if (event.getEntityPlayer().isSneaking() && WrenchChecker.isAWrench(heldItem.getItem())) {
            // If the block is protected we prevent sneak-wrenching it.
            World world = event.getWorld();
            int x = event.getPos().getX();
            int y = event.getPos().getY();
            int z = event.getPos().getZ();
            Collection<GlobalCoordinate> protectors = getProtectors(world, x, y, z);
            checkHarvestProtection(event, x, y, z, world, protectors);
        }
    }

    /**
     * Break handler: in creative mode, protects RFTools screens from casual
     * left-click destruction (require sneaking); then applies block-protector
     * harvest protection. Note the protector check below runs regardless of
     * the isRemote branch above it.
     */
    @SubscribeEvent
    public void onBlockBreakEvent(BlockEvent.BreakEvent event) {
        int x = event.getPos().getX();
        int y = event.getPos().getY();
        int z = event.getPos().getZ();
        World world = event.getWorld();
        if (!world.isRemote) {
            if (((EntityPlayerMP) event.getPlayer()).interactionManager.isCreative()) {
                // In creative we don't want our screens to be destroyed by left click unless he/she is sneaking
                Block block = world.getBlockState(event.getPos()).getBlock();
                if (block == ScreenSetup.screenBlock || block == ScreenSetup.screenHitBlock) {
                    if (!event.getPlayer().isSneaking()) {
                        // If not sneaking while we hit a screen we cancel the destroy. Otherwise we go through.
                        event.setCanceled(true);
                        return;
                    }
                }
            }
            // NOTE(review): the block below is dead (commented-out) client-side
            // handling that was kept for reference; consider deleting it.
//        } else {
//            if (Minecraft.getMinecraft().playerController.isInCreativeMode()) {
//                // In creative we don't want our screens to be destroyed by left click unless he/she is sneaking
//                Block block = world.getBlockState(event.getPos()).getBlock();
//                if (block == ScreenSetup.screenBlock || block == ScreenSetup.screenHitBlock) {
//                    if (!event.getPlayer().isSneaking()) {
//                        // If not sneaking while we hit a screen we cancel the destroy. Otherwise we go through.
//                        event.setCanceled(true);
//                        block.onBlockClicked(world, event.getPos(), event.getPlayer());
//                        return;
        }
        Collection<GlobalCoordinate> protectors = getProtectors(world, x, y, z);
        checkHarvestProtection(event, x, y, z, world, protectors);
    }

    /**
     * If any protector covers (x, y, z): either cancels the event (protector
     * paid the protection cost) or drops the protection for that position
     * (protector could not pay). Stops at the first covering protector.
     */
    private void checkHarvestProtection(Event event, int x, int y, int z, World world, Collection<GlobalCoordinate> protectors) {
        for (GlobalCoordinate protector : protectors) {
            TileEntity te = world.getTileEntity(protector.getCoordinate());
            if (te instanceof BlockProtectorTileEntity) {
                BlockProtectorTileEntity blockProtectorTileEntity = (BlockProtectorTileEntity) te;
                BlockPos relative = blockProtectorTileEntity.absoluteToRelative(x, y, z);
                boolean b = blockProtectorTileEntity.isProtected(relative);
                if (b) {
                    if (blockProtectorTileEntity.attemptHarvestProtection()) {
                        event.setCanceled(true);
                    } else {
                        // Protector out of power: the position loses its protection.
                        blockProtectorTileEntity.removeProtection(relative);
                    }
                    return;
                }
            }
        }
    }

    /**
     * Explosion hook: removes protected positions from the explosion's
     * affected-block list when the owning protector can pay the (distance
     * scaled) protection cost; unprotects positions it cannot afford.
     */
    @SubscribeEvent
    public void onDetonate(ExplosionEvent.Detonate event) {
        Explosion explosion = event.getExplosion();
        Vec3d explosionVector = explosion.getPosition();
        Collection<GlobalCoordinate> protectors = getProtectors(event.getWorld(), (int) explosionVector.xCoord, (int) explosionVector.yCoord, (int) explosionVector.zCoord);
        if (protectors.isEmpty()) {
            return;
        }
        List<BlockPos> affectedBlocks = event.getAffectedBlocks();
        List<BlockPos> toremove = new ArrayList<>();
        int rf = 0;  // accumulated energy cost, for the debug log only
        for (GlobalCoordinate protector : protectors) {
            BlockPos pos = protector.getCoordinate();
            TileEntity te = event.getWorld().getTileEntity(pos);
            if (te instanceof BlockProtectorTileEntity) {
                BlockProtectorTileEntity blockProtectorTileEntity = (BlockProtectorTileEntity) te;
                for (BlockPos block : affectedBlocks) {
                    BlockPos relative = blockProtectorTileEntity.absoluteToRelative(block);
                    boolean b = blockProtectorTileEntity.isProtected(relative);
                    if (b) {
                        Vec3d blockVector = new Vec3d(block);
                        double distanceTo = explosionVector.distanceTo(blockVector);
                        // Cost scales with proximity to the blast center.
                        int rfneeded = blockProtectorTileEntity.attemptExplosionProtection((float) (distanceTo / explosion.explosionSize), explosion.explosionSize);
                        if (rfneeded > 0) {
                            toremove.add(block);
                            rf += rfneeded;
                        } else {
                            blockProtectorTileEntity.removeProtection(relative);
                        }
                    }
                }
            }
        }
        // Blocks successfully protected are excluded from the explosion's damage list.
        affectedBlocks.removeAll(toremove);
        Logging.logDebug("RF Needed for one explosion:" + rf);
    }

    /**
     * Cancels enderman-style teleports when either the source or the target
     * position lies inside a no-teleport area.
     */
    @SubscribeEvent
    public void onEntityTeleport(EnderTeleportEvent event) {
        World world = event.getEntity().getEntityWorld();
        int id = world.provider.getDimension();
        Entity entity = event.getEntity();
        BlockPos coordinate = new BlockPos((int) entity.posX, (int) entity.posY, (int) entity.posZ);
        if (NoTeleportAreaManager.isTeleportPrevented(entity, new GlobalCoordinate(coordinate, id))) {
            event.setCanceled(true);
            Logging.logDebug("No Teleport manager: Prevented teleport of " + entity.getClass().getName());
        } else {
            // Source was fine; also verify the destination.
            coordinate = new BlockPos((int) event.getTargetX(), (int) event.getTargetY(), (int) event.getTargetZ());
            if (NoTeleportAreaManager.isTeleportPrevented(entity, new GlobalCoordinate(coordinate, id))) {
                event.setCanceled(true);
                Logging.logDebug("No Teleport manager: Prevented teleport of " + entity.getClass().getName());
            }
        }
    }

    /**
     * Denies hostile-mob spawns inside peaceful areas.
     */
    @SubscribeEvent
    public void onEntitySpawnEvent(LivingSpawnEvent.CheckSpawn event) {
        World world = event.getWorld();
        int id = world.provider.getDimension();
        Entity entity = event.getEntity();
        if (entity instanceof IMob) {
            BlockPos coordinate = new BlockPos((int) entity.posX, (int) entity.posY, (int) entity.posZ);
            if (PeacefulAreaManager.isPeaceful(new GlobalCoordinate(coordinate, id))) {
                event.setResult(Event.Result.DENY);
                Logging.logDebug("Peaceful manager: Prevented a spawn of " + entity.getClass().getName());
            }
        }
    }
}
package net.imagej.legacy;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;

import org.scijava.ItemVisibility;
import org.scijava.app.AppService;
import org.scijava.command.Interactive;
import org.scijava.display.DisplayService;
import org.scijava.io.IOService;
import org.scijava.log.LogService;
import org.scijava.menu.MenuConstants;
import org.scijava.options.OptionsPlugin;
import org.scijava.platform.PlatformService;
import org.scijava.plugin.Attr;
import org.scijava.plugin.Menu;
import org.scijava.plugin.Parameter;
import org.scijava.plugin.Plugin;
import org.scijava.text.TextService;
import org.scijava.ui.DialogPrompt.MessageType;
import org.scijava.ui.UIService;
import org.scijava.welcome.WelcomeService;
import org.scijava.widget.Button;

/**
 * Allows the setting and persisting of options relevant to ImageJ2, in ImageJ1.
 * Not displayed in the IJ2 UI.
 *
 * @author Mark Hiner
 * @author Curtis Rueden
 */
@Plugin(type = OptionsPlugin.class, label = "ImageJ2 Options", menu = {
    @Menu(label = MenuConstants.EDIT_LABEL, weight = MenuConstants.EDIT_WEIGHT,
        mnemonic = MenuConstants.EDIT_MNEMONIC),
    @Menu(label = "Options"),
    @Menu(label = "ImageJ2...") }, attrs = { @Attr(name = "legacy-only") })
public class ImageJ2Options extends OptionsPlugin implements Interactive {

    // -- Fields --

    // TODO: Use <html> and <br> to put the following warning into a single
    // parameter. There seems to be a bug with at the moment, though...

    // Three MESSAGE-visibility parameters render as read-only warning lines
    // in the options dialog.
    @Parameter(visibility = ItemVisibility.MESSAGE)
    private final String warning1 = "These options enable beta ImageJ2 functionality.";

    @Parameter(visibility = ItemVisibility.MESSAGE)
    private final String warning2 = "You can turn them on for testing, but they are still buggy,";

    @Parameter(visibility = ItemVisibility.MESSAGE)
    private final String warning3 = "and have not yet been optimized for performance.";

    // Toggle for using SCIFIO-based I/O; the callback re-runs this options
    // plugin whenever the checkbox changes.
    @Parameter(
        label = "Use SCIFIO when opening files",
        description = "<html>Whether to use ImageJ2's file I/O mechanism when " +
            "opening files.<br>Image files will be opened using the SCIFIO library " +
            "(SCientific Image<br>Format Input and Output), which provides truly " +
            "extensible support for<br>reading and writing image file formats.",
        callback = "run")
    private boolean sciJavaIO = false;

    // Button that invokes the help() method below; not persisted.
    @Parameter(label = "What is ImageJ2?", persist = false, callback = "help")
    private Button help;

    // Required service injection; not referenced in this class's visible
    // methods — presumably needed for plugin context. TODO confirm.
    @Parameter
    private DefaultLegacyService legacyService;

    // All services below are optional (required = false): help() degrades
    // gracefully through whichever ones are present.
    @Parameter(required = false)
    private WelcomeService welcomeService;

    @Parameter(required = false)
    private AppService appService;

    @Parameter(required = false)
    private PlatformService platformService;

    @Parameter(required = false)
    private DisplayService displayService;

    @Parameter(required = false)
    private TextService textService;

    @Parameter(required = false)
    private UIService uiService;

    @Parameter(required = false)
    private LogService log;

    // Fallback welcome page, resolved once at class load; stays null if the
    // URL constant is malformed (cannot log here — instance logger not available).
    private final static URL WELCOME_URL;
    static {
        URL url = null;
        try {
            url = new URL("https://github.com/imagej/imagej/blob/master/WELCOME.md#welcome-to-imagej2");
        }
        catch (final MalformedURLException e) {
            e.printStackTrace();
        }
        WELCOME_URL = url;
    }

    // -- Option accessors --

    /**
     * Gets whether to synchronize ImageJ 1.x and ImageJ2 data structures.
     * <p>
     * This is an experimental feature that proactively syncs objects between IJ1
     * (e.g., {@link ij.ImagePlus}) and IJ2 (e.g., {@link net.imagej.Dataset})
     * data structures.
     * </p>
     * <p>
     * <b>Warning:</b> this feature currently has serious bugs, and enabling it
     * will have a serious impact on performance and stability!
     * </p>
     * <p>
     * If you need to enable it for testing or development purposes, do so by
     * setting the {@code imagej.legacy.sync} system property.
     * </p>
     */
    public boolean isSyncEnabled() {
        return Boolean.getBoolean("imagej.legacy.sync");
    }

    /** Gets whether SCIFIO-based file I/O is enabled. */
    public boolean isSciJavaIO() {
        return sciJavaIO;
    }

    /**
     * Shows the "What is ImageJ2?" material, trying progressively weaker
     * channels: welcome service, local WELCOME.md rendered as HTML, web
     * browser, UI dialog, and finally the log / stderr.
     */
    @SuppressWarnings("unused")
    private void help() {
        if (welcomeService != null) {
            welcomeService.displayWelcome();
            return;
        }
        if (appService != null && textService != null && displayService != null) {
            final File baseDir = appService.getApp().getBaseDirectory();
            final File welcomeFile = new File(baseDir, "WELCOME.md");
            if (welcomeFile.exists()) try {
                final String welcomeText = textService.asHTML(welcomeFile);
                displayService.createDisplay(welcomeText);
                return;
            }
            catch (final IOException e) {
                if (log != null) {
                    log.error(e);
                }
                else {
                    e.printStackTrace();
                }
            }
        }
        // if local options fail, try the web browser
        if (platformService != null && WELCOME_URL != null) {
            try {
                platformService.open(WELCOME_URL);
                return;
            }
            catch (final IOException e) {
                if (log != null) {
                    log.error(e);
                }
                else {
                    e.printStackTrace();
                }
            }
        }
        final String message = "No appropriate service found to display the message";
        if (uiService != null) {
            uiService.showDialog(message, MessageType.ERROR_MESSAGE);
            return;
        }
        if (log != null) {
            log.error(message);
        }
        else {
            System.err.println(message);
        }
    }
}
package net.ripe.db.whois.common;

import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

/**
 * A typed, formattable message. The formatted text is produced once at
 * construction by applying {@link String#format} to the template and args.
 * Equality and hashing consider only the type and the formatted text.
 */
@Immutable
public class Message {
    // NOTE(review): fields are protected and non-final to allow subclassing /
    // the no-arg constructor; treat instances as effectively immutable.
    protected Messages.Type type;
    protected String text;
    protected Object[] args;
    protected String formattedText;

    protected Message() {}

    /**
     * @param type severity/category of the message
     * @param text format template (or plain text when no args are given)
     * @param args optional {@link String#format} arguments
     */
    public Message(final Messages.Type type, final String text, final Object... args) {
        this.type = type;
        this.text = text;
        this.args = args;
        this.formattedText = formatMessage(text, args);
    }

    // Plain text passes through untouched; only format when args are present.
    protected String formatMessage(final String text, final Object[] args) {
        if (args.length == 0) {
            return text;
        }
        return String.format(text, args);
    }

    @Override
    public String toString() {
        return formattedText;
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        final Message that = (Message) o;
        return type == that.type && formattedText.equals(that.formattedText);
    }

    @Override
    public int hashCode() {
        // Same 31-based combination as before: type first, then formatted text.
        return 31 * type.hashCode() + formattedText.hashCode();
    }

    @Nullable
    public Messages.Type getType() {
        return type;
    }

    @Nullable
    public String getFormattedText() {
        return formattedText;
    }

    @Nullable
    public String getText() {
        return text;
    }

    @Nullable
    public Object[] getArgs() {
        return args;
    }
}
package networking.mediator;

import Game.*;
import Game.adapters.InstructionAdapter;
import Game.adapters.PanelAdapter;
import Game.adapters.PlayerAdapter;
import Game.adapters.TeamAdapter;
import networking.server.NetworkRequest;
import networking.server.RequestType;

import java.util.List;
import java.util.TimerTask;

/**
 * Mediator running on the host: answers GET/POST network requests about
 * players, teams, panels and instructions, and periodically broadcasts the
 * current player/team state.
 */
public class HostMediator extends BaseMediator implements IMediator {
    HostGame hostGame;
    TimerTask timerTask1;
    java.util.Timer timerRefresh1;

    /**
     * @param hostGame game state this mediator serves
     * @param port     port the underlying network server listens on
     */
    public HostMediator(HostGame hostGame, int port) {
        super(port);
        this.hostGame = hostGame;
        timerRefresh1 = new java.util.Timer();
        timerTask1 = new TimerTask() {
            @Override
            public void run() {
                handleAll();
            }
        };
        // Broadcast state immediately and then once per second.
        timerRefresh1.schedule(timerTask1, 0, 1000);
    }

    public HostMediator(HostGame hostGame) {
        this(hostGame, 8085);
    }

    /**
     * Author Qun
     * Changes the ready-status of a player (matched by IP), so the game can
     * start once all players are ready.
     *
     * @param networkRequest the incoming request to change player status
     */
    public void handlePlayersChangeStatus(NetworkRequest networkRequest) {
        if (networkRequest.getType() == RequestType.POST) {
            // Deserialize the player carried in the request payload.
            Player incomingPlayer = PlayerAdapter.toObject(networkRequest.getPayload());
            for (Player player : hostGame.getPlayers()) {
                if (incomingPlayer.getIp().equals(player.getIp())) {
                    player.setPlayerStatus(incomingPlayer.getPlayerStatus());
                }
            }
        }
    }

    // Periodic broadcast of the current players and teams.
    // NOTE(review): target address is hard-coded to 127.0.0.1 — verify this is
    // intentional (looks like local testing) before deploying.
    private void handleAll() {
        List<Player> players = hostGame.getPlayers();
        List<Team> teams = hostGame.getTeams();
        String json;
        NetworkRequest send;
        if (players.size() > 0 && teams.size() > 0) {
            // send the players
            players.get(0).setTeam(hostGame.getTeams().get(0));
            json = PlayerAdapter.toString(players);
            send = new NetworkRequest(RequestType.SEND, "/players/", json);
            networkServer.send(send.toString(), "127.0.0.1");
        }
        if (teams.size() > 0) {
            // send the teams
            json = TeamAdapter.toString(teams);
            send = new NetworkRequest(RequestType.SEND, "/teams/", json);
            networkServer.send(send.toString(), "127.0.0.1");
        }
    }

    /**
     * GET: replies with the current player list. POST: registers a new player
     * and auto-assigns a team. Anything else is requeued.
     */
    @Override
    public void handlePlayers(NetworkRequest networkRequest) {
        if (networkRequest.getType() == RequestType.GET) {
            // Retrieve players
            List<Player> players = hostGame.getPlayers();
            // JSONify players
            String json = PlayerAdapter.toString(players);
            NetworkRequest response = new NetworkRequest(RequestType.SEND, "/players/", json);
            networkServer.send(response.toString(), networkRequest.getNetworkMessage().getSender());
        } else if (networkRequest.getType() == RequestType.POST) {
            // Fix: this used to be an independent `if`, so a GET request fell
            // into the else branch and was requeued after already being answered.
            // Now a GET is handled exactly once, matching the sibling handlers.
            Player player = PlayerAdapter.toObject(networkRequest.getPayload());
            hostGame.createPlayer(player.getUsername(), networkRequest.getNetworkMessage().getSender());
            autoAssignTeam(player);
        } else {
            networkServer.requeueRequest(networkRequest);
        }
    }

    /**
     * GET: replies with the requesting player's active instruction.
     * POST: registers an invalid/expired instruction. Anything else is requeued.
     */
    @Override
    public void handleInstruction(NetworkRequest networkRequest) {
        if (networkRequest.getType() == RequestType.GET) {
            String ip = networkRequest.getNetworkMessage().getSender();
            Instruction latestInstruction = getPlayer(ip).getActiveInstruction();
            // JSONify the instruction
            String json = InstructionAdapter.toString(latestInstruction);
            NetworkRequest response = new NetworkRequest(RequestType.SEND, networkRequest.getUrl(), json);
            networkServer.send(response.toString(), ip);
        } else if (networkRequest.getType() == RequestType.POST) {
            // TODO: check against the API whether this is the right interpretation.
            Instruction expiredInstruction = InstructionAdapter.toObject(networkRequest.getPayload());
            hostGame.registerInvalidInstruction(expiredInstruction);
        } else {
            networkServer.requeueRequest(networkRequest);
        }
    }

    /**
     * GET: replies with the current team list. Anything else is requeued.
     */
    @Override
    public void handleTeams(NetworkRequest networkRequest) {
        if (networkRequest.getType() == RequestType.GET) {
            // Retrieve teams
            List<Team> teams = hostGame.getTeams();
            String json = TeamAdapter.toString(teams);
            NetworkRequest response = new NetworkRequest(RequestType.SEND, networkRequest.getUrl(), json);
            networkServer.send(response.toString(), networkRequest.getNetworkMessage().getSender());
        } else {
            networkServer.requeueRequest(networkRequest);
        }
    }

    /**
     * Author Qun
     * GET returns all panels the game has; POST processes the panel a player
     * pressed (the player is identified by the sender's IP address).
     *
     * @param networkRequest the incoming request to get and also process panels
     */
    @Override
    public void handlePanels(NetworkRequest networkRequest) {
        if (networkRequest.getType() == RequestType.GET) {
            // Retrieve panels
            List<Panel> panels = hostGame.getPanels();
            // JSONify panels
            String json = PanelAdapter.toString(panels);
            NetworkRequest response = new NetworkRequest(RequestType.SEND, networkRequest.getUrl(), json);
            networkServer.send(response.toString(), networkRequest.getNetworkMessage().getSender());
        } else if (networkRequest.getType() == RequestType.POST) {
            // Convert the incoming JSON to a panel.
            Panel panel = PanelAdapter.toObjectsSinglePanel(networkRequest.getPayload());
            for (Player player : hostGame.getPlayers()) {
                // Process the panel for the player whose IP matches the sender.
                if (player.getIp().equals(networkRequest.getNetworkMessage().getSender())) {
                    hostGame.processPanel(player, panel);
                }
            }
        } else {
            networkServer.requeueRequest(networkRequest);
        }
    }

    @Override
    public void handleStatus(NetworkRequest networkRequest) {
        if (networkRequest.getType() == RequestType.GET) {
            // TODO: implement once it is clear exactly which information is needed.
        } else {
            networkServer.requeueRequest(networkRequest);
        }
    }

    /** Creates a new team from the name carried in the request payload. */
    public void handleTeamsCreate(NetworkRequest networkRequest) {
        Team team = TeamAdapter.toObject(networkRequest.getPayload());
        hostGame.createTeam(team.getName());
    }

    /** Assigns the requesting player (by sender IP) to the named team. */
    public void handleTeamsAssign(NetworkRequest networkRequest) {
        Team teamRequest = TeamAdapter.toObject(networkRequest.getPayload());
        for (Team team : hostGame.getTeams()) {
            // Fix: names were compared with == (reference identity), which is
            // almost always false for deserialized strings; use equals().
            if (team.getName().equals(teamRequest.getName())) {
                hostGame.assignTeam(getPlayer(networkRequest.getNetworkMessage().getSender()), team);
            }
        }
    }

    /**
     * Looks a player up by IP address.
     *
     * @param ipadress sender IP to match
     * @return the matching player, or null when none is registered
     */
    public Player getPlayer(String ipadress) {
        for (Player player : hostGame.getPlayers()) {
            if (player.getIp().equals(ipadress)) {
                return player;
            }
        }
        return null;
    }

    /**
     * Balances a new player onto the smaller of the first two teams.
     * With fewer than two teams the player joins the first team if one exists
     * (previously this threw IndexOutOfBoundsException).
     */
    public void autoAssignTeam(Player player) {
        List<Team> teams = hostGame.getTeams();
        if (teams.isEmpty()) {
            return;
        }
        if (teams.size() < 2) {
            teams.get(0).addPlayer(player);
            player.setTeam(teams.get(0));
            return;
        }
        Team target = teams.get(0).getPlayers().size() < teams.get(1).getPlayers().size()
                ? teams.get(0)
                : teams.get(1);
        target.addPlayer(player);
        player.setTeam(target);
    }
}
package openmods.block; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableSet; import java.util.Set; import net.minecraft.block.properties.PropertyEnum; import net.minecraft.entity.EntityLivingBase; import net.minecraft.util.EnumFacing; import net.minecraft.util.math.BlockPos; import openmods.geometry.BlockTextureTransform; import openmods.geometry.HalfAxis; import openmods.geometry.Orientation; import openmods.utils.BlockUtils; public enum BlockRotationMode { /** * No rotations - always oriented by world directions */ NONE(RotationAxis.NO_AXIS, Orientation.XP_YP) { @Override public boolean isPlacementValid(Orientation dir) { return true; } @Override public Orientation fromValue(int value) { return Orientation.XP_YP; } @Override public int toValue(Orientation dir) { return 0; } @Override public Orientation getPlacementOrientationFromSurface(EnumFacing side) { return Orientation.XP_YP; } @Override public Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player) { return Orientation.XP_YP; } @Override public boolean toolRotationAllowed() { return false; } @Override public Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis) { return null; } }, /** * Two orientations - either N-S or W-E. Top side remains unchanged. * Placement side will become local north or south. * Tool rotation will either rotate around Y (if clicked T or B) or set to clicked side (otherwise). 
*/ TWO_DIRECTIONS(RotationAxis.THREE_AXIS, Orientation.ZN_YP, Orientation.XP_YP) { private Orientation directionToOrientation(final EnumFacing normalDir) { switch (normalDir) { case EAST: case WEST: return Orientation.ZN_YP; case NORTH: case SOUTH: return Orientation.XP_YP; default: return null; } } @Override public Orientation getPlacementOrientationFromSurface(EnumFacing side) { return directionToOrientation(side); } @Override public Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player) { return directionToOrientation(player.getHorizontalFacing()); } @Override public Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis) { switch (axis) { case UP: case DOWN: return (currentOrientation == Orientation.ZN_YP)? Orientation.XP_YP : Orientation.ZN_YP; case NORTH: case SOUTH: case EAST: case WEST: return directionToOrientation(axis); default: return null; } } }, /** * Three orientations: N-S, W-E, T-B. * Placement side will become local top or bottom. * Tool rotation will set top direction to clicked side. */ THREE_DIRECTIONS(RotationAxis.THREE_AXIS, Orientation.XP_YP, Orientation.YP_XN, Orientation.XP_ZN) { private Orientation directionToOrientation(EnumFacing dir) { switch (dir) { case EAST: case WEST: return Orientation.YP_XN; case NORTH: case SOUTH: return Orientation.XP_ZN; case UP: case DOWN: default: return Orientation.XP_YP; } } @Override public Orientation getPlacementOrientationFromSurface(EnumFacing side) { return directionToOrientation(side); } @Override public Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player) { final EnumFacing normalDir = BlockUtils.get3dOrientation(player, pos); return directionToOrientation(normalDir); } @Override public Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis) { return directionToOrientation(axis); } }, /** * Rotate around Y in for directions: N,S,W,E. * Placement side will become local north. 
 * Tool rotation will either rotate around Y (if clicked T or B) or set to clicked side (otherwise). */
FOUR_DIRECTIONS(RotationAxis.THREE_AXIS, Orientation.XP_YP, Orientation.ZN_YP, Orientation.XN_YP, Orientation.ZP_YP) {
    // Maps a horizontal side to one of the four valid Y-up orientations.
    // Returns null for UP/DOWN, which have no mapping in this mode.
    private Orientation directionToOrientation(EnumFacing side) {
        switch (side) {
            case SOUTH:
                return Orientation.XP_YP;
            case WEST:
                return Orientation.ZP_YP;
            case NORTH:
                return Orientation.XN_YP;
            case EAST:
                return Orientation.ZN_YP;
            default:
                return null; // vertical click: no orientation change derivable
        }
    }

    @Override
    public Orientation getPlacementOrientationFromSurface(EnumFacing side) {
        return directionToOrientation(side);
    }

    @Override
    public Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player) {
        // Face back towards the player: opposite of the player's horizontal look direction.
        final EnumFacing side = player.getHorizontalFacing().getOpposite();
        return directionToOrientation(side);
    }

    @Override
    public Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis) {
        switch (axis) {
            // Clicked top/bottom: spin in place around the vertical axis.
            case UP:
                return currentOrientation.rotateAround(HalfAxis.POS_Y);
            case DOWN:
                return currentOrientation.rotateAround(HalfAxis.NEG_Y);
            // Clicked a side: snap directly to that side.
            case NORTH:
            case SOUTH:
            case EAST:
            case WEST:
                return directionToOrientation(axis);
            default:
                return null;
        }
    }
},
SIX_DIRECTIONS(RotationAxis.THREE_AXIS, Orientation.XP_YN, Orientation.XP_YP, Orientation.XP_ZN, Orientation.XN_ZP, Orientation.ZN_XN, Orientation.ZP_XP) {
    // Maps the desired local-top direction to the orientation whose Y axis points that way.
    public Orientation directionToOrientation(EnumFacing localTop) {
        switch (localTop) {
            case DOWN:
                return Orientation.XP_YN;
            case EAST:
                return Orientation.ZP_XP;
            case NORTH:
                return Orientation.XP_ZN;
            case SOUTH:
                return Orientation.XN_ZP;
            case WEST:
                return Orientation.ZN_XN;
            case UP:
            default:
                return Orientation.XP_YP;
        }
    }

    @Override
    public Orientation getPlacementOrientationFromSurface(EnumFacing side) {
        return directionToOrientation(side);
    }

    @Override
    public Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player) {
        // 3D aim direction (includes up/down), inverted so the block's top faces the player.
        final EnumFacing localTop = BlockUtils.get3dOrientation(player, pos).getOpposite();
        return directionToOrientation(localTop);
    }

    @Override
    public Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis) {
        // Tool click always re-points the local top at the clicked side.
        return directionToOrientation(axis);
    }
},
/**
 * Like {@link #SIX_DIRECTIONS}, but with horizontal orientations used in 1.7.10 (single rotation from top) */
SIX_DIRECTIONS_LEGACY(RotationAxis.THREE_AXIS, Orientation.XN_YN, Orientation.XP_YP, Orientation.XP_ZN, Orientation.XP_ZP, Orientation.YP_XN, Orientation.YN_XP) {
    // Same contract as SIX_DIRECTIONS.directionToOrientation, but with the legacy
    // (1.7.10-era) orientation constants for each local top.
    public Orientation directionToOrientation(EnumFacing localTop) {
        switch (localTop) {
            case DOWN:
                return Orientation.XN_YN;
            case EAST:
                return Orientation.YN_XP;
            case NORTH:
                return Orientation.XP_ZN;
            case SOUTH:
                return Orientation.XP_ZP;
            case WEST:
                return Orientation.YP_XN;
            case UP:
            default:
                return Orientation.XP_YP;
        }
    }

    @Override
    public Orientation getPlacementOrientationFromSurface(EnumFacing side) {
        return directionToOrientation(side);
    }

    @Override
    public Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player) {
        final EnumFacing localTop = BlockUtils.get3dOrientation(player, pos).getOpposite();
        return directionToOrientation(localTop);
    }

    @Override
    public Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis) {
        return directionToOrientation(axis);
    }
},
/**
 * And now it's time for weird ones...
 * Three orientations: N-S, W-E, T-B.
 * Placement side will become local top or bottom.
 * Side can be rotated in four directions */
THREE_FOUR_DIRECTIONS(RotationAxis.THREE_AXIS,
        Orientation.XP_YP, Orientation.XN_YP, Orientation.ZP_YP, Orientation.ZN_YP,
        Orientation.YP_XN, Orientation.YN_XN, Orientation.ZP_XN, Orientation.ZN_XN,
        Orientation.XP_ZN, Orientation.XN_ZN, Orientation.YP_ZN, Orientation.YN_ZN) {
    // Collapses the six facings into the three canonical axis orientations
    // (W-E, N-S, T-B); opposite facings map to the same orientation.
    private Orientation directionToOrientation(EnumFacing dir) {
        switch (dir) {
            case EAST:
            case WEST:
                return Orientation.YP_XN;
            case NORTH:
            case SOUTH:
                return Orientation.XP_ZN;
            case UP:
            case DOWN:
            default:
                return Orientation.XP_YP;
        }
    }

    @Override
    public Orientation getPlacementOrientationFromSurface(EnumFacing side) {
        return directionToOrientation(side);
    }

    @Override
    public Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player) {
        final EnumFacing normalDir = BlockUtils.get3dOrientation(player, pos);
        return directionToOrientation(normalDir);
    }

    @Override
    public Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis) {
        final HalfAxis newTop = HalfAxis.fromEnumFacing(axis);
        final HalfAxis currentTop = currentOrientation.y;
        if (newTop == currentTop) {
            // Clicked the current top: rotate the sides clockwise around it.
            return currentOrientation.rotateAround(HalfAxis.POS_Y);
        } else if (newTop == currentTop.negate()) {
            // Clicked the current bottom: rotate the sides the other way.
            return currentOrientation.rotateAround(HalfAxis.NEG_Y);
        } else {
            // Clicked a perpendicular side: snap to that axis orientation.
            return directionToOrientation(axis);
        }
    }
},
/**
 * Yet another weird one.
 * Top side can rotate when oriented up or down.
 * When top points to cardinal direction, texture top should always align with horizon */
TWELVE_DIRECTIONS(RotationAxis.THREE_AXIS,
        Orientation.lookupYZ(HalfAxis.NEG_Y, HalfAxis.NEG_Z), // first two TOP/BOTTOM orientation are here for easy migration from SIX_DIRECTIONS
        Orientation.lookupYZ(HalfAxis.POS_Y, HalfAxis.POS_Z),
        Orientation.lookupYZ(HalfAxis.NEG_Z, HalfAxis.NEG_Y),
        Orientation.lookupYZ(HalfAxis.POS_Z, HalfAxis.NEG_Y),
        Orientation.lookupYZ(HalfAxis.NEG_X, HalfAxis.NEG_Y),
        Orientation.lookupYZ(HalfAxis.POS_X, HalfAxis.NEG_Y),
        Orientation.lookupYZ(HalfAxis.NEG_Y, HalfAxis.POS_Z),
        Orientation.lookupYZ(HalfAxis.NEG_Y, HalfAxis.NEG_X),
        Orientation.lookupYZ(HalfAxis.NEG_Y, HalfAxis.POS_X),
        Orientation.lookupYZ(HalfAxis.POS_Y, HalfAxis.NEG_Z),
        Orientation.lookupYZ(HalfAxis.POS_Y, HalfAxis.POS_X),
        Orientation.lookupYZ(HalfAxis.POS_Y, HalfAxis.NEG_X)) {
    // Maps a local-top facing to an orientation; for cardinal tops the local
    // front (Z) is forced to NEG_Y so the texture top aligns with the horizon.
    public Orientation directionToOrientation(EnumFacing localTop) {
        switch (localTop) {
            case DOWN:
                return Orientation.lookupYZ(HalfAxis.NEG_Y, HalfAxis.NEG_Z);
            case EAST:
                return Orientation.lookupYZ(HalfAxis.POS_X, HalfAxis.NEG_Y);
            case NORTH:
                return Orientation.lookupYZ(HalfAxis.NEG_Z, HalfAxis.NEG_Y);
            case SOUTH:
                return Orientation.lookupYZ(HalfAxis.POS_Z, HalfAxis.NEG_Y);
            case WEST:
                return Orientation.lookupYZ(HalfAxis.NEG_X, HalfAxis.NEG_Y);
            case UP:
            default:
                return Orientation.lookupYZ(HalfAxis.POS_Y, HalfAxis.POS_Z);
        }
    }

    @Override
    public Orientation getPlacementOrientationFromSurface(EnumFacing side) {
        return directionToOrientation(side);
    }

    @Override
    public Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player) {
        final EnumFacing playerDir = player.getHorizontalFacing().getOpposite();
        // Pitch thresholds pick vertical placement when the player looks
        // steeply down (> 45.5 deg) or up (< -45.5 deg); the horizontal look
        // direction then selects the secondary (front) half-axis.
        if (player.rotationPitch > 45.5F) {
            return Orientation.lookupYZ(HalfAxis.POS_Y, HalfAxis.fromEnumFacing(playerDir));
        } else if (player.rotationPitch < -45.5F) {
            return Orientation.lookupYZ(HalfAxis.NEG_Y, HalfAxis.fromEnumFacing(playerDir));
        } else {
            return Orientation.lookupYZ(HalfAxis.fromEnumFacing(playerDir), HalfAxis.NEG_Y);
        }
    }

    @Override
    public Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis) {
        switch (axis) {
            // Side click: point the top at that side, texture top to horizon.
            case NORTH:
            case SOUTH:
            case EAST:
            case WEST:
                return Orientation.lookupYZ(HalfAxis.fromEnumFacing(axis), HalfAxis.NEG_Y);
            case UP:
                // First click points the top up; further clicks spin around Y.
                if (currentOrientation.y != HalfAxis.POS_Y)
                    return Orientation.lookupYZ(HalfAxis.POS_Y, HalfAxis.POS_Z);
                else
                    return currentOrientation.rotateAround(HalfAxis.POS_Y);
            case DOWN:
                // Same idea for the bottom face.
                if (currentOrientation.y != HalfAxis.NEG_Y)
                    return Orientation.lookupYZ(HalfAxis.NEG_Y, HalfAxis.NEG_Z);
                else
                    return currentOrientation.rotateAround(HalfAxis.POS_Y);
            default:
                return null;
        }
    }
};

// Upper bound on orientations per mode; fixes the size of the id lookup table.
private static final int MAX_ORIENTATIONS = 16;

/**
 * Builds the per-mode lookup tables and blockstate property.
 *
 * @param rotations         axes this mode may rotate around
 * @param validOrientations ordered list of orientations; the position of each
 *                          entry becomes its serialized id (see fromValue/toValue)
 */
private BlockRotationMode(EnumFacing[] rotations, Orientation... validOrientations) {
    this.rotationAxes = rotations;
    this.validDirections = ImmutableSet.copyOf(validOrientations);
    final int count = validOrientations.length;
    // A duplicate would silently collapse in the set, so the sizes must match.
    Preconditions.checkArgument(this.validDirections.size() == count, "Duplicated directions");
    Preconditions.checkArgument(count <= MAX_ORIENTATIONS, "Too many values: %s", count);
    this.property = PropertyEnum.create("orientation", Orientation.class, validDirections);
    this.idToOrientation = new Orientation[MAX_ORIENTATIONS];
    this.orientationToId = new int[Orientation.VALUES.length];
    for (int i = 0; i < count; i++) {
        final Orientation orientation = validOrientations[i];
        Preconditions.checkNotNull(orientation);
        idToOrientation[i] = orientation;
        orientationToId[orientation.ordinal()] = i;
    }
    if (count == 0) {
        this.bitCount = 0;
        this.mask = 0;
    } else {
        // Number of bits needed to store any valid id, and the matching mask.
        final int maxValue = count - 1;
        this.bitCount = Integer.SIZE - Integer.numberOfLeadingZeros(maxValue);
        this.mask = (1 << bitCount) - 1;
        // Pad unused table slots with the default orientation so any id that
        // fits in MAX_ORIENTATIONS decodes to something valid.
        for (int i = count; i < idToOrientation.length; i++)
            idToOrientation[i] = idToOrientation[0];
    }
    this.textureTransform = setupTextureTransform(BlockTextureTransform.builder()).build();
}

// id -> orientation table, padded with the default orientation (index 0).
private final Orientation[] idToOrientation;
// Orientation.ordinal() -> id; 0 (the default id) for orientations not in this mode.
private final int[] orientationToId;
public final EnumFacing[] rotationAxes;
public final Set<Orientation> validDirections;
// Bits required to serialize an orientation id for this mode.
public final int bitCount;
// Bitmask covering bitCount bits.
public final int mask;
public final BlockTextureTransform textureTransform;
public final PropertyEnum<Orientation> property;

/** Hook for modes to customize texture mirroring; default mirrors N/E in U and DOWN in V. */
protected BlockTextureTransform.Builder setupTextureTransform(BlockTextureTransform.Builder builder) {
    return builder.mirrorU(EnumFacing.NORTH).mirrorU(EnumFacing.EAST).mirrorV(EnumFacing.DOWN);
}

/** Decodes a serialized id; out-of-range values fall back to the default orientation. */
public Orientation fromValue(int value) {
    try {
        return idToOrientation[value];
    } catch (IndexOutOfBoundsException e) {
        return idToOrientation[0];
    }
}

/** Encodes an orientation to its id; 0 for unknown orientations. */
// NOTE(review): ordinal() is always in range for a non-null dir, so the catch
// only matters defensively — a null dir would still throw NPE here. Verify intent.
public int toValue(Orientation dir) {
    try {
        return orientationToId[dir.ordinal()];
    } catch (IndexOutOfBoundsException e) {
        return 0;
    }
}

/** True if this mode accepts the given orientation. */
public boolean isPlacementValid(Orientation dir) {
    return validDirections.contains(dir);
}

/** Orientation for a block placed against the given surface side. */
public abstract Orientation getPlacementOrientationFromSurface(EnumFacing side);

/** Orientation for a block placed by the given entity at the given position. */
public abstract Orientation getPlacementOrientationFromEntity(BlockPos pos, EntityLivingBase player);

/** Whether wrench/tool rotation is allowed for this mode. */
public boolean toolRotationAllowed() {
    return true;
}

/** New orientation after a tool click on the given side; null if no change applies. */
public abstract Orientation calculateToolRotation(Orientation currentOrientation, EnumFacing axis);
}
package openmods.container;

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Nonnull;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.inventory.ClickType;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.IContainerListener;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
import openmods.utils.InventoryUtils;

/**
 * Base container pairing an owner inventory (e.g. a tile entity's) with the
 * player inventory, with helpers for laying out slot grids and a
 * shift-click transfer implementation that honors {@link ICustomSlot} rules.
 *
 * @param <T> type of the owning object (tile entity, item handler holder, ...)
 */
public abstract class ContainerBase<T> extends Container {

    // Number of slots in the owner inventory (cached at construction).
    protected final int inventorySize;
    protected final IInventory playerInventory;
    protected final T owner;
    protected final IInventory inventory;

    /** Slot that delegates item validity to {@link IInventory#isItemValidForSlot}. */
    protected static class RestrictedSlot extends Slot {
        private final int inventoryIndex;

        public RestrictedSlot(IInventory inventory, int slot, int x, int y) {
            super(inventory, slot, x, y);
            inventoryIndex = slot; // since slotIndex is private
        }

        @Override
        public boolean isItemValid(@Nonnull ItemStack itemstack) {
            return inventory.isItemValidForSlot(inventoryIndex, itemstack);
        }
    }

    public ContainerBase(IInventory playerInventory, IInventory ownerInventory, T owner) {
        this.owner = owner;
        this.inventory = ownerInventory;
        this.inventorySize = inventory.getSizeInventory();
        this.playerInventory = playerInventory;
    }

    /**
     * Adds the whole owner inventory as a grid of 18px slots.
     * NOTE(review): the last row iterates a full width, so if inventorySize is
     * not a multiple of width this creates slots past the inventory end — TODO confirm
     * all callers use exact multiples.
     */
    protected void addInventoryGrid(int xOffset, int yOffset, int width) {
        int height = (int)Math.ceil((double)inventorySize / width);
        for (int y = 0, slotId = 0; y < height; y++) {
            for (int x = 0; x < width; x++, slotId++) {
                addSlotToContainer(new RestrictedSlot(inventory, slotId, xOffset + x * 18, yOffset + y * 18));
            }
        }
    }

    /** Adds a horizontal line of {@code count} slots starting at inventory index {@code start}. */
    protected void addInventoryLine(int xOffset, int yOffset, int start, int count) {
        addInventoryLine(xOffset, yOffset, start, count, 0);
    }

    /** Same as above, with {@code margin} extra pixels between adjacent slots. */
    protected void addInventoryLine(int xOffset, int yOffset, int start, int count, int margin) {
        for (int x = 0, slotId = start; x < count; x++, slotId++) {
            addSlotToContainer(new RestrictedSlot(inventory, slotId, xOffset + x * (18 + margin), yOffset));
        }
    }

    protected void addPlayerInventorySlots(int offsetY) {
        addPlayerInventorySlots(8, offsetY);
    }

    /** Adds the standard 3x9 player inventory plus the 9-slot hotbar (58px below). */
    protected void addPlayerInventorySlots(int offsetX, int offsetY) {
        // Main inventory: slots 9..35 in vanilla player-inventory numbering.
        for (int row = 0; row < 3; row++)
            for (int column = 0; column < 9; column++)
                addSlotToContainer(new Slot(playerInventory, column + row * 9 + 9, offsetX + column * 18, offsetY + row * 18));

        // Hotbar: slots 0..8.
        for (int slot = 0; slot < 9; slot++)
            addSlotToContainer(new Slot(playerInventory, slot, offsetX + slot * 18, offsetY + 58));
    }

    @Override
    public boolean canInteractWith(EntityPlayer entityplayer) {
        return inventory.isUsableByPlayer(entityplayer);
    }

    public T getOwner() {
        return owner;
    }

    /**
     * Merges {@code stackToMerge} into the slot range [start, stop), respecting
     * {@link ICustomSlot} transfer restrictions (unlike vanilla mergeItemStack).
     *
     * Pass 1 tops up existing stackable piles; pass 2 drops the remainder into
     * the first empty, accepting slot. {@code reverse} walks the range backwards.
     *
     * @return true if anything was moved (pass 2 success short-circuits with true)
     */
    protected boolean mergeItemStackSafe(@Nonnull ItemStack stackToMerge, int start, int stop, boolean reverse) {
        boolean inventoryChanged = false;
        final int delta = reverse? -1 : 1;
        List<Slot> slots = getSlots();

        // Pass 1: merge into existing stacks of the same item.
        if (stackToMerge.isStackable()) {
            int slotId = reverse? stop - 1 : start;

            while (!stackToMerge.isEmpty()
                    && ((!reverse && slotId < stop) || (reverse && slotId >= start))) {
                Slot slot = slots.get(slotId);
                if (canTransferItemsIn(slot)) {
                    ItemStack stackInSlot = slot.getStack();
                    if (InventoryUtils.tryMergeStacks(stackToMerge, stackInSlot)) {
                        slot.onSlotChanged();
                        inventoryChanged = true;
                    }
                }

                slotId += delta;
            }
        }

        // Pass 2: place the remainder into the first empty slot that accepts it.
        if (!stackToMerge.isEmpty()) {
            int slotId = reverse? stop - 1 : start;

            while ((!reverse && slotId < stop) || (reverse && slotId >= start)) {
                Slot slot = slots.get(slotId);
                ItemStack stackInSlot = slot.getStack();

                if (stackInSlot.isEmpty() && canTransferItemsIn(slot) && slot.isItemValid(stackToMerge)) {
                    slot.putStack(stackToMerge.copy());
                    slot.onSlotChanged();
                    stackToMerge.setCount(0); // caller's stack is now fully consumed
                    return true;
                }

                slotId += delta;
            }
        }

        return inventoryChanged;
    }

    /**
     * Shift-click handler: moves a stack between the owner inventory
     * (slot ids below inventorySize) and the player inventory section.
     * Returns the pre-transfer copy if anything moved, else EMPTY.
     */
    @Override
    @Nonnull
    public ItemStack transferStackInSlot(EntityPlayer player, int slotId) {
        // TODO 1.11 verify
        final Slot slot = inventorySlots.get(slotId);

        if (slot != null && canTransferItemOut(slot) && slot.getHasStack()) {
            ItemStack itemToTransfer = slot.getStack();
            ItemStack copy = itemToTransfer.copy();
            if (slotId < inventorySize) {
                // From owner inventory into player slots (filled back-to-front).
                if (!mergeItemStackSafe(itemToTransfer, inventorySize, inventorySlots.size(), true)) return ItemStack.EMPTY;
            } else if (!mergeItemStackSafe(itemToTransfer, 0, inventorySize, false)) return ItemStack.EMPTY;

            // Write back whatever remains (possibly an emptied stack).
            slot.putStack(itemToTransfer);

            if (itemToTransfer.getCount() != copy.getCount()) return copy;
        }
        return ItemStack.EMPTY;
    }

    /** Whether shift-click may take items out of this slot (ICustomSlot hook). */
    protected boolean canTransferItemOut(Slot slot) {
        if (slot instanceof ICustomSlot) return ((ICustomSlot)slot).canTransferItemsOut();
        return true;
    }

    /** Whether shift-click may put items into this slot (ICustomSlot hook). */
    protected boolean canTransferItemsIn(Slot slot) {
        if (slot instanceof ICustomSlot) return ((ICustomSlot)slot).canTransferItemsIn();
        return true;
    }

    public int getInventorySize() {
        return inventorySize;
    }

    protected List<Slot> getSlots() {
        return inventorySlots;
    }

    /** Players currently listening to this container (server-side viewers). */
    public Set<EntityPlayer> getPlayers() {
        Set<EntityPlayer> players = new HashSet<EntityPlayer>();
        for (IContainerListener crafter : listeners) {
            if (crafter instanceof EntityPlayerMP) {
                players.add((EntityPlayerMP)crafter);
            }
        }
        return players;
    }

    /** GUI button callback; default is a no-op. */
    public void onButtonClicked(EntityPlayer player, int buttonId) {}

    // Vanilla's enchantItem is reused as a generic button-click channel;
    // always returns false so no enchantment logic runs.
    @Override
    public boolean enchantItem(EntityPlayer player, int buttonId) {
        onButtonClicked(player, buttonId);
        return false;
    }

    /** Routes clicks on {@link ICustomSlot}s to the slot itself; vanilla handling otherwise. */
    @Override
    public ItemStack slotClick(int slotId, int dragType, ClickType clickType, EntityPlayer player) {
        if (slotId >= 0 && slotId < inventorySlots.size()) {
            Slot slot = getSlot(slotId);
            if (slot instanceof ICustomSlot) return ((ICustomSlot)slot).onClick(player, dragType, clickType);
        }

        return super.slotClick(slotId, dragType, clickType, player);
    }

    @Override
    public boolean canDragIntoSlot(Slot slot) {
        if (slot instanceof ICustomSlot) return ((ICustomSlot)slot).canDrag();
        return super.canDragIntoSlot(slot);
    }
}
package org.apache.couchdb.lucene; import java.util.Scanner; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.NIOFSDirectory; /** * Search entry point. */ public final class Search { private static final Logger logger = LogManager.getLogger(Search.class); public static void main(final String[] args) throws Exception { IndexReader reader = null; IndexSearcher searcher = null; final Scanner scanner = new Scanner(System.in); while (scanner.hasNextLine()) { if (reader == null) { // Open a reader and searcher if index exists. if (IndexReader.indexExists(Config.INDEX_DIR)) { reader = IndexReader.open(NIOFSDirectory.getDirectory(Config.INDEX_DIR), true); searcher = new IndexSearcher(reader); } } else { // Refresh reader and searcher if necessary. final IndexReader newReader = reader.reopen(); if (reader != newReader) { final IndexReader oldReader = reader; reader = newReader; searcher = new IndexSearcher(reader); oldReader.close(); } } // Process search request if index exists. 
if (searcher == null) { System.out.println("{\"code\":503,\"body\":\"couchdb-lucene not available.\"}"); } else { final SearchRequest request = new SearchRequest(scanner.nextLine()); final String result = request.execute(searcher); System.out.println(result); } } } /* public String query(final String dbname, final String query, final String sort_fields, final boolean ascending, final int skip, final int limit, final boolean include_docs, final boolean debug) throws IOException, ParseException { if (limit > Config.MAX_LIMIT) { return Utils.error("limit of " + limit + " exceeds maximum limit of " + Config.MAX_LIMIT); } final BooleanQuery bq = new BooleanQuery(); bq.add(new TermQuery(new Term(Config.DB, dbname)), Occur.MUST); bq.add(parse(query), Occur.MUST); final IndexSearcher searcher; synchronized (mutex) { searcher = this.searcher; } searcher.getIndexReader().incRef(); final TopDocs td; final long start = System.nanoTime(); try { if (sort_fields == null) { td = searcher.search(bq, null, skip + limit); } else { final Sort sort; if (sort_fields.indexOf(",") != -1) { sort = new Sort(sort_fields.split(",")); } else { sort = new Sort(sort_fields, !ascending); } td = searcher.search(bq, null, skip + limit, sort); } } finally { searcher.getIndexReader().decRef(); } final long search_duration = System.nanoTime() - start; TopFieldDocs tfd = null; if (td instanceof TopFieldDocs) { tfd = (TopFieldDocs) td; } final JSONObject json = new JSONObject(); json.element("total_rows", td.totalHits); // Report on sorting order, if specified. 
if (tfd != null) { final JSONArray sort_order = new JSONArray(); for (final SortField field : tfd.fields) { final JSONObject col = new JSONObject(); col.element("field", field.getField()); col.element("reverse", field.getReverse()); final String type; switch (field.getType()) { case SortField.DOC: type = "doc"; break; case SortField.SCORE: type = "score"; break; case SortField.INT: type = "int"; break; case SortField.LONG: type = "long"; break; case SortField.BYTE: type = "byte"; break; case SortField.CUSTOM: type = "custom"; break; case SortField.DOUBLE: type = "double"; break; case SortField.FLOAT: type = "float"; break; case SortField.SHORT: type = "short"; break; case SortField.STRING: type = "string"; break; default: type = "unknown"; break; } col.element("type", type); sort_order.add(col); } json.element("sort_order", sort_order); } final int max = min(td.totalHits, limit); final String[] fetch_ids = include_docs ? new String[max] : null; final JSONArray rows = new JSONArray(); for (int i = skip; i < skip + max; i++) { final Document doc = searcher.doc(td.scoreDocs[i].doc, FS); final JSONObject obj = new JSONObject(); obj.element("_id", doc.get(Config.ID)); obj.element("_rev", doc.get(Config.REV)); obj.element("score", td.scoreDocs[i].score); if (tfd != null) { final FieldDoc fd = (FieldDoc) tfd.scoreDocs[i]; obj.element("sort_order", fd.fields); } if (fetch_ids != null) { fetch_ids[i - skip] = obj.getString(Config.ID); } if (include_docs) { obj.element("doc", db.getDoc(dbname, obj.getString("_id"), obj.getString("_rev"))); } rows.add(obj); } if (fetch_ids != null) { final JSONObject fetch_docs = db.getDocs(dbname, fetch_ids); final JSONArray arr = fetch_docs.getJSONArray("rows"); for (int i = 0; i < max; i++) { rows.getJSONObject(i).element("doc", arr.getJSONObject(i).getJSONObject("doc")); } } json.element("rows", rows); final long total_duration = System.nanoTime() - start; final JSONObject result = new JSONObject(); result.element("code", 200); if (debug) 
{ final StringBuilder builder = new StringBuilder(500); // build basic lines. builder.append("<dl>"); builder.append("<dt>database name</dt><dd>" + dbname + "</dd>"); builder.append("<dt>query</dt><dd>" + bq + "</dd>"); builder.append("<dt>sort</dt><dd>" + sort_fields + "</dd>"); builder.append("<dt>skip</dt><dd>" + skip + "</dd>"); builder.append("<dt>limit</dt><dd>" + limit + "</dd>"); builder.append("<dt>total_rows</dt><dd>" + json.getInt("total_rows") + "</dd>"); if (json.get("sort_order") != null) { builder.append("<dt>sort_order</dt><dd>" + json.get("sort_order") + "</dd>"); } builder.append("<dt>search duration</dt><dd>" + DurationFormatUtils.formatDurationHMS(search_duration / 1000000) + "</dd>"); builder.append("<dt>total duration</dt><dd>" + DurationFormatUtils.formatDurationHMS(total_duration / 1000000) + "</dd>"); builder.append("<dt>rows</dt><dd>"); builder.append("<ol start=\"" + skip + "\">"); for (int i = 0; i < rows.size(); i++) { builder.append("<li>" + rows.get(i) + "</li>"); } builder.append("</ol>"); builder.append("</dd>"); builder.append("</dl>"); result.element("body", builder.toString()); } else { result.element("json", json); } return result.toString(); } */ }
package org.broad.igv.sam; import org.broad.igv.Globals; import org.broad.igv.event.AlignmentTrackEvent; import org.broad.igv.event.IGVEventBus; import org.broad.igv.event.IGVEventObserver; import org.broad.igv.feature.FeatureUtils; import org.broad.igv.feature.Locus; import org.broad.igv.feature.Range; import org.broad.igv.feature.Strand; import org.broad.igv.feature.genome.ChromosomeNameComparator; import org.broad.igv.feature.genome.Genome; import org.broad.igv.jbrowse.CircularViewUtilities; import org.broad.igv.lists.GeneList; import org.broad.igv.logging.LogManager; import org.broad.igv.logging.Logger; import org.broad.igv.prefs.Constants; import org.broad.igv.prefs.IGVPreferences; import org.broad.igv.prefs.PreferencesManager; import org.broad.igv.renderer.GraphicUtils; import org.broad.igv.sashimi.SashimiPlot; import org.broad.igv.session.Persistable; import org.broad.igv.session.Session; import org.broad.igv.tools.PFMExporter; import org.broad.igv.track.*; import org.broad.igv.ui.FontManager; import org.broad.igv.ui.IGV; import org.broad.igv.ui.InsertSizeSettingsDialog; import org.broad.igv.ui.color.ColorTable; import org.broad.igv.ui.color.ColorUtilities; import org.broad.igv.ui.color.PaletteColorTable; import org.broad.igv.ui.panel.FrameManager; import org.broad.igv.ui.panel.IGVPopupMenu; import org.broad.igv.ui.panel.ReferenceFrame; import org.broad.igv.ui.util.MessageUtils; import org.broad.igv.ui.util.UIUtilities; import org.broad.igv.util.Pair; import org.broad.igv.util.ResourceLocator; import org.broad.igv.util.StringUtils; import org.broad.igv.util.blat.BlatClient; import org.broad.igv.util.collections.CollUtils; import org.broad.igv.util.extview.ExtendViewClient; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import javax.swing.*; import java.awt.*; import java.awt.datatransfer.Clipboard; import java.awt.datatransfer.StringSelection; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; 
import java.awt.event.MouseEvent; import java.awt.geom.Rectangle2D; import java.util.List; import java.util.*; import static org.broad.igv.prefs.Constants.*; /** * @author jrobinso */ public class AlignmentTrack extends AbstractTrack implements IGVEventObserver { // Alignment colors static Color DEFAULT_ALIGNMENT_COLOR = new Color(185, 185, 185); //200, 200, 200); public enum ColorOption { INSERT_SIZE, READ_STRAND, FIRST_OF_PAIR_STRAND, PAIR_ORIENTATION, READ_ORDER, SAMPLE, READ_GROUP, LIBRARY, MOVIE, ZMW, BISULFITE, NOMESEQ, TAG, NONE, UNEXPECTED_PAIR, MAPPED_SIZE, LINK_STRAND, YC_TAG, BASE_MODIFICATION, BASE_MODIFICATION_5MC, BASE_MODIFICATION_C; public boolean isBaseMod() { return this == BASE_MODIFICATION || this == BASE_MODIFICATION_5MC || this == BASE_MODIFICATION_C; } } public enum SortOption { START, STRAND, NUCLEOTIDE, QUALITY, SAMPLE, READ_GROUP, INSERT_SIZE, FIRST_OF_PAIR_STRAND, MATE_CHR, TAG, SUPPLEMENTARY, NONE, HAPLOTYPE, READ_ORDER, READ_NAME, ALIGNED_READ_LENGTH } public enum ShadeAlignmentsOption { NONE("none"), MAPPING_QUALITY_HIGH("mapping quality high"), MAPPING_QUALITY_LOW("mapping quality low"); public final String label; ShadeAlignmentsOption(String label) { this.label = label; } } public enum GroupOption { STRAND("read strand"), SAMPLE("sample"), READ_GROUP("read group"), LIBRARY("library"), FIRST_OF_PAIR_STRAND("first-in-pair strand"), TAG("tag"), PAIR_ORIENTATION("pair orientation"), MATE_CHROMOSOME("chromosome of mate"), NONE("none"), SUPPLEMENTARY("supplementary flag"), BASE_AT_POS("base at position"), MOVIE("movie"), ZMW("ZMW"), HAPLOTYPE("haplotype"), READ_ORDER("read order"), LINKED("linked"), PHASE("phase"), REFERENCE_CONCORDANCE("reference concordance"), MAPPING_QUALITY("mapping quality"); public final String label; GroupOption(String label) { this.label = label; } } public enum BisulfiteContext { CG, CHH, CHG, HCG, GCH, WCG, NONE } enum OrientationType { RR, LL, RL, LR, UNKNOWN } private static Logger log = 
LogManager.getLogger(AlignmentTrack.class); private static final int GROUP_LABEL_HEIGHT = 10; private static final int GROUP_MARGIN = 5; private static final int TOP_MARGIN = 20; private static final int DS_MARGIN_0 = 2; private static final int DOWNAMPLED_ROW_HEIGHT = 3; private static final int INSERTION_ROW_HEIGHT = 9; private static final int DS_MARGIN_2 = 5; private static int nClusters = 2; private static final Map<BisulfiteContext, String> bisulfiteContextToPubString = new HashMap<>(); static { bisulfiteContextToPubString.put(BisulfiteContext.CG, "CG"); bisulfiteContextToPubString.put(BisulfiteContext.CHH, "CHH"); bisulfiteContextToPubString.put(BisulfiteContext.CHG, "CHG"); bisulfiteContextToPubString.put(BisulfiteContext.HCG, "HCG"); bisulfiteContextToPubString.put(BisulfiteContext.GCH, "GCH"); bisulfiteContextToPubString.put(BisulfiteContext.WCG, "WCG"); bisulfiteContextToPubString.put(BisulfiteContext.NONE, "None"); } private static final Map<BisulfiteContext, Pair<byte[], byte[]>> bisulfiteContextToContextString = new HashMap<>(); static { bisulfiteContextToContextString.put(BisulfiteContext.CG, new Pair<>(new byte[]{}, new byte[]{'G'})); bisulfiteContextToContextString.put(BisulfiteContext.CHH, new Pair<>(new byte[]{}, new byte[]{'H', 'H'})); bisulfiteContextToContextString.put(BisulfiteContext.CHG, new Pair<>(new byte[]{}, new byte[]{'H', 'G'})); bisulfiteContextToContextString.put(BisulfiteContext.HCG, new Pair<>(new byte[]{'H'}, new byte[]{'G'})); bisulfiteContextToContextString.put(BisulfiteContext.GCH, new Pair<>(new byte[]{'G'}, new byte[]{'H'})); bisulfiteContextToContextString.put(BisulfiteContext.WCG, new Pair<>(new byte[]{'W'}, new byte[]{'G'})); } public static boolean isBisulfiteColorType(ColorOption o) { return (o.equals(ColorOption.BISULFITE) || o.equals(ColorOption.NOMESEQ)); } private static String getBisulfiteContextPubStr(BisulfiteContext item) { return bisulfiteContextToPubString.get(item); } public static byte[] 
getBisulfiteContextPreContext(BisulfiteContext item) { Pair<byte[], byte[]> pair = AlignmentTrack.bisulfiteContextToContextString.get(item); return pair.getFirst(); } public static byte[] getBisulfiteContextPostContext(BisulfiteContext item) { Pair<byte[], byte[]> pair = AlignmentTrack.bisulfiteContextToContextString.get(item); return pair.getSecond(); } private AlignmentDataManager dataManager; private SequenceTrack sequenceTrack; private CoverageTrack coverageTrack; private SpliceJunctionTrack spliceJunctionTrack; private final Genome genome; private ExperimentType experimentType; private final AlignmentRenderer renderer; RenderOptions renderOptions; private boolean removed = false; private RenderRollback renderRollback; private boolean showGroupLine; private Map<ReferenceFrame, List<InsertionInterval>> insertionIntervalsMap; private int expandedHeight = 14; private final int collapsedHeight = 9; private final int maxSquishedHeight = 5; private int squishedHeight = maxSquishedHeight; private final int minHeight = 50; private Rectangle alignmentsRect; private Rectangle downsampleRect; private Rectangle insertionRect; private ColorTable readNamePalette; // Dynamic fields protected final HashMap<String, Color> selectedReadNames = new HashMap<>(); /** * Create a new alignment track * * @param locator * @param dataManager * @param genome */ public AlignmentTrack(ResourceLocator locator, AlignmentDataManager dataManager, Genome genome) { super(locator); this.dataManager = dataManager; this.genome = genome; renderer = new AlignmentRenderer(this); renderOptions = new RenderOptions(this); setColor(DEFAULT_ALIGNMENT_COLOR); dataManager.setAlignmentTrack(this); dataManager.subscribe(this); IGVPreferences prefs = getPreferences(); minimumHeight = 50; showGroupLine = prefs.getAsBoolean(SAM_SHOW_GROUP_SEPARATOR); try { setDisplayMode(DisplayMode.valueOf(prefs.get(SAM_DISPLAY_MODE).toUpperCase())); } catch (Exception e) { setDisplayMode(DisplayMode.EXPANDED); } if 
(prefs.getAsBoolean(SAM_SHOW_REF_SEQ)) { sequenceTrack = new SequenceTrack("Reference sequence"); sequenceTrack.setHeight(14); } if (renderOptions.colorOption == ColorOption.BISULFITE) { setExperimentType(ExperimentType.BISULFITE); } readNamePalette = new PaletteColorTable(ColorUtilities.getDefaultPalette()); insertionIntervalsMap = Collections.synchronizedMap(new HashMap<>()); dataManager.setViewAsPairs(prefs.getAsBoolean(SAM_DISPLAY_PAIRED), renderOptions); IGVEventBus.getInstance().subscribe(FrameManager.ChangeEvent.class, this); IGVEventBus.getInstance().subscribe(AlignmentTrackEvent.class, this); } public void init() { if (experimentType == null) { ExperimentType type = dataManager.inferType(); if (type != null) { setExperimentType(type); } } } @Override public void receiveEvent(Object event) { if (event instanceof FrameManager.ChangeEvent) { // Trim insertionInterval map to current frames Map<ReferenceFrame, List<InsertionInterval>> newMap = Collections.synchronizedMap(new HashMap<>()); for (ReferenceFrame frame : ((FrameManager.ChangeEvent) event).getFrames()) { if (insertionIntervalsMap.containsKey(frame)) { newMap.put(frame, insertionIntervalsMap.get(frame)); } } insertionIntervalsMap = newMap; } else if (event instanceof AlignmentTrackEvent) { AlignmentTrackEvent e = (AlignmentTrackEvent) event; AlignmentTrackEvent.Type eventType = e.getType(); switch (eventType) { case ALLELE_THRESHOLD: dataManager.alleleThresholdChanged(); break; case RELOAD: clearCaches(); repaint(); case REFRESH: repaint(); break; } } } void setExperimentType(ExperimentType type) { if (type != experimentType) { experimentType = type; boolean showJunction = getPreferences(type).getAsBoolean(Constants.SAM_SHOW_JUNCTION_TRACK); if (showJunction != spliceJunctionTrack.isVisible()) { spliceJunctionTrack.setVisible(showJunction); if (IGV.hasInstance()) { IGV.getInstance().revalidateTrackPanels(); } } boolean showCoverage = getPreferences(type).getAsBoolean(SAM_SHOW_COV_TRACK); if 
(showCoverage != coverageTrack.isVisible()) { coverageTrack.setVisible(showCoverage); if (IGV.hasInstance()) { IGV.getInstance().revalidateTrackPanels(); } } boolean showAlignments = getPreferences(type).getAsBoolean(SAM_SHOW_ALIGNMENT_TRACK); if (showAlignments != isVisible()) { setVisible(showAlignments); if (IGV.hasInstance()) { IGV.getInstance().revalidateTrackPanels(); } } //ExperimentTypeChangeEvent event = new ExperimentTypeChangeEvent(this, experimentType); //IGVEventBus.getInstance().post(event); } } ExperimentType getExperimentType() { return experimentType; } public AlignmentDataManager getDataManager() { return dataManager; } public void setCoverageTrack(CoverageTrack coverageTrack) { this.coverageTrack = coverageTrack; } public CoverageTrack getCoverageTrack() { return coverageTrack; } public void setSpliceJunctionTrack(SpliceJunctionTrack spliceJunctionTrack) { this.spliceJunctionTrack = spliceJunctionTrack; } public SpliceJunctionTrack getSpliceJunctionTrack() { return spliceJunctionTrack; } @Override public IGVPopupMenu getPopupMenu(TrackClickEvent te) { return new PopupMenu(te); } @Override public void setHeight(int preferredHeight) { super.setHeight(preferredHeight); minimumHeight = preferredHeight; } @Override public int getHeight() { int nGroups = dataManager.getMaxGroupCount(); int h = Math.max(minHeight, getNLevels() * getRowHeight() + nGroups * GROUP_MARGIN + TOP_MARGIN + DS_MARGIN_0 + DOWNAMPLED_ROW_HEIGHT + DS_MARGIN_2); //if (insertionRect != null) { // TODO - replace with expand insertions preference h += INSERTION_ROW_HEIGHT + DS_MARGIN_0; return Math.max(minimumHeight, h); } private int getRowHeight() { if (getDisplayMode() == DisplayMode.EXPANDED) { return expandedHeight; } else if (getDisplayMode() == DisplayMode.COLLAPSED) { return collapsedHeight; } else { return squishedHeight; } } private int getNLevels() { return dataManager.getNLevels(); } @Override public boolean isReadyToPaint(ReferenceFrame frame) { if 
(frame.getChrName().equals(Globals.CHR_ALL) || frame.getScale() > dataManager.getMinVisibleScale()) { return true; // Nothing to paint } else { List<InsertionInterval> insertionIntervals = getInsertionIntervals(frame); insertionIntervals.clear(); return dataManager.isLoaded(frame); } } @Override public void load(ReferenceFrame referenceFrame) { if (log.isDebugEnabled()) { log.debug("Reading - thread: " + Thread.currentThread().getName()); } dataManager.load(referenceFrame, renderOptions, true); } public void render(RenderContext context, Rectangle rect) { int viewWindowSize = context.getReferenceFrame().getCurrentRange().getLength(); if (viewWindowSize > dataManager.getVisibilityWindow()) { Rectangle visibleRect = context.getVisibleRect().intersection(rect); Graphics2D g2 = context.getGraphic2DForColor(Color.gray); GraphicUtils.drawCenteredText("Zoom in to see alignments.", visibleRect, g2); return; } context.getGraphics2D("LABEL").setFont(FontManager.getFont(GROUP_LABEL_HEIGHT)); // Split track rectangle into sections. int seqHeight = sequenceTrack == null ? 0 : sequenceTrack.getHeight(); if (seqHeight > 0) { Rectangle seqRect = new Rectangle(rect); seqRect.height = seqHeight; sequenceTrack.render(context, seqRect); } // Top gap. 
rect.y += DS_MARGIN_0; downsampleRect = new Rectangle(rect); downsampleRect.height = DOWNAMPLED_ROW_HEIGHT; renderDownsampledIntervals(context, downsampleRect); if (renderOptions.isShowInsertionMarkers()) { insertionRect = new Rectangle(rect); insertionRect.y += DOWNAMPLED_ROW_HEIGHT + DS_MARGIN_0; insertionRect.height = INSERTION_ROW_HEIGHT; renderInsertionIntervals(context, insertionRect); rect.y = insertionRect.y + insertionRect.height; } alignmentsRect = new Rectangle(rect); alignmentsRect.y += 2; alignmentsRect.height -= (alignmentsRect.y - rect.y); renderAlignments(context, alignmentsRect); } private void renderDownsampledIntervals(RenderContext context, Rectangle downsampleRect) { // Might be offscreen if (!context.getVisibleRect().intersects(downsampleRect)) return; final AlignmentInterval loadedInterval = dataManager.getLoadedInterval(context.getReferenceFrame()); if (loadedInterval == null) return; Graphics2D g = context.getGraphic2DForColor(Color.black); List<DownsampledInterval> intervals = loadedInterval.getDownsampledIntervals(); for (DownsampledInterval interval : intervals) { final double scale = context.getScale(); final double origin = context.getOrigin(); int x0 = (int) ((interval.getStart() - origin) / scale); int x1 = (int) ((interval.getEnd() - origin) / scale); int w = Math.max(1, x1 - x0); // If there is room, leave a gap on one side if (w > 5) w // Greyscale from 0 -> 100 downsampled //int gray = 200 - interval.getCount(); //Color color = (gray <= 0 ? 
Color.black : ColorUtilities.getGrayscaleColor(gray)); g.fillRect(x0, downsampleRect.y, w, downsampleRect.height); } } private void renderAlignments(RenderContext context, Rectangle inputRect) { final AlignmentInterval loadedInterval = dataManager.getLoadedInterval(context.getReferenceFrame(), true); if (loadedInterval == null) { return; } final AlignmentCounts alignmentCounts = loadedInterval.getCounts(); //log.debug("Render features"); PackedAlignments groups = dataManager.getGroups(loadedInterval, renderOptions); if (groups == null) { //Assume we are still loading. //This might not always be true return; } // Check for YC tag if (renderOptions.colorOption == null && dataManager.hasYCTags()) { renderOptions.colorOption = ColorOption.YC_TAG; } Map<String, PEStats> peStats = dataManager.getPEStats(); if (peStats != null) { renderOptions.peStats = peStats; } Rectangle visibleRect = context.getVisibleRect(); // Divide rectangle into equal height levels double y = inputRect.getY(); double h; if (getDisplayMode() == DisplayMode.EXPANDED) { h = expandedHeight; } else if (getDisplayMode() == DisplayMode.COLLAPSED) { h = collapsedHeight; } else { int visHeight = visibleRect.height; int depth = dataManager.getNLevels(); if (depth == 0) { squishedHeight = Math.min(maxSquishedHeight, Math.max(1, expandedHeight)); } else { squishedHeight = Math.min(maxSquishedHeight, Math.max(1, Math.min(expandedHeight, visHeight / depth))); } h = squishedHeight; } // Loop through groups Graphics2D groupBorderGraphics = context.getGraphic2DForColor(AlignmentRenderer.GROUP_DIVIDER_COLOR); int nGroups = groups.size(); int groupNumber = 0; GroupOption groupOption = renderOptions.getGroupByOption(); for (Map.Entry<String, List<Row>> entry : groups.entrySet()) { groupNumber++; double yGroup = y; // Remember this for label // Loop through the alignment rows for this group List<Row> rows = entry.getValue(); for (Row row : rows) { if ((visibleRect != null && y > visibleRect.getMaxY())) { break; } 
assert visibleRect != null;
            // Only render rows that intersect the visible viewport.
            if (y + h > visibleRect.getY()) {
                Rectangle rowRectangle = new Rectangle(inputRect.x, (int) y, inputRect.width, (int) h);
                renderer.renderAlignments(row.alignments, alignmentCounts, context, rowRectangle, renderOptions);
                // Record row geometry for later mouse hit-testing (getAlignmentAt).
                row.y = y;
                row.h = h;
            }
            y += h;
        }
        if (groupOption != GroupOption.NONE) {
            // Draw a subtle divider line between groups
            if (showGroupLine) {
                if (groupNumber < nGroups) {
                    int borderY = (int) y + GROUP_MARGIN / 2;
                    GraphicUtils.drawDottedDashLine(groupBorderGraphics, inputRect.x, borderY, inputRect.width, borderY);
                }
            }
            // Label the group, if there is room
            double groupHeight = rows.size() * h;
            if (groupHeight > GROUP_LABEL_HEIGHT + 2) {
                String groupName = entry.getKey();
                Graphics2D g = context.getGraphics2D("LABEL");
                FontMetrics fm = g.getFontMetrics();
                Rectangle2D stringBouds = fm.getStringBounds(groupName, g);
                Rectangle rect = new Rectangle(inputRect.x, (int) yGroup,
                        (int) stringBouds.getWidth() + 10, (int) stringBouds.getHeight());
                GraphicUtils.drawVerticallyCenteredText(groupName, 5, rect, g, false, true);
            }
        }
        y += GROUP_MARGIN;
    }
    // Bottom border of the alignments section.
    final int bottom = inputRect.y + inputRect.height;
    groupBorderGraphics.drawLine(inputRect.x, bottom, inputRect.width, bottom);
}

// Per-frame cache of insertion marker hit-test rectangles.
private List<InsertionInterval> getInsertionIntervals(ReferenceFrame frame) {
    List<InsertionInterval> insertionIntervals = insertionIntervalsMap.computeIfAbsent(frame, k -> new ArrayList<>());
    return insertionIntervals;
}

/**
 * Draw a downward-pointing triangle in the insertion row for each insertion
 * marker, recording its screen rectangle for click handling.
 */
private void renderInsertionIntervals(RenderContext context, Rectangle rect) {
    // Might be offscreen
    if (!context.getVisibleRect().intersects(rect)) return;
    List<InsertionMarker> intervals = context.getInsertionMarkers();
    if (intervals == null) return;
    InsertionMarker selected = InsertionManager.getInstance().getSelectedInsertion(context.getChr());
    // Triangle half-width, proportional to the row height.
    int w = (int) ((1.41 * rect.height) / 2);
    boolean hideSmallIndels = renderOptions.isHideSmallIndels();
    int smallIndelThreshold = renderOptions.getSmallIndelThreshold();
    List<InsertionInterval> insertionIntervals = getInsertionIntervals(context.getReferenceFrame());
    insertionIntervals.clear();
    for (InsertionMarker insertionMarker : intervals) {
        if (hideSmallIndels && insertionMarker.size < smallIndelThreshold) continue;
        final double scale = context.getScale();
        final double origin = context.getOrigin();
        int midpoint = (int) ((insertionMarker.position - origin) / scale);
        int x0 = midpoint - w;
        int x1 = midpoint + w;
        Rectangle iRect = new Rectangle(x0 + context.translateX, rect.y, 2 * w, rect.height);
        insertionIntervals.add(new InsertionInterval(iRect, insertionMarker));
        // Selected insertion is highlighted translucent red; others purple.
        Color c = (selected != null && selected.position == insertionMarker.position)
                ? new Color(200, 0, 0, 80) : AlignmentRenderer.purple;
        Graphics2D g = context.getGraphic2DForColor(c);
        g.fillPolygon(new Polygon(new int[]{x0, x1, midpoint},
                new int[]{rect.y, rect.y, rect.y + rect.height}, 3));
    }
}

/**
 * Render the expanded view of a single selected insertion across all rows.
 */
public void renderExpandedInsertion(InsertionMarker insertionMarker, RenderContext context, Rectangle inputRect) {
    boolean leaveMargin = getDisplayMode() != DisplayMode.SQUISHED;
    // Insertion interval
    Graphics2D g = context.getGraphic2DForColor(Color.red);
    Rectangle iRect = new Rectangle(inputRect.x, insertionRect.y, inputRect.width, insertionRect.height);
    g.fill(iRect);
    List<InsertionInterval> insertionIntervals = getInsertionIntervals(context.getReferenceFrame());
    iRect.x += context.translateX;
    insertionIntervals.add(new InsertionInterval(iRect, insertionMarker));
    // Skip past the downsample and insertion rows to the alignment section.
    inputRect.y += DS_MARGIN_0 + DOWNAMPLED_ROW_HEIGHT + DS_MARGIN_0 + INSERTION_ROW_HEIGHT + DS_MARGIN_2;
    //log.debug("Render features");
    final AlignmentInterval loadedInterval = dataManager.getLoadedInterval(context.getReferenceFrame(), true);
    PackedAlignments groups = dataManager.getGroups(loadedInterval, renderOptions);
    if (groups == null) {
        //Assume we are still loading.
        //This might not always be true
        return;
    }
    Rectangle visibleRect = context.getVisibleRect();
    // Divide rectangle into equal height levels
    double y = inputRect.getY() - 3;
    double h;
    if (getDisplayMode() == DisplayMode.EXPANDED) {
        h = expandedHeight;
    } else if (getDisplayMode() == DisplayMode.COLLAPSED) {
        h = collapsedHeight;
    } else {
        int visHeight = visibleRect.height;
        int depth = dataManager.getNLevels();
        if (depth == 0) {
            squishedHeight = Math.min(maxSquishedHeight, Math.max(1, expandedHeight));
        } else {
            squishedHeight = Math.min(maxSquishedHeight, Math.max(1, Math.min(expandedHeight, visHeight / depth)));
        }
        h = squishedHeight;
    }
    for (Map.Entry<String, List<Row>> entry : groups.entrySet()) {
        // Loop through the alignment rows for this group
        List<Row> rows = entry.getValue();
        for (Row row : rows) {
            if ((visibleRect != null && y > visibleRect.getMaxY())) {
                return;
            }
            assert visibleRect != null;
            if (y + h > visibleRect.getY()) {
                Rectangle rowRectangle = new Rectangle(inputRect.x, (int) y, inputRect.width, (int) h);
                renderer.renderExpandedInsertion(insertionMarker, row.alignments, context, rowRectangle, leaveMargin);
                row.y = y;
                row.h = h;
            }
            y += h;
        }
        y += GROUP_MARGIN;
    }
}

// Hit-test: find the insertion marker (if any) under pixel (x, y) for a frame.
private InsertionInterval getInsertionInterval(ReferenceFrame frame, int x, int y) {
    List<InsertionInterval> insertionIntervals = getInsertionIntervals(frame);
    for (InsertionInterval i : insertionIntervals) {
        if (i.rect.contains(x, y)) return i;
    }
    return null;
}

/**
 * Sort alignment rows in every frame.  A null location sorts about each
 * frame's center.
 */
public void sortRows(final SortOption option, final Double location, final String tag, final boolean invertSort) {
    final List<ReferenceFrame> frames = FrameManager.getFrames();
    for (ReferenceFrame frame : frames) {
        final double actloc = location != null ?
location : frame.getCenter(); final AlignmentInterval interval = getDataManager().getLoadedInterval(frame); interval.sortRows(option, actloc, tag, invertSort); } } private void sortAlignmentTracks(SortOption option, String tag, boolean invertSort) { IGV.getInstance().sortAlignmentTracks(option, tag, invertSort); Collection<IGVPreferences> allPrefs = PreferencesManager.getAllPreferences(); for (IGVPreferences prefs : allPrefs) { prefs.put(SAM_SORT_OPTION, option.toString()); prefs.put(SAM_SORT_BY_TAG, tag); prefs.put(SAM_INVERT_SORT, invertSort); } } /** * Visually regroup alignments by the provided {@code GroupOption}. * * @param option * @see AlignmentDataManager#packAlignments */ public void groupAlignments(GroupOption option, String tag, Range pos) { if (option == GroupOption.TAG && tag != null) { renderOptions.setGroupByTag(tag); } if (option == GroupOption.BASE_AT_POS && pos != null) { renderOptions.setGroupByPos(pos); } renderOptions.setGroupByOption(option); dataManager.packAlignments(renderOptions); repaint(); } public void setBisulfiteContext(BisulfiteContext option) { renderOptions.bisulfiteContext = option; getPreferences().put(SAM_BISULFITE_CONTEXT, option.toString()); } public void setColorOption(ColorOption option) { renderOptions.setColorOption(option); } public void setColorByTag(String tag) { renderOptions.setColorByTag(tag); getPreferences(experimentType).put(SAM_COLOR_BY_TAG, tag); } public void setShadeAlignmentsOptions(ShadeAlignmentsOption option) { renderOptions.setShadeAlignmentsOption(option); } public void packAlignments() { dataManager.packAlignments(renderOptions); } /** * Copy the contents of the popup text to the system clipboard. 
*/ private void copyToClipboard(final TrackClickEvent e, Alignment alignment, double location, int mouseX) { if (alignment != null) { StringBuilder buf = new StringBuilder(); buf.append(alignment.getClipboardString(location, mouseX) .replace("<b>", "") .replace("</b>", "") .replace("<br>", "\n") .replace("<br/>", "\n") .replace("<hr>", "\n .replace("<hr/>", "\n buf.append("\n"); buf.append("Alignment start position = ").append(alignment.getChr()).append(":").append(alignment.getAlignmentStart() + 1); buf.append("\n"); buf.append(alignment.getReadSequence()); StringSelection stringSelection = new StringSelection(buf.toString()); Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard(); clipboard.setContents(stringSelection, null); } } /** * Jump to the mate region */ private void gotoMate(final TrackClickEvent te, Alignment alignment) { if (alignment != null) { ReadMate mate = alignment.getMate(); if (mate != null && mate.isMapped()) { setSelected(alignment); String chr = mate.getChr(); int start = mate.start - 1; // Don't change scale double range = te.getFrame().getEnd() - te.getFrame().getOrigin(); int newStart = (int) Math.max(0, (start + (alignment.getEnd() - alignment.getStart()) / 2 - range / 2)); int newEnd = newStart + (int) range; te.getFrame().jumpTo(chr, newStart, newEnd); te.getFrame().recordHistory(); } else { MessageUtils.showMessage("Alignment does not have mate, or it is not mapped."); } } } /** * Split the screen so the current view and mate region are side by side. * Need a better name for this method. */ private void splitScreenMate(final TrackClickEvent te, Alignment alignment) { if (alignment != null) { ReadMate mate = alignment.getMate(); if (mate != null && mate.isMapped()) { setSelected(alignment); String mateChr = mate.getChr(); int mateStart = mate.start - 1; ReferenceFrame frame = te.getFrame(); String locus1 = frame.getFormattedLocusString(); // Generate a locus string for the read mate. 
Keep the window width (in base pairs) == to the current range Range range = frame.getCurrentRange(); int length = range.getLength(); int s2 = Math.max(0, mateStart - length / 2); int e2 = s2 + length; String startStr = String.valueOf(s2); String endStr = String.valueOf(e2); String mateLocus = mateChr + ":" + startStr + "-" + endStr; Session currentSession = IGV.getInstance().getSession(); List<String> loci; if (FrameManager.isGeneListMode()) { loci = new ArrayList<>(FrameManager.getFrames().size()); for (ReferenceFrame ref : FrameManager.getFrames()) { //If the frame-name is a locus, we use it unaltered //Don't want to reprocess, easy to get off-by-one String name = ref.getName(); if (Locus.fromString(name) != null) { loci.add(name); } else { loci.add(ref.getFormattedLocusString()); } } loci.add(mateLocus); } else { loci = Arrays.asList(locus1, mateLocus); } StringBuilder listName = new StringBuilder(); for (String s : loci) { listName.append(s + " "); } GeneList geneList = new GeneList(listName.toString(), loci, false); currentSession.setCurrentGeneList(geneList); Comparator<String> geneListComparator = (n0, n1) -> { ReferenceFrame f0 = FrameManager.getFrame(n0); ReferenceFrame f1 = FrameManager.getFrame(n1); String chr0 = f0 == null ? "" : f0.getChrName(); String chr1 = f1 == null ? "" : f1.getChrName(); int s0 = f0 == null ? 0 : f0.getCurrentRange().getStart(); int s1 = f1 == null ? 
0 : f1.getCurrentRange().getStart();
                int chrComp = ChromosomeNameComparator.get().compare(chr0, chr1);
                if (chrComp != 0) return chrComp;
                // NOTE(review): int subtraction can overflow for extreme values;
                // presumably safe for genomic coordinates -- confirm.
                return s0 - s1;
            };
            //Need to sort the frames by position
            currentSession.sortGeneList(geneListComparator);
            IGV.getInstance().resetFrames();
        } else {
            MessageUtils.showMessage("Alignment does not have mate, or it is not mapped.");
        }
    }
}

public boolean isLogNormalized() {
    return false;
}

// Alignment tracks do not contribute a region score.
public float getRegionScore(String chr, int start, int end, int zoom, RegionScoreType type, String frameName) {
    return 0.0f;
}

/**
 * Tooltip text for the given position: downsample-row info, insertion-marker
 * info, or the alignment popup text -- whichever section the mouse is over.
 */
public String getValueStringAt(String chr, double position, int mouseX, int mouseY, ReferenceFrame frame) {
    if (downsampleRect != null && mouseY > downsampleRect.y && mouseY <= downsampleRect.y + downsampleRect.height) {
        AlignmentInterval loadedInterval = dataManager.getLoadedInterval(frame);
        if (loadedInterval == null) {
            return null;
        } else {
            List<DownsampledInterval> intervals = loadedInterval.getDownsampledIntervals();
            DownsampledInterval interval = FeatureUtils.getFeatureAt(position, 0, intervals);
            if (interval != null) {
                return interval.getValueString();
            }
            return null;
        }
    } else {
        InsertionInterval insertionInterval = getInsertionInterval(frame, mouseX, mouseY);
        if (insertionInterval != null) {
            return "Insertions (" + insertionInterval.insertionMarker.size + " bases)";
        } else {
            Alignment feature = getAlignmentAt(position, mouseY, frame);
            if (feature != null) {
                return feature.getAlignmentValueString(position, mouseX, renderOptions);
            }
        }
    }
    return null;
}

// Resolve the alignment under a click event, or null if no frame / none there.
private Alignment getAlignment(final TrackClickEvent te) {
    MouseEvent e = te.getMouseEvent();
    final ReferenceFrame frame = te.getFrame();
    if (frame == null) {
        return null;
    }
    final double location = frame.getChromosomePosition(e.getX());
    return getAlignmentAt(location, e.getY(), frame);
}

// Find the alignment at (genomic position, pixel y) using the row geometry
// recorded during rendering.
private Alignment getAlignmentAt(double position, int y, ReferenceFrame frame) {
    if (alignmentsRect == null || dataManager == null) {
        return null; // <= not loaded yet
    }
    PackedAlignments groups = dataManager.getGroupedAlignmentsContaining(position, frame);
    if (groups == null || groups.isEmpty()) {
        return null;
    }
    for (List<Row> rows : groups.values()) {
        for (Row row : rows) {
            if (y >= row.y && y <= row.y + row.h) {
                List<Alignment> features = row.alignments;
                // No buffer for alignments, you must zoom in far enough for them to be visible
                int buffer = 0;
                return FeatureUtils.getFeatureAt(position, buffer, features);
            }
        }
    }
    return null;
}

/**
 * Get the most "specific" alignment at the specified location. Specificity refers to the smallest alignemnt
 * in a group that contains the location (i.e. if a group of linked alignments overlap take the smallest one).
 *
 * @param te
 * @return
 */
private Alignment getSpecficAlignment(TrackClickEvent te) {
    Alignment alignment = getAlignment(te);
    if (alignment != null) {
        final ReferenceFrame frame = te.getFrame();
        MouseEvent e = te.getMouseEvent();
        final double location = frame.getChromosomePosition(e.getX());
        if (alignment instanceof LinkedAlignment) {
            // Pick the smallest constituent alignment containing the location.
            Alignment sa = null;
            for (Alignment a : ((LinkedAlignment) alignment).alignments) {
                if (a.contains(location)) {
                    if (sa == null || (a.getAlignmentEnd() - a.getAlignmentStart() < sa.getAlignmentEnd() - sa.getAlignmentStart())) {
                        sa = a;
                    }
                }
            }
            alignment = sa;
        } else if (alignment instanceof PairedAlignment) {
            Alignment sa = null;
            if (((PairedAlignment) alignment).firstAlignment.contains(location)) {
                sa = ((PairedAlignment) alignment).firstAlignment;
            } else if (((PairedAlignment) alignment).secondAlignment.contains(location)) {
                sa = ((PairedAlignment) alignment).secondAlignment;
            }
            alignment = sa;
        }
    }
    return alignment;
}

@Override
public boolean handleDataClick(TrackClickEvent te) {
    MouseEvent e = te.getMouseEvent();
    // Cmd-click (Mac) / Ctrl-click toggles alignment selection.
    if (Globals.IS_MAC && e.isMetaDown() || (!Globals.IS_MAC && e.isControlDown())) {
        // Selection
        final ReferenceFrame frame = te.getFrame();
        if (frame != null) {
            selectAlignment(e, frame);
            IGV.getInstance().repaint(this);
            return true;
        }
    }
    InsertionInterval
insertionInterval = getInsertionInterval(te.getFrame(), te.getMouseEvent().getX(), te.getMouseEvent().getY());
    // Clicking an insertion marker toggles its selected state and notifies listeners.
    if (insertionInterval != null) {
        final String chrName = te.getFrame().getChrName();
        InsertionMarker currentSelection = InsertionManager.getInstance().getSelectedInsertion(chrName);
        if (currentSelection != null && currentSelection.position == insertionInterval.insertionMarker.position) {
            InsertionManager.getInstance().clearSelected();
        } else {
            InsertionManager.getInstance().setSelected(chrName, insertionInterval.insertionMarker.position);
        }
        IGVEventBus.getInstance().post(new InsertionSelectionEvent(insertionInterval.insertionMarker));
        return true;
    }
    if (IGV.getInstance().isShowDetailsOnClick()) {
        openTooltipWindow(te);
        return true;
    }
    return false;
}

// Toggle selection of the alignment under the mouse (by read name).
private void selectAlignment(MouseEvent e, ReferenceFrame frame) {
    double location = frame.getChromosomePosition(e.getX());
    Alignment alignment = this.getAlignmentAt(location, e.getY(), frame);
    if (alignment != null) {
        if (selectedReadNames.containsKey(alignment.getReadName())) {
            selectedReadNames.remove(alignment.getReadName());
        } else {
            setSelected(alignment);
        }
    }
}

private void setSelected(Alignment alignment) {
    Color c = readNamePalette.get(alignment.getReadName());
    selectedReadNames.put(alignment.getReadName(), c);
}

private void clearCaches() {
    if (dataManager != null) dataManager.clear();
    if (spliceJunctionTrack != null) spliceJunctionTrack.clear();
}

/**
 * Enable/disable "view as pairs", prompting to ungroup first if the current
 * grouping (by strand) is incompatible.
 */
public void setViewAsPairs(boolean vAP) {
    // TODO -- generalize this test to all incompatible pairings
    if (vAP && renderOptions.groupByOption == GroupOption.STRAND) {
        boolean ungroup = MessageUtils.confirm("\"View as pairs\" is incompatible with \"Group by strand\". Ungroup?");
        if (ungroup) {
            renderOptions.setGroupByOption(null);
        } else {
            return;
        }
    }
    dataManager.setViewAsPairs(vAP, renderOptions);
    repaint();
}

public enum ExperimentType {OTHER, RNA, BISULFITE, THIRD_GEN}

/**
 * Snapshot of the render options that linked-read view mutates, so they can
 * be restored when the view is undone.
 */
class RenderRollback {
    final ColorOption colorOption;
    final GroupOption groupByOption;
    final String groupByTag;
    final String colorByTag;
    final String linkByTag;
    final DisplayMode displayMode;
    final int expandedHeight;
    final boolean showGroupLine;

    RenderRollback(RenderOptions renderOptions, DisplayMode displayMode) {
        this.colorOption = renderOptions.colorOption;
        this.groupByOption = renderOptions.groupByOption;
        this.colorByTag = renderOptions.colorByTag;
        this.groupByTag = renderOptions.groupByTag;
        this.displayMode = displayMode;
        this.expandedHeight = AlignmentTrack.this.expandedHeight;
        this.showGroupLine = AlignmentTrack.this.showGroupLine;
        this.linkByTag = renderOptions.linkByTag;
    }

    void restore(RenderOptions renderOptions) {
        renderOptions.colorOption = this.colorOption;
        renderOptions.groupByOption = this.groupByOption;
        renderOptions.colorByTag = this.colorByTag;
        renderOptions.groupByTag = this.groupByTag;
        renderOptions.linkByTag = this.linkByTag;
        AlignmentTrack.this.expandedHeight = this.expandedHeight;
        AlignmentTrack.this.showGroupLine = this.showGroupLine;
        AlignmentTrack.this.setDisplayMode(this.displayMode);
    }
}

public boolean isRemoved() {
    return removed;
}

@Override
public boolean isVisible() {
    return super.isVisible() && !removed;
}

IGVPreferences getPreferences() {
    return getPreferences(experimentType);
}

/**
 * Preferences for the given experiment type (THIRD_GEN and RNA have their
 * own categories); falls back to the NULL category for IGV 2.4.
 */
public static IGVPreferences getPreferences(ExperimentType type) {
    try {
        // Disable experimentType preferences for 2.4
        if (Globals.VERSION.contains("2.4")) {
            return PreferencesManager.getPreferences(NULL_CATEGORY);
        } else {
            String prefKey = Constants.NULL_CATEGORY;
            if (type == ExperimentType.THIRD_GEN) {
                prefKey = Constants.THIRD_GEN;
            } else if (type == ExperimentType.RNA) {
                prefKey = Constants.RNA;
            }
            return PreferencesManager.getPreferences(prefKey);
} } catch (NullPointerException e) {
        // NOTE(review): catching NPE as control flow is an anti-pattern; this
        // duplicates the normal lookup path -- presumably guards a null
        // Globals.VERSION during tests. Confirm before refactoring.
        String prefKey = Constants.NULL_CATEGORY;
        if (type == ExperimentType.THIRD_GEN) {
            prefKey = Constants.THIRD_GEN;
        } else if (type == ExperimentType.RNA) {
            prefKey = Constants.RNA;
        }
        return PreferencesManager.getPreferences(prefKey);
    }
}

@Override
public void unload() {
    super.unload();
    if (dataManager != null) {
        dataManager.unsubscribe(this);
    }
    removed = true;
    setVisible(false);
}

private boolean isLinkedReads() {
    return renderOptions != null && renderOptions.isLinkedReads();
}

/**
 * Enter (or leave) linked-read view: link/color by the given tag, group by
 * haplotype when phased, and squish the display.
 */
private void setLinkedReadView(boolean linkedReads, String tag) {
    if (!linkedReads || isLinkedReadView()) {
        undoLinkedReadView();
    }
    renderOptions.setLinkedReads(linkedReads);
    if (linkedReads) {
        renderOptions.setLinkByTag(tag);
        renderOptions.setColorOption(ColorOption.TAG);
        renderOptions.setColorByTag(tag);
        if (dataManager.isPhased()) {
            renderOptions.setGroupByOption(GroupOption.TAG);
            renderOptions.setGroupByTag("HP");
        }
        showGroupLine = false;
        setDisplayMode(DisplayMode.SQUISHED);
    }
    dataManager.packAlignments(renderOptions);
    repaint();
}

/**
 * Detect if we are in linked-read view
 */
private boolean isLinkedReadView() {
    return renderOptions != null &&
            renderOptions.isLinkedReads() &&
            renderOptions.getLinkByTag() != null &&
            renderOptions.getColorOption() == ColorOption.TAG &&
            renderOptions.getColorByTag() != null;
}

/**
 * Link alignments by arbitrary tag, without the extra settings applied to link-read-view
 *
 * @param linkReads
 * @param tag
 */
private void setLinkByTag(boolean linkReads, String tag) {
    if (isLinkedReadView()) {
        undoLinkedReadView();
    }
    if (linkReads) {
        renderOptions.setLinkByTag(tag);
        if (renderOptions.getGroupByOption() == GroupOption.NONE) {
            renderOptions.setGroupByOption(GroupOption.LINKED);
        }
    } else {
        renderOptions.setLinkByTag(null);
        if (renderOptions.getGroupByOption() == GroupOption.LINKED) {
            renderOptions.setGroupByOption(GroupOption.NONE);
        }
    }
    renderOptions.setLinkedReads(linkReads);
    dataManager.packAlignments(renderOptions);
    repaint();
} private void undoLinkedReadView() { renderOptions.setLinkByTag(null); renderOptions.setColorOption(ColorOption.NONE); renderOptions.setColorByTag(null); renderOptions.setGroupByOption(GroupOption.NONE); renderOptions.setGroupByTag(null); showGroupLine = true; setDisplayMode(DisplayMode.EXPANDED); } private void sendPairsToCircularView(TrackClickEvent e) { List<ReferenceFrame> frames = e.getFrame() != null ? Arrays.asList(e.getFrame()) : FrameManager.getFrames(); List<Alignment> inView = new ArrayList<>(); for (ReferenceFrame frame : frames) { AlignmentInterval interval = AlignmentTrack.this.getDataManager().getLoadedInterval(frame); if (interval != null) { Iterator<Alignment> iter = interval.getAlignmentIterator(); Range r = frame.getCurrentRange(); while (iter.hasNext()) { Alignment a = iter.next(); if (a.getEnd() > r.getStart() && a.getStart() < r.getEnd()) { final boolean isDiscordantPair = a.isPaired() && a.getMate().isMapped() && (!a.getMate().getChr().equals(a.getChr()) || Math.abs(a.getInferredInsertSize()) > 10000); if (isDiscordantPair) { inView.add(a); } } } } Color chordColor = AlignmentTrack.this.getColor().equals(DEFAULT_ALIGNMENT_COLOR) ? Color.BLUE : AlignmentTrack.this.getColor(); CircularViewUtilities.sendAlignmentsToJBrowse(inView, AlignmentTrack.this.getName(), chordColor); } } private void sendSplitToCircularView(TrackClickEvent e) { List<ReferenceFrame> frames = e.getFrame() != null ? 
Arrays.asList(e.getFrame()) : FrameManager.getFrames(); List<Alignment> inView = new ArrayList<>(); for (ReferenceFrame frame : frames) { AlignmentInterval interval = AlignmentTrack.this.getDataManager().getLoadedInterval(frame); if (interval != null) { Iterator<Alignment> iter = interval.getAlignmentIterator(); Range r = frame.getCurrentRange(); while (iter.hasNext()) { Alignment a = iter.next(); if (a.getEnd() > r.getStart() && a.getStart() < r.getEnd() && a.getAttribute("SA") != null) { inView.add(a); } } } Color chordColor = AlignmentTrack.this.getColor().equals(DEFAULT_ALIGNMENT_COLOR) ? Color.BLUE : AlignmentTrack.this.getColor(); CircularViewUtilities.sendAlignmentsToJBrowse(inView, AlignmentTrack.this.getName(), chordColor); } } /** * Listener for deselecting one component when another is selected */ private static class Deselector implements ActionListener { private final JMenuItem toDeselect; private final JMenuItem parent; Deselector(JMenuItem parent, JMenuItem toDeselect) { this.parent = parent; this.toDeselect = toDeselect; } @Override public void actionPerformed(ActionEvent e) { if (this.parent.isSelected()) { this.toDeselect.setSelected(false); } } } private static class InsertionInterval { final Rectangle rect; final InsertionMarker insertionMarker; InsertionInterval(Rectangle rect, InsertionMarker insertionMarker) { this.rect = rect; this.insertionMarker = insertionMarker; } } /** * Popup menu class for AlignmentTrack. The menu gets instantiated from TrackPanelComponent on right-click in the * alignment track or its associated name panel. */ class PopupMenu extends IGVPopupMenu { PopupMenu(final TrackClickEvent e) { final MouseEvent me = e.getMouseEvent(); final ReferenceFrame frame = e.getFrame(); final Alignment clickedAlignment = (frame == null) ? 
null : getAlignmentAt(frame.getChromosomePosition(me.getX()), me.getY(), frame); // Title JLabel popupTitle = new JLabel(" " + AlignmentTrack.this.getName(), JLabel.CENTER); Font newFont = getFont().deriveFont(Font.BOLD, 12); popupTitle.setFont(newFont); add(popupTitle); // Circular view items -- optional if (PreferencesManager.getPreferences().getAsBoolean(CIRC_VIEW_ENABLED) && CircularViewUtilities.ping()) { addSeparator(); JMenuItem item = new JMenuItem("Add Discordant Pairs to Circular View"); item.setEnabled(dataManager.isPairedEnd()); add(item); item.addActionListener(ae -> AlignmentTrack.this.sendPairsToCircularView(e)); JMenuItem item2 = new JMenuItem("Add Split Reads to Circular View"); add(item2); item2.addActionListener(ae -> AlignmentTrack.this.sendSplitToCircularView(e)); } // Some generic items from TrackMenuUtils Collection<Track> tracks = List.of(AlignmentTrack.this); addSeparator(); add(TrackMenuUtils.getTrackRenameItem(tracks)); addCopyToClipboardItem(e, clickedAlignment); addSeparator(); JMenuItem item = new JMenuItem("Change Track Color..."); item.addActionListener(evt -> TrackMenuUtils.changeTrackColor(tracks)); add(item); // Experiment type (RNA, THIRD GEN, OTHER) addSeparator(); addExperimentTypeMenuItem(); if (experimentType == ExperimentType.THIRD_GEN) { addHaplotype(e); } // Linked read items addLinkedReadItems(); // Group, sort, color, shade, and pack addSeparator(); addGroupMenuItem(e); addSortMenuItem(); addColorByMenuItem(); addShadeAlignmentsMenuItem(); //addFilterMenuItem(); addPackMenuItem(); // Shading and mismatch items addSeparator(); addShadeBaseByMenuItem(); JMenuItem misMatchesItem = addShowMismatchesMenuItem(); JMenuItem showAllItem = addShowAllBasesMenuItem(); misMatchesItem.addActionListener(new Deselector(misMatchesItem, showAllItem)); showAllItem.addActionListener(new Deselector(showAllItem, misMatchesItem)); // Paired end items addSeparator(); addViewAsPairsMenuItem(); if (clickedAlignment != null) { addGoToMate(e, 
clickedAlignment); showMateRegion(e, clickedAlignment); } addInsertSizeMenuItem(); // Third gen (primarily) items addSeparator(); addThirdGenItems(); // Display mode items addSeparator(); TrackMenuUtils.addDisplayModeItems(tracks, this); // Select alignment items addSeparator(); addSelectByNameItem(); addClearSelectionsMenuItem(); // Copy items addSeparator(); addCopySequenceItems(e); addConsensusSequence(e); // Blat items addSeparator(); addBlatItem(e); addBlatClippingItems(e); // Insertion items, only if clicked over an insertion AlignmentBlock insertion = getInsertion(clickedAlignment, e.getMouseEvent().getX()); if (insertion != null) { addSeparator(); addInsertionItems(insertion); } // Sashimi plot, probably should be depdenent on experimentType (RNA) addSeparator(); JMenuItem sashimi = new JMenuItem("Sashimi Plot"); sashimi.addActionListener(e1 -> SashimiPlot.openSashimiPlot()); add(sashimi); // Show alignments, coverage, splice junctions addSeparator(); addShowItems(); } private void addHaplotype(TrackClickEvent e) { JMenuItem item = new JMenuItem("Cluster (phase) alignments"); final ReferenceFrame frame; if (e.getFrame() == null && FrameManager.getFrames().size() == 1) { frame = FrameManager.getFrames().get(0); } else { frame = e.getFrame(); } item.setEnabled(frame != null); add(item); item.addActionListener(ae -> { //This shouldn't ever be true, but just in case it's more user-friendly if (frame == null) { MessageUtils.showMessage("Unknown region bounds"); return; } String nString = MessageUtils.showInputDialog("Enter the number of clusters", String.valueOf(AlignmentTrack.nClusters)); if (nString == null) { return; } try { AlignmentTrack.nClusters = Integer.parseInt(nString); } catch (NumberFormatException e1) { MessageUtils.showMessage("Clusters size must be an integer"); return; } final int start = (int) frame.getOrigin(); final int end = (int) frame.getEnd(); AlignmentInterval interval = dataManager.getLoadedInterval(frame); HaplotypeUtils haplotypeUtils 
= new HaplotypeUtils(interval, AlignmentTrack.this.genome); boolean success = haplotypeUtils.clusterAlignments(frame.getChrName(), start, end, AlignmentTrack.nClusters); if (success) { AlignmentTrack.this.groupAlignments(GroupOption.HAPLOTYPE, null, null); AlignmentTrack.this.repaint(); } //dataManager.sortRows(SortOption.HAPLOTYPE, frame, (end + start) / 2, null); //AlignmentTrack.repaint(); }); } /** * Item for exporting "consensus" sequence of region, based on loaded alignments. * * @param e */ private void addConsensusSequence(TrackClickEvent e) { JMenuItem item = new JMenuItem("Copy consensus sequence"); final ReferenceFrame frame; if (e.getFrame() == null && FrameManager.getFrames().size() == 1) { frame = FrameManager.getFrames().get(0); } else { frame = e.getFrame(); } item.setEnabled(frame != null); add(item); item.addActionListener(ae -> { if (frame == null) { // Should never happen MessageUtils.showMessage("Unknown region bounds, cannot export consensus"); return; } final int start = (int) frame.getOrigin(); final int end = (int) frame.getEnd(); if ((end - start) > 1000000) { MessageUtils.showMessage("Cannot export region more than 1 Megabase"); return; } AlignmentInterval interval = dataManager.getLoadedInterval(frame); AlignmentCounts counts = interval.getCounts(); String text = PFMExporter.createPFMText(counts, frame.getChrName(), start, end); StringUtils.copyTextToClipboard(text); }); } private JMenu getBisulfiteContextMenuItem(ButtonGroup group) { JMenu bisulfiteContextMenu = new JMenu("bisulfite mode"); JRadioButtonMenuItem nomeESeqOption = null; boolean showNomeESeq = getPreferences().getAsBoolean(SAM_NOMESEQ_ENABLED); if (showNomeESeq) { nomeESeqOption = new JRadioButtonMenuItem("NOMe-seq bisulfite mode"); nomeESeqOption.setSelected(renderOptions.getColorOption() == ColorOption.NOMESEQ); nomeESeqOption.addActionListener(aEvt -> { setColorOption(ColorOption.NOMESEQ); AlignmentTrack.this.repaint(); }); group.add(nomeESeqOption); } for (final 
BisulfiteContext item : BisulfiteContext.values()) {
    String optionStr = getBisulfiteContextPubStr(item);
    JRadioButtonMenuItem m1 = new JRadioButtonMenuItem(optionStr);
    m1.setSelected(renderOptions.bisulfiteContext == item);
    m1.addActionListener(aEvt -> {
        setColorOption(ColorOption.BISULFITE);
        setBisulfiteContext(item);
        AlignmentTrack.this.repaint();
    });
    bisulfiteContextMenu.add(m1);
    group.add(m1);
}

if (nomeESeqOption != null) {
    bisulfiteContextMenu.add(nomeESeqOption);
}

return bisulfiteContextMenu;
}

/** Adds a "Select by name..." item that flags all reads matching an entered read name. */
void addSelectByNameItem() {
    JMenuItem item = new JMenuItem("Select by name...");
    item.addActionListener(aEvt -> {
        String val = MessageUtils.showInputDialog("Enter read name: ");
        if (val != null && val.trim().length() > 0) {
            // Assign the read a color from the palette and repaint to highlight it.
            selectedReadNames.put(val, readNamePalette.get(val));
            AlignmentTrack.this.repaint();
        }
    });
    add(item);
}

/** Adds the "Experiment Type" sub-menu (Other / RNA / 3rd Gen) as mutually-exclusive checkboxes. */
void addExperimentTypeMenuItem() {
    Map<String, ExperimentType> mappings = new LinkedHashMap<>();
    mappings.put("Other", ExperimentType.OTHER);
    mappings.put("RNA", ExperimentType.RNA);
    mappings.put("3rd Gen", ExperimentType.THIRD_GEN);
    //mappings.put("Bisulfite", ExperimentType.BISULFITE);
    JMenu groupMenu = new JMenu("Experiment Type");
    ButtonGroup group = new ButtonGroup();
    for (Map.Entry<String, ExperimentType> el : mappings.entrySet()) {
        JCheckBoxMenuItem mi = getExperimentTypeMenuItem(el.getKey(), el.getValue());
        groupMenu.add(mi);
        group.add(mi);
    }
    add(groupMenu);
}

/** Builds one checkbox item for {@link #addExperimentTypeMenuItem()}; checked if currently active. */
private JCheckBoxMenuItem getExperimentTypeMenuItem(String label, final ExperimentType option) {
    JCheckBoxMenuItem mi = new JCheckBoxMenuItem(label);
    mi.setSelected(AlignmentTrack.this.getExperimentType() == option);
    mi.addActionListener(aEvt -> AlignmentTrack.this.setExperimentType(option));
    return mi;
}

/**
 * Builds the "Group alignments by" sub-menu: one checkbox per {@link GroupOption}, plus
 * group-by-tag, group-by-base-at-clicked-position, and a "Reverse group order" toggle.
 */
void addGroupMenuItem(final TrackClickEvent te) {
    final MouseEvent me = te.getMouseEvent();
    ReferenceFrame frame = te.getFrame();
    if (frame == null) {
        frame = FrameManager.getDefaultFrame();  // Clicked over name panel, not a specific frame
    }
    final Range range = frame.getCurrentRange();
    final String chrom = range.getChr();
    final int chromStart = (int) frame.getChromosomePosition(me.getX());

    JMenu groupMenu = new JMenu("Group alignments by");
    ButtonGroup group = new ButtonGroup();

    GroupOption[] groupOptions = {
            GroupOption.NONE, GroupOption.STRAND, GroupOption.FIRST_OF_PAIR_STRAND, GroupOption.SAMPLE,
            GroupOption.LIBRARY, GroupOption.READ_GROUP, GroupOption.MATE_CHROMOSOME,
            GroupOption.PAIR_ORIENTATION, GroupOption.SUPPLEMENTARY, GroupOption.REFERENCE_CONCORDANCE,
            GroupOption.MOVIE, GroupOption.ZMW, GroupOption.READ_ORDER, GroupOption.LINKED, GroupOption.PHASE,
            GroupOption.MAPPING_QUALITY
    };

    for (final GroupOption option : groupOptions) {
        JCheckBoxMenuItem mi = new JCheckBoxMenuItem(option.label);
        mi.setSelected(renderOptions.getGroupByOption() == option);
        mi.addActionListener(aEvt -> {
            groupAlignments(option, null, null);
        });
        groupMenu.add(mi);
        group.add(mi);
    }

    JCheckBoxMenuItem tagOption = new JCheckBoxMenuItem("tag");
    tagOption.addActionListener(aEvt -> {
        String tag = MessageUtils.showInputDialog("Enter tag", renderOptions.getGroupByTag());
        if (tag != null) {
            if (tag.trim().length() > 0) {
                groupAlignments(GroupOption.TAG, tag, null);
            } else {
                // Empty tag clears tag grouping entirely.
                groupAlignments(GroupOption.NONE, null, null);
            }
        }
    });
    tagOption.setSelected(renderOptions.getGroupByOption() == GroupOption.TAG);
    groupMenu.add(tagOption);
    group.add(tagOption);

    Range oldGroupByPos = renderOptions.getGroupByPos();
    if (oldGroupByPos != null && renderOptions.getGroupByOption() == GroupOption.BASE_AT_POS) {
        // already sorted by the base at a position
        JCheckBoxMenuItem oldGroupByPosOption = new JCheckBoxMenuItem("base at " + oldGroupByPos.getChr() +
                ":" + Globals.DECIMAL_FORMAT.format(1 + oldGroupByPos.getStart()));
        groupMenu.add(oldGroupByPosOption);
        oldGroupByPosOption.setSelected(true);
    }

    if (renderOptions.getGroupByOption() != GroupOption.BASE_AT_POS || oldGroupByPos == null ||
            !oldGroupByPos.getChr().equals(chrom) || oldGroupByPos.getStart() != chromStart) {
        // not already sorted by this position
        JCheckBoxMenuItem newGroupByPosOption = new JCheckBoxMenuItem("base at " + chrom +
                ":" + Globals.DECIMAL_FORMAT.format(1 + chromStart));
        newGroupByPosOption.addActionListener(aEvt -> {
            Range groupByPos = new Range(chrom, chromStart, chromStart + 1);
            groupAlignments(GroupOption.BASE_AT_POS, null, groupByPos);
        });
        groupMenu.add(newGroupByPosOption);
        group.add(newGroupByPosOption);
    }

    groupMenu.add(new JPopupMenu.Separator());
    JCheckBoxMenuItem invertGroupNameSortingOption = new JCheckBoxMenuItem("Reverse group order");
    invertGroupNameSortingOption.setSelected(renderOptions.invertGroupSorting);
    invertGroupNameSortingOption.addActionListener(aEvt -> {
        renderOptions.invertGroupSorting = !renderOptions.invertGroupSorting;
        dataManager.packAlignments(renderOptions);
        AlignmentTrack.this.repaint();
    });
    groupMenu.add(invertGroupNameSortingOption);

    add(groupMenu);
}

/**
 * Sort menu
 */
void addSortMenuItem() {
    JMenu sortMenu = new JMenu("Sort alignments by");
    //LinkedHashMap is supposed to preserve order of insertion for iteration
    Map<String, SortOption> mappings = new LinkedHashMap<>();

    mappings.put("start location", SortOption.START);
    mappings.put("read strand", SortOption.STRAND);
    mappings.put("first-of-pair strand", SortOption.FIRST_OF_PAIR_STRAND);
    mappings.put("base", SortOption.NUCLEOTIDE);
    mappings.put("mapping quality", SortOption.QUALITY);
    mappings.put("sample", SortOption.SAMPLE);
    mappings.put("read group", SortOption.READ_GROUP);
    mappings.put("read order", SortOption.READ_ORDER);
    mappings.put("read name", SortOption.READ_NAME);
    mappings.put("aligned read length", SortOption.ALIGNED_READ_LENGTH);
    // mappings.put("supplementary flag", SortOption.SUPPLEMENTARY);

    // Pair-specific sorts only make sense for paired-end data.
    if (dataManager.isPairedEnd()) {
        mappings.put("insert size", SortOption.INSERT_SIZE);
        mappings.put("chromosome of mate", SortOption.MATE_CHR);
    }

    for (Map.Entry<String, SortOption> el :
mappings.entrySet()) {
    JMenuItem mi = new JMenuItem(el.getKey());
    mi.addActionListener(aEvt -> {
        final SortOption option = el.getValue();
        renderOptions.setSortOption(option);
        sortAlignmentTracks(option, null, renderOptions.isInvertSorting());
    });
    sortMenu.add(mi);
}

JMenuItem tagOption = new JMenuItem("tag");
tagOption.addActionListener(aEvt -> {
    String tag = MessageUtils.showInputDialog("Enter tag", renderOptions.getSortByTag());
    if (tag != null && tag.trim().length() > 0) {
        renderOptions.setSortByTag(tag);
        renderOptions.setSortOption((SortOption.TAG));
        sortAlignmentTracks(SortOption.TAG, tag, renderOptions.isInvertSorting());
    }
});
sortMenu.add(tagOption);

sortMenu.add(new JPopupMenu.Separator());
JCheckBoxMenuItem invertGroupNameSortingOption = new JCheckBoxMenuItem("reverse sorting");
invertGroupNameSortingOption.setSelected(renderOptions.isInvertSorting());
invertGroupNameSortingOption.addActionListener(aEvt -> {
    final boolean updatedInvertSorting = !renderOptions.isInvertSorting();
    renderOptions.setInvertSorting(updatedInvertSorting);
    // Re-sort immediately with the current option/tag and the new direction.
    sortAlignmentTracks(renderOptions.getSortOption(), renderOptions.getSortByTag(), updatedInvertSorting);
});
sortMenu.add(invertGroupNameSortingOption);

add(sortMenu);
}

/**
 * Adds a "Filter alignments by" menu with a mapping-quality entry.
 * NOTE(review): the parsed quality value is currently discarded (see TODO below) — the
 * filter is not applied yet; this item only validates the input.
 */
public void addFilterMenuItem() {
    JMenu filterMenu = new JMenu("Filter alignments by");
    JMenuItem mi = new JMenuItem("mapping quality");
    mi.addActionListener(aEvt -> {
        // TODO -- use current value for default
        String defString = PreferencesManager.getPreferences().get(SAM_QUALITY_THRESHOLD);
        if (defString == null) defString = "";
        String mqString = MessageUtils.showInputDialog("Minimum mapping quality: ", defString);
        try {
            int mq = Integer.parseInt(mqString);
            // TODO do something with this
            //System.out.println(mq);
        } catch (NumberFormatException e) {
            MessageUtils.showMessage("Mapping quality must be an integer");
        }
    });
    filterMenu.add(mi);
    add(filterMenu);
}

/** Builds one color-by radio item; selecting it sets the color option and repaints. */
private JRadioButtonMenuItem getColorMenuItem(String label, final ColorOption option) {
    JRadioButtonMenuItem mi = new JRadioButtonMenuItem(label);
    mi.setSelected(renderOptions.getColorOption() == option);
    mi.addActionListener(aEvt -> {
        setColorOption(option);
        AlignmentTrack.this.repaint();
    });
    return mi;
}

/**
 * Builds the "Color alignments by" sub-menu.  Pair-related options are only offered for
 * paired-end data, YC-tag coloring only when YC tags are present.  Also includes color-by-tag,
 * the bisulfite sub-menu, and base-modification options.
 */
void addColorByMenuItem() {
    JMenu colorMenu = new JMenu("Color alignments by");

    ButtonGroup group = new ButtonGroup();
    Map<String, ColorOption> mappings = new LinkedHashMap<>();

    mappings.put("none", ColorOption.NONE);

    if (dataManager.hasYCTags()) {
        mappings.put("YC tag", ColorOption.YC_TAG);
    }

    if (dataManager.isPairedEnd()) {
        mappings.put("insert size", ColorOption.INSERT_SIZE);
        mappings.put("pair orientation", ColorOption.PAIR_ORIENTATION);
        mappings.put("insert size and pair orientation", ColorOption.UNEXPECTED_PAIR);
    }

    mappings.put("read strand", ColorOption.READ_STRAND);

    if (dataManager.isPairedEnd()) {
        mappings.put("first-of-pair strand", ColorOption.FIRST_OF_PAIR_STRAND);
    }

    mappings.put("read group", ColorOption.READ_GROUP);

    if (dataManager.isPairedEnd()) {
        mappings.put("read order", ColorOption.READ_ORDER);
    }

    mappings.put("sample", ColorOption.SAMPLE);
    mappings.put("library", ColorOption.LIBRARY);
    mappings.put("movie", ColorOption.MOVIE);
    mappings.put("ZMW", ColorOption.ZMW);

    for (Map.Entry<String, ColorOption> el : mappings.entrySet()) {
        JRadioButtonMenuItem mi = getColorMenuItem(el.getKey(), el.getValue());
        colorMenu.add(mi);
        group.add(mi);
    }

    JRadioButtonMenuItem tagOption = new JRadioButtonMenuItem("tag");
    tagOption.setSelected(renderOptions.getColorOption() == ColorOption.TAG);
    tagOption.addActionListener(aEvt -> {
        setColorOption(ColorOption.TAG);
        String tag = MessageUtils.showInputDialog("Enter tag", renderOptions.getColorByTag());
        if (tag != null && tag.trim().length() > 0) {
            setColorByTag(tag);
            AlignmentTrack.this.repaint();
        }
    });
    colorMenu.add(tagOption);
    group.add(tagOption);

    colorMenu.add(getBisulfiteContextMenuItem(group));

    // Base modifications
    mappings.clear();
    mappings.put("base modification", ColorOption.BASE_MODIFICATION);
    mappings.put("base modification (5mC)", ColorOption.BASE_MODIFICATION_5MC);
    mappings.put("base modification (all C)", ColorOption.BASE_MODIFICATION_C);
    colorMenu.addSeparator();
    for (Map.Entry<String, ColorOption> el : mappings.entrySet()) {
        JRadioButtonMenuItem mi = getColorMenuItem(el.getKey(), el.getValue());
        colorMenu.add(mi);
        group.add(mi);
    }

    add(colorMenu);
}

/** Builds the "Shade alignments by" sub-menu, one radio item per shading option. */
void addShadeAlignmentsMenuItem(){
    JMenu shadeMenu = new JMenu("Shade alignments by");
    for( ShadeAlignmentsOption option: ShadeAlignmentsOption.values()) {
        JRadioButtonMenuItem mi = new JRadioButtonMenuItem(option.label);
        mi.setSelected(renderOptions.getShadeAlignmentsOption() == option);
        mi.addActionListener(aEvt -> {
            setShadeAlignmentsOptions(option);
            AlignmentTrack.this.repaint();
        });
        shadeMenu.add(mi);
    }
    add(shadeMenu);
}

/** Adds a "Re-pack alignments" item; runs the re-pack on the EDT. */
void addPackMenuItem() {
    JMenuItem item = new JMenuItem("Re-pack alignments");
    item.addActionListener(aEvt -> UIUtilities.invokeOnEventThread(() -> {
        IGV.getInstance().packAlignmentTracks();
        AlignmentTrack.this.repaint();
    }));
    add(item);
}

/** Adds "Copy read details to clipboard"; disabled when there is no frame or no alignment. */
void addCopyToClipboardItem(final TrackClickEvent te, Alignment alignment) {
    final MouseEvent me = te.getMouseEvent();
    JMenuItem item = new JMenuItem("Copy read details to clipboard");
    final ReferenceFrame frame = te.getFrame();
    if (frame == null) {
        item.setEnabled(false);
    } else {
        final double location = frame.getChromosomePosition(me.getX());
        item.addActionListener(aEvt -> copyToClipboard(te, alignment, location, me.getX()));
        if (alignment == null) {
            item.setEnabled(false);
        }
    }
    add(item);
}

/** Adds the "View as pairs" toggle; only enabled for paired-end data. */
void addViewAsPairsMenuItem() {
    final JMenuItem item = new JCheckBoxMenuItem("View as pairs");
    item.setSelected(renderOptions.isViewPairs());
    item.addActionListener(aEvt -> {
        boolean viewAsPairs = item.isSelected();
        setViewAsPairs(viewAsPairs);
    });
    item.setEnabled(dataManager.isPairedEnd());
    add(item);
}

void addGoToMate(final TrackClickEvent te, Alignment
alignment) { // Change track height by attribute JMenuItem item = new JMenuItem("Go to mate"); MouseEvent e = te.getMouseEvent(); final ReferenceFrame frame = te.getFrame(); if (frame == null) { item.setEnabled(false); } else { item.addActionListener(aEvt -> gotoMate(te, alignment)); if (alignment == null || !alignment.isPaired() || !alignment.getMate().isMapped()) { item.setEnabled(false); } } add(item); } void showMateRegion(final TrackClickEvent te, Alignment clickedAlignment) { // Change track height by attribute JMenuItem item = new JMenuItem("View mate region in split screen"); MouseEvent e = te.getMouseEvent(); final ReferenceFrame frame = te.getFrame(); if (frame == null) { item.setEnabled(false); } else { double location = frame.getChromosomePosition(e.getX()); if (clickedAlignment instanceof PairedAlignment) { Alignment first = ((PairedAlignment) clickedAlignment).getFirstAlignment(); Alignment second = ((PairedAlignment) clickedAlignment).getSecondAlignment(); if (first.contains(location)) { clickedAlignment = first; } else if (second.contains(location)) { clickedAlignment = second; } else { clickedAlignment = null; } } final Alignment alignment = clickedAlignment; item.addActionListener(aEvt -> splitScreenMate(te, alignment)); if (alignment == null || !alignment.isPaired() || !alignment.getMate().isMapped()) { item.setEnabled(false); } } add(item); } void addClearSelectionsMenuItem() { // Change track height by attribute JMenuItem item = new JMenuItem("Clear selections"); item.addActionListener(aEvt -> { selectedReadNames.clear(); AlignmentTrack.this.repaint(); }); add(item); } JMenuItem addShowAllBasesMenuItem() { // Change track height by attribute final JMenuItem item = new JCheckBoxMenuItem("Show all bases"); if (renderOptions.getColorOption() == ColorOption.BISULFITE || renderOptions.getColorOption() == ColorOption.NOMESEQ) { // item.setEnabled(false); } else { item.setSelected(renderOptions.isShowAllBases()); } item.addActionListener(aEvt -> { 
renderOptions.setShowAllBases(item.isSelected()); AlignmentTrack.this.repaint(); }); add(item); return item; } void addQuickConsensusModeItem() { // Change track height by attribute final JMenuItem item = new JCheckBoxMenuItem("Quick consensus mode"); item.setSelected(renderOptions.isQuickConsensusMode()); item.addActionListener(aEvt -> { renderOptions.setQuickConsensusMode(item.isSelected()); AlignmentTrack.this.repaint(); }); add(item); } JMenuItem addShowMismatchesMenuItem() { // Change track height by attribute final JMenuItem item = new JCheckBoxMenuItem("Show mismatched bases"); item.setSelected(renderOptions.isShowMismatches()); item.addActionListener(aEvt -> { renderOptions.setShowMismatches(item.isSelected()); AlignmentTrack.this.repaint(); }); add(item); return item; } void addInsertSizeMenuItem() { // Change track height by attribute final JMenuItem item = new JCheckBoxMenuItem("Set insert size options ..."); item.addActionListener(aEvt -> { InsertSizeSettingsDialog dlg = new InsertSizeSettingsDialog(IGV.getInstance().getMainFrame(), renderOptions); dlg.setModal(true); dlg.setVisible(true); if (!dlg.isCanceled()) { renderOptions.setComputeIsizes(dlg.isComputeIsize()); renderOptions.setMinInsertSizePercentile(dlg.getMinPercentile()); renderOptions.setMaxInsertSizePercentile(dlg.getMaxPercentile()); if (renderOptions.computeIsizes) { dataManager.updatePEStats(renderOptions); } renderOptions.setMinInsertSize(dlg.getMinThreshold()); renderOptions.setMaxInsertSize(dlg.getMaxThreshold()); AlignmentTrack.this.repaint(); } }); item.setEnabled(dataManager.isPairedEnd()); add(item); } void addShadeBaseByMenuItem() { final JMenuItem item = new JCheckBoxMenuItem("Shade base by quality"); item.setSelected(renderOptions.getShadeBasesOption()); item.addActionListener(aEvt -> UIUtilities.invokeOnEventThread(() -> { renderOptions.setShadeBasesOption(item.isSelected()); AlignmentTrack.this.repaint(); })); add(item); } void addShowItems() { if (coverageTrack != null) { 
final JMenuItem item = new JCheckBoxMenuItem("Show Coverage Track"); item.setSelected(coverageTrack.isVisible()); item.setEnabled(!coverageTrack.isRemoved()); item.addActionListener(aEvt -> { getCoverageTrack().setVisible(item.isSelected()); IGV.getInstance().repaint(Arrays.asList(coverageTrack)); }); add(item); } if (spliceJunctionTrack != null) { final JMenuItem item = new JCheckBoxMenuItem("Show Splice Junction Track"); item.setSelected(spliceJunctionTrack.isVisible()); item.setEnabled(!spliceJunctionTrack.isRemoved()); item.addActionListener(aEvt -> { spliceJunctionTrack.setVisible(item.isSelected()); IGV.getInstance().repaint(Arrays.asList(spliceJunctionTrack)); }); add(item); } final JMenuItem alignmentItem = new JCheckBoxMenuItem("Show Alignment Track"); alignmentItem.setSelected(true); alignmentItem.addActionListener(e -> { AlignmentTrack.this.setVisible(alignmentItem.isSelected()); IGV.getInstance().repaint(Arrays.asList(AlignmentTrack.this)); }); // Disable if this is the only visible track if (!((coverageTrack != null && coverageTrack.isVisible()) || (spliceJunctionTrack != null && spliceJunctionTrack.isVisible()))) { alignmentItem.setEnabled(false); } add(alignmentItem); } void addCopySequenceItems(final TrackClickEvent te) { final JMenuItem item = new JMenuItem("Copy read sequence"); add(item); final Alignment alignment = getSpecficAlignment(te); if (alignment == null) { item.setEnabled(false); return; } final String seq = alignment.getReadSequence(); if (seq == null) { item.setEnabled(false); return; } item.addActionListener(aEvt -> StringUtils.copyTextToClipboard(seq)); /* Add a "Copy left clipped sequence" item if there is left clipping. 
*/ int minimumBlatLength = BlatClient.MINIMUM_BLAT_LENGTH; int[] clipping = SAMAlignment.getClipping(alignment.getCigarString()); if (clipping[1] > 0) { String lcSeq = getClippedSequence(alignment.getReadSequence(), alignment.getReadStrand(), 0, clipping[1]); final JMenuItem lccItem = new JMenuItem("Copy left-clipped sequence"); add(lccItem); lccItem.addActionListener(aEvt -> StringUtils.copyTextToClipboard(lcSeq)); } /* Add a "Copy right clipped sequence" item if there is right clipping. */ if (clipping[3] > 0) { int seqLength = seq.length(); String rcSeq = getClippedSequence( alignment.getReadSequence(), alignment.getReadStrand(), seqLength - clipping[3], seqLength); final JMenuItem rccItem = new JMenuItem("Copy right-clipped sequence"); add(rccItem); rccItem.addActionListener(aEvt -> StringUtils.copyTextToClipboard(rcSeq)); } } void addBlatItem(final TrackClickEvent te) { // Change track height by attribute final JMenuItem item = new JMenuItem("BLAT read sequence"); add(item); final Alignment alignment = getSpecficAlignment(te); if (alignment == null) { item.setEnabled(false); return; } final String seq = alignment.getReadSequence(); if (seq == null || seq.equals("*")) { item.setEnabled(false); return; } item.addActionListener(aEvt -> { String blatSeq = alignment.getReadStrand() == Strand.NEGATIVE ? SequenceTrack.getReverseComplement(seq) : seq; BlatClient.doBlatQuery(blatSeq, alignment.getReadName()); }); } void addBlatClippingItems(final TrackClickEvent te) { final Alignment alignment = getSpecficAlignment(te); if (alignment == null) { return; } int minimumBlatLength = BlatClient.MINIMUM_BLAT_LENGTH; int[] clipping = SAMAlignment.getClipping(alignment.getCigarString()); /* Add a "BLAT left clipped sequence" item if there is significant left clipping. 
*/ if (clipping[1] > minimumBlatLength) { String lcSeq = getClippedSequence(alignment.getReadSequence(), alignment.getReadStrand(), 0, clipping[1]); final JMenuItem lcbItem = new JMenuItem("BLAT left-clipped sequence"); add(lcbItem); lcbItem.addActionListener(aEvt -> BlatClient.doBlatQuery(lcSeq, alignment.getReadName() + " - left clip") ); } /* Add a "BLAT right clipped sequence" item if there is significant right clipping. */ if (clipping[3] > minimumBlatLength) { String seq = alignment.getReadSequence(); int seqLength = seq.length(); String rcSeq = getClippedSequence( alignment.getReadSequence(), alignment.getReadStrand(), seqLength - clipping[3], seqLength); final JMenuItem rcbItem = new JMenuItem("BLAT right-clipped sequence"); add(rcbItem); rcbItem.addActionListener(aEvt -> BlatClient.doBlatQuery(rcSeq, alignment.getReadName() + " - right clip") ); } } private String getClippedSequence(String readSequence, Strand strand, int i, int i2) { if (readSequence == null || readSequence.equals("*")) { return "*"; } String seq = readSequence.substring(i, i2); if (strand == Strand.NEGATIVE) { seq = SequenceTrack.getReverseComplement(seq); } return seq; } void addExtViewItem(final TrackClickEvent te) { // Change track height by attribute final JMenuItem item = new JMenuItem("ExtView"); add(item); final Alignment alignment = getAlignment(te); if (alignment == null) { item.setEnabled(false); return; } final String seq = alignment.getReadSequence(); if (seq == null) { item.setEnabled(false); return; } item.addActionListener(aEvt -> ExtendViewClient.postExtendView(alignment)); } /** * Add all menu items that link alignments by tag or readname. These are mutually exclusive. The * list includes 2 items for 10X "Loupe link-read" style views, a supplementary alignment option, * and linking by arbitrary tag. 
*/
void addLinkedReadItems() {
    addSeparator();
    add(linkedReadViewItem("BX"));
    add(linkedReadViewItem("MI"));

    addSeparator();
    final JCheckBoxMenuItem supplementalItem = new JCheckBoxMenuItem("Link supplementary alignments");
    // Supplementary linking is modeled as link-by-tag with the pseudo-tag "READNAME".
    supplementalItem.setSelected(isLinkedReads() && "READNAME".equals(renderOptions.getLinkByTag()));
    supplementalItem.addActionListener(aEvt -> {
        boolean linkedReads = supplementalItem.isSelected();
        setLinkByTag(linkedReads, "READNAME");
    });
    add(supplementalItem);

    // Previously used link-by tags are remembered in preferences (comma separated).
    String linkedTagsString = PreferencesManager.getPreferences().get(SAM_LINK_BY_TAGS);
    if (linkedTagsString != null) {
        String[] t = Globals.commaPattern.split(linkedTagsString);
        for (String tag : t) {
            if (tag.length() > 0) {
                add(linkedReadItem(tag));
            }
        }
    }

    final JMenuItem linkByTagItem = new JMenuItem("Link by tag...");
    linkByTagItem.addActionListener(aEvt -> {
        String tag = MessageUtils.showInputDialog("Link by tag:");
        if (tag != null) {
            setLinkByTag(true, tag);
            // Append the new tag to the remembered list.
            String linkedTags = PreferencesManager.getPreferences().get(SAM_LINK_BY_TAGS);
            if (linkedTags == null) {
                linkedTags = tag;
            } else {
                linkedTags += "," + tag;
            }
            PreferencesManager.getPreferences().put(SAM_LINK_BY_TAGS, linkedTags);
        }
    });
    add(linkByTagItem);
}

/** Checkbox for the 10X "linked read view" on the given tag (e.g. BX, MI). */
private JCheckBoxMenuItem linkedReadViewItem(String tag) {
    final JCheckBoxMenuItem item = new JCheckBoxMenuItem("Linked read view (" + tag + ")");
    item.setSelected(isLinkedReadView() && tag != null && tag.equals(renderOptions.getLinkByTag()));
    item.addActionListener(aEvt -> {
        boolean linkedReads = item.isSelected();
        setLinkedReadView(linkedReads, tag);
    });
    return item;
}

/** Checkbox for plain link-by-tag (not the linked-read view) on the given tag. */
private JCheckBoxMenuItem linkedReadItem(String tag) {
    final JCheckBoxMenuItem item = new JCheckBoxMenuItem("Link by " + tag);
    item.setSelected(!isLinkedReadView() && isLinkedReads() && tag.equals(renderOptions.getLinkByTag()));
    item.addActionListener(aEvt -> {
        boolean linkedReads = item.isSelected();
        setLinkByTag(linkedReads, tag);
    });
    return item;
}

/** Adds copy/BLAT items for the clicked insertion block.  The BLAT item requires >= 10 bases. */
private void addInsertionItems(AlignmentBlock insertion) {

    final JMenuItem item = new JMenuItem("Copy insert sequence");
    add(item);
    // NOTE(review): this listener dereferences insertion.getBases() without the null check
    // that guards the BLAT item below — TODO confirm getBases() is non-null for insertions
    // reached via getInsertion().
    item.addActionListener(aEvt -> StringUtils.copyTextToClipboard(insertion.getBases().getString()));

    if (insertion.getBases() != null && insertion.getBases().length >= 10) {
        final JMenuItem blatItem = new JMenuItem("BLAT insert sequence");
        add(blatItem);
        blatItem.addActionListener(aEvt -> {
            String blatSeq = insertion.getBases().getString();
            BlatClient.doBlatQuery(blatSeq, "BLAT insert sequence");
        });
    }
}

/** Adds items aimed at third-generation (long read) data: quick consensus, small-indel hiding
 *  with a threshold dialog, and insertion markers. */
void addThirdGenItems() {

    final JMenuItem qcItem = new JCheckBoxMenuItem("Quick consensus mode");
    qcItem.setSelected(renderOptions.isQuickConsensusMode());
    qcItem.addActionListener(aEvt -> {
        renderOptions.setQuickConsensusMode(qcItem.isSelected());
        AlignmentTrack.this.repaint();
    });

    final JMenuItem thresholdItem = new JMenuItem("Small indel threshold...");
    thresholdItem.addActionListener(evt -> UIUtilities.invokeOnEventThread(() -> {
        String sith = MessageUtils.showInputDialog("Small indel threshold: ", String.valueOf(renderOptions.getSmallIndelThreshold()));
        try {
            renderOptions.setSmallIndelThreshold(Integer.parseInt(sith));
            AlignmentTrack.this.repaint();
        } catch (NumberFormatException e) {
            log.error("Error setting small indel threshold - not an integer", e);
        }
    }));
    // Threshold only meaningful while small indels are being hidden.
    thresholdItem.setEnabled(renderOptions.isHideSmallIndels());

    final JMenuItem item = new JCheckBoxMenuItem("Hide small indels");
    item.setSelected(renderOptions.isHideSmallIndels());
    item.addActionListener(aEvt -> UIUtilities.invokeOnEventThread(() -> {
        renderOptions.setHideSmallIndels(item.isSelected());
        thresholdItem.setEnabled(item.isSelected());
        AlignmentTrack.this.repaint();
    }));

    final JMenuItem imItem = new JCheckBoxMenuItem("Show insertion markers");
    imItem.setSelected(renderOptions.isShowInsertionMarkers());
    imItem.addActionListener(aEvt -> {
        renderOptions.setShowInsertionMarkers(imItem.isSelected());
        AlignmentTrack.this.repaint();
    });

    add(imItem);
    add(qcItem);
    add(item);
    add(thresholdItem);
}
}

private AlignmentBlock
getInsertion(Alignment alignment, int pixelX) {
    // Returns the insertion block of the alignment (if any) under the given pixel X, else null.
    if (alignment != null && alignment.getInsertions() != null) {
        for (AlignmentBlock block : alignment.getInsertions()) {
            if (block.containsPixel(pixelX)) {
                return block;
            }
        }
    }
    return null;
}

/** Restores experimentType and nested RenderOptions from the session XML element. */
@Override
public void unmarshalXML(Element element, Integer version) {

    super.unmarshalXML(element, version);

    if (element.hasAttribute("experimentType")) {
        experimentType = ExperimentType.valueOf(element.getAttribute("experimentType"));
    }

    NodeList tmp = element.getElementsByTagName("RenderOptions");
    if (tmp.getLength() > 0) {
        Element renderElement = (Element) tmp.item(0);
        renderOptions = new RenderOptions(this);
        renderOptions.unmarshalXML(renderElement, version);
    }
}

/** Writes experimentType and the nested RenderOptions into the session XML element. */
@Override
public void marshalXML(Document document, Element element) {

    super.marshalXML(document, element);

    if (experimentType != null) {
        element.setAttribute("experimentType", experimentType.toString());
    }

    Element sourceElement = document.createElement("RenderOptions");
    renderOptions.marshalXML(document, sourceElement);
    element.appendChild(sourceElement);
}

/** Minimal popup menu shown when clicking directly on an insertion marker. */
static class InsertionMenu extends IGVPopupMenu {

    final AlignmentBlock insertion;

    InsertionMenu(AlignmentBlock insertion) {
        this.insertion = insertion;
        addCopySequenceItem();
        // NOTE(review): this gate uses `length > 10` while addBlatItem's setEnabled uses
        // `length >= 10` — TODO confirm which bound is intended.
        if (insertion.getBases() != null && insertion.getBases().length > 10) {
            addBlatItem();
        }
    }

    /** Copies the insertion's base sequence to the clipboard. */
    void addCopySequenceItem() {
        final JMenuItem item = new JMenuItem("Copy insert sequence");
        add(item);
        item.addActionListener(aEvt -> StringUtils.copyTextToClipboard(insertion.getBases().getString()));
    }

    /** Sends the insertion's base sequence to BLAT; enabled only for >= 10 bases. */
    void addBlatItem() {
        final JMenuItem item = new JMenuItem("BLAT insert sequence");
        add(item);
        item.addActionListener(aEvt -> {
            String blatSeq = insertion.getBases().getString();
            BlatClient.doBlatQuery(blatSeq, "BLAT insert sequence");
        });
        item.setEnabled(insertion.getBases() != null && insertion.getBases().length >= 10);
    }

    @Override
    public boolean includeStandardItems() {
        return false;
    }
}

/**
 * Per-track rendering options.  Most fields are nullable boxed types: null means "no explicit
 * track-level override", in which case the getters fall back to the preference values returned
 * by {@link #getPreferences()}.
 */
public static class RenderOptions implements Cloneable, Persistable {

    public static final String NAME = "RenderOptions";

    // Owning track; null for a detached instance (e.g. freshly unmarshalled).
    private AlignmentTrack track;
    private Boolean shadeBasesOption;
    private Boolean shadeCenters;
    private Boolean flagUnmappedPairs;
    private Boolean showAllBases;
    private Integer minInsertSize;
    private Integer maxInsertSize;
    private ColorOption colorOption;
    private SortOption sortOption;
    private GroupOption groupByOption;
    private ShadeAlignmentsOption shadeAlignmentsOption;
    private Integer mappingQualityLow;
    private Integer mappingQualityHigh;
    private boolean viewPairs = false;
    private String colorByTag;
    private String groupByTag;
    private String sortByTag;
    private String linkByTag;
    private Boolean linkedReads;
    private Boolean quickConsensusMode;
    private Boolean showMismatches;
    private Boolean computeIsizes;
    private Double minInsertSizePercentile;
    private Double maxInsertSizePercentile;
    private Boolean pairedArcView;
    private Boolean flagZeroQualityAlignments;
    private Range groupByPos;
    private Boolean invertSorting;
    private boolean invertGroupSorting;
    private Boolean showInsertionMarkers;
    private Boolean hideSmallIndels;
    private Integer smallIndelThreshold;
    BisulfiteContext bisulfiteContext = BisulfiteContext.CG;
    Map<String, PEStats> peStats;

    public RenderOptions() {
    }

    RenderOptions(AlignmentTrack track) {
        //updateColorScale();
        this.track = track;
        peStats = new HashMap<>();
    }

    // Preference source: the owning track's preferences, or the OTHER-experiment defaults
    // when this instance has no track.
    IGVPreferences getPreferences() {
        return this.track != null ?
this.track.getPreferences() : AlignmentTrack.getPreferences(ExperimentType.OTHER);
}

// ---- setters: each records a track-level override of the corresponding preference ----

void setShowAllBases(boolean showAllBases) {
    this.showAllBases = showAllBases;
}

void setShowMismatches(boolean showMismatches) {
    this.showMismatches = showMismatches;
}

void setMinInsertSize(int minInsertSize) {
    this.minInsertSize = minInsertSize;
    //updateColorScale();
}

public void setViewPairs(boolean viewPairs) {
    this.viewPairs = viewPairs;
}

void setComputeIsizes(boolean computeIsizes) {
    this.computeIsizes = computeIsizes;
}

void setMaxInsertSizePercentile(double maxInsertSizePercentile) {
    this.maxInsertSizePercentile = maxInsertSizePercentile;
}

void setMaxInsertSize(int maxInsertSize) {
    this.maxInsertSize = maxInsertSize;
}

void setMinInsertSizePercentile(double minInsertSizePercentile) {
    this.minInsertSizePercentile = minInsertSizePercentile;
}

void setColorByTag(String colorByTag) {
    this.colorByTag = colorByTag;
}

void setColorOption(ColorOption colorOption) {
    this.colorOption = colorOption;
}

void setSortOption(SortOption sortOption){
    this.sortOption = sortOption;
}

void setSortByTag(String sortByTag) {
    this.sortByTag = sortByTag;
}

void setGroupByTag(String groupByTag) {
    this.groupByTag = groupByTag;
}

void setGroupByPos(Range groupByPos) {
    this.groupByPos = groupByPos;
}

void setInvertSorting(boolean invertSorting){
    this.invertSorting = invertSorting;
}

void setInvertGroupSorting(boolean invertGroupSorting){
    this.invertGroupSorting = invertGroupSorting;
}

void setLinkByTag(String linkByTag) {
    this.linkByTag = linkByTag;
}

void setQuickConsensusMode(boolean quickConsensusMode) {
    this.quickConsensusMode = quickConsensusMode;
}

// Null group-by collapses to NONE.
public void setGroupByOption(GroupOption groupByOption) {
    this.groupByOption = (groupByOption == null) ?
GroupOption.NONE : groupByOption; } void setShadeAlignmentsOption(ShadeAlignmentsOption shadeAlignmentsOption){ this.shadeAlignmentsOption = shadeAlignmentsOption; } void setShadeBasesOption(boolean shadeBasesOption) { this.shadeBasesOption = shadeBasesOption; } void setLinkedReads(boolean linkedReads) { this.linkedReads = linkedReads; } public void setShowInsertionMarkers(boolean drawInsertionIntervals) { this.showInsertionMarkers = drawInsertionIntervals; } public void setHideSmallIndels(boolean hideSmallIndels) { this.hideSmallIndels = hideSmallIndels; } public void setSmallIndelThreshold(int smallIndelThreshold) { this.smallIndelThreshold = smallIndelThreshold; } // getters public int getMinInsertSize() { return minInsertSize == null ? getPreferences().getAsInt(SAM_MIN_INSERT_SIZE_THRESHOLD) : minInsertSize; } public int getMaxInsertSize() { return maxInsertSize == null ? getPreferences().getAsInt(SAM_MAX_INSERT_SIZE_THRESHOLD) : maxInsertSize; } public boolean isFlagUnmappedPairs() { return flagUnmappedPairs == null ? getPreferences().getAsBoolean(SAM_FLAG_UNMAPPED_PAIR) : flagUnmappedPairs; } public boolean getShadeBasesOption() { return shadeBasesOption == null ? getPreferences().getAsBoolean(SAM_SHADE_BASES) : shadeBasesOption; } public boolean isShowMismatches() { return showMismatches == null ? getPreferences().getAsBoolean(SAM_SHOW_MISMATCHES) : showMismatches; } public boolean isShowAllBases() { return showAllBases == null ? getPreferences().getAsBoolean(SAM_SHOW_ALL_BASES) : showAllBases; } public boolean isShadeCenters() { return shadeCenters == null ? getPreferences().getAsBoolean(SAM_SHADE_CENTER) : shadeCenters; } boolean isShowInsertionMarkers() { return showInsertionMarkers == null ? getPreferences().getAsBoolean(SAM_SHOW_INSERTION_MARKERS) : showInsertionMarkers; } public boolean isFlagZeroQualityAlignments() { return flagZeroQualityAlignments == null ? 
getPreferences().getAsBoolean(SAM_FLAG_ZERO_QUALITY) : flagZeroQualityAlignments; } public boolean isViewPairs() { return viewPairs; } public boolean isComputeIsizes() { return computeIsizes == null ? getPreferences().getAsBoolean(SAM_COMPUTE_ISIZES) : computeIsizes; } public double getMinInsertSizePercentile() { return minInsertSizePercentile == null ? getPreferences().getAsFloat(SAM_MIN_INSERT_SIZE_PERCENTILE) : minInsertSizePercentile; } public double getMaxInsertSizePercentile() { return maxInsertSizePercentile == null ? getPreferences().getAsFloat(SAM_MAX_INSERT_SIZE_PERCENTILE) : maxInsertSizePercentile; } public ColorOption getColorOption() { return colorOption == null ? CollUtils.valueOf(ColorOption.class, getPreferences().get(SAM_COLOR_BY), ColorOption.NONE) : colorOption; } public String getColorByTag() { return colorByTag == null ? getPreferences().get(SAM_COLOR_BY_TAG) : colorByTag; } public ShadeAlignmentsOption getShadeAlignmentsOption() { if (shadeAlignmentsOption != null) { return shadeAlignmentsOption; } else { try { return ShadeAlignmentsOption.valueOf(getPreferences().get(SAM_SHADE_ALIGNMENT_BY)); } catch (IllegalArgumentException e) { log.error("Error parsing alignment shade option: " + ShadeAlignmentsOption.valueOf(getPreferences().get(SAM_SHADE_ALIGNMENT_BY))); return ShadeAlignmentsOption.NONE; } } } public int getMappingQualityLow() { return mappingQualityLow == null ? getPreferences().getAsInt(SAM_SHADE_QUALITY_LOW) : mappingQualityLow; } public int getMappingQualityHigh() { return mappingQualityHigh == null ? getPreferences().getAsInt(SAM_SHADE_QUALITY_HIGH) : mappingQualityHigh; } SortOption getSortOption() { return sortOption == null ? CollUtils.valueOf(SortOption.class, getPreferences().get(SAM_SORT_OPTION), null) : sortOption; } String getSortByTag() { return sortByTag == null ? getPreferences().get(SAM_SORT_BY_TAG) : sortByTag; } public String getGroupByTag() { return groupByTag == null ? 
getPreferences().get(SAM_GROUP_BY_TAG) : groupByTag; } public Range getGroupByPos() { if (groupByPos == null) { String pos = getPreferences().get(SAM_GROUP_BY_POS); if (pos != null) { String[] posParts = pos.split(" "); if (posParts.length != 2) { groupByPos = null; } else { int posChromStart = Integer.parseInt(posParts[1]); groupByPos = new Range(posParts[0], posChromStart, posChromStart + 1); } } } return groupByPos; } public boolean isInvertSorting(){ return invertSorting == null ? getPreferences().getAsBoolean(SAM_INVERT_SORT) : invertSorting; } public boolean isInvertGroupSorting(){ return invertGroupSorting; } public String getLinkByTag() { return linkByTag; } public GroupOption getGroupByOption() { GroupOption gbo = groupByOption; // Interpret null as the default option. gbo = (gbo == null) ? CollUtils.valueOf(GroupOption.class, getPreferences().get(SAM_GROUP_OPTION), GroupOption.NONE) : gbo; // Add a second check for null in case defaultValues.groupByOption == null gbo = (gbo == null) ? GroupOption.NONE : gbo; return gbo; } public boolean isLinkedReads() { return linkedReads != null && linkedReads; } public boolean isQuickConsensusMode() { return quickConsensusMode == null ? getPreferences().getAsBoolean(SAM_QUICK_CONSENSUS_MODE) : quickConsensusMode; } public boolean isHideSmallIndels() { return hideSmallIndels == null ? getPreferences().getAsBoolean(SAM_HIDE_SMALL_INDEL) : hideSmallIndels; } public int getSmallIndelThreshold() { return smallIndelThreshold == null ? 
getPreferences().getAsInt(SAM_SMALL_INDEL_BP_THRESHOLD) : smallIndelThreshold; } @Override public void marshalXML(Document document, Element element) { if (shadeBasesOption != null) { element.setAttribute("shadeBasesOption", shadeBasesOption.toString()); } if (shadeCenters != null) { element.setAttribute("shadeCenters", shadeCenters.toString()); } if (flagUnmappedPairs != null) { element.setAttribute("flagUnmappedPairs", flagUnmappedPairs.toString()); } if (showAllBases != null) { element.setAttribute("showAllBases", showAllBases.toString()); } if (minInsertSize != null) { element.setAttribute("minInsertSize", minInsertSize.toString()); } if (maxInsertSize != null) { element.setAttribute("maxInsertSize", maxInsertSize.toString()); } if (colorOption != null) { element.setAttribute("colorOption", colorOption.toString()); } if (groupByOption != null) { element.setAttribute("groupByOption", groupByOption.toString()); } if (shadeAlignmentsOption != null){ element.setAttribute("shadeAlignmentsByOption", shadeAlignmentsOption.toString()); } if (mappingQualityLow != null) { element.setAttribute("mappingQualityLow", mappingQualityLow.toString()); } if (mappingQualityHigh != null) { element.setAttribute("mappingQualityHigh", mappingQualityHigh.toString()); } if (viewPairs != false) { element.setAttribute("viewPairs", Boolean.toString(viewPairs)); } if (colorByTag != null) { element.setAttribute("colorByTag", colorByTag); } if (groupByTag != null) { element.setAttribute("groupByTag", groupByTag); } if (sortByTag != null) { element.setAttribute("sortByTag", sortByTag); } if (linkByTag != null) { element.setAttribute("linkByTag", linkByTag); } if (linkedReads != null) { element.setAttribute("linkedReads", linkedReads.toString()); } if (quickConsensusMode != null) { element.setAttribute("quickConsensusMode", quickConsensusMode.toString()); } if (showMismatches != null) { element.setAttribute("showMismatches", showMismatches.toString()); } if (computeIsizes != null) { 
element.setAttribute("computeIsizes", computeIsizes.toString()); } if (minInsertSizePercentile != null) { element.setAttribute("minInsertSizePercentile", minInsertSizePercentile.toString()); } if (maxInsertSizePercentile != null) { element.setAttribute("maxInsertSizePercentile", maxInsertSizePercentile.toString()); } if (pairedArcView != null) { element.setAttribute("pairedArcView", pairedArcView.toString()); } if (flagZeroQualityAlignments != null) { element.setAttribute("flagZeroQualityAlignments", flagZeroQualityAlignments.toString()); } if (groupByPos != null) { element.setAttribute("groupByPos", groupByPos.toString()); } if(invertSorting != null) { element.setAttribute("invertSorting", Boolean.toString(invertSorting)); } if(sortOption != null){ element.setAttribute("sortOption", sortOption.toString()); } if (invertGroupSorting) { element.setAttribute("invertGroupSorting", Boolean.toString(invertGroupSorting)); } if (hideSmallIndels != null) { element.setAttribute("hideSmallIndels", hideSmallIndels.toString()); } if (smallIndelThreshold != null) { element.setAttribute("smallIndelThreshold", smallIndelThreshold.toString()); } if (showInsertionMarkers != null) { element.setAttribute("showInsertionMarkers", showInsertionMarkers.toString()); } } @Override public void unmarshalXML(Element element, Integer version) { if (element.hasAttribute("shadeBasesOption")) { String v = element.getAttribute("shadeBasesOption"); if (v != null) { shadeBasesOption = v.equalsIgnoreCase("quality") || v.equalsIgnoreCase("true"); } } if (element.hasAttribute("shadeCenters")) { shadeCenters = Boolean.parseBoolean(element.getAttribute("shadeCenters")); } if (element.hasAttribute("showAllBases")) { showAllBases = Boolean.parseBoolean(element.getAttribute("showAllBases")); } if (element.hasAttribute("flagUnmappedPairs")) { flagUnmappedPairs = Boolean.parseBoolean(element.getAttribute("flagUnmappedPairs")); } if (element.hasAttribute("minInsertSize")) { minInsertSize = 
Integer.parseInt(element.getAttribute("minInsertSize")); } if (element.hasAttribute("maxInsertSize")) { maxInsertSize = Integer.parseInt(element.getAttribute("maxInsertSize")); } if (element.hasAttribute("colorOption")) { colorOption = ColorOption.valueOf(element.getAttribute("colorOption")); } if (element.hasAttribute("sortOption")) { sortOption = SortOption.valueOf((element.getAttribute("sortOption"))); } if (element.hasAttribute("groupByOption")) { groupByOption = GroupOption.valueOf(element.getAttribute("groupByOption")); } if (element.hasAttribute("shadeAlignmentsByOption")){ shadeAlignmentsOption = ShadeAlignmentsOption.valueOf(element.getAttribute("shadeAlignmentsByOption")); } if (element.hasAttribute("mappingQualityLow")){ mappingQualityLow = Integer.parseInt(element.getAttribute("mappingQualityLow")); } if (element.hasAttribute("mappingQualityHigh")) { mappingQualityHigh = Integer.parseInt(element.getAttribute("mappingQualityHigh")); } if (element.hasAttribute("viewPairs")) { viewPairs = Boolean.parseBoolean(element.getAttribute("viewPairs")); } if (element.hasAttribute("colorByTag")) { colorByTag = element.getAttribute("colorByTag"); } if (element.hasAttribute("groupByTag")) { groupByTag = element.getAttribute("groupByTag"); } if (element.hasAttribute("sortByTag")) { sortByTag = element.getAttribute("sortByTag"); } if (element.hasAttribute("linkByTag")) { linkByTag = element.getAttribute("linkByTag"); } if (element.hasAttribute("linkedReads")) { linkedReads = Boolean.parseBoolean(element.getAttribute("linkedReads")); } if (element.hasAttribute("quickConsensusMode")) { quickConsensusMode = Boolean.parseBoolean(element.getAttribute("quickConsensusMode")); } if (element.hasAttribute("showMismatches")) { showMismatches = Boolean.parseBoolean(element.getAttribute("showMismatches")); } if (element.hasAttribute("computeIsizes")) { computeIsizes = Boolean.parseBoolean(element.getAttribute("computeIsizes")); } if (element.hasAttribute("minInsertSizePercentile")) 
{ minInsertSizePercentile = Double.parseDouble(element.getAttribute("minInsertSizePercentile")); } if (element.hasAttribute("maxInsertSizePercentile")) { maxInsertSizePercentile = Double.parseDouble(element.getAttribute("maxInsertSizePercentile")); } if (element.hasAttribute("pairedArcView")) { pairedArcView = Boolean.parseBoolean(element.getAttribute("pairedArcView")); } if (element.hasAttribute("flagZeroQualityAlignments")) { flagZeroQualityAlignments = Boolean.parseBoolean(element.getAttribute("flagZeroQualityAlignments")); } if (element.hasAttribute("groupByPos")) { groupByPos = Range.fromString(element.getAttribute("groupByPos")); } if (element.hasAttribute("invertSorting")) { invertSorting = Boolean.parseBoolean(element.getAttribute("invertSorting")); } if (element.hasAttribute("invertGroupSorting")) { invertGroupSorting = Boolean.parseBoolean(element.getAttribute("invertGroupSorting")); } if (element.hasAttribute("hideSmallIndels")) { hideSmallIndels = Boolean.parseBoolean(element.getAttribute("hideSmallIndels")); } if (element.hasAttribute("smallIndelThreshold")) { smallIndelThreshold = Integer.parseInt(element.getAttribute("smallIndelThreshold")); } if (element.hasAttribute("showInsertionMarkers")) { showInsertionMarkers = Boolean.parseBoolean(element.getAttribute("showInsertionMarkers")); } } } }
package org.cactoos.text;

import org.cactoos.Scalar;
import org.cactoos.Text;

/**
 * Text padded at start to reach the given length.
 *
 * <p>There is thread safe.
 *
 * @since 0.32
 */
public final class PaddedStartText extends TextEnvelope {

    /**
     * Ctor.
     * @param text The text
     * @param length The minimum length of the resulting string
     * @param symbol The padding symbol
     */
    public PaddedStartText(
        final Text text, final int length, final char symbol) {
        super(
            (Scalar<String>) () -> {
                final String value = text.asString();
                // How many padding symbols are needed in front of the value;
                // RepeatedText yields nothing when this is not positive.
                final int missing = length - value.length();
                final Text prefix = new RepeatedText(
                    new TextOf(symbol), missing
                );
                return new JoinedText(new TextOf(""), prefix, text).asString();
            }
        );
    }
}
package org.clafer.ir.analysis;

import java.util.Objects;
import java.util.Optional;
import java.util.stream.Stream;
import org.clafer.domain.Domain;
import static org.clafer.domain.Domain.boundDomain;
import org.clafer.ir.IrBoolExpr;
import org.clafer.ir.IrCompare;
import static org.clafer.ir.IrCompare.Op.Equal;
import static org.clafer.ir.IrCompare.Op.NotEqual;
import org.clafer.ir.IrIfOnlyIf;
import org.clafer.ir.IrIfThenElse;
import org.clafer.ir.IrImplies;
import org.clafer.ir.IrIntExpr;
import org.clafer.ir.IrLone;
import org.clafer.ir.IrModule;
import org.clafer.ir.IrNot;
import org.clafer.ir.IrOffset;
import org.clafer.ir.IrOr;
import org.clafer.ir.IrRewriter;
import org.clafer.ir.IrSetExpr;
import org.clafer.ir.IrUtil;
import org.clafer.ir.Irs;
import static org.clafer.ir.Irs.add;
import static org.clafer.ir.Irs.and;
import static org.clafer.ir.Irs.equal;
import static org.clafer.ir.Irs.greaterThan;
import static org.clafer.ir.Irs.greaterThanEqual;
import static org.clafer.ir.Irs.ifOnlyIf;
import static org.clafer.ir.Irs.ifThenElse;
import static org.clafer.ir.Irs.implies;
import static org.clafer.ir.Irs.lessThanEqual;
import static org.clafer.ir.Irs.lone;
import static org.clafer.ir.Irs.mul;
import static org.clafer.ir.Irs.not;
import static org.clafer.ir.Irs.offset;
import static org.clafer.ir.Irs.or;
import static org.clafer.ir.Irs.sub;
import static org.clafer.ir.Irs.ternary;

/**
 * Peephole optimizer over the IR: rewrites selected boolean patterns
 * ({@code lone}, {@code or}, {@code =>}, {@code <=>}, if-then-else over
 * comparisons with a two-value domain and a constant bound) into arithmetic
 * encodings, and flattens nested {@code offset} expressions.
 *
 * @author jimmy
 */
public class Optimizer {

    // Utility class; not instantiable.
    private Optimizer() {
    }

    /**
     * Optimize the module.
     *
     * @param module the module to optimize
     * @return the optimized module
     */
    public static IrModule optimize(IrModule module) {
        return optimizer.rewrite(module, null);
    }

    /**
     * View {@code expr} as zero or more (left, op, right) {@link Compare}
     * triples. Symmetric operators (=, !=, and {@code <=>} as boolean
     * equality) are emitted in both orientations; an implication
     * {@code a => b} is viewed as {@code a <= b}.
     */
    private static Stream<Compare> compares(IrIntExpr expr) {
        if (expr instanceof IrCompare) {
            IrCompare compare = (IrCompare) expr;
            switch (compare.getOp()) {
                case Equal:
                case NotEqual:
                    // Symmetric operators: offer both orientations so callers
                    // can match on either side.
                    return Stream.of(
                            new Compare(compare.getLeft(), compare.getOp(), compare.getRight()),
                            new Compare(compare.getRight(), compare.getOp(), compare.getLeft()));
                default:
                    return Stream.of(
                            new Compare(compare.getLeft(), compare.getOp(), compare.getRight()));
            }
        } else if (expr instanceof IrIfOnlyIf) {
            IrIfOnlyIf ifOnlyIf = (IrIfOnlyIf) expr;
            // a <=> b is equality over booleans; both orientations offered.
            return Stream.of(
                    new Compare(ifOnlyIf.getLeft(), IrCompare.Op.Equal, ifOnlyIf.getRight()),
                    new Compare(ifOnlyIf.getRight(), IrCompare.Op.Equal, ifOnlyIf.getLeft()));
        } else if (expr instanceof IrImplies) {
            IrImplies implies = (IrImplies) expr;
            // a => b over booleans is a <= b.
            return Stream.of(
                    new Compare(implies.getAntecedent(), IrCompare.Op.LessThanEqual, implies.getConsequent()));
        }
        return Stream.empty();
    }

    /**
     * Like {@link #compares(IrIntExpr)}, but additionally views a negation
     * {@code !x} as the comparison {@code x = 0} (in both orientations).
     */
    private static Stream<Compare> moreCompares(IrIntExpr expr) {
        if (expr instanceof IrNot) {
            IrNot not = (IrNot) expr;
            return Stream.of(
                    new Compare(not.getExpr(), IrCompare.Op.Equal, Irs.Zero),
                    new Compare(Irs.Zero, IrCompare.Op.Equal, not.getExpr()));
        }
        return compares(expr);
    }

    /**
     * The rewriting visitor. For each visited node it first rewrites the
     * children, then tries the comparison-based optimizations; if none
     * applies it rebuilds the node only when a child actually changed.
     */
    private static final IrRewriter<Void> optimizer = new IrRewriter<Void>() {
        @Override
        public IrBoolExpr visit(IrLone ir, Void a) {
            IrBoolExpr[] operands = rewrite(ir.getOperands(), a);
            if (operands.length == 2) {
                // Try each operand as the comparison side, in both roles.
                Optional<IrBoolExpr> opt = Stream.concat(
                        compares(operands[0]).map(r -> optimizeLoneCompare(operands[1], r)),
                        compares(operands[1]).map(r -> optimizeLoneCompare(operands[0], r)))
                        .filter(Objects::nonNull)
                        .findFirst();
                if (opt.isPresent()) {
                    return opt.get();
                }
            }
            return changed(ir.getOperands(), operands) ? lone(operands) : ir;
        }

        @Override
        public IrBoolExpr visit(IrOr ir, Void a) {
            IrBoolExpr[] operands = rewrite(ir.getOperands(), a);
            if (operands.length == 2) {
                // Try each operand as the comparison side, in both roles.
                Optional<IrBoolExpr> opt = Stream.concat(
                        compares(operands[0]).map(r -> optimizeOrCompare(operands[1], r)),
                        compares(operands[1]).map(r -> optimizeOrCompare(operands[0], r)))
                        .filter(Objects::nonNull)
                        .findFirst();
                if (opt.isPresent()) {
                    return opt.get();
                }
            }
            return changed(ir.getOperands(), operands) ? or(operands) : ir;
        }

        @Override
        public IrBoolExpr visit(IrImplies ir, Void a) {
            // Rewrite
            //     !a => !b
            //     b => a
            if (ir.getAntecedent().isNegative() && ir.getConsequent().isNegative()) {
                return rewrite(implies(not(ir.getConsequent()), not(ir.getAntecedent())), a);
            }
            // Rewrite
            //     !a => b
            //     a or b
            if (ir.getAntecedent().isNegative()) {
                return rewrite(or(not(ir.getAntecedent()), ir.getConsequent()), a);
            }
            // Rewrite
            //     a => !b
            //     a + b <= 1
            if (ir.getConsequent().isNegative()) {
                return rewrite(lone(ir.getAntecedent(), not(ir.getConsequent())), a);
            }
            IrBoolExpr antecedent = rewrite(ir.getAntecedent(), a);
            IrBoolExpr consequent = rewrite(ir.getConsequent(), a);
            Optional<IrBoolExpr> opt = compares(consequent).map(c -> optimizeImplicationCompare(antecedent, c))
                    .filter(Objects::nonNull)
                    .findFirst();
            if (opt.isPresent()) {
                return opt.get();
            }
            return changed(ir.getAntecedent(), antecedent) || changed(ir.getConsequent(), consequent)
                    ? implies(antecedent, consequent) : ir;
        }

        @Override
        public IrBoolExpr visit(IrIfThenElse ir, Void a) {
            IrBoolExpr antecedent = rewrite(ir.getAntecedent(), a);
            IrBoolExpr consequent = rewrite(ir.getConsequent(), a);
            IrBoolExpr alternative = rewrite(ir.getAlternative(), a);
            // Pair every comparison view of the consequent with every view of
            // the alternative and take the first combination that optimizes.
            Optional<IrBoolExpr> opt = moreCompares(consequent).flatMap(l
                    -> moreCompares(alternative).map(r -> optimizeIfThenElseCompare(antecedent, l, r)))
                    .filter(Objects::nonNull)
                    .findFirst();
            if (opt.isPresent()) {
                return opt.get();
            }
            return changed(ir.getAntecedent(), antecedent)
                    || changed(ir.getConsequent(), consequent)
                    || changed(ir.getAlternative(), alternative)
                    ? ifThenElse(antecedent, consequent, alternative) : ir;
        }

        @Override
        public IrBoolExpr visit(IrIfOnlyIf ir, Void a) {
            IrBoolExpr left = rewrite(ir.getLeft(), a);
            IrBoolExpr right = rewrite(ir.getRight(), a);
            // Try each side as the comparison, reified by the other side.
            Optional<IrBoolExpr> opt = Stream.concat(
                    compares(right).map(r -> optimizeIfOnlyIfCompare(left, r)),
                    compares(left).map(r -> optimizeIfOnlyIfCompare(right, r)))
                    .filter(Objects::nonNull)
                    .findFirst();
            if (opt.isPresent()) {
                return opt.get();
            }
            return changed(ir.getLeft(), left) || changed(ir.getRight(), right)
                    ? ifOnlyIf(left, right) : ir;
        }

        @Override
        public IrSetExpr visit(IrOffset ir, Void a) {
            if (ir.getSet() instanceof IrOffset) {
                // Rewrite
                //     offset(offset(set, a), b)
                //     offset(set, a + b)
                // This optimization is important for going multiple steps up the
                // hierarchy.
                IrOffset innerOffset = (IrOffset) ir.getSet();
                return rewrite(offset(innerOffset.getSet(), ir.getOffset() + innerOffset.getOffset()), a);
            }
            return super.visit(ir, a);
        }
    };

    /**
     * Optimize {@code lone(antecedent, left `op` right)} where `op` is = or !=.
     * Applies only when {@code left} has exactly two values in its domain and
     * {@code right} is constant; encodes the at-most-one constraint as a
     * single linear inequality.
     *
     * @return the optimized expression, or {@code null} if no rewrite applies
     */
    private static IrBoolExpr optimizeLoneCompare(IrBoolExpr antecedent, Compare compare) {
        IrIntExpr left = compare.left;
        IrCompare.Op op = compare.op;
        IrIntExpr right = compare.right;
        Domain domain = left.getDomain();
        Integer constant = IrUtil.getConstant(right);
        if (domain.size() == 2 && constant != null) {
            switch (op) {
                case Equal:
                    // Rewrite
                    //     lone(bool, int = 888)
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) <= 888 - int
                    //     asInt(bool) + int <= 888
                    if (domain.getHighBound() == constant.intValue()) {
                        return lessThanEqual(add(antecedent, left), domain.getHighBound());
                    }
                    // Rewrite
                    //     lone(bool, int = -3)
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) <= int - (-3)
                    if (domain.getLowBound() == constant.intValue()) {
                        return lessThanEqual(antecedent, sub(left, domain.getLowBound()));
                    }
                    break;
                case NotEqual:
                    // Rewrite
                    //     lone(bool, int != 888)
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) <= int - (-3)
                    if (domain.getHighBound() == constant.intValue()) {
                        return lessThanEqual(antecedent, sub(left, domain.getLowBound()));
                    }
                    // Rewrite
                    //     lone(bool, int != -3)
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) <= 888 - int
                    //     asInt(bool) + int <= 888
                    if (domain.getLowBound() == constant.intValue()) {
                        return lessThanEqual(add(antecedent, left), domain.getHighBound());
                    }
                    break;
            }
        }
        return null;
    }

    /**
     * Optimize {@code antecedent or (left `op` right)} where `op` is = or !=.
     * Applies only when {@code left} has exactly two values in its domain and
     * {@code right} is constant; encodes the disjunction as a single strict
     * inequality.
     *
     * @return the optimized expression, or {@code null} if no rewrite applies
     */
    private static IrBoolExpr optimizeOrCompare(IrBoolExpr antecedent, Compare compare) {
        IrIntExpr left = compare.left;
        IrCompare.Op op = compare.op;
        IrIntExpr right = compare.right;
        Domain domain = left.getDomain();
        Integer constant = IrUtil.getConstant(right);
        if (domain.size() == 2 && constant != null) {
            switch (op) {
                case Equal:
                    // Rewrite
                    //     bool or int = 888
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) > (-3) - int
                    //     asInt(bool) + int > (-3)
                    if (domain.getHighBound() == constant.intValue()) {
                        return greaterThan(add(antecedent, left), domain.getLowBound());
                    }
                    // Rewrite
                    //     bool or int = -3
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) > int - 888
                    if (domain.getLowBound() == constant.intValue()) {
                        return greaterThan(antecedent, sub(left, domain.getHighBound()));
                    }
                    break;
                case NotEqual:
                    // Rewrite
                    //     bool or int != 888
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) > int - 888
                    if (domain.getHighBound() == constant.intValue()) {
                        return greaterThan(antecedent, sub(left, domain.getHighBound()));
                    }
                    // Rewrite
                    //     bool or int != -3
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) > (-3) - int
                    //     asInt(bool) + int > (-3)
                    if (domain.getLowBound() == constant.intValue()) {
                        return greaterThan(add(antecedent, left), domain.getLowBound());
                    }
                    break;
            }
        }
        return null;
    }

    /**
     * Optimize {@code antecedent => (left `op` right)} where `op` is = or !=.
     * Applies only when {@code left} has exactly two values in its domain and
     * {@code right} is constant; encodes the implication as a single linear
     * inequality.
     *
     * @return the optimized expression, or {@code null} if no rewrite applies
     */
    private static IrBoolExpr optimizeImplicationCompare(IrBoolExpr antecedent, Compare compare) {
        IrIntExpr left = compare.left;
        IrCompare.Op op = compare.op;
        IrIntExpr right = compare.right;
        Domain domain = left.getDomain();
        Integer constant = IrUtil.getConstant(right);
        if (domain.size() == 2 && constant != null) {
            switch (op) {
                case Equal:
                    // Rewrite
                    //     bool => int = 888
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) <= int - (-3)
                    if (domain.getHighBound() == constant.intValue()) {
                        return lessThanEqual(antecedent, sub(left, domain.getLowBound()));
                    }
                    // Rewrite
                    //     bool => int = -3
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) <= 888 - int
                    //     asInt(bool) + int <= 888
                    if (domain.getLowBound() == constant.intValue()) {
                        return lessThanEqual(add(antecedent, left), domain.getHighBound());
                    }
                    break;
                case NotEqual:
                    // Rewrite
                    //     bool => int != 888
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) <= 888 - int
                    //     asInt(bool) + int <= 888
                    if (domain.getHighBound() == constant.intValue()) {
                        return lessThanEqual(add(antecedent, left), domain.getHighBound());
                    }
                    // Rewrite
                    //     bool => int != -3
                    //     where dom(int) = {-3, 888}
                    //     asInt(bool) <= int - (-3)
                    if (domain.getLowBound() == constant.intValue()) {
                        return lessThanEqual(antecedent, sub(left, domain.getLowBound()));
                    }
                    break;
            }
        }
        return null;
    }

    /**
     * Optimize {@code if b then x = y else x = z} to {@code x = b ? y : z}.
     * Requires both branches to be equalities with the same left operand.
     *
     * @return the optimized expression, or {@code null} if no rewrite applies
     */
    private static IrBoolExpr optimizeIfThenElseCompare(
            IrBoolExpr condition, Compare compare1, Compare compare2) {
        IrIntExpr left1 = compare1.left;
        IrCompare.Op op1 = compare1.op;
        IrIntExpr right1 = compare1.right;
        IrIntExpr left2 = compare2.left;
        IrCompare.Op op2 = compare2.op;
        IrIntExpr right2 = compare2.right;
        if (IrCompare.Op.Equal.equals(op1) && IrCompare.Op.Equal.equals(op2) && left1.equals(left2)) {
            return equal(left1, ternary(condition, right1, right2));
        }
        return null;
    }

    /**
     * Optimize {@code reify <=> (left `op` right)} where `op` is = or != and
     * {@code right} is a constant equal to {@code left}'s low or high bound.
     * Encodes the reification arithmetically; when the domain has exactly two
     * values a single equality suffices, otherwise a conjunction of two
     * inequalities over the span {@code high - low} is used.
     *
     * @return the optimized expression, or {@code null} if no rewrite applies
     */
    private static IrBoolExpr optimizeIfOnlyIfCompare(IrBoolExpr reify, Compare compare) {
        IrIntExpr left = compare.left;
        IrCompare.Op op = compare.op;
        IrIntExpr right = compare.right;
        Integer constant = IrUtil.getConstant(right);
        if (constant != null) {
            if (constant.equals(left.getHighBound())) {
                int span = constant - left.getLowBound();
                switch (op) {
                    case Equal:
                        if (left.getDomain().size() == 2) {
                            // Optimize reify <=> (left == max) to
                            // left = min + span * reify.
                            return equal(left, add(left.getLowBound(), mul(span, reify, boundDomain(0, span))));
                        }
                        // Optimize reify <=> (left == max) to
                        // (left <= max - !reify) && (left >= max - span * !reify).
                        return and(
                                lessThanEqual(left, sub(constant, not(reify))),
                                greaterThanEqual(left, sub(constant, mul(span, not(reify), boundDomain(0, span)))));
                    case NotEqual:
                        if (left.getDomain().size() == 2) {
                            // Optimize reify <=> (left != max) to
                            // left = max - span * reify.
                            return equal(left, sub(left.getHighBound(), mul(span, reify, boundDomain(0, span))));
                        }
                        // Optimize reify <=> (left != max) to
                        // (left <= max - reify) && (left >= max - span * reify).
                        return and(
                                lessThanEqual(left, sub(constant, reify)),
                                greaterThanEqual(left, sub(constant, mul(span, reify, boundDomain(0, span)))));
                }
            } else if (constant.equals(left.getLowBound())) {
                int span = left.getHighBound() - constant;
                switch (op) {
                    case Equal:
                        if (left.getDomain().size() == 2) {
                            // Optimize reify <=> (left == min) to
                            // left = max - span * reify.
                            return equal(left, sub(left.getHighBound(), mul(span, reify, boundDomain(0, span))));
                        }
                        // Optimize reify <=> (left == min) to
                        // (left >= min + !reify) && (left <= min + span * !reify).
                        return and(greaterThanEqual(left, add(constant, not(reify))),
                                lessThanEqual(left, add(constant, mul(span, not(reify), boundDomain(0, span)))));
                    case NotEqual:
                        if (left.getDomain().size() == 2) {
                            // Optimize reify <=> (left != min) to
                            // left = min + span * reify.
                            return equal(left, add(left.getLowBound(), mul(span, reify, boundDomain(0, span))));
                        }
                        // Optimize reify <=> (left != min) to
                        // (left >= min + reify) && (left <= min + span * reify).
                        return and(greaterThanEqual(left, add(constant, reify)),
                                lessThanEqual(left, add(constant, mul(span, reify, boundDomain(0, span)))));
                }
            }
        }
        return null;
    }

    /**
     * A lightweight (left, op, right) comparison triple used as a uniform
     * view of comparison-like expressions during rewriting.
     */
    private static class Compare {

        // Left operand of the comparison.
        final IrIntExpr left;
        // Comparison operator.
        final IrCompare.Op op;
        // Right operand of the comparison.
        final IrIntExpr right;

        public Compare(IrIntExpr left, IrCompare.Op op, IrIntExpr right) {
            this.left = left;
            this.op = op;
            this.right = right;
        }

        @Override
        public String toString() {
            return left + " " + op.getSyntax() + " " + right;
        }
    }
}
package org.databene.commons;

/**
 * {@link ParseException} child class that represents a syntax error.
 * <p>
 * Created: 24.03.2011 11:49:34
 * @since 0.5.8
 * @author Volker Bergmann
 */
public class SyntaxError extends ParseException {

    private static final long serialVersionUID = 835847387636212632L;

    /**
     * Creates a syntax error without position information.
     * @param message the error description
     * @param parsedText the text in which the error was found
     */
    public SyntaxError(String message, String parsedText) {
        super(message, parsedText);
    }

    /**
     * Creates a syntax error with position information.
     * @param message the error description
     * @param parsedText the text in which the error was found
     * @param line the line at which the error was found
     * @param column the column at which the error was found
     */
    public SyntaxError(String message, String parsedText, int line, int column) {
        super(message, parsedText, line, column);
    }

    /**
     * Creates a syntax error with position information and a root cause.
     * @param message the error description
     * @param cause the exception that caused this error
     * @param parsedText the text in which the error was found
     * @param line the line at which the error was found
     * @param column the column at which the error was found
     */
    public SyntaxError(String message, Throwable cause, String parsedText, int line, int column) {
        super(message, cause, parsedText, line, column);
    }
}
package org.dita.dost.writer; import static java.util.Arrays.asList; import static javax.xml.XMLConstants.NULL_NS_URI; import static org.dita.dost.util.Constants.*; import static org.dita.dost.util.URLUtils.*; import static org.dita.dost.util.XMLUtils.toList; import java.io.File; import java.net.URI; import java.util.*; import java.util.stream.Collectors; import org.dita.dost.exception.DITAOTException; import org.dita.dost.log.DITAOTLogger; import org.dita.dost.log.MessageBean; import org.dita.dost.log.MessageUtils; import org.dita.dost.util.*; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.AttributesImpl; /** * Filter for processing key reference elements in DITA files. * Instances are reusable but not thread-safe. */ public final class KeyrefPaser extends AbstractXMLFilter { /** * Set of attributes which should not be copied from * key definition to key reference which is {@code <topicref>}. */ private static final Set<String> no_copy; static { final Set<String> nc = new HashSet<>(); nc.add(ATTRIBUTE_NAME_ID); nc.add(ATTRIBUTE_NAME_CLASS); nc.add(ATTRIBUTE_NAME_XTRC); nc.add(ATTRIBUTE_NAME_XTRF); nc.add(ATTRIBUTE_NAME_HREF); nc.add(ATTRIBUTE_NAME_KEYS); nc.add(ATTRIBUTE_NAME_TOC); nc.add(ATTRIBUTE_NAME_PROCESSING_ROLE); no_copy = Collections.unmodifiableSet(nc); } /** * Set of attributes which should not be copied from * key definition to key reference which is not {@code <topicref>}. */ private static final Set<String> no_copy_topic; static { final Set<String> nct = new HashSet<>(no_copy); nct.add("query"); nct.add("search"); nct.add(ATTRIBUTE_NAME_TOC); nct.add(ATTRIBUTE_NAME_PRINT); nct.add(ATTRIBUTE_NAME_COPY_TO); nct.add(ATTRIBUTE_NAME_CHUNK); nct.add(ATTRIBUTE_NAME_NAVTITLE); no_copy_topic = Collections.unmodifiableSet(nct); } /** List of key reference element definitions. 
*/ private final static List<KeyrefInfo> keyrefInfos; static { final List<KeyrefInfo> ki = new ArrayList<>(); ki.add(new KeyrefInfo(TOPIC_AUTHOR, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(TOPIC_DATA, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(TOPIC_DATA_ABOUT, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(TOPIC_IMAGE, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(SVG_D_SVGREF, ATTRIBUTE_NAME_HREF, true, false)); ki.add(new KeyrefInfo(TOPIC_LINK, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(TOPIC_LQ, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(MAP_NAVREF, "mapref", true, false)); ki.add(new KeyrefInfo(TOPIC_PUBLISHER, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(TOPIC_SOURCE, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(MAP_TOPICREF, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_XREF, ATTRIBUTE_NAME_HREF, false, true)); ki.add(new KeyrefInfo(TOPIC_CITE, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_DT, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_KEYWORD, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_TERM, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_PH, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_INDEXTERM, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_INDEX_BASE, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_INDEXTERMREF, ATTRIBUTE_NAME_HREF, false, false)); ki.add(new KeyrefInfo(TOPIC_LONGQUOTEREF, ATTRIBUTE_NAME_HREF, false, false)); final Map<String, String> objectAttrs = new HashMap<>(); objectAttrs.put(ATTRIBUTE_NAME_ARCHIVEKEYREFS, ATTRIBUTE_NAME_ARCHIVE); objectAttrs.put(ATTRIBUTE_NAME_CLASSIDKEYREF, ATTRIBUTE_NAME_CLASSID); objectAttrs.put(ATTRIBUTE_NAME_CODEBASEKEYREF, ATTRIBUTE_NAME_CODEBASE); objectAttrs.put(ATTRIBUTE_NAME_DATAKEYREF, ATTRIBUTE_NAME_DATA); ki.add(new KeyrefInfo(TOPIC_OBJECT, 
objectAttrs, true, false)); final Map<String, String> paramAttrs = new HashMap<>(); paramAttrs.put(ATTRIBUTE_NAME_KEYREF, ATTRIBUTE_NAME_VALUE); ki.add(new KeyrefInfo(TOPIC_PARAM, paramAttrs, true, false)); keyrefInfos = Collections.unmodifiableList(ki); } private final static List<String> KEYREF_ATTRIBUTES = Collections.unmodifiableList(asList( ATTRIBUTE_NAME_KEYREF, ATTRIBUTE_NAME_ARCHIVEKEYREFS, ATTRIBUTE_NAME_CLASSIDKEYREF, ATTRIBUTE_NAME_CODEBASEKEYREF, ATTRIBUTE_NAME_DATAKEYREF )); /** * Stack used to store the current KeyScope, and its start uri. */ private final Deque<KeyScope> definitionMaps; /** * Stack used to store the place of current element * relative to the key reference element. */ private final Deque<Integer> keyrefLevalStack; /** * Used to store the place of current element * relative to the key reference element. If it is out of range of key * reference element it is zero, otherwise it is positive number. * It is also used to indicate whether current element is descendant of the * key reference element. */ private int keyrefLevel; /** * Indicates whether the keyref is valid. * The descendant element should know whether keyref is valid because keyrefs can be nested. */ private final Deque<Boolean> validKeyref; /** * Flag indicating whether the key reference element is empty, * if it is empty, it should pull matching content from the key definition. */ private boolean empty; /** Stack of element names of the element containing keyref attribute. */ private final Deque<String> elemName; /** Current element keyref info, {@code null} if not keyref type element. */ private KeyrefInfo currentElement; private boolean hasChecked; /** Flag stack to indicate whether key reference element has sub-elements. */ private final Deque<Boolean> hasSubElem; /** Current key definition. */ private KeyDef keyDef; /** Set of link targets which are not resource-only */ private Set<URI> normalProcessingRoleTargets; private MergeUtils mergeUtils; /** * Constructor. 
*/ public KeyrefPaser() { keyrefLevel = 0; definitionMaps = new ArrayDeque<>(); keyrefLevalStack = new ArrayDeque<>(); validKeyref = new ArrayDeque<>(); empty = true; elemName = new ArrayDeque<>(); hasSubElem = new ArrayDeque<>(); mergeUtils = new MergeUtils(); } @Override public void setLogger(final DITAOTLogger logger) { super.setLogger(logger); mergeUtils.setLogger(logger); } public void setKeyDefinition(final KeyScope definitionMap) { this.definitionMaps.push(definitionMap); } /** * Get set of link targets which have normal processing role. Paths are relative to current file. */ public Set<URI> getNormalProcessingRoleTargets() { return Collections.unmodifiableSet(normalProcessingRoleTargets); } /** * Process key references. * * @param filename file to process * @throws DITAOTException if key reference resolution failed */ @Override public void write(final File filename) throws DITAOTException { assert filename.isAbsolute(); super.write(new File(currentFile)); } @Override public void startDocument() throws SAXException { normalProcessingRoleTargets = new HashSet<>(); getContentHandler().startDocument(); } @Override public void characters(final char[] ch, final int start, final int length) throws SAXException { if (keyrefLevel != 0 && (length == 0 || new String(ch, start, length).trim().isEmpty())) { if (!hasChecked) { empty = true; } } else { hasChecked = true; empty = false; } getContentHandler().characters(ch, start, length); } @Override public void endElement(final String uri, final String localName, final String name) throws SAXException { if (keyrefLevel != 0 && empty) { // If current element is in the scope of key reference element // and the element is empty if (!validKeyref.isEmpty() && validKeyref.peek()) { final Element elem = keyDef.element; // Key reference is valid, // need to pull matching content from the key definition // If keyref on topicref, and no topicmeta, copy topicmeta from key definition if 
(elemName.peek().equals(MAP_TOPICREF.localName)) { final Optional<Element> topicmetaNode = XMLUtils.getChildElement(elem, MAP_TOPICMETA); if (topicmetaNode.isPresent()) { domToSax(topicmetaNode.get(), true, false); } } else if (!name.equals(elemName.peek())) { // If current element name doesn't equal the key reference element // just grab the content from the matching element of key definition final NodeList nodeList = elem.getElementsByTagName(name); if (nodeList.getLength() > 0) { final Element node = (Element) nodeList.item(0); final NodeList nList = node.getChildNodes(); for (int index = 0; index < nList.getLength(); index++) { final Node n = nList.item(index); if (n.getNodeType() == Node.TEXT_NODE) { final char[] ch = n.getNodeValue().toCharArray(); getContentHandler().characters(ch, 0, ch.length); break; } } } } else { // Current element name equals the key reference element // grab keyword or term from key definition if (!hasSubElem.peek() && currentElement != null) { final List<Element> keywords = toList(elem.getElementsByTagName(TOPIC_KEYWORD.localName)); final List<Element> keywordsInKeywords = keywords.stream() .filter(item -> TOPIC_KEYWORDS.matches(item.getParentNode())) .collect(Collectors.toList()); // XXX: No need to look for term as content model for keywords doesn't allow it // if (nodeList.getLength() == 0) { // nodeList = elem.getElementsByTagName(TOPIC_TERM.localName); if (!keywordsInKeywords.isEmpty()) { if (!currentElement.hasNestedElements) { // only one keyword or term is used. if (!currentElement.isEmpty) { domToSax(keywordsInKeywords.get(0), false); } } else { // If the key reference element carries href attribute // all keyword or term are used. 
if (TOPIC_LINK.matches(currentElement.type)) { final AttributesImpl atts = new AttributesImpl(); XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_CLASS, TOPIC_LINKTEXT.toString()); getContentHandler().startElement(NULL_NS_URI, TOPIC_LINKTEXT.localName, TOPIC_LINKTEXT.localName, atts); } else if (TOPIC_IMAGE.matches(currentElement.type)) { final AttributesImpl atts = new AttributesImpl(); XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_CLASS, TOPIC_ALT.toString()); getContentHandler().startElement(NULL_NS_URI, TOPIC_ALT.localName, TOPIC_ALT.localName, atts); } if (!currentElement.isEmpty) { for (final Element onekeyword: keywordsInKeywords) { domToSax(onekeyword, true); } } if (TOPIC_LINK.matches(currentElement.type)) { getContentHandler().endElement(NULL_NS_URI, TOPIC_LINKTEXT.localName, TOPIC_LINKTEXT.localName); } else if (TOPIC_IMAGE.matches(currentElement.type)) { getContentHandler().endElement(NULL_NS_URI, TOPIC_ALT.localName, TOPIC_ALT.localName); } } } else { if (TOPIC_LINK.matches(currentElement.type)) { // If the key reference element is link or its specialization, // should pull in the linktext final NodeList linktext = elem.getElementsByTagName(TOPIC_LINKTEXT.localName); if (linktext.getLength() > 0) { domToSax((Element) linktext.item(0), true); } else if (fallbackToNavtitleOrHref(elem)) { final NodeList navtitleElement = elem.getElementsByTagName(TOPIC_NAVTITLE.localName); if (navtitleElement.getLength() > 0) { writeLinktext((Element) navtitleElement.item(0)); } else { final String navtitle = elem.getAttribute(ATTRIBUTE_NAME_NAVTITLE); if (!navtitle.trim().isEmpty()) { writeLinktext(navtitle); } else { final String hrefAtt = elem.getAttribute(ATTRIBUTE_NAME_HREF); if (!hrefAtt.trim().isEmpty()) { writeLinktext(hrefAtt); } } } } } else if (TOPIC_IMAGE.matches(currentElement.type)) { // If the key reference element is an image or its specialization, // should pull in the linktext final NodeList linktext = 
elem.getElementsByTagName(TOPIC_LINKTEXT.localName); if (linktext.getLength() > 0) { writeAlt((Element) linktext.item(0)); } else if (fallbackToNavtitleOrHref(elem)) { final NodeList navtitleElement = elem.getElementsByTagName(TOPIC_NAVTITLE.localName); if (navtitleElement.getLength() > 0) { writeAlt((Element) navtitleElement.item(0)); } else { final String navtitle = elem.getAttribute(ATTRIBUTE_NAME_NAVTITLE); if (!navtitle.trim().isEmpty()) { writeAlt(navtitle); } } } } else if (!currentElement.isEmpty && fallbackToNavtitleOrHref(elem)) { final NodeList linktext = elem.getElementsByTagName(TOPIC_LINKTEXT.localName); if (linktext.getLength() > 0) { domToSax((Element) linktext.item(0), false); } else { final NodeList navtitleElement = elem.getElementsByTagName(TOPIC_NAVTITLE.localName); if (navtitleElement.getLength() > 0) { domToSax((Element) navtitleElement.item(0), false); } else { final String navtitle = elem.getAttribute(ATTRIBUTE_NAME_NAVTITLE); if (!navtitle.trim().isEmpty()) { final char[] ch = navtitle.toCharArray(); getContentHandler().characters(ch, 0, ch.length); } else { final String hrefAtt = elem.getAttribute(ATTRIBUTE_NAME_HREF); if (!hrefAtt.trim().isEmpty()) { final char[] ch = hrefAtt.toCharArray(); getContentHandler().characters(ch, 0, ch.length); } } } } } } } } } } if (keyrefLevel != 0) { keyrefLevel empty = false; } if (keyrefLevel == 0 && !keyrefLevalStack.isEmpty()) { // To the end of key reference, pop the stacks. 
keyrefLevel = keyrefLevalStack.pop(); validKeyref.pop(); elemName.pop(); hasSubElem.pop(); } definitionMaps.pop(); getContentHandler().endElement(uri, localName, name); } /** * Write linktext element * * @param srcElem element content */ private void writeLinktext(Element srcElem) throws SAXException { final AttributesImpl atts = new AttributesImpl(); XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_CLASS, TOPIC_LINKTEXT.toString()); getContentHandler().startElement(NULL_NS_URI, TOPIC_LINKTEXT.localName, TOPIC_LINKTEXT.localName, atts); domToSax(srcElem, false); getContentHandler().endElement(NULL_NS_URI, TOPIC_LINKTEXT.localName, TOPIC_LINKTEXT.localName); } /** * Write linktext element * * @param navtitle element text content */ private void writeLinktext(final String navtitle) throws SAXException { final AttributesImpl atts = new AttributesImpl(); XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_CLASS, TOPIC_LINKTEXT.toString()); getContentHandler().startElement(NULL_NS_URI, TOPIC_LINKTEXT.localName, TOPIC_LINKTEXT.localName, atts); final char[] ch = navtitle.toCharArray(); getContentHandler().characters(ch, 0, ch.length); getContentHandler().endElement(NULL_NS_URI, TOPIC_LINKTEXT.localName, TOPIC_LINKTEXT.localName); } /** * Write alt element * * @param srcElem element content */ private void writeAlt(Element srcElem) throws SAXException { final AttributesImpl atts = new AttributesImpl(); XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_CLASS, TOPIC_ALT.toString()); getContentHandler().startElement(NULL_NS_URI, TOPIC_ALT.localName, TOPIC_ALT.localName, atts); domToSax(srcElem, false); getContentHandler().endElement(NULL_NS_URI, TOPIC_ALT.localName, TOPIC_ALT.localName); } /** * Write alt element * * @param navtitle element text content */ private void writeAlt(final String navtitle) throws SAXException { final AttributesImpl atts = new AttributesImpl(); XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_CLASS, TOPIC_ALT.toString()); 
getContentHandler().startElement(NULL_NS_URI, TOPIC_ALT.localName, TOPIC_ALT.localName, atts); final char[] ch = navtitle.toCharArray(); getContentHandler().characters(ch, 0, ch.length); getContentHandler().endElement(NULL_NS_URI, TOPIC_ALT.localName, TOPIC_ALT.localName); } @Override public void startElement(final String uri, final String localName, final String name, final Attributes atts) throws SAXException { final KeyScope childScope = Optional.ofNullable(atts.getValue(ATTRIBUTE_NAME_KEYSCOPE)) .flatMap(n -> Optional.ofNullable(definitionMaps.peek().getChildScope(n))) .orElse(definitionMaps.peek()); definitionMaps.push(childScope); currentElement = null; final String cls = atts.getValue(ATTRIBUTE_NAME_CLASS); for (final KeyrefInfo k : keyrefInfos) { if (k.type.matches(cls)) { currentElement = k; break; } } Attributes resAtts = atts; hasChecked = false; empty = true; if (!hasKeyref(atts) || currentElement == null) { // If the keyrefLevel doesn't equal 0, it means that current element is under the key reference element; if (keyrefLevel != 0) { keyrefLevel++; hasSubElem.pop(); hasSubElem.push(true); } } else { elemName.push(name); if (keyrefLevel != 0) { keyrefLevalStack.push(keyrefLevel); hasSubElem.pop(); hasSubElem.push(true); } hasSubElem.push(false); keyrefLevel = 1; resAtts = processElement(atts); } getContentHandler().startElement(uri, localName, name, resAtts); } private Attributes processElement(final Attributes atts) { final AttributesImpl resAtts = new AttributesImpl(atts); boolean valid = false; for (final Map.Entry<String, String> attrPair: currentElement.attrs.entrySet()) { final String keyrefAttr = attrPair.getKey(); final String refAttr = attrPair.getValue(); final String keyrefValue = atts.getValue(keyrefAttr); if (keyrefValue != null) { final int slashIndex = keyrefValue.indexOf(SLASH); String keyName = keyrefValue; String elementId = ""; if (slashIndex != -1) { keyName = keyrefValue.substring(0, slashIndex); elementId = 
keyrefValue.substring(slashIndex); } keyDef = definitionMaps.peek().get(keyName); final Element elem = keyDef != null ? keyDef.element : null; // If definition is not null if (keyDef != null) { if (currentElement != null) { final NamedNodeMap attrs = elem.getAttributes(); final URI href = keyDef.href; if (href != null && !href.toString().isEmpty()) { if (TOPIC_IMAGE.matches(currentElement.type)) { valid = true; final URI target = keyDef.source.resolve(href); final URI relativeTarget = URLUtils.getRelativePath(currentFile, target); final URI targetOutput = normalizeHrefValue(relativeTarget, elementId); XMLUtils.addOrSetAttribute(resAtts, refAttr, targetOutput.toString()); } else if (isLocalDita(elem) && keyDef.source != null) { valid = true; final URI target = keyDef.source.resolve(href); final URI topicFile = currentFile.resolve(stripFragment(target)); final URI relativeTarget = setFragment(URLUtils.getRelativePath(currentFile, topicFile), target.getFragment()); String topicId = null; if (relativeTarget.getFragment() == null && !"".equals(elementId)) { topicId = getFirstTopicId(topicFile); } final URI targetOutput = normalizeHrefValue(relativeTarget, elementId, topicId); XMLUtils.addOrSetAttribute(resAtts, refAttr, targetOutput.toString()); if (keyDef.scope != null && !keyDef.scope.equals(ATTR_SCOPE_VALUE_LOCAL)) { XMLUtils.addOrSetAttribute(resAtts, ATTRIBUTE_NAME_SCOPE, keyDef.scope); } else { XMLUtils.removeAttribute(resAtts, ATTRIBUTE_NAME_SCOPE); } if (keyDef.format != null && !keyDef.format.equals(ATTR_FORMAT_VALUE_DITA)) { XMLUtils.addOrSetAttribute(resAtts, ATTRIBUTE_NAME_FORMAT, keyDef.format); } else { XMLUtils.removeAttribute(resAtts, ATTRIBUTE_NAME_FORMAT); } // TODO: This should be a separate SAX filter if (!ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(atts.getValue(ATTRIBUTE_NAME_PROCESSING_ROLE))) { final URI f = currentFile.resolve(targetOutput); normalProcessingRoleTargets.add(f); } } else { valid = true; if (href.isAbsolute() || (keyDef.scope 
!= null && keyDef.scope.equals(ATTR_SCOPE_VALUE_EXTERNAL))) { final URI targetOutput = normalizeHrefValue(href, elementId); XMLUtils.addOrSetAttribute(resAtts, refAttr, targetOutput.toString()); } else { //Adjust path for peer or local references with relative path final URI target = keyDef.source.resolve(href); final URI relativeTarget = URLUtils.getRelativePath(currentFile, target); final URI targetOutput = normalizeHrefValue(relativeTarget, elementId); XMLUtils.addOrSetAttribute(resAtts, refAttr, targetOutput.toString()); } if (keyDef.scope != null && !keyDef.scope.equals(ATTR_SCOPE_VALUE_LOCAL)) { XMLUtils.addOrSetAttribute(resAtts, ATTRIBUTE_NAME_SCOPE, keyDef.scope); } else { XMLUtils.removeAttribute(resAtts, ATTRIBUTE_NAME_SCOPE); } if (keyDef.format != null && !keyDef.format.equals(ATTR_FORMAT_VALUE_DITA)) { XMLUtils.addOrSetAttribute(resAtts, ATTRIBUTE_NAME_FORMAT, keyDef.format); } else { XMLUtils.removeAttribute(resAtts, ATTRIBUTE_NAME_FORMAT); } } } else if (href == null || href.toString().isEmpty()) { // Key definition does not carry an href or href equals "". valid = true; XMLUtils.removeAttribute(resAtts, ATTRIBUTE_NAME_SCOPE); XMLUtils.removeAttribute(resAtts, ATTRIBUTE_NAME_HREF); XMLUtils.removeAttribute(resAtts, ATTRIBUTE_NAME_TYPE); XMLUtils.removeAttribute(resAtts, ATTRIBUTE_NAME_FORMAT); } else { // key does not exist. final MessageBean m = definitionMaps.peek().name == null ? 
MessageUtils.getMessage("DOTJ047I", atts.getValue(ATTRIBUTE_NAME_KEYREF)) : MessageUtils.getMessage("DOTJ048I", atts.getValue(ATTRIBUTE_NAME_KEYREF), definitionMaps.peek().name); logger.info(m.setLocation(atts).toString()); } if (valid) { if (MAP_TOPICREF.matches(currentElement.type)) { for (int index = 0; index < attrs.getLength(); index++) { final Attr attr = (Attr) attrs.item(index); if (!no_copy.contains(attr.getNodeName())) { XMLUtils.removeAttribute(resAtts, attr.getNodeName()); XMLUtils.addOrSetAttribute(resAtts, attr); } } } else { for (int index = 0; index < attrs.getLength(); index++) { final Attr attr = (Attr) attrs.item(index); if (!no_copy_topic.contains(attr.getNodeName()) && (attr.getNodeName().equals(refAttr) || resAtts.getIndex(attr.getNodeName()) == -1)) { XMLUtils.removeAttribute(resAtts, attr.getNodeName()); XMLUtils.addOrSetAttribute(resAtts, attr); } } } } } } else { // key does not exist final MessageBean m = definitionMaps.peek().name == null ? MessageUtils.getMessage("DOTJ047I", atts.getValue(ATTRIBUTE_NAME_KEYREF)) : MessageUtils.getMessage("DOTJ048I", atts.getValue(ATTRIBUTE_NAME_KEYREF), definitionMaps.peek().name); logger.info(m.setLocation(atts).toString()); } validKeyref.push(valid); } } return resAtts; } private boolean hasKeyref(final Attributes atts) { if (TOPIC_PARAM.matches(atts) && (atts.getValue(ATTRIBUTE_NAME_VALUETYPE) != null && !atts.getValue(ATTRIBUTE_NAME_VALUETYPE).equals(ATTRIBUTE_VALUETYPE_VALUE_REF))) { return false; } for (final String attr: KEYREF_ATTRIBUTES) { if (atts.getIndex(attr) != -1) { return true; } } return false; } private boolean isLocalDita(final Element elem) { final String scopeValue = elem.getAttribute(ATTRIBUTE_NAME_SCOPE); final String formatValue = elem.getAttribute(ATTRIBUTE_NAME_FORMAT); return ("".equals(scopeValue) || ATTR_SCOPE_VALUE_LOCAL.equals(scopeValue)) && ("".equals(formatValue) || ATTR_FORMAT_VALUE_DITA.equals(formatValue) || ATTR_FORMAT_VALUE_DITAMAP.equals(formatValue)); } /** * 
Return true when keyref text resolution should use navtitle as a final fallback. * @param elem Key definition element */ private boolean fallbackToNavtitleOrHref(final Element elem) { final String hrefValue = elem.getAttribute(ATTRIBUTE_NAME_HREF); final String locktitleValue = elem.getAttribute(ATTRIBUTE_NAME_LOCKTITLE); return ((ATTRIBUTE_NAME_LOCKTITLE_VALUE_YES.equals(locktitleValue)) || ("".equals(hrefValue)) || !(isLocalDita(elem))); } /** * Serialize DOM node into a SAX stream, while modifying map classes to topic classes for common elements. * * @param elem element to serialize * @param retainElements {@code true} to serialize elements, {@code false} to only serialize text nodes. */ private void domToSax(final Element elem, final boolean retainElements) throws SAXException { domToSax(elem, retainElements, true); } /** * Serialize DOM node into a SAX stream. * * @param elem element to serialize * @param retainElements {@code true} to serialize elements, {@code false} to only serialize text nodes. 
* @param swapMapClass {@code true} to change map/ to topic/ in common class attributes, {@code false} to leave as is */ private void domToSax(final Element elem, final boolean retainElements, final boolean swapMapClass) throws SAXException { if (retainElements) { final AttributesImpl atts = new AttributesImpl(); final NamedNodeMap attrs = elem.getAttributes(); for (int i = 0; i < attrs.getLength(); i++) { final Attr a = (Attr) attrs.item(i); if (a.getNodeName().equals(ATTRIBUTE_NAME_CLASS) && swapMapClass) { XMLUtils.addOrSetAttribute(atts, ATTRIBUTE_NAME_CLASS, changeclassValue(a.getNodeValue())); } else { XMLUtils.addOrSetAttribute(atts, a); } } getContentHandler().startElement(NULL_NS_URI, elem.getNodeName(), elem.getNodeName(), atts); } final NodeList nodeList = elem.getChildNodes(); for (int i = 0; i<nodeList.getLength(); i++) { final Node node = nodeList.item(i); if (node.getNodeType() == Node.ELEMENT_NODE) { final Element e = (Element) node; // retain tm and text elements if (TOPIC_TM.matches(e) || TOPIC_TEXT.matches(e)) { domToSax(e, true, swapMapClass); } else { domToSax(e, retainElements, swapMapClass); } } else if (node.getNodeType() == Node.TEXT_NODE) { final char[] ch = node.getNodeValue().toCharArray(); getContentHandler().characters(ch, 0, ch.length); } } if (retainElements) { getContentHandler().endElement(NULL_NS_URI, elem.getNodeName(), elem.getNodeName()); } } /** * Change map type to topic type. */ private String changeclassValue(final String classValue) { final DitaClass cls = new DitaClass(classValue); if (cls.equals(MAP_LINKTEXT)) { return TOPIC_LINKTEXT.toString(); } else if (cls.equals(MAP_SEARCHTITLE)) { return TOPIC_SEARCHTITLE.toString(); } else if (cls.equals(MAP_SHORTDESC)) { return TOPIC_SHORTDESC.toString(); } else { return cls.toString(); } } /** * change elementId into topicId if there is no topicId in key definition. 
*/ private static URI normalizeHrefValue(final URI keyName, final String tail) { if (keyName.getFragment() == null) { return toURI(keyName + tail.replaceAll(SLASH, SHARP)); } return toURI(keyName + tail); } /** * Get first topic id */ private String getFirstTopicId(final URI topicFile) { return mergeUtils.getFirstTopicId(topicFile, false); } /** * Insert topic id into href */ private static URI normalizeHrefValue(final URI fileName, final String tail, final String topicId) { //Insert first topic id only when topicid is not set in keydef //and keyref has elementid if (fileName.getFragment() == null && !"".equals(tail)) { return setFragment(fileName, topicId + tail); } return toURI(fileName + tail); } private static final class KeyrefInfo { /** DITA class. */ final DitaClass type; /** Map of key reference to reference attributes. */ final Map<String, String> attrs; /** Element has nested elements. */ final boolean hasNestedElements; /** Element is empty. */ final boolean isEmpty; /** * Construct a new key reference info object. * * @param type element type * @param attrs Map of key reference to reference attributes * @param isEmpty flag if element is empty * @param hasNestedElements element is a reference type */ KeyrefInfo(final DitaClass type, final Map<String, String> attrs, final boolean isEmpty, final boolean hasNestedElements) { this.type = type; this.attrs = attrs; this.isEmpty = isEmpty; this.hasNestedElements = hasNestedElements; } /** * Construct a new key reference info object. 
* * @param type element type * @param refAttr reference attribute name * @param isEmpty flag if element is empty * @param hasNestedElements element is a reference type */ KeyrefInfo(final DitaClass type, final String refAttr, final boolean isEmpty, final boolean hasNestedElements) { final Map<String, String> attrs = new HashMap<>(); attrs.put(ATTRIBUTE_NAME_KEYREF, refAttr); this.type = type; this.attrs = attrs; this.isEmpty = isEmpty; this.hasNestedElements = hasNestedElements; } } }
package org.jboss.sasl; import static org.jboss.sasl.anonymous.AbstractAnonymousFactory.ANONYMOUS; import static org.jboss.sasl.digest.DigestMD5ServerFactory.DIGEST_MD5; import static org.jboss.sasl.clienttoken.ClientTokenClientFactory.JBOSS_CLIENTTOKEN; import javax.security.sasl.SaslClientFactory; import javax.security.sasl.SaslServerFactory; import java.security.Provider; import org.jboss.sasl.anonymous.AnonymousClientFactory; import org.jboss.sasl.anonymous.AnonymousServerFactory; import org.jboss.sasl.clienttoken.ClientTokenClientFactory; import org.jboss.sasl.digest.DigestMD5ServerFactory; /** * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a> * @author <a href="mailto:darran.lofthouse@jboss.com">Darran Lofthouse</a> */ public final class JBossSaslProvider extends Provider { private static final long serialVersionUID = 7613128233053194670L; private static final String SASL_CLIENT_FACTORY = SaslClientFactory.class.getSimpleName(); private static final String SASL_SERVER_FACTORY = SaslServerFactory.class.getSimpleName(); /** * Construct a new instance. */ public JBossSaslProvider() { super("jboss-sasl", 1.0, "JBoss SASL Provider"); put(SASL_CLIENT_FACTORY + "." + ANONYMOUS, AnonymousClientFactory.class.getName()); put(SASL_SERVER_FACTORY + "." + ANONYMOUS, AnonymousServerFactory.class.getName()); //put(SASL_SERVER_FACTORY + "." + DIGEST_MD5, DigestMD5ServerFactory.class.getName()); //put(SASL_CLIENT_FACTORY + "." + JBOSS_CLIENTTOKEN, ClientTokenClientFactory.class.getName()); } }
package org.jenetics;

import static org.jenetics.util.object.hashCodeOf;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import javolution.xml.XMLFormat;
import javolution.xml.XMLSerializable;
import javolution.xml.stream.XMLStreamException;

import org.jscience.mathematics.number.Integer64;

import org.jenetics.util.Array;
import org.jenetics.util.Function;
import org.jenetics.util.Factory;
import org.jenetics.util.ISeq;

/**
 * Chromosome of 64-bit integer genes ({@link Integer64Gene}), supporting
 * javolution XML serialization and Java object serialization.
 */
public class Integer64Chromosome extends NumberChromosome<Integer64, Integer64Gene> implements XMLSerializable {

    private static final long serialVersionUID = 1L;

    /**
     * Create a new chromosome backed by the given gene sequence.
     *
     * @param genes the genes of the new chromosome
     */
    protected Integer64Chromosome(final ISeq<Integer64Gene> genes) {
        super(genes);
    }

    /**
     * Create a new chromosome from the given genes.
     *
     * @param genes the genes of the new chromosome
     */
    public Integer64Chromosome(final Integer64Gene... genes) {
        super(new Array<>(genes).toISeq());
    }

    /**
     * Create a new random Integer64Chromosome.
     *
     * @param min the minimum value of the {@link Integer64Gene}s.
     * @param max the maximum value of the {@link Integer64Gene}s.
     * @param length the length of the chromosome.
     * @throws NullPointerException if {@code min} or {@code max} is {@code null}.
     */
    public Integer64Chromosome(
        final Integer64 min,
        final Integer64 max,
        final int length
    ) {
        super(
            new Array<Integer64Gene>(length).fill(
                Integer64Gene.valueOf(min, max).asFactory()
            ).toISeq()
        );
        // Freshly generated genes are always within [min, max], so the
        // chromosome can be marked valid without re-checking.
        _valid = true;
    }

    /**
     * Create a new random Integer64Chromosome of length one.
     *
     * @param min the min value of the {@link Integer64Gene}s.
     * @param max the max value of the {@link Integer64Gene}s.
     */
    public Integer64Chromosome(final long min, final long max) {
        this(Integer64.valueOf(min), Integer64.valueOf(max));
    }

    /**
     * Create a new random Integer64Chromosome with length one.
     *
     * @param min the min value of the {@link Integer64Gene}s.
     * @param max the max value of the {@link Integer64Gene}s.
     * @throws NullPointerException if {@code min} or {@code max} is {@code null}.
     */
    public Integer64Chromosome(final Integer64 min, final Integer64 max) {
        this(min, max, 1);
    }

    /**
     * Create a new random Integer64Chromosome.
     *
     * @param min the min value of the {@link Integer64Gene}s.
     * @param max the max value of the {@link Integer64Gene}s.
     * @param length the length of the chromosome.
     */
    public Integer64Chromosome(final long min, final long max, int length) {
        this(Integer64.valueOf(min), Integer64.valueOf(max), length);
    }

    @Override
    public Integer64Chromosome newInstance(final ISeq<Integer64Gene> genes) {
        return new Integer64Chromosome(genes);
    }

    /**
     * Return a more specific view of this chromosome factory.
     *
     * @return a more specific view of this chromosome factory.
     */
    @SuppressWarnings("unchecked")
    public Factory<Integer64Chromosome> asFactory() {
        return (Factory<Integer64Chromosome>)(Object)this;
    }

    /**
     * Create a new, <em>random</em> chromosome with the same bounds and length.
     */
    @Override
    public Integer64Chromosome newInstance() {
        return new Integer64Chromosome(_min, _max, length());
    }

    @Override
    public int hashCode() {
        return hashCodeOf(getClass()).and(super.hashCode()).value();
    }

    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        }
        return obj instanceof Integer64Chromosome && super.equals(obj);
    }

    /**
     * Return a {@link Function} which returns the gene array from this
     * {@link Chromosome}.
     */
    public static final Function<AbstractChromosome<Integer64Gene>, ISeq<Integer64Gene>>
        Genes = AbstractChromosome.genes();

    /**
     * Return a {@link Function} which returns the first {@link Gene} from this
     * {@link Chromosome}.
     */
    public static final Function<Chromosome<Integer64Gene>, Integer64Gene>
        Gene = AbstractChromosome.gene();

    /**
     * Return a {@link Function} which returns the {@link Gene} with the given
     * {@code index} from this {@link Chromosome}.
     */
    public static final Function<Chromosome<Integer64Gene>, Integer64Gene>
    Gene(final int index) {
        return AbstractChromosome.gene(index);
    }

    // Javolution XML format: writes length/min/max as attributes and the
    // allele of each gene as nested content; reads them back symmetrically.
    static final XMLFormat<Integer64Chromosome>
    XML = new XMLFormat<Integer64Chromosome>(Integer64Chromosome.class)
    {
        private static final String LENGTH = "length";
        private static final String MIN = "min";
        private static final String MAX = "max";

        @Override
        public Integer64Chromosome newInstance(
            final Class<Integer64Chromosome> cls, final InputElement xml
        )
            throws XMLStreamException
        {
            final int length = xml.getAttribute(LENGTH, 0);
            final long min = xml.getAttribute(MIN, 0L);
            final long max = xml.getAttribute(MAX, 100L);
            final Array<Integer64Gene> genes = new Array<>(length);

            for (int i = 0; i < length; ++i) {
                final Integer64 value = xml.getNext();
                genes.set(i, Integer64Gene.valueOf(value.longValue(), min, max));
            }

            final Integer64Chromosome chromosome = new Integer64Chromosome(genes.toISeq());
            chromosome._min = Integer64.valueOf(min);
            chromosome._max = Integer64.valueOf(max);

            return chromosome;
        }
        @Override
        public void write(final Integer64Chromosome chromosome, final OutputElement xml)
            throws XMLStreamException
        {
            xml.setAttribute(LENGTH, chromosome.length());
            xml.setAttribute(MIN, chromosome._min.longValue());
            xml.setAttribute(MAX, chromosome._max.longValue());
            for (Integer64Gene gene : chromosome) {
                xml.add(gene.getAllele());
            }
        }
        @Override
        public void read(final InputElement element, final Integer64Chromosome chromosome)
            throws XMLStreamException
        {
            // Intentionally empty: all state is restored in newInstance(...).
        }
    };

    // Custom serialized form: length, min, max, then the raw long value of
    // each gene (more compact than default field-by-field serialization).
    private void writeObject(final ObjectOutputStream out)
        throws IOException
    {
        out.defaultWriteObject();

        out.writeInt(length());
        out.writeLong(_min.longValue());
        out.writeLong(_max.longValue());

        for (Integer64Gene gene : _genes) {
            out.writeLong(gene.longValue());
        }
    }

    private void readObject(final ObjectInputStream in)
        throws IOException, ClassNotFoundException
    {
        in.defaultReadObject();

        final int length = in.readInt();
        Integer64 min = Integer64.valueOf(in.readLong());
        Integer64 max = Integer64.valueOf(in.readLong());
        _min = min;
        _max = max;

        final Array<Integer64Gene> genes = new Array<>(length);
        for (int i = 0; i < length; ++i) {
            genes.set(i, Integer64Gene.valueOf(Integer64.valueOf(in.readLong()), min, max));
        }

        _genes = genes.toISeq();
    }
}
package threads;

import java.io.IOException;

import buffer.DataForSerialOutput;
import buffer.IncomingData;
import buffer.IncomingDataBuffer;
import enums.SerialProtocol;
import permissions.ReadOnly;
import requete.RequeteSTM;
import requete.RequeteType;
import robot.RobotReal;
import serie.SerialInterface;
import table.GameElementNames;
import table.GameElementType;
import table.Table;
import utils.Config;
import utils.ConfigInfo;
import utils.Log;
import utils.Vec2;
import container.Service;
import debug.IncomingDataDebug;
import debug.IncomingDataDebugBuffer;
import enums.RobotColor;
import enums.Tribool;
import exceptions.MissingCharacterException;
import hook.HookFactory;
import obstacles.types.ObstacleCircular;

/**
 * Thread that reads the robot's serial link byte by byte and dispatches each
 * incoming frame (position updates, sensor readings, match events, shell
 * configuration, resend requests, ...) to the relevant buffers and services.
 *
 * Wire format, as consumed below: header 0x55 0xAA, then a 16-bit packet id
 * (high byte first), a command byte, command-specific payload bytes, and a
 * one-byte checksum (bitwise NOT of the 8-bit sum, verified in
 * verifieChecksum which also consumes that byte).
 */
public class ThreadSerialInput extends Thread implements Service
{
	protected Log log;
	protected Config config;
	private SerialInterface serie;
	private IncomingDataBuffer buffer;
	private HookFactory hookfactory;
	private DataForSerialOutput output;
	private RobotReal robot;
	private Table table;
	private IncomingDataDebugBuffer bufferdebug;

	// Last shell ("coquillage") placement code received from the low level.
	private int codeCoquillage;
	private RequeteSTM requete;
	// Sensor frames are only forwarded once the match has started.
	private boolean capteursOn = false;
	private volatile int nbCapteurs;
	private boolean matchDemarre = false;

	// Scratch buffer holding the bytes of the frame currently being decoded.
	private int[] lecture = new int[100];

	// Id of the last packet processed; used to detect and re-request gaps.
	private int idDernierPaquet = -1;

	// Offsets of the fixed fields inside "lecture".
	private final static int ID_FORT = 0;   // packet id, high byte
	private final static int ID_FAIBLE = 1; // packet id, low byte
	private final static int COMMANDE = 2;  // command byte
	private final static int PARAM = 3;     // first payload byte

	public ThreadSerialInput(Log log, Config config, SerialInterface serie, IncomingDataBuffer buffer, IncomingDataDebugBuffer bufferdebug, RequeteSTM requete, RobotReal robot, Table table, HookFactory hookfactory, DataForSerialOutput output)
	{
		this.log = log;
		this.config = config;
		this.serie = serie;
		this.buffer = buffer;
		this.requete = requete;
		this.hookfactory = hookfactory;
		this.output = output;
		this.robot = robot;
		this.table = table;
		this.bufferdebug = bufferdebug;
		// Resume the packet-id sequence where the serial layer left off.
		idDernierPaquet = serie.getFirstID();
	}

	@Override
	public void run()
	{
		/**
		 * Main decode loop: waits for bytes from the STM, validates the frame
		 * header, id and checksum, then dispatches on the command byte.
		 */
		while(true)
		{
			try {
				synchronized(serie)
				{
					// Block until the serial layer signals available data.
					while(!serie.available())
						serie.wait();

					int index = 0;
					try {
						// Two-byte frame header: 0x55 then 0xAA.
						if(serie.read() != 0x55)
						{
							log.warning("Mauvais entête (0x55)");
							continue;
						}
						if(serie.read() != 0xAA)
						{
							log.warning("Mauvais entête (0xAA)");
							continue;
						}
						lecture[index++] = serie.read(); // id, part 1 (high byte)
						lecture[index++] = serie.read(); // id, part 2 (low byte)
						int idPaquet = (lecture[ID_FORT] << 8) + lecture[ID_FAIBLE];
						if(idPaquet > idDernierPaquet)
						{
							idDernierPaquet++;
							// Ask for a resend of every packet id we skipped.
							while(idPaquet > idDernierPaquet)
							{
								if(Config.debugSerie)
									log.warning("On a raté un message");
								output.askResend(idDernierPaquet++);
							}
							// at this point idDernierPaquet == idPaquet
						}
					} catch(MissingCharacterException e)
					{
						log.critical("La série est trop longue à fournir la commande, annulation");
						continue;
					}

					lecture[index++] = serie.read(); // command byte

					if(lecture[COMMANDE] == SerialProtocol.IN_PING.codeInt)
					{
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						output.sendPong();
					}
					else if(lecture[COMMANDE] == SerialProtocol.IN_PONG1.codeInt)
					{
						lecture[index++] = serie.read(); // pong, part 2
						lecture[index++] = serie.read(); // checksum
						// NOTE(review): the checksum byte is read but never
						// verified here, unlike every other branch — confirm
						// whether that is intentional.
						if(lecture[COMMANDE+1] != SerialProtocol.IN_PONG2.codeInt)
							log.warning("Pong reçu non conforme");
						else if(Config.debugSerie)
							log.debug("Reçu pong");
					}
					else if(lecture[COMMANDE] == SerialProtocol.IN_DEBUG_ASSER.codeInt)
					{
						// 16 payload bytes = eight big-endian 16-bit debug values.
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						bufferdebug.add(new IncomingDataDebug((lecture[PARAM] << 8) + lecture[PARAM+1],
								(lecture[PARAM+2] << 8) + lecture[PARAM+3],
								(lecture[PARAM+4] << 8) + lecture[PARAM+5],
								(lecture[PARAM+6] << 8) + lecture[PARAM+7],
								(lecture[PARAM+8] << 8) + lecture[PARAM+9],
								(lecture[PARAM+10] << 8) + lecture[PARAM+11],
								(lecture[PARAM+12] << 8) + lecture[PARAM+13],
								(lecture[PARAM+14] << 8) + lecture[PARAM+15]));
					}
					else if((lecture[COMMANDE] & SerialProtocol.MASK_LAST_BIT.codeInt) == SerialProtocol.IN_XYO.codeInt)
					{
						// Odometry frame: x and y packed on 12 bits each,
						// then orientation (mrad) and curvature. The low bit
						// of the command encodes the driving direction.
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read(); // curvature
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						// x: 12 bits, offset by 1500 so the wire value is unsigned.
						int xRobot = lecture[PARAM] << 4;
						xRobot += lecture[PARAM+1] >> 4;
						xRobot -= 1500;
						// y: the remaining 4 bits of byte 1 plus byte 2.
						int yRobot = (lecture[PARAM+1] & 0x0F) << 8;
						yRobot = yRobot + lecture[PARAM+2];
						Vec2<ReadOnly> positionRobot = new Vec2<ReadOnly>(xRobot, yRobot);
						double orientationRobot = ((lecture[PARAM+3] << 8) + lecture[PARAM+4]) / 1000.;
						double courbure = lecture[PARAM+5] / 1000.;
						boolean enMarcheAvant = lecture[COMMANDE] == SerialProtocol.IN_XYO.codeInt;
						robot.setPositionOrientationCourbureDirection(positionRobot, orientationRobot, courbure, enMarcheAvant);
					}
					else if((lecture[COMMANDE] & SerialProtocol.MASK_LAST_BIT.codeInt) == SerialProtocol.IN_INFO_CAPTEURS.codeInt)
					{
						// Same odometry payload as IN_XYO, followed by the
						// sensor readings: two 12-bit values per 3 bytes.
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						lecture[index++] = serie.read(); // curvature
						for(int i = 0; i < nbCapteurs / 2; i++)
						{
							lecture[index++] = serie.read(); // sensor
							lecture[index++] = serie.read(); // sensor
							lecture[index++] = serie.read(); // sensor
						}
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						int xRobot = lecture[PARAM] << 4;
						xRobot += lecture[PARAM+1] >> 4;
						xRobot -= 1500;
						int yRobot = (lecture[PARAM+1] & 0x0F) << 8;
						yRobot = yRobot + lecture[PARAM+2];
						Vec2<ReadOnly> positionRobot = new Vec2<ReadOnly>(xRobot, yRobot);
						double orientationRobot = ((lecture[PARAM+3] << 8) + lecture[PARAM+4]) / 1000.;
						double courbure = lecture[PARAM+5] / 1000.;
						boolean enMarcheAvant = lecture[COMMANDE] == SerialProtocol.IN_INFO_CAPTEURS.codeInt;

						/**
						 * Unpack what the sensors see: each 3-byte group holds
						 * two 12-bit raw IR values, converted to distances.
						 */
						int[] mesures = new int[nbCapteurs];
						for(int i = 0; i < nbCapteurs / 2; i++)
						{
							mesures[2*i] = convertIR((lecture[PARAM+6+3*i] << 4) + (lecture[PARAM+6+3*i+1] >> 4));
							// Odd sensor count: the last group carries only one value.
							if(2*i+1 != nbCapteurs-1)
								mesures[2*i+1] = convertIR(((lecture[PARAM+6+3*i+1] & 0x0F) << 8) + lecture[PARAM+6+3*i+2]);
						}
						log.debug("Le robot est en "+positionRobot);

						robot.setPositionOrientationCourbureDirection(positionRobot, orientationRobot, courbure, enMarcheAvant);
						// Sensor data is only forwarded once the match started.
						if(capteursOn)
							buffer.add(new IncomingData(mesures, positionRobot, orientationRobot, enMarcheAvant));
					}
					/**
					 * The robot's colour, encoded in the command's low bit.
					 */
					else if((lecture[COMMANDE] & SerialProtocol.MASK_LAST_BIT.codeInt) == SerialProtocol.IN_COULEUR_ROBOT.codeInt)
					{
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						if(!matchDemarre)
							config.set(ConfigInfo.COULEUR, RobotColor.getCouleur(lecture[COMMANDE] != SerialProtocol.IN_COULEUR_ROBOT.codeInt));
						else
							log.warning("Le bas niveau a signalé un changement de couleur en plein match");
					}
					else if((lecture[COMMANDE] & SerialProtocol.MASK_LAST_BIT.codeInt) == SerialProtocol.IN_PRESENCE_BALISE.codeInt)
					{
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						if(!matchDemarre)
							config.set(ConfigInfo.BALISE_PRESENTE, lecture[COMMANDE] == SerialProtocol.IN_PRESENCE_BALISE.codeInt);
						else
							log.warning("Le bas niveau a signalé un changement de présence de balise en plein match");
					}
					else if(lecture[COMMANDE] == SerialProtocol.IN_RESEND_PACKET.codeInt)
					{
						// The low level asks us to resend one of OUR packets.
						lecture[index++] = serie.read();
						lecture[index++] = serie.read();
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						// Two-digit uppercase hex id, for the log only.
						String s = Integer.toHexString(((lecture[PARAM] << 8) + lecture[PARAM+1])).toUpperCase();
						if(s.length() == 1)
							s = "0"+s;
						else
							s = s.substring(s.length()-2, s.length());
						log.warning("Demande de renvoi du paquet "+s);
						output.resend((lecture[PARAM] << 8) + lecture[PARAM+1]);
					}
					else if(lecture[COMMANDE] == SerialProtocol.IN_CODE_COQUILLAGES.codeInt)
					{
						// NOTE(review): when matchDemarre is true this frame's
						// payload and checksum bytes are never consumed —
						// confirm the low level cannot send it mid-match.
						if(!matchDemarre)
						{
							int tmp = codeCoquillage;
							lecture[index++] = serie.read(); // code
							// Bad checksum: drop the frame.
							if(!verifieChecksum(lecture, index))
								continue;
							codeCoquillage = lecture[PARAM];
							log.debug("Code coquillage reçu : "+codeCoquillage);
							// Only rebuild the table layout when the code changed.
							if(tmp != codeCoquillage)
							{
								// Each code 0..4 is one of the official shell
								// ("coquillage") placements: position and
								// ownership of the ten ground shells plus the
								// six elevated shells on the two rocks.
								switch(codeCoquillage)
								{
									case 0:
										GameElementNames.COQUILLAGE_1.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(1300,450), 38));
										GameElementNames.COQUILLAGE_2.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(1300,750), 38));
										GameElementNames.COQUILLAGE_3.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(600,550), 38));
										GameElementNames.COQUILLAGE_4.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(300,350), 38));
										GameElementNames.COQUILLAGE_5.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(0,150), 38));
										GameElementNames.COQUILLAGE_6.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(0,450), 38));
										GameElementNames.COQUILLAGE_7.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(-300,350), 38));
										GameElementNames.COQUILLAGE_8.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-600,550), 38));
										GameElementNames.COQUILLAGE_9.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(-1300,750), 38));
										GameElementNames.COQUILLAGE_10.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(-1300,450), 38));
										GameElementNames.COQUILLAGE_ROCHER_DROITE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										break;
									case 1:
										GameElementNames.COQUILLAGE_1.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(1300,450), 38));
										GameElementNames.COQUILLAGE_2.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(1300,750), 38));
										GameElementNames.COQUILLAGE_3.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(600,550), 38));
										GameElementNames.COQUILLAGE_4.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(300,350), 38));
										GameElementNames.COQUILLAGE_5.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(0,150), 38));
										GameElementNames.COQUILLAGE_6.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(0,450), 38));
										GameElementNames.COQUILLAGE_7.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-300,350), 38));
										GameElementNames.COQUILLAGE_8.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-600,550), 38));
										GameElementNames.COQUILLAGE_9.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-1300,750), 38));
										GameElementNames.COQUILLAGE_10.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(-1300,450), 38));
										GameElementNames.COQUILLAGE_ROCHER_DROITE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										break;
									case 2:
										GameElementNames.COQUILLAGE_1.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(1300,450), 38));
										GameElementNames.COQUILLAGE_2.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(1300,750), 38));
										GameElementNames.COQUILLAGE_3.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(900,450), 38));
										GameElementNames.COQUILLAGE_4.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(900,750), 38));
										GameElementNames.COQUILLAGE_5.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(300,350), 38));
										GameElementNames.COQUILLAGE_6.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-300,350), 38));
										GameElementNames.COQUILLAGE_7.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-900,750), 38));
										GameElementNames.COQUILLAGE_8.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(-900,450), 38));
										GameElementNames.COQUILLAGE_9.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-1300,750), 38));
										GameElementNames.COQUILLAGE_10.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(-1300,450), 38));
										GameElementNames.COQUILLAGE_ROCHER_DROITE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										break;
									case 3:
										GameElementNames.COQUILLAGE_1.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(1300,450), 38));
										GameElementNames.COQUILLAGE_2.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(1300,750), 38));
										GameElementNames.COQUILLAGE_3.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(900,450), 38));
										GameElementNames.COQUILLAGE_4.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(900,750), 38));
										GameElementNames.COQUILLAGE_5.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(300,350), 38));
										GameElementNames.COQUILLAGE_6.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(-300,350), 38));
										GameElementNames.COQUILLAGE_7.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-900,750), 38));
										GameElementNames.COQUILLAGE_8.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(-900,450), 38));
										GameElementNames.COQUILLAGE_9.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-1300,750), 38));
										GameElementNames.COQUILLAGE_10.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-1300,450), 38));
										GameElementNames.COQUILLAGE_ROCHER_DROITE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										break;
									case 4:
										GameElementNames.COQUILLAGE_1.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(1300,450), 38));
										GameElementNames.COQUILLAGE_2.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(1300,750), 38));
										GameElementNames.COQUILLAGE_3.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(900,450), 38));
										GameElementNames.COQUILLAGE_4.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(900,750), 38));
										GameElementNames.COQUILLAGE_5.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(900,150), 38));
										GameElementNames.COQUILLAGE_6.set(GameElementType.COQUILLAGE_NEUTRE, new ObstacleCircular(new Vec2<ReadOnly>(-900,150), 38));
										GameElementNames.COQUILLAGE_7.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-900,750), 38));
										GameElementNames.COQUILLAGE_8.set(GameElementType.COQUILLAGE_AMI, new ObstacleCircular(new Vec2<ReadOnly>(-900,450), 38));
										GameElementNames.COQUILLAGE_9.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-1300,750), 38));
										GameElementNames.COQUILLAGE_10.set(GameElementType.COQUILLAGE_ENNEMI, new ObstacleCircular(new Vec2<ReadOnly>(-1300,450), 38));
										GameElementNames.COQUILLAGE_ROCHER_DROITE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_DROITE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_AMI, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_SOMMET.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_INTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_NEUTRE, null);
										GameElementNames.COQUILLAGE_ROCHER_GAUCHE_EXTERIEUR.set(GameElementType.COQUILLAGE_EN_HAUTEUR_ENNEMI, null);
										break;
									default:
										log.critical("Code coquillage inconnu ! "+codeCoquillage);
										break;
								}
								// Valid code: the permanent hooks depend on the
								// layout, so replace them on the low level.
								if(codeCoquillage >= 0 && codeCoquillage <= 4)
								{
									output.deleteAllHooks();
									output.envoieHooks(hookfactory.getHooksPermanentsAEnvoyer());
								}
							}
						}
					}
					else if(lecture[COMMANDE] == SerialProtocol.IN_DEBUT_MATCH.codeInt)
					{
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						// Match start: begin forwarding sensor frames and
						// record the start date atomically with the flag.
						capteursOn = true;
						synchronized(config)
						{
							config.set(ConfigInfo.DATE_DEBUT_MATCH, System.currentTimeMillis());
							config.set(ConfigInfo.MATCH_DEMARRE, true);
							matchDemarre = true;
						}
					}
					else if(lecture[COMMANDE] == SerialProtocol.IN_MATCH_FINI.codeInt)
					{
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						// Match over: close the serial link and stop this thread.
						log.debug("Fin du Match !");
						config.set(ConfigInfo.FIN_MATCH, true);
						serie.close();
						return;
					}
					else if(lecture[COMMANDE] == SerialProtocol.IN_ELT_SHOOT.codeInt)
					{
						lecture[index++] = serie.read(); // element number
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						int nbElement = lecture[PARAM];
						table.setDone(GameElementNames.values()[nbElement], Tribool.TRUE);
					}
					/**
					 * Hook request (legacy, currently disabled)
					 */
/*					case "dhk":
						int nbScript = Integer.parseInt(messages[1]);
						ScriptHookNames s = ScriptHookNames.values()[nbScript];
						int param = Integer.parseInt(messages[2]);
						hookbuffer.add(new IncomingHook(s, param));
						break;
*/
					else if(lecture[COMMANDE] == SerialProtocol.IN_ROBOT_ARRIVE.codeInt)
					{
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						requete.set(RequeteType.TRAJET_FINI);
					}
					else if(lecture[COMMANDE] == SerialProtocol.IN_PB_DEPLACEMENT.codeInt)
					{
						// Bad checksum: drop the frame.
						if(!verifieChecksum(lecture, index))
							continue;
						requete.set(RequeteType.BLOCAGE_MECANIQUE);
					}
					else
					{
						// Unknown command: ask the STM to resend the packet.
						log.critical("Commande série inconnue: "+lecture[COMMANDE]);
						output.askResend(idDernierPaquet);
					}
				}
			} catch (InterruptedException | IOException e) {
				// NOTE(review): the interrupt flag is not restored and the
				// loop just continues — confirm this is intended.
				e.printStackTrace();
			} catch (MissingCharacterException e) {
				// The frame body never arrived in time: request a resend.
				log.critical("Série trop longue. Redemande");
				output.askResend(idDernierPaquet);
			}
		}
//		log.debug("Fermeture de ThreadSerialInput");
	}

	/**
	 * Converts a raw 12-bit IR sensor sample into a distance, using a
	 * piecewise-hyperbolic fit of the sensor's voltage/distance curve.
	 * Units of the returned value are presumably millimetres — TODO confirm.
	 */
	private int convertIR(int capteur)
	{
		double V = capteur * 3.3 / 4096; // the voltage; 4096 <=> 3.3V
		if(V < 2.75) // above 8cm
			return (int) (207.7 / (V - 0.15));
		else if(V < 3)
			return (int) (140 / (V - 1));
		else
			return (int) (63 / (V - 2.1));
	}

	/**
	 * Reads the frame's checksum byte from the serial link and compares it
	 * with the bitwise NOT of the 8-bit sum of the first {@code longueur}
	 * bytes of {@code lecture}. On mismatch, logs and asks the STM to resend
	 * the current packet.
	 *
	 * Side effect: always consumes one byte and stores it at
	 * lecture[longueur].
	 *
	 * @return true iff the checksum matches
	 */
	private boolean verifieChecksum(int[] lecture, int longueur) throws IOException, MissingCharacterException
	{
//		lecture[longueur++] = serie.read(); // checksum
		lecture[longueur] = serie.read(); // checksum
		int c = 0;
		for(int i = 0; i < longueur; i++)
			c += lecture[i];
		if(lecture[longueur] != ((~c) & 0xFF))
		{
			log.warning("Erreur de checksum (attendu : "+((~c) & 0xFF)+", obtenu : "+lecture[longueur]+"). Paquet redemandé");
			output.askResend(idDernierPaquet);
			return false;
		}
		return true;
	}

	@Override
	public void updateConfig(Config config)
	{}

	@Override
	public void useConfig(Config config)
	{
		// Number of proximity sensors; fixes the length of sensor frames.
		nbCapteurs = config.getInt(ConfigInfo.NB_CAPTEURS_PROXIMITE);
	}

}
package org.jsoup.parser; import org.jsoup.helper.Validate; import org.jsoup.internal.StringUtil; import org.jsoup.nodes.CDataNode; import org.jsoup.nodes.Comment; import org.jsoup.nodes.DataNode; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import org.jsoup.nodes.FormElement; import org.jsoup.nodes.Node; import org.jsoup.nodes.TextNode; import org.jsoup.select.Elements; import javax.annotation.Nullable; import javax.annotation.ParametersAreNonnullByDefault; import java.io.Reader; import java.io.StringReader; import java.util.ArrayList; import java.util.List; import static org.jsoup.internal.StringUtil.inSorted; /** * HTML Tree Builder; creates a DOM from Tokens. */ public class HtmlTreeBuilder extends TreeBuilder { // tag searches. must be sorted, used in inSorted. HtmlTreeBuilderTest validates they're sorted. static final String[] TagsSearchInScope = new String[]{"applet", "caption", "html", "marquee", "object", "table", "td", "th"}; static final String[] TagSearchList = new String[]{"ol", "ul"}; static final String[] TagSearchButton = new String[]{"button"}; static final String[] TagSearchTableScope = new String[]{"html", "table"}; static final String[] TagSearchSelectScope = new String[]{"optgroup", "option"}; static final String[] TagSearchEndTags = new String[]{"dd", "dt", "li", "optgroup", "option", "p", "rp", "rt"}; static final String[] TagSearchSpecial = new String[]{"address", "applet", "area", "article", "aside", "base", "basefont", "bgsound", "blockquote", "body", "br", "button", "caption", "center", "col", "colgroup", "command", "dd", "details", "dir", "div", "dl", "dt", "embed", "fieldset", "figcaption", "figure", "footer", "form", "frame", "frameset", "h1", "h2", "h3", "h4", "h5", "h6", "head", "header", "hgroup", "hr", "html", "iframe", "img", "input", "isindex", "li", "link", "listing", "marquee", "menu", "meta", "nav", "noembed", "noframes", "noscript", "object", "ol", "p", "param", "plaintext", "pre", "script", "section", 
"select", "style", "summary", "table", "tbody", "td", "textarea", "tfoot", "th", "thead", "title", "tr", "ul", "wbr", "xmp"}; public static final int MaxScopeSearchDepth = 100; // prevents the parser bogging down in exceptionally broken pages private HtmlTreeBuilderState state; // the current state private HtmlTreeBuilderState originalState; // original / marked state private boolean baseUriSetFromDoc; private Element headElement; // the current head element private FormElement formElement; // the current form element private Element contextElement; // fragment parse context -- could be null even if fragment parsing private ArrayList<Element> formattingElements; // active (open) formatting elements private List<String> pendingTableCharacters; // chars in table to be shifted out private Token.EndTag emptyEnd; // reused empty end tag private boolean framesetOk; // if ok to go into frameset private boolean fosterInserts; // if next inserts should be fostered private boolean fragmentParsing; // if parsing a fragment of html ParseSettings defaultSettings() { return ParseSettings.htmlDefault; } @Override @ParametersAreNonnullByDefault protected void initialiseParse(Reader input, String baseUri, Parser parser) { super.initialiseParse(input, baseUri, parser); // this is a bit mucky. todo - probably just create new parser objects to ensure all reset. 
state = HtmlTreeBuilderState.Initial; originalState = null; baseUriSetFromDoc = false; headElement = null; formElement = null; contextElement = null; formattingElements = new ArrayList<>(); pendingTableCharacters = new ArrayList<>(); emptyEnd = new Token.EndTag(); framesetOk = true; fosterInserts = false; fragmentParsing = false; } List<Node> parseFragment(String inputFragment, @Nullable Element context, String baseUri, Parser parser) { // context may be null state = HtmlTreeBuilderState.Initial; initialiseParse(new StringReader(inputFragment), baseUri, parser); contextElement = context; fragmentParsing = true; Element root = null; if (context != null) { if (context.ownerDocument() != null) // quirks setup: doc.quirksMode(context.ownerDocument().quirksMode()); // initialise the tokeniser state: String contextTag = context.normalName(); if (StringUtil.in(contextTag, "title", "textarea")) tokeniser.transition(TokeniserState.Rcdata); else if (StringUtil.in(contextTag, "iframe", "noembed", "noframes", "style", "xmp")) tokeniser.transition(TokeniserState.Rawtext); else if (contextTag.equals("script")) tokeniser.transition(TokeniserState.ScriptData); else if (contextTag.equals(("noscript"))) tokeniser.transition(TokeniserState.Data); // if scripting enabled, rawtext else if (contextTag.equals("plaintext")) tokeniser.transition(TokeniserState.Data); else tokeniser.transition(TokeniserState.Data); // default root = new Element(Tag.valueOf(contextTag, settings), baseUri); doc.appendChild(root); stack.add(root); resetInsertionMode(); // setup form element to nearest form on context (up ancestor chain). 
ensures form controls are associated // with form correctly Elements contextChain = context.parents(); contextChain.add(0, context); for (Element parent: contextChain) { if (parent instanceof FormElement) { formElement = (FormElement) parent; break; } } } runParser(); if (context != null) { // depending on context and the input html, content may have been added outside of the root el // e.g. context=p, input=div, the div will have been pushed out. List<Node> nodes = root.siblingNodes(); if (!nodes.isEmpty()) root.insertChildren(-1, nodes); return root.childNodes(); } else return doc.childNodes(); } @Override protected boolean process(Token token) { currentToken = token; return this.state.process(token, this); } boolean process(Token token, HtmlTreeBuilderState state) { currentToken = token; return state.process(token, this); } void transition(HtmlTreeBuilderState state) { this.state = state; } HtmlTreeBuilderState state() { return state; } void markInsertionMode() { originalState = state; } HtmlTreeBuilderState originalState() { return originalState; } void framesetOk(boolean framesetOk) { this.framesetOk = framesetOk; } boolean framesetOk() { return framesetOk; } Document getDocument() { return doc; } String getBaseUri() { return baseUri; } void maybeSetBaseUri(Element base) { if (baseUriSetFromDoc) // only listen to the first <base href> in parse return; String href = base.absUrl("href"); if (href.length() != 0) { // ignore <base target> etc baseUri = href; baseUriSetFromDoc = true; doc.setBaseUri(href); // set on the doc so doc.createElement(Tag) will get updated base, and to update all descendants } } boolean isFragmentParsing() { return fragmentParsing; } void error(HtmlTreeBuilderState state) { if (parser.getErrors().canAddError()) parser.getErrors().add(new ParseError(reader.pos(), "Unexpected token [%s] when in state [%s]", currentToken.tokenType(), state)); } Element insert(final Token.StartTag startTag) { // cleanup duplicate attributes: if 
(startTag.hasAttributes() && !startTag.attributes.isEmpty()) {
            int dupes = startTag.attributes.deduplicate(settings);
            if (dupes > 0) {
                error("Duplicate attribute");
            }
        }

        // handle empty unknown tags
        // when the spec expects an empty tag, will directly hit insertEmpty, so won't generate this fake end tag.
        if (startTag.isSelfClosing()) {
            Element el = insertEmpty(startTag);
            stack.add(el);
            tokeniser.transition(TokeniserState.Data); // handles <script />, otherwise needs breakout steps from script data
            tokeniser.emit(emptyEnd.reset().name(el.tagName())); // ensure we get out of whatever state we are in. emitted for yielded processing
            return el;
        }

        Element el = new Element(Tag.valueOf(startTag.name(), settings), null, settings.normalizeAttributes(startTag.attributes));
        insert(el);
        return el;
    }

    /** Inserts a start tag known only by name, with no attributes. */
    Element insertStartTag(String startTagName) {
        Element el = new Element(Tag.valueOf(startTagName, settings), null);
        insert(el);
        return el;
    }

    /** Inserts the element into the tree and pushes it onto the open-element stack. */
    void insert(Element el) {
        insertNode(el);
        stack.add(el);
    }

    /** Inserts an element for a void/self-closing tag; it is NOT pushed onto the stack. */
    Element insertEmpty(Token.StartTag startTag) {
        Tag tag = Tag.valueOf(startTag.name(), settings);
        Element el = new Element(tag, null, settings.normalizeAttributes(startTag.attributes));
        insertNode(el);
        if (startTag.isSelfClosing()) {
            if (tag.isKnownTag()) {
                if (!tag.isEmpty())
                    tokeniser.error("Tag cannot be self closing; not a void tag");
            } else // unknown tag, remember this is self closing for output
                tag.setSelfClosing();
        }
        return el;
    }

    /**
     * Inserts a form element and records it as the current form owner.
     * @param onStack whether to also push the form onto the open-element stack
     */
    FormElement insertForm(Token.StartTag startTag, boolean onStack) {
        Tag tag = Tag.valueOf(startTag.name(), settings);
        FormElement el = new FormElement(tag, null, settings.normalizeAttributes(startTag.attributes));
        setFormElement(el);
        insertNode(el);
        if (onStack)
            stack.add(el);
        return el;
    }

    /** Inserts a comment node at the current insertion point. */
    void insert(Token.Comment commentToken) {
        Comment comment = new Comment(commentToken.getData());
        insertNode(comment);
    }

    /**
     * Inserts character data as a CDATA, Data, or Text node depending on the
     * token type and the current element (script/style take DataNodes).
     */
    void insert(Token.Character characterToken) {
        final Node node;
        Element el = currentElement();
        if (el == null)
            el = doc; // allows for whitespace to be inserted into the doc root object (not on the stack)
        final String tagName = el.normalName();
        final String data = characterToken.getData();

        if (characterToken.isCData())
            node = new CDataNode(data);
        else if (isContentForTagData(tagName))
            node = new DataNode(data);
        else
            node = new TextNode(data);
        el.appendChild(node); // doesn't use insertNode, because we don't foster these; and will always have a stack.
    }

    /** Core insertion: routes to the doc root, foster parent, or current element. */
    private void insertNode(Node node) {
        // if the stack hasn't been set up yet, elements (doctype, comments) go into the doc
        if (stack.isEmpty())
            doc.appendChild(node);
        else if (isFosterInserts())
            insertInFosterParent(node);
        else
            currentElement().appendChild(node);

        // connect form controls to their form element
        if (node instanceof Element && ((Element) node).tag().isFormListed()) {
            if (formElement != null)
                formElement.addElement((Element) node);
        }
    }

    Element pop() {
        int size = stack.size();
        return stack.remove(size - 1);
    }

    void push(Element element) {
        stack.add(element);
    }

    ArrayList<Element> getStack() {
        return stack;
    }

    boolean onStack(Element el) {
        return isElementInQueue(stack, el);
    }

    // identity (==) search, walking from the top of the queue downwards
    private boolean isElementInQueue(ArrayList<Element> queue, Element element) {
        for (int pos = queue.size() - 1; pos >= 0; pos--) {
            Element next = queue.get(pos);
            if (next == element) {
                return true;
            }
        }
        return false;
    }

    /** Returns the topmost stack element with the given normal name, or null. */
    Element getFromStack(String elName) {
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element next = stack.get(pos);
            if (next.normalName().equals(elName)) {
                return next;
            }
        }
        return null;
    }

    boolean removeFromStack(Element el) {
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element next = stack.get(pos);
            if (next == el) {
                stack.remove(pos);
                return true;
            }
        }
        return false;
    }

    /** Pops elements off the stack up to and including the named element; returns it, or null if not found. */
    Element popStackToClose(String elName) {
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element el = stack.get(pos);
            stack.remove(pos);
            if (el.normalName().equals(elName))
                return el;
        }
        return null;
    }

    // elnames is sorted, comes from Constants
    void popStackToClose(String... elNames) {
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element next = stack.get(pos);
            stack.remove(pos);
            if (inSorted(next.normalName(), elNames))
                break;
        }
    }

    /** Pops elements off the stack until (but not including) the named element. */
    void popStackToBefore(String elName) {
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element next = stack.get(pos);
            if (next.normalName().equals(elName)) {
                break;
            } else {
                stack.remove(pos);
            }
        }
    }

    void clearStackToTableContext() {
        clearStackToContext("table");
    }

    void clearStackToTableBodyContext() {
        clearStackToContext("tbody", "tfoot", "thead", "template");
    }

    void clearStackToTableRowContext() {
        clearStackToContext("tr", "template");
    }

    // pops until one of nodeNames (or html, as a backstop) is the current element
    private void clearStackToContext(String... nodeNames) {
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element next = stack.get(pos);
            if (StringUtil.in(next.normalName(), nodeNames) || next.normalName().equals("html"))
                break;
            else
                stack.remove(pos);
        }
    }

    /** Returns the element immediately above {@code el} on the stack. Requires el to be on the stack. */
    Element aboveOnStack(Element el) {
        assert onStack(el);
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element next = stack.get(pos);
            if (next == el) {
                return stack.get(pos - 1);
            }
        }
        return null;
    }

    void insertOnStackAfter(Element after, Element in) {
        int i = stack.lastIndexOf(after);
        Validate.isTrue(i != -1);
        stack.add(i + 1, in);
    }

    void replaceOnStack(Element out, Element in) {
        replaceInQueue(stack, out, in);
    }

    private void replaceInQueue(ArrayList<Element> queue, Element out, Element in) {
        int i = queue.lastIndexOf(out);
        Validate.isTrue(i != -1);
        queue.set(i, in);
    }

    /**
     * Implements "reset the insertion mode appropriately", per
     * https://html.spec.whatwg.org/multipage/parsing.html#reset-the-insertion-mode-appropriately
     */
    void resetInsertionMode() {
        boolean last = false;
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element node = stack.get(pos);
            if (pos == 0) {
                last = true;
                node = contextElement;
            }
            String name = node.normalName();
            if ("select".equals(name)) {
                transition(HtmlTreeBuilderState.InSelect);
                break; // frag
            } else if (("td".equals(name) || "th".equals(name)) && !last) {
                // fixed precedence: spec says "a td or th element AND last is false" — the
                // original grouped the && with "th" only, making "td" match even when last.
                transition(HtmlTreeBuilderState.InCell);
                break;
            } else if ("tr".equals(name)) {
                transition(HtmlTreeBuilderState.InRow);
                break;
            } else if ("tbody".equals(name) || "thead".equals(name) || "tfoot".equals(name)) {
                transition(HtmlTreeBuilderState.InTableBody);
                break;
            } else if ("caption".equals(name)) {
                transition(HtmlTreeBuilderState.InCaption);
                break;
            } else if ("colgroup".equals(name)) {
                transition(HtmlTreeBuilderState.InColumnGroup);
                break; // frag
            } else if ("table".equals(name)) {
                transition(HtmlTreeBuilderState.InTable);
                break;
            } else if ("head".equals(name)) {
                transition(HtmlTreeBuilderState.InBody);
                break; // frag
            } else if ("body".equals(name)) {
                transition(HtmlTreeBuilderState.InBody);
                break;
            } else if ("frameset".equals(name)) {
                transition(HtmlTreeBuilderState.InFrameset);
                break; // frag
            } else if ("html".equals(name)) {
                transition(HtmlTreeBuilderState.BeforeHead);
                break; // frag
            } else if (last) {
                transition(HtmlTreeBuilderState.InBody);
                break; // frag
            }
        }
    }

    // todo: tidy up in specific scope methods
    private String[] specificScopeTarget = {null};

    private boolean inSpecificScope(String targetName, String[] baseTypes, String[] extraTypes) {
        specificScopeTarget[0] = targetName;
        return inSpecificScope(specificScopeTarget, baseTypes, extraTypes);
    }

    private boolean inSpecificScope(String[] targetNames, String[] baseTypes, String[] extraTypes) {
        // https://html.spec.whatwg.org/multipage/parsing.html#has-an-element-in-the-specific-scope
        final int bottom = stack.size() - 1;
        final int top = bottom > MaxScopeSearchDepth ? bottom - MaxScopeSearchDepth : 0;
        // don't walk too far up the tree
        for (int pos = bottom; pos >= top; pos--) {
            final String elName = stack.get(pos).normalName();
            if (inSorted(elName, targetNames))
                return true;
            if (inSorted(elName, baseTypes))
                return false;
            if (extraTypes != null && inSorted(elName, extraTypes))
                return false;
        }
        //Validate.fail("Should not be reachable"); // would end up false because hitting 'html' at root (basetypes)
        return false;
    }

    boolean inScope(String[] targetNames) {
        return inSpecificScope(targetNames, TagsSearchInScope, null);
    }

    boolean inScope(String targetName) {
        return inScope(targetName, null);
    }

    boolean inScope(String targetName, String[] extras) {
        return inSpecificScope(targetName, TagsSearchInScope, extras);
        // todo: in mathml namespace: mi, mo, mn, ms, mtext annotation-xml
        // todo: in svg namespace: forignOjbect, desc, title
    }

    boolean inListItemScope(String targetName) {
        return inScope(targetName, TagSearchList);
    }

    boolean inButtonScope(String targetName) {
        return inScope(targetName, TagSearchButton);
    }

    boolean inTableScope(String targetName) {
        return inSpecificScope(targetName, TagSearchTableScope, null);
    }

    boolean inSelectScope(String targetName) {
        for (int pos = stack.size() - 1; pos >= 0; pos--) {
            Element el = stack.get(pos);
            String elName = el.normalName();
            if (elName.equals(targetName))
                return true;
            if (!inSorted(elName, TagSearchSelectScope)) // all elements except
                return false;
        }
        Validate.fail("Should not be reachable");
        return false;
    }

    void setHeadElement(Element headElement) {
        this.headElement = headElement;
    }

    Element getHeadElement() {
        return headElement;
    }

    boolean isFosterInserts() {
        return fosterInserts;
    }

    void setFosterInserts(boolean fosterInserts) {
        this.fosterInserts = fosterInserts;
    }

    FormElement getFormElement() {
        return formElement;
    }

    void setFormElement(FormElement formElement) {
        this.formElement = formElement;
    }

    void newPendingTableCharacters() {
        pendingTableCharacters = new ArrayList<>();
    }

    List<String> getPendingTableCharacters() {
        return pendingTableCharacters;
    }

    /**
     11.2.5.2 Closing elements that have implied end tags<p/>
     When the steps below require the UA to generate implied end tags, then, while the current node is a dd element, a
     dt element, an li element, an option element, an optgroup element, a p element, an rp element, or an rt element,
     the UA must pop the current node off the stack of open elements.

     @param excludeTag If a step requires the UA to generate implied end tags but lists an element to exclude from the
     process, then the UA must perform the above steps as if that element was not in the above list.
     */
    void generateImpliedEndTags(String excludeTag) {
        // NOTE(review): when excludeTag is null the first clause is false and the loop never
        // runs, so generateImpliedEndTags() is a no-op via this path — looks like an upstream
        // quirk; preserved as-is because the state machine may depend on it. TODO confirm.
        while ((excludeTag != null && !currentElement().normalName().equals(excludeTag)) &&
                inSorted(currentElement().normalName(), TagSearchEndTags))
            pop();
    }

    void generateImpliedEndTags() {
        generateImpliedEndTags(null);
    }

    boolean isSpecial(Element el) {
        // todo: mathml's mi, mo, mn
        // todo: svg's foreigObject, desc, title
        String name = el.normalName();
        return inSorted(name, TagSearchSpecial);
    }

    Element lastFormattingElement() {
        return formattingElements.size() > 0 ? formattingElements.get(formattingElements.size() - 1) : null;
    }

    Element removeLastFormattingElement() {
        int size = formattingElements.size();
        if (size > 0)
            return formattingElements.remove(size - 1);
        else
            return null;
    }

    // active formatting elements
    void pushActiveFormattingElements(Element in) {
        // Noah's Ark clause: at most three matching entries between markers
        int numSeen = 0;
        for (int pos = formattingElements.size() - 1; pos >= 0; pos--) {
            Element el = formattingElements.get(pos);
            if (el == null) // marker
                break;

            if (isSameFormattingElement(in, el))
                numSeen++;

            if (numSeen == 3) {
                formattingElements.remove(pos);
                break;
            }
        }
        formattingElements.add(in);
    }

    private boolean isSameFormattingElement(Element a, Element b) {
        // same if: same namespace, tag, and attributes. Element.equals only checks tag, might in future check children
        return a.normalName().equals(b.normalName()) &&
                // a.namespace().equals(b.namespace()) &&
                a.attributes().equals(b.attributes());
        // todo: namespaces
    }

    void reconstructFormattingElements() {
        Element last = lastFormattingElement();
        if (last == null || onStack(last))
            return;

        Element entry = last;
        int size = formattingElements.size();
        int pos = size - 1;
        boolean skip = false;
        while (true) {
            if (pos == 0) { // step 4. if none before, skip to 8
                skip = true;
                break;
            }
            entry = formattingElements.get(--pos); // step 5. one earlier than entry
            if (entry == null || onStack(entry)) // step 6 - neither marker nor on stack
                break; // jump to 8, else continue back to 4
        }
        while (true) {
            if (!skip) // step 7: on later than entry
                entry = formattingElements.get(++pos);
            Validate.notNull(entry); // should not occur, as we break at last element

            // 8. create new element from element, 9 insert into current node, onto stack
            skip = false; // can only skip increment from 4.
            Element newEl = insertStartTag(entry.normalName()); // todo: avoid fostering here?
            // newEl.namespace(entry.namespace()); // todo: namespaces
            newEl.attributes().addAll(entry.attributes());

            // 10. replace entry with new entry
            formattingElements.set(pos, newEl);

            // if not last entry in list, jump to 7
            if (pos == size - 1)
                break;
        }
    }

    void clearFormattingElementsToLastMarker() {
        while (!formattingElements.isEmpty()) {
            Element el = removeLastFormattingElement();
            if (el == null)
                break;
        }
    }

    void removeFromActiveFormattingElements(Element el) {
        for (int pos = formattingElements.size() - 1; pos >= 0; pos--) {
            Element next = formattingElements.get(pos);
            if (next == el) {
                formattingElements.remove(pos);
                break;
            }
        }
    }

    boolean isInActiveFormattingElements(Element el) {
        return isElementInQueue(formattingElements, el);
    }

    /** Returns the most recent active formatting element with the given name, stopping at a scope marker. */
    Element getActiveFormattingElement(String nodeName) {
        for (int pos = formattingElements.size() - 1; pos >= 0; pos--) {
            Element next = formattingElements.get(pos);
            if (next == null) // scope marker
                break;
            else if (next.normalName().equals(nodeName))
                return next;
        }
        return null;
    }

    void replaceActiveFormattingElement(Element out, Element in) {
        replaceInQueue(formattingElements, out, in);
    }

    void insertMarkerToFormattingElements() {
        formattingElements.add(null);
    }

    /** Inserts a node relative to the last table on the stack, per the foster-parenting algorithm. */
    void insertInFosterParent(Node in) {
        Element fosterParent;
        Element lastTable = getFromStack("table");
        boolean isLastTableParent = false;
        if (lastTable != null) {
            if (lastTable.parent() != null) {
                fosterParent = lastTable.parent();
                isLastTableParent = true;
            } else
                fosterParent = aboveOnStack(lastTable);
        } else { // no table == frag
            fosterParent = stack.get(0);
        }

        if (isLastTableParent) {
            Validate.notNull(lastTable); // last table cannot be null by this point.
            lastTable.before(in);
        } else
            fosterParent.appendChild(in);
    }

    @Override
    public String toString() {
        return "TreeBuilder{" +
                "currentToken=" + currentToken +
                ", state=" + state +
                ", currentElement=" + currentElement() +
                '}';
    }

    protected boolean isContentForTagData(final String normalName) {
        return (normalName.equals("script") || normalName.equals("style"));
    }
}
package org.jtrfp.trcl.flow;

import java.lang.ref.WeakReference;
import java.util.List;

import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.OverworldSystem;
import org.jtrfp.trcl.Tunnel;
import org.jtrfp.trcl.beh.CustomDeathBehavior;
import org.jtrfp.trcl.beh.CustomNAVTargetableBehavior;
import org.jtrfp.trcl.beh.DamageableBehavior;
import org.jtrfp.trcl.beh.HorizAimAtPlayerBehavior;
import org.jtrfp.trcl.beh.RemovesNAVObjectiveOnDeath;
import org.jtrfp.trcl.beh.tun.TunnelEntryListener;
import org.jtrfp.trcl.core.TR;
import org.jtrfp.trcl.file.Location3D;
import org.jtrfp.trcl.file.NAVFile.BOS;
import org.jtrfp.trcl.file.NAVFile.CHK;
import org.jtrfp.trcl.file.NAVFile.DUN;
import org.jtrfp.trcl.file.NAVFile.NAVSubObject;
import org.jtrfp.trcl.file.NAVFile.TGT;
import org.jtrfp.trcl.file.NAVFile.TUN;
import org.jtrfp.trcl.file.NAVFile.XIT;
import org.jtrfp.trcl.file.TDFFile.ExitMode;
import org.jtrfp.trcl.obj.Checkpoint;
import org.jtrfp.trcl.obj.DEFObject;
import org.jtrfp.trcl.obj.Jumpzone;
import org.jtrfp.trcl.obj.TunnelEntranceObject;
import org.jtrfp.trcl.obj.TunnelExitObject;
import org.jtrfp.trcl.obj.WorldObject;

/**
 * One mission objective on the NAV route (destroy target, enter/exit tunnel,
 * checkpoint, jump zone, ...). Concrete objectives are created as anonymous
 * subclasses by the nested {@link Factory} from parsed NAV-file sub-objects.
 */
public abstract class NAVObjective {
    // Raised so the checkpoint hovers above terrain rather than sitting in it.
    private static final double CHECKPOINT_HEIGHT_PADDING=70000;

    /** Human-readable description shown in debug reports and HUD text. */
    public abstract String getDescription();

    /** The world object the player must reach/destroy for this objective; may be null. */
    public abstract WorldObject getTarget();

    protected NAVObjective(Factory f){
	f.tr.getReporter().report("org.jtrfp.trcl.flow.NAVObjective."+f.counter+".desc", getDescription());
	// Fixed NPE: the original called getTarget().getPosition() BEFORE the
	// null check, so a null target crashed instead of being skipped.
	final WorldObject target = getTarget();
	if(target!=null){
	    final double [] loc = target.getPosition();
	    f.tr.getReporter().report("org.jtrfp.trcl.flow.NAVObjective."+f.counter+".loc", "X="+loc[0]+" Y="+loc[1]+" Z="+loc[2]);
	}
	f.counter++;
    }//end constructor

    /**
     * Builds NAVObjective instances from NAV-file sub-objects, wiring up the
     * behaviors (death/targeting listeners) that advance the mission.
     */
    public static class Factory{
	private final TR tr;//for debug
	private Tunnel currentTunnel;   // last tunnel seen by a TUN entry; XIT entries refer back to it
	int counter;                    // running index used for reporter keys
	private WorldObject worldBossObject,bossChamberExitShutoffTrigger;

	public Factory(TR tr){
	    this.tr=tr;
	}//end constructor

	/**
	 * Creates the objective(s) for one NAV sub-object and appends them, in
	 * order, to indexedNAVObjectiveList.
	 */
	public void create(final TR tr, NAVSubObject navSubObject, List<NAVObjective>indexedNAVObjectiveList){
	    final OverworldSystem overworld=tr.getGame().getCurrentMission().getOverworldSystem();
	    final List<DEFObject> defs = overworld.getDefList();
	    if(navSubObject instanceof TGT){
		///////////////////////////////////////////////////////////
		TGT tgt = (TGT)navSubObject;
		int [] targs = tgt.getTargets();
		for(int i=0; i<targs.length;i++){
		    final WorldObject targ = defs.get(targs[i]);
		    final NAVObjective objective = new NAVObjective(this){
			@Override
			public String getDescription() {
			    return "Destroy Target";
			}
			@Override
			public WorldObject getTarget() {
			    return targ;
			}
		    };//end new NAVObjective
		    indexedNAVObjectiveList.add(objective);
		    targ.addBehavior(new RemovesNAVObjectiveOnDeath(objective,tr.getGame().getCurrentMission()));
		    targ.addBehavior(new CustomDeathBehavior(new Runnable(){
			@Override
			public void run(){
			    tr.getGame().getUpfrontDisplay()
			    .submitMomentaryUpfrontMessage("Target Destroyed");
			}//end run()
		    }));
		}//end for(targs)
	    } else if(navSubObject instanceof TUN){
		///////////////////////////////////////////////////////////
		TUN tun = (TUN)navSubObject;
		//Entrance and exit locations are already set up.
		final Location3D loc3d = tun.getLocationOnMap();
		// Kept (though only referenced by the commented-out lookup below)
		// to preserve the legacy coordinate-conversion record.
		final Vector3D modernLoc = new Vector3D(
			TR.legacy2Modern(loc3d.getX()),
			TR.legacy2Modern(loc3d.getY()),
			TR.legacy2Modern(loc3d.getZ()));
		/*final TunnelEntranceObject teo = tr.getGame().getCurrentMission().getTunnelEntranceObject(
			new Point((int)(modernLoc.getX()/TR.mapSquareSize),(int)(modernLoc.getZ()/TR.mapSquareSize)));
		 */
		final Mission mission = tr.getGame().getCurrentMission();
		final TunnelEntranceObject teo = mission.getNearestTunnelEntrance(loc3d.getX(),loc3d.getY(),loc3d.getZ());
		currentTunnel=teo.getSourceTunnel();
		/*final TunnelEntranceObject tunnelEntrance = currentTunnel.getEntranceObject();
		final double [] entPos=tunnelEntrance.getPosition();
		entPos[0]=TR.legacy2Modern(loc3d.getZ());
		entPos[1]=TR.legacy2Modern(loc3d.getY());
		entPos[2]=TR.legacy2Modern(loc3d.getX());
		entPos[1]=tr.getGame().
			getCurrentMission().
			getOverworldSystem().
			getAltitudeMap().
			heightAt(
				TR.legacy2MapSquare(loc3d.getZ()),
				TR.legacy2MapSquare(loc3d.getX()))*(tr.getWorld().sizeY/2)+TunnelEntranceObject.GROUND_HEIGHT_PAD;
		tunnelEntrance.notifyPositionChange();
		 */
		final NAVObjective enterObjective = new NAVObjective(this){
		    @Override
		    public String getDescription() {
			return "Enter Tunnel";
		    }
		    @Override
		    public WorldObject getTarget() {
			return teo;
		    }
		};//end new NAVObjective tunnelEntrance
		//tunnelEntrance.setNavObjectiveToRemove(enterObjective,true);
		final WorldObject tunnelEntranceObject = teo;
		// Remove the "Enter Tunnel" objective once the player actually
		// enters THIS tunnel while it is the current objective.
		currentTunnel.addTunnelEntryListener(new TunnelEntryListener(){
		    @Override
		    public void notifyTunnelEntered(Tunnel tunnel) {
			if(tr.getGame().getCurrentMission().getRemainingNAVObjectives().get(0).getTarget()==tunnelEntranceObject){
			    tr.getGame().getCurrentMission().removeNAVObjective(enterObjective);
			    tunnel.removeTunnelEntryListener(this);
			}
		    }});
		indexedNAVObjectiveList.add(enterObjective);
		final TunnelExitObject tunnelExit = currentTunnel.getExitObject();
		final NAVObjective exitObjective = new NAVObjective(this){
		    @Override
		    public String getDescription() {
			return "Exit Tunnel";
		    }
		    @Override
		    public WorldObject getTarget() {
			return tunnelExit;
		    }
		};//end new NAVObjective tunnelExit
		indexedNAVObjectiveList.add(exitObjective);
		tunnelExit.setNavObjectiveToRemove(exitObjective,true);
		tunnelExit.setMirrorTerrain(currentTunnel.getSourceTunnel().getExitMode()==ExitMode.exitToChamber);
		//if(currentTunnel.getSourceTunnel().getEntranceLogic()==TunnelLogic.visibleUnlessBoss){
		/*
		    bossChamberExitShutoffTrigger.addBehavior(new CustomNAVTargetableBehavior(new Runnable(){
			@Override
			public void run() {
			    tunnelEntrance.getBehavior().probeForBehavior(TunnelEntranceBehavior.class).setEnable(false);
			    tunnelEntrance.setVisible(false);}
		    }));
		    worldBossObject.addBehavior(new CustomDeathBehavior(new Runnable(){
			@Override
			public void run(){
			    //mission.setBossFight(false);
			}
		    }));*/
		//}//end if(visibleUnlessBoss)
	    } else if(navSubObject instanceof BOS){
		///////////////////////////////////////////////////////////
		final Mission mission = tr.getGame().getCurrentMission();
		final WeakReference<Mission> wMission = new WeakReference<Mission>(mission);
		final BOS bos = (BOS)navSubObject;
		boolean first=true;
		final int [] bossTargs = bos.getTargets();
		if(bossTargs!=null){
		    // Shield generators must fall before the boss is vulnerable.
		    for(final int target:bos.getTargets()){
			final WorldObject shieldGen = defs.get(target);
			final NAVObjective objective = new NAVObjective(this){
			    @Override
			    public String getDescription() {
				return "Destroy Shield";
			    }
			    @Override
			    public WorldObject getTarget() {
				return shieldGen;
			    }
			};//end new NAVObjective
			((DEFObject)shieldGen).setShieldGen(true);
			if(first){
			    // First shield generator doubles as the trigger that
			    // starts boss mode when it becomes the NAV target.
			    bossChamberExitShutoffTrigger=shieldGen;
			    shieldGen.addBehavior(new CustomNAVTargetableBehavior(new Runnable(){
				@Override
				public void run(){
				    // NOTE(review): wMission.get() may return null after GC — TODO confirm lifecycle.
				    wMission.get().enterBossMode(bos.getMusicFile());
				    tr.getGame().getUpfrontDisplay()
				    .submitMomentaryUpfrontMessage("Mission Objective");
				}//end run()
			    }));
			    first=false;
			}//end if(first)
			shieldGen.addBehavior(new RemovesNAVObjectiveOnDeath(objective,mission));
			bossChamberExitShutoffTrigger.addBehavior(new CustomNAVTargetableBehavior(new Runnable(){
			    @Override
			    public void run() {
				shieldGen.getBehavior().probeForBehavior(DamageableBehavior.class).setEnable(true);
				shieldGen.setActive(true);
			    }
			}));
			indexedNAVObjectiveList.add(objective);
		    }//end for(targets)
		}//end if(bos.targets() !=null))
		final DEFObject bossObject = defs.get(bos.getBossIndex());
		bossObject.addBehavior(new HorizAimAtPlayerBehavior(tr.getGame().getPlayer()));
		bossObject.setIgnoringProjectiles(true); // invulnerable until targeted
		final NAVObjective objective = new NAVObjective(this){
		    @Override
		    public String getDescription() {
			return "Destroy Boss";
		    }
		    @Override
		    public WorldObject getTarget() {
			return bossObject;
		    }
		};//end new NAVObjective
		indexedNAVObjectiveList.add(objective);
		bossObject.addBehavior(new RemovesNAVObjectiveOnDeath(objective,mission));
		//bossObject.addBehavior(new ChangesBehaviorWhenTargeted(true,DamageableBehavior.class));
		bossObject.addBehavior(new CustomDeathBehavior(new Runnable(){
		    @Override
		    public void run(){
			wMission.get().exitBossMode();
		    }//end run()
		}));
		bossObject.addBehavior(new CustomNAVTargetableBehavior(new Runnable(){
		    @Override
		    public void run() {
			bossObject.probeForBehavior(DamageableBehavior.class).setEnable(true);
			bossObject.setIgnoringProjectiles(false);}
		}));
		// No shield generators at all: the boss itself is the trigger.
		if(bossTargs!=null){
		    if(bossTargs.length==0){
			bossChamberExitShutoffTrigger=bossObject;}}
		else bossChamberExitShutoffTrigger=bossObject;
		worldBossObject = bossObject;
		bossChamberExitShutoffTrigger.addBehavior(new CustomNAVTargetableBehavior(new Runnable(){
		    @Override
		    public void run() {
			bossObject.setActive(true);}
		}));
	    } else if(navSubObject instanceof CHK){
		///////////////////////////////////////////////////////////
		final CHK cp = (CHK)navSubObject;
		final Location3D loc3d = cp.getLocationOnMap();
		final Checkpoint chk = new Checkpoint(tr);
		final double [] chkPos = chk.getPosition();
		// NOTE: legacy files store X/Z swapped relative to modern coords.
		chkPos[0]=TR.legacy2Modern(loc3d.getZ());
		chkPos[1]=TR.legacy2Modern(loc3d.getY()+CHECKPOINT_HEIGHT_PADDING);
		chkPos[2]=TR.legacy2Modern(loc3d.getX());
		chk.notifyPositionChange();
		chk.setIncludeYAxisInCollision(false);
		final NAVObjective objective = new NAVObjective(this){
		    @Override
		    public String getDescription() {
			return "Checkpoint";
		    }
		    @Override
		    public WorldObject getTarget() {
			return chk;
		    }
		};//end new NAVObjective
		chk.setObjectiveToRemove(objective,tr.getGame().getCurrentMission());
		overworld.add(chk);
		indexedNAVObjectiveList.add(objective);
	    } else if(navSubObject instanceof XIT){
		///////////////////////////////////////////////////////////
		// Assumes a TUN entry was seen earlier (currentTunnel set);
		// a malformed NAV file would NPE here — TODO confirm.
		XIT xit = (XIT)navSubObject;
		Location3D loc3d = xit.getLocationOnMap();
		currentTunnel.getExitObject().setExitLocation(
			new Vector3D(TR.legacy2Modern(loc3d.getZ()),TR.legacy2Modern(loc3d.getY()),TR.legacy2Modern(loc3d.getX())));
	    } else if(navSubObject instanceof DUN){
		///////////////////////////////////////////////////////////
		final DUN xit = (DUN)navSubObject;
		final Location3D loc3d = xit.getLocationOnMap();
		final Jumpzone chk = new Jumpzone(tr);
		final double [] chkPos = chk.getPosition();
		chkPos[0]=TR.legacy2Modern(loc3d.getZ());
		chkPos[1]=TR.legacy2Modern(loc3d.getY());
		chkPos[2]=TR.legacy2Modern(loc3d.getX());
		chk.notifyPositionChange();
		chk.setVisible(false);
		try{//Start placing the jump zone.
		    //WorldObject jumpZone = new WorldObject(tr,tr.getResourceManager().getBINModel("JUMP-PNT.BIN", tr.getGlobalPaletteVL(), tr.gpu.get().getGl()));
		    //jumpZone.setPosition(chk.getPosition());
		    //jumpZone.setVisible(true);
		    //overworld.add(jumpZone);
		    final NAVObjective objective = new NAVObjective(this){
			@Override
			public String getDescription() {
			    return "Fly To Jump Zone";
			}
			@Override
			public WorldObject getTarget() {
			    return chk;
			}
		    };//end new NAVObjective
		    chk.setObjectiveToRemove(objective,tr.getGame().getCurrentMission());
		    chk.setIncludeYAxisInCollision(false);
		    overworld.add(chk);
		    indexedNAVObjectiveList.add(objective);
		}catch(Exception e){e.printStackTrace();}
	    }else{System.err.println("Unrecognized NAV objective: "+navSubObject);}
	}//end create()
    }//end Factory
}//end NAVObjective
package org.lightmare.deploy; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import javax.ejb.Local; import javax.ejb.Remote; import javax.ejb.Stateless; import javax.persistence.Entity; import org.apache.log4j.Logger; import org.lightmare.annotations.UnitName; import org.lightmare.cache.ArchiveData; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.DeployData; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.RestContainer; import org.lightmare.cache.TmpResources; import org.lightmare.config.Configuration; import org.lightmare.deploy.fs.Watcher; import org.lightmare.jpa.JPAManager; import org.lightmare.jpa.datasource.DataSourceInitializer; import org.lightmare.jpa.datasource.PoolConfig.PoolProviderType; import org.lightmare.libraries.LibraryLoader; import org.lightmare.remote.rpc.RPCall; import org.lightmare.remote.rpc.RpcListener; import org.lightmare.rest.providers.RestProvider; import org.lightmare.scannotation.AnnotationDB; import org.lightmare.utils.AbstractIOUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.fs.FileUtils; import org.lightmare.utils.fs.WatchUtils; import org.lightmare.utils.reflect.MetaUtils; import org.lightmare.utils.shutdown.ShutDown; /** * Determines and saves in cache ejb beans {@link org.lightmare.cache.MetaData} * on startup * * @author Levan * */ public class MetaCreator { private static AnnotationDB annotationDB; private TmpResources tmpResources; private boolean await; // Blocker for 
deployments connections or beans private CountDownLatch blocker; // Data for cache at deploy time private Map<String, AbstractIOUtils> aggregateds = new HashMap<String, AbstractIOUtils>(); private Map<URL, ArchiveData> archivesURLs; private Map<String, URL> classOwnersURL; private Map<URL, DeployData> realURL; private ClassLoader current; // Configuration for appropriate archives URLs private Configuration configuration; // Lock for deployment and directory scanning private final Lock scannerLock = new ReentrantLock(); // Lock for MetaCreator initialization private static final Lock LOCK = new ReentrantLock(); private static final Logger LOG = Logger.getLogger(MetaCreator.class); private MetaCreator() { tmpResources = new TmpResources(); ShutDown.setHook(tmpResources); } private static MetaCreator get() { MetaCreator creator = MetaContainer.getCreator(); if (creator == null) { LOCK.lock(); try { if (creator == null) { creator = new MetaCreator(); MetaContainer.setCreator(creator); } } finally { LOCK.unlock(); } } return creator; } private void configure(URL[] archives) { if (configuration == null && ObjectUtils.available(archives)) { configuration = MetaContainer.getConfig(archives); } } public AnnotationDB getAnnotationDB() { return annotationDB; } /** * Checks weather {@link javax.persistence.Entity} annotated classes is need * to be filtered by {@link org.lightmare.annotations.UnitName} value * * @param className * @return boolean * @throws IOException */ private boolean checkForUnitName(String className, Configuration cloneConfig) throws IOException { boolean isValid = Boolean.FALSE; Class<?> entityClass; entityClass = MetaUtils.initClassForName(className); UnitName annotation = entityClass.getAnnotation(UnitName.class); isValid = annotation.value().equals(cloneConfig.getAnnotatedUnitName()); return isValid; } private List<String> translateToList(Set<String> classSet) { List<String> classList = new ArrayList<String>(classSet); return classList; } /** * Defines 
belonginess of {@link javax.persistence.Entity} annotated classes * to jar file * * @param classSet * @return {@link List}<String> */ private void filterEntitiesForJar(Set<String> classSet, String fileNameForBean) { Map<String, String> classOwnersFiles = annotationDB .getClassOwnersFiles(); String fileNameForEntity; for (String entityName : classSet) { fileNameForEntity = classOwnersFiles.get(entityName); if (ObjectUtils.notNullAll(fileNameForEntity, fileNameForBean) && ObjectUtils.notTrue(fileNameForEntity .equals(fileNameForBean))) { classSet.remove(entityName); } } } /** * Filters {@link javax.persistence.Entity} annotated classes by name or by * {@link org.lightmare.annotations.UnitName} by configuration * * @param classSet * @return {@link List}<String> * @throws IOException */ private List<String> filterEntities(Set<String> classSet, Configuration configClone) throws IOException { List<String> classes; if (configClone.getAnnotatedUnitName() == null) { classes = translateToList(classSet); } else { Set<String> filtereds = new HashSet<String>(); for (String className : classSet) { if (checkForUnitName(className, configClone)) { filtereds.add(className); } } classes = translateToList(filtereds); } return classes; } /** * Creates connection associated with unit name if such does not exists * * @param unitName * @param beanName * @throws IOException */ protected void configureConnection(String unitName, String beanName, ClassLoader loader, Configuration configClone) throws IOException { JPAManager.Builder builder = new JPAManager.Builder(); Map<String, String> classOwnersFiles = annotationDB .getClassOwnersFiles(); AbstractIOUtils ioUtils = aggregateds.get(beanName); if (ObjectUtils.notNull(ioUtils)) { URL jarURL = ioUtils.getAppropriatedURL(classOwnersFiles, beanName); builder.setURL(jarURL); } if (configClone.isScanForEntities()) { Set<String> classSet; Map<String, Set<String>> annotationIndex = annotationDB .getAnnotationIndex(); classSet = 
annotationIndex.get(Entity.class.getName()); String annotatedUnitName = configClone.getAnnotatedUnitName(); if (annotatedUnitName == null) { classSet = annotationIndex.get(Entity.class.getName()); } else if (annotatedUnitName.equals(unitName)) { Set<String> unitNamedSet = annotationIndex.get(UnitName.class .getName()); // Intersects entities with unit name annotated classes classSet.retainAll(unitNamedSet); } if (ObjectUtils.notNull(ioUtils)) { String fileNameForBean = classOwnersFiles.get(beanName); filterEntitiesForJar(classSet, fileNameForBean); } List<String> classes = filterEntities(classSet, configClone); builder.setClasses(classes); } // Builds connection for appropriated persistence unit name builder.setPath(configClone.getPersXmlPath()) .setProperties(configClone.getPersistenceProperties()) .setSwapDataSource(configClone.isSwapDataSource()) .setScanArchives(configClone.isScanArchives()) .setClassLoader(loader).build().setConnection(unitName); } /** * Caches each archive by it's {@link URL} for deployment * * @param ejbURLs * @param archiveData */ private void fillArchiveURLs(Collection<URL> ejbURLs, ArchiveData archiveData, DeployData deployData) { for (URL ejbURL : ejbURLs) { archivesURLs.put(ejbURL, archiveData); realURL.put(ejbURL, deployData); } } /** * Caches each archive by it's {@link URL} for deployment and creates fill * {@link URL} array for scanning and finding {@link javax.ejb.Stateless} * annotated classes * * @param archive * @param modifiedArchives * @throws IOException */ private void fillArchiveURLs(URL archive, List<URL> modifiedArchives) throws IOException { AbstractIOUtils ioUtils = AbstractIOUtils.getAppropriatedType(archive); if (ObjectUtils.notNull(ioUtils)) { ioUtils.scan(configuration.isPersXmlFromJar()); List<URL> ejbURLs = ioUtils.getEjbURLs(); modifiedArchives.addAll(ejbURLs); ArchiveData archiveData = new ArchiveData(); archiveData.setIoUtils(ioUtils); DeployData deployData = new DeployData(); 
deployData.setType(ioUtils.getType()); deployData.setUrl(archive); if (ejbURLs.isEmpty()) { archivesURLs.put(archive, archiveData); realURL.put(archive, deployData); } else { fillArchiveURLs(ejbURLs, archiveData, deployData); } } } /** * Gets {@link URL} array for all classes and jar libraries within archive * file for class loading policy * * @param archives * @return {@link URL}[] * @throws IOException */ private URL[] getFullArchives(URL[] archives) throws IOException { List<URL> modifiedArchives = new ArrayList<URL>(); for (URL archive : archives) { fillArchiveURLs(archive, modifiedArchives); } return ObjectUtils.toArray(modifiedArchives, URL.class); } /** * Awaits for {@link Future} tasks if it set so by configuration * * @param future */ private void awaitDeployment(Future<String> future) { if (await) { try { String nameFromFuture = future.get(); LOG.info(String.format("Deploy processing of %s finished", nameFromFuture)); } catch (InterruptedException ex) { LOG.error(ex.getMessage(), ex); } catch (ExecutionException ex) { LOG.error(ex.getMessage(), ex); } } } /** * Awaits for {@link CountDownLatch} of deployments */ private void awaitDeployments() { try { blocker.await(); } catch (InterruptedException ex) { LOG.error(ex); } } /** * Starts bean deployment process for bean name * * @param beanName * @throws IOException */ private void deployBean(String beanName) throws IOException { URL currentURL = classOwnersURL.get(beanName); ArchiveData archiveData = archivesURLs.get(currentURL); if (archiveData == null) { archiveData = new ArchiveData(); } AbstractIOUtils ioUtils = archiveData.getIoUtils(); if (ioUtils == null) { ioUtils = AbstractIOUtils.getAppropriatedType(currentURL); archiveData.setIoUtils(ioUtils); } ClassLoader loader = archiveData.getLoader(); // Finds appropriated ClassLoader if needed and or creates new one List<File> tmpFiles = null; if (ObjectUtils.notNull(ioUtils)) { if (loader == null) { if (ioUtils.notExecuted()) { 
ioUtils.scan(configuration.isPersXmlFromJar()); } URL[] libURLs = ioUtils.getURLs(); loader = LibraryLoader.initializeLoader(libURLs); archiveData.setLoader(loader); } tmpFiles = ioUtils.getTmpFiles(); aggregateds.put(beanName, ioUtils); } // Archive file url which contains this bean DeployData deployData; if (ObjectUtils.available(realURL)) { deployData = realURL.get(currentURL); } else { deployData = null; } // Initializes and fills BeanLoader.BeanParameters class to deploy // stateless ejb bean BeanLoader.BeanParameters parameters = new BeanLoader.BeanParameters(); parameters.creator = this; parameters.className = beanName; parameters.loader = loader; parameters.tmpFiles = tmpFiles; parameters.blocker = blocker; parameters.deployData = deployData; parameters.configuration = configuration; Future<String> future = BeanLoader.loadBean(parameters); awaitDeployment(future); if (ObjectUtils.available(tmpFiles)) { tmpResources.addFile(tmpFiles); } } /** * Deploys single bean by class name * * @param beanNames */ private void deployBeans(Set<String> beanNames) { blocker = new CountDownLatch(beanNames.size()); for (String beanName : beanNames) { LOG.info(String.format("deploing bean %s", beanName)); try { deployBean(beanName); } catch (IOException ex) { LOG.error(String.format("Could not deploy bean %s", beanName), ex); } } awaitDeployments(); if (RestContainer.hasRest()) { RestProvider.reload(); } boolean hotDeployment = configuration.isHotDeployment(); boolean watchStatus = configuration.isWatchStatus(); if (hotDeployment && ObjectUtils.notTrue(watchStatus)) { Watcher.startWatch(); watchStatus = Boolean.TRUE; } } /** * Scan application for find all {@link javax.ejb.Stateless} beans and * {@link Remote} or {@link Local} proxy interfaces * * @param archives * @throws IOException * @throws ClassNotFoundException */ public void scanForBeans(URL[] archives) throws IOException { scannerLock.lock(); try { configure(archives); // starts RPC server if configured as remote and 
server if (configuration.isRemote() && Configuration.isServer()) { RpcListener.startServer(configuration); } else if (configuration.isRemote()) { RPCall.configure(configuration); } String[] libraryPaths = configuration.getLibraryPaths(); // Loads libraries from specified path if (ObjectUtils.notNull(libraryPaths)) { LibraryLoader.loadLibraries(libraryPaths); } // Gets and caches class loader current = LibraryLoader.getContextClassLoader(); archivesURLs = new HashMap<URL, ArchiveData>(); if (ObjectUtils.available(archives)) { realURL = new HashMap<URL, DeployData>(); } URL[] fullArchives = getFullArchives(archives); annotationDB = new AnnotationDB(); annotationDB.setScanFieldAnnotations(Boolean.FALSE); annotationDB.setScanParameterAnnotations(Boolean.FALSE); annotationDB.setScanMethodAnnotations(Boolean.FALSE); annotationDB.scanArchives(fullArchives); Set<String> beanNames = annotationDB.getAnnotationIndex().get( Stateless.class.getName()); classOwnersURL = annotationDB.getClassOwnersURLs(); DataSourceInitializer.initializeDataSources(configuration); if (ObjectUtils.available(beanNames)) { deployBeans(beanNames); } } finally { // Caches configuration MetaContainer.putConfig(archives, configuration); // clears cached resources clear(); // gets rid from all created temporary files tmpResources.removeTempFiles(); scannerLock.unlock(); } } /** * Scan application for find all {@link javax.ejb.Stateless} beans and * {@link Remote} or {@link Local} proxy interfaces * * @throws ClassNotFoundException * @throws IOException */ public void scanForBeans(File[] jars) throws IOException { List<URL> urlList = new ArrayList<URL>(); URL url; for (File file : jars) { url = file.toURI().toURL(); urlList.add(url); } URL[] archives = ObjectUtils.toArray(urlList, URL.class); scanForBeans(archives); } /** * Scan application for find all {@link javax.ejb.Stateless} beans and * {@link Remote} or {@link Local} proxy interfaces * * @throws ClassNotFoundException * @throws IOException */ 
public void scanForBeans(String... paths) throws IOException {
    // When no explicit path is passed, collect paths from the configured
    // deployment directories (scanning sub entries where requested)
    if (ObjectUtils.notAvailable(paths)
            && ObjectUtils.available(configuration.getDeploymentPath())) {
        Set<DeploymentDirectory> deployments = configuration
                .getDeploymentPath();
        List<String> pathList = new ArrayList<String>();
        File deployFile;
        for (DeploymentDirectory deployment : deployments) {
            deployFile = new File(deployment.getPath());
            if (deployment.isScan()) {
                // NOTE(review): File.list() yields bare entry names, not
                // full paths - presumably FileUtils.toURLWithClasspath
                // resolves them; TODO confirm against that helper
                String[] subDeployments = deployFile.list();
                if (ObjectUtils.available(subDeployments)) {
                    pathList.addAll(Arrays.asList(subDeployments));
                }
            }
        }
        paths = ObjectUtils.toArray(pathList, String.class);
    }
    List<URL> urlList = new ArrayList<URL>();
    List<URL> archive;
    for (String path : paths) {
        archive = FileUtils.toURLWithClasspath(path);
        urlList.addAll(archive);
    }
    URL[] archives = ObjectUtils.toArray(urlList, URL.class);
    scanForBeans(archives);
}

/**
 * Gets the cached context {@link ClassLoader}.
 *
 * @return {@link ClassLoader}
 */
public ClassLoader getCurrent() {
    return current;
}

/**
 * Closes all existing connections
 *
 * @throws IOException
 */
public static void closeAllConnections() throws IOException {
    ConnectionContainer.closeConnections();
}

/**
 * Clears all locally cached data
 */
public void clear() {
    // FIX: the original busy-waited on tryLock() in a spin loop; a plain
    // blocking lock() acquires the same lock without burning CPU
    scannerLock.lock();
    try {
        if (ObjectUtils.available(realURL)) {
            realURL.clear();
            realURL = null;
        }
        if (ObjectUtils.available(aggregateds)) {
            aggregateds.clear();
        }
        if (ObjectUtils.available(archivesURLs)) {
            archivesURLs.clear();
            archivesURLs = null;
        }
        if (ObjectUtils.available(classOwnersURL)) {
            classOwnersURL.clear();
            classOwnersURL = null;
        }
        configuration = null;
    } finally {
        scannerLock.unlock();
    }
}

/**
 * Closes all connections clears all caches
 *
 * @throws IOException
 */
public static void close() throws IOException {
    ShutDown.clearAll();
}

/**
 * Builder class to provide properties for lightmare application and
 * initialize {@link MetaCreator} instance
 *
 * @author levan
 *
 */
public static class Builder {

    private
MetaCreator creator;

    /**
     * Initializes builder, optionally cloning an already cached
     * {@link Configuration} so earlier settings are kept; otherwise starts
     * from a fresh configuration.
     *
     * @param cloneConfiguration clone existing configuration when available
     * @throws IOException wraps {@link CloneNotSupportedException}
     */
    public Builder(boolean cloneConfiguration) throws IOException {
        creator = MetaCreator.get();
        Configuration config = creator.configuration;
        if (cloneConfiguration && ObjectUtils.notNull(config)) {
            try {
                creator.configuration = (Configuration) config.clone();
            } catch (CloneNotSupportedException ex) {
                throw new IOException(ex);
            }
        } else {
            creator.configuration = new Configuration();
        }
    }

    // Default builder: always starts from a fresh Configuration
    public Builder() throws IOException {
        this(Boolean.FALSE);
    }

    /**
     * Initializes builder and configures it from the passed {@link Map}.
     *
     * @param configuration
     * @throws IOException
     */
    public Builder(Map<Object, Object> configuration) throws IOException {
        this();
        creator.configuration.configure(configuration);
    }

    /**
     * Initializes builder and configures it from the passed file path.
     *
     * @param path
     * @throws IOException
     */
    public Builder(String path) throws IOException {
        this();
        creator.configuration.configure(path);
    }

    // Lazily creates and caches the persistence properties map on the
    // configuration instance
    private Map<Object, Object> initPersistenceProperties() {
        Map<Object, Object> persistenceProperties = creator.configuration
                .getPersistenceProperties();
        if (persistenceProperties == null) {
            persistenceProperties = new HashMap<Object, Object>();
            creator.configuration
                    .setPersistenceProperties(persistenceProperties);
        }
        return persistenceProperties;
    }

    /**
     * Sets additional persistence properties
     *
     * @param properties
     * @return {@link Builder}
     */
    public Builder setPersistenceProperties(Map<String, String> properties) {
        if (ObjectUtils.available(properties)) {
            Map<Object, Object> persistenceProperties = initPersistenceProperties();
            persistenceProperties.putAll(properties);
        }
        return this;
    }

    /**
     * Adds instant persistence property
     *
     * @param key
     * @param property
     * @return {@link Builder}
     */
    public Builder addPersistenceProperty(String key, String property) {
        Map<Object, Object> persistenceProperties = initPersistenceProperties();
        persistenceProperties.put(key, property);
        return this;
    }

    /**
     * Adds property to scan for {@link javax.persistence.Entity} annotated
     * classes from deployed archives
     *
     * @param scanForEnt
     * @return {@link Builder}
     */
    public Builder setScanForEntities(boolean scanForEnt) {
        creator.configuration.setScanForEntities(scanForEnt);
        return this;
    }

    /**
     * Adds property to use only {@link org.lightmare.annotations.UnitName}
     * annotated entities for which
     * {@link org.lightmare.annotations.UnitName#value()} matches passed
     * unit name
     *
     * @param unitName
     * @return {@link Builder}
     */
    public Builder setUnitName(String unitName) {
        creator.configuration.setAnnotatedUnitName(unitName);
        return this;
    }

    /**
     * Sets path for persistence.xml file; also disables archive scanning
     * since the descriptor location is now explicit
     *
     * @param path
     * @return {@link Builder}
     */
    public Builder setPersXmlPath(String path) {
        creator.configuration.setPersXmlPath(path);
        creator.configuration.setScanArchives(Boolean.FALSE);
        return this;
    }

    /**
     * Adds path for additional libraries to load at start time
     *
     * @param libPaths
     * @return {@link Builder}
     */
    public Builder setLibraryPath(String... libPaths) {
        creator.configuration.setLibraryPaths(libPaths);
        return this;
    }

    /**
     * Sets boolean checker to scan persistence.xml files from appropriated
     * jar files
     *
     * @param xmlFromJar
     * @return {@link Builder}
     */
    public Builder setXmlFromJar(boolean xmlFromJar) {
        creator.configuration.setPersXmlFromJar(xmlFromJar);
        return this;
    }

    /**
     * Sets boolean checker to swap jta data source value with non jta data
     * source value
     *
     * @param swapDataSource
     * @return {@link Builder}
     */
    public Builder setSwapDataSource(boolean swapDataSource) {
        creator.configuration.setSwapDataSource(swapDataSource);
        return this;
    }

    /**
     * Adds path for data source file
     *
     * @param dataSourcePath
     * @return {@link Builder}
     */
    public Builder addDataSourcePath(String dataSourcePath) {
        creator.configuration.addDataSourcePath(dataSourcePath);
        return this;
    }

    /**
     * This method is deprecated should use
     * {@link MetaCreator.Builder#addDataSourcePath(String)} instead
     *
     * @param dataSourcePath
     * @return {@link MetaCreator.Builder}
     */
    @Deprecated
    public Builder setDataSourcePath(String dataSourcePath) {
        creator.configuration.addDataSourcePath(dataSourcePath);
        return this;
    }

    /**
     * Sets boolean checker to scan {@link javax.persistence.Entity}
     * annotated classes from appropriated deployed archive files
     *
     * @param scanArchives
     * @return {@link Builder}
     */
    public Builder setScanArchives(boolean scanArchives) {
        creator.configuration.setScanArchives(scanArchives);
        return this;
    }

    /**
     * Sets boolean checker to block deployment processes
     *
     * @param await
     * @return {@link Builder}
     */
    public Builder setAwaitDeploiment(boolean await) {
        creator.await = await;
        return this;
    }

    /**
     * Sets property is server or not in embedded mode
     *
     * @param remote
     * @return {@link Builder}
     */
    public Builder setRemote(boolean remote) {
        creator.configuration.setRemote(remote);
        return this;
    }

    /**
     * Sets property is application server or just client for other remote
     * server; client flag is kept as the inverse of the server flag
     *
     * @param server
     * @return {@link Builder}
     */
    public Builder setServer(boolean server) {
        Configuration.setServer(server);
        creator.configuration.setClient(ObjectUtils.notTrue(server));
        return this;
    }

    /**
     * Sets boolean check is application in just client mode or not; server
     * flag is kept as the inverse of the client flag
     *
     * @param client
     * @return {@link Builder}
     */
    public Builder setClient(boolean client) {
        creator.configuration.setClient(client);
        Configuration.setServer(ObjectUtils.notTrue(client));
        return this;
    }

    /**
     * To add any additional property
     *
     * @param key
     * @param property
     * @return {@link Builder}
     */
    public Builder setProperty(String key, String property) {
        creator.configuration.putValue(key, property);
        return this;
    }

    /**
     * File path for administrator user name and password
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setAdminUsersPth(String property) {
        Configuration.setAdminUsersPath(property);
        return this;
    }

    /**
     * Sets specific IP address in case when application is in remote server
     * mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setIpAddress(String property) {
        creator.configuration.putValue(Configuration.IP_ADDRESS_KEY,
                property);
        return this;
    }

    /**
     * Sets specific port in case when application is in remote server mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setPort(String property) {
        creator.configuration.putValue(Configuration.PORT_KEY, property);
        return this;
    }

    /**
     * Sets amount for network master threads in case when application is in
     * remote server mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setMasterThreads(String property) {
        creator.configuration.putValue(Configuration.BOSS_POOL_KEY,
                property);
        return this;
    }

    /**
     * Sets amount of worker threads in case when application is in remote
     * server mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setWorkerThreads(String property) {
        creator.configuration.putValue(Configuration.WORKER_POOL_KEY,
                property);
        return this;
    }

    /**
     * Adds deploy file path to application with boolean checker if file is
     * directory to scan this directory for deployment files list
     *
     * @param deploymentPath
     * @param scan
     * @return {@link Builder}
     */
    public Builder addDeploymentPath(String deploymentPath, boolean scan) {
        String clearPath = WatchUtils.clearPath(deploymentPath);
        creator.configuration.addDeploymentPath(clearPath, scan);
        return this;
    }

    /**
     * Adds deploy file path to application
     *
     * @param deploymentPath
     * @return {@link Builder}
     */
    public Builder addDeploymentPath(String deploymentPath) {
        addDeploymentPath(deploymentPath, Boolean.FALSE);
        return this;
    }

    /**
     * Adds timeout for connection in case when application is in remote
     * server or client mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setTimeout(String property) {
        creator.configuration.putValue(
                Configuration.CONNECTION_TIMEOUT_KEY, property);
        return this;
    }

    /**
     * Adds boolean check if application is using pooled data source
     *
     * @param dsPooledType
     * @return {@link Builder}
     */
    public Builder setDataSourcePooledType(boolean dsPooledType) {
        creator.configuration.setDataSourcePooledType(dsPooledType);
        return this;
    }

    /**
     * Sets which data source pool provider should use application by
     * {@link PoolProviderType} parameter
     *
     * @param poolProviderType
     * @return {@link Builder}
     */
    public Builder setPoolProviderType(PoolProviderType poolProviderType) {
        creator.configuration.setPoolProviderType(poolProviderType);
        return this;
    }

    /**
     * Sets path for data source pool additional properties
     *
     * @param path
     * @return {@link Builder}
     */
    public Builder setPoolPropertiesPath(String path) {
        creator.configuration.setPoolPropertiesPath(path);
        return this;
    }

    /**
     * Sets data source pool additional properties
     *
     * @param properties
     * @return {@link Builder}
     */
    public Builder setPoolProperties(
            Map<? extends Object, ? extends Object> properties) {
        creator.configuration.setPoolProperties(properties);
        return this;
    }

    /**
     * Adds instance property for pooled data source
     *
     * @param key
     * @param value
     * @return {@link Builder}
     */
    public Builder addPoolProperty(Object key, Object value) {
        creator.configuration.addPoolProperty(key, value);
        return this;
    }

    /**
     * Sets boolean check is application in hot deployment (with watch
     * service on deployment directories) or not
     *
     * @param hotDeployment
     * @return {@link Builder}
     */
    public Builder setHotDeployment(boolean hotDeployment) {
        creator.configuration.setHotDeployment(hotDeployment);
        return this;
    }

    /**
     * Adds additional parameters from passed {@link Map} to existing
     * configuration
     *
     * @param configuration
     * @return {@link Builder}
     */
    public Builder addConfiguration(Map<Object, Object> configuration) {
        creator.configuration.configure(configuration);
        return this;
    }

    // Finalizes configuration and returns the (singleton) MetaCreator
    public MetaCreator build() throws IOException {
        creator.configuration.configure();
        LOG.info("Lightmare application starts working");
        return creator;
    }
}
}
package org.lightmare.deploy; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import javax.ejb.Local; import javax.ejb.Remote; import javax.ejb.Stateless; import javax.persistence.Entity; import org.apache.log4j.Logger; import org.lightmare.annotations.UnitName; import org.lightmare.cache.ArchiveData; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.DeployData; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.RestContainer; import org.lightmare.cache.TmpResources; import org.lightmare.config.Configuration; import org.lightmare.deploy.fs.Watcher; import org.lightmare.jpa.JPAManager; import org.lightmare.jpa.datasource.DataSourceInitializer; import org.lightmare.jpa.datasource.PoolConfig; import org.lightmare.jpa.datasource.PoolConfig.PoolProviderType; import org.lightmare.libraries.LibraryLoader; import org.lightmare.remote.rpc.RPCall; import org.lightmare.remote.rpc.RpcListener; import org.lightmare.rest.utils.RestUtils; import org.lightmare.scannotation.AnnotationDB; import org.lightmare.utils.AbstractIOUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.fs.FileUtils; import org.lightmare.utils.fs.WatchUtils; import org.lightmare.utils.reflect.MetaUtils; import org.lightmare.utils.shutdown.ShutDown; /** * Determines and saves in cache ejb beans {@link org.lightmare.cache.MetaData} * on startup * * @author Levan * */ public class MetaCreator { private static AnnotationDB annotationDB; private TmpResources tmpResources; 
private boolean await; // Blocker for deployments private CountDownLatch blocker; // Data for cache at deploy time private Map<String, AbstractIOUtils> aggregateds = new HashMap<String, AbstractIOUtils>(); private Map<URL, ArchiveData> archivesURLs; private Map<String, URL> classOwnersURL; private Map<URL, DeployData> realURL; private ClassLoader current; // Configuration for appropriate archives URLs private Configuration configuration; private final Lock scannerLock = new ReentrantLock(); private static final Lock LOCK = new ReentrantLock(); private static final Logger LOG = Logger.getLogger(MetaCreator.class); private MetaCreator() { tmpResources = new TmpResources(); ShutDown.setHook(tmpResources); } private static MetaCreator get() { MetaCreator creator = MetaContainer.getCreator(); if (creator == null) { LOCK.lock(); try { if (creator == null) { creator = new MetaCreator(); MetaContainer.setCreator(creator); } } finally { LOCK.unlock(); } } return creator; } private void configure(URL[] archives) { if (configuration == null && ObjectUtils.available(archives)) { configuration = MetaContainer.getConfig(archives); } } public AnnotationDB getAnnotationDB() { return annotationDB; } /** * Checks weather {@link javax.persistence.Entity} annotated classes is need * to be filtered by {@link org.lightmare.annotations.UnitName} value * * @param className * @return boolean * @throws IOException */ private boolean checkForUnitName(String className, Configuration cloneConfig) throws IOException { boolean isValid = Boolean.FALSE; Class<?> entityClass; entityClass = MetaUtils.initClassForName(className); UnitName annotation = entityClass.getAnnotation(UnitName.class); isValid = annotation.value().equals(cloneConfig.getAnnotatedUnitName()); return isValid; } private List<String> translateToList(Set<String> classSet) { String[] classArray = new String[classSet.size()]; classArray = ObjectUtils.toArray(classSet, String.class); List<String> classList = Arrays.asList(classArray); 
return classList; } /** * Defines belonginess of {@link javax.persistence.Entity} annotated classes * to jar file * * @param classSet * @return {@link List}<String> */ private void filterEntitiesForJar(Set<String> classSet, String fileNameForBean) { Map<String, String> classOwnersFiles = annotationDB .getClassOwnersFiles(); String fileNameForEntity; for (String entityName : classSet) { fileNameForEntity = classOwnersFiles.get(entityName); if (ObjectUtils.notNull(fileNameForEntity) && ObjectUtils.notNull(fileNameForBean) && !fileNameForEntity.equals(fileNameForBean)) { classSet.remove(entityName); } } } /** * Filters {@link javax.persistence.Entity} annotated classes by name or by * {@link org.lightmare.annotations.UnitName} by configuration * * @param classSet * @return {@link List}<String> * @throws IOException */ private List<String> filterEntities(Set<String> classSet, Configuration configClone) throws IOException { List<String> classes; if (configClone.getAnnotatedUnitName() == null) { classes = translateToList(classSet); } else { Set<String> filtereds = new HashSet<String>(); for (String className : classSet) { if (checkForUnitName(className, configClone)) { filtereds.add(className); } } classes = translateToList(filtereds); } return classes; } /** * Creates connection associated with unit name if such does not exists * * @param unitName * @param beanName * @throws IOException */ protected void configureConnection(String unitName, String beanName, ClassLoader loader, Configuration configClone) throws IOException { JPAManager.Builder builder = new JPAManager.Builder(); Map<String, String> classOwnersFiles = annotationDB .getClassOwnersFiles(); AbstractIOUtils ioUtils = aggregateds.get(beanName); if (ObjectUtils.notNull(ioUtils)) { URL jarURL = ioUtils.getAppropriatedURL(classOwnersFiles, beanName); builder.setURL(jarURL); } if (configClone.isScanForEntities()) { Set<String> classSet; Map<String, Set<String>> annotationIndex = annotationDB .getAnnotationIndex(); 
// --- continuation of configureConnection(...): classSet, annotationIndex,
// ioUtils, classOwnersFiles, builder, unitName, beanName, loader and
// configClone are declared before this chunk ---
        classSet = annotationIndex.get(Entity.class.getName());
        String annotatedUnitName = configClone.getAnnotatedUnitName();
        // NOTE(review): when annotatedUnitName == null this re-assigns the
        // same value classSet already holds - redundant but harmless
        if (annotatedUnitName == null) {
            classSet = annotationIndex.get(Entity.class.getName());
        } else if (annotatedUnitName.equals(unitName)) {
            // keep only entities that also carry @UnitName
            Set<String> unitNamedSet = annotationIndex.get(UnitName.class
                    .getName());
            classSet.retainAll(unitNamedSet);
        }
        if (ObjectUtils.notNull(ioUtils)) {
            // drop entities owned by a different archive than the bean
            String fileNameForBean = classOwnersFiles.get(beanName);
            filterEntitiesForJar(classSet, fileNameForBean);
        }
        List<String> classes = filterEntities(classSet, configClone);
        builder.setClasses(classes);
    }
    // Build JPA manager and register the connection for the unit name
    builder.setPath(configClone.getPersXmlPath())
            .setProperties(configClone.getPersistenceProperties())
            .setSwapDataSource(configClone.isSwapDataSource())
            .setScanArchives(configClone.isScanArchives())
            .setClassLoader(loader).build().setConnection(unitName);
}

/**
 * Caches each archive by it's {@link URL} for deployment
 *
 * @param ejbURLs
 * @param archiveData
 */
private void fillArchiveURLs(Collection<URL> ejbURLs,
        ArchiveData archiveData, DeployData deployData) {
    for (URL ejbURL : ejbURLs) {
        archivesURLs.put(ejbURL, archiveData);
        realURL.put(ejbURL, deployData);
    }
}

/**
 * Caches each archive by it's {@link URL} for deployment and creates fill
 * {@link URL} array for scanning and finding {@link javax.ejb.Stateless}
 * annotated classes
 *
 * @param archive
 * @param modifiedArchives
 * @throws IOException
 */
private void fillArchiveURLs(URL archive, List<URL> modifiedArchives)
        throws IOException {
    AbstractIOUtils ioUtils = AbstractIOUtils.getAppropriatedType(archive);
    if (ObjectUtils.notNull(ioUtils)) {
        ioUtils.scan(configuration.isPersXmlFromJar());
        List<URL> ejbURLs = ioUtils.getEjbURLs();
        modifiedArchives.addAll(ejbURLs);
        ArchiveData archiveData = new ArchiveData();
        archiveData.setIoUtils(ioUtils);
        DeployData deployData = new DeployData();
        deployData.setType(ioUtils.getType());
        deployData.setUrl(archive);
        // archives with no ejb URLs are cached under their own URL
        if (ejbURLs.isEmpty()) {
            archivesURLs.put(archive, archiveData);
            realURL.put(archive, deployData);
        } else {
            fillArchiveURLs(ejbURLs, archiveData, deployData);
        }
    }
}

/**
 * Gets {@link URL} array for all classes and jar libraries within archive
 * file for class loading policy
 *
 * @param archives
 * @return {@link URL}[]
 * @throws IOException
 */
private URL[] getFullArchives(URL[] archives) throws IOException {
    List<URL> modifiedArchives = new ArrayList<URL>();
    for (URL archive : archives) {
        fillArchiveURLs(archive, modifiedArchives);
    }
    return ObjectUtils.toArray(modifiedArchives, URL.class);
}

/**
 * Awaits for {@link Future} tasks if it set so by configuration
 *
 * @param future
 */
private void awaitDeployment(Future<String> future) {
    if (await) {
        try {
            String nameFromFuture = future.get();
            LOG.info(String.format("Deploy processing of %s finished",
                    nameFromFuture));
        } catch (InterruptedException ex) {
            LOG.error(ex.getMessage(), ex);
        } catch (ExecutionException ex) {
            LOG.error(ex.getMessage(), ex);
        }
    }
}

/**
 * Awaits for {@link CountDownLatch} of deployments
 */
private void awaitDeployments() {
    try {
        blocker.await();
    } catch (InterruptedException ex) {
        LOG.error(ex);
    }
}

/**
 * Starts bean deployment process for bean name
 *
 * @param beanName
 * @throws IOException
 */
private void deployBean(String beanName) throws IOException {
    URL currentURL = classOwnersURL.get(beanName);
    ArchiveData archiveData = archivesURLs.get(currentURL);
    if (archiveData == null) {
        archiveData = new ArchiveData();
    }
    AbstractIOUtils ioUtils = archiveData.getIoUtils();
    if (ioUtils == null) {
        ioUtils = AbstractIOUtils.getAppropriatedType(currentURL);
        archiveData.setIoUtils(ioUtils);
    }
    ClassLoader loader = archiveData.getLoader();
    // Finds appropriated ClassLoader if needed and or creates new one
    List<File> tmpFiles = null;
    if (ObjectUtils.notNull(ioUtils)) {
        if (loader == null) {
            if (!ioUtils.isExecuted()) {
                ioUtils.scan(configuration.isPersXmlFromJar());
            }
            URL[] libURLs = ioUtils.getURLs();
            loader = LibraryLoader.initializeLoader(libURLs);
archiveData.setLoader(loader); } tmpFiles = ioUtils.getTmpFiles(); aggregateds.put(beanName, ioUtils); } // Archive file url which contains this bean DeployData deployData; if (ObjectUtils.available(realURL)) { deployData = realURL.get(currentURL); } else { deployData = null; } // Initializes and fills BeanLoader.BeanParameters class to deploy // stateless ejb bean BeanLoader.BeanParameters parameters = new BeanLoader.BeanParameters(); parameters.creator = this; parameters.className = beanName; parameters.loader = loader; parameters.tmpFiles = tmpFiles; parameters.blocker = blocker; parameters.deployData = deployData; parameters.configuration = configuration; Future<String> future = BeanLoader.loadBean(parameters); awaitDeployment(future); if (ObjectUtils.available(tmpFiles)) { tmpResources.addFile(tmpFiles); } } /** * Deploys single bean by class name * * @param beanNames */ private void deployBeans(Set<String> beanNames) { blocker = new CountDownLatch(beanNames.size()); for (String beanName : beanNames) { LOG.info(String.format("deploing bean %s", beanName)); try { deployBean(beanName); } catch (IOException ex) { LOG.error(String.format("Could not deploy bean %s", beanName), ex); } } awaitDeployments(); if (RestContainer.hasRest()) { RestUtils.reload(); } boolean hotDeployment = configuration.isHotDeployment(); boolean watchStatus = configuration.isWatchStatus(); if (hotDeployment && ObjectUtils.notTrue(watchStatus)) { Watcher.startWatch(); watchStatus = Boolean.TRUE; } } /** * Scan application for find all {@link javax.ejb.Stateless} beans and * {@link Remote} or {@link Local} proxy interfaces * * @param archives * @throws IOException * @throws ClassNotFoundException */ public void scanForBeans(URL[] archives) throws IOException { scannerLock.lock(); try { configure(archives); // starts RPC server if configured as remote and server if (configuration.isRemote() && Configuration.isServer()) { RpcListener.startServer(configuration); } else if 
(configuration.isRemote()) { RPCall.configure(configuration); } String[] libraryPaths = configuration.getLibraryPaths(); // Loads libraries from specified path if (ObjectUtils.notNull(libraryPaths)) { LibraryLoader.loadLibraries(libraryPaths); } // Gets and caches class loader current = LibraryLoader.getContextClassLoader(); archivesURLs = new HashMap<URL, ArchiveData>(); if (ObjectUtils.available(archives)) { realURL = new HashMap<URL, DeployData>(); } URL[] fullArchives = getFullArchives(archives); annotationDB = new AnnotationDB(); annotationDB.setScanFieldAnnotations(Boolean.FALSE); annotationDB.setScanParameterAnnotations(Boolean.FALSE); annotationDB.setScanMethodAnnotations(Boolean.FALSE); annotationDB.scanArchives(fullArchives); Set<String> beanNames = annotationDB.getAnnotationIndex().get( Stateless.class.getName()); classOwnersURL = annotationDB.getClassOwnersURLs(); DataSourceInitializer.initializeDataSources(configuration); if (ObjectUtils.available(beanNames)) { deployBeans(beanNames); } } finally { // Caches configuration MetaContainer.putConfig(archives, configuration); // clears cached resources clear(); // gets rid from all created temporary files tmpResources.removeTempFiles(); scannerLock.unlock(); } } /** * Scan application for find all {@link javax.ejb.Stateless} beans and * {@link Remote} or {@link Local} proxy interfaces * * @throws ClassNotFoundException * @throws IOException */ public void scanForBeans(File[] jars) throws IOException { List<URL> urlList = new ArrayList<URL>(); URL url; for (File file : jars) { url = file.toURI().toURL(); urlList.add(url); } URL[] archives = ObjectUtils.toArray(urlList, URL.class); scanForBeans(archives); } /** * Scan application for find all {@link javax.ejb.Stateless} beans and * {@link Remote} or {@link Local} proxy interfaces * * @throws ClassNotFoundException * @throws IOException */ public void scanForBeans(String... 
paths) throws IOException { if (ObjectUtils.notAvailable(paths) && ObjectUtils.available(configuration.getDeploymentPath())) { Set<DeploymentDirectory> deployments = configuration .getDeploymentPath(); List<String> pathList = new ArrayList<String>(); File deployFile; for (DeploymentDirectory deployment : deployments) { deployFile = new File(deployment.getPath()); if (deployment.isScan()) { String[] subDeployments = deployFile.list(); if (ObjectUtils.available(subDeployments)) { pathList.addAll(Arrays.asList(subDeployments)); } } } paths = ObjectUtils.toArray(pathList, String.class); } List<URL> urlList = new ArrayList<URL>(); List<URL> archive; for (String path : paths) { archive = FileUtils.toURLWithClasspath(path); urlList.addAll(archive); } URL[] archives = ObjectUtils.toArray(urlList, URL.class); scanForBeans(archives); } public ClassLoader getCurrent() { return current; } /** * Closes all existing connections */ public static void closeAllConnections() { ConnectionContainer.closeConnections(); } public void clear() { boolean locked = scannerLock.tryLock(); while (ObjectUtils.notTrue(locked)) { locked = scannerLock.tryLock(); } if (locked) { try { if (ObjectUtils.available(realURL)) { realURL.clear(); realURL = null; } if (ObjectUtils.available(aggregateds)) { aggregateds.clear(); } if (ObjectUtils.available(archivesURLs)) { archivesURLs.clear(); archivesURLs = null; } if (ObjectUtils.available(classOwnersURL)) { classOwnersURL.clear(); classOwnersURL = null; } configuration = null; } finally { scannerLock.unlock(); } } } /** * Closes all connections clears all caches */ public static void close() { ConnectionContainer.closeConnections(); MetaContainer.clear(); } /** * Builder class to provide properties for lightmare application and * initialize {@link MetaCreator} instance * * @author levan * */ public static class Builder { private MetaCreator creator; public Builder(boolean cloneConfiguration) throws IOException { creator = MetaCreator.get(); Configuration 
config = creator.configuration;
        // clone the cached configuration when requested so earlier settings
        // are kept; otherwise start from a fresh Configuration
        if (cloneConfiguration && ObjectUtils.notNull(config)) {
            try {
                creator.configuration = (Configuration) config.clone();
            } catch (CloneNotSupportedException ex) {
                throw new IOException(ex);
            }
        } else {
            creator.configuration = new Configuration();
        }
    }

    // Default builder: always starts from a fresh Configuration
    public Builder() throws IOException {
        this(Boolean.FALSE);
    }

    // Lazily creates the static pool properties map on PoolConfig
    private void initPoolProperties() {
        if (PoolConfig.poolProperties == null) {
            PoolConfig.poolProperties = new HashMap<Object, Object>();
        }
    }

    /**
     * Sets additional persistence properties
     *
     * @param properties
     * @return {@link Builder}
     */
    public Builder setPersistenceProperties(Map<String, String> properties) {
        if (ObjectUtils.available(properties)) {
            Map<Object, Object> persistenceProperties = creator.configuration
                    .getPersistenceProperties();
            if (persistenceProperties == null) {
                persistenceProperties = new HashMap<Object, Object>();
                creator.configuration
                        .setPersistenceProperties(persistenceProperties);
            }
            persistenceProperties.putAll(properties);
        }
        return this;
    }

    // NOTE(review): this method is a no-op - key and property are ignored
    // and nothing is stored; looks like an unfinished implementation, TODO
    // confirm intent before relying on it
    public Builder addPersistenceProperty(String key, String property) {
        return this;
    }

    /**
     * Adds property to scan for {@link javax.persistence.Entity} annotated
     * classes from deployed archives
     *
     * @param scanForEnt
     * @return {@link Builder}
     */
    public Builder setScanForEntities(boolean scanForEnt) {
        creator.configuration.setScanForEntities(scanForEnt);
        return this;
    }

    /**
     * Adds property to use only {@link org.lightmare.annotations.UnitName}
     * annotated entities for which
     * {@link org.lightmare.annotations.UnitName#value()} matches passed
     * unit name
     *
     * @param unitName
     * @return {@link Builder}
     */
    public Builder setUnitName(String unitName) {
        creator.configuration.setAnnotatedUnitName(unitName);
        return this;
    }

    /**
     * Sets path for persistence.xml file; also disables archive scanning
     * since the descriptor location is now explicit
     *
     * @param path
     * @return {@link Builder}
     */
    public Builder setPersXmlPath(String path) {
        creator.configuration.setPersXmlPath(path);
        creator.configuration.setScanArchives(Boolean.FALSE);
        return this;
    }

    /**
     * Adds path for additional libraries to load at start time
     *
     * @param libPaths
     * @return {@link Builder}
     */
    public Builder setLibraryPath(String... libPaths) {
        creator.configuration.setLibraryPaths(libPaths);
        return this;
    }

    /**
     * Sets boolean checker to scan persistence.xml files from appropriated
     * jar files
     *
     * @param xmlFromJar
     * @return {@link Builder}
     */
    public Builder setXmlFromJar(boolean xmlFromJar) {
        creator.configuration.setPersXmlFromJar(xmlFromJar);
        return this;
    }

    /**
     * Sets boolean checker to swap jta data source value with non jta data
     * source value
     *
     * @param swapDataSource
     * @return {@link Builder}
     */
    public Builder setSwapDataSource(boolean swapDataSource) {
        creator.configuration.setSwapDataSource(swapDataSource);
        return this;
    }

    /**
     * Adds path for data source file
     *
     * @param dataSourcePath
     * @return {@link Builder}
     */
    public Builder addDataSourcePath(String dataSourcePath) {
        creator.configuration.addDataSourcePath(dataSourcePath);
        return this;
    }

    /**
     * This method is deprecated should use
     * {@link MetaCreator.Builder#addDataSourcePath(String)} instead
     *
     * @param dataSourcePath
     * @return {@link MetaCreator.Builder}
     */
    @Deprecated
    public Builder setDataSourcePath(String dataSourcePath) {
        creator.configuration.addDataSourcePath(dataSourcePath);
        return this;
    }

    /**
     * Sets boolean checker to scan {@link javax.persistence.Entity}
     * annotated classes from appropriated deployed archive files
     *
     * @param scanArchives
     * @return {@link Builder}
     */
    public Builder setScanArchives(boolean scanArchives) {
        creator.configuration.setScanArchives(scanArchives);
        return this;
    }

    /**
     * Sets boolean checker to block deployment processes
     *
     * @param await
     * @return {@link Builder}
     */
    public Builder setAwaitDeploiment(boolean await) {
        creator.await = await;
        return this;
    }

    /**
     * Sets property is server or not in embedded mode
     *
     * @param remote
     * @return {@link Builder}
     */
    public Builder setRemote(boolean remote) {
        creator.configuration.setRemote(remote);
        return this;
    }

    /**
     * Sets property is application server or just client for other remote
     * server; client flag is kept as the inverse of the server flag
     *
     * @param server
     * @return {@link Builder}
     */
    public Builder setServer(boolean server) {
        Configuration.setServer(server);
        creator.configuration.setClient(!server);
        return this;
    }

    /**
     * Sets boolean check is application in just client mode or not; server
     * flag is kept as the inverse of the client flag
     *
     * @param client
     * @return {@link Builder}
     */
    public Builder setClient(boolean client) {
        creator.configuration.setClient(client);
        Configuration.setServer(!client);
        return this;
    }

    /**
     * To add any additional property
     *
     * @param key
     * @param property
     * @return {@link Builder}
     */
    public Builder setProperty(String key, String property) {
        creator.configuration.putValue(key, property);
        return this;
    }

    /**
     * File path for administrator user name and password
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setAdminUsersPth(String property) {
        Configuration.setAdminUsersPath(property);
        return this;
    }

    /**
     * Sets specific IP address in case when application is in remote server
     * mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setIpAddress(String property) {
        creator.configuration.putValue(Configuration.IP_ADDRESS, property);
        return this;
    }

    /**
     * Sets specific port in case when application is in remote server mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setPort(String property) {
        creator.configuration.putValue(Configuration.PORT, property);
        return this;
    }

    /**
     * Sets amount for network master threads in case when application is in
     * remote server mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setMasterThreads(String property) {
        creator.configuration.putValue(Configuration.BOSS_POOL, property);
        return this;
    }

    /**
     * Sets amount of worker threads in case when application is in remote
     * server mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setWorkerThreads(String property) {
        creator.configuration.putValue(Configuration.WORKER_POOL, property);
        return this;
    }

    /**
     * Adds deploy file path to application with boolean checker if file is
     * directory to scan this directory for deployment files list
     *
     * @param deploymentPath
     * @param scan
     * @return {@link Builder}
     */
    public Builder addDeploymentPath(String deploymentPath, boolean scan) {
        String clearPath = WatchUtils.clearPath(deploymentPath);
        creator.configuration.addDeploymentPath(clearPath, scan);
        return this;
    }

    /**
     * Adds deploy file path to application
     *
     * @param deploymentPath
     * @return {@link Builder}
     */
    public Builder addDeploymentPath(String deploymentPath) {
        addDeploymentPath(deploymentPath, Boolean.FALSE);
        return this;
    }

    /**
     * Adds timeout for connection in case when application is in remote
     * server or client mode
     *
     * @param property
     * @return {@link Builder}
     */
    public Builder setTimeout(String property) {
        creator.configuration.putValue(Configuration.CONNECTION_TIMEOUT,
                property);
        return this;
    }

    /**
     * Adds boolean check if application is using pooled data source
     *
     * @param dsPooledType
     * @return {@link Builder}
     */
    public Builder setDataSourcePooledType(boolean dsPooledType) {
        JPAManager.pooledDataSource = dsPooledType;
        return this;
    }

    /**
     * Sets which data source pool provider should use application by
     * {@link PoolProviderType} parameter
     *
     * @param poolProviderType
     * @return {@link Builder}
     */
    public Builder setPoolProviderType(PoolProviderType poolProviderType) {
        PoolConfig.poolProviderType = poolProviderType;
        return this;
    }

    /**
     * Sets path for data source pool additional properties
     *
     * @param path
     * @return {@link Builder}
     */
    public Builder setPoolPropertiesPath(String path) {
        PoolConfig.poolPath = path;
        return this;
    }

    /**
     * Sets data source pool additional properties
     *
     * @param properties
     * @return {@link Builder}
     */
    public Builder setPoolProperties(
            Map<? extends Object, ? extends Object> properties) {
        initPoolProperties();
        PoolConfig.poolProperties.putAll(properties);
        return this;
    }

    /**
     * Adds instance property for pooled data source
     *
     * @param key
     * @param value
     * @return {@link Builder}
     */
    public Builder addPoolProperty(Object key, Object value) {
        initPoolProperties();
        PoolConfig.poolProperties.put(key, value);
        return this;
    }

    /**
     * Sets boolean check is application in hot deployment (with watch
     * service on deployment directories) or not
     *
     * @param hotDeployment
     * @return {@link Builder}
     */
    public Builder setHotDeployment(boolean hotDeployment) {
        creator.configuration.setHotDeployment(hotDeployment);
        return this;
    }

    // Finalizes configuration and returns the (singleton) MetaCreator
    public MetaCreator build() throws IOException {
        creator.configuration.configure();
        LOG.info("Lightmare application starts working");
        return creator;
    }
}
}
package org.lightmare.utils.fs;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import java.util.Set;

import org.apache.log4j.Logger;
import org.lightmare.libraries.LibraryLoader;
import org.lightmare.utils.CollectionUtils;
import org.lightmare.utils.ObjectUtils;
import org.lightmare.utils.StringUtils;
import org.lightmare.utils.fs.codecs.ArchiveUtils;

/**
 * Utility for removing {@link File}s recursively from file system and for
 * collecting jar / class file {@link URL}s from directory trees.
 *
 * @author Levan Tsinadze
 * @since 0.0.20-SNAPSHOT
 */
public class FileUtils {

    // First (magic) bytes of any ZIP file: "PK\u0003\u0004".
    // FIX: made final -- this constant is never reassigned.
    private static final byte[] MAGIC = { 'P', 'K', 0x3, 0x4 };

    // Read-only open mode for RandomAccessFile
    private static final String READ = "r";

    private static final Logger LOG = Logger.getLogger(FileUtils.class);

    /**
     * Lists jar files, class files and subdirectories of the passed directory.
     *
     * @param file directory to list
     * @return {@link File}[] matching entries (may be null if not a directory)
     */
    private static File[] listJavaFiles(File file) {

	File[] subFiles = file.listFiles(new FilenameFilter() {

	    @Override
	    public boolean accept(File dir, String name) {
		// FIX: the original tested the *parent* directory's
		// isDirectory() (always true for a listed directory), which
		// accepted every entry. Test the entry itself so only jars,
		// class files and subdirectories pass the filter.
		return name.endsWith(ArchiveUtils.JAR_FILE_EXT)
			|| name.endsWith(ArchiveUtils.CLASS_FILE_EXT)
			|| new File(dir, name).isDirectory();
	    }
	});

	return subFiles;
    }

    /**
     * Adds passed {@link File}'s {@link URL} to passed {@link Collection} of
     * {@link URL} objects
     *
     * @param urls collection to add to
     * @param file file to convert
     * @throws IOException if the file path cannot be converted to a URL
     */
    private static void addURL(Collection<URL> urls, File file)
	    throws IOException {

	try {
	    urls.add(file.toURI().toURL());
	} catch (MalformedURLException ex) {
	    throw new IOException(ex);
	}
    }

    /**
     * Adds sub files of passed {@link File} array to passed {@link Set} of
     * {@link URL} objects, recursing into subdirectories.
     *
     * @param files entries to process
     * @param urls accumulator of discovered URLs
     * @throws IOException on URL conversion errors
     */
    private static void addSubDirectory(File[] files, Set<URL> urls)
	    throws IOException {

	for (File subFile : files) {
	    if (subFile.isDirectory()) {
		getSubfiles(subFile, urls);
	    } else {
		addURL(urls, subFile);
	    }
	}
    }

    /**
     * Gets all jar or class subfiles from specified {@link File} recursively
     *
     * @param file file or directory to scan
     * @param urls accumulator of discovered URLs
     * @throws IOException on URL conversion errors
     */
    public static void getSubfiles(File file, Set<URL> urls)
	    throws IOException {

	if (file.isDirectory()) {
	    File[] subFiles = listJavaFiles(file);
	    if (CollectionUtils.valid(subFiles)) {
		addSubDirectory(subFiles, urls);
	    }
	} else {
	    addURL(urls, file);
	}
    }

    /**
     * Check whether passed {@link URL} is from extracted ear directory
     *
     * @param url URL to check
     * @return boolean true if the URL points at an extracted ear directory
     * @throws IOException if the URL cannot be converted to a file path
     */
    public static boolean checkOnEarDir(URL url) throws IOException {

	boolean isEarDir;

	File file;
	try {
	    file = new File(url.toURI());
	    isEarDir = checkOnEarDir(file);
	} catch (URISyntaxException ex) {
	    throw new IOException(ex);
	}

	return isEarDir;
    }

    /**
     * Check whether passed path is extracted ear directory path
     *
     * @param path file system path to check
     * @return boolean true if the path is an extracted ear directory
     */
    public static boolean checkOnEarDir(String path) {

	boolean isEarDir;

	File file = new File(path);
	isEarDir = checkOnEarDir(file);

	return isEarDir;
    }

    /**
     * Check whether passed file is extracted ear directory (a non-empty
     * directory containing META-INF/application.xml).
     *
     * @param file file to check
     * @return boolean true if the file is an extracted ear directory
     */
    public static boolean checkOnEarDir(File file) {

	boolean isEarDir = file.isDirectory();

	if (isEarDir) {
	    File[] files = file.listFiles();
	    isEarDir = CollectionUtils.valid(files);
	    if (isEarDir) {
		// Build the application.xml path without doubling separators.
		String path = file.getPath();
		String delim;
		if (path.endsWith(ArchiveUtils.FILE_SEPARATOR)) {
		    delim = StringUtils.EMPTY_STRING;
		} else {
		    delim = ArchiveUtils.FILE_SEPARATOR;
		}
		String appxmlPath = StringUtils.concat(path, delim,
			ArchiveUtils.APPLICATION_XML_PATH);
		File appXmlFile = new File(appxmlPath);
		isEarDir = appXmlFile.exists();
	    }
	}

	return isEarDir;
    }

    /**
     * Removes passed {@link File}s from file system and if
     * {@link File#isDirectory()} removes all it's content recursively
     *
     * @param file file or directory to delete
     * @return boolean result of deleting the top-level file itself
     */
    public static boolean deleteFile(File file) {

	if (file.isDirectory()) {
	    File[] subFiles = file.listFiles();
	    if (CollectionUtils.valid(subFiles)) {
		for (File subFile : subFiles) {
		    // Best effort: per-entry results are intentionally ignored.
		    deleteFile(subFile);
		}
	    }
	}

	return file.delete();
    }

    /**
     * Iterates over passed {@link File}s and removes each of them from file
     * system and if {@link File#isDirectory()} removes all it's content
     * recursively
     *
     * @param files files to delete
     */
    public static void deleteFiles(Iterable<File> files) {

	for (File fileToDelete : files) {
	    deleteFile(fileToDelete);
	}
    }

    /**
     * Converts passed file to {@link URL} instance
     *
     * @param file file to convert
     * @return {@link URL} for the file
     * @throws IOException on conversion failure
     */
    public static URL toURL(File file) throws IOException {
	return file.toURI().toURL();
    }

    /**
     * Creates {@link URL} from passed path
     *
     * @param path file system path
     * @return {@link URL} for the path
     * @throws IOException on conversion failure
     */
    public static URL toURL(String path) throws IOException {

	File file = new File(path);

	return toURL(file);
    }

    /**
     * Checks passed path and, if it is empty (but not null), resolves it
     * against the context class loader's classpath resources instead.
     *
     * @param path path to resolve (valid string, empty string, or null)
     * @return {@link List} of resolved {@link URL}s (empty for null path)
     * @throws IOException on resolution failure
     */
    public static List<URL> toURLWithClasspath(String path) throws IOException {

	List<URL> urls = new ArrayList<URL>();

	URL url;
	if (StringUtils.valid(path)) {
	    url = toURL(path);
	    urls.add(url);
	} else if (ObjectUtils.notNull(path) && path.isEmpty()) {
	    // Empty (non-null) path: enumerate classpath roots instead.
	    Enumeration<URL> urlEnum = LibraryLoader.getContextClassLoader()
		    .getResources(path);
	    while (urlEnum.hasMoreElements()) {
		url = urlEnum.nextElement();
		urls.add(url);
	    }
	}

	return urls;
    }

    /**
     * Checks if passed file is ZIP archive file by reading its magic bytes.
     *
     * @param file file to probe
     * @return <code>boolean</code> true if the file starts with "PK\3\4"
     */
    public static boolean checkOnZip(File file) {

	boolean isZip = Boolean.TRUE;

	int length = MAGIC.length;
	byte[] buffer = new byte[length];
	try {
	    RandomAccessFile raf = new RandomAccessFile(file, READ);
	    try {
		// readFully throws EOFException (an IOException) for files
		// shorter than the magic header, yielding "not a zip".
		raf.readFully(buffer);
		for (int i = CollectionUtils.FIRST_INDEX; i < length && isZip; i++) {
		    isZip = buffer[i] == MAGIC[i];
		}
	    } finally {
		raf.close();
	    }
	} catch (IOException ex) {
	    LOG.error(ex.getMessage(), ex);
	    isZip = Boolean.FALSE;
	}

	return isZip;
    }
}
package org.littleshoot.proxy;

import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import java.util.Set;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.handler.codec.http.DefaultHttpRequest;
import org.jboss.netty.handler.codec.http.HttpChunk;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpMessage;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.util.CharsetUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Utilities for the proxy.
 */
public class ProxyUtils {

    private static final Logger LOG = LoggerFactory.getLogger(ProxyUtils.class);

    // HTTP dates are always expressed in GMT (RFC 2616 3.3.1).
    private static final TimeZone GMT = TimeZone.getTimeZone("GMT");

    /**
     * Date format pattern used to parse HTTP date headers in RFC 1123 format.
     */
    public static final String PATTERN_RFC1123 =
        "EEE, dd MMM yyyy HH:mm:ss zzz";

    /**
     * Date format pattern used to parse HTTP date headers in RFC 1036 format.
     */
    public static final String PATTERN_RFC1036 =
        "EEEE, dd-MMM-yy HH:mm:ss zzz";

    // Hop-by-hop headers (RFC 2616 13.5.1) that must not be forwarded;
    // populated lower-case in the static initializer below.
    private static final Set<String> hopByHopHeaders = new HashSet<String>();

    // Pre-built "Via: 1.1 <host>\r\n" line; assigned in the static block.
    private static final String via;

    // Local host name resolved once at class load; assigned in the static block.
    private static final String hostName;

    static {
        try {
            final InetAddress localAddress = InetAddress.getLocalHost();
            hostName = localAddress.getHostName();
        } catch (final UnknownHostException e) {
            // Without a host name we cannot build Via headers -- fail fast.
            LOG.error("Could not lookup host", e);
            throw new IllegalStateException("Could not determine host!", e);
        }

        final StringBuilder sb = new StringBuilder();
        sb.append("Via: 1.1 ");
        sb.append(hostName);
        sb.append("\r\n");
        via = sb.toString();

        //hopByHopHeaders.add("proxy-connection");
        hopByHopHeaders.add("connection");
        hopByHopHeaders.add("keep-alive");
        hopByHopHeaders.add("proxy-authenticate");
        hopByHopHeaders.add("proxy-authorization");
        hopByHopHeaders.add("te");
        hopByHopHeaders.add("trailers");
        // We pass Transfer-Encoding along in both directions, as we don't
        // choose to modify it.
        //hopByHopHeaders.add("transfer-encoding");
        hopByHopHeaders.add("upgrade");
    }

    /**
     * Utility class for a no-op {@link ChannelFutureListener}.
     */
    public static final ChannelFutureListener NO_OP_LISTENER =
        new ChannelFutureListener() {
        public void operationComplete(final ChannelFuture future)
            throws Exception {
            LOG.info("No op listener - write finished");
        }
    };

    /**
     * Constant for the headers for an OK response to an HTTP connect request.
     */
    public static final String CONNECT_OK_HEADERS =
        "Connection: Keep-Alive\r\n"+
        "Proxy-Connection: Keep-Alive\r\n"+
        via +
        "\r\n";

    /**
     * Constant for the headers for a proxy error response.
     */
    public static final String PROXY_ERROR_HEADERS =
        "Connection: close\r\n"+
        "Proxy-Connection: close\r\n"+
        "Pragma: no-cache\r\n"+
        "Cache-Control: no-cache\r\n" +
        via +
        "\r\n";

    /** Request filter that leaves every request untouched. */
    public static final HttpRequestFilter PASS_THROUGH_REQUEST_FILTER =
        new HttpRequestFilter() {
        public void filter(final HttpRequest httpRequest) {
        }
    };

    // Should never be constructed.
private ProxyUtils() { } // Schemes are case-insensitive: http://tools.ietf.org/html/rfc3986#section-3.1 private static Pattern HTTP_PREFIX = Pattern.compile("http.*", Pattern.CASE_INSENSITIVE); private static Pattern HTTPS_PREFIX = Pattern.compile("https.*", Pattern.CASE_INSENSITIVE); public static String stripHost(final String uri) { if (!HTTP_PREFIX.matcher(uri).matches()) { // It's likely a URI path, not the full URI (i.e. the host is // already stripped). return uri; } final String noHttpUri = StringUtils.substringAfter(uri, ": final int slashIndex = noHttpUri.indexOf("/"); if (slashIndex == -1) { return "/"; } final String noHostUri = noHttpUri.substring(slashIndex); return noHostUri; } /** * Builds the cache URI from the request, including the host and the path. * * @param httpRequest The request. * @return The cache URI. */ public static String cacheUri(final HttpRequest httpRequest) { final String host = httpRequest.getHeader(HttpHeaders.Names.HOST); final String uri = httpRequest.getUri(); final String path; if (HTTP_PREFIX.matcher(uri).matches()) { path = stripHost(uri); } else { path = uri; } // ignore the protocol? return host + path; } /** * Formats the given date according to the RFC 1123 pattern. * * @param date The date to format. * @return An RFC 1123 formatted date string. * * @see #PATTERN_RFC1123 */ public static String formatDate(final Date date) { return formatDate(date, PATTERN_RFC1123); } public static String formatDate(final Date date, final String pattern) { if (date == null) throw new IllegalArgumentException("date is null"); if (pattern == null) throw new IllegalArgumentException("pattern is null"); final SimpleDateFormat formatter = new SimpleDateFormat(pattern, Locale.US); formatter.setTimeZone(GMT); return formatter.format(date); } /** * Creates a Date formatted for HTTP headers for the current time. * * @return The formatted HTTP date. 
*/ public static String httpDate() { return formatDate(new Date()); } /** * Copies the mutable fields from the response original to the copy. * * @param original The original response to copy from. * @param copy The copy. * @return The copy with all mutable fields from the original. */ public static HttpResponse copyMutableResponseFields( final HttpResponse original, final HttpResponse copy) { final Collection<String> headerNames = original.getHeaderNames(); for (final String name : headerNames) { final List<String> values = original.getHeaders(name); copy.setHeader(name, values); } copy.setContent(original.getContent()); if (original.isChunked()) { copy.setChunked(true); } return copy; } /** * Writes a raw HTTP response to the channel. * * @param channel The channel. * @param statusLine The status line of the response. * @param headers The raw headers string. */ public static void writeResponse(final Channel channel, final String statusLine, final String headers) { writeResponse(channel, statusLine, headers, ""); } /** * Writes a raw HTTP response to the channel. * * @param channel The channel. * @param statusLine The status line of the response. * @param headers The raw headers string. * @param responseBody The response body. */ public static void writeResponse(final Channel channel, final String statusLine, final String headers, final String responseBody) { final String fullResponse = statusLine + headers + responseBody; LOG.info("Writing full response:\n"+fullResponse); try { final ChannelBuffer buf = ChannelBuffers.copiedBuffer(fullResponse.getBytes("UTF-8")); channel.write(buf); channel.setReadable(true); } catch (final UnsupportedEncodingException e) { // Never. return; } } /** * Prints the headers of the message (for debugging). * * @param msg The {@link HttpMessage}. 
*/ public static void printHeaders(final HttpMessage msg) { final String status = msg.getProtocolVersion().toString(); LOG.debug(status); final StringBuilder sb = new StringBuilder(); final Set<String> headerNames = msg.getHeaderNames(); for (final String name : headerNames) { final String value = msg.getHeader(name); sb.append(name); sb.append(": "); sb.append(value); sb.append("\n"); } LOG.debug("\n"+sb.toString()); } /** * Prints the specified header from the specified method. * * @param msg The HTTP message. * @param name The name of the header to print. */ public static void printHeader(final HttpMessage msg, final String name) { final String value = msg.getHeader(name); LOG.debug(name + ": "+value); } static boolean isLastChunk(final Object msg) { if (msg instanceof HttpChunk) { final HttpChunk chunk = (HttpChunk) msg; return chunk.isLast(); } else { return false; } } private static ChannelFutureListener CLOSE = new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) { final Channel ch = future.getChannel(); if (ch.isOpen()) { ch.close(); } } }; /** * Closes the specified channel after all queued write requests are flushed. * * @param ch The {@link Channel} to close. */ public static void closeOnFlush(final Channel ch) { LOG.info("Closing on flush: {}", ch); if (ch.isConnected()) { ch.write(ChannelBuffers.EMPTY_BUFFER).addListener(ProxyUtils.CLOSE); } } /** * Parses the host and port an HTTP request is being sent to. * * @param httpRequest The request. * @return The host and port string. */ public static String parseHostAndPort(final HttpRequest httpRequest) { return parseHostAndPort(httpRequest.getUri()); } /** * Parses the host and port an HTTP request is being sent to. * * @param uri The URI. * @return The host and port string. 
*/ public static String parseHostAndPort(final String uri) { final String tempUri; if (!HTTP_PREFIX.matcher(uri).matches()) { // Browsers particularly seem to send requests in this form when // they use CONNECT. tempUri = uri; } else { // We can't just take a substring from a hard-coded index because it // could be either http or https. tempUri = StringUtils.substringAfter(uri, ": } final String hostAndPort; if (tempUri.contains("/")) { hostAndPort = tempUri.substring(0, tempUri.indexOf("/")); } else { hostAndPort = tempUri; } return hostAndPort; } public static String parseHost(final HttpRequest request) { final String host = request.getHeader(HttpHeaders.Names.HOST); if (StringUtils.isNotBlank(host)) { return host; } return parseHost(request.getUri()); } public static String parseHost(final String request) { final String hostAndPort = ProxyUtils.parseHostAndPort(request); if (hostAndPort.contains(":")) { return StringUtils.substringBefore(hostAndPort, ":"); } else { return hostAndPort; } } /** * Parses the port from an address. * * @param httpRequest The request containing the URI. * @return The port. If not port is explicitly specified, returns the * the default port 80 if the protocol is HTTP and 443 if the protocol is * HTTPS. */ public static int parsePort(final HttpRequest httpRequest) { final String uri = httpRequest.getUri(); if (uri.contains(":")) { final String portStr = StringUtils.substringAfter(uri, ":"); return Integer.parseInt(portStr); } else if (HTTP_PREFIX.matcher(uri).matches()) { return 80; } else if (HTTPS_PREFIX.matcher(uri).matches()) { return 443; } else { // Unsupported protocol -- return 80 for now. return 80; } } /** * Creates a copy of an original HTTP request to void modifying it. * * @param original The original request. * @param keepProxyFormat keep proxy-formatted URI (used in chaining) * @return The request copy. 
*/
    public static HttpRequest copyHttpRequest(final HttpRequest original,
        boolean keepProxyFormat) {
        final HttpMethod method = original.getMethod();
        final String uri = original.getUri();
        LOG.info("Raw URI before switching from proxy format: {}", uri);
        final HttpRequest copy;

        if (keepProxyFormat) {
            // Keep the absolute (proxy-style) URI for chained proxies.
            copy = new DefaultHttpRequest(original.getProtocolVersion(),
                method, uri);
        } else {
            // Origin-server form: path only, host stripped.
            final String noHostUri = ProxyUtils.stripHost(uri);
            copy = new DefaultHttpRequest(original.getProtocolVersion(),
                method, noHostUri);
        }
        final ChannelBuffer originalContent = original.getContent();

        if (originalContent != null) {
            copy.setContent(originalContent);
        }

        // We also need to follow 2616 section 13.5.1 End-to-end and
        // Hop-by-hop Headers
        // The following HTTP/1.1 headers are hop-by-hop headers:
        //   - Connection
        //   - Keep-Alive
        //   - Proxy-Authenticate
        //   - Proxy-Authorization
        //   - Trailers
        //   - Transfer-Encoding
        //   - Upgrade

        LOG.info("Request copy method: {}", copy.getMethod());
        copyHeaders(original, copy);

        final String ae = copy.getHeader(HttpHeaders.Names.ACCEPT_ENCODING);
        if (StringUtils.isNotBlank(ae)) {
            // Remove sdch from encodings we accept since we can't decode it.
            final String noSdch = ae.replace(",sdch", "").replace("sdch", "");
            copy.setHeader(HttpHeaders.Names.ACCEPT_ENCODING, noSdch);
            LOG.info("Removed sdch and inserted: {}", noSdch);
        }

        // Switch the de-facto standard "Proxy-Connection" header to
        // "Connection" when we pass it along to the remote host. This is
        // largely undocumented but seems to be what most browsers and servers
        // expect.
        final String proxyConnectionKey = "Proxy-Connection";
        if (copy.containsHeader(proxyConnectionKey)) {
            final String header = copy.getHeader(proxyConnectionKey);
            copy.removeHeader(proxyConnectionKey);
            copy.setHeader("Connection", header);
        }

        ProxyUtils.addVia(copy);
        return copy;
    }

    /**
     * Copies all headers except hop-by-hop headers from the original message
     * to the copy.
     */
    private static void copyHeaders(final HttpMessage original,
        final HttpMessage copy) {
        final Set<String> headerNames = original.getHeaderNames();
        for (final String name : headerNames) {
            // hopByHopHeaders entries are lower-case, so compare lower-cased.
            if (!hopByHopHeaders.contains(name.toLowerCase())) {
                final List<String> values = original.getHeaders(name);
                copy.setHeader(name, values);
            }
        }
    }

    /**
     * Removes all headers that should not be forwarded.
     * See RFC 2616 13.5.1 End-to-end and Hop-by-hop Headers.
     *
     * @param msg The message to strip headers from.
     */
    public static void stripHopByHopHeaders(final HttpMessage msg) {
        final Set<String> headerNames = msg.getHeaderNames();
        for (final String name : headerNames) {
            if (hopByHopHeaders.contains(name.toLowerCase())) {
                msg.removeHeader(name);
            }
        }
    }

    /**
     * Creates a copy of an original HTTP request to void modifying it.
     * This variant will unconditionally strip the proxy-formatted request.
     *
     * @param original The original request.
     * @return The request copy.
     */
    public static HttpRequest copyHttpRequest(final HttpRequest original) {
        return copyHttpRequest(original, false);
    }

    /**
     * Adds the Via header to specify that the message has passed through
     * the proxy.
     *
     * @param msg The HTTP message.
*/ public static void addVia(final HttpMessage msg) { final StringBuilder sb = new StringBuilder(); sb.append(msg.getProtocolVersion().getMajorVersion()); sb.append("."); sb.append(msg.getProtocolVersion().getMinorVersion()); sb.append("."); sb.append(hostName); final List<String> vias; if (msg.containsHeader(HttpHeaders.Names.VIA)) { vias = msg.getHeaders(HttpHeaders.Names.VIA); vias.add(sb.toString()); } else { vias = Arrays.asList(sb.toString()); } msg.setHeader(HttpHeaders.Names.VIA, vias); } /** * Detect Charset Encoding of a HttpResponse * based on Headers and Meta Tags * * @param http The HTTP Response. * @return Returns the detected charset. */ public static Charset detectCharset(HttpResponse http) { Charset charset = null; // Return null charset if charset detected in Response have no support Charset headerCharset = CharsetUtil.ISO_8859_1; // Default charset for detection is latin-1 if (http.getHeader("Content-Type") != null) { // If has Content-Type header, try to detect charset from it String header_pattern = "^\\s*?.*?\\s*?charset\\s*?=\\s*?(.*?)$"; // How to find charset in header Pattern pattern = Pattern.compile(header_pattern, Pattern.CASE_INSENSITIVE); // Set Pattern Matcher to Matcher matcher = pattern.matcher(http.getHeader("Content-Type")); // find charset in header if (matcher.find()) { // If there is a charset definition String charsetName = matcher.group(1); // Get string charset name if (Charset.isSupported(charsetName)) { // If charset is supported by java charset = Charset.forName(charsetName); // Set current charset to that headerCharset = Charset.forName(charsetName); // Set the header charset to that } } } String html = http.getContent().toString(headerCharset); // Try to decode response content with header charset /** * Returns <code>true</code> if the specified string is either "true" or * "on" ignoring case. * * @param val The string in question. 
* @return <code>true</code> if the specified string is either "true" or * "on" ignoring case, otherwise <code>false</code>. */ public static boolean isTrue(final String val) { return checkTrueOrFalse(val, "true", "on"); } /** * Returns <code>true</code> if the specified string is either "false" or * "off" ignoring case. * * @param val The string in question. * @return <code>true</code> if the specified string is either "false" or * "off" ignoring case, otherwise <code>false</code>. */ public static boolean isFalse(final String val) { return checkTrueOrFalse(val, "false", "off"); } private static boolean checkTrueOrFalse(final String val, final String str1, final String str2) { final String str = val.trim(); return StringUtils.isNotBlank(str) && (str.equalsIgnoreCase(str1) || str.equalsIgnoreCase(str2)); } public static boolean extractBooleanDefaultFalse( final Properties props, final String key) { final String throttle = props.getProperty(key); if (StringUtils.isNotBlank(throttle)) { return throttle.trim().equalsIgnoreCase("true"); } return false; } public static long extractLong(final Properties props, final String key) { final String readThrottleString = props.getProperty(key); if (StringUtils.isNotBlank(readThrottleString) && NumberUtils.isNumber(readThrottleString)) { return Long.parseLong(readThrottleString); } return -1; } }
package org.mariadb.jdbc; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; import javax.transaction.xa.Xid; import org.mariadb.jdbc.internal.util.Utils; public class MariaXaResource implements XAResource { private final MariaDbConnection connection; public MariaXaResource(MariaDbConnection connection) { this.connection = connection; } private static String xidToString(Xid xid) { return "0x" + Utils.byteArrayToHexString(xid.getGlobalTransactionId()) + ",0x" + Utils.byteArrayToHexString(xid.getBranchQualifier()) + "," + xid.getFormatId(); } private static String flagsToString(int flags) { switch (flags) { case TMJOIN: return "JOIN"; case TMONEPHASE: return "ONE PHASE"; case TMRESUME: return "RESUME"; case TMSUSPEND: return "SUSPEND"; default: return ""; } } private XAException mapXaException(SQLException sqle) { int xaErrorCode; switch (sqle.getErrorCode()) { case 1397: xaErrorCode = XAException.XAER_NOTA; break; case 1398: xaErrorCode = XAException.XAER_INVAL; break; case 1399: xaErrorCode = XAException.XAER_RMFAIL; break; case 1400: xaErrorCode = XAException.XAER_OUTSIDE; break; case 1401: xaErrorCode = XAException.XAER_RMERR; break; case 1402: xaErrorCode = XAException.XA_RBROLLBACK; break; default: xaErrorCode = 0; break; } XAException xaException; if (xaErrorCode != 0) { xaException = new XAException(xaErrorCode); } else { xaException = new XAException(sqle.getMessage()); } xaException.initCause(sqle); return xaException; } /** * Execute a query. * * @param command query to run. * @throws XAException exception */ private void execute(String command) throws XAException { try { connection.createStatement().execute(command); } catch (SQLException sqle) { throw mapXaException(sqle); } } /** * Commits the global transaction specified by xid. 
* * @param xid A global transaction identifier * @param onePhase If true, the resource manager should use a one-phase commit protocol to commit * the work done on behalf of xid. * @throws XAException exception */ public void commit(Xid xid, boolean onePhase) throws XAException { String command = "XA COMMIT " + xidToString(xid); if (onePhase) { command += " ONE PHASE"; } execute(command); } /** * Ends the work performed on behalf of a transaction branch. The resource manager disassociates * the XA resource from the transaction branch specified and lets the transaction complete. * <p>If TMSUSPEND is specified in the flags, the transaction branch is temporarily suspended in * an incomplete state. The transaction context is in a suspended state and must be resumed via * the start method with TMRESUME specified.</p> * <p>If TMFAIL is specified, the portion of work has failed. The resource manager may mark the * transaction as rollback-only</p> * <p>If TMSUCCESS is specified, the portion of work has completed successfully.</p> * * @param xid A global transaction identifier that is the same as the identifier used previously * in the start method. * @param flags One of TMSUCCESS, TMFAIL, or TMSUSPEND. * @throws XAException An error has occurred. (XAException values are XAER_RMERR, XAER_RMFAILED, * XAER_NOTA, XAER_INVAL, XAER_PROTO, or XA_RB*) */ public void end(Xid xid, int flags) throws XAException { if (flags != TMSUCCESS && flags != TMSUSPEND && flags != TMFAIL) { throw new XAException(XAException.XAER_INVAL); } execute("XA END " + xidToString(xid) + " " + flagsToString(flags)); } /** * Tells the resource manager to forget about a heuristically completed transaction branch. * * @param xid A global transaction identifier. */ public void forget(Xid xid) { // Not implemented by the server } /** * Obtains the current transaction timeout value set for this XAResource instance. 
If * XAResource.setTransactionTimeout was not used prior to invoking this method, the return value * is the default timeout set for the resource manager; otherwise, the value used in the previous * setTransactionTimeout call is returned. * * @return the transaction timeout value in seconds. */ public int getTransactionTimeout() { // not implemented return 0; } /** * This method is called to determine if the resource manager instance represented by the target * object is the same as the resource manager instance represented by the parameter xares. * * @param xaResource An XAResource object whose resource manager instance is to be compared with * the target object. * @return true if it's the same RM instance; otherwise false. */ @Override public boolean isSameRM(XAResource xaResource) { // Typically used by transaction manager to "join" transactions. We do not support joins, // so always return false; return false; } /** * Ask the resource manager to prepare for a transaction commit of the transaction specified in * xid. * * @param xid A global transaction identifier. * @return A value indicating the resource manager's vote on the outcome of the transaction. * @throws XAException An error has occurred. Possible exception values are: XA_RB*, XAER_RMERR, * XAER_RMFAIL, XAER_NOTA, XAER_INVAL, XAER_PROTO. */ public int prepare(Xid xid) throws XAException { execute("XA PREPARE " + xidToString(xid)); return XA_OK; } /** * Obtains a list of prepared transaction branches from a resource manager. The transaction * manager calls this method during recovery to obtain the list of transaction branches that are * currently in prepared or heuristically completed states. * * @param flags One of TMSTARTRSCAN, TMENDRSCAN, TMNOFLAGS. TMNOFLAGS must be used when no other * flags are set in the parameter. * @return The resource manager returns zero or more XIDs of the transaction branches. * @throws XAException An error has occurred. 
Possible values are XAER_RMERR, XAER_RMFAIL, * XAER_INVAL, and XAER_PROTO. */ public Xid[] recover(int flags) throws XAException { // Return all Xid at once, when STARTRSCAN is specified // Return zero-length array otherwise. if (((flags & TMSTARTRSCAN) == 0) && ((flags & TMENDRSCAN) == 0) && (flags != TMNOFLAGS)) { throw new XAException(XAException.XAER_INVAL); } if ((flags & TMSTARTRSCAN) == 0) { return new MariaDbXid[0]; } try { ResultSet rs = connection.createStatement().executeQuery("XA RECOVER"); ArrayList<MariaDbXid> xidList = new ArrayList<>(); while (rs.next()) { int formatId = rs.getInt(1); int len1 = rs.getInt(2); int len2 = rs.getInt(3); byte[] arr = rs.getBytes(4); byte[] globalTransactionId = new byte[len1]; byte[] branchQualifier = new byte[len2]; System.arraycopy(arr, 0, globalTransactionId, 0, len1); System.arraycopy(arr, len1, branchQualifier, 0, len2); xidList.add(new MariaDbXid(formatId, globalTransactionId, branchQualifier)); } Xid[] xids = new Xid[xidList.size()]; xidList.toArray(xids); return xids; } catch (SQLException sqle) { throw mapXaException(sqle); } } /** * Informs the resource manager to roll back work done on behalf of a transaction branch. * * @param xid A global transaction identifier. * @throws XAException An error has occurred. */ public void rollback(Xid xid) throws XAException { execute("XA ROLLBACK " + xidToString(xid)); } /** * Sets the current transaction timeout value for this XAResource instance. Once set, this timeout * value is effective until setTransactionTimeout is invoked again with a different value. To * reset the timeout value to the default value used by the resource manager, set the value to * zero. If the timeout operation is performed successfully, the method returns true; otherwise * false. If a resource manager does not support explicitly setting the transaction timeout value, * this method returns false. * * @param timeout The transaction timeout value in seconds. 
* @return true if the transaction timeout value is set successfully; otherwise false. */ public boolean setTransactionTimeout(int timeout) { return false; // not implemented } /** * Starts work on behalf of a transaction branch specified in xid. If TMJOIN is specified, the * start applies to joining a transaction previously seen by the resource manager. If TMRESUME is * specified, the start applies to resuming a suspended transaction specified in the parameter * xid. If neither TMJOIN nor TMRESUME is specified and the transaction specified by xid has * previously been seen by the resource manager, the resource manager throws the XAException * exception with XAER_DUPID error code. * * @param xid A global transaction identifier to be associated with the resource. * @param flags One of TMNOFLAGS, TMJOIN, or TMRESUME. * @throws XAException An error has occurred. */ public void start(Xid xid, int flags) throws XAException { if (flags != TMJOIN && flags != TMRESUME && flags != TMNOFLAGS) { throw new XAException(XAException.XAER_INVAL); } execute("XA START " + xidToString(xid) + " " + flagsToString( flags == TMJOIN && connection.getPinGlobalTxToPhysicalConnection() ? TMRESUME : flags)); } }
package org.openboxprotocol.types;

import java.io.IOException;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;

/**
 * A match value paired with the bit mask that applies to it.
 * <p>
 * The stored value is normalized on construction by applying the mask, so two
 * {@code Masked} instances that match the same packets compare equal.
 * Rendered (both by {@link #toString()} and JSON serialization) as
 * {@code value%mask}.
 */
public class Masked<F extends ValueType<F>> extends AbstractValueType<Masked<F>> {

	private F value;
	private F mask;

	private Masked(F value, F mask) {
		// Normalize: only keep the bits of the value that the mask covers.
		this.value = value.applyMask(mask);
		this.mask = mask;
	}

	/** Creates a masked value from a value and its mask. */
	public static <F extends ValueType<F>> Masked<F> of(F value, F mask) {
		return new Masked<F>(value, mask);
	}

	public F getValue() {
		return this.value;
	}

	public F getMask() {
		return this.mask;
	}

	@Override
	public Masked<F> applyMask(Masked<F> mask) {
		// Already masked on construction; re-masking is a no-op.
		return this;
	}

	@Override
	public int hashCode() {
		// Same mixing as always used here: 3 * value + 71 * mask.
		int result = this.value.hashCode() * 3;
		result += this.mask.hashCode() * 71;
		return result;
	}

	@Override
	public boolean equals(Object other) {
		if (!(other instanceof Masked)) {
			return false;
		}
		Masked<?> that = (Masked<?>) other;
		return that.value.equals(this.value) && that.mask.equals(this.mask);
	}

	@Override
	public String toString() {
		return render();
	}

	@Override
	public void serialize(JsonGenerator gen, SerializerProvider provider) throws IOException {
		gen.writeString(render());
	}

	// Shared "value%mask" rendering used by toString() and serialize().
	private String render() {
		return String.format("%s%%%s", this.value.toString(), this.mask.toString());
	}
}
package org.openshift.haproxy;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;

/**
 * Fetches a CSV status page from the given URL and parses each data row into
 * a {@link Gear}. The first line is treated as the CSV header and skipped.
 * <p>
 * Parsing is best-effort: any failure (network, malformed row) is logged via
 * a stack trace and the gears collected so far are kept.
 */
public class GearParser {

	ArrayList<Gear> gears = new ArrayList<Gear>(50);

	public GearParser(String urlToParse) {
		// FIX: try-with-resources — the reader (and underlying connection
		// stream) was previously never closed.
		// NOTE(review): uses the platform default charset, as before —
		// confirm the endpoint is ASCII/UTF-8 compatible.
		try (BufferedReader br = new BufferedReader(new InputStreamReader(
				new URL(urlToParse).openConnection().getInputStream()))) {
			String line;
			int i = 0;
			while ((line = br.readLine()) != null) {
				System.out.println("Line: " + line);
				if (i != 0) { // skip the CSV header row
					String[] splitLines = line.split(",");
					// Columns 1 and 0 are name fields, 9 and 7 numeric —
					// presumably haproxy CSV stat columns; verify against the feed.
					Gear currentGear = new Gear(splitLines[1], splitLines[0],
							Integer.parseInt(splitLines[9]),
							Integer.parseInt(splitLines[7]));
					gears.add(currentGear);
					System.out.println("Added new gear to arraylist");
				}
				i++;
			}
		} catch (Exception e) {
			// Best-effort: log and expose whatever was parsed before the failure.
			e.printStackTrace();
		}
	}

	public ArrayList<Gear> getGears() {
		return gears;
	}
}
package org.spongepowered.mod;

import com.google.common.base.Optional;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.ChatComponentText;
import net.minecraft.world.WorldServer;
import net.minecraftforge.common.DimensionManager;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.spongepowered.api.Game;
import org.spongepowered.api.GameRegistry;
import org.spongepowered.api.Platform;
import org.spongepowered.api.entity.player.Player;
import org.spongepowered.api.plugin.PluginManager;
import org.spongepowered.api.service.ServiceManager;
import org.spongepowered.api.service.command.CommandService;
import org.spongepowered.api.service.event.EventManager;
import org.spongepowered.api.service.scheduler.Scheduler;
import org.spongepowered.api.text.message.Message;
import org.spongepowered.api.util.annotation.NonnullByDefault;
import org.spongepowered.api.world.World;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.UUID;

import javax.annotation.Nullable;
import javax.inject.Inject;

/**
 * Core {@link Game} implementation backed by the Forge/FML runtime.
 * Version strings are read once from the jar manifests; services that are not
 * yet wired up throw {@link UnsupportedOperationException}.
 */
@NonnullByDefault
public final class SpongeGame implements Game {

    @Nullable private static final String apiVersion = Game.class.getPackage().getImplementationVersion();
    @Nullable private static final String implementationVersion = SpongeGame.class.getPackage().getImplementationVersion();

    private final PluginManager pluginManager;
    private final EventManager eventManager;
    private final GameRegistry gameRegistry;

    @Inject
    public SpongeGame(PluginManager plugin, EventManager event, GameRegistry registry) {
        this.pluginManager = plugin;
        this.eventManager = event;
        this.gameRegistry = registry;
    }

    @Override
    public Platform getPlatform() {
        // Anything that is not an explicit client is treated as a server.
        Side side = FMLCommonHandler.instance().getEffectiveSide();
        return side == Side.CLIENT ? Platform.CLIENT : Platform.SERVER;
    }

    @Override
    public PluginManager getPluginManager() {
        return this.pluginManager;
    }

    @Override
    public EventManager getEventManager() {
        return this.eventManager;
    }

    @Override
    public Collection<World> getWorlds() {
        // Each loaded Forge WorldServer doubles as a Sponge World via mixins.
        List<World> loaded = new ArrayList<World>();
        for (WorldServer server : DimensionManager.getWorlds()) {
            loaded.add((World) server);
        }
        return loaded;
    }

    @Override
    public World getWorld(UUID uniqueId) {
        // TODO: This needs to map to world id's somehow
        throw new UnsupportedOperationException();
    }

    @Override
    public World getWorld(String worldName) {
        // Linear scan by name; null when no loaded world matches.
        for (World candidate : getWorlds()) {
            if (candidate.getName().equals(worldName)) {
                return candidate;
            }
        }
        return null;
    }

    @Override
    @SideOnly(Side.SERVER)
    public void broadcastMessage(Message<?> message) {
        MinecraftServer.getServer().getConfigurationManager()
                .sendChatMsg(new ChatComponentText((String) message.getContent()));//TODO implement this properly
    }

    @Override
    public String getAPIVersion() {
        // Manifest may be absent when running from a dev workspace.
        return apiVersion != null ? apiVersion : "UNKNOWN";
    }

    @Override
    public String getImplementationVersion() {
        return implementationVersion != null ? implementationVersion : "UNKNOWN";
    }

    @Override
    public GameRegistry getRegistry() {
        return this.gameRegistry;
    }

    @Override
    public ServiceManager getServiceManager() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Scheduler getScheduler() {
        throw new UnsupportedOperationException();
    }

    @Override
    public CommandService getCommandDispatcher() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Collection<Player> getOnlinePlayers() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getMaxPlayers() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Optional<Player> getPlayer(UUID uniqueId) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Optional<Player> getPlayer(String name) {
        throw new UnsupportedOperationException();
    }
}
package net.maizegenetics.gbs.util; /** * Utility class for encoding tags into longs. * <p> * Sequencing reads are chunked into 32bp and recorded in a 64-bit long. Only * A (00), C (01), G (10), T (11) are encoded. Any other character sets the entire long to -1. * Missing data at the end is padded with poly-A or (0). This missing end, is tracked * by the tag length attribute. * <p> * Some of these methods should be transitioned to {@link net.maizegenetics.dna.snp.NucleotideAlignmentConstants}, * however, BaseEncoder only supports four states, while NucleotideAlignment includes gaps, insertions, and missing. * * @author Ed Buckler */ public class BaseEncoder { /** defines the number of bases fitting with a long */ public static final int chunkSize = 32; public static final int chunkSizeForInt = 16; /** defines the base order */ public static final char[] bases = {'A', 'C', 'G', 'T'}; private BaseEncoder() { } /** * Returns a long for a sequence in a String * @param seq * @return 2-bit encode sequence (-1 if an invalid sequence state is provided e.g. N) */ public static long getLongFromSeq(String seq) { int seqLength = seq.length(); long v = 0; for (int i = 0; i < seqLength; i++) { switch (seq.charAt(i)) { case 'A': case 'a': v = v << 2; break; case 'C': case 'c': v = (v << 2) + (byte) 1; break; case 'G': case 'g': v = (v << 2) + (byte) 2; break; case 'T': case 't': v = (v << 2) + (byte) 3; break; default: return -1; } } if (seqLength == chunkSize) { return v; } if (seqLength > chunkSize) { return -1; } v = (v << (2 * (chunkSize - seqLength))); //if shorter fill with AAAA return v; } /** * @param seq A String containing a DNA sequence. * @return result A array of Long containing the binary representation of the sequence. * null if sequence length is not a multiple of BaseEncoder.chunksize. 
*/ public static long[] getLongArrayFromSeq(String seq) { if (seq.length() % chunkSize != 0) { return null; } long[] result = new long[seq.length() / chunkSize]; for (int i = 0; i < result.length; i++) { result[i] = getLongFromSeq(seq.substring(i * chunkSize, (i + 1) * chunkSize)); } return result; } /** * @param seq A String containing a DNA sequence. * @return result A array of Long containing the binary representation of the sequence. * if sequence length is shorter than padded Length adds A to the end. */ public static long[] getLongArrayFromSeq(String seq, int paddedLength) { if(seq.length()<paddedLength) { seq=seq+"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA".substring(0, paddedLength-seq.length()); } // System.out.println("PadLength:"+seq.length()); return getLongArrayFromSeq(seq); } /** * Returns the reverse complement of a sequence already encoded in a 2-bit long. * <p> * Note: polyA is used represent unknown, but reverse complement will change it to polyT which does not mean the same * sometimes it is best to reverseComplement by text below * @param seq 2-bit encoded sequence * @param len length of the sequence * @return 2-bit reverse complement */ public static long getReverseComplement(long seq, byte len) { // if(seq==-1) return -1; long rev = 0; // byte b=0; long mask = 3; seq = ~seq; for (int i = 0; i < len; i++) { rev = (rev << 2) + (seq & mask); seq = seq >> 2; // System.out.println("v = " + v); } return rev; } /** * Returns the reverse complement of a sequence already encoded in a 2-bit long. * The entire long (32-bp) is reverse complemented. 
* <p> * Note: polyA is used represent unknown, but reverse complement will change it to polyT which does not mean the same * sometimes it is best to reverseComplement by text below * @param seq 2-bit encoded sequence * @return 2-bit reverse complement */ public static long getReverseComplement(long seq) { return getReverseComplement(seq, (byte) chunkSize); } /** * Returns the reverse complement of a arrays of sequences already encoded in a 2-bit long. * <p> * Note: polyA is used represent unknown, but reverse complement will change it to polyT which does not mean the same * sometimes it is best to reverseComplement by text below * @param seq array of 2-bit encoded sequences * @return array of 2-bit reverse complements */ public static long[] getReverseComplement(long[] seq) { long[] rev = new long[seq.length]; for (int i = 0; i < rev.length; i++) { rev[i] = getReverseComplement(seq[seq.length - i - 1], (byte) chunkSize); } return rev; } /** * Returns a string based reverse complement. Get around issues with the poly-A tailing in the 2-bit encoding approach. * * @param seq DNA sequence * @return reverse complement DNA sequence */ public static String getReverseComplement(String seq) { StringBuilder sb = new StringBuilder(seq.length()); for (int i = seq.length() - 1; i >= 0; i sb.append(getComplementBase(seq.charAt(i))); } return sb.toString(); } /** * Returns reverse complement for a sequence. * @param base * @return reverse complement of base */ public static char getComplementBase(char base) { switch (base) { case 'A': return 'T'; case 'C': return 'G'; case 'G': return 'C'; case 'T': return 'A'; } return 'N'; } /** * Returns the byte {@link net.maizegenetics.dna.snp.NucleotideAlignmentConstants} representation * used by TASSEL for the 2-bit encoded long. * <p> * e.g. 
A > 2-bit encode 00 > byte (0) * @param val 2-bit encoded DNA sequence * @return array of bytes for the DNA sequence */ public static byte[] getByteSeqFromLong(long val) { byte[] b = new byte[chunkSize]; long mask = 3; for (int i = 0; i < chunkSize; i++) { b[chunkSize - i - 1] = (byte) (val & mask); val = val >> 2; } return b; } /** * Returns the byte {@link net.maizegenetics.dna.snp.NucleotideAlignmentConstants} representation * used by TASSEL for the 2-bit encoded long. * <p> * e.g. A > 2-bit encode 00 > byte (0) * @param valA array of 2-bit encoded DNA sequence * @return array of bytes for the DNA sequence */ public static byte[] getByteSeqFromLong(long[] valA) { byte[] b = new byte[chunkSize * valA.length]; long mask = 3; long val; for (int j = 0; j < valA.length; j++) { val = valA[j]; for (int i = 0; i < chunkSize; i++) { b[(j * chunkSize) + chunkSize - i - 1] = (byte) (val & mask); val = val >> 2; } } return b; } /** * Returns the 2-bit encoded long represented by 32 bytes representing {@link net.maizegenetics.dna.snp.NucleotideAlignmentConstants} * representation * <p> * @param b array of bytes encoding NucleotideAlignmentConstants * @return 2-bit encoded long */ public static long getLongSeqFromByteArray(byte[] b) { //the byte array must be in 0-3 coding for A, C, G, T long v = 0; if (b.length != chunkSize) { return -1; } for (int i = 0; i < b.length; i++) { v = (v << 2) + b[i]; } return v; } /** * Return a string representation of the 2-bit encoded long. * @param val 2-bit encoded sequence * @param len length of the sequence * @return DNA sequence as a string */ public static String getSequenceFromLong(long val, byte len) { StringBuilder seq = new StringBuilder(chunkSize + 4); long mask = 3; for (int i = 0; i < len; i++) { byte base = (byte) (val & mask); seq.insert(0, bases[base]); val = val >> 2; } return seq.toString(); } /** * Return a string representation of an array of 2-bit encoded longs. 
* @param val array of 2-bit encoded sequences * @return DNA sequence as a string */ public static String getSequenceFromLong(long[] val) { StringBuilder seq = new StringBuilder(); for (long v : val) { seq.append(getSequenceFromLong(v)); } return seq.toString(); } /** * Split a 2-bit encoded long into 2 integers. * @param val 2-bit encoded long sequence * @return array of 2-bit encoded integers */ public static int[] getIntFromLong(long val) { int[] ival = new int[2]; ival[0] = (int) (val >> chunkSize); ival[1] = (int) (val); return ival; } /** * Return a string representation of the 2-bit encoded Integer (16bp). * @param val 2-bit encoded sequence * @return DNA sequence as a string */ public static String getSequenceFromInt(int val) { StringBuilder seq = new StringBuilder(chunkSizeForInt + 1); long mask = 3; for (int i = 0; i < chunkSizeForInt; i++) { byte base = (byte) (val & mask); seq.insert(0, bases[base]); val = val >> 2; } return seq.toString(); } /** * Returns the position of the first low quality positions based on a quality * fastq (?) string. * @param quality fastq quality string * @param minQual minimum quality threshold * @return position of first low quality position (quality length is returned is not low * quality base is found. */ public static int getFirstLowQualityPos(String quality, int minQual) { int qualInt = 0; for (int i = 0; i < quality.length(); i++) { qualInt = (int) quality.charAt(i) - 64; if (qualInt < minQual) { return i; } } return quality.length(); } /** * Return a string representation of the 2-bit encoded long. * @param val 2-bit encoded sequence * @return DNA sequence as a string */ public static String getSequenceFromLong(long val) { return getSequenceFromLong(val, (byte) chunkSize); } /** * Returns the number of bp differences between two 2-bit encoded longs. * Maximum divergence is used to save time when only interested in very similar * sequences. 
* @param seq1 2-bit encoded sequence * @param seq2 2-bit encoded sequence * @param maxDivergence threshold for counting divergence upto * @return count of the divergence (above the maxDivergence, chunkSize is returned) */ public static byte seqDifferences(long seq1, long seq2, int maxDivergence) { long mask = 3; byte cnt = 0; long diff = seq1 ^ seq2; for (int x = 0; x < chunkSize && cnt <= maxDivergence; x++) { if ((diff & mask) > 0) { cnt++; } diff = diff >> 2; // System.out.println("v = " + v); } if (cnt > maxDivergence) { cnt = (byte) chunkSize; } // if(x<(chunkSize-1)) cnt=(byte)chunkSize; //if didn't get to the end of the sequence set to maximum return cnt; } /** * Returns the number of bp differences between two 2-bit encoded longs. * @param seq1 2-bit encoded sequence * @param seq2 2-bit encoded sequence * @return count of the divergence */ public static byte seqDifferences(long seq1, long seq2) { long mask = 3; byte cnt = 0; long diff = seq1 ^ seq2; for (int x = 0; x < chunkSize; x++) { if ((diff & mask) > 0) { cnt++; } diff = diff >> 2; // System.out.println("v = " + v); } return cnt; } /** * Returns the number of sequencing differences between two 2-bit encoded longs. * Maximum divergence is used to save time when only interested in very similar * sequences. 
* @param seq1 2-bit encoded sequence * @param seq2 2-bit encoded sequence * @param lengthOfComp number of sites to compare * @param maxDivergence threshold for counting divergence upto * @return count of the divergence (above the maxDivergence, chunkSize is returned) */ public static byte seqDifferencesForSubset(long seq1, long seq2, int lengthOfComp, int maxDivergence) { long mask = 3; byte cnt = 0; long diff = seq1 ^ seq2; diff = diff >> (2 * (chunkSize - lengthOfComp)); //shift to 5' end of sequence for (int x = 0; x < lengthOfComp && cnt < maxDivergence; x++) { if ((diff & mask) > 0) { cnt++; } diff = diff >> 2; } return cnt; } /** * Trim the poly-A off the sequence string * @param s input sequence * @return sequence with polyA removed */ public static String removePolyAFromEnd(String s) { int index = s.length() - 1; while (s.charAt(index) == 'A') { index if (index < 1) { return null; } } return s.substring(0, index + 1); } }
package net.mcft.copy.tweaks;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;

import net.mcft.copy.betterstorage.api.crafting.BetterStorageCrafting;
import net.mcft.copy.betterstorage.api.crafting.ShapedStationRecipe;
import net.mcft.copy.core.util.RandomUtils;
import net.mcft.copy.core.util.WorldUtils;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.monster.EntityBlaze;
import net.minecraft.entity.monster.EntityCreeper;
import net.minecraft.entity.monster.EntityEnderman;
import net.minecraft.entity.monster.EntitySkeleton;
import net.minecraft.entity.monster.EntitySpider;
import net.minecraft.entity.monster.EntityWitch;
import net.minecraft.entity.passive.EntityChicken;
import net.minecraft.entity.passive.EntityCow;
import net.minecraft.entity.passive.EntityHorse;
import net.minecraft.entity.passive.EntityPig;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.Item.ToolMaterial;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
import net.minecraft.item.ItemTool;
import net.minecraft.item.crafting.CraftingManager;
import net.minecraft.item.crafting.IRecipe;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.entity.living.LivingDropsEvent;
import net.minecraftforge.event.entity.living.LivingEvent.LivingUpdateEvent;
import net.minecraftforge.event.world.BlockEvent.HarvestDropsEvent;
import net.minecraftforge.oredict.OreDictionary;
import net.minecraftforge.oredict.ShapedOreRecipe;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.Mod.EventHandler;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.registry.GameRegistry;

/**
 * Gameplay rebalance mod: rescales tool/armor durability, removes the vanilla
 * recipes for mid/high-tier gear and re-adds them as experience-costing
 * BetterStorage crafting-station recipes, tweaks gravel/mob drops, and makes
 * chickens shed feathers over time.
 */
@Mod(modid = copyVanillaTweaks.MOD_ID, version = "@VERSION@", useMetadata = false,
     dependencies = "required-after:copycore;required-after:betterstorage")
public class copyVanillaTweaks {

	public static final String MOD_ID = "copyVanillaTweaks";
	public static final String MOD_NAME = "copy's Vanilla Tweaks";

	@EventHandler
	@SuppressWarnings("unchecked") // vanilla recipe list is raw in this Forge version
	public void postInit(FMLPostInitializationEvent event) {

		MinecraftForge.EVENT_BUS.register(this);

		// Adjust durability of tools and armor
		setToolDurability(  80, Items.wooden_sword, Items.wooden_pickaxe, Items.wooden_shovel, Items.wooden_axe, Items.wooden_hoe);
		setToolDurability( 160, Items.stone_sword, Items.stone_pickaxe, Items.stone_shovel, Items.stone_axe, Items.stone_hoe);
		setToolDurability( 160, Items.golden_sword, Items.golden_pickaxe, Items.golden_shovel, Items.golden_axe, Items.golden_hoe);
		setToolDurability( 320, Items.iron_sword, Items.iron_pickaxe, Items.iron_shovel, Items.iron_axe, Items.iron_hoe);
		setToolDurability(1720, Items.diamond_sword, Items.diamond_pickaxe, Items.diamond_shovel, Items.diamond_axe, Items.diamond_hoe);

		setArmorDurability(10, Items.leather_helmet, Items.leather_chestplate, Items.leather_leggings, Items.leather_boots);
		setArmorDurability(12, Items.golden_helmet, Items.golden_chestplate, Items.golden_leggings, Items.golden_boots);
		setArmorDurability(16, Items.iron_helmet, Items.iron_chestplate, Items.iron_leggings, Items.iron_boots);
		setArmorDurability(20, Items.chainmail_helmet, Items.chainmail_chestplate, Items.chainmail_leggings, Items.chainmail_boots);
		setArmorDurability(32, Items.diamond_helmet, Items.diamond_chestplate, Items.diamond_leggings, Items.diamond_boots);

		// FIX: was a raw Set/HashSet — parameterized to Set<Item>.
		// Crafting recipes for these outputs are removed entirely and
		// re-added below as crafting-station recipes.
		Set<Item> removeRecipesOf = new HashSet<Item>(Arrays.asList(
				Items.stone_sword, Items.stone_pickaxe, Items.stone_shovel,
				Items.stone_axe, Items.stone_hoe,
				Items.golden_sword, Items.golden_pickaxe, Items.golden_shovel,
				Items.golden_axe, Items.golden_hoe,
				Items.iron_sword, Items.iron_pickaxe, Items.iron_shovel,
				Items.iron_axe, Items.iron_hoe,
				Items.diamond_sword, Items.diamond_pickaxe, Items.diamond_shovel,
				Items.diamond_axe, Items.diamond_hoe,
				Items.golden_helmet, Items.golden_chestplate, Items.golden_leggings, Items.golden_boots,
				Items.iron_helmet, Items.iron_chestplate, Items.iron_leggings, Items.iron_boots,
				Items.diamond_helmet, Items.diamond_chestplate, Items.diamond_leggings, Items.diamond_boots,
				Items.bucket, Items.bed,
				Item.getItemFromBlock(Blocks.enchanting_table),
				Item.getItemFromBlock(Blocks.anvil)
			));

		// Recipes whose cobblestone ingredient is upgraded to any "stone" ore-dict entry.
		Set<Item> replaceCobbleWithStoneIn = new HashSet<Item>(Arrays.asList(
				Item.getItemFromBlock(Blocks.lever),
				Item.getItemFromBlock(Blocks.dispenser),
				Item.getItemFromBlock(Blocks.dropper),
				Item.getItemFromBlock(Blocks.piston),
				Items.brewing_stand
			));

		ListIterator<IRecipe> recipeIterator = CraftingManager.getInstance().getRecipeList().listIterator();
		while (recipeIterator.hasNext()) {
			IRecipe recipe = recipeIterator.next();
			ItemStack output = recipe.getRecipeOutput();
			if (output == null) continue;
			if (removeRecipesOf.contains(output.getItem()))
				recipeIterator.remove();
			// FIX: guard the cast — a non-ShapedOreRecipe producing one of
			// these outputs would previously throw ClassCastException.
			else if (replaceCobbleWithStoneIn.contains(output.getItem()) &&
			         (recipe instanceof ShapedOreRecipe)) {
				Object[] input = ((ShapedOreRecipe)recipe).getInput();
				for (int i = 0; i < input.length; i++)
					if (input[i] instanceof List) {
						List<ItemStack> stacks = (List<ItemStack>)input[i];
						for (ItemStack stack : stacks)
							if (stack.getItem() == Item.getItemFromBlock(Blocks.cobblestone)) {
								input[i] = OreDictionary.getOres("stone");
								break;
							}
					}
			}
		}

		// Stone tools: crafted on the bench from flint heads lashed with string.
		ToolMaterial.STONE.customCraftingMaterial = Items.flint;
		GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(Items.stone_sword),
				" x ", " o ", "s/s", 'o', "stone", 'x', Items.flint, '/', "stickWood", 's', Items.string));
		GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(Items.stone_pickaxe),
				"xox", "s/s", " / ", 'o', "stone", 'x', Items.flint, '/', "stickWood", 's', Items.string));
		GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(Items.stone_shovel),
				" x ", "s/s", " / ", 'x', Items.flint, '/', "stickWood", 's', Items.string));
		GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(Items.stone_axe),
				"xo ", "x/ ", "s/s", 'o', "stone", 'x', Items.flint, '/', "stickWood", 's', Items.string));
		GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(Items.stone_hoe),
				"xo ", "s/s", " / ", 'o', "stone", 'x', Items.flint, '/', "stickWood", 's', Items.string));

		// Gold tools and armor
		addStationRecipe(Items.golden_sword, 2, "o", "o", "/", 'o', "ingotGold", '/', "stickWood");
		addStationRecipe(Items.golden_pickaxe, 2, "ooo", " / ", " / ", 'o', "ingotGold", '/', "stickWood");
		addStationRecipe(Items.golden_shovel, 1, "o", "/", "/", 'o', "ingotGold", '/', "stickWood");
		addStationRecipe(Items.golden_axe, 1, "oo", "o/", " /", 'o', "ingotGold", '/', "stickWood");
		addStationRecipe(Items.golden_hoe, 1, "oo", " /", " /", 'o', "ingotGold", '/', "stickWood");
		addStationRecipe(Items.golden_helmet, 2, "ooo", "oHo", 'o', "ingotGold", 'H', Items.leather_helmet);
		addStationRecipe(Items.golden_chestplate, 3, "oCo", "ooo", "ooo", 'o', "ingotGold", 'C', Items.leather_chestplate);
		addStationRecipe(Items.golden_leggings, 2, "ooo", "oLo", "o o", 'o', "ingotGold", 'L', Items.leather_leggings);
		addStationRecipe(Items.golden_boots, 2, "oBo", "o o", 'o', "ingotGold", 'B', Items.leather_boots);

		// Iron tools and armor
		addStationRecipe(Items.iron_sword, 6, "o", "o", "/", 'o', "ingotIron", '/', "stickWood");
		addStationRecipe(Items.iron_pickaxe, 8, "ooo", " / ", " / ", 'o', "ingotIron", '/', "stickWood");
		addStationRecipe(Items.iron_shovel, 4, "o", "/", "/", 'o', "ingotIron", '/', "stickWood");
		addStationRecipe(Items.iron_axe, 4, "oo", "o/", " /", 'o', "ingotIron", '/', "stickWood");
		addStationRecipe(Items.iron_hoe, 4, "oo", " /", " /", 'o', "ingotIron", '/', "stickWood");
		addStationRecipe(Items.iron_helmet, 8, "ooo", "oHo", 'o', "ingotIron", 'H', Items.leather_helmet);
		addStationRecipe(Items.iron_chestplate, 12, "oCo", "ooo", "ooo", 'o', "ingotIron", 'C', Items.leather_chestplate);
		addStationRecipe(Items.iron_leggings, 10, "ooo", "oLo", "o o", 'o', "ingotIron", 'L', Items.leather_leggings);
		addStationRecipe(Items.iron_boots, 8, "oBo", "o o", 'o', "ingotIron", 'B', Items.leather_boots);

		// Diamond tools and armor (emerald 'x' accents on the better pieces)
		addStationRecipe(Items.diamond_sword, 12, "o", "x", "/", 'o', "gemDiamond", 'x', "gemEmerald", '/', "stickWood");
		addStationRecipe(Items.diamond_pickaxe, 16, "oxo", " / ", " / ", 'o', "gemDiamond", 'x', "gemEmerald", '/', "stickWood");
		addStationRecipe(Items.diamond_shovel, 8, "o", "/", "/", 'o', "gemDiamond", '/', "stickWood");
		addStationRecipe(Items.diamond_axe, 8, "ox", "o/", " /", 'o', "gemDiamond", 'x', "gemEmerald", '/', "stickWood");
		addStationRecipe(Items.diamond_hoe, 8, "ox", " /", " /", 'o', "gemDiamond", 'x', "gemEmerald", '/', "stickWood");
		addStationRecipe(Items.diamond_helmet, 16, "oxo", "oHo", 'o', "gemDiamond", 'x', "gemEmerald", 'H', Items.leather_helmet);
		addStationRecipe(Items.diamond_chestplate, 24, "oCo", "oxo", "ooo", 'o', "gemDiamond", 'x', "gemEmerald", 'C', Items.leather_chestplate);
		addStationRecipe(Items.diamond_leggings, 20, "oxo", "oLo", "o o", 'o', "gemDiamond", 'x', "gemEmerald", 'L', Items.leather_leggings);
		addStationRecipe(Items.diamond_boots, 16, "oBo", "o o", 'o', "gemDiamond", 'B', Items.leather_boots);

		// Other recipes
		addStationRecipe(Items.bucket, 2, "o o", " o ", 'o', "ingotIron");
		addStationRecipe(Items.bed, 20, "WWW", "PPP", 'W', Blocks.wool, 'P', "plankWood");
		// FIX: this pattern was garbled in the source ("o "###"); reconstructed
		// from the vanilla enchanting-table shape (book over diamond-obsidian-
		// diamond over obsidian). TODO confirm against the original mod source.
		addStationRecipe(Blocks.enchanting_table, 30, " B ", "o#o", "###", 'B', Items.book, 'o', "gemDiamond", '#', Blocks.obsidian);
		addStationRecipe(Blocks.anvil, 20, "OOO", " o ", "ooo", 'O', "blockIron", 'o', "ingotIron");
		addStationRecipe(new ItemStack(Items.leather, 4), 1, "ooo", "ooo", "ooo", 'o', Items.rotten_flesh);
	}

	// Sets the max damage of every listed tool item.
	private static void setToolDurability(int durability, Item... items) {
		for (Item item : items)
			item.setMaxDamage(durability);
	}

	// Per-slot durability multipliers: helmet, chestplate, leggings, boots.
	private static final int[] maxDamageArray = new int[]{ 11, 16, 15, 13 };

	// Sets armor durability scaled by the piece's slot multiplier.
	private static void setArmorDurability(int durability, ItemArmor... items) {
		for (ItemArmor item : items)
			item.setMaxDamage(durability * maxDamageArray[item.armorType]);
	}

	// Registers a BetterStorage crafting-station recipe that costs experience.
	private static void addStationRecipe(ItemStack output, int experience, Object... input) {
		BetterStorageCrafting.addStationRecipe(new ShapedStationRecipe(
				new ItemStack[]{ null, null, null, null, output }, input
			).setRequiredExperience(experience));
	}
	private static void addStationRecipe(Item output, int experience, Object... input) {
		addStationRecipe(new ItemStack(output), experience, input);
	}
	private static void addStationRecipe(Block output, int experience, Object... input) {
		addStationRecipe(new ItemStack(output), experience, input);
	}

	@SubscribeEvent
	public void onHarvestDrops(HarvestDropsEvent event) {
		// Change gravel drops so it always drops gravel when not using a shovel.
		// Chance of getting flint differs by shovel tier (0.35 - 0.15/level):
		// Wood & Gold: 35%, Stone: 20%, Iron: 5%, Diamond: 0%.
		if ((event.block == Blocks.gravel) && !event.isSilkTouching) {
			ItemStack holding = ((event.harvester != null) ? event.harvester.getCurrentEquippedItem() : null);
			ItemTool tool = (((holding != null) && (holding.getItem() instanceof ItemTool))
					? (ItemTool)holding.getItem() : null);
			boolean dropsFlint = ((tool != null) &&
					tool.getToolClasses(holding).contains("shovel") &&
					RandomUtils.getBoolean(0.35 - tool.getHarvestLevel(holding, "shovel") * 0.15));
			event.drops.clear();
			event.drops.add(dropsFlint ? new ItemStack(Items.flint) : new ItemStack(Blocks.gravel));
		}
	}

	@SubscribeEvent
	public void onLivingDrops(LivingDropsEvent event) {
		// Don't drop any extras if the entity is a child.
		if (event.entityLiving.isChild()) return;
		// Chickens drop 3 extra feathers.
		if (event.entity instanceof EntityChicken)
			event.drops.add(makeItemToDrop(event.entity, Items.feather, 3));
		// Pigs drop 2 extra porkchops.
		else if (event.entity instanceof EntityPig)
			event.drops.add(makeItemToDrop(event.entity, Items.porkchop, 2));
		// Cows drop 1 extra leather and 1 extra beef.
		else if (event.entity instanceof EntityCow) {
			event.drops.add(makeItemToDrop(event.entity, Items.leather, 1));
			event.drops.add(makeItemToDrop(event.entity, Items.beef, 1));
		// Horses drop 1-2 extra leather.
		} else if (event.entity instanceof EntityHorse)
			event.drops.add(makeItemToDrop(event.entity, Items.leather, RandomUtils.getInt(1, 3)));
		// Spiders drop 1 extra string.
		else if (event.entity instanceof EntitySpider)
			event.drops.add(makeItemToDrop(event.entity, Items.string, 1));
		// Skeletons drop 1 extra bone.
		else if (event.entity instanceof EntitySkeleton)
			event.drops.add(makeItemToDrop(event.entity, Items.bone, 1));
		// Creepers drop 1 extra gunpowder.
		else if (event.entity instanceof EntityCreeper)
			event.drops.add(makeItemToDrop(event.entity, Items.gunpowder, 1));
		// Endermen drop 0-1 extra enderpearls.
		else if ((event.entity instanceof EntityEnderman) && RandomUtils.getBoolean(0.5))
			event.drops.add(makeItemToDrop(event.entity, Items.ender_pearl, 1));
		// Blazes drop 1 extra blaze rod.
		else if (event.entity instanceof EntityBlaze)
			event.drops.add(makeItemToDrop(event.entity, Items.blaze_rod, 1));
		// Witches have a 25% chance to drop a random potion.
		else if ((event.entity instanceof EntityWitch) && RandomUtils.getBoolean(0.25)) {
			int[] damageValues = {
					8197, 8194, 8205, 8195,   // Healing, Swiftness, Water Breathing, Fire Resistance,
					16396, 16388, 16394, 16392 // Harming, Poison, Slowness, Weakness
				};
			int damage = damageValues[RandomUtils.getInt(damageValues.length)];
			event.drops.add(makeItemToDrop(event.entity, new ItemStack(Items.potionitem, 1, damage)));
		}
	}

	// Builds an EntityItem at the entity's position with a small random motion.
	public static EntityItem makeItemToDrop(Entity entity, ItemStack stack) {
		EntityItem item = new EntityItem(entity.worldObj, entity.posX, entity.posY, entity.posZ, stack);
		item.motionX = RandomUtils.getGaussian() * 0.05F;
		item.motionY = RandomUtils.getGaussian() * 0.05F + 0.2F;
		item.motionZ = RandomUtils.getGaussian() * 0.05F;
		return item;
	}
	public static EntityItem makeItemToDrop(Entity entity, Item item, int amount) {
		return makeItemToDrop(entity, new ItemStack(item, amount));
	}

	@SubscribeEvent
	public void onLivingUpdate(LivingUpdateEvent event) {
		// Adult chickens have a 50% chance to drop a feather every 8 minutes.
		// (The original comment said 25%, but the code rolls 0.5 — the code is
		// kept as authoritative.)
		if (!event.entity.worldObj.isRemote &&
		    (event.entity instanceof EntityChicken) &&
		    !event.entityLiving.isChild() &&
		    ((event.entity.ticksExisted % (8 * 60 * 20)) == 0) &&
		    RandomUtils.getBoolean(0.5))
			WorldUtils.dropStackFromEntity(event.entity, new ItemStack(Items.feather), 1.5F);
	}

}
package ragnardb.plugin; public class ColumnDefinition { private String _columnName; private int _sqlType; private boolean notNull; private boolean _null; private boolean auto_Increment; private boolean identity; private int startInt; private int incrementInt; private boolean primaryKey; private boolean hash; private boolean unique; public ColumnDefinition(String columnName, int sqlType) { _columnName = columnName; _sqlType = sqlType; } public String getColumnName() { return _columnName; } public void setColumnName(String columnName) { _columnName = columnName; } public int getSQLType() { return _sqlType; } public void setSQLType(int sqlType) { _sqlType = sqlType; } public void setNotNull(boolean in){ notNull = in; } public void setNull(boolean in){ _null = in; } public void setAutoIncrement(boolean in){ auto_Increment = in; } public void setIdentity(boolean in){ identity = in; } public void setStartInt(int in){ startInt = in; } public void setIncrementInt(int in){ incrementInt = in; } public void setPrimaryKey(boolean in){ primaryKey = in; } public void setHash(boolean in){ hash = in; } public void setUnique(boolean in){ unique = in; } public boolean getNotNull(){ return notNull; } public boolean getNull(){ return _null; } public boolean getAutoIncrement(){ return auto_Increment; } public boolean getIdentity(){ return identity; } public int getStartInt(){ return startInt; } public int getIncrementInt(){ return incrementInt; } public boolean getPrimaryKey(){ return primaryKey; } public boolean getHash(){ return hash; } public boolean getUnique(){ return unique; } }
package redis.clients.jedis;

import java.io.Closeable;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.commons.pool2.impl.GenericObjectPoolConfig;

import redis.clients.jedis.BinaryClient.LIST_POSITION;

/**
 * Client for a Redis Cluster deployment. Each keyed command below is wrapped
 * in a {@code JedisClusterCommand} whose {@code run(key)} routes the call via
 * the connection handler, retrying up to {@code maxRedirections} times.
 *
 * NOTE(review): the class continues beyond this chunk; only comments are
 * added here — all code tokens are unchanged.
 */
public class JedisCluster implements JedisCommands, BasicCommands, Closeable {
// Total number of hash slots in a Redis Cluster; keys are mapped onto these.
public static final short HASHSLOTS = 16384;
// Default connection timeout — presumably milliseconds; TODO confirm against
// the units JedisSlotBasedConnectionHandler expects.
private static final int DEFAULT_TIMEOUT = 2000;
// Default cap on MOVED/ASK redirection retries per command.
private static final int DEFAULT_MAX_REDIRECTIONS = 5;

public static enum Reset {
SOFT, HARD
}

// Maximum redirections a single command will follow before giving up.
private int maxRedirections;
private JedisClusterConnectionHandler connectionHandler;

// Convenience constructors: each delegates to the 4-arg constructor,
// filling in DEFAULT_TIMEOUT / DEFAULT_MAX_REDIRECTIONS / a fresh pool
// config as needed.
public JedisCluster(Set<HostAndPort> nodes, int timeout) {
this(nodes, timeout, DEFAULT_MAX_REDIRECTIONS);
}

public JedisCluster(Set<HostAndPort> nodes) {
this(nodes, DEFAULT_TIMEOUT);
}

public JedisCluster(Set<HostAndPort> nodes, int timeout, int maxRedirections) {
this(nodes, timeout, maxRedirections, new GenericObjectPoolConfig());
}

public JedisCluster(Set<HostAndPort> nodes, final GenericObjectPoolConfig poolConfig) {
this(nodes, DEFAULT_TIMEOUT, DEFAULT_MAX_REDIRECTIONS, poolConfig);
}

public JedisCluster(Set<HostAndPort> nodes, int timeout, final GenericObjectPoolConfig poolConfig) {
this(nodes, timeout, DEFAULT_MAX_REDIRECTIONS, poolConfig);
}

/**
 * Primary constructor: builds a slot-based connection handler over the given
 * seed nodes and records the redirection limit used by every command.
 */
public JedisCluster(Set<HostAndPort> jedisClusterNode, int timeout, int maxRedirections, final GenericObjectPoolConfig poolConfig) {
this.connectionHandler = new JedisSlotBasedConnectionHandler(jedisClusterNode, poolConfig, timeout);
this.maxRedirections = maxRedirections;
}

/**
 * Best-effort shutdown: destroys every node's connection pool. Failures on
 * one pool are swallowed so the remaining pools are still destroyed.
 */
@Override
public void close() {
if (connectionHandler != null) {
for (JedisPool pool : connectionHandler.getNodes().values()) {
try {
if (pool != null) {
pool.destroy();
}
} catch (Exception e) {
// deliberately ignored: keep destroying the remaining pools
}
}
}
}

// SET key value — routed to the node owning key's slot.
@Override
public String set(final String key, final String value) {
return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
@Override
public String execute(Jedis connection) {
return connection.set(key,
value); } }.run(key); } @Override public String set(final String key, final String value, final String nxxx, final String expx, final long time) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.set(key, value, nxxx, expx, time); } }.run(key); } @Override public String get(final String key) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.get(key); } }.run(key); } @Override public Boolean exists(final String key) { return new JedisClusterCommand<Boolean>(connectionHandler, maxRedirections) { @Override public Boolean execute(Jedis connection) { return connection.exists(key); } }.run(key); } @Override public Long persist(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.persist(key); } }.run(key); } @Override public String type(final String key) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.type(key); } }.run(key); } @Override public Long expire(final String key, final int seconds) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.expire(key, seconds); } }.run(key); } @Override public Long pexpire(final String key, final long milliseconds) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.pexpire(key, milliseconds); } }.run(key); } @Override public Long expireAt(final String key, final long unixTime) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.expireAt(key, unixTime); } 
}.run(key); } @Override public Long pexpireAt(final String key, final long millisecondsTimestamp) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.pexpireAt(key, millisecondsTimestamp); } }.run(key); } @Override public Long ttl(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.ttl(key); } }.run(key); } @Override public Boolean setbit(final String key, final long offset, final boolean value) { return new JedisClusterCommand<Boolean>(connectionHandler, maxRedirections) { @Override public Boolean execute(Jedis connection) { return connection.setbit(key, offset, value); } }.run(key); } @Override public Boolean setbit(final String key, final long offset, final String value) { return new JedisClusterCommand<Boolean>(connectionHandler, maxRedirections) { @Override public Boolean execute(Jedis connection) { return connection.setbit(key, offset, value); } }.run(key); } @Override public Boolean getbit(final String key, final long offset) { return new JedisClusterCommand<Boolean>(connectionHandler, maxRedirections) { @Override public Boolean execute(Jedis connection) { return connection.getbit(key, offset); } }.run(key); } @Override public Long setrange(final String key, final long offset, final String value) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.setrange(key, offset, value); } }.run(key); } @Override public String getrange(final String key, final long startOffset, final long endOffset) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.getrange(key, startOffset, endOffset); } }.run(key); } @Override public String getSet(final String key, final String value) { return 
new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.getSet(key, value); } }.run(key); } @Override public Long setnx(final String key, final String value) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.setnx(key, value); } }.run(key); } @Override public String setex(final String key, final int seconds, final String value) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.setex(key, seconds, value); } }.run(key); } @Override public Long decrBy(final String key, final long integer) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.decrBy(key, integer); } }.run(key); } @Override public Long decr(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.decr(key); } }.run(key); } @Override public Long incrBy(final String key, final long integer) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.incrBy(key, integer); } }.run(key); } @Override public Double incrByFloat(final String key, final double value) { return new JedisClusterCommand<Double>(connectionHandler, maxRedirections) { @Override public Double execute(Jedis connection) { return connection.incrByFloat(key, value); } }.run(key); } @Override public Long incr(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.incr(key); } }.run(key); } @Override public Long append(final String key, final String value) { return new 
JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.append(key, value); } }.run(key); } @Override public String substr(final String key, final int start, final int end) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.substr(key, start, end); } }.run(key); } @Override public Long hset(final String key, final String field, final String value) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.hset(key, field, value); } }.run(key); } @Override public String hget(final String key, final String field) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.hget(key, field); } }.run(key); } @Override public Long hsetnx(final String key, final String field, final String value) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.hsetnx(key, field, value); } }.run(key); } @Override public String hmset(final String key, final Map<String, String> hash) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.hmset(key, hash); } }.run(key); } @Override public List<String> hmget(final String key, final String... 
fields) { return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) { @Override public List<String> execute(Jedis connection) { return connection.hmget(key, fields); } }.run(key); } @Override public Long hincrBy(final String key, final String field, final long value) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.hincrBy(key, field, value); } }.run(key); } @Override public Boolean hexists(final String key, final String field) { return new JedisClusterCommand<Boolean>(connectionHandler, maxRedirections) { @Override public Boolean execute(Jedis connection) { return connection.hexists(key, field); } }.run(key); } @Override public Long hdel(final String key, final String... field) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.hdel(key, field); } }.run(key); } @Override public Long hlen(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.hlen(key); } }.run(key); } @Override public Set<String> hkeys(final String key) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.hkeys(key); } }.run(key); } @Override public List<String> hvals(final String key) { return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) { @Override public List<String> execute(Jedis connection) { return connection.hvals(key); } }.run(key); } @Override public Map<String, String> hgetAll(final String key) { return new JedisClusterCommand<Map<String, String>>(connectionHandler, maxRedirections) { @Override public Map<String, String> execute(Jedis connection) { return connection.hgetAll(key); } }.run(key); } @Override public Long rpush(final String key, 
final String... string) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.rpush(key, string); } }.run(key); } @Override public Long lpush(final String key, final String... string) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.lpush(key, string); } }.run(key); } @Override public Long llen(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.llen(key); } }.run(key); } @Override public List<String> lrange(final String key, final long start, final long end) { return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) { @Override public List<String> execute(Jedis connection) { return connection.lrange(key, start, end); } }.run(key); } @Override public String ltrim(final String key, final long start, final long end) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.ltrim(key, start, end); } }.run(key); } @Override public String lindex(final String key, final long index) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.lindex(key, index); } }.run(key); } @Override public String lset(final String key, final long index, final String value) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.lset(key, index, value); } }.run(key); } @Override public Long lrem(final String key, final long count, final String value) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.lrem(key, 
count, value); } }.run(key); } @Override public String lpop(final String key) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.lpop(key); } }.run(key); } @Override public String rpop(final String key) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.rpop(key); } }.run(key); } @Override public Long sadd(final String key, final String... member) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.sadd(key, member); } }.run(key); } @Override public Set<String> smembers(final String key) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.smembers(key); } }.run(key); } @Override public Long srem(final String key, final String... 
member) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.srem(key, member); } }.run(key); } @Override public String spop(final String key) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.spop(key); } }.run(key); } @Override public Set<String> spop(final String key, final long count) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.spop(key, count); } }.run(key); } @Override public Long scard(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.scard(key); } }.run(key); } @Override public Boolean sismember(final String key, final String member) { return new JedisClusterCommand<Boolean>(connectionHandler, maxRedirections) { @Override public Boolean execute(Jedis connection) { return connection.sismember(key, member); } }.run(key); } @Override public String srandmember(final String key) { return new JedisClusterCommand<String>(connectionHandler, maxRedirections) { @Override public String execute(Jedis connection) { return connection.srandmember(key); } }.run(key); } @Override public List<String> srandmember(final String key, final int count) { return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) { @Override public List<String> execute(Jedis connection) { return connection.srandmember(key, count); } }.run(key); } @Override public Long strlen(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.strlen(key); } }.run(key); } @Override public Long zadd(final String key, final double score, final String member) { return new 
JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zadd(key, score, member); } }.run(key); } @Override public Long zadd(final String key, final Map<String, Double> scoreMembers) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zadd(key, scoreMembers); } }.run(key); } @Override public Set<String> zrange(final String key, final long start, final long end) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrange(key, start, end); } }.run(key); } @Override public Long zrem(final String key, final String... member) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zrem(key, member); } }.run(key); } @Override public Double zincrby(final String key, final double score, final String member) { return new JedisClusterCommand<Double>(connectionHandler, maxRedirections) { @Override public Double execute(Jedis connection) { return connection.zincrby(key, score, member); } }.run(key); } @Override public Long zrank(final String key, final String member) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zrank(key, member); } }.run(key); } @Override public Long zrevrank(final String key, final String member) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zrevrank(key, member); } }.run(key); } @Override public Set<String> zrevrange(final String key, final long start, final long end) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) 
{ return connection.zrevrange(key, start, end); } }.run(key); } @Override public Set<Tuple> zrangeWithScores(final String key, final long start, final long end) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrangeWithScores(key, start, end); } }.run(key); } @Override public Set<Tuple> zrevrangeWithScores(final String key, final long start, final long end) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrevrangeWithScores(key, start, end); } }.run(key); } @Override public Long zcard(final String key) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zcard(key); } }.run(key); } @Override public Double zscore(final String key, final String member) { return new JedisClusterCommand<Double>(connectionHandler, maxRedirections) { @Override public Double execute(Jedis connection) { return connection.zscore(key, member); } }.run(key); } @Override public List<String> sort(final String key) { return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) { @Override public List<String> execute(Jedis connection) { return connection.sort(key); } }.run(key); } @Override public List<String> sort(final String key, final SortingParams sortingParameters) { return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) { @Override public List<String> execute(Jedis connection) { return connection.sort(key, sortingParameters); } }.run(key); } @Override public Long zcount(final String key, final double min, final double max) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zcount(key, min, max); } }.run(key); } @Override public Long zcount(final 
String key, final String min, final String max) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zcount(key, min, max); } }.run(key); } @Override public Set<String> zrangeByScore(final String key, final double min, final double max) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrangeByScore(key, min, max); } }.run(key); } @Override public Set<String> zrangeByScore(final String key, final String min, final String max) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrangeByScore(key, min, max); } }.run(key); } @Override public Set<String> zrevrangeByScore(final String key, final double max, final double min) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrevrangeByScore(key, max, min); } }.run(key); } @Override public Set<String> zrangeByScore(final String key, final double min, final double max, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrangeByScore(key, min, max, offset, count); } }.run(key); } @Override public Set<String> zrevrangeByScore(final String key, final String max, final String min) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrevrangeByScore(key, max, min); } }.run(key); } @Override public Set<String> zrangeByScore(final String key, final String min, final String max, final int offset, final int count) { return new 
JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrangeByScore(key, min, max, offset, count); } }.run(key); } @Override public Set<String> zrevrangeByScore(final String key, final double max, final double min, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrevrangeByScore(key, max, min, offset, count); } }.run(key); } @Override public Set<Tuple> zrangeByScoreWithScores(final String key, final double min, final double max) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrangeByScoreWithScores(key, min, max); } }.run(key); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final String key, final double max, final double min) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrevrangeByScoreWithScores(key, max, min); } }.run(key); } @Override public Set<Tuple> zrangeByScoreWithScores(final String key, final double min, final double max, final int offset, final int count) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrangeByScoreWithScores(key, min, max, offset, count); } }.run(key); } @Override public Set<String> zrevrangeByScore(final String key, final String max, final String min, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrevrangeByScore(key, max, min, offset, count); } }.run(key); } @Override public Set<Tuple> zrangeByScoreWithScores(final 
String key, final String min, final String max) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrangeByScoreWithScores(key, min, max); } }.run(key); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final String key, final String max, final String min) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrevrangeByScoreWithScores(key, max, min); } }.run(key); } @Override public Set<Tuple> zrangeByScoreWithScores(final String key, final String min, final String max, final int offset, final int count) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrangeByScoreWithScores(key, min, max, offset, count); } }.run(key); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final String key, final double max, final double min, final int offset, final int count) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrevrangeByScoreWithScores(key, max, min, offset, count); } }.run(key); } @Override public Set<Tuple> zrevrangeByScoreWithScores(final String key, final String max, final String min, final int offset, final int count) { return new JedisClusterCommand<Set<Tuple>>(connectionHandler, maxRedirections) { @Override public Set<Tuple> execute(Jedis connection) { return connection.zrevrangeByScoreWithScores(key, max, min, offset, count); } }.run(key); } @Override public Long zremrangeByRank(final String key, final long start, final long end) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zremrangeByRank(key, start, end); } }.run(key); } @Override 
public Long zremrangeByScore(final String key, final double start, final double end) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zremrangeByScore(key, start, end); } }.run(key); } @Override public Long zremrangeByScore(final String key, final String start, final String end) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zremrangeByScore(key, start, end); } }.run(key); } @Override public Long zlexcount(final String key, final String min, final String max) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zlexcount(key, min, max); } }.run(key); } @Override public Set<String> zrangeByLex(final String key, final String min, final String max) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrangeByLex(key, min, max); } }.run(key); } @Override public Set<String> zrangeByLex(final String key, final String min, final String max, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrangeByLex(key, min, max, offset, count); } }.run(key); } @Override public Set<String> zrevrangeByLex(final String key, final String max, final String min) { return new JedisClusterCommand<Set<String>>(connectionHandler, maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrevrangeByLex(key, max, min); } }.run(key); } @Override public Set<String> zrevrangeByLex(final String key, final String max, final String min, final int offset, final int count) { return new JedisClusterCommand<Set<String>>(connectionHandler, 
maxRedirections) { @Override public Set<String> execute(Jedis connection) { return connection.zrevrangeByLex(key, max, min, offset, count); } }.run(key); } @Override public Long zremrangeByLex(final String key, final String min, final String max) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.zremrangeByLex(key, min, max); } }.run(key); } @Override public Long linsert(final String key, final LIST_POSITION where, final String pivot, final String value) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.linsert(key, where, pivot, value); } }.run(key); } @Override public Long lpushx(final String key, final String... string) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.lpushx(key, string); } }.run(key); } @Override public Long rpushx(final String key, final String... string) { return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) { @Override public Long execute(Jedis connection) { return connection.rpushx(key, string); } }.run(key); } /** * @deprecated unusable command, this command will be removed in 3.0.0. */ @Override @Deprecated public List<String> blpop(final String arg) { return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) { @Override public List<String> execute(Jedis connection) { return connection.blpop(arg); } }.run(arg); } /** * @deprecated unusable command, this command will be removed in 3.0.0. 
*/
// NOTE(review): every command below follows the same template: wrap the Jedis call
// in an anonymous JedisClusterCommand and dispatch via run(key) so the handler can
// route to the cluster node owning the key's hash slot. Methods that call run(null)
// have no routing key; whether run(null) succeeds or throws depends on
// JedisClusterCommand.run (not visible here) — confirm before relying on the
// @Deprecated key-less commands in cluster mode.
@Override
@Deprecated
public List<String> brpop(final String arg) {
    return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) {
        @Override
        public List<String> execute(Jedis connection) {
            return connection.brpop(arg);
        }
    }.run(arg);
}

@Override
public Long del(final String key) {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.del(key);
        }
    }.run(key);
}

// NOTE(review): echo routes with run(null) even though sibling single-argument
// commands route by their argument (cf. brpop(arg) above) — confirm intended.
@Override
public String echo(final String string) {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.echo(string);
        }
    }.run(null);
}

@Override
public Long move(final String key, final int dbIndex) {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.move(key, dbIndex);
        }
    }.run(key);
}

@Override
public Long bitcount(final String key) {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.bitcount(key);
        }
    }.run(key);
}

@Override
public Long bitcount(final String key, final long start, final long end) {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.bitcount(key, start, end);
        }
    }.run(key);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String ping() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.ping();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String quit() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.quit();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String flushDB() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.flushDB();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public Long dbSize() {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.dbSize();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String select(final int index) {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.select(index);
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String flushAll() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.flushAll();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String auth(final String password) {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.auth(password);
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String save() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.save();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String bgsave() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.bgsave();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String bgrewriteaof() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.bgrewriteaof();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public Long lastsave() {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.lastsave();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String shutdown() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.shutdown();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String info() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.info();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String info(final String section) {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.info(section);
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String slaveof(final String host, final int port) {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.slaveof(host, port);
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String slaveofNoOne() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.slaveofNoOne();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public int getDB() {
    // Auto-unboxed from the Integer returned by the command wrapper.
    return new JedisClusterCommand<Integer>(connectionHandler, maxRedirections) {
        @Override
        public Integer execute(Jedis connection) {
            return connection.getDB();
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String debug(final DebugParams params) {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.debug(params);
        }
    }.run(null);
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public String configResetStat() {
    return new JedisClusterCommand<String>(connectionHandler, maxRedirections) {
        @Override
        public String execute(Jedis connection) {
            return connection.configResetStat();
        }
    }.run(null);
}

/** Returns the live node-name to connection-pool map held by the connection handler. */
public Map<String, JedisPool> getClusterNodes() {
    return connectionHandler.getNodes();
}

/** Deprecated, BasicCommands is not fit to JedisCluster, so it'll be removed */
@Deprecated
@Override
public Long waitReplicas(int replicas, long timeout) {
    // Unimplemented stub: always returns null, never issues WAIT.
    return null;
}

@Override
public ScanResult<Entry<String, String>> hscan(final String key, final String cursor) {
    return new JedisClusterCommand<ScanResult<Entry<String, String>>>(connectionHandler, maxRedirections) {
        @Override
        public ScanResult<Entry<String, String>> execute(Jedis connection) {
            return connection.hscan(key, cursor);
        }
    }.run(key);
}

@Override
public ScanResult<String> sscan(final String key, final String cursor) {
    return new JedisClusterCommand<ScanResult<String>>(connectionHandler, maxRedirections) {
        @Override
        public ScanResult<String> execute(Jedis connection) {
            return connection.sscan(key, cursor);
        }
    }.run(key);
}

@Override
public ScanResult<Tuple> zscan(final String key, final String cursor) {
    return new JedisClusterCommand<ScanResult<Tuple>>(connectionHandler, maxRedirections) {
        @Override
        public ScanResult<Tuple> execute(Jedis connection) {
            return connection.zscan(key, cursor);
        }
    }.run(key);
}

@Override
public Long pfadd(final String key, final String... elements) {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.pfadd(key, elements);
        }
    }.run(key);
}

@Override
public long pfcount(final String key) {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.pfcount(key);
        }
    }.run(key);
}

@Override
public List<String> blpop(final int timeout, final String key) {
    return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) {
        @Override
        public List<String> execute(Jedis connection) {
            return connection.blpop(timeout, key);
        }
    }.run(key);
}

@Override
public List<String> brpop(final int timeout, final String key) {
    return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) {
        @Override
        public List<String> execute(Jedis connection) {
            return connection.brpop(timeout, key);
        }
    }.run(key);
}

// NOTE(review): (lat, lon) argument order differs from upstream Jedis geoadd
// (which takes longitude first) — presumably intentional in this fork; verify
// against the connection.geoadd implementation.
@Override
public Long geoadd(final String key, final double lat, final double lon, final String member) {
    return new JedisClusterCommand<Long>(connectionHandler, maxRedirections) {
        @Override
        public Long execute(Jedis connection) {
            return connection.geoadd(key, lat, lon, member);
        }
    }.run(key);
}

@Override
public List<String> georadius(final String key, final double lat, final double lon, final double radius,
        final String radius_type, final String... fields) {
    return new JedisClusterCommand<List<String>>(connectionHandler, maxRedirections) {
        @Override
        public List<String> execute(Jedis connection) {
            return connection.georadius(key, lat, lon, radius, radius_type, fields);
        }
    }.run(key);
}
}
package sdimkov.cucumber;

import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Rule-based, chainable formatter for line-oriented text files.
 *
 * <p>Rules are keyed on a line's first word: {@link #setBlankLinesBefore} normalizes
 * the number of blank lines preceding a matched line, and {@link #setIndent} forces
 * a matched line's leading indentation. Call {@link #format()} after configuring
 * rules, then {@link #print()}, {@link #saveTo(Path)} or {@link #save()}.
 */
public class FluentFormatter {
    // Number of consecutive blank lines currently at the tail of `output`.
    // Maintained by format(); applyBlankLines relies on it to know how many
    // trailing lines may safely be removed.
    private int blankLinesCount;
    private List<String> output;

    private final List<String> input;
    private final Path inputPath;
    private final Map<String, Integer> blankLineRules = new HashMap<>();
    private final Map<String, Integer> indentRules = new HashMap<>();

    /**
     * @param file the target file for formatting
     *
     * @throws IOException if the file cannot be read
     */
    public FluentFormatter(File file) throws IOException {
        inputPath = file.toPath();
        input = Files.readAllLines(inputPath, Charset.defaultCharset());
    }

    /**
     * Set formatting rule that ensures count of blank lines before matched lines.
     *
     * @param firstWord only lines starting with this word will be matched
     * @param lines     the count of blank lines (-1 leaves the existing count untouched)
     *
     * @return this for chaining
     */
    public FluentFormatter setBlankLinesBefore(String firstWord, int lines) {
        if (firstWord.length() == 0)
            throw new IllegalArgumentException("firstWord can't be blank");
        blankLineRules.put(firstWord, lines);
        return this;
    }

    /**
     * Set formatting rule that ensures indent size for matched lines.
     *
     * @param firstWord only lines starting with this word will be matched
     * @param indent    the indentation size (in spaces)
     *
     * @return this for chaining
     */
    public FluentFormatter setIndent(String firstWord, int indent) {
        if (firstWord.length() == 0)
            throw new IllegalArgumentException("firstWord can't be blank");
        indentRules.put(firstWord, indent);
        return this;
    }

    /**
     * Apply all formatting rules.
     *
     * @return this for chaining
     */
    public FluentFormatter format() {
        output = new ArrayList<>(input.size());
        // BUGFIX: reset counter state so repeated format() calls start clean.
        blankLinesCount = 0;
        for (String line : input) {
            String word = getFirstWord(line);
            if (blankLineRules.containsKey(word)) {
                applyBlankLines(blankLineRules.get(word));
            }
            if (indentRules.containsKey(word)) {
                applyIndent(line, indentRules.get(word));
            } else if ("".equals(word)) {
                blankLinesCount++;
                output.add("");
            } else {
                output.add(line);
                // BUGFIX: a non-blank line ends any run of blank lines. Without
                // this reset (previously only applyIndent reset the counter), a
                // later blank-line rule would over-count and applyBlankLines'
                // removal branch could delete non-blank content from the output.
                blankLinesCount = 0;
            }
        }
        return this;
    }

    /**
     * Generic print method.
     *
     * @param stream the output stream
     *
     * @return this for chaining
     */
    public FluentFormatter print(PrintStream stream) {
        for (String line : output)
            stream.println(line);
        return this;
    }

    /**
     * Print to System.out.
     *
     * @return this for chaining
     */
    public FluentFormatter print() {
        return print(System.out);
    }

    /**
     * Save formatted file to given destination.
     *
     * @param filePath the destination path
     *
     * @return this for chaining
     *
     * @throws IOException if the file cannot be written
     */
    public FluentFormatter saveTo(Path filePath) throws IOException {
        Files.write(filePath, output, Charset.defaultCharset());
        return this;
    }

    /**
     * Override original file with formatted.
     *
     * @return this for chaining
     *
     * @throws IOException if the file cannot be written
     */
    public FluentFormatter save() throws IOException {
        return saveTo(inputPath);
    }

    // Re-emits the line trimmed, preceded by `indent` spaces; a rule-matched
    // line is by definition non-blank, so the blank-run counter resets here.
    private void applyIndent(String line, int indent) {
        String indentStr = new String(new char[indent]).replace('\0', ' ');
        output.add(indentStr + line.trim());
        blankLinesCount = 0;
    }

    // Pads or trims trailing blank lines in `output` so that exactly
    // `blankLines` precede the upcoming matched line. -1 means "no opinion".
    private void applyBlankLines(int blankLines) {
        if (blankLines == -1)
            return;
        int diff = blankLines - this.blankLinesCount;
        if (diff > 0) {
            for (int i = 0; i < diff; i++)
                output.add("");
        } else if (diff < 0) {
            // diff >= -blankLinesCount, so only trailing blanks are removed.
            for (int i = diff; i < 0; i++)
                output.remove(output.size() - 1);
        }
    }

    // First whitespace-delimited word of the trimmed line; "" for blank lines.
    private String getFirstWord(String line) {
        return line.trim().split(" ")[0];
    }
}
package seedu.address.model; import java.time.LocalDate; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Logger; import com.google.common.eventbus.Subscribe; import javafx.collections.transformation.FilteredList; import seedu.address.commons.core.ComponentManager; import seedu.address.commons.core.LogsCenter; import seedu.address.commons.core.UnmodifiableObservableList; import seedu.address.commons.events.model.FilePathChangeEvent; import seedu.address.commons.events.model.TaskListChangedEvent; import seedu.address.commons.events.ui.AgendaTimeRangeChangedEvent; import seedu.address.commons.util.StringUtil; import seedu.address.logic.RecurringTaskManager; import seedu.address.logic.util.DateFormatterUtil; import seedu.address.model.tag.Tag; import seedu.address.model.tag.UniqueTagList; import seedu.address.model.task.Name; import seedu.address.model.task.ReadOnlyTask; import seedu.address.model.task.RecurringType; import seedu.address.model.task.Task; import seedu.address.model.task.TaskOccurrence; import seedu.address.model.task.TaskDate; import seedu.address.model.task.TaskType; import seedu.address.model.task.UniqueTaskList; import seedu.address.model.task.UniqueTaskList.TaskNotFoundException; import seedu.address.model.task.UniqueTaskList.TimeslotOverlapException; /** * Represents the in-memory model of the address book data. All changes to any * model should be synchronized. 
*/
public class ModelManager extends ComponentManager implements Model {
    private static final Logger logger = LogsCenter.getLogger(ModelManager.class);

    // Backing store; all mutating operations go through taskMaster and then
    // raise a TaskListChangedEvent so listeners (storage/UI) stay in sync.
    private final TaskMaster taskMaster;
    private final List<Task> tasks;
    // View layer over task occurrences; filtered in place via setPredicate.
    private final FilteredList<TaskOccurrence> filteredTaskComponents;
    // Last filter applied, re-applied after edits/archives to keep the view stable.
    private Expression previousExpression;
    // Reference date last pushed by an AgendaTimeRangeChangedEvent.
    private TaskDate previousDate;

    // @@author A0135782Y
    /**
     * Initializes a ModelManager with the given TaskList.
     * TaskList and its variables should not be null.
     */
    public ModelManager(TaskMaster src, UserPrefs userPrefs) {
        super();
        assert src != null;
        assert userPrefs != null;
        logger.fine("Initializing with address book: " + src + " and user prefs " + userPrefs);
        taskMaster = new TaskMaster(src);
        tasks = taskMaster.getTasks();
        filteredTaskComponents = new FilteredList<>(taskMaster.getTaskComponentList());
        RecurringTaskManager.getInstance().setTaskList(taskMaster.getUniqueTaskList());
        // Rolling recurring tasks forward may mutate data; broadcast if it did.
        if (RecurringTaskManager.getInstance().updateAnyRecurringTasks()) {
            indicateTaskListChanged();
        }
        previousExpression = new PredicateExpression(new InitialQualifier());
        previousDate = new TaskDate(new Date(System.currentTimeMillis()));
    }
    // @@author

    public ModelManager() {
        this(new TaskMaster(), new UserPrefs());
    }

    // @@author A0135782Y
    /**
     * Initializes a ModelManager from read-only initial data, then narrows the
     * view to today's tasks (unlike the TaskMaster constructor above).
     */
    public ModelManager(ReadOnlyTaskMaster initialData, UserPrefs userPrefs) {
        taskMaster = new TaskMaster(initialData);
        tasks = taskMaster.getTasks();
        filteredTaskComponents = new FilteredList<>(taskMaster.getTaskComponentList());
        RecurringTaskManager.getInstance().setTaskList(taskMaster.getUniqueTaskList());
        if (RecurringTaskManager.getInstance().updateAnyRecurringTasks()) {
            indicateTaskListChanged();
        }
        previousExpression = new PredicateExpression(new InitialQualifier());
        previousDate = new TaskDate(new Date(System.currentTimeMillis()));
        showTaskToday();
    }
    // @@author

    @Override
    public void resetData(ReadOnlyTaskMaster newData) {
        taskMaster.resetData(newData);
        indicateTaskListChanged();
    }

    @Override
    public ReadOnlyTaskMaster getTaskMaster() {
        return taskMaster;
    }

    /** Raises an event to indicate the model has changed */
    private void indicateTaskListChanged() {
        raise(new TaskListChangedEvent(taskMaster));
    }

    /** Removes the whole task that the given occurrence belongs to. */
    @Override
    public synchronized void deleteTask(TaskOccurrence target) throws TaskNotFoundException {
        taskMaster.removeTask(target.getTaskReference());
        indicateTaskListChanged();
    }

    // @@author A0147995H
    /** Edits a task in place, then re-applies the previous filter so the view is preserved. */
    @Override
    public synchronized void editTask(Task target, Name name, UniqueTagList tags, TaskDate startDate,
            TaskDate endDate, RecurringType recurringType) throws TaskNotFoundException, TimeslotOverlapException {
        taskMaster.updateTask(target, name, tags, startDate, endDate, recurringType);
        indicateTaskListChanged();
        updateFilteredTaskList(previousExpression);
    }
    // @@author

    // @@author A0135782Y
    /** Adds a task (correcting overdue recurrences) and resets the view to show all. */
    @Override
    public synchronized void addTask(Task task)
            throws UniqueTaskList.DuplicateTaskException, TimeslotOverlapException {
        taskMaster.addTask(task);
        RecurringTaskManager.getInstance().correctAddingOverdueTasks(task);
        updateFilteredListToShowAll();
        indicateTaskListChanged();
    }

    // @@author A0147967J
    /** Marks a single occurrence as archived, then re-applies the previous filter. */
    @Override
    public synchronized void archiveTask(TaskOccurrence target) throws TaskNotFoundException {
        taskMaster.archiveTask(target);
        indicateTaskListChanged();
        updateFilteredTaskList(previousExpression);
    }

    @Override
    public void changeDirectory(String filePath) {
        raise(new FilePathChangeEvent(filePath));
    }
    // @@author

    /** Returns a defensive copy of the task list. */
    @Override
    public List<ReadOnlyTask> getTaskList() {
        return new ArrayList<ReadOnlyTask>(tasks);
    }

    @Override
    public UnmodifiableObservableList<TaskOccurrence> getFilteredTaskComponentList() {
        return new UnmodifiableObservableList<>(filteredTaskComponents);
    }

    // "Show all" actually means "show all non-archived"; also remembered as the
    // previous expression for later re-application.
    @Override
    public void updateFilteredListToShowAll() {
        previousExpression = new PredicateExpression(new ArchiveQualifier(false));
        filteredTaskComponents.setPredicate(new PredicateExpression(new ArchiveQualifier(false))::satisfies);
    }

    @Override
    public void updateFilteredTaskList(Set<String> keywords, Set<String> tags, Date startDate, Date endDate,
            Date deadline) {
        updateFilteredTaskList(
                new PredicateExpression(new FindQualifier(keywords, tags, startDate, endDate, deadline)));
    }

    // NOTE(review): does not update previousExpression — only the constructors
    // and updateFilteredListToShowAll do; confirm this is intended.
    @Override
    public void updateFilteredTaskList(Expression expression) {
        filteredTaskComponents.setPredicate(expression::satisfies);
    }

    /** Event-bus hook: narrows the view to the day carried by the agenda event. */
    @Override
    @Subscribe
    public void setSystemTime(AgendaTimeRangeChangedEvent atrce){
        previousDate = atrce.getInputDate();
        updateFilteredTaskList(new HashSet<String>(), new HashSet<String>(),
                DateFormatterUtil.getStartOfDay(atrce.getInputDate().getDate()),
                DateFormatterUtil.getEndOfDay(atrce.getInputDate().getDate()), null);
    }

    /** Narrows the view to tasks falling within today's date range. */
    public void showTaskToday() {
        Date date = DateFormatterUtil.localDateToDate(LocalDate.now());
        updateFilteredTaskList(new HashSet<String>(), new HashSet<String>(),
                DateFormatterUtil.getStartOfDay(date), DateFormatterUtil.getEndOfDay(date), null);
    }

    public interface Expression {
        boolean satisfies(TaskOccurrence t);
    }

    private class PredicateExpression implements Expression {
        private final Qualifier qualifier;

        PredicateExpression(Qualifier qualifier) {
            this.qualifier = qualifier;
        }

        @Override
        public boolean satisfies(TaskOccurrence task) {
            return qualifier.run(task);
        }
    }

    interface Qualifier {
        boolean run(TaskOccurrence task);
    }

    // @@author A0147967J
    /** Matches non-archived occurrences of a given task type. */
    private class TypeQualifier implements Qualifier {
        private TaskType typeKeyWords;

        TypeQualifier(TaskType typeKeyWords) {
            this.typeKeyWords = typeKeyWords;
        }

        @Override
        public boolean run(TaskOccurrence task) {
            return task.getTaskReference().getTaskType().equals(typeKeyWords) && !task.isArchived();
        }
    }

    /** Matches everything; used as the initial "no filter" state. */
    private class InitialQualifier implements Qualifier {
        InitialQualifier() {
        }

        @Override
        public boolean run(TaskOccurrence task) {
            return true;
        }
    }
    // @@author

    // @@author A0135782Y
    /** Matches occurrences whose archived flag equals the configured value. */
    private class ArchiveQualifier implements Qualifier {
        private boolean isArchived;

        ArchiveQualifier(boolean isItArchive) {
            this.isArchived = isItArchive;
        }

        @Override
        public boolean run(TaskOccurrence task) {
            return task.isArchived() == isArchived;
        }
    }
    // @@author

    // @@author A0147995H
    /** Case-insensitive substring match on the task name; empty keyword set matches all. */
    private class NameQualifier implements Qualifier {
        private Set<String> nameKeyWords;

        NameQualifier(Set<String> nameKeyWords) {
            this.nameKeyWords = nameKeyWords;
        }

        @Override
        public boolean run(TaskOccurrence task) {
            if (nameKeyWords.isEmpty())
                return true;
            return nameKeyWords.stream().filter(
                    keyword -> StringUtil.containsIgnoreCase(task.getTaskReference().getName().fullName, keyword))
                    .findAny().isPresent();
        }
    }

    /** Case-insensitive match of any query tag against the task's tag names. */
    private class TagQualifier implements Qualifier {
        private Set<String> tagSet;

        TagQualifier(Set<String> tagSet) {
            this.tagSet = tagSet;
        }

        // Flattens the task's tags into one space-joined string for matching.
        private String tagToString(TaskOccurrence task) {
            Set<Tag> tagSet = task.getTaskReference().getTags().toSet();
            Set<String> tagStringSet = new HashSet<String>();
            for (Tag t : tagSet) {
                tagStringSet.add(t.tagName);
            }
            return String.join(" ", tagStringSet);
        }

        @Override
        public boolean run(TaskOccurrence task) {
            if (tagSet.isEmpty()) {
                return true;
            }
            return tagSet.stream().filter(tag -> StringUtil.containsIgnoreCase(tagToString(task), tag)).findAny()
                    .isPresent();
        }
    }

    /** Matches non-floating occurrences whose [start, end] lies within [startTime, endTime]. */
    private class PeriodQualifier implements Qualifier {
        private final int START_DATE_INDEX = 0;
        private final int END_DATE_INDEX = 1;

        private Date startTime;
        private Date endTime;

        PeriodQualifier(Date startTime, Date endTime) {
            this.startTime = startTime;
            this.endTime = endTime;
        }

        // Returns {start, end} dates, null for floating tasks; start element is
        // null when the occurrence has no start date.
        private Date[] extractTaskPeriod(TaskOccurrence task) {
            TaskType type = task.getTaskReference().getTaskType();
            if (type.equals(TaskType.FLOATING)) {
                return null;
            }
            Date startDate;
            if (task.getStartDate().getDateInLong() == TaskDate.DATE_NOT_PRESENT) {
                startDate = null;
            } else {
                startDate = new Date(task.getStartDate().getDateInLong());
            }
            Date endDate = new Date(task.getEndDate().getDateInLong());
            return new Date[] { startDate, endDate };
        }

        @Override
        public boolean run(TaskOccurrence task) {
            // No end bound means the period filter is inactive.
            if (this.endTime == null)
                return true;
            Date[] timeArray = extractTaskPeriod(task);
            if (timeArray == null)
                return false;
            Date startDate = timeArray[START_DATE_INDEX];
            Date endDate = timeArray[END_DATE_INDEX];
            if (startDate == null) {
                // Deadline-only occurrence: compare the end date alone.
                if (!endDate.before(this.startTime) && !endDate.after(this.endTime)) {
                    return true;
                }
                return false;
            }
            if (!startDate.before(this.startTime) && !endDate.after(this.endTime))
                return true;
            return false;
        }
    }

    /** Matches deadline-only occurrences ending on or before the given deadline. */
    private class DeadlineQualifier implements Qualifier {
        private Date deadline;

        DeadlineQualifier(Date deadline) {
            this.deadline = deadline;
        }

        @Override
        public boolean run(TaskOccurrence task) {
            if (this.deadline == null)
                return true;
            if (task.getTaskReference().getTaskType().equals(TaskType.FLOATING))
                return false;
            // Local intentionally shadows the field: it is the task's own deadline.
            Date deadline = new Date(task.getEndDate().getDateInLong());
            if ( (deadline.before(this.deadline) || this.deadline.equals(deadline))
                    && task.getStartDate().getDateInLong() == TaskDate.DATE_NOT_PRESENT)
                return true;
            return false;
        }
    }

    /**
     * Composite qualifier built from find-command arguments. Special keywords
     * short-circuit: "-C" matches archived tasks only, "-F" floating tasks only;
     * otherwise name, tag, period and deadline qualifiers are AND-ed together.
     */
    private class FindQualifier implements Qualifier {
        private NameQualifier nameQualifier;
        private TagQualifier tagQualifier;
        private PeriodQualifier periodQualifier;
        private DeadlineQualifier deadlineQualifier;
        private TypeQualifier typeQualifier = null;
        private ArchiveQualifier archiveQualifier;

        FindQualifier(Set<String> keywordSet, Set<String> tagSet, Date startTime, Date endTime, Date deadline) {
            if (keywordSet.contains("-C")) {
                this.archiveQualifier = new ArchiveQualifier(true);
            }
            if (keywordSet.contains("-F"))
                this.typeQualifier = new TypeQualifier(TaskType.FLOATING);
            this.nameQualifier = new NameQualifier(keywordSet);
            this.tagQualifier = new TagQualifier(tagSet);
            this.periodQualifier = new PeriodQualifier(startTime, endTime);
            this.deadlineQualifier = new DeadlineQualifier(deadline);
        }

        @Override
        public boolean run(TaskOccurrence task) {
            // "-F" takes precedence over "-C" when both flags are present.
            if (this.typeQualifier != null)
                return typeQualifier.run(task);
            if (this.archiveQualifier != null) {
                return archiveQualifier.run(task);
            }
            return nameQualifier.run(task) && tagQualifier.run(task) && periodQualifier.run(task)
                    && deadlineQualifier.run(task);
        }
    }
    // @@author

    @Override
    public Expression getPreviousExpression() {
        return previousExpression;
    }

    @Override
    public TaskDate getPreviousDate(){
        return previousDate;
    }
}
package seedu.address.model;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Logger;

import com.google.common.base.Joiner;

import javafx.collections.transformation.FilteredList;
import seedu.address.commons.core.ComponentManager;
import seedu.address.commons.core.LogsCenter;
import seedu.address.commons.core.UnmodifiableObservableList;
import seedu.address.commons.events.model.ToDoAppChangedEvent;
import seedu.address.commons.exceptions.IllegalValueException;
import seedu.address.commons.util.CollectionUtil;
import seedu.address.commons.util.StringUtil;
import seedu.address.model.person.Deadline;
import seedu.address.model.person.ReadOnlyTask;
import seedu.address.model.person.Task;
import seedu.address.model.person.UniqueTaskList;
import seedu.address.model.person.UniqueTaskList.TaskNotFoundException;

/**
 * Represents the in-memory model of the address book data. All changes to any
 * model should be synchronized.
 */
public class ModelManager extends ComponentManager implements Model {
    private static final Logger logger = LogsCenter.getLogger(ModelManager.class);

    private final ToDoApp toDoApp;
    private final FilteredList<ReadOnlyTask> filteredTasks;

    /**
     * Initializes a ModelManager with the given toDoApp and userPrefs.
     */
    public ModelManager(ReadOnlyToDoApp toDoApp, UserPrefs userPrefs) {
        super();
        assert !CollectionUtil.isAnyNull(toDoApp, userPrefs);
        logger.fine("Initializing with ToDoApp: " + toDoApp + " and user prefs " + userPrefs);
        this.toDoApp = new ToDoApp(toDoApp);
        filteredTasks = new FilteredList<>(this.toDoApp.getTaskList());
    }

    public ModelManager() {
        this(new ToDoApp(), new UserPrefs());
    }

    @Override
    public void resetData(ReadOnlyToDoApp newData) {
        toDoApp.resetData(newData);
        indicateToDoAppChanged();
    }

    @Override
    public ReadOnlyToDoApp getToDoApp() {
        return toDoApp;
    }

    /** Raises an event to indicate the model has changed */
    private void indicateToDoAppChanged() {
        raise(new ToDoAppChangedEvent(toDoApp));
    }

    @Override
    public synchronized void deleteTask(ReadOnlyTask target) throws TaskNotFoundException {
        toDoApp.removeTask(target);
        indicateToDoAppChanged();
    }

    @Override
    public synchronized void addTask(Task task) throws UniqueTaskList.DuplicateTaskException {
        toDoApp.addTask(task);
        updateFilteredListToShowAll();
        indicateToDoAppChanged();
    }

    // @@author A0114395E
    /** Adds a task at the given index (used for undo/redo-style reinsertion). */
    @Override
    public synchronized void addTask(Task task, int idx) throws UniqueTaskList.DuplicateTaskException {
        toDoApp.addTask(task, idx);
        updateFilteredListToShowAll();
        indicateToDoAppChanged();
    }
    // @@author

    @Override
    public void updateTask(int filteredTaskListIndex, ReadOnlyTask editedTask)
            throws UniqueTaskList.DuplicateTaskException {
        assert editedTask != null;
        // Translate the index in the filtered view back to the backing list.
        int toDoAppIndex = filteredTasks.getSourceIndex(filteredTaskListIndex);
        toDoApp.updateTask(toDoAppIndex, editedTask);
        indicateToDoAppChanged();
    }

    @Override
    public UnmodifiableObservableList<ReadOnlyTask> getFilteredTaskList() {
        return new UnmodifiableObservableList<>(filteredTasks);
    }

    @Override
    public void updateFilteredListToShowAll() {
        filteredTasks.setPredicate(null);
    }

    /**
     * Dispatches to a qualifier based on a leading field keyword
     * ("name"/"deadline"/"priority"/"completion"); the remaining keywords form
     * the query. NOTE(review): mutates the caller's keyword set via remove, and
     * "priority" will throw NumberFormatException on a non-numeric query —
     * pre-existing behavior, preserved.
     */
    @Override
    public void updateFilteredTaskList(Set<String> keywords) {
        if (keywords.contains("name")) {
            keywords.remove("name");
            updateFilteredTaskList(new PredicateExpression(new NameQualifier(keywords)));
        } else if (keywords.contains("deadline")) {
            keywords.remove("deadline");
            updateFilteredTaskList(new PredicateExpression(new DeadlineQualifier(keywords)));
        } else if (keywords.contains("priority")) {
            keywords.remove("priority");
            updateFilteredTaskList(new PredicateExpression(
                    new PriorityQualifier(Integer.parseInt(Joiner.on(" ").skipNulls().join(keywords)))));
        } else if (keywords.contains("completion")) {
            keywords.remove("completion");
            updateFilteredTaskList(new PredicateExpression(
                    new CompletionQualifier(Joiner.on(" ").skipNulls().join(keywords))));
        }
    }

    private void updateFilteredTaskList(Expression expression) {
        filteredTasks.setPredicate(expression::satisfies);
    }

    interface Expression {
        boolean satisfies(ReadOnlyTask task);

        String toString();
    }

    private class PredicateExpression implements Expression {
        private final Qualifier qualifier;

        PredicateExpression(Qualifier qualifier) {
            this.qualifier = qualifier;
        }

        @Override
        public boolean satisfies(ReadOnlyTask task) {
            return qualifier.run(task);
        }

        @Override
        public String toString() {
            return qualifier.toString();
        }
    }

    interface Qualifier {
        boolean run(ReadOnlyTask task);

        String toString();
    }

    /** Matches tasks whose name contains any of the given words (case-insensitive). */
    private class NameQualifier implements Qualifier {
        private Set<String> nameKeyWords;

        NameQualifier(Set<String> nameKeyWords) {
            this.nameKeyWords = nameKeyWords;
        }

        @Override
        public boolean run(ReadOnlyTask task) {
            return nameKeyWords.stream()
                    .filter(keyword -> StringUtil.containsWordIgnoreCase(task.getName().fullName, keyword)).findAny()
                    .isPresent();
        }

        @Override
        public String toString() {
            return "name=" + String.join(", ", nameKeyWords);
        }
    }

    // @@author A0124591H
    /** Matches tasks whose deadline string equals the parsed query deadline. */
    private class DeadlineQualifier implements Qualifier {
        private Deadline deadlineKeyDeadline;
        private String deadlineKeyString;

        DeadlineQualifier(Set<String> deadlineKeyInputs) {
            try {
                this.deadlineKeyDeadline = new Deadline(Joiner.on(" ").join(deadlineKeyInputs));
                this.deadlineKeyString = deadlineKeyDeadline.toString();
            } catch (IllegalValueException ignored) {
                // BUGFIX: was an empty catch that left the qualifier half-initialized.
                // An unparsable deadline now puts the qualifier in an explicit
                // "matches nothing" state instead of silently carrying nulls.
                this.deadlineKeyDeadline = null;
                this.deadlineKeyString = null;
            }
        }

        @Override
        public boolean run(ReadOnlyTask task) {
            // An invalid query deadline matches no task. (Previously this relied
            // on equals(null) returning false; now explicit.)
            return deadlineKeyString != null && task.getDeadline().toString().equals(deadlineKeyString);
        }

        @Override
        public String toString() {
            // BUGFIX: guard against the NPE this threw when the deadline failed to parse.
            return "deadline=" + (deadlineKeyDeadline == null ? "" : String.join(", ", deadlineKeyDeadline.value));
        }
    }

    // @@author A0124591H
    /** Matches tasks with exactly the given priority number. */
    private class PriorityQualifier implements Qualifier {
        private int priorityNumber;

        PriorityQualifier(int priorityNumber) {
            this.priorityNumber = priorityNumber;
        }

        @Override
        public boolean run(ReadOnlyTask task) {
            return task.getPriority().value == priorityNumber;
        }

        @Override
        public String toString() {
            return "priority=" + String.join(", ", String.valueOf(priorityNumber));
        }
    }

    // @@author A0124591H
    /** Matches tasks whose completion value equals the query, case-insensitively. */
    private class CompletionQualifier implements Qualifier {
        private String completionValue;

        CompletionQualifier(String completionValue) {
            this.completionValue = completionValue;
        }

        @Override
        public boolean run(ReadOnlyTask task) {
            return String.valueOf(task.getCompletion().value).toLowerCase().equals(completionValue.toLowerCase());
        }

        @Override
        public String toString() {
            return "completion=" + String.join(", ", completionValue);
        }
    }
}
//@@author A0144885R
package seedu.address.model;

import java.util.ArrayList;
import java.util.Set;
import java.util.logging.Logger;

import javafx.collections.ObservableList;
import javafx.collections.transformation.FilteredList;
import seedu.address.commons.core.ComponentManager;
import seedu.address.commons.core.LogsCenter;
import seedu.address.commons.core.UnmodifiableObservableList;
import seedu.address.commons.events.model.TaskManagerChangedEvent;
import seedu.address.commons.util.CollectionUtil;
import seedu.address.commons.util.DateUtil;
import seedu.address.commons.util.StringUtil;
import seedu.address.model.task.Deadline;
import seedu.address.model.task.ReadOnlyTask;
import seedu.address.model.task.Task;
import seedu.address.model.task.TaskList.TaskNotFoundException;

/**
 * Represents the in-memory model of the address book data.
 * All changes to any model should be synchronized.
 */
public class ModelManager extends ComponentManager implements Model {
    private static final Logger logger = LogsCenter.getLogger(ModelManager.class);

    private final TaskManager taskManager;
    private FilteredList<ReadOnlyTask> filteredTasks;
    private TaskManager taskManagerCopy;
    private String flag;

    /**
     * Initializes a ModelManager with the given taskManager and userPrefs.
     */
    public ModelManager(ReadOnlyTaskManager taskManager, UserPrefs userPrefs) {
        super();
        assert !CollectionUtil.isAnyNull(taskManager, userPrefs);
        logger.fine("Initializing with address book: " + taskManager + " and user prefs " + userPrefs);
        this.taskManager = new TaskManager(taskManager);
        filteredTasks = new FilteredList<>(this.taskManager.getTaskList());
        this.taskManagerCopy = new TaskManager(taskManager);
        this.flag = "empty copy";
    }

    /** Starts from an empty task manager and default preferences. */
    public ModelManager() {
        this(new TaskManager(), new UserPrefs());
    }

    /** Replaces all model data, then broadcasts the change. */
    @Override
    public void resetData(ReadOnlyTaskManager newData) {
        taskManager.resetData(newData);
        indicateTaskManagerChanged();
    }

    @Override
    public ReadOnlyTaskManager getTaskManager() {
        return taskManager;
    }

    /** Raises an event to indicate the model has changed */
    public void indicateTaskManagerChanged() {
        raise(new TaskManagerChangedEvent(taskManager));
    }

    /** Removes the given task and broadcasts the change. */
    @Override
    public synchronized void deleteTask(ReadOnlyTask target) throws TaskNotFoundException {
        taskManager.removeTask(target);
        indicateTaskManagerChanged();
    }

    /** Adds a task, resets the filter so it is visible, and broadcasts. */
    @Override
    public synchronized void addTask(Task task) {
        taskManager.addTask(task);
        updateFilteredListToShowAll();
        indicateTaskManagerChanged();
    }

    /** Replaces the task at the given filtered-view index with the edited task. */
    @Override
    public void updateTask(int filteredTaskListIndex, ReadOnlyTask editedTask) {
        assert editedTask != null;
        // The view index must be mapped back onto the backing list first.
        final int backingIndex = filteredTasks.getSourceIndex(filteredTaskListIndex);
        taskManager.updateTask(backingIndex, editedTask);
        indicateTaskManagerChanged();
    }

    //@@author A0143504R
    /** Returns the stored snapshot of the task manager. */
    public TaskManager getCopy() {
        return taskManagerCopy;
    }

    /** Replaces the stored snapshot with a copy of the given data. */
    public void updateCopy(ReadOnlyTaskManager newData) {
        taskManagerCopy = new TaskManager(newData);
    }

    /** Sets the bookkeeping flag describing the snapshot state. */
    public void updateFlag(String newFlag) {
        flag = newFlag;
    }

    /** Returns the bookkeeping flag. */
    public String getFlag() {
        return this.flag;
    }

    @Override
    public UnmodifiableObservableList<ReadOnlyTask> getFilteredTaskList() {
        return new UnmodifiableObservableList<>(filteredTasks);
    }

    /** Clears the filter predicate so every task is shown. */
    @Override
    public void updateFilteredListToShowAll() {
        filteredTasks.setPredicate(null);
    }

    /** Filters the view to tasks matching any of the given keywords. */
    @Override
    public void updateFilteredTaskListByKeywords(Set<String> keywords) {
        final Qualifier byKeyword = new TaskQualifier(keywords);
        updateFilteredTaskList(new PredicateExpression(byKeyword));
    }

    private void updateFilteredTaskList(Expression expression) {
        filteredTasks.setPredicate(expression::satisfies);
    }

    /** Filters the view to tasks whose deadline matches the given one. */
    public void updateFilteredTaskListByDate(Deadline deadline) {
        final Qualifier byDate = new TaskQualifierByDate(deadline);
        updateFilteredTaskList(new PredicateExpression(byDate));
    }

    //@@author A0138377U
    /** Returns a snapshot of the currently visible tasks, in view order. */
    public ArrayList<ReadOnlyTask> getList() {
        return new ArrayList<>(filteredTasks);
    }

    /** Replaces the backing task list wholesale (no change event raised). */
    public void setList(ObservableList<ReadOnlyTask> listOfTasks) {
        taskManager.setTasks(listOfTasks);
    }

    /** Returns how many tasks the current filter admits. */
    public int getFilteredTasksSize () {
        return filteredTasks.size();
    }

    /** Collects every task whose status reads "Done", ignoring the active filter. */
    public ArrayList<ReadOnlyTask> getAllDoneTasks() {
        final ArrayList<ReadOnlyTask> doneTasks = new ArrayList<>();
        for (ReadOnlyTask candidate : new FilteredList<>(this.taskManager.getTaskList())) {
            if (candidate.getStatus().status.equals("Done")) {
                doneTasks.add(candidate);
            }
        }
        return doneTasks;
    }

    /** Removes a task without broadcasting — for batched deletions. */
    @Override
    public synchronized void deleteBulkTask(ReadOnlyTask target) throws TaskNotFoundException {
        taskManager.removeTask(target);
    }

    //@@author A0143504R
    interface Expression {
        boolean satisfies(ReadOnlyTask task);

        String toString();
    }

    private class PredicateExpression implements Expression {
        private final Qualifier qualifier;

        PredicateExpression(Qualifier qualifier) {
            this.qualifier = qualifier;
        }

        @Override
        public boolean satisfies(ReadOnlyTask task) {
            return qualifier.run(task);
        }

        @Override
        public String toString() {
            return qualifier.toString();
        }
    }

    interface Qualifier {
        boolean run(ReadOnlyTask task);

        String toString();
    }

    /** Matches a task when any keyword appears in its name, description or tags. */
    private class TaskQualifier implements Qualifier {
        private Set<String> keyWords;

        TaskQualifier(Set<String> keyWords) {
            this.keyWords = keyWords;
        }

        @Override
        public boolean run(ReadOnlyTask task) {
            return keyWords.stream().anyMatch(keyword ->
                    StringUtil.containsWordIgnoreCase(task.getName().name, keyword)
                    || StringUtil.containsWordIgnoreCase(task.getDescription().description, keyword)
                    || StringUtil.containsWordIgnoreCase(task.getTags().getTags(), keyword));
        }

        @Override
        public String toString() {
            return "name=" + String.join(", ", keyWords);
        }
    }

    /** Matches a task when its deadline agrees with the configured one. */
    private class TaskQualifierByDate implements Qualifier {
        private Deadline deadline;

        TaskQualifierByDate(Deadline deadline) {
            this.deadline = deadline;
        }

        @Override
        public boolean run(ReadOnlyTask task) {
            return DateUtil.isDeadlineMatch(task.getDeadline(), deadline);
        }

        @Override
        public String toString() {
            return "name=" + String.join(", ", deadline.toString());
        }
    }
}
package seedu.todo.controllers; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; import edu.emory.mathcs.backport.java.util.Arrays; import seedu.todo.commons.exceptions.UnmatchedQuotesException;

/**
 * Splits a raw command string into chunks, distinguishing quoted spans from
 * recognised tokens defined by the caller-supplied token definitions.
 *
 * NOTE(review): tokenize() is visibly unfinished — it builds tokenIndices and
 * getTokenType but always returns null at the end (see trailing TODO-style
 * comments). Callers should not rely on its return value yet.
 */
public class Tokenizer {

    private final static String QUOTE = "\"";

    // Internal value type: one chunk of the input plus flags recording whether
    // the chunk is a recognised token and whether it came from inside quotes.
    private static class TokenizedString {
        public String string;
        public boolean isToken;
        public boolean isQuote;

        TokenizedString(String string, boolean isToken, boolean isQuote) {
            this.string = string;
            this.isToken = isToken;
            this.isQuote = isQuote;
        }

        @Override
        public String toString() {
            return String.format("TokenizedString(%s, %s, %s)", this.string, isToken, isQuote);
        }
    }

    /**
     * Tokenizes inputCommand against the given definitions.
     *
     * @param tokenDefinitions map of tokenType -> array of token literals
     * @param inputCommand     raw user input; double-quoted spans are kept opaque
     * @return currently always null for non-empty input (implementation incomplete)
     * @throws UnmatchedQuotesException when the input has an odd number of '"'
     */
    public static HashMap<String, String[]> tokenize(HashMap<String, String[]> tokenDefinitions, String inputCommand)
            throws UnmatchedQuotesException {
        // Generate token -> tokenType mapping and list of tokens
        List<String> tokens = new ArrayList<String>();
        HashMap<String, String> getTokenType = new HashMap<String, String>();
        for (Map.Entry<String, String[]> tokenDefinition : tokenDefinitions.entrySet()) {
            String tokenType = tokenDefinition.getKey();
            for (String token : tokenDefinition.getValue()) {
                tokens.add(token);
                getTokenType.put(token, tokenType);
            }
        }
        if (inputCommand.length() == 0)
            return null;
        // Split inputCommand into arraylist of chunks
        if (StringUtils.countMatches(inputCommand, QUOTE) % 2 == 1)
            throw new UnmatchedQuotesException("Unmatched double-quotes detected.");
        String[] splitString = inputCommand.split(QUOTE);
        // If first char is QUOTE, then first element is a quoted string.
        // Odd-indexed elements of split(QUOTE) are the quoted spans (i % 2 == 1).
        List<TokenizedString> tokenizedSplitString = new ArrayList<TokenizedString>();
        for (int i = 0; i < splitString.length; i++) {
            tokenizedSplitString.add(new TokenizedString(splitString[i].trim(), false, (i % 2 == 1)));
        }
        Map<String, Integer> tokenIndices = new HashMap<String, Integer>();
        // NOTE(review): this loop mutates tokenizedSplitString (remove/addAll at
        // index i) while iterating by index; correctness relies on size() being
        // re-read each iteration, as the next comment points out.
        for (int i = 0; i < tokenizedSplitString.size(); i++) {
            // Java doesn't eager-evaluate the terminating condition
            TokenizedString currString = tokenizedSplitString.get(i);
            if (currString.isQuote)
                continue;
            if (currString.isToken) {
                tokenIndices.put(currString.string, i);
                tokens.remove(currString.string);
                continue;
            }
            // Try to match all the tokens
            for (String token : tokens) {
                Matcher m = Pattern.compile(String.format("\\b%s\\b", token)).matcher(currString.string);
                if (!m.find())
                    continue;
                // Found. Replace current element with split elements.
                String preString = currString.string.substring(0, m.start()).trim();
                String postString = currString.string.substring(m.end(), currString.string.length()).trim();
                tokenizedSplitString.remove(i);
                List<TokenizedString> replacedSplitStrings = new ArrayList<TokenizedString>();
                if (!preString.isEmpty())
                    replacedSplitStrings.add(new TokenizedString(preString, false, false));
                replacedSplitStrings.add(new TokenizedString(token, true, false));
                if (!postString.isEmpty())
                    replacedSplitStrings.add(new TokenizedString(postString, false, false));
                tokenizedSplitString.addAll(i, replacedSplitStrings);
                // Update currString and resume.
                currString = tokenizedSplitString.get(i);
                if (currString.isQuote)
                    break;
                if (currString.isToken) {
                    tokenIndices.put(currString.string, i);
                    tokens.remove(currString.string);
                    break;
                }
            }
        }
        // Get arraylist of indices
        // Get dictionary of tokenType -> index
        // Return dictionary of tokenType -> {token, tokenField}
        // NOTE(review): unfinished — the result described above is never built.
        return null;
    }
}
package tw.com.ehanlin.mde.dsl;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import tw.com.ehanlin.mde.dsl.action.*;
import tw.com.ehanlin.mde.util.EmptyObject;
import tw.com.ehanlin.mde.util.SpliceStringReader;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Recursive-descent parser for the MDE DSL.  A document is a sequence of
 * "@action(...)" declarations followed by a "{...}" content block; content
 * blocks map property names to nested Dsl nodes, and action arguments may
 * contain Mongo-style JSON lists/maps.
 */
public class DslParser {

    /** Shared instance; the parser keeps no per-parse mutable state. */
    public static DslParser instance = new DslParser();

    /**
     * Parses a whole DSL document.
     *
     * @param dsl the raw DSL text
     * @return the root Dsl, or EmptyObject.Dsl when the input contains no
     *         "{" content block at all
     */
    public Dsl parse(String dsl) {
        SpliceStringReader reader = new SpliceStringReader(dsl);
        SpliceStringReader.Matcher matcher = reader.splice(rootSymbols);
        if (matcher.finish()) {
            return EmptyObject.Dsl;
        }
        List<Action> actions = new ArrayList<>();  // FIX: diamond, was raw ArrayList
        do {
            switch (matcher.match()) {
                case "@" :
                    Action action = parseAction(reader);
                    if (action != null) {
                        actions.add(action);
                    }
                    break;
                case "{" :
                    // First "{" opens the root content block; the actions seen
                    // so far become the root node's actions.
                    Dsl result = new Dsl(actions);
                    parseContent(result, reader);
                    return result;
            }
        } while (!((matcher = reader.splice(rootSymbols)).finish()));
        return EmptyObject.Dsl;
    }

    // Symbol sets steering each splice() call at the various grammar levels.
    private static final List<String> rootSymbols = Arrays.asList("@", "{");
    private static final List<String> readActionOrPropertySymbols = Arrays.asList("@", "{", "}");
    private static final List<String> actionScopeSymbols = Arrays.asList("(", "<", "[");
    private static final List<String> actionInfoSymbols = Arrays.asList("=", ",", "[", "{", ")", ">", "]");
    private static final List<String> mongoSymbols = Arrays.asList(":", ",", "{", "[", "]", "}");

    // Literal-classification patterns, compiled once (used by parseMongoContent
    // and parseProperties).
    private static final Pattern propertyPattern = Pattern.compile("\\S+");
    private static final Pattern stringPattern = Pattern.compile("^(?:'(.*)'|\"(.*)\")$");
    private static final Pattern atPattern = Pattern.compile("^@.*");
    private static final Pattern longPattern = Pattern.compile("^[+-]?\\d+$");
    private static final Pattern doublePattern = Pattern.compile("^[+-]?\\d*\\.\\d+$");
    private static final Pattern booleanPattern = Pattern.compile("^(true|false)$", Pattern.CASE_INSENSITIVE);

    /**
     * Parses the body of a "{...}" block into `current`.  Property names found
     * before a symbol each get a child Dsl; actions accumulated since the last
     * property are attached to the NEXT property's node.
     */
    private void parseContent(Dsl current, SpliceStringReader reader) {
        List<Action> actions = new ArrayList<>();  // FIX: diamond, was raw ArrayList
        SpliceStringReader.Matcher matcher;
        loop:
        while (!((matcher = reader.splice(readActionOrPropertySymbols)).finish())) {
            Dsl lastDsl = null;
            for (String parseProperty : parseProperties(matcher.prefix().trim())) {
                lastDsl = new Dsl(actions);
                current.appendDsl(parseProperty, lastDsl);
                actions = new ArrayList<>();  // FIX: diamond, was raw ArrayList
            }
            switch (matcher.match()) {
                case "@" : {
                    // FIX: guard against null, consistent with parse() — a
                    // malformed action is skipped rather than stored as null.
                    Action action = parseAction(reader);
                    if (action != null) {
                        actions.add(action);
                    }
                    break;
                }
                case "{" : {
                    // NOTE(review): lastDsl is null when "{" follows no property
                    // name; behavior then depends on parseContent(null, ...) —
                    // preserved as-is, confirm intended grammar.
                    parseContent(lastDsl, reader);
                    break;
                }
                case "}" : {
                    break loop;
                }
            }
        }
    }

    /** Whitespace-splits a property prefix into individual property names. */
    private List<String> parseProperties(String properties) {
        List<String> result = new ArrayList<>();  // FIX: diamond, was raw ArrayList
        Matcher matcher = propertyPattern.matcher(properties);
        while (matcher.find()) {
            result.add(matcher.group());
        }
        return result;
    }

    /**
     * Classifies a scalar literal: quoted string, "@" reference (kept verbatim),
     * long, double, boolean — otherwise returned as the raw string.
     */
    private Object parseMongoContent(String content) {
        Matcher stringMatcher = stringPattern.matcher(content);
        if (stringMatcher.matches()) {
            String result = stringMatcher.group(1);          // single-quoted body
            return (result != null) ? result : stringMatcher.group(2);  // double-quoted body
        }
        if (atPattern.matcher(content).matches()) {
            return content;
        }
        if (longPattern.matcher(content).matches()) {
            return Long.parseLong(content);
        }
        if (doublePattern.matcher(content).matches()) {
            return Double.parseDouble(content);
        }
        Matcher booleanMatcher = booleanPattern.matcher(content);
        if (booleanMatcher.matches()) {
            // FIX: locale-safe comparison (was toLowerCase().equals("true"))
            return booleanMatcher.group(0).equalsIgnoreCase("true");
        }
        return content;
    }

    /**
     * Reads up to (and including) the matching `end` for an already-consumed
     * `start`, honoring nesting; returns the consumed text including both
     * delimiters.
     */
    private String parsePairSymbolsContent(String start, String end, SpliceStringReader reader) {
        List<String> symbols = Arrays.asList(start, end);
        StringBuilder result = new StringBuilder(start);
        int count = 1;  // FIX: primitive counter (was boxed Integer)
        SpliceStringReader.Matcher matcher;
        while (!((matcher = reader.splice(symbols)).finish())) {
            result.append(matcher.prefix());
            result.append(matcher.match());
            if (matcher.match().equals(start)) {
                count += 1;
            } else {
                count -= 1;
            }
            if (count <= 0) {
                return result.toString();
            }
        }
        return result.toString();
    }

    /** Parses a "[...]" list whose opening bracket was already consumed. */
    private BasicDBList parseMongoList(SpliceStringReader reader) {
        SpliceStringReader.Matcher matcher;
        BasicDBList result = new BasicDBList();
        while (!((matcher = reader.splice(mongoSymbols)).finish())) {
            switch (matcher.match()) {
                case "," : {
                    String value = matcher.prefix().trim();
                    if (value.length() > 0) {
                        result.add(parseMongoContent(value));
                    }
                    break;
                }
                case "{" :
                    result.add(parseMongoMap(reader));
                    break;
                case "[" :
                    result.add(parseMongoList(reader));
                    break;
                case "]" : {
                    String value = matcher.prefix().trim();
                    if (value.length() > 0) {
                        result.add(parseMongoContent(value));
                    }
                    return result;
                }
            }
        }
        return result;
    }

    /** Parses a "{...}" map whose opening brace was already consumed. */
    private BasicDBObject parseMongoMap(SpliceStringReader reader) {
        SpliceStringReader.Matcher matcher;
        BasicDBObject result = new BasicDBObject();
        String currentKey = null;
        while (!((matcher = reader.splice(mongoSymbols)).finish())) {
            switch (matcher.match()) {
                case ":" :
                    currentKey = matcher.prefix().trim();
                    break;
                case "," : {
                    String value = matcher.prefix().trim();
                    if (currentKey != null && value.length() > 0) {
                        result.append(currentKey, parseMongoContent(value));
                        currentKey = null;
                    }
                    break;
                }
                case "{" :
                    if (currentKey != null) {
                        result.append(currentKey, parseMongoMap(reader));
                        currentKey = null;
                    }
                    break;
                case "[" :
                    if (currentKey != null) {
                        result.append(currentKey, parseMongoList(reader));
                        currentKey = null;
                    }
                    break;
                case "}" : {
                    String value = matcher.prefix().trim();
                    if (currentKey != null && value.length() > 0) {
                        result.append(currentKey, parseMongoContent(value));
                    }
                    return result;
                }
            }
        }
        return result;
    }

    /**
     * Parses "key=value" action arguments up to the scope's closing symbol
     * (")", ">", or "]").  Scalar values are kept as raw strings; "[" / "{"
     * values become Mongo lists/maps.
     */
    private BasicDBObject parseActionInfo(SpliceStringReader reader) {
        SpliceStringReader.Matcher matcher;
        BasicDBObject result = new BasicDBObject();
        String currentKey = null;
        while (!((matcher = reader.splice(actionInfoSymbols)).finish())) {
            switch (matcher.match()) {
                case "=" :
                    currentKey = matcher.prefix().trim();
                    break;
                case "," : {
                    String value = matcher.prefix().trim();
                    if (currentKey != null && value.length() > 0) {
                        result.append(currentKey, value);
                        currentKey = null;
                    }
                    break;
                }
                case "[" :
                    if (currentKey != null) {
                        result.append(currentKey, parseMongoList(reader));
                        currentKey = null;
                    }
                    break;
                case "{" :
                    if (currentKey != null) {
                        result.append(currentKey, parseMongoMap(reader));
                        currentKey = null;
                    }
                    break;
                case ")" :
                case ">" :
                case "]" : {
                    String value = matcher.prefix().trim();
                    if (currentKey != null && value.length() > 0) {
                        result.append(currentKey, value);
                    }
                    return result;
                }
            }
        }
        return result;
    }

    /**
     * Parses one "@name(...)" / "@name<...>" / "@name[...]" declaration.  The
     * opening symbol selects the action's scope; the action name selects the
     * concrete Action subtype.  Returns null for an unknown action name or a
     * truncated declaration.
     */
    private Action parseAction(SpliceStringReader reader) {
        SpliceStringReader.Matcher matcher = reader.splice(actionScopeSymbols);
        if (matcher.finish()) {
            return null;
        }
        String actionName = matcher.prefix().trim();
        Action.Scope scope = null;
        switch (matcher.match()) {
            case "(" : scope = Action.Scope.PARENT; break;
            // NOTE(review): "SALF" is the constant's actual name in the Scope
            // enum (declared elsewhere); likely a typo for SELF in that enum.
            case "<" : scope = Action.Scope.SALF; break;
            case "[" : scope = Action.Scope.CHILD; break;
        }
        BasicDBObject infos = parseActionInfo(reader);
        switch (actionName) {
            case "find" :
                return new Find(scope, (String) infos.get("db"), (String) infos.get("coll"),
                        (BasicDBObject) infos.get("query"), (BasicDBObject) infos.get("projection"));
            case "findOne" :
                return new FindOne(scope, (String) infos.get("db"), (String) infos.get("coll"),
                        (BasicDBObject) infos.get("query"), (BasicDBObject) infos.get("projection"));
            case "findOneById" :
                return new FindOneById(scope, (String) infos.get("db"), (String) infos.get("coll"),
                        (BasicDBObject) infos.get("projection"));
            case "distinct" :
                return new Distinct(scope, (String) infos.get("db"), (String) infos.get("coll"),
                        (String) infos.get("key"), (BasicDBObject) infos.get("query"));
            case "count" :
                return new Count(scope, (String) infos.get("db"), (String) infos.get("coll"),
                        (BasicDBObject) infos.get("query"));
            case "aggregate" :
                return new Aggregate(scope, (String) infos.get("db"), (String) infos.get("coll"),
                        (BasicDBList) infos.get("pipelines"));
        }
        return null;
    }
}
package nitezh.ministock.domain;

import org.json.JSONException;
import org.json.JSONObject;

import static nitezh.ministock.domain.PortfolioStockRepository.PortfolioField;

/**
 * Immutable value object describing one portfolio entry (symbol, buy price,
 * buy date, quantity, alert limits, display name) and its JSON serialisation.
 */
public class PortfolioStock {

    // NOTE(review): symbol is stored but never read anywhere in this class
    // (no getter, not serialised in toJson) — confirm whether it is needed.
    private final String symbol;
    private final String price;
    private final String date;
    private final String quantity;
    private final String highLimit;
    private final String lowLimit;
    private final String customName;
    private final String symbol2;

    public PortfolioStock(String symbol, String price, String date, String quantity,
                          String highLimit, String lowLimit, String customName, String symbol2) {
        this.symbol = symbol;
        this.price = price;
        this.date = date;
        this.quantity = quantity;
        this.highLimit = highLimit;
        this.lowLimit = lowLimit;
        this.customName = customName;
        this.symbol2 = symbol2;
    }

    public String getPrice() {
        return price;
    }

    public String getDate() {
        return date;
    }

    public String getQuantity() {
        return quantity;
    }

    public String getHighLimit() {
        return highLimit;
    }

    public String getLowLimit() {
        return lowLimit;
    }

    public String getCustomName() {
        return customName;
    }

    private String getSymbol2() {
        return symbol2;
    }

    /** True when the string carries a value (non-null and non-empty). */
    private static boolean hasText(String s) {
        return s != null && !s.isEmpty();
    }

    /**
     * Stores {@code value} under the field's enum name, substituting the
     * sentinel "empty" for null/blank so every field is always present.
     */
    private void setJsonValue(JSONObject json, PortfolioField key, String value) {
        if (!hasText(value)) {
            value = "empty";
        }
        try {
            json.put(key.name(), value);
        } catch (JSONException ignored) {
            // put() only throws for null keys / non-finite numbers; neither can
            // occur with an enum name and a non-null String value.
        }
    }

    /** Serialises every persisted field (symbol itself is the map key upstream). */
    public JSONObject toJson() {
        JSONObject json = new JSONObject();
        this.setJsonValue(json, PortfolioField.PRICE, this.getPrice());
        this.setJsonValue(json, PortfolioField.DATE, this.getDate());
        this.setJsonValue(json, PortfolioField.QUANTITY, this.getQuantity());
        this.setJsonValue(json, PortfolioField.LIMIT_HIGH, this.getHighLimit());
        this.setJsonValue(json, PortfolioField.LIMIT_LOW, this.getLowLimit());
        this.setJsonValue(json, PortfolioField.CUSTOM_DISPLAY, this.getCustomName());
        this.setJsonValue(json, PortfolioField.SYMBOL_2, this.getSymbol2());
        return json;
    }

    /** True when the entry carries either a buy price or a custom display name. */
    public boolean hasData() {
        return hasText(this.getPrice()) || hasText(this.getCustomName());
    }
}
package uk.co.solong.rest2java; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.Properties; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; import org.jboss.jdeparser.FormatPreferences; import org.jboss.jdeparser.JBlock; import org.jboss.jdeparser.JClassDef; import org.jboss.jdeparser.JDeparser; import org.jboss.jdeparser.JExprs; import org.jboss.jdeparser.JFiler; import org.jboss.jdeparser.JMethodDef; import org.jboss.jdeparser.JMod; import org.jboss.jdeparser.JParamDeclaration; import org.jboss.jdeparser.JSourceFile; import org.jboss.jdeparser.JSources; import org.jboss.jdeparser.JStatement; import org.jboss.jdeparser.JType; import org.jboss.jdeparser.JTypes; import org.jboss.jdeparser.JVarDeclaration; import org.springframework.web.client.RestTemplate; import uk.co.solong.rest2java.spec.APISpec; import uk.co.solong.rest2java.spec.MandatoryParameter; import uk.co.solong.rest2java.spec.MandatoryPermaParam; import uk.co.solong.rest2java.spec.Method; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; @Mojo(name = "rest2java", defaultPhase = LifecyclePhase.GENERATE_SOURCES) public class Rest2Java extends AbstractMojo { @Parameter(defaultValue = "${basedir}/src/main/resources/schema.json") private File 
schemaFile; @Parameter(defaultValue = "false") private boolean writeToStdOut; @Parameter(defaultValue = "mypackage") private String targetPackage; @Parameter(defaultValue = "${project.build.directory}/generated-sources") private File outputDirectory; @Parameter(defaultValue = "${project}", readonly = true, required = true) private MavenProject project; public void execute() throws MojoExecutionException { getLog().info("Loading schema from file: " + schemaFile); getLog().info("Package is: "+targetPackage); if (!writeToStdOut) { getLog().info("Will write output to disk: " + outputDirectory); } else { getLog().info("Will write to STDOUT"); } if (!schemaFile.exists()) { throw new MojoExecutionException("No schema file provided"); } try { APISpec apiSpec = getApiSpec(); validate(apiSpec); JFiler filer = getFiler(); JSources rootSources = JDeparser.createSources(filer, new FormatPreferences(new Properties())); // String _package = apiSpec.getOrg() + "." + apiSpec.getApiName(); JSourceFile apiFile = rootSources.createSourceFile(targetPackage, apiSpec.getServiceName()); // apiFile._import() JClassDef apiClass = apiFile._class(JMod.PUBLIC | JMod.FINAL, apiSpec.getServiceName()); if (apiSpec.getMandatoryPermaParams().size() > 0) { JMethodDef constructorDef = apiClass.constructor(JMod.PUBLIC); for (MandatoryPermaParam mpp : apiSpec.getMandatoryPermaParams()) { JParamDeclaration param = constructorDef.param(JMod.FINAL, mpp.getType(), mpp.getJavaName()); JVarDeclaration field = apiClass.field(JMod.PRIVATE | JMod.FINAL, mpp.getType(), "_" + mpp.getJavaName()); constructorDef.body().assign(JExprs.$(field), JExprs.$(param)); } } // create the method bodies for (Method methodSpec : apiSpec.getMethods()) { // for each method, generate a builderclass String methodName = methodSpec.getMethodName(); String builderClassName = StringUtils.capitalize(methodName) + "Builder"; // = JTypes._(builderClassName); JSources currentBuilderSources = JDeparser.createSources(filer, new 
FormatPreferences(new Properties())); JSourceFile currentBuilderFile = currentBuilderSources.createSourceFile(targetPackage, builderClassName); JClassDef currentBuilderClass = currentBuilderFile._class(JMod.PUBLIC | JMod.FINAL, builderClassName); JType returnType = currentBuilderClass.erasedType(); // import org.springFramework.web.client.RestTemplate; currentBuilderFile._import(RestTemplate.class); // private RestTemplate restTemplate JVarDeclaration templateField = currentBuilderClass.field(JMod.PRIVATE, RestTemplate.class, "restTemplate"); // public BootLinodeBuilder() { JMethodDef builderConstructorDef = currentBuilderClass.constructor(JMod.PUBLIC); // restTemplate = new RestTemplate(); builderConstructorDef.body().assign(JExprs.$(templateField), JTypes._(RestTemplate.class)._new()); // method name JMethodDef currentMethod = apiClass.method(JMod.PUBLIC | JMod.FINAL, returnType, methodName); // method parameters for (MandatoryParameter mp : methodSpec.getMandatoryParameters()) { JParamDeclaration vap = currentMethod.param(JMod.FINAL, mp.getType(), mp.getJavaName()); } JBlock block = currentMethod.body(); // JExprs. // final SubmitNameBuilder result = new SubmitNameBuilder(); JVarDeclaration resultDeclaration = block.var(JMod.FINAL, returnType, "result", returnType._new()); // currentBuilderFile._import(returnType); // block.call(JExprs.$(resultDeclaration), // "setTemplate").arg(JExprs.$(templateDeclaration)); // block.assign(JExprs.$(d), JExprs._new(returnType)) ; JStatement t = block._return(JExprs.$(resultDeclaration)); currentBuilderSources.writeSources(); } rootSources.writeSources(); if (!writeToStdOut) { writeToFile(); getLog().info("Adding compiled source:" + outputDirectory.getPath()); project.addCompileSourceRoot(outputDirectory.getPath()); } else { getLog().info("STDOUT is enabled. 
Not adding compiled source to maven classpath"); printToStdOut(); } } catch (IOException e) { throw new MojoExecutionException("Schema format is invalid:", e); } } private void printToStdOut() { // TODO Auto-generated method stub for (Key key: sourceFiles.keySet()) { ByteArrayOutputStream s = sourceFiles.get(key); System.out.println(new String(s.toByteArray())); } } private void writeToFile() throws IOException { for (Key key: sourceFiles.keySet()) { ByteArrayOutputStream s = sourceFiles.get(key); File f = new File(outputDirectory + key.toDirectory()); if (!f.exists()) { getLog().info("Output directory does not exist. Creating: " + f.getCanonicalPath()); f.mkdirs(); } else { getLog().debug ("Output directory exists: " + f.getCanonicalPath()); } String targetFile = outputDirectory + key.toFileName(); getLog().info("Writing " + targetFile); OutputStream stream = new FileOutputStream(targetFile); s.writeTo(stream); } } private APISpec getApiSpec() throws IOException, JsonParseException, JsonMappingException { ObjectMapper objectMapper = new ObjectMapper(); APISpec apiSpec = objectMapper.readValue(schemaFile, APISpec.class); return apiSpec; } private void validate(APISpec apiSpec) { // TODO Auto-generated method stub // Validate.notBlank(apiSpec.getApiName()); Validate.notBlank(targetPackage, "Package must not be null"); Validate.notBlank(apiSpec.getServiceName(), "ServiceName must not be null"); for (Method m : apiSpec.getMethods()) { Validate.notBlank(m.getMethodName(), "Method name must not be blank"); for (MandatoryParameter mp : m.getMandatoryParameters()) { Validate.notBlank(mp.getJavaName(), "Parameter name must be specified in method {}", m.getMethodName()); Validate.notBlank(mp.getType(), "Parameter type must be specified for {} in method {}", mp.getJavaName(), m.getMethodName()); } } } private final ConcurrentMap<Key, ByteArrayOutputStream> sourceFiles = new ConcurrentHashMap<>(); /* private final JFiler filerold = new JFiler() { public OutputStream 
openStream(final String packageName, final String fileName) throws IOException { getLog().info("Writing for " + fileName); final Key key = new Key(packageName, fileName + ".java"); if (!sourceFiles.containsKey(key)) { OutputStream stream = null; if (writeToStdOut) { stream = System.out; } else { File f = new File(outputDirectory + key.toDirectory()); if (!f.exists()) { getLog().info("Output directory does not exist. Creating: " + f.getCanonicalPath()); f.mkdirs(); } else { getLog().info("Output directory exists " + f.getCanonicalPath()); } String targetFile = outputDirectory + key.toFileName(); getLog().info("Writing" + targetFile); stream = new FileOutputStream(targetFile); } if (sourceFiles.putIfAbsent(key, stream) == null) { return stream; } } throw new IOException("Already exists"); } };*/ public JFiler getFiler() { return filer; } /* * public ByteArrayInputStream openFile(String packageName, String fileName) * throws FileNotFoundException { final FileOutputStream out = * sourceFiles.get(new Key(packageName, fileName)); if (out == null) throw * new FileNotFoundException("No file found for package " + packageName + * " file " + fileName); return new ByteArrayInputStream(out.toByteArray()); * } */ static final class Key { private final String packageName; private final String fileName; private final String packagePath; Key(final String packageName, final String fileName) { this.packageName = packageName; this.fileName = fileName; this.packagePath = packageName.replace(".", slash); } public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final Key key = (Key) o; return fileName.equals(key.fileName) && packageName.equals(key.packageName); } public int hashCode() { int result = packageName.hashCode(); result = 31 * result + fileName.hashCode(); return result; } private static final String slash = File.separator; public String toFileName() { System.out.println(slash); return new 
StringBuilder().append(slash).append(packagePath).append(slash).append(fileName).toString(); } public String toDirectory() { return new StringBuilder().append(slash).append(packagePath).append(slash).toString(); } } public File getSchemaFile() { return schemaFile; } public void setSchemaFile(File schemaFile) { this.schemaFile = schemaFile; } public boolean isWriteToStdOut() { return writeToStdOut; } public void setWriteToStdOut(boolean writeToStdOut) { this.writeToStdOut = writeToStdOut; } public File getOutputDirectory() { return outputDirectory; } public void setOutputDirectory(File outputDirectory) { this.outputDirectory = outputDirectory; } public String getTargetPackage() { return targetPackage; } public void setTargetPackage(String targetPackage) { this.targetPackage = targetPackage; } private final JFiler filer = new JFiler() { public OutputStream openStream(final String packageName, final String fileName) throws IOException { final Key key = new Key(packageName, fileName + ".java"); if (! sourceFiles.containsKey(key)) { final ByteArrayOutputStream stream = new ByteArrayOutputStream(); if (sourceFiles.putIfAbsent(key, stream) == null) { return stream; } } throw new IOException("Already exists"); } }; }
package org.adligo.models.params.client; /** * Title: * Description: <p>This is a generic and reuseable implementation of the * I_TemplateParams interface. It relies on the TemplateParam * class for storing the name, values and nested I_TemplateParams. * Company: Adligo * @author scott@adligo.com * @version 1.3 */ import java.util.Date; import org.adligo.i.log.client.Log; import org.adligo.i.log.client.LogFactory; import org.adligo.i.util.client.I_Iterator; import org.adligo.i.util.client.I_Map; import org.adligo.i.util.client.MapFactory; import org.adligo.i.util.client.StringUtils; public class Params implements I_MultipleParamsObject { private static final long serialVersionUID = 1L; static final Log log = LogFactory.getLog(Params.class); /** * this version number represents the xml format and should be incremented * only if the format changes */ public static final String CLASS_VERSION = new String("1.5"); private I_Map // String, I_OneOrN paramsMap = MapFactory.create();// holds TemplateParam objects private I_OneOrN m_currentGroup = null; private int counntForThisName = 0; private I_TemplateParams param; // the current param that was selected by // getNextParam(String s) /** Constructors */ public Params() { } /** * This creates a Param object using the parameters and adds it to the * Collection of Param objects. 
*/ public Param addParam(String name, String value, I_TemplateParams params) { Param parm = new Param(name,params); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, String value) { Param parm = new Param(name); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Integer value, I_TemplateParams params) { Param parm = new Param(name,params); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Integer value) { Param parm = new Param(name); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Short value, I_TemplateParams params) { Param parm = new Param(name,params); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Short value) { Param parm = new Param(name); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Long value, I_TemplateParams params) { Param parm = new Param(name,params); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Long value) { Param parm = new Param(name); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Double value, I_TemplateParams params) { Param parm = new Param(name,params); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Double value) { Param parm = new Param(name); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Float value, I_TemplateParams params) { Param parm = new Param(name,params); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Float value) { Param parm = new Param(name); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Date value, I_TemplateParams params) { Param parm = new Param(name,params); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, 
Date value) { Param parm = new Param(name); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Boolean value, I_TemplateParams params) { Param parm = new Param(name,params); parm.setValue(value); addParam(parm); return parm; } public Param addParam(String name, Boolean value) { Param parm = new Param(name); parm.setValue(value); addParam(parm); return parm; } /** * returns the parameter created * @param name * @return */ public Param addParam(String name) { Param parm = new Param(name); addParam(parm); return parm; } /** * returns the children of the parameter created * @param name * @return */ public Params addParams(String name) { Params toRet = new Params(); Param parm = new Param(name,toRet); addParam(parm); return toRet; } public Params addWhereParams() { return this.addParams(Param.WHERE); } public Param addParam(String name, I_TemplateParams params) { Param parm = new Param(name,params); addParam(parm); return parm; } public Param addParam(String name, String operator, String value ) { Param parm = new Param(name,operator, value); addParam(parm); return parm; } public Param addParam(String name, String operator, Integer value ) { Param parm = new Param(name,operator, value); addParam(parm); return parm; } public Param addParam(String name, String operator, Short value ) { Param parm = new Param(name,operator, value); addParam(parm); return parm; } public Param addParam(String name, String operator, Long value ) { Param parm = new Param(name,operator, value); addParam(parm); return parm; } public Param addParam(String name, String operator, Double value ) { Param parm = new Param(name,operator, value); addParam(parm); return parm; } public Param addParam(String name, String operator, Float value ) { Param parm = new Param(name,operator, value); addParam(parm); return parm; } public Param addParam(String name, String operator, Date value ) { Param parm = new Param(name,operator, value); addParam(parm); return parm; } public 
Param addParam(String name, String operator, Boolean value ) {
    Param parm = new Param(name, operator, value);
    addParam(parm);
    return parm;
}

/* ------------------------------------------------------------------
 * addParam overloads taking a raw operator array.  Each wraps the
 * array in an Operators instance, stores the typed value on a fresh
 * Param and registers it through addParam(I_TemplateParams).
 * ------------------------------------------------------------------ */

public Param addParam(String name, String[] operators, String value) {
    Param parm = new Param(name, new Operators(operators));
    parm.setValue(value);
    addParam(parm);
    return parm;
}

/** Adds a value-less param; the array is handed straight to Param. */
public Param addParam(String name, String[] operators) {
    Param parm = new Param(name, operators);
    addParam(parm);
    return parm;
}

public Param addParam(String name, String[] operators, Integer value) {
    Param parm = new Param(name, new Operators(operators));
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, String[] operators, Short value) {
    Param parm = new Param(name, new Operators(operators));
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, String[] operators, Long value) {
    Param parm = new Param(name, new Operators(operators));
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, String[] operators, Double value) {
    Param parm = new Param(name, new Operators(operators));
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, String[] operators, Float value) {
    Param parm = new Param(name, new Operators(operators));
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, String[] operators, Date value) {
    Param parm = new Param(name, new Operators(operators));
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, String[] operators, Boolean value) {
    Param parm = new Param(name, new Operators(operators));
    parm.setValue(value);
    addParam(parm);
    return parm;
}

/* ------------------------------------------------------------------
 * addParam overloads taking a ready-made I_Operators instance.
 * ------------------------------------------------------------------ */

public Param addParam(String name, I_Operators operators, String value) {
    Param parm = new Param(name, operators);
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, I_Operators operators) {
    Param parm = new Param(name, operators);
    addParam(parm);
    return parm;
}

public Param addParam(String name, I_Operators operators, Integer value) {
    Param parm = new Param(name, operators);
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, I_Operators operators, Short value) {
    Param parm = new Param(name, operators);
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, I_Operators operators, Long value) {
    Param parm = new Param(name, operators);
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, I_Operators operators, Double value) {
    Param parm = new Param(name, operators);
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, I_Operators operators, Float value) {
    Param parm = new Param(name, operators);
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, I_Operators operators, Date value) {
    Param parm = new Param(name, operators);
    parm.setValue(value);
    addParam(parm);
    return parm;
}

public Param addParam(String name, I_Operators operators, Boolean value) {
    Param parm = new Param(name, operators);
    parm.setValue(value);
    addParam(parm);
    return parm;
}

/**
 * Adds an I_TemplateParams to this container.
 * <p>
 * Params with the same name are grouped: the first is held in a
 * SingleParamContainer, a second arrival promotes the group to an
 * NParamContainer, and further arrivals append to that group.
 *
 * @param p the param to add; must be non-null with a non-null name
 * @throws NullPointerException if p or p.getName() is null
 */
public void addParam(I_TemplateParams p) {
    if (p == null) {
        throw new NullPointerException("Can't contain a null item");
    }
    if (p.getName() == null) {
        throw new NullPointerException("Can't contain a param with a null name");
    }
    I_OneOrN container = (I_OneOrN) paramsMap.get(p.getName());
    if (container == null) {
        // First param under this name: single-item container.
        SingleParamContainer toAdd = new SingleParamContainer();
        toAdd.setItem(p);
        paramsMap.put(p.getName(), toAdd);
    } else if (container.size() == 1) {
        // Second param under this name: promote to an N-item container.
        NParamContainer newGroup = new NParamContainer();
        newGroup.addItem(container.get(0));
        newGroup.addItem(p);
        paramsMap.put(p.getName(), newGroup);
    } else {
        NParamContainer currentGroup = (NParamContainer) container;
        currentGroup.addItem(p);
    }
    try {
        ((Param) p).setParent(this);
    } catch (ClassCastException ignored) {
        // p is some other I_TemplateParams implementation; no parent to set.
    }
}

/**
 * Removes a param from its name group; the group itself is removed when
 * its last member goes.
 *
 * @param p the param to remove; must be non-null with a non-null name
 * @throws NullPointerException if p or p.getName() is null
 */
public void removeParam(I_TemplateParams p) {
    if (p == null) {
        throw new NullPointerException("Can't contain a null item");
    }
    if (p.getName() == null) {
        throw new NullPointerException("Can't contain a param with a null name");
    }
    I_OneOrN container = (I_OneOrN) paramsMap.get(p.getName());
    if (container == null) {
        // FIX: previously dereferenced container unconditionally, throwing an
        // undocumented NPE when no param with this name was registered.
        return;
    }
    if (container.size() == 1) {
        paramsMap.remove(p.getName());
    } else {
        NParamContainer currentGroup = (NParamContainer) container;
        currentGroup.removeItem(p);
    }
}

/**
 * Implementation of I_TemplateParams; resets the getNextParam cursor.
 */
public void First() {
    m_currentGroup = null;
    counntForThisName = 0;
}

/**
 * Implementation of I_TemplateParams; delegates to the current param.
 */
public Object[] getValues() {
    if (param != null) {
        return param.getValues();
    }
    return null;
}

/**
 * Implementation of I_TemplateParams; delegates to the current param.
 */
public I_Operators getOperators() {
    if (param != null) {
        return param.getOperators();
    }
    return null;
}

/** @return the current param's name, or the empty string if no current param */
public String getName() {
    String r = "";
    if (param != null) {
        r = param.getName();
    }
    return r;
}

/**
 * Implementation of I_TemplateParams; delegates to the current param.
 */
public I_TemplateParams getNestedParams() {
    if (param != null) {
        return param.getNestedParams();
    }
    return null;
}

public I_TemplateParams getCurrentParam() {
    return param;
}

public void removeAllParams(String name) {
    paramsMap.remove(name);
}

/**
 * Implementation of I_TemplateParams; advances the cursor through the
 * group of params registered under name s.  Call First() to restart.
 *
 * @param s the param name to iterate; null yields false
 * @return true if a param was positioned, false when exhausted or unknown
 */
public boolean getNextParam(String s) {
    if (s == null) {
        return false;
    }
    if (log.isDebugEnabled()) {
        log.debug("getNextParamFool =" + s);
    }
    I_OneOrN currentGroup = (I_OneOrN) this.paramsMap.get(s);
    if (currentGroup == null) {
        if (log.isDebugEnabled()) {
            log.debug("got null I_OneOrN returning");
        }
        param = null;
        return false;
    }
    if (m_currentGroup != null) {
        // Same group instance as the previous call: advance the cursor.
        if (m_currentGroup == currentGroup) {
            counntForThisName++;
            if (log.isDebugEnabled()) {
                log.debug("got same I_OneOrN count is now " + counntForThisName);
            }
            param = m_currentGroup.get(counntForThisName);
            return param != null;
        }
    }
    // NOTE(review): switching to a different name does not reset
    // counntForThisName; callers appear expected to call First() between
    // names — confirm before changing.
    m_currentGroup = currentGroup;
    param = m_currentGroup.get(0);
    return param != null;
}

@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("Params to String \n");
    I_Iterator it = paramsMap.getIterator();
    boolean first = true;
    while (it.hasNext()) {
        if (!first) {
            sb.append(",");
        }
        first = false; // FIX: flag was never cleared, so separators never appeared
        sb.append(it.next());
    }
    return sb.toString();
}

/** Serializes this Params to XML using a fresh builder. */
public String writeXML() {
    XMLBuilder builder = new XMLBuilder();
    writeXML(builder);
    return builder.getBuffer().toString();
}

public String writeXML(XMLBuilder sb) {
    writeXML(sb, "");
    return sb.toString();
}

/**
 * Writes this container as an XML object element; each contained param
 * is serialized as a nested element.
 *
 * @param sb   the builder to append to
 * @param name element name attribute; falls back to Param.PARAMS when empty
 */
public void writeXML(XMLBuilder sb, String name) {
    sb.indent();
    sb.append(XMLObject.OBJECT_HEADER);
    sb.append(" ");
    sb.append(XMLObject.CLASS);
    sb.append("=\"");
    sb.append(ClassForNameMap.PARAMS_SHORT_NAME);
    sb.append("\" ");
    sb.append(XMLObject.VERSION);
    sb.append("=\"");
    sb.append(CLASS_VERSION);
    sb.append("\" ");
    sb.append(XMLObject.NAME);
    sb.append("=\"");
    if (!StringUtils.isEmpty(name)) {
        sb.append(name);
    } else {
        sb.append(Param.PARAMS);
    }
    sb.append("\" >");
    sb.lineFeed();
    sb.addIndentLevel();
    I_Iterator it = paramsMap.getIterator();
    while (it.hasNext()) {
        I_OneOrN items = (I_OneOrN) paramsMap.get(it.next());
        for (int i = 0; i < items.size(); i++) {
            ((I_XML_Serilizable) items.get(i)).writeXML(sb);
            if (log.isDebugEnabled()) {
                log.debug(sb.toString());
            }
        }
    }
    sb.removeIndentLevel();
    sb.indent();
    sb.append(XMLObject.OBJECT_ENDER);
    sb.lineFeed();
    if (log.isDebugEnabled()) {
        log.debug(sb.toString());
    }
}

public void readXML(String s) {
    readXML(s, null);
}

/**
 * Parses the XML produced by writeXML and adds each decoded param to
 * this container.
 *
 * @param s    the XML text
 * @param name the element name; read from the header attribute when null
 */
public void readXML(String s, String name) {
    if (log.isDebugEnabled()) {
        log.debug("Reading XML in Params\n" + s);
    }
    int[] iaVectorTags = Parser.getTagIndexs(s, XMLObject.OBJECT_HEADER,
            XMLObject.OBJECT_ENDER);
    // get vector element
    s = s.substring(iaVectorTags[0], iaVectorTags[1]);
    int[] iaVectorHeader = Parser.getTagIndexs(s, XMLObject.OBJECT_HEADER, ">");
    // get vector header
    String sVectorHeader = s.substring(iaVectorHeader[0], iaVectorHeader[1]);
    if (name == null) {
        name = Parser.getAttributeValue(sVectorHeader, XMLObject.NAME);
    }
    int[] iaObject = Parser.getTagIndexs(s, XMLObject.OBJECT_HEADER, ">");
    // remove object header name=vParmas
    s = s.substring(iaObject[1] + 1, s.length());
    iaObject = Parser.getTagIndexs(s, XMLObject.OBJECT_HEADER,
            XMLObject.OBJECT_ENDER);
    while (iaObject[1] > 10 && iaObject[0] >= 0) {
        String sVectorObject = s.substring(iaObject[0], iaObject[1]);
        if (log.isDebugEnabled()) {
            log.debug("readXML:\n" + sVectorObject);
        }
        this.addParam((I_TemplateParams) XMLObject.readXML(sVectorObject));
        s = s.substring(iaObject[1] + 1, s.length());
        iaObject = Parser.getTagIndexs(s, XMLObject.OBJECT_HEADER,
                XMLObject.OBJECT_ENDER);
    }
}

public String getClassVersion() {
    return CLASS_VERSION;
}

/**
 * This is a utility method for manipulating I_TemplateParams objects.
 * It encapsulates the ability to add a param to another param object
 * without adding duplicate (named getName()) params.
 *
 * @param pAddTo        the param to add the other param to
 * @param p             the param to add, so pAddTo.add(p)
 * @param bAddDuplicate if it should add a param even when one with the
 *                      same name already exists
 * @return the pAddTo object if not null; the p object if pAddTo is null
 *         and p is not null
 */
public static I_TemplateParams addParam(I_TemplateParams pAddTo,
        I_TemplateParams p, boolean bAddDuplicate) {
    if (pAddTo == null) {
        return p;
    } else {
        pAddTo.First();
        // if there isn't a param already
        if (!pAddTo.getNextParam(p.getName())) {
            return addParamToParam(pAddTo, p);
        } else if (bAddDuplicate) {
            return addParamToParam(pAddTo, p);
        } else {
            // didn't add anything so we return the same thing
            return pAddTo;
        }
    }
}

/**
 * Adds param p to pAddTo without looking for duplicates.
 *
 * @param pAddTo the target container (wrapped in a new Params when it
 *               cannot hold multiple params itself)
 * @param p      the param to add
 * @return pAddTo, or a new Params holding both
 */
private static I_TemplateParams addParamToParam(I_TemplateParams pAddTo,
        I_TemplateParams p) {
    if (pAddTo instanceof I_MultipleParamsObject) {
        ((I_MultipleParamsObject) pAddTo).addParam(p);
        return pAddTo;
    } else {
        Params params = new Params();
        params.addParam(pAddTo);
        params.addParam(p);
        return params;
    }
}

@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((paramsMap == null) ? 0 : paramsMap.hashCode());
    return result;
}

@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null)
        return false;
    if (getClass() != obj.getClass())
        return false;
    Params other = (Params) obj;
    if (paramsMap == null) {
        if (other.paramsMap != null)
            return false;
    } else if (!paramsMap.equals(other.paramsMap))
        return false;
    return true;
}

@Override
public short[] getValueTypes() {
    return this.param.getValueTypes();
}
}
package org.appwork.remoteapi;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.TreeMap;

import org.appwork.net.protocol.http.HTTPConstants;
import org.appwork.net.protocol.http.HTTPConstants.ResponseCode;
import org.appwork.storage.InvalidTypeException;
import org.appwork.storage.JSonStorage;
import org.appwork.storage.config.annotations.AllowStorage;
import org.appwork.utils.logging.Log;
import org.appwork.utils.net.HTTPHeader;

/**
 * Reflection-based dispatcher for a remote-API interface: indexes the
 * interface's methods by name and (storable) parameter count, validates that
 * every parameter and return type can be JSON-serialized, and exposes a
 * built-in "help" call describing the API.
 *
 * @author thomas
 */
public class InterfaceHandler<T> {
    /** The reserved built-in help method, resolved once at class load. */
    private static Method HELP;
    static {
        try {
            InterfaceHandler.HELP = InterfaceHandler.class.getMethod("help", new Class[] { RemoteAPIRequest.class, RemoteAPIResponse.class });
        } catch (final SecurityException e) {
            Log.exception(e);
        } catch (final NoSuchMethodException e) {
            Log.exception(e);
        }
    }

    /**
     * Builds and parses a handler for the given interface/implementation pair.
     *
     * @param c the remote API interface class
     * @param x the implementation the calls are dispatched to
     * @param defaultAuthLevel auth level used for methods without an
     *            {@link ApiAuthLevel} annotation
     * @throws ParseException if the interface contains ambiguous or
     *             non-serializable methods
     */
    public static <T extends RemoteAPIInterface> InterfaceHandler<T> create(final Class<T> c, final RemoteAPIInterface x, final int defaultAuthLevel) throws ParseException, SecurityException, NoSuchMethodException {
        final InterfaceHandler<T> ret = new InterfaceHandler<T>(c, x, defaultAuthLevel);
        ret.parse();
        return ret;
    }

    private final RemoteAPIInterface impl;
    private final Class<T> interfaceClass;
    /** method name -> (storable parameter count -> method). */
    private final TreeMap<String, TreeMap<Integer, Method>> methods;
    /** method -> count of parameters excluding request/response. */
    private final HashMap<Method, Integer> parameterCountMap;
    private final HashMap<Method, Integer> methodsAuthLevel;
    private final int defaultAuthLevel;
    private boolean sessionRequired = false;

    private InterfaceHandler(final Class<T> c, final RemoteAPIInterface x, final int defaultAuthLevel) throws SecurityException, NoSuchMethodException {
        this.interfaceClass = c;
        this.impl = x;
        this.methods = new TreeMap<String, TreeMap<Integer, Method>>();
        TreeMap<Integer, Method> map;
        this.methods.put("help", map = new TreeMap<Integer, Method>());
        this.defaultAuthLevel = defaultAuthLevel;
        map.put(0, InterfaceHandler.HELP);
        this.parameterCountMap = new HashMap<Method, Integer>();
        this.parameterCountMap.put(InterfaceHandler.HELP, 0);
        this.methodsAuthLevel = new HashMap<Method, Integer>();
        this.methodsAuthLevel.put(InterfaceHandler.HELP, 0);
    }

    /** @return the method's annotated auth level, or the handler default. */
    public int getAuthLevel(final Method m) {
        final Integer auth = this.methodsAuthLevel.get(m);
        if (auth != null) { return auth; }
        return this.defaultAuthLevel;
    }

    /**
     * Resolves a method by name and storable-parameter count.
     *
     * @return the matching method, or null if unknown
     */
    public Method getMethod(final String methodName, final int length) {
        if (methodName.equals(InterfaceHandler.HELP.getName())) { return InterfaceHandler.HELP; }
        final TreeMap<Integer, Method> methodsByName = this.methods.get(methodName);
        if (methodsByName == null) { return null; }
        return methodsByName.get(length);
    }

    /**
     * @return the storable parameter count recorded by {@link #parse()}.
     *         NOTE(review): unboxes and throws NPE for methods not handled by
     *         this handler — callers must pass methods obtained via
     *         {@link #getMethod(String, int)}.
     */
    public int getParameterCount(final Method method) {
        return this.parameterCountMap.get(method);
    }

    /**
     * Built-in API call that writes a plain-text description of every exposed
     * method (name, documentation, parameters, example call path).
     */
    public void help(final RemoteAPIRequest request, final RemoteAPIResponse response) throws InstantiationException, IllegalAccessException, UnsupportedEncodingException, IOException {
        final StringBuilder sb = new StringBuilder();
        sb.append(this.interfaceClass.getName());
        sb.append("\r\n\r\n");
        Entry<String, TreeMap<Integer, Method>> next;
        for (final Iterator<Entry<String, TreeMap<Integer, Method>>> it = this.methods.entrySet().iterator(); it.hasNext();) {
            next = it.next();
            for (final Method m : next.getValue().values()) {
                if (m == InterfaceHandler.HELP) {
                    sb.append("\r\n====- " + m.getName() + " -====");
                    sb.append("\r\n    Description: This Call");
                    sb.append("\r\n           Call: ");
                    sb.append("/" + m.getName() + "\r\n");
                    continue;
                }
                String name = m.getName();
                final ApiMethodName methodname = m.getAnnotation(ApiMethodName.class);
                if (methodname != null) {
                    name = methodname.value();
                }
                sb.append("\r\n====- " + name + " -====");
                final ApiDoc an = m.getAnnotation(ApiDoc.class);
                if (an != null) {
                    sb.append("\r\n    Description: ");
                    sb.append(an.value() + "");
                }
                // Count identically-typed parameters to give each a unique
                // positional suffix in the example call.
                final HashMap<Type, Integer> map = new HashMap<Type, Integer>();
                String call = "/" + name;
                int count = 0;
                for (int i = 0; i < m.getGenericParameterTypes().length; i++) {
                    if (m.getParameterTypes()[i] == RemoteAPIRequest.class || m.getParameterTypes()[i] == RemoteAPIResponse.class) {
                        continue;
                    }
                    count++;
                    // FIX: separator used to depend on the raw index i, which
                    // produced a leading '&' (instead of '?') whenever the
                    // first declared parameters were the skipped
                    // request/response objects.
                    if (count > 1) {
                        call += "&";
                    } else {
                        call += "?";
                    }
                    Integer num = map.get(m.getParameterTypes()[i]);
                    if (num == null) {
                        map.put(m.getParameterTypes()[i], 0);
                        num = 0;
                    }
                    num++;
                    call += m.getParameterTypes()[i].getSimpleName() + "" + num;
                    sb.append("\r\n      Parameter: " + count + " - " + m.getParameterTypes()[i].getSimpleName() + "" + num);
                    map.put(m.getParameterTypes()[i], num);
                }
                sb.append("\r\n           Call: " + call);
                sb.append("\r\n");
            }
        }
        response.setResponseCode(ResponseCode.SUCCESS_OK);
        final String text = sb.toString();
        final int length = text.getBytes("UTF-8").length;
        response.getResponseHeaders().add(new HTTPHeader(HTTPConstants.HEADER_REQUEST_CONTENT_LENGTH, length + ""));
        response.getResponseHeaders().add(new HTTPHeader(HTTPConstants.HEADER_REQUEST_CONTENT_TYPE, "text"));
        response.getOutputStream().write(text.getBytes("UTF-8"));
    }

    /**
     * Invokes the method on this handler (built-ins) or on the implementation.
     */
    public Object invoke(final Method method, final Object[] parameters) throws IllegalArgumentException, IllegalAccessException, InvocationTargetException {
        if (method.getDeclaringClass() == InterfaceHandler.class) {
            return method.invoke(this, parameters);
        } else {
            return method.invoke(this.impl, parameters);
        }
    }

    /** @return the sessionRequired */
    public boolean isSessionRequired() {
        return this.sessionRequired;
    }

    /**
     * Indexes every visible method of the interface by name and storable
     * parameter count, validating serializability along the way.
     *
     * @throws ParseException on ambiguous or invalid methods
     */
    private void parse() throws ParseException {
        for (final Method m : this.interfaceClass.getMethods()) {
            final ApiHiddenMethod hidden = m.getAnnotation(ApiHiddenMethod.class);
            if (hidden != null) {
                continue;
            }
            this.validateMethod(m);
            String name = m.getName();
            final ApiMethodName methodname = m.getAnnotation(ApiMethodName.class);
            if (methodname != null) {
                name = methodname.value();
            }
            TreeMap<Integer, Method> methodsByName = this.methods.get(name);
            if (methodsByName == null) {
                methodsByName = new TreeMap<Integer, Method>();
                this.methods.put(name, methodsByName);
            }
            // Count only parameters that arrive over the wire.
            int l = 0;
            for (final Class<?> c : m.getParameterTypes()) {
                if (c != RemoteAPIRequest.class && c != RemoteAPIResponse.class) {
                    l++;
                }
            }
            this.parameterCountMap.put(m, l);
            if (methodsByName.containsKey(l)) { throw new ParseException(this.interfaceClass + " Contains ambiguous methods: \r\n" + m + "\r\n" + methodsByName.get(l)); }
            methodsByName.put(l, m);
            final ApiAuthLevel auth = m.getAnnotation(ApiAuthLevel.class);
            if (auth != null) {
                this.methodsAuthLevel.put(m, auth.value());
            }
        }
    }

    /** @param sessionRequired the sessionRequired to set */
    protected void setSessionRequired(final boolean sessionRequired) {
        this.sessionRequired = sessionRequired;
    }

    /**
     * Checks that every wire parameter and the return type of m can be
     * stored by JSonStorage; methods that take the response object must
     * return void and stream the response themselves.
     *
     * @throws ParseException if the method cannot be exposed
     */
    private void validateMethod(final Method m) throws ParseException {
        if (m == InterfaceHandler.HELP) { throw new ParseException(m + " is reserved for internal usage"); }
        boolean responseIsParamater = false;
        for (final Type t : m.getGenericParameterTypes()) {
            if (RemoteAPIRequest.class == t) {
                continue;
            } else if (RemoteAPIResponse.class == t) {
                responseIsParamater = true;
                continue;
            } else {
                try {
                    JSonStorage.canStore(t);
                } catch (final InvalidTypeException e) {
                    throw new ParseException("Parameter " + t + " of " + m + " is invalid", e);
                }
            }
        }
        if (responseIsParamater) {
            if (m.getGenericReturnType() != void.class && m.getGenericReturnType() != Void.class) { throw new ParseException("Response in Parameters. " + m + " must return void, and has to handle the response itself"); }
        } else {
            try {
                if (RemoteAPIProcess.class.isAssignableFrom(m.getReturnType())) {
                    final Type t = m.getReturnType().getGenericSuperclass();
                    // FIX: was the JDK-internal
                    // sun.reflect.generics.reflectiveObjects.ParameterizedTypeImpl;
                    // the public ParameterizedType interface covers the same
                    // instances and works on all JVMs.
                    if (t instanceof ParameterizedType) {
                        final ParameterizedType p = (ParameterizedType) t;
                        final Type[] oo = p.getActualTypeArguments();
                        for (final Type o : oo) {
                            JSonStorage.canStore(o);
                        }
                    } else {
                        throw new ParseException("return Type of " + m + " is invalid");
                    }
                } else {
                    try {
                        JSonStorage.canStore(m.getGenericReturnType());
                    } catch (final InvalidTypeException e) {
                        // A non-storable return type may still be whitelisted
                        // by an @AllowStorage annotation.
                        final AllowStorage allow = m.getAnnotation(AllowStorage.class);
                        boolean found = false;
                        if (allow != null) {
                            for (final Class<?> c : allow.value()) {
                                if (e.getType() == c) {
                                    found = true;
                                    break;
                                }
                            }
                        }
                        if (!found) { throw new InvalidTypeException(e); }
                    }
                }
            } catch (final InvalidTypeException e) {
                throw new ParseException("return Type of " + m + " is invalid", e);
            }
        }
    }
}
package org.testng.reporters;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;

import org.testng.ITestContext;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.Reporter;
import org.testng.TestListenerAdapter;
import org.testng.internal.Utils;

/**
 * This class implements an HTML reporter for individual tests.
 *
 * @author Cedric Beust, May 2, 2004
 */
public class TestHTMLReporter extends TestListenerAdapter {
  private ITestContext m_testContext = null;

  // implements ITestListener
  @Override
  public void onStart(ITestContext context) {
    m_testContext = context;
  }

  @Override
  public void onFinish(ITestContext context) {
    // NOTE(review): intentionally reads m_testContext (set in onStart) rather
    // than the passed context; NPEs if onFinish fires without onStart.
    generateLog(m_testContext,
                null /* host */,
                m_testContext.getOutputDirectory(),
                getPassedTests(),
                getFailedTests(),
                getSkippedTests(),
                getFailedButWithinSuccessPercentageTests());
  }
  // implements ITestListener

  /** Builds the per-test output file path: outputDirectory/testName.html */
  private static File getOutputFile(String outputDirectory, ITestContext context) {
    String result =
        outputDirectory + File.separator + context.getName() + ".html";
    return new File(result);
  }

  /**
   * Appends one HTML table listing the given test results (method, duration,
   * exception) to sb.
   *
   * @param sb       buffer the table is appended to
   * @param title    table caption
   * @param tests    results to render; sorted by name when given as a List
   * @param cssClass suffix for the table's CSS class
   */
  public static void generateTable(StringBuffer sb, String title,
      Collection<ITestResult> tests, String cssClass)
  {
    sb.append("<table width='100%' border='1' class='invocation-").append(cssClass).append("'>\n")
      .append("<tr><td colspan='3' align='center'><b>").append(title).append("</b></td></tr>\n")
      .append("<tr>")
      .append("<td><b>Test method</b></td>\n")
      .append("<td width=\"10%\"><b>Time (seconds)</b></td>\n")
      .append("<td width=\"30%\"><b>Exception</b></td>\n")
      .append("</tr>\n");

    // FIX: raw Comparator type replaced with the parameterized form.
    Comparator<ITestResult> testResultComparator = new Comparator<ITestResult>() {
      public int compare(ITestResult o1, ITestResult o2) {
        String c1 = o1.getName();
        String c2 = o2.getName();
        return c1.compareTo(c2);
      }
    };

    if (tests instanceof List) {
      Collections.sort((List<ITestResult>) tests, testResultComparator);
    }

    // User output?
    String id = "";
    Throwable tw = null;
    for (ITestResult tr : tests) {
      sb.append("<tr>\n");

      // Test method
      ITestNGMethod method = tr.getMethod();
      String fqName = tr.getName();
      sb.append("<td>").append(fqName);
      sb.append("<br/>(").append(tr.getTestClass().getName()).append(")");

      // Method description
      if (! Utils.isStringEmpty(method.getDescription())) {
        sb.append("<br/><b>").append(method.getDescription()).append("</b>");
      }

      Object[] parameters = tr.getParameters();
      if (parameters != null && parameters.length > 0) {
        sb.append("<br/><b>Parameters:</b> ");
        for (int j = 0; j < parameters.length; j++) {
          if (j > 0) sb.append(", ");
          sb.append(parameters[j] == null ? "null" : parameters[j].toString());
        }
      }

      // Reporter output plus the JavaScript toggles to show/hide it.
      {
        List<String> output = Reporter.getOutput(tr);
        if (null != output && output.size() > 0) {
          sb.append("<br/>");

          // Method name
          String divId = "Output-" + tr.hashCode();
          sb.append("\n<a href=\"#").append(divId).append("\"")
            .append(" onClick=\"toggleBox('").append(divId).append("');\">")
            .append("Show output</a>\n")
            .append("\n<a href=\"#").append(divId).append("\"")
            .append(" onClick=\"toggleAllBoxes();\">Show all outputs</a>\n")
            ;

          // Method output
          sb.append("<div class='log' id=\"").append(divId).append("\">\n");
          for (String s : output) {
            sb.append(s).append("<br/>\n");
          }
          sb.append("</div>\n");
        }
      }

      sb.append("</td>\n");

      // Time
      long time = (tr.getEndMillis() - tr.getStartMillis()) / 1000;
      // FIX: deprecated new Long(time).toString() replaced.
      String strTime = Long.toString(time);
      sb.append("<td>").append(strTime).append("</td>\n");

      // Exception: short stack trace inline, full trace behind a toggle.
      tw = tr.getThrowable();
      String stackTrace = "";
      String fullStackTrace = "";
      id = "stack-trace" + tr.hashCode();
      sb.append("<td>");
      if (null != tw) {
        String[] stackTraces = Utils.stackTrace(tw, true);
        String shortStackTrace = stackTraces[0].replaceAll("<", "&lt;").replaceAll(">", "&gt;");
        fullStackTrace = stackTraces[1].replaceAll("<", "&lt;").replaceAll(">", "&gt;");
        stackTrace = "<div><pre>" + shortStackTrace + "</pre></div>";

        sb.append(stackTrace);
        // JavaScript link
        sb.append("<a href='#' onClick='toggleBox(\"")
          .append(id).append("\")'>")
          .append("Click to show all stack frames").append("</a>\n")
          .append("<div class='stack-trace' id='" + id + "'>")
          .append("<pre>" + fullStackTrace + "</pre>")
          .append("</div>")
          ;
      }
      sb.append("</td>\n").append("</tr>\n");
    }
    sb.append("</table><p>\n");
  }

  /** Joins the array's elements with a single trailing space each. */
  private static String arrayToString(String[] array) {
    StringBuffer result = new StringBuffer("");
    for (int i = 0; i < array.length; i++) {
      result.append(array[i]).append(" ");
    }
    return result.toString();
  }

  // FIX: source was garbled around the script-hiding comment markers
  // ("<! " / " "</script>) — restored the conventional <!-- ... //-->
  // wrappers; also made the constant final.
  private static final String HEAD =
    "\n<style type=\"text/css\">\n" +
    ".log { display: none;} \n" +
    ".stack-trace { display: none;} \n" +
    "</style>\n" +
    "<script type=\"text/javascript\">\n" +
      "<!--\n" +
      "function flip(e) {\n" +
      "  current = e.style.display;\n" +
      "  if (current == 'block') {\n" +
      "    e.style.display = 'none';\n" +
      "  }\n" +
      "  else {\n" +
      "    e.style.display = 'block';\n" +
      "  }\n" +
      "}\n" +
      "\n" +
      "function toggleBox(szDivId)\n" +
      "{\n" +
      "  if (document.getElementById) {\n" +
      "    flip(document.getElementById(szDivId));\n" +
      "  }\n" +
      "  else if (document.all) {\n" +
      "    // this is the way old msie versions work\n" +
      "    var style2 = document.all[szDivId].style;\n" +
      "    style2.display = style2.display? \"\":\"block\";\n" +
      "  }\n" +
      "\n" +
      "}\n" +
      "\n" +
      "function toggleAllBoxes() {\n" +
      "  if (document.getElementsByTagName) {\n" +
      "    d = document.getElementsByTagName('div');\n" +
      "    for (i = 0; i < d.length; i++) {\n" +
      "      if (d[i].className == 'log') {\n" +
      "        flip(d[i]);\n" +
      "      }\n" +
      "    }\n" +
      "  }\n" +
      "}\n" +
      "\n" +
      "//-->\n" +
    "</script>\n" +
    "\n";

  /**
   * Writes the full HTML report (summary table plus one table per result
   * category) to outputDirectory/testName.html.
   */
  public static void generateLog(ITestContext testContext,
                                 String host,
                                 String outputDirectory,
                                 Collection<ITestResult> passedTests,
                                 Collection<ITestResult> failedTests,
                                 Collection<ITestResult> skippedTests,
                                 Collection<ITestResult> percentageTests)
  {
    File htmlOutputFile = getOutputFile(outputDirectory, testContext);

    StringBuffer sb = new StringBuffer();
    sb.append("<html>\n<head>\n")
      .append("<title>TestNG:  ").append(testContext.getName()).append("</title>\n")
      .append(HtmlHelper.getCssString())
      .append(HEAD)
      .append("</head>\n")
      .append("<body>\n");

    Date startDate = testContext.getStartDate();
    Date endDate = testContext.getEndDate();
    long duration = (endDate.getTime() - startDate.getTime()) / 1000;
    int passed =
      testContext.getPassedTests().size() +
      testContext.getFailedButWithinSuccessPercentageTests().size();
    int failed = testContext.getFailedTests().size();
    int skipped = testContext.getSkippedTests().size();
    String hostLine = Utils.isStringEmpty(host) ? "" : "<tr><td>Remote host:</td><td>" + host + "</td>\n</tr>";

    sb
      .append("<h2 align='center'>").append(testContext.getName()).append("</h2>")
      .append("<table border='1' align=\"center\">\n")
      .append("<tr>\n")
      .append("<td>Tests passed/Failed/Skipped:</td><td>").append(passed).append("/").append(failed).append("/").append(skipped).append("</td>\n")
      .append("</tr><tr>\n")
      .append("<td>Started on:</td><td>").append(testContext.getStartDate().toString()).append("</td>\n")
      .append("</tr>\n")
      .append(hostLine)
      .append("<tr><td>Total time:</td><td>").append(duration).append(" seconds</td>\n")
      .append("</tr><tr>\n")
      .append("<td>Included groups:</td><td>").append(arrayToString(testContext.getIncludedGroups())).append("</td>\n")
      .append("</tr><tr>\n")
      .append("<td>Excluded groups:</td><td>").append(arrayToString(testContext.getExcludedGroups())).append("</td>\n")
      .append("</tr>\n")
      .append("</table><p/>\n")
      ;

    if (failedTests.size() > 0) {
      generateTable(sb, "FAILED TESTS", failedTests, "failed");
    }
    if (percentageTests.size() > 0) {
      generateTable(sb, "FAILED TESTS BUT WITHIN SUCCESS PERCENTAGE",
          percentageTests, "percent");
    }
    if (passedTests.size() > 0) {
      generateTable(sb, "PASSED TESTS", passedTests, "passed");
    }
    if (skippedTests.size() > 0) {
      generateTable(sb, "SKIPPED TESTS", skippedTests, "skipped");
    }

    sb.append("</body>\n</html>");
    Utils.writeFile(htmlOutputFile, sb.toString());
  }

  private static void ppp(String s) {
    System.out.println("[TestHTMLReporter] " + s);
  }
}
package org.biojava.bio.seq.impl;

import java.util.Collections;
import java.util.Iterator;

import org.biojava.bio.Annotatable;
import org.biojava.bio.Annotation;
import org.biojava.bio.BioError;
import org.biojava.bio.BioException;
import org.biojava.bio.SimpleAnnotation;
import org.biojava.ontology.OntoTools;
import org.biojava.ontology.Term;
import org.biojava.bio.seq.Feature;
import org.biojava.bio.seq.FeatureFilter;
import org.biojava.bio.seq.FeatureHolder;
import org.biojava.bio.seq.FilterUtils;
import org.biojava.bio.seq.RealizingFeatureHolder;
import org.biojava.bio.seq.Sequence;
import org.biojava.bio.seq.SimpleFeatureHolder;
import org.biojava.bio.symbol.Location;
import org.biojava.bio.symbol.SymbolList;
import org.biojava.utils.AbstractChangeable;
import org.biojava.utils.ChangeEvent;
import org.biojava.utils.ChangeForwarder;
import org.biojava.utils.ChangeListener;
import org.biojava.utils.ChangeSupport;
import org.biojava.utils.ChangeType;
import org.biojava.utils.ChangeVetoException;

/**
 * A no-frills implementation of a feature.
 *
 * @author Matthew Pocock
 * @author Thomas Down
 * @author Kalle Nslund
 * @author Paul Seed
 * @author Len Trigg
 */
public class SimpleFeature
extends
AbstractChangeable
implements
Feature,
RealizingFeatureHolder,
java.io.Serializable
{
    // Forwarders are transient and re-created lazily in getChangeSupport.
    private transient ChangeListener annotationForwarder;
    private transient ChangeListener featureForwarder;

    /**
     * The FeatureHolder that we will delegate the FeatureHolder interface too.
     * This is lazily instantiated.
     */
    private SimpleFeatureHolder featureHolder;

    /**
     * The location of this feature.
     */
    private Location loc;

    /**
     * The type of this feature - something like Exon.
     * This is included for cheap interoperability with GFF.
     */
    private String type;

    /**
     * The source of this feature - the program that generated it.
     * This is included for cheap interoperability with GFF.
     */
    private String source;

    /**
     * Our parent FeatureHolder.
     */
    private FeatureHolder parent;

    /**
     * The annotation object.
     * This is lazily instantiated.
     */
    private Annotation annotation;

    private Term typeTerm;
    private Term sourceTerm;

    /**
     * A utility function to retrieve the feature holder delegate, creating it if
     * necessary.
     *
     * @return the FeatureHolder delegate
     */
    protected SimpleFeatureHolder getFeatureHolder() {
        if(featureHolder == null) {
            featureHolder = new SimpleFeatureHolder();
        }
        return featureHolder;
    }

    /**
     * A utility function to find out if the feature holder delegate has been
     * instantiated yet. If it has not, we may avoid instantiating it by returning
     * some pre-canned result.
     *
     * @return true if the feature holder delegate has been created and false
     *         otherwise
     */
    protected boolean featureHolderAllocated() {
        return featureHolder != null;
    }

    protected ChangeSupport getChangeSupport(ChangeType ct) {
        ChangeSupport cs = super.getChangeSupport(ct);

        // Lazily wire forwarders so annotation/feature changes propagate
        // through this feature's listeners.
        if(
                (annotationForwarder == null) &&
                (ct.isMatchingType(Annotatable.ANNOTATION) || Annotatable.ANNOTATION.isMatchingType(ct))
        ) {
            annotationForwarder =
                    new ChangeForwarder.Retyper(this, cs, Annotation.PROPERTY);
            getAnnotation().addChangeListener(
                    annotationForwarder,
                    Annotatable.ANNOTATION
            );
        }

        if(
                (featureForwarder == null) &&
                (ct == null || ct == FeatureHolder.FEATURES)
        ) {
            featureForwarder = new ChangeForwarder(
                    this,
                    cs
            );
            getFeatureHolder().addChangeListener(
                    featureForwarder,
                    FeatureHolder.FEATURES
            );
        }

        return cs;
    }

    public Location getLocation() {
        return loc;
    }

    public void setLocation(Location loc)
            throws ChangeVetoException {
        if(hasListeners()) {
            ChangeSupport cs = getChangeSupport(LOCATION);
            synchronized(cs) {
                ChangeEvent ce = new ChangeEvent(this, LOCATION, loc, this.loc);
                cs.firePreChangeEvent(ce);
                this.loc = loc;
                cs.firePostChangeEvent(ce);
            }
        } else {
            this.loc = loc;
        }
    }

    public Term getTypeTerm() {
        return typeTerm;
    }

    public String getType() {
        if (typeTerm != OntoTools.ANY) {
            return typeTerm.getName();
        } else {
            return type;
        }
    }

    public void setType(String type)
            throws ChangeVetoException {
        if(hasListeners()) {
            ChangeSupport cs = getChangeSupport(TYPE);
            synchronized(cs) {
                ChangeEvent ce = new ChangeEvent(this, TYPE, type, this.type);
                cs.firePreChangeEvent(ce);
                this.type = type;
                cs.firePostChangeEvent(ce);
            }
        } else {
            this.type = type;
        }
    }

    public void setTypeTerm(Term t)
            throws ChangeVetoException {
        if(hasListeners()) {
            ChangeSupport cs = getChangeSupport(TYPE);
            synchronized (cs) {
                ChangeEvent ce_term = new ChangeEvent(this, TYPETERM, t, this.getTypeTerm());
                ChangeEvent ce_name = new ChangeEvent(this, TYPE, t.getName(), this.getType());
                cs.firePreChangeEvent(ce_term);
                cs.firePreChangeEvent(ce_name);
                // FIX: was "this.typeTerm = typeTerm" (a self-assignment);
                // the argument t was silently discarded.
                this.typeTerm = t;
                cs.firePostChangeEvent(ce_term);
                cs.firePostChangeEvent(ce_name);
            }
        } else {
            // FIX: same self-assignment bug in the listener-free path.
            this.typeTerm = t;
        }
    }

    public String getSource() {
        if (sourceTerm != OntoTools.ANY) {
            return sourceTerm.getName();
        } else {
            return source;
        }
    }

    public Term getSourceTerm() {
        return sourceTerm;
    }

    public FeatureHolder getParent() {
        return parent;
    }

    public void setSource(String source)
            throws ChangeVetoException {
        if(hasListeners()) {
            ChangeSupport cs = getChangeSupport(SOURCE);
            synchronized(cs) {
                // FIX: event arguments were (old, new), inverted relative to
                // every sibling setter which passes (new, old).
                ChangeEvent ce = new ChangeEvent(this, SOURCE, source, this.source);
                cs.firePreChangeEvent(ce);
                this.source = source;
                cs.firePostChangeEvent(ce);
            }
        } else {
            this.source = source;
        }
    }

    public void setSourceTerm(Term t)
            throws ChangeVetoException {
        if(hasListeners()) {
            // NOTE(review): fetches change support for TYPE although it fires
            // SOURCE/SOURCETERM events — looks like a copy-paste slip, but
            // left untouched since the ChangeType only hints forwarder setup.
            ChangeSupport cs = getChangeSupport(TYPE);
            synchronized (cs) {
                ChangeEvent ce_term = new ChangeEvent(this, SOURCETERM, t, this.getSourceTerm());
                ChangeEvent ce_name = new ChangeEvent(this, SOURCE, t.getName(), this.getSource());
                cs.firePreChangeEvent(ce_term);
                cs.firePreChangeEvent(ce_name);
                this.sourceTerm = t;
                cs.firePostChangeEvent(ce_term);
                cs.firePostChangeEvent(ce_name);
            }
        } else {
            // FIX: was "this.sourceTerm = sourceTerm" (a self-assignment);
            // the argument t was silently discarded.
            this.sourceTerm = t;
        }
    }

    public Sequence getSequence() {
        FeatureHolder fh = this;
        // Walk up the parent chain until the non-Feature ancestor (the
        // sequence) is reached.
        while (fh instanceof Feature) {
            fh = ((Feature) fh).getParent();
        }
        try {
            return (Sequence) fh;
        } catch (ClassCastException ex) {
            throw new BioError("Feature doesn't seem to have a Sequence ancestor: " + fh);
        }
    }

    public Annotation getAnnotation() {
        if(annotation == null)
            annotation = new SimpleAnnotation();
        return annotation;
    }

    public SymbolList getSymbols() {
        return getLocation().symbols(getSequence());
    }

    public int countFeatures() {
        if(featureHolderAllocated())
            return getFeatureHolder().countFeatures();
        return 0;
    }

    public Iterator features() {
        if(featureHolderAllocated())
            return getFeatureHolder().features();

        // Avoid unnecessarily allocating the feature holder.
        return Collections.EMPTY_LIST.iterator();
    }

    public void removeFeature(Feature f)
            throws ChangeVetoException {
        getFeatureHolder().removeFeature(f);
    }

    public boolean containsFeature(Feature f) {
        if(featureHolderAllocated()) {
            return getFeatureHolder().containsFeature(f);
        } else {
            return false;
        }
    }

    public FeatureHolder filter(FeatureFilter ff) {
        FeatureFilter childFilter = new FeatureFilter.Not(FeatureFilter.top_level);
        if (FilterUtils.areDisjoint(ff, childFilter)) {
            return FeatureHolder.EMPTY_FEATURE_HOLDER;
        } else if (featureHolderAllocated()) {
            return getFeatureHolder().filter(ff);
        } else {
            return FeatureHolder.EMPTY_FEATURE_HOLDER;
        }
    }

    public FeatureHolder filter(FeatureFilter ff, boolean recurse) {
        if(featureHolderAllocated())
            return getFeatureHolder().filter(ff, recurse);
        return FeatureHolder.EMPTY_FEATURE_HOLDER;
    }

    public Feature.Template makeTemplate() {
        Feature.Template ft = new Feature.Template();
        fillTemplate(ft);
        return ft;
    }

    protected void fillTemplate(Feature.Template ft) {
        ft.location = getLocation();
        ft.type = getType();
        ft.source = getSource();
        ft.annotation = getAnnotation();
    }

    /**
     * Create a <code>SimpleFeature</code> on the given sequence.
     * The feature is created underneath the parent <code>FeatureHolder</code>
     * and populated directly from the template fields. However,
     * if the template annotation is the <code>Annotation.EMPTY_ANNOTATION</code>,
     * an empty <code>SimpleAnnotation</code> is attached to the feature instead.
     * @param sourceSeq the source sequence
     * @param parent the parent sequence or feature
     * @param template the template for the feature
     */
    public SimpleFeature(Sequence sourceSeq,
            FeatureHolder parent,
            Feature.Template template) {
        if (template.location == null) {
            throw new IllegalArgumentException(
                    "Location can not be null. Did you mean Location.EMPTY_LOCATION?"
            );
        }
        if(!(parent instanceof Feature) && !(parent instanceof Sequence)) {
            throw new IllegalArgumentException("Parent must be sequence or feature, not: " + parent.getClass() + " " + parent);
        }
        if (template.location.getMin() < 1 || template.location.getMax() > sourceSeq.length()) {
            // NOTE(review): the original deliberately(?) left this check
            // empty, tolerating out-of-range locations — confirm whether it
            // should throw before changing.
        }
        this.parent = parent;
        this.loc = template.location;
        // Fall back to OntoTools.ANY so getType()/getSource() can dispatch on
        // term identity.
        this.typeTerm = template.typeTerm != null ? template.typeTerm : OntoTools.ANY;
        this.sourceTerm = template.sourceTerm != null ? template.sourceTerm : OntoTools.ANY;
        this.type = template.type != null ? template.type : typeTerm.getName();
        this.source = template.source != null ? template.source : sourceTerm.getName();
        if (this.type == null) {
            throw new NullPointerException("Either type or typeTerm must have a non-null value");
        }
        if (this.source == null) {
            throw new NullPointerException("Either source or sourceTerm must have a non-null value");
        }
        this.annotation = template.annotation != null ?
                new SimpleAnnotation(template.annotation) :
                null;
    }

    public String toString() {
        return "Feature " + getType() + " " +
                getSource() + " " + getLocation();
    }

    public Feature realizeFeature(FeatureHolder fh, Feature.Template templ)
            throws BioException {
        try {
            // Delegate realization up the parent chain.
            RealizingFeatureHolder rfh = (RealizingFeatureHolder) getParent();
            return rfh.realizeFeature(fh, templ);
        } catch (ClassCastException ex) {
            throw new BioException("Couldn't propagate feature creation request.");
        }
    }

    public Feature createFeature(Feature.Template temp)
            throws BioException, ChangeVetoException {
        Feature f = realizeFeature(this, temp);
        getFeatureHolder().addFeature(f);
        return f;
    }

    public int hashCode() {
        return makeTemplate().hashCode();
    }

    public boolean equals(Object o) {
        if (! (o instanceof Feature)) {
            return false;
        }

        Feature fo = (Feature) o;
        if (! fo.getSequence().equals(getSequence()))
            return false;

        return makeTemplate().equals(fo.makeTemplate());
    }

    public FeatureFilter getSchema() {
        return new FeatureFilter.ByParent(new FeatureFilter.ByFeature(this));
    }
}
package org.biojavax.bio.db.biosql;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Set;
import javax.sql.DataSource;
import org.biojavax.Namespace;
import org.biojavax.bio.db.*;

/**
 * A {@link PersistentBioDB} backed by a BioSQL relational schema. The nested
 * {@link Impl} class provides a generic JDBC implementation; {@link MySQL} and
 * {@link Oracle} specialise it for vendor-specific behaviour.
 *
 * @author hollandr
 */
public interface BioSQLBioDB extends PersistentBioDB {

    /**
     * This method is used to write long strings, ie. CLOBS in Oracle.
     * @param text the string to write
     * @param ps the statement we are writing to
     * @param column the column number to write the string to.
     * @throws SQLException if the write process fails.
     */
    public void writeLongString(String text, PreparedStatement ps, int column) throws SQLException;

    /**
     * This method reads long strings, eg. CLOBS in Oracle.
     * @param rs the resultset to read from
     * @param column the column the string lives in
     * @return String the string that has been read.
     * @throws SQLException if the string could not be read.
     */
    public String readLongString(ResultSet rs, int column) throws SQLException;

    /**
     * Generic JDBC-backed implementation. Several of the load methods are
     * still stubs awaiting real SQL.
     */
    public class Impl implements BioSQLBioDB {

        private Connection conn;
        private DataSource ds;
        private boolean respectsTransactions;
        private Namespace readns;
        private Set namespaces;

        /** Wraps a single fixed connection; transactions are respected. */
        protected Impl(Connection conn) {
            this.readns = null;
            this.respectsTransactions = true;
            this.conn = conn;
        }

        /** Draws connections from a pool; transactions are not respected. */
        protected Impl(DataSource ds) {
            this.readns = null;
            this.respectsTransactions = false;
            this.ds = ds;
        }

        /** Lazily obtains a connection from the data source and caches it. */
        public Connection getConnection() throws SQLException {
            Connection c = this.conn;
            if (c == null) {
                c = ds.getConnection();
                this.conn = c;
            }
            return c;
        }

        public boolean respectsTransactions() {
            return this.respectsTransactions;
        }

        /** Stub: will eventually query the namespace table for all names. */
        public Set loadNamespaceNames() throws SQLException {
            return Collections.EMPTY_SET;
        }

        /** The name alone identifies a namespace uniquely, so no SQL lookup is needed here. */
        public PersistentNamespace loadNamespace(String name) throws SQLException {
            return (PersistentNamespace) BioSQLNamespace.getInstance(this, name).load(null);
        }

        /** Stub: will eventually query the ontology table for all names. */
        public Set loadOntologyNames() {
            return Collections.EMPTY_SET;
        }

        /** Stub: ontology loading is not yet wired up. */
        public PersistentComparableOntology loadOntology(String name) throws SQLException {
            // PersistentComparableOntology co = BioSQLComparableOntology.getInstance(this, name);
            // return (PersistentComparableOntology)co.load(null);
            return null;
        }

        public void setNamespace(Namespace ns) {
            this.readns = ns;
        }

        /** Stub: will eventually return the set of Integer UIDs of all sequences. */
        public Set loadSequenceUIDs() throws SQLException {
            return Collections.EMPTY_SET;
        }

        /** Stub: will locate a bioentry by UID and wrap it with the UID set. */
        public PersistentBioEntry loadSequenceByUID(int UID) throws SQLException, NullPointerException {
            // use SQL to locate the FIRST set of details, including the UID.
            // construct a SimpleBioEntry object using SimpleBioEntryBuilder
            // wrap it in a BioSQLBioEntry object with the UID set
            return null;
        }

        /** Stub: will locate a bioentry by name/accession/version and wrap it. */
        public PersistentBioEntry loadSequence(String name, String accession, int version) throws SQLException, NullPointerException {
            // use SQL to locate the FIRST set of details, including the UID.
            // construct a SimpleBioEntry object using SimpleBioEntryBuilder
            // wrap it in a BioSQLBioEntry object with the UID set
            return null;
        }

        /**
         * Converts a plain object into its persistent wrapper. Every branch is
         * currently disabled, so all calls end in the exception below.
         */
        public Persistent convert(Object o) throws IllegalArgumentException {
            //if (o instanceof BioEntry) return BioSQLBioEntry.getInstance(this,(BioEntry)o);
            //else if (o instanceof BioEntryFeature) return BioSQLBioEntryFeature.getInstance(this,(BioEntryFeature)o);
            //else if (o instanceof BioEntryRelationship) return BioSQLBioEntryRelationship.getInstance(this,(BioEntryRelationship)o);
            //else if (o instanceof ComparableOntology) return BioSQLComparableOntology.getInstance(this,(ComparableOntology)o);
            //else if (o instanceof ComparableTerm) return BioSQLComparableTerm.getInstance(this,(ComparableTerm)o);
            //else if (o instanceof ComparableTriple) return BioSQLComparableTriple.getInstance(this,(ComparableTriple)o);
            //else if (o instanceof CrossRef) return BioSQLCrossRef.getInstance(this,(CrossRef)o);
            //else if (o instanceof DocumentReference) return BioSQLDocumentReference.getInstance(this,(DocumentReference)o);
            //else if (o instanceof LocatedDocumentReference) return BioSQLLocatedDocumentReference.getInstance(this,(LocatedDocumentReference)o);
            //else if (o instanceof NCBITaxon) return BioSQLNCBITaxon.getInstance(this,(NCBITaxon)o);
            //else if (o instanceof Namespace) return BioSQLNamespace.getInstance(this,(Namespace)o);
            throw new IllegalArgumentException("Unable to convert object of type "+o.getClass());
        }

        public void writeLongString(String text, PreparedStatement ps, int column) throws SQLException {
            ps.setString(column, text);
        }

        public String readLongString(ResultSet rs, int column) throws SQLException {
            return rs.getString(column);
        }
    }

    /** MySQL flavour; the generic implementation is sufficient as-is. */
    public class MySQL extends Impl {
        public MySQL(Connection conn) { super(conn); }
        public MySQL(DataSource ds) { super(ds); }
    }

    /** Oracle flavour; long-string handling still needs real CLOB support. */
    public class Oracle extends Impl {
        public Oracle(Connection conn) { super(conn); }
        public Oracle(DataSource ds) { super(ds); }

        public void writeLongString(String text, PreparedStatement ps, int column) throws SQLException {
            ps.setString(column, text); // Doesn't work on Oracle 9i/10g - FIXME
        }

        public String readLongString(ResultSet rs, int column) throws SQLException {
            return rs.getString(column); // Doesn't work on Oracle 9i/10g - FIXME
        }
    }
}
package org.bouncycastle.cms;

import java.io.IOException;
import java.io.OutputStream;
import java.security.GeneralSecurityException;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Provider;
import java.security.PublicKey;
import java.security.Signature;
import java.security.SignatureException;
import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateNotYetValidException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;

import javax.crypto.Cipher;

import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1InputStream;
import org.bouncycastle.asn1.ASN1Null;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.ASN1OctetString;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.ASN1Set;
import org.bouncycastle.asn1.DEREncodable;
import org.bouncycastle.asn1.DERObject;
import org.bouncycastle.asn1.DERObjectIdentifier;
import org.bouncycastle.asn1.DERSet;
import org.bouncycastle.asn1.DERTags;
import org.bouncycastle.asn1.cms.Attribute;
import org.bouncycastle.asn1.cms.AttributeTable;
import org.bouncycastle.asn1.cms.CMSAttributes;
import org.bouncycastle.asn1.cms.IssuerAndSerialNumber;
import org.bouncycastle.asn1.cms.SignerIdentifier;
import org.bouncycastle.asn1.cms.SignerInfo;
import org.bouncycastle.asn1.cms.Time;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x509.DigestInfo;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.operator.ContentVerifier;
import org.bouncycastle.operator.DefaultSignatureAlgorithmIdentifierFinder;
import org.bouncycastle.operator.DigestCalculator;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.RawContentVerifier;
import org.bouncycastle.operator.SignatureAlgorithmIdentifierFinder;
import org.bouncycastle.util.Arrays;

/**
 * an expanded SignerInfo block from a CMS Signed message
 * <p>
 * Wraps an ASN.1 {@link SignerInfo} together with the (possibly detached)
 * content it signs, and implements the attribute-consistency and signature
 * checks of RFC 3852 sections 11.1-11.4.
 */
public class SignerInformation
{
    private SignerId                sid;
    private SignerInfo              info;
    private AlgorithmIdentifier     digestAlgorithm;
    private AlgorithmIdentifier     encryptionAlgorithm;
    private final ASN1Set           signedAttributeSet;
    private final ASN1Set           unsignedAttributeSet;
    private CMSProcessable          content;
    private byte[]                  signature;
    private ASN1ObjectIdentifier    contentType;
    // Optional pre-computed digest source (used e.g. for counter signatures,
    // where the "content" is the outer signature bytes).
    private IntDigestCalculator     digestCalculator;
    // Digest computed during verification; null until verify() has run.
    private byte[]                  resultDigest;
    private SignatureAlgorithmIdentifierFinder sigAlgFinder;

    // Derived
    private AttributeTable          signedAttributeValues;
    private AttributeTable          unsignedAttributeValues;

    SignerInformation(
        SignerInfo          info,
        ASN1ObjectIdentifier contentType,
        CMSProcessable      content,
        IntDigestCalculator digestCalculator,
        SignatureAlgorithmIdentifierFinder sigAlgFinder)
    {
        this.info = info;
        this.contentType = contentType;
        this.sigAlgFinder = sigAlgFinder;

        SignerIdentifier   s = info.getSID();

        if (s.isTagged())
        {
            // Tagged alternative of SignerIdentifier: subjectKeyIdentifier (RFC 3852).
            ASN1OctetString octs = ASN1OctetString.getInstance(s.getId());

            sid = new SignerId(octs.getOctets());
        }
        else
        {
            // Untagged alternative: issuerAndSerialNumber.
            IssuerAndSerialNumber   iAnds = IssuerAndSerialNumber.getInstance(s.getId());

            sid = new SignerId(iAnds.getName(), iAnds.getSerialNumber().getValue());
        }

        this.digestAlgorithm = info.getDigestAlgorithm();
        this.signedAttributeSet = info.getAuthenticatedAttributes();
        this.unsignedAttributeSet = info.getUnauthenticatedAttributes();
        this.encryptionAlgorithm = info.getDigestEncryptionAlgorithm();
        this.signature = info.getEncryptedDigest().getOctets();

        this.content = content;
        this.digestCalculator = digestCalculator;
    }

    // NOTE(review): for counter signatures this is the counterSignature
    // attribute OID rather than a true eContentType -- see the TODOs in doVerify.
    public ASN1ObjectIdentifier getContentType()
    {
        return this.contentType;
    }

    // DER-encode an optional ASN.1 value; null in, null out.
    private byte[] encodeObj(
        DEREncodable    obj)
        throws IOException
    {
        if (obj != null)
        {
            return obj.getDERObject().getEncoded();
        }

        return null;
    }

    public SignerId getSID()
    {
        return sid;
    }

    /**
     * return the version number for this objects underlying SignerInfo structure.
     */
    public int getVersion()
    {
        return info.getVersion().getValue().intValue();
    }

    public AlgorithmIdentifier getDigestAlgorithmID()
    {
        return digestAlgorithm;
    }

    /**
     * return the object identifier for the signer's digest algorithm.
     */
    public String getDigestAlgOID()
    {
        return digestAlgorithm.getObjectId().getId();
    }

    /**
     * return the digest algorithm parameters, or null if there aren't any.
     */
    public byte[] getDigestAlgParams()
    {
        try
        {
            return encodeObj(digestAlgorithm.getParameters());
        }
        catch (Exception e)
        {
            throw new RuntimeException("exception getting digest parameters " + e);
        }
    }

    /**
     * return the content digest that was calculated during verification.
     */
    public byte[] getContentDigest()
    {
        if (resultDigest == null)
        {
            throw new IllegalStateException("method can only be called after verify.");
        }

        // defensive copy -- callers must not be able to mutate our state
        return (byte[])resultDigest.clone();
    }

    /**
     * return the object identifier for the signature.
     */
    public String getEncryptionAlgOID()
    {
        return encryptionAlgorithm.getObjectId().getId();
    }

    /**
     * return the signature/encryption algorithm parameters, or null if
     * there aren't any.
     */
    public byte[] getEncryptionAlgParams()
    {
        try
        {
            return encodeObj(encryptionAlgorithm.getParameters());
        }
        catch (Exception e)
        {
            throw new RuntimeException("exception getting encryption parameters " + e);
        }
    }

    /**
     * return a table of the signed attributes - indexed by
     * the OID of the attribute.
     */
    public AttributeTable getSignedAttributes()
    {
        // lazily built and cached; null when there are no signed attributes
        if (signedAttributeSet != null && signedAttributeValues == null)
        {
            signedAttributeValues = new AttributeTable(signedAttributeSet);
        }

        return signedAttributeValues;
    }

    /**
     * return a table of the unsigned attributes indexed by
     * the OID of the attribute.
     */
    public AttributeTable getUnsignedAttributes()
    {
        // lazily built and cached; null when there are no unsigned attributes
        if (unsignedAttributeSet != null && unsignedAttributeValues == null)
        {
            unsignedAttributeValues = new AttributeTable(unsignedAttributeSet);
        }

        return unsignedAttributeValues;
    }

    /**
     * return the encoded signature
     */
    public byte[] getSignature()
    {
        return (byte[])signature.clone();
    }

    /**
     * Return a SignerInformationStore containing the counter signatures attached to this
     * signer. If no counter signatures are present an empty store is returned.
     */
    public SignerInformationStore getCounterSignatures()
    {
        // TODO There are several checks implied by the RFC3852 comments that are missing

        /*
        The countersignature attribute MUST be an unsigned attribute; it MUST
        NOT be a signed attribute, an authenticated attribute, an
        unauthenticated attribute, or an unprotected attribute.
        */
        AttributeTable unsignedAttributeTable = getUnsignedAttributes();
        if (unsignedAttributeTable == null)
        {
            return new SignerInformationStore(new ArrayList(0));
        }

        List counterSignatures = new ArrayList();

        /*
        The UnsignedAttributes syntax is defined as a SET OF Attributes.  The
        UnsignedAttributes in a signerInfo may include multiple instances of
        the countersignature attribute.
        */
        ASN1EncodableVector allCSAttrs = unsignedAttributeTable.getAll(CMSAttributes.counterSignature);

        for (int i = 0; i < allCSAttrs.size(); ++i)
        {
            Attribute counterSignatureAttribute = (Attribute)allCSAttrs.get(i);

            /*
            A countersignature attribute can have multiple attribute values.  The
            syntax is defined as a SET OF AttributeValue, and there MUST be one
            or more instances of AttributeValue present.
            */
            ASN1Set values = counterSignatureAttribute.getAttrValues();
            if (values.size() < 1)
            {
                // TODO Throw an appropriate exception?
            }

            for (Enumeration en = values.getObjects(); en.hasMoreElements();)
            {
                /*
                Countersignature values have the same meaning as SignerInfo values
                for ordinary signatures, except that:

                   1. The signedAttributes field MUST NOT contain a content-type
                      attribute; there is no content type for countersignatures.

                   2. The signedAttributes field MUST contain a message-digest
                      attribute if it contains any other attributes.

                   3. The input to the message-digesting process is the contents
                      octets of the DER encoding of the signatureValue field of the
                      SignerInfo value with which the attribute is associated.
                */
                SignerInfo si = SignerInfo.getInstance(en.nextElement());

                String digestName = CMSSignedHelper.INSTANCE.getDigestAlgName(si.getDigestAlgorithm().getObjectId().getId());

                // A counter signature digests the outer signature bytes, hence the
                // CounterSignatureDigestCalculator over getSignature().
                counterSignatures.add(new SignerInformation(si, CMSAttributes.counterSignature, null, new CounterSignatureDigestCalculator(digestName, null, getSignature()), new DefaultSignatureAlgorithmIdentifierFinder()));
            }
        }

        return new SignerInformationStore(counterSignatures);
    }

    /**
     * return the DER encoding of the signed attributes.
     * @throws IOException if an encoding error occurs.
     */
    public byte[] getEncodedSignedAttributes()
        throws IOException
    {
        if (signedAttributeSet != null)
        {
            return signedAttributeSet.getEncoded(ASN1Encodable.DER);
        }

        return null;
    }

    /**
     * JCA provider based verification path.
     * @deprecated
     */
    private boolean doVerify(
        PublicKey       key,
        Provider        sigProvider)
        throws CMSException, NoSuchAlgorithmException
    {
        String          digestName = CMSSignedHelper.INSTANCE.getDigestAlgName(this.getDigestAlgOID());
        String          encName = CMSSignedHelper.INSTANCE.getEncryptionAlgName(this.getEncryptionAlgOID());
        String          signatureName = digestName + "with" + encName;
        Signature       sig = CMSSignedHelper.INSTANCE.getSignatureInstance(signatureName, sigProvider);
        MessageDigest   digest = CMSSignedHelper.INSTANCE.getDigestInstance(digestName, sigProvider);

        // TODO [BJA-109] Note: PSSParameterSpec requires JDK1.4+
        /*
        try
        {
            DERObjectIdentifier sigAlgOID = encryptionAlgorithm.getObjectId();
            DEREncodable sigParams = this.encryptionAlgorithm.getParameters();
            if (sigAlgOID.equals(PKCSObjectIdentifiers.id_RSASSA_PSS))
            {
                // RFC 4056
                // When the id-RSASSA-PSS algorithm identifier is used for a signature,
                // the AlgorithmIdentifier parameters field MUST contain RSASSA-PSS-params.
                if (sigParams == null)
                {
                    throw new CMSException(
                        "RSASSA-PSS signature must specify algorithm parameters");
                }

                AlgorithmParameters params = AlgorithmParameters.getInstance(
                    sigAlgOID.getId(), sig.getProvider().getName());
                params.init(sigParams.getDERObject().getEncoded(), "ASN.1");

                PSSParameterSpec spec = (PSSParameterSpec)params.getParameterSpec(PSSParameterSpec.class);
                sig.setParameter(spec);
            }
            else
            {
                // TODO Are there other signature algorithms that provide parameters?
                if (sigParams != null)
                {
                    throw new CMSException("unrecognised signature parameters provided");
                }
            }
        }
        catch (IOException e)
        {
            throw new CMSException("error encoding signature parameters.", e);
        }
        catch (InvalidAlgorithmParameterException e)
        {
            throw new CMSException("error setting signature parameters.", e);
        }
        catch (InvalidParameterSpecException e)
        {
            throw new CMSException("error processing signature parameters.", e);
        }
        */

        // Step 1: compute the content digest (either via the supplied
        // calculator or by digesting the content directly).
        try
        {
            if (digestCalculator != null)
            {
                resultDigest = digestCalculator.getDigest();
            }
            else
            {
                if (content != null)
                {
                    content.write(new DigOutputStream(digest));
                }
                else if (signedAttributeSet == null)
                {
                    // TODO Get rid of this exception and just treat content==null as empty not missing?
                    throw new CMSException("data not encapsulated in signature - use detached constructor.");
                }

                resultDigest = digest.digest();
            }
        }
        catch (IOException e)
        {
            throw new CMSException("can't process mime object to create signature.", e);
        }

        // TODO Shouldn't be using attribute OID as contentType (should be null)
        boolean isCounterSignature = contentType.equals(
            CMSAttributes.counterSignature);

        // RFC 3852 11.1 Check the content-type attribute is correct
        {
            DERObject validContentType = getSingleValuedSignedAttribute(
                CMSAttributes.contentType, "content-type");
            if (validContentType == null)
            {
                if (!isCounterSignature && signedAttributeSet != null)
                {
                    throw new CMSException("The content-type attribute type MUST be present whenever signed attributes are present in signed-data");
                }
            }
            else
            {
                if (isCounterSignature)
                {
                    throw new CMSException("[For counter signatures,] the signedAttributes field MUST NOT contain a content-type attribute");
                }

                if (!(validContentType instanceof DERObjectIdentifier))
                {
                    throw new CMSException("content-type attribute value not of ASN.1 type 'OBJECT IDENTIFIER'");
                }

                DERObjectIdentifier signedContentType = (DERObjectIdentifier)validContentType;

                if (!signedContentType.equals(contentType))
                {
                    throw new CMSException("content-type attribute value does not match eContentType");
                }
            }
        }

        // RFC 3852 11.2 Check the message-digest attribute is correct
        {
            DERObject validMessageDigest = getSingleValuedSignedAttribute(
                CMSAttributes.messageDigest, "message-digest");
            if (validMessageDigest == null)
            {
                if (signedAttributeSet != null)
                {
                    throw new CMSException("the message-digest signed attribute type MUST be present when there are any signed attributes present");
                }
            }
            else
            {
                if (!(validMessageDigest instanceof ASN1OctetString))
                {
                    throw new CMSException("message-digest attribute value not of ASN.1 type 'OCTET STRING'");
                }

                ASN1OctetString signedMessageDigest = (ASN1OctetString)validMessageDigest;

                // constant-time compare to avoid timing side channels
                if (!Arrays.constantTimeAreEqual(resultDigest, signedMessageDigest.getOctets()))
                {
                    throw new CMSSignerDigestMismatchException("message-digest attribute value does not match calculated value");
                }
            }
        }

        // RFC 3852 11.4 Validate countersignature attribute(s)
        {
            AttributeTable signedAttrTable = this.getSignedAttributes();
            if (signedAttrTable != null
                && signedAttrTable.getAll(CMSAttributes.counterSignature).size() > 0)
            {
                throw new CMSException("A countersignature attribute MUST NOT be a signed attribute");
            }

            AttributeTable unsignedAttrTable = this.getUnsignedAttributes();
            if (unsignedAttrTable != null)
            {
                ASN1EncodableVector csAttrs = unsignedAttrTable.getAll(CMSAttributes.counterSignature);
                for (int i = 0; i < csAttrs.size(); ++i)
                {
                    Attribute csAttr = (Attribute)csAttrs.get(i);
                    if (csAttr.getAttrValues().size() < 1)
                    {
                        throw new CMSException("A countersignature attribute MUST contain at least one AttributeValue");
                    }

                    // Note: We don't recursively validate the countersignature value
                }
            }
        }

        // Step 2: verify the signature itself. The signed data is either the
        // DER-encoded signed attributes or the content digest/bytes directly.
        try
        {
            sig.initVerify(key);

            if (signedAttributeSet == null)
            {
                if (digestCalculator != null)
                {
                    // need to decrypt signature and check message bytes
                    return verifyDigest(resultDigest, key, this.getSignature(), sigProvider);
                }
                else if (content != null)
                {
                    // TODO Use raw signature of the hash value instead
                    content.write(new SigOutputStream(sig));
                }
            }
            else
            {
                sig.update(this.getEncodedSignedAttributes());
            }

            return sig.verify(this.getSignature());
        }
        catch (InvalidKeyException e)
        {
            throw new CMSException("key not appropriate to signature in message.", e);
        }
        catch (IOException e)
        {
            throw new CMSException("can't process mime object to create signature.", e);
        }
        catch (SignatureException e)
        {
            throw new CMSException("invalid signature format in message: " + e.getMessage(), e);
        }
    }

    // Operator-based verification path; mirrors the structure of the
    // deprecated provider-based doVerify above.
    private boolean doVerify(
        SignerInformationVerifier verifier)
        throws CMSException
    {
        String          digestName = CMSSignedHelper.INSTANCE.getDigestAlgName(this.getDigestAlgOID());
        String          encName = CMSSignedHelper.INSTANCE.getEncryptionAlgName(this.getEncryptionAlgOID());
        String          signatureName = digestName + "with" + encName;

        // Step 1: compute the content digest.
        try
        {
            if (digestCalculator != null)
            {
                resultDigest = digestCalculator.getDigest();
            }
            else
            {
                DigestCalculator calc = verifier.getDigestCalculator(this.getDigestAlgorithmID());
                if (content != null)
                {
                    OutputStream      digOut = calc.getOutputStream();

                    content.write(digOut);

                    digOut.close();
                }
                else if (signedAttributeSet == null)
                {
                    // TODO Get rid of this exception and just treat content==null as empty not missing?
                    throw new CMSException("data not encapsulated in signature - use detached constructor.");
                }

                resultDigest = calc.getDigest();
            }
        }
        catch (IOException e)
        {
            throw new CMSException("can't process mime object to create signature.", e);
        }
        catch (NoSuchAlgorithmException e)
        {
            throw new CMSException("can't find algorithm: " + e.getMessage(), e);
        }
        catch (OperatorCreationException e)
        {
            throw new CMSException("can't create digest calculator: " + e.getMessage(), e);
        }

        // TODO Shouldn't be using attribute OID as contentType (should be null)
        boolean isCounterSignature = contentType.equals(CMSAttributes.counterSignature);

        // RFC 3852 11.1 Check the content-type attribute is correct
        {
            DERObject validContentType = getSingleValuedSignedAttribute(
                CMSAttributes.contentType, "content-type");
            if (validContentType == null)
            {
                if (!isCounterSignature && signedAttributeSet != null)
                {
                    throw new CMSException("The content-type attribute type MUST be present whenever signed attributes are present in signed-data");
                }
            }
            else
            {
                if (isCounterSignature)
                {
                    throw new CMSException("[For counter signatures,] the signedAttributes field MUST NOT contain a content-type attribute");
                }

                if (!(validContentType instanceof DERObjectIdentifier))
                {
                    throw new CMSException("content-type attribute value not of ASN.1 type 'OBJECT IDENTIFIER'");
                }

                DERObjectIdentifier signedContentType = (DERObjectIdentifier)validContentType;

                if (!signedContentType.equals(contentType))
                {
                    throw new CMSException("content-type attribute value does not match eContentType");
                }
            }
        }

        // RFC 3852 11.2 Check the message-digest attribute is correct
        {
            DERObject validMessageDigest = getSingleValuedSignedAttribute(
                CMSAttributes.messageDigest, "message-digest");
            if (validMessageDigest == null)
            {
                if (signedAttributeSet != null)
                {
                    throw new CMSException("the message-digest signed attribute type MUST be present when there are any signed attributes present");
                }
            }
            else
            {
                if (!(validMessageDigest instanceof ASN1OctetString))
                {
                    throw new CMSException("message-digest attribute value not of ASN.1 type 'OCTET STRING'");
                }

                ASN1OctetString signedMessageDigest = (ASN1OctetString)validMessageDigest;

                // constant-time compare to avoid timing side channels
                if (!Arrays.constantTimeAreEqual(resultDigest, signedMessageDigest.getOctets()))
                {
                    throw new CMSSignerDigestMismatchException("message-digest attribute value does not match calculated value");
                }
            }
        }

        // RFC 3852 11.4 Validate countersignature attribute(s)
        {
            AttributeTable signedAttrTable = this.getSignedAttributes();
            if (signedAttrTable != null
                && signedAttrTable.getAll(CMSAttributes.counterSignature).size() > 0)
            {
                throw new CMSException("A countersignature attribute MUST NOT be a signed attribute");
            }

            AttributeTable unsignedAttrTable = this.getUnsignedAttributes();
            if (unsignedAttrTable != null)
            {
                ASN1EncodableVector csAttrs = unsignedAttrTable.getAll(CMSAttributes.counterSignature);
                for (int i = 0; i < csAttrs.size(); ++i)
                {
                    Attribute csAttr = (Attribute)csAttrs.get(i);
                    if (csAttr.getAttrValues().size() < 1)
                    {
                        throw new CMSException("A countersignature attribute MUST contain at least one AttributeValue");
                    }

                    // Note: We don't recursively validate the countersignature value
                }
            }
        }

        // Step 2: verify the signature via the operator's ContentVerifier.
        try
        {
            ContentVerifier contentVerifier = verifier.getContentVerifier(sigAlgFinder.find(signatureName));
            OutputStream sigOut = contentVerifier.getOutputStream();

            if (signedAttributeSet == null)
            {
                if (digestCalculator != null)
                {
                    if (contentVerifier instanceof RawContentVerifier)
                    {
                        RawContentVerifier rawVerifier = (RawContentVerifier)contentVerifier;

                        if (encName.equals("RSA"))
                        {
                            // RSA raw verification works on the DER-encoded DigestInfo.
                            DigestInfo digInfo = new DigestInfo(digestAlgorithm, resultDigest);

                            return rawVerifier.verify(digInfo.getDEREncoded(), this.getSignature());
                        }

                        return rawVerifier.verify(resultDigest, this.getSignature());
                    }

                    throw new CMSException("verifier unable to process raw signature");
                }
                else if (content != null)
                {
                    // TODO Use raw signature of the hash value instead
                    content.write(sigOut);
                }
            }
            else
            {
                sigOut.write(this.getEncodedSignedAttributes());
            }

            sigOut.close();

            return contentVerifier.verify(this.getSignature());
        }
        catch (IOException e)
        {
            throw new CMSException("can't process mime object to create signature.", e);
        }
        catch (OperatorCreationException e)
        {
            throw new CMSException("can't create content verifier: " + e.getMessage(), e);
        }
    }

    // A missing parameter field and an explicit ASN.1 NULL are treated alike.
    private boolean isNull(
        DEREncodable    o)
    {
        return (o instanceof ASN1Null) || (o == null);
    }

    // Decode a raw RSA signature payload into a DigestInfo structure.
    private DigestInfo derDecode(
        byte[]  encoding)
        throws IOException, CMSException
    {
        if (encoding[0] != (DERTags.CONSTRUCTED | DERTags.SEQUENCE))
        {
            throw new IOException("not a digest info object");
        }

        ASN1InputStream         aIn = new ASN1InputStream(encoding);

        DigestInfo digInfo = new DigestInfo((ASN1Sequence)aIn.readObject());

        // length check to avoid Bleichenbacher vulnerability
        if (digInfo.getEncoded().length != encoding.length)
        {
            throw new CMSException("malformed RSA signature");
        }

        return digInfo;
    }

    /**
     * Raw verification of a pre-computed digest against the signature bytes.
     * @deprecated
     */
    private boolean verifyDigest(
        byte[]    digest,
        PublicKey key,
        byte[]    signature,
        Provider  sigProvider)
        throws NoSuchAlgorithmException, CMSException
    {
        String encName = CMSSignedHelper.INSTANCE.getEncryptionAlgName(this.getEncryptionAlgOID());

        try
        {
            if (encName.equals("RSA"))
            {
                // "Decrypt" the signature and compare the recovered DigestInfo
                // against our locally-computed digest.
                Cipher c = CMSEnvelopedHelper.INSTANCE.createAsymmetricCipher("RSA/ECB/PKCS1Padding", sigProvider);

                c.init(Cipher.DECRYPT_MODE, key);

                DigestInfo digInfo = derDecode(c.doFinal(signature));

                if (!digInfo.getAlgorithmId().getObjectId().equals(digestAlgorithm.getObjectId()))
                {
                    return false;
                }

                if (!isNull(digInfo.getAlgorithmId().getParameters()))
                {
                    return false;
                }

                byte[]  sigHash = digInfo.getDigest();

                // constant-time compare to avoid timing side channels
                return Arrays.constantTimeAreEqual(digest, sigHash);
            }
            else if (encName.equals("DSA"))
            {
                Signature sig = CMSSignedHelper.INSTANCE.getSignatureInstance("NONEwithDSA", sigProvider);

                sig.initVerify(key);

                sig.update(digest);

                return sig.verify(signature);
            }
            else
            {
                throw new CMSException("algorithm: " + encName + " not supported in base signatures.");
            }
        }
        catch (GeneralSecurityException e)
        {
            throw new CMSException("Exception processing signature: " + e, e);
        }
        catch (IOException e)
        {
            throw new CMSException("Exception decoding signature: " + e, e);
        }
    }

//    private boolean verifyDigest(
//        byte[]    digest,
//        PublicKey key,
//        byte[]    signature,
//        Provider  sigProvider)
//        throws NoSuchAlgorithmException, CMSException
//        String encName = CMSSignedHelper.INSTANCE.getEncryptionAlgName(this.getEncryptionAlgOID());
//        String digestName = CMSSignedHelper.INSTANCE.getDigestAlgName(this.getDigestAlgOID());
//        String signatureName = digestName + "with" + encName;
//        try
//            byte[] bytesToSign = digest;
//            Signature sig;
//            if (encName.equals("RSA"))
//                bytesToSign = RSADigestSigner.encodeDERSig(digestAlgorithm.getObjectId(), digest);
//                sig = CMSSignedHelper.INSTANCE.getSignatureInstance("NONEwithRSA", sigProvider);
//            else if (encName.equals("DSA"))
//                sig = CMSSignedHelper.INSTANCE.getSignatureInstance("NONEwithDSA", sigProvider);
//            else if (encName.equals("RSAandMGF1"))
//                sig = CMSSignedHelper.INSTANCE.getSignatureInstance("NONEWITHRSAPSS", sigProvider);
//                try
//                    // Init the params this way to avoid having a 'raw' version of each PSS algorithm
//                    Signature sig2 = CMSSignedHelper.INSTANCE.getSignatureInstance(signatureName, sigProvider);
//                    PSSParameterSpec spec = (PSSParameterSpec)sig2.getParameters().getParameterSpec(PSSParameterSpec.class);
//                    sig.setParameter(spec);
//                catch (Exception e)
//                    throw new CMSException("algorithm: " + encName + " could not be configured.");
//            else
//                throw new CMSException("algorithm: " + encName + " not supported in base signatures.");
//            sig.initVerify(key);
//            sig.update(bytesToSign);
//            return sig.verify(signature);
//        catch (GeneralSecurityException e)
//            throw new CMSException("Exception processing signature: " + e, e);

    /**
     * verify that the given public key successfully handles and confirms the
     * signature associated with this signer.
     * @deprecated use verify(ContentVerifierProvider)
     */
    public boolean verify(
        PublicKey   key,
        String      sigProvider)
        throws NoSuchAlgorithmException, NoSuchProviderException, CMSException
    {
        return verify(key, CMSUtils.getProvider(sigProvider));
    }

    /**
     * verify that the given public key successfully handles and confirms the
     * signature associated with this signer
     * @deprecated use verify(ContentVerifierProvider)
     */
    public boolean verify(
        PublicKey   key,
        Provider    sigProvider)
        throws NoSuchAlgorithmException, NoSuchProviderException, CMSException
    {
        // Optional, but still need to validate if present
        getSigningTime();

        return doVerify(key, sigProvider);
    }

    /**
     * verify that the given certificate successfully handles and confirms
     * the signature associated with this signer and, if a signingTime
     * attribute is available, that the certificate was valid at the time the
     * signature was generated.
     * @deprecated use verify(ContentVerifierProvider)
     */
    public boolean verify(
        X509Certificate cert,
        String          sigProvider)
        throws NoSuchAlgorithmException, NoSuchProviderException,
            CertificateExpiredException, CertificateNotYetValidException,
            CMSException
    {
        return verify(cert, CMSUtils.getProvider(sigProvider));
    }

    /**
     * verify that the given certificate successfully handles and confirms
     * the signature associated with this signer and, if a signingTime
     * attribute is available, that the certificate was valid at the time the
     * signature was generated.
     * @deprecated use verify(ContentVerifierProvider)
     */
    public boolean verify(
        X509Certificate cert,
        Provider        sigProvider)
        throws NoSuchAlgorithmException,
            CertificateExpiredException, CertificateNotYetValidException,
            CMSException
    {
        Time signingTime = getSigningTime();
        if (signingTime != null)
        {
            // reject certificates that were not valid when the signature was made
            cert.checkValidity(signingTime.getDate());
        }

        return doVerify(cert.getPublicKey(), sigProvider);
    }

    /**
     * Verify that the given verifier can successfully verify the signature on
     * this SignerInformation object.
     *
     * @param verifier a suitably configured SignerInformationVerifier.
     * @return true if the signer information is verified, false otherwise.
     * @throws org.bouncycastle.cms.CMSVerifierCertificateNotValidException if the provider has an associated certificate and the certificate is not valid at the time given as the SignerInfo's signing time.
     * @throws org.bouncycastle.cms.CMSException if the verifier is unable to create a ContentVerifiers or DigestCalculators.
     */
    public boolean verify(SignerInformationVerifier verifier)
        throws CMSException
    {
        Time signingTime = getSigningTime();   // has to be validated if present.

        if (verifier.hasAssociatedCertificate())
        {
            if (signingTime != null)
            {
                X509CertificateHolder dcv = verifier.getAssociatedCertificate();

                if (!dcv.isValidOn(signingTime.getDate()))
                {
                    throw new CMSVerifierCertificateNotValidException("verifier not valid at signingTime");
                }
            }
        }

        return doVerify(verifier);
    }

    /**
     * Return the base ASN.1 CMS structure that this object contains.
     *
     * @return an object containing a CMS SignerInfo structure.
     * @deprecated use toASN1Structure()
     */
    public SignerInfo toSignerInfo()
    {
        return info;
    }

    /**
     * Return the underlying ASN.1 object defining this SignerInformation object.
     *
     * @return a SignerInfo.
     */
    public SignerInfo toASN1Structure()
    {
        return info;
    }

    // Fetches a signed attribute that RFC 3852 requires to be single-valued,
    // enforcing the MUST NOT / MUST have exactly one value rules along the way.
    private DERObject getSingleValuedSignedAttribute(
        DERObjectIdentifier attrOID, String printableName)
        throws CMSException
    {
        AttributeTable unsignedAttrTable = this.getUnsignedAttributes();
        if (unsignedAttrTable != null
            && unsignedAttrTable.getAll(attrOID).size() > 0)
        {
            throw new CMSException("The " + printableName
                + " attribute MUST NOT be an unsigned attribute");
        }

        AttributeTable signedAttrTable = this.getSignedAttributes();
        if (signedAttrTable == null)
        {
            return null;
        }

        ASN1EncodableVector v = signedAttrTable.getAll(attrOID);
        switch (v.size())
        {
            case 0:
                return null;
            case 1:
            {
                Attribute t = (Attribute)v.get(0);
                ASN1Set attrValues = t.getAttrValues();
                if (attrValues.size() != 1)
                {
                    throw new CMSException("A " + printableName
                        + " attribute MUST have a single attribute value");
                }

                return attrValues.getObjectAt(0).getDERObject();
            }
            default:
                throw new CMSException("The SignedAttributes in a signerInfo MUST NOT include multiple instances of the "
                    + printableName + " attribute");
        }
    }

    // Returns the signing-time signed attribute as a Time, or null if absent.
    private Time getSigningTime() throws CMSException
    {
        DERObject validSigningTime = getSingleValuedSignedAttribute(
            CMSAttributes.signingTime, "signing-time");

        if (validSigningTime == null)
        {
            return null;
        }

        try
        {
            return Time.getInstance(validSigningTime);
        }
        catch (IllegalArgumentException e)
        {
            throw new CMSException("signing-time attribute value not a valid 'Time' structure");
        }
    }

    /**
     * Return a signer information object with the passed in unsigned
     * attributes replacing the ones that are current associated with
     * the object passed in.
     *
     * @param signerInformation the signerInfo to be used as the basis.
     * @param unsignedAttributes the unsigned attributes to add.
     * @return a copy of the original SignerInformationObject with the changed attributes.
     */
    public static SignerInformation replaceUnsignedAttributes(
        SignerInformation   signerInformation,
        AttributeTable      unsignedAttributes)
    {
        SignerInfo  sInfo = signerInformation.info;
        ASN1Set     unsignedAttr = null;

        if (unsignedAttributes != null)
        {
            unsignedAttr = new DERSet(unsignedAttributes.toASN1EncodableVector());
        }

        // Rebuild the SignerInfo with every field untouched except the
        // unsigned attributes; the original object is not mutated.
        return new SignerInformation(
            new SignerInfo(sInfo.getSID(), sInfo.getDigestAlgorithm(),
                sInfo.getAuthenticatedAttributes(), sInfo.getDigestEncryptionAlgorithm(), sInfo.getEncryptedDigest(), unsignedAttr),
            signerInformation.contentType, signerInformation.content, null, new DefaultSignatureAlgorithmIdentifierFinder());
    }

    /**
     * Return a signer information object with passed in SignerInformationStore representing counter
     * signatures attached as an unsigned attribute.
     *
     * @param signerInformation the signerInfo to be used as the basis.
     * @param counterSigners signer info objects carrying counter signature.
     * @return a copy of the original SignerInformationObject with the changed attributes.
     */
    public static SignerInformation addCounterSigners(
        SignerInformation        signerInformation,
        SignerInformationStore   counterSigners)
    {
        // TODO Perform checks from RFC 3852 11.4

        SignerInfo          sInfo = signerInformation.info;
        AttributeTable      unsignedAttr = signerInformation.getUnsignedAttributes();
        ASN1EncodableVector v;

        if (unsignedAttr != null)
        {
            v = unsignedAttr.toASN1EncodableVector();
        }
        else
        {
            v = new ASN1EncodableVector();
        }

        ASN1EncodableVector sigs = new ASN1EncodableVector();

        for (Iterator it = counterSigners.getSigners().iterator(); it.hasNext();)
        {
            sigs.add(((SignerInformation)it.next()).toSignerInfo());
        }

        v.add(new Attribute(CMSAttributes.counterSignature, new DERSet(sigs)));

        return new SignerInformation(
            new SignerInfo(sInfo.getSID(), sInfo.getDigestAlgorithm(),
                sInfo.getAuthenticatedAttributes(), sInfo.getDigestEncryptionAlgorithm(), sInfo.getEncryptedDigest(), new DERSet(v)),
            signerInformation.contentType, signerInformation.content, null, new DefaultSignatureAlgorithmIdentifierFinder());
    }
}
package org.bouncycastle.tsp;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.security.MessageDigest;

import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.tsp.TimeStampResp;
import org.bouncycastle.asn1.cmp.PKIFailureInfo;
import org.bouncycastle.asn1.cmp.PKIFreeText;
import org.bouncycastle.asn1.cmp.PKIStatus;
import org.bouncycastle.asn1.ASN1InputStream;
import org.bouncycastle.asn1.cms.Attribute;

/**
 * Base class for an RFC 3161 Time Stamp Response object.
 */
public class TimeStampResponse
{
    TimeStampResp resp;
    TimeStampToken timeStampToken;

    /**
     * Create a TimeStampResponse from an already-parsed ASN.1 structure.
     * If the response carries a time stamp token it is parsed eagerly.
     *
     * @param resp the parsed TimeStampResp structure.
     * @throws TSPException if the contained token is malformed.
     * @throws IOException on an encoding problem in the token.
     */
    public TimeStampResponse(TimeStampResp resp)
        throws TSPException, IOException
    {
        this.resp = resp;

        if (resp.getTimeStampToken() != null)
        {
            timeStampToken = new TimeStampToken(resp.getTimeStampToken());
        }
    }

    /**
     * Create a TimeStampResponse from a byte array containing an ASN.1 encoding.
     *
     * @param resp the byte array containing the encoded response.
     * @throws TSPException if the response is malformed.
     * @throws IOException if the byte array doesn't represent an ASN.1 encoding.
     */
    public TimeStampResponse(byte[] resp)
        throws TSPException, IOException
    {
        this(new ByteArrayInputStream(resp));
    }

    /**
     * Create a TimeStampResponse from an input stream containing an ASN.1 encoding.
     *
     * @param in the input stream containing the encoded response.
     * @throws TSPException if the response is malformed.
     * @throws IOException if the stream doesn't represent an ASN.1 encoding.
     */
    public TimeStampResponse(InputStream in)
        throws TSPException, IOException
    {
        this(readTimeStampResp(in));
    }

    /**
     * Parse a TimeStampResp from a stream, translating parse failures into
     * TSPException so callers get a protocol-level error rather than a raw
     * runtime exception.
     */
    private static TimeStampResp readTimeStampResp(
        InputStream in)
        throws IOException, TSPException
    {
        try
        {
            return TimeStampResp.getInstance(new ASN1InputStream(in).readObject());
        }
        catch (IllegalArgumentException e)
        {
            throw new TSPException("malformed timestamp response: " + e, e);
        }
        catch (ClassCastException e)
        {
            throw new TSPException("malformed timestamp response: " + e, e);
        }
    }

    /**
     * @return the PKIStatus value of the response (e.g. GRANTED, REJECTION).
     */
    public int getStatus()
    {
        return resp.getStatus().getStatus().intValue();
    }

    /**
     * @return the concatenation of all free-text status strings in the
     *         response, or null if none were supplied.
     */
    public String getStatusString()
    {
        if (resp.getStatus().getStatusString() != null)
        {
            StringBuilder statusStringBuf = new StringBuilder();
            PKIFreeText text = resp.getStatus().getStatusString();
            for (int i = 0; i != text.size(); i++)
            {
                statusStringBuf.append(text.getStringAt(i).getString());
            }
            return statusStringBuf.toString();
        }
        else
        {
            return null;
        }
    }

    /**
     * @return the failure info of the response, or null if none present.
     */
    public PKIFailureInfo getFailInfo()
    {
        if (resp.getStatus().getFailInfo() != null)
        {
            return new PKIFailureInfo(resp.getStatus().getFailInfo());
        }

        return null;
    }

    /**
     * @return the time stamp token carried by this response, or null.
     */
    public TimeStampToken getTimeStampToken()
    {
        return timeStampToken;
    }

    /**
     * Check this response against to see if it a well formed response for
     * the passed in request. Validation will include checking the time stamp
     * token if the response status is GRANTED or GRANTED_WITH_MODS.
     *
     * @param request the request to be checked against
     * @throws TSPException if the request can not match this response.
     */
    public void validate(
        TimeStampRequest request)
        throws TSPException
    {
        TimeStampToken tok = this.getTimeStampToken();

        if (tok != null)
        {
            TimeStampTokenInfo tstInfo = tok.getTimeStampInfo();

            // The token must echo the nonce from the request, if one was sent.
            if (request.getNonce() != null && !request.getNonce().equals(tstInfo.getNonce()))
            {
                throw new TSPValidationException("response contains wrong nonce value.");
            }

            // A token may only appear on a successful status.
            if (this.getStatus() != PKIStatus.GRANTED && this.getStatus() != PKIStatus.GRANTED_WITH_MODS)
            {
                throw new TSPValidationException("time stamp token found in failed request.");
            }

            // Constant-time digest comparison of the message imprint.
            if (!MessageDigest.isEqual(request.getMessageImprintDigest(), tstInfo.getMessageImprintDigest()))
            {
                throw new TSPValidationException("response for different message imprint digest.");
            }

            if (!tstInfo.getMessageImprintAlgOID().equals(request.getMessageImprintAlgOID()))
            {
                throw new TSPValidationException("response for different message imprint algorithm.");
            }

            Attribute scV1 = tok.getSignedAttributes().get(PKCSObjectIdentifiers.id_aa_signingCertificate);
            Attribute scV2 = tok.getSignedAttributes().get(PKCSObjectIdentifiers.id_aa_signingCertificateV2);

            // RFC 3161 requires a signing certificate attribute (either the
            // ESS v1 or v2 form). The original clause also ANDed in
            // !(scV1 != null && scV2 != null), which is always true whenever
            // both are null, so it has been dropped as redundant.
            if (scV1 == null && scV2 == null)
            {
                throw new TSPValidationException("no signing certificate attribute present.");
            }

            if (request.getReqPolicy() != null && !request.getReqPolicy().equals(tstInfo.getPolicy()))
            {
                throw new TSPValidationException("TSA policy wrong for request.");
            }
        }
        else if (this.getStatus() == PKIStatus.GRANTED || this.getStatus() == PKIStatus.GRANTED_WITH_MODS)
        {
            // A successful status with no token is malformed.
            throw new TSPValidationException("no time stamp token found and one expected.");
        }
    }

    /**
     * return the ASN.1 encoded representation of this object.
     */
    public byte[] getEncoded() throws IOException
    {
        return resp.getEncoded();
    }
}
package org.commcare.android.models;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Hashtable;

import org.commcare.android.application.CommCareApplication;
import org.commcare.android.database.EncryptedModel;
import org.commcare.android.util.AndroidCommCarePlatform;
import org.commcare.util.CommCareSession;
import org.javarosa.core.services.storage.IMetaData;
import org.javarosa.core.services.storage.Persistable;
import org.javarosa.core.util.externalizable.DeserializationException;
import org.javarosa.core.util.externalizable.ExtUtil;
import org.javarosa.core.util.externalizable.PrototypeFactory;

/**
 * A persisted record of a form entry: the form's namespace, the file path of
 * the instance, an encoded "entity" string tying it to a case/referral, its
 * lifecycle status, and the AES key used to encrypt the instance blob.
 *
 * @author ctsims
 */
public class FormRecord implements Persistable, IMetaData, EncryptedModel {

    public static final String STORAGE_KEY = "FORMRECORDS";

    // Metadata field names used by the storage layer's indexed lookups.
    public static final String META_XMLNS = "XMLNS";
    public static final String META_PATH = "PATH";
    public static final String META_ENTITY_ID = "ENTITYID";
    public static final String META_STATUS = "STATUS";

    // Lifecycle statuses a record moves through.
    public static final String STATUS_UNSENT = "unsent";
    public static final String STATUS_INCOMPLETE = "incomplete";
    public static final String STATUS_COMPLETE = "complete";
    public static final String STATUS_UNSTARTED = "unstarted";

    // -1 until the storage layer assigns a record id via setID().
    private int id = -1;
    private String status;
    private String path;
    private String xmlns;
    // Encoded entity reference; see decache() for the supported formats.
    private String entity;
    private byte[] aesKey;

    /**
     * Externalization-only constructor; fields are filled by readExternal().
     */
    public FormRecord() {
    }

    /**
     * Creates a record of a form entry with the provided data. Note that none
     * of the parameters can be null...
     *
     * @param xmlns
     * @param path
     * @param entityId
     * @param status
     */
    public FormRecord(String xmlns, String path, String entityId, String status, byte[] aesKey) {
        this.xmlns = xmlns;
        this.path = path;
        this.entity = entityId;
        this.status = status;
        this.aesKey = aesKey;
    }

    /* (non-Javadoc)
     * @see org.javarosa.core.services.storage.Persistable#getID()
     */
    public int getID() {
        return id;
    }

    /* (non-Javadoc)
     * @see org.javarosa.core.services.storage.Persistable#setID(int)
     */
    public void setID(int ID) {
        id = ID;
    }

    public String getPath() {
        return path;
    }

    public byte[] getAesKey() {
        return aesKey;
    }

    public String getEntityId() {
        return entity;
    }

    public String getStatus() {
        return status;
    }

    public String getFormNamespace() {
        return xmlns;
    }

    /* (non-Javadoc)
     * @see org.javarosa.core.util.externalizable.Externalizable#readExternal(java.io.DataInputStream, org.javarosa.core.util.externalizable.PrototypeFactory)
     */
    public void readExternal(DataInputStream in, PrototypeFactory pf) throws IOException, DeserializationException {
        // Field order must mirror writeExternal() exactly.
        id = (int)ExtUtil.readNumeric(in);
        xmlns = ExtUtil.readString(in);
        path = ExtUtil.readString(in);
        entity = ExtUtil.readString(in);
        status = ExtUtil.readString(in);
        aesKey = ExtUtil.readBytes(in);
    }

    /* (non-Javadoc)
     * @see org.javarosa.core.util.externalizable.Externalizable#writeExternal(java.io.DataOutputStream)
     */
    public void writeExternal(DataOutputStream out) throws IOException {
        // Field order must mirror readExternal() exactly.
        ExtUtil.writeNumeric(out, id);
        ExtUtil.writeString(out, xmlns);
        ExtUtil.writeString(out, path);
        ExtUtil.writeString(out, entity);
        ExtUtil.writeString(out, status);
        ExtUtil.writeBytes(out, aesKey);
    }

    /* (non-Javadoc)
     * @see org.javarosa.core.services.storage.IMetaData#getMetaData()
     */
    public Hashtable getMetaData() {
        // Builds the full metadata table from the per-field accessor;
        // null-valued fields are omitted from the table.
        Hashtable h = new Hashtable();
        String[] fields = getMetaDataFields();
        for (int i = 0; i < fields.length; i++) {
            String field = fields[i];
            Object value = getMetaData(field);
            if (value != null) {
                h.put(field, value);
            }
        }
        return h;
    }

    /* (non-Javadoc)
     * @see org.javarosa.core.services.storage.IMetaData#getMetaData(java.lang.String)
     */
    public Object getMetaData(String fieldName) {
        if(fieldName.equals(META_XMLNS)) {
            return xmlns;
        } else if(fieldName.equals(META_PATH)) {
            return path;
        } else if(fieldName.equals(META_ENTITY_ID)) {
            return entity;
        } else if(fieldName.equals(META_STATUS)) {
            return status;
        } else {
            throw new IllegalArgumentException("No metadata field " + fieldName + " in the form record storage system");
        }
    }

    /* (non-Javadoc)
     * @see org.javarosa.core.services.storage.IMetaData#getMetaDataFields()
     */
    public String[] getMetaDataFields() {
        return new String [] {META_XMLNS, META_PATH, META_ENTITY_ID, META_STATUS};
    }

    // Per-field encryption is off; only the blob is encrypted (below).
    public boolean isEncrypted(String data) {
        return false;
    }

    public boolean isBlobEncrypted() {
        return true;
    }

    // Lazily-decoded view of `entity`: [caseId, referralId, referralType].
    String[] cached;

    /**
     * Decode the `entity` string into the cached triple. Supported formats
     * (mirroring generateEntityId()):
     *   - ENTITY_NONE                    -> no linkage
     *   - "case:<caseId>"                -> case linkage only
     *   - "referral<len>:<refid><type>"  -> referral; <len> is the id length,
     *     used to split the id from the type, then the Referral record is
     *     looked up to recover its linked case id
     *   - anything else                  -> pre-DB26 record, entity is the case id
     */
    private void decache() {
        if(AndroidCommCarePlatform.ENTITY_NONE.equals(entity)) {
            cached = new String[] { null, null, null};
        } else if(entity.startsWith("case:")) {
            cached = new String[] { entity.substring("case:".length()), null, null};
        } else if(entity.startsWith("referral")) {
            int c = entity.indexOf(":");
            // Length prefix sits between "referral" and ':'.
            int length = Integer.parseInt(entity.substring("referral".length(), c));
            String refid = entity.substring(c + 1, c + length + 1);
            String type = entity.substring(c + length + 1);
            // NOTE(review): assumes a matching Referral record exists; a
            // missing record would NPE below — confirm against callers.
            Referral r = CommCareApplication._().getStorage(Referral.STORAGE_KEY, Referral.class).
                getRecordForValues(new String[] {Referral.REFERRAL_ID, Referral.REFERRAL_TYPE}, new String[] {refid, type});
            cached = new String[] {r.getLinkedId(), r.getReferralId(), r.getType()};
        } else {
            //Pre DB26 Record
            cached = new String[] {entity, null, null };
        }
    }

    /** @return the linked case id, or null if the record has no case. */
    public String getCaseId() {
        if(cached == null) {
            decache();
        }
        return cached[0];
    }

    /** @return the linked referral id, or null if the record has no referral. */
    public String getReferralId() {
        if(cached == null) {
            decache();
        }
        return cached[1];
    }

    /** @return the linked referral type, or null if the record has no referral. */
    public String getReferralType() {
        if(cached == null) {
            decache();
        }
        return cached[2];
    }

    /**
     * Encode the session's current case/referral selection into the entity
     * string format consumed by decache(). Referral linkage takes priority
     * over case linkage.
     */
    public static String generateEntityId(CommCareSession session) {
        if(session.getReferralId() != null) {
            //referral is primary
            return "referral" + session.getReferralId().length() + ":" + session.getReferralId() + session.getReferralType();
        } else if(session.getCaseId() != null) {
            //case is primary
            return "case:"+session.getCaseId();
        } else {
            return AndroidCommCarePlatform.ENTITY_NONE;
        }
    }
}
package org.concord.data.state;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;

import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JPanel;

import org.concord.data.ui.DataFlowControlToolBar;
import org.concord.data.ui.DataStoreLabel;
import org.concord.data.ui.DataValueLabel;
import org.concord.framework.data.stream.DataProducer;
import org.concord.framework.otrunk.OTObject;
import org.concord.framework.otrunk.view.OTObjectView;
import org.concord.framework.otrunk.view.OTViewContainer;

/**
 * View for a single OTDataField. In read-only mode it shows just the stored
 * value; in editable mode it adds a live value label, a data-flow control
 * toolbar and a Save button that writes the live value back into the store.
 *
 * @author scott
 */
public class OTDataFieldView implements OTObjectView, ActionListener {
    OTDataField otObject;

    protected OTViewContainer viewContainer;

    protected DataValueLabel dataField;

    JButton saveButton = new JButton("Save");

    /**
     * Bind this view to its model object and hosting container.
     */
    public void initialize(OTObject otDataField, OTViewContainer vContainer) {
        this.otObject = (OTDataField) otDataField;
        viewContainer = vContainer;
    }

    /* (non-Javadoc)
     * @see org.concord.framework.otrunk.view.OTObjectView#getComponent(boolean)
     */
    public JComponent getComponent(boolean editable) {
        OTDataStore store = otObject.getDataStore();
        DataStoreLabel storedValueLabel = new DataStoreLabel(store, 0);

        // Read-only: the stored-value label is the whole view.
        if (!editable) {
            return storedValueLabel;
        }

        DataProducer producer = (DataProducer) otObject.getDataProducer();
        dataField = new DataValueLabel(producer);

        // Lay out stored value, live value, toolbar and Save button in a row.
        JPanel row = new JPanel();
        row.setLayout(new BoxLayout(row, BoxLayout.X_AXIS));
        row.add(storedValueLabel);
        row.add(dataField);

        DataFlowControlToolBar flowToolBar = new DataFlowControlToolBar(producer);
        flowToolBar.setFloatable(false);
        row.add(flowToolBar);

        saveButton.addActionListener(this);
        row.add(saveButton);

        dataField.setColumns(7);
        storedValueLabel.setColumns(7);

        return row;
    }

    /* (non-Javadoc)
     * @see org.concord.framework.otrunk.view.OTObjectView#viewClosed()
     */
    public void viewClosed() {
        // nothing to release
    }

    /**
     * Save button pressed: copy the current live value into slot (0, 0)
     * of the data store.
     *
     * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
     */
    public void actionPerformed(ActionEvent e) {
        float liveValue = dataField.getValue();
        otObject.getDataStore().setValueAt(0, 0, new Float(liveValue));
    }

    /**
     * XHTML export is not supported by this view.
     */
    public String getXHTMLText(File folder, int containerDisplayWidth,
            int containerDisplayHeight) {
        return null;
    }
}
package org.exist.security.xacml;

/**
 * This class represents the context from which an access is made.
 */
public final class AccessContext {
    /**
     * The postfix for all internal accesses.
     */
    public static final String INTERNAL = "(internal)";

    /**
     * This represents when access is attempted as a result of a trigger.
     */
    public static final AccessContext TRIGGER = new AccessContext("Trigger");

    /**
     * This represents when access is made through SOAP.
     */
    public static final AccessContext SOAP = new AccessContext("SOAP");

    /**
     * This represents when access is made through XML:DB.
     */
    public static final AccessContext XMLDB = new AccessContext("XML:DB");

    /**
     * This represents when access is made through XSLT
     */
    public static final AccessContext XSLT = new AccessContext("XSLT");

    /**
     * This represents when access is made through XQJ
     */
    public static final AccessContext XQJ = new AccessContext("XQJ");

    /**
     * The context for access through the REST-style interface.
     */
    public static final AccessContext REST = new AccessContext("REST");

    /**
     * The context for remote access over XML-RPC.
     */
    public static final AccessContext XMLRPC = new AccessContext("XML-RPC");

    /**
     * The context for access through WEBDAV
     */
    public static final AccessContext WEBDAV = new AccessContext("WebDAV");

    /**
     * The context for access internally when the access is not made by any of the
     * other contexts. This should only be used if all actions
     * are completely trusted, that is, no user input should be directly included
     * in a query or any similar case.
     */
    public static final AccessContext INTERNAL_PREFIX_LOOKUP = new AccessContext("Prefix lookup " + INTERNAL);

    /**
     * The context for trusted validation queries.
     */
    public static final AccessContext VALIDATION_INTERNAL = new AccessContext("Validation " + INTERNAL);

    /**
     * The context for JUnit tests that directly make access not through the other
     * contexts.
     */
    public static final AccessContext TEST = new AccessContext("JUnit test");

    /**
     * The context for evaluating XInclude paths.
     */
    public static final AccessContext XINCLUDE = new AccessContext("XInclude");

    /**
     * The context for internal access during system initialization.
     */
    public static final AccessContext INITIALIZE = new AccessContext("Initialize " + INTERNAL);

    // Human-readable description of this context; immutable.
    private final String value;

    /**
     * Blocked: contexts must always carry a description.
     */
    private AccessContext() {
        throw new RuntimeException("The empty constructor is not supported.");
    }

    /**
     * @param value non-null, non-empty description of the access context.
     * @throws NullPointerException if value is null.
     * @throws IllegalArgumentException if value is empty.
     */
    private AccessContext(String value) {
        // Fix: the original reported an empty string as "cannot be null" via
        // NullPointerException; distinguish the two failure modes. The
        // constructor is private, so no external callers are affected.
        if (value == null)
            throw new NullPointerException("Access context value cannot be null");
        if (value.length() == 0)
            throw new IllegalArgumentException("Access context value cannot be empty");
        this.value = value;
    }

    public String toString() {
        return value;
    }
}
package org.growingstems.scouting;

import org.frc836.database.DB.SyncCallback;
import org.frc836.database.DBActivity;
import org.frc836.database.DBSyncService.LocalBinder;
import org.growingstems.scouting.MenuSelections.Refreshable;

import android.app.ActionBar;
import android.app.Fragment;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.app.ProgressDialog;
import android.content.ComponentName;
import android.content.Intent;
import android.content.ServiceConnection;
import android.support.v13.app.FragmentPagerAdapter;
import android.os.Bundle;
import android.os.IBinder;
import android.support.v4.view.ViewPager;
import android.util.SparseArray;
import android.view.Menu;
import android.webkit.URLUtil;

/**
 * Tabbed data-browsing activity. Depending on the launch intent it shows
 * either the default tabs (events / teams) or the event-specific tabs
 * (event teams / event matches), each backed by a DataFragment in a
 * ViewPager with matching ActionBar tabs.
 */
public class DataActivity extends DBActivity implements ActionBar.TabListener,
        Refreshable {

    // Progress dialog shown while a refresh/sync is in flight;
    // dismissed in reloadData().
    private ProgressDialog pd;

    // Intent extras selecting which flavor of the activity to show.
    public static final String ACTIVITY_TYPE_STRING = "ACTIVITY_TYPE";
    public static final String EVENT_ARG = "EVENT_NAME";
    public static final String TEAM_ARG = "TEAM_NUM";

    public static final int ACTIVITY_TYPE_DEFAULT = 0;
    public static final int ACTIVITY_TYPE_EVENT = 1;
    public static final int ACTIVITY_TYPE_TEAM = 2;

    protected enum DataType {
        dt_Default, dt_Event, dt_Team
    };

    protected DataType dataType = DataType.dt_Default;
    protected String eventName = null;
    protected int teamNum = -1;

    // Tab-page ids per activity flavor; index == tab position.
    protected static final int[] DEFAULT_TABS = { DataFragment.PT_EVENTS,
            DataFragment.PT_TEAMS };
    protected static final int[] EVENT_TABS = { DataFragment.PT_EVENT_TEAMS,
            DataFragment.PT_EVENT_MATCHES };

    /**
     * The {@link android.support.v4.view.PagerAdapter} that will provide
     * fragments for each of the sections. We use a {@link FragmentPagerAdapter}
     * derivative, which will keep every loaded fragment in memory. If this
     * becomes too memory intensive, it may be best to switch to a
     * {@link android.support.v13.app.FragmentStatePagerAdapter}.
     */
    SectionsPagerAdapter mSectionsPagerAdapter;

    /**
     * The {@link ViewPager} that will host the section contents.
     */
    ViewPager mViewPager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // m_callback is consumed by the DBActivity base when the sync
        // service binds; see ServiceWatcher below.
        m_callback = new ServiceWatcher();
        setContentView(R.layout.activity_data);

        // Decode the launch intent into a DataType (and event/team args).
        Intent intent = getIntent();
        int temp = intent.getIntExtra(ACTIVITY_TYPE_STRING,
                ACTIVITY_TYPE_DEFAULT);
        switch (temp) {
        case ACTIVITY_TYPE_EVENT:
            dataType = DataType.dt_Event;
            eventName = intent.getStringExtra(EVENT_ARG);
            if (eventName != null)
                setTitle(eventName);
            break;
        case ACTIVITY_TYPE_TEAM:
            dataType = DataType.dt_Team;
            teamNum = intent.getIntExtra(TEAM_ARG, -1);
            break;
        case ACTIVITY_TYPE_DEFAULT:
        default:
            dataType = DataType.dt_Default;
            break;
        }

        // Set up the action bar.
        final ActionBar actionBar = getActionBar();
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);

        // Create the adapter that will return a fragment for each of the three
        // primary sections of the activity.
        mSectionsPagerAdapter = new SectionsPagerAdapter(getFragmentManager());

        // Set up the ViewPager with the sections adapter.
        mViewPager = (ViewPager) findViewById(R.id.pager);
        mViewPager.setAdapter(mSectionsPagerAdapter);

        // When swiping between different sections, select the corresponding
        // tab. We can also use ActionBar.Tab#select() to do this if we have
        // a reference to the Tab.
        mViewPager
                .setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
                    @Override
                    public void onPageSelected(int position) {
                        actionBar.setSelectedNavigationItem(position);
                    }
                });

        // For each of the sections in the app, add a tab to the action bar.
        for (int i = 0; i < mSectionsPagerAdapter.getCount(); i++) {
            // Create a tab with text corresponding to the page title defined by
            // the adapter. Also specify this Activity object, which implements
            // the TabListener interface, as the callback (listener) for when
            // this tab is selected.
            actionBar.addTab(actionBar.newTab()
                    .setText(mSectionsPagerAdapter.getPageTitle(i))
                    .setTabListener(this));
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Repopulate every tab whenever the activity returns to the front.
        reloadData();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        super.onCreateOptionsMenu(menu);
        MainMenuSelection.setRefreshItem(menu, R.string.refresh_data);
        return true;
    }

    @Override
    public void onTabSelected(ActionBar.Tab tab,
            FragmentTransaction fragmentTransaction) {
        // When the given tab is selected, switch to the corresponding page in
        // the ViewPager.
        mViewPager.setCurrentItem(tab.getPosition());
    }

    @Override
    public void onTabUnselected(ActionBar.Tab tab,
            FragmentTransaction fragmentTransaction) {
    }

    @Override
    public void onTabReselected(ActionBar.Tab tab,
            FragmentTransaction fragmentTransaction) {
    }

    /**
     * A {@link FragmentPagerAdapter} that returns a fragment corresponding to
     * one of the sections/tabs/pages. Fragments are created lazily and cached
     * by position in a SparseArray.
     */
    public class SectionsPagerAdapter extends FragmentPagerAdapter {

        SparseArray<Fragment> tabs;

        public SectionsPagerAdapter(FragmentManager fm) {
            super(fm);
            tabs = new SparseArray<Fragment>(getCount());
        }

        @Override
        public Fragment getItem(int position) {
            // getItem is called to instantiate the fragment for the given page.
            if (tabs.get(position) == null) {
                int tab;
                switch (dataType) {
                case dt_Event:
                    tab = EVENT_TABS[position];
                    tabs.put(position, DataFragment.newInstance(tab,
                            DataActivity.this, eventName));
                    break;
                case dt_Default:
                default:
                    tab = DEFAULT_TABS[position];
                    tabs.put(position,
                            DataFragment.newInstance(tab, DataActivity.this));
                    break;
                }
            }
            return tabs.get(position);
        }

        @Override
        public int getCount() {
            switch (dataType) {
            case dt_Event:
                return EVENT_TABS.length;
            case dt_Default:
            default:
                return DEFAULT_TABS.length;
            }
        }

        @Override
        public CharSequence getPageTitle(int position) {
            int tab;
            switch (dataType) {
            case dt_Event:
                tab = EVENT_TABS[position];
                break;
            case dt_Default:
            default:
                tab = DEFAULT_TABS[position];
                break;
            }
            return DataFragment.getPageTitle(tab, DataActivity.this);
        }
    }

    /**
     * Kicks off a DB sync as soon as the sync service finishes binding.
     */
    protected class ServiceWatcher implements ServiceConnection {

        public void onServiceConnected(ComponentName name, IBinder service) {
            if (service instanceof LocalBinder) {
                db.startSync(new RefreshCallback(DataActivity.this));
            }
        }

        public void onServiceDisconnected(ComponentName name) {
        }
    }

    @Override
    public void refresh() {
        pd = ProgressDialog.show(this, "Busy", "Refreshing Data", false);
        pd.setCancelable(true);
        // NOTE(review): getScoutingURLNoDefault suggests it may return null
        // when no URL is configured, which would NPE on url.length() below —
        // confirm against Prefs.
        String url = Prefs.getScoutingURLNoDefault(getApplicationContext());
        // Only hit the network when a plausible URL exists and auto-sync is
        // enabled; otherwise just re-render the local data.
        if (url.length() > 1 && URLUtil.isValidUrl(url)
                && Prefs.getAutoSync(getApplicationContext(), false))
            db.startSync(new RefreshCallback(this));
        else
            reloadData();
    }

    /**
     * Bridges the async sync completion back into reloadData().
     */
    private class RefreshCallback implements SyncCallback {

        private DataActivity parent;

        public RefreshCallback(DataActivity parent) {
            this.parent = parent;
        }

        @Override
        public void onFinish() {
            parent.reloadData();
        }
    }

    private void reloadData() {
        // NOTE(review): getItem() may instantiate fragments that are not yet
        // attached to the activity; refreshData() on such a fragment is
        // presumably a no-op — verify in DataFragment.
        for (int i = 0; i < mSectionsPagerAdapter.getCount(); i++) {
            Fragment f = mSectionsPagerAdapter.getItem(i);
            if (f instanceof DataFragment) {
                ((DataFragment) f).refreshData();
            }
        }
        if (pd != null)
            pd.dismiss();
    }
}
package org.helioviewer.jhv.export;

import java.awt.EventQueue;
import java.awt.image.BufferedImage;
import java.io.File;
import java.lang.ref.SoftReference;
import java.util.concurrent.Executors;
import java.util.concurrent.ExecutorService;

import org.helioviewer.jhv.JHVDirectory;
import org.helioviewer.jhv.JHVGlobals;
import org.helioviewer.jhv.base.ImageUtils;
import org.helioviewer.jhv.base.image.MappedImageFactory;
import org.helioviewer.jhv.camera.Camera;
import org.helioviewer.jhv.display.Displayer;
import org.helioviewer.jhv.gui.ImageViewerGui;
import org.helioviewer.jhv.gui.components.MoviePanel;
import org.helioviewer.jhv.gui.components.MoviePanel.RecordMode;
import org.helioviewer.jhv.layers.FrameListener;
import org.helioviewer.jhv.layers.Movie;
import org.helioviewer.jhv.opengl.GLGrab;
import org.helioviewer.jhv.threads.JHVThread;
import org.helioviewer.jhv.time.TimeUtils;

import com.jogamp.opengl.GL2;

/**
 * Singleton driving movie/screenshot export: grabs GL frames, optionally
 * stacks an EVE chart below them, and hands the composed images to a
 * single-threaded encoder executor. Modes (SHOT / LOOP / FREE) come from
 * MoviePanel.RecordMode.
 */
public class ExportMovie implements FrameListener {

    // Active exporter (PNG for SHOT, JCodec MP4 otherwise); null when idle.
    private static MovieExporter exporter;
    // GL framebuffer grabber sized in start().
    private static GLGrab grabber;

    private static RecordMode mode;
    // stopped: recording has been told to end; shallStop: loop-mode latch set
    // on the last frame so the following frameChanged() call stops recording.
    private static boolean stopped;
    private static boolean shallStop;

    // Single worker so frames are encoded strictly in submission order.
    private final ExecutorService encodeExecutor = Executors.newFixedThreadPool(1, new JHVThread.NamedThreadFactory("Movie Encode"));

    // Optional EVE chart pasted below the GL frame; set externally.
    public static BufferedImage EVEImage = null;
    public static int EVEMovieLinePosition = -1;

    /**
     * Tear down the current exporter. With keep=true the close runs on the
     * encode queue (after all pending frames); with keep=false pending work
     * is discarded and the output file deleted (see CloseWriter).
     */
    public void disposeMovieWriter(boolean keep) {
        if (exporter != null) {
            if (keep) {
                encodeExecutor.execute(new CloseWriter(exporter, true));
            } else {
                encodeExecutor.shutdownNow();
                new CloseWriter(exporter, false).run();
            }
            exporter = null;
        }
    }

    // Detach from the GL listener, re-enable the record UI and finalize the
    // writer; called from the GL thread once recording has stopped.
    private void exportMovieFinish(GL2 gl) {
        ImageViewerGui.getGLListener().detachExport();
        MoviePanel.recordPanelSetEnabled(true);

        try {
            grabber.dispose(gl);
            disposeMovieWriter(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Called per rendered frame while exporting: grab the GL frame into a
     * mapped image sized to the full export height (GL pixels fill the top
     * grabber-height rows; FrameConsumer pastes the EVE chart below) and
     * queue it for encoding. A SHOT export stops after one frame.
     */
    public void handleMovieExport(Camera camera, GL2 gl) {
        if (stopped) {
            exportMovieFinish(gl);
            return;
        }

        try {
            BufferedImage screen = MappedImageFactory.createCompatibleMappedImage(grabber.w, exporter.getHeight(), BufferedImage.TYPE_3BYTE_BGR);
            grabber.renderFrame(camera, gl, MappedImageFactory.getByteBuffer(screen));
            encodeExecutor.execute(new FrameConsumer(exporter, screen, EVEImage, EVEMovieLinePosition));
        } catch (Exception e) {
            e.printStackTrace();
        }

        if (mode == RecordMode.SHOT) {
            stop();
        }
    }

    // Encoder-friendly dimensions are rounded down to multiples of this.
    private static final int MACROBLOCK = 8;

    /**
     * Begin an export. Computes canvas/export dimensions (macroblock-aligned
     * except for single screenshots), accounting for the EVE strip height
     * scaled to the canvas width, then opens the appropriate exporter.
     */
    public static void start(int _w, int _h, boolean isInternal, int fps, RecordMode _mode) {
        Movie.startRecording();

        int scrw = 1;
        int scrh = 0;
        if (EVEImage != null) {
            scrw = Math.max(1, EVEImage.getWidth());
            scrh = EVEImage.getHeight();
        }

        mode = _mode;
        int canvasWidth = mode == RecordMode.SHOT ? _w : (_w / MACROBLOCK) * MACROBLOCK;
        // EVE strip height scaled to the canvas width, rounded to nearest.
        int sh = (int) (scrh / (double) scrw * canvasWidth + .5);
        int canvasHeight = isInternal ? _h - sh : _h;
        int exportHeight = mode == RecordMode.SHOT ? canvasHeight + sh : ((canvasHeight + sh) / MACROBLOCK) * MACROBLOCK;

        canvasHeight = exportHeight - sh;

        stopped = false;

        MoviePanel.recordPanelSetEnabled(false);

        grabber = new GLGrab(canvasWidth, canvasHeight);
        ImageViewerGui.getGLListener().attachExport(instance);
        String prefix = JHVDirectory.EXPORTS.getPath() + "JHV_" + TimeUtils.formatFilename(System.currentTimeMillis());
        if (mode == RecordMode.SHOT) {
            try {
                exporter = new PNGExporter();
                exporter.open(prefix + ".png", canvasWidth, exportHeight, fps);
            } catch (Exception e) {
                e.printStackTrace();
            }
            Displayer.render(1);
        } else {
            try {
                exporter = new JCodecExporter();
                exporter.open(prefix + ".mp4", canvasWidth, exportHeight, fps);
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (mode == RecordMode.LOOP) {
                Movie.addFrameListener(instance);
                Movie.setFrame(0);
                Movie.play();
            }
        }
    }

    /**
     * End the export: reset the loop latch, detach listeners, restore the
     * record button, and request one more display pass so the GL side can
     * run exportMovieFinish().
     */
    public static void stop() {
        shallStop = false;
        if (!stopped) {
            stopped = true;

            if (mode == RecordMode.LOOP)
                Movie.removeFrameListener(instance);
            if (mode != RecordMode.FREE)
                MoviePanel.clickRecordButton();
            Displayer.display(); // force detach
        }
        Movie.stopRecording();
    }

    // loop mode only: arm shallStop on the last frame so the movie stops
    // after exactly one complete loop.
    @Override
    public void frameChanged(int frame, boolean last) {
        if (shallStop)
            stop();
        if (last)
            shallStop = true;
    }

    /**
     * Runs on the encode executor: pastes the EVE chart below the GL frame
     * and feeds the composed image to the exporter. The EVE image is
     * deep-copied up front (on the submitting thread) and held via a
     * SoftReference so memory pressure can drop it.
     */
    private static class FrameConsumer implements Runnable {

        private final MovieExporter movieExporter;
        private final BufferedImage mainImage;
        private final SoftReference<BufferedImage> eveRef;
        private final int frameH;
        private final int movieLinePosition;

        FrameConsumer(MovieExporter _movieExporter, BufferedImage _mainImage, BufferedImage _eveImage, int _movieLinePosition) {
            movieExporter = _movieExporter;
            mainImage = _mainImage;
            eveRef = new SoftReference<>(_eveImage == null ? null : ImageUtils.deepCopy(_eveImage));
            frameH = grabber.h;
            movieLinePosition = _movieLinePosition;
        }

        @Override
        public void run() {
            try {
                ExportUtils.pasteCanvases(mainImage, frameH, eveRef.get(), movieLinePosition, movieExporter.getHeight());
                movieExporter.encode(mainImage);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Runs on the encode executor: finalizes the exporter. On failure, or
     * when keep=false, the (partial) output file is deleted. On success a
     * notification with the file path is posted on the EDT.
     */
    private static class CloseWriter implements Runnable {

        private final MovieExporter movieExporter;
        private final boolean keep;

        CloseWriter(MovieExporter _movieExporter, boolean _keep) {
            movieExporter = _movieExporter;
            keep = _keep;
        }

        @Override
        public void run() {
            boolean failed = false;
            try {
                if (keep) {
                    movieExporter.close();
                    EventQueue.invokeLater(() -> JHVGlobals.displayNotification(movieExporter.getPath()));
                }
            } catch (Exception e) {
                e.printStackTrace();
                failed = true;
            }
            if (!keep || failed) {
                File f = new File(movieExporter.getPath());
                f.delete();
            }
            // NOTE(review): explicit gc() presumably to release the mapped
            // image buffers promptly — confirm it is still needed.
            System.gc();
        }
    }

    private static final ExportMovie instance = new ExportMovie();

    private ExportMovie() {
    }

    public static ExportMovie getInstance() {
        return instance;
    }
}
package org.immopoly.appengine;

import java.net.URL;
import java.util.List;
import java.util.Map;

import javax.jdo.PersistenceManager;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.immopoly.common.ImmopolyException;
import org.json.JSONObject;

/**
 * Action handling "portfolio/add": a user takes over an expose (flat).
 * If the expose is already owned by another user, the caller pays a fine
 * (2/30 of the rent) which is credited to the owner; otherwise the expose
 * is fetched from the remote search service and added to the caller's
 * portfolio, again charging the take-over fine. The resulting History
 * entry is written to the response as JSON.
 */
public class ActionExposeAdd extends AbstractAction implements Action {

    protected ActionExposeAdd(Map<String, Action> actions) {
        super(actions);
    }

    @Override
    public String getURI() {
        return "portfolio/add";
    }

    /**
     * @param req must carry the auth TOKEN and the EXPOSE id parameters.
     * @param resp receives the history entry as UTF-8 JSON.
     * @throws ImmopolyException with a specific error code on any failure
     *         (61 missing token, 62 unknown token, 201 already owned by
     *         caller, 301 expose not found, 302 no rent, 101 generic).
     */
    @Override
    public void execute(HttpServletRequest req, HttpServletResponse resp) throws ImmopolyException {
        PersistenceManager pm = PMF.get().getPersistenceManager();
        try {
            String token = req.getParameter(TOKEN);
            String exposeId = req.getParameter(EXPOSE);
            if (null == token || token.length() == 0)
                throw new ImmopolyException("missing token", 61);
            User user = DBManager.getUserByToken(pm, token);
            if (null == user)
                throw new ImmopolyException("token not found " + token, 62);
            History history = null;
            // first check if already owned
            Expose expose = DBManager.getExpose(pm, exposeId);
            if (null != expose) {
                if (expose.getUserId() == user.getId()) {
                    throw new ImmopolyException("gehört dir schon du penner", 201);
                } else {
                    // history eintrag
                    // other user
                    User otherUser = DBManager.getUser(pm, expose.getUserId());
                    // minus 30tel
                    double fine = 2 * expose.getRent() / 30.0;
                    user.setBalance(user.getBalance() - fine);
                    if (null != otherUser)
                        otherUser.setBalance(otherUser.getBalance() + fine);
                    // Fix: guard the name lookup — the surrounding code
                    // already treats otherUser as possibly null, but the
                    // message below dereferenced it unconditionally (NPE).
                    String otherName = (null != otherUser) ? otherUser.getUserName() : "unbekannt";
                    history = new History(History.TYPE_EXPOSE_MONOPOLY_NEGATIVE, user.getId(),
                            System.currentTimeMillis(),
                            "Die Wohnung '" + expose.getName() + "' gehört schon '" + otherName
                                    + "' Strafe " + History.MONEYFORMAT.format(fine), fine);
                    if (null != otherUser) {
                        History otherHistory = new History(History.TYPE_EXPOSE_MONOPOLY_POSITIVE,
                                otherUser.getId(), System.currentTimeMillis(),
                                "Jemand wollte deine Wohnung '" + expose.getName()
                                        + "' übernehmen: Belohung " + History.MONEYFORMAT.format(fine), fine);
                        pm.makePersistent(otherHistory);
                    }
                    pm.makePersistent(history);
                    pm.makePersistent(user);
                    if (null != otherUser)
                        pm.makePersistent(otherUser);
                }
            } else {
                // Not in the local DB: fetch from the remote search service.
                URL url = new URL(OAuthData.SERVER + OAuthData.SEARCH_PREFIX + "expose/" + exposeId + ".json");
                JSONObject obj = WebHelper.getHttpData(url);
                if (obj.has("expose.expose")) {
                    expose = new Expose(user.getId(), obj);
                    // nur wohnungen mit rent
                    if (expose.getRent() == 0.0)
                        throw new ImmopolyException(
                                "Expose hat keinen Wert für Kaltmiete, sie kann nicht übernommen werden", 302);
                    // if(!checkDistance(pm,expose))
                    // throw new ImmopolyException("SPOOFING ALERT", 441);
                    pm.makePersistent(expose);
                    double fine = 2 * expose.getRent() / 30.0;
                    history = new History(History.TYPE_EXPOSE_ADDED, user.getId(), System.currentTimeMillis(),
                            "Du hast die Wohnung '" + expose.getName() + "' gemietet für "
                                    + History.MONEYFORMAT.format(expose.getRent())
                                    + " im Monat. Übernahmekosten: " + History.MONEYFORMAT.format(fine), fine);
                    user.setBalance(user.getBalance() - fine);
                    pm.makePersistent(user);
                    pm.makePersistent(history);
                } else if (obj.toString().contains("ERROR_RESOURCE_NOT_FOUND"))
                    throw new ImmopolyException("expose jibs nich", 301);
            }
            // Fix: history stayed null when the remote answer neither
            // contained the expose nor the not-found marker, causing an NPE
            // below that surfaced as the generic error 101. Report it as
            // "not found" explicitly instead.
            if (null == history)
                throw new ImmopolyException("expose jibs nich", 301);
            // history eintrag
            resp.getOutputStream().write(history.toJSON().toString().getBytes("UTF-8"));
        } catch (ImmopolyException e) {
            throw e;
        } catch (Exception e) {
            throw new ImmopolyException("could not add expose ", 101, e);
        } finally {
            pm.close();
        }
    }

    /**
     * Anti-spoofing heuristic (currently unused; call site above is
     * commented out): rejects a take-over if the user moved faster than
     * MAX_SPOOFING_DISTANCE_PER_SECOND between recent exposes.
     */
    private boolean checkDistance(PersistenceManager pm, Expose expose) {
        // get last x entries
        List<Expose> lastExposes = DBManager.getLastExposes(pm, expose.getUserId(),
                System.currentTimeMillis() - (60 * 60 * 1000));
        LOG.info("lastExposes " + lastExposes.size() + " userId: " + expose.getUserId() + " "
                + (System.currentTimeMillis() - (60 * 60 * 1000)));
        for (Expose e : lastExposes) {
            // wenn e weiter weg ist als MAX_SPOOFING_METER_PER_SECOND per return false
            double distance = calcDistance(expose.getLatitude(), expose.getLongitude(), e.getLatitude(),
                    e.getLongitude());
            // NOTE(review): the divisor is integer seconds — an expose less
            // than one second old divides by zero, yielding Infinity, which
            // then trips the threshold below. Possibly intended; verify.
            double distancePerSecond = distance / ((System.currentTimeMillis() - e.getTime()) / 1000);
            LOG.info("distance " + distance + " distancePerSecond " + distancePerSecond + " max "
                    + Const.MAX_SPOOFING_DISTANCE_PER_SECOND);
            if (distancePerSecond > Const.MAX_SPOOFING_DISTANCE_PER_SECOND) {
                LOG.severe("distance " + distance + " distancePerSecond " + distancePerSecond + " max "
                        + Const.MAX_SPOOFING_DISTANCE_PER_SECOND);
                return false;
            }
        }
        return true;
    }

    /**
     * Great-circle distance between two coordinates via the haversine
     * formula.
     *
     * @return distance in meters (earth radius in miles, converted).
     */
    public static double calcDistance(double lat1, double lng1, double lat2, double lng2) {
        double earthRadius = 3958.75; // miles
        double dLat = Math.toRadians(lat2 - lat1);
        double dLng = Math.toRadians(lng2 - lng1);
        double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                * Math.sin(dLng / 2) * Math.sin(dLng / 2);
        double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
        double dist = earthRadius * c;
        int meterConversion = 1609;
        // Fix: return the primitive directly instead of boxing through
        // new Double(...).doubleValue().
        return dist * meterConversion;
    }
}
package org.jgroups.raft.blocks; import org.jgroups.Global; import org.jgroups.JChannel; import org.jgroups.blocks.atomic.AsyncCounter; import org.jgroups.blocks.atomic.Counter; import org.jgroups.protocols.raft.*; import org.jgroups.raft.RaftHandle; import org.jgroups.util.*; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; /** * Provides a consensus based distributed counter (similar to AtomicLong) which can be atomically updated across a cluster. * @author Bela Ban * @since 0.2 */ public class CounterService implements StateMachine, RAFT.RoleChange { protected JChannel ch; protected RaftHandle raft; protected long repl_timeout=20000; // timeout (ms) to wait for a majority to ack a write /** If true, reads can return the local counter value directly. 
Else, reads have to go through the leader */ protected boolean allow_dirty_reads=true; // keys: counter names, values: counter values protected final Map<String,Long> counters=new HashMap<>(); protected enum Command {create, delete, get, set, compareAndSet, incrementAndGet, decrementAndGet, addAndGet, compareAndSwap} public CounterService(JChannel ch) { setChannel(ch); } public void setChannel(JChannel ch) { this.ch=ch; this.raft=new RaftHandle(this.ch, this); raft.addRoleListener(this); } public void addRoleChangeListener(RAFT.RoleChange listener) {raft.addRoleListener(listener);} public long replTimeout() {return repl_timeout;} public CounterService replTimeout(long timeout) {this.repl_timeout=timeout; return this;} public boolean allowDirtyReads() {return allow_dirty_reads;} public CounterService allowDirtyReads(boolean flag) {allow_dirty_reads=flag; return this;} public int lastApplied() {return raft.lastApplied();} public int commitIndex() {return raft.commitIndex();} public void snapshot() throws Exception {raft.snapshot();} public int logSize() {return raft.logSize();} public String raftId() {return raft.raftId();} public CounterService raftId(String id) {raft.raftId(id); return this;} /** * Returns an instance of the counter. This is local operation which never fails and don't create a record in the * log. For cluster-wide operation call getOrCreateCounter(name, initial_value). * @param name Name of the counter, different counters have to have different names * @return The counter instance */ public Counter counter(String name) { return new CounterImpl(name, this); } /** * Returns an existing counter, or creates a new one if none exists. This is a cluster-wide operation which would * fail if no leader is elected. * @param name Name of the counter, different counters have to have different names * @param initial_value The initial value of a new counter if there is no existing counter. 
Ignored * if the counter already exists * @return The counter implementation */ public Counter getOrCreateCounter(String name, long initial_value) throws Exception { if(!counters.containsKey(name)) invoke(Command.create, name, false, initial_value); return new CounterImpl(name, this); } /** * Deletes a counter instance (on the coordinator) * @param name The name of the counter. No-op if the counter doesn't exist */ public void deleteCounter(String name) throws Exception { invoke(Command.delete, name, true); } public String printCounters() { return counters.entrySet().stream().map(e -> String.format("%s = %d", e.getKey(), e.getValue())) .collect(Collectors.joining("\n")); } public long get(String name) throws Exception { Object retval=allow_dirty_reads? _get(name) : invoke(Command.get, name, false); return (long)retval; } public void set(String name, long new_value) throws Exception { invoke(Command.set, name, true, new_value); } public boolean compareAndSet(String name, long expect, long update) throws Exception { Object retval=invoke(Command.compareAndSet, name, false, expect, update); return (boolean)retval; } public long incrementAndGet(String name) throws Exception { Object retval=invoke(Command.incrementAndGet, name, false); return (long)retval; } public long decrementAndGet(String name) throws Exception { Object retval=invoke(Command.decrementAndGet, name, false); return (long)retval; } public long addAndGet(String name, long delta) throws Exception { Object retval=invoke(Command.addAndGet, name, false, delta); return (long)retval; } @Override public byte[] apply(byte[] data, int offset, int length) throws Exception { ByteArrayDataInputStream in=new ByteArrayDataInputStream(data, offset, length); Command command=Command.values()[in.readByte()]; String name=Bits.readAsciiString(in).toString(); long v1, v2, retval; switch(command) { case create: v1=Bits.readLongCompressed(in); retval=_create(name, v1); return Util.objectToByteBuffer(retval); case delete: 
_delete(name); break; case get: retval=_get(name); return Util.objectToByteBuffer(retval); case set: v1=Bits.readLongCompressed(in); _set(name, v1); break; case compareAndSet: v1=Bits.readLongCompressed(in); v2=Bits.readLongCompressed(in); boolean success=_cas(name, v1, v2); return Util.objectToByteBuffer(success); case incrementAndGet: retval=_add(name, +1L); return Util.objectToByteBuffer(retval); case decrementAndGet: retval=_add(name, -1L); return Util.objectToByteBuffer(retval); case addAndGet: v1=Bits.readLongCompressed(in); retval=_add(name, v1); return Util.objectToByteBuffer(retval); case compareAndSwap: return Util.objectToByteBuffer(_compareAndSwap(name, Bits.readLongCompressed(in), Bits.readLongCompressed(in))); default: throw new IllegalArgumentException("command " + command + " is unknown"); } return Util.objectToByteBuffer(null); } @Override public void writeContentTo(DataOutput out) throws Exception { synchronized(counters) { int size=counters.size(); out.writeInt(size); for(Map.Entry<String,Long> entry: counters.entrySet()) { AsciiString name=new AsciiString(entry.getKey()); Long value=entry.getValue(); Bits.writeAsciiString(name, out); Bits.writeLongCompressed(value, out); } } } @Override public void readContentFrom(DataInput in) throws Exception { int size=in.readInt(); for(int i=0; i < size; i++) { AsciiString name=Bits.readAsciiString(in); Long value=Bits.readLongCompressed(in); counters.put(name.toString(), value); } } public static String readAndDumpSnapshot(DataInput in) { try { int size=in.readInt(); StringBuilder sb=new StringBuilder(); for(int i=0; i < size; i++) { AsciiString name=Bits.readAsciiString(in); Long value=Bits.readLongCompressed(in); sb.append(name).append(": ").append(value); } return sb.toString(); } catch(Exception ex) { return null; } } public void dumpLog() { raft.logEntries((entry, index) -> { StringBuilder sb=new StringBuilder().append(index).append(" (").append(entry.term()).append("): "); 
sb.append(dumpLogEntry(entry)); System.out.println(sb); }); } public static String dumpLogEntry(LogEntry e) { if(e.command() == null) return "<marker record>"; StringBuilder sb=new StringBuilder(); if(e.internal()) { try { InternalCommand cmd=Util.streamableFromByteBuffer(InternalCommand.class, e.command(), e.offset(), e.length()); sb.append("[internal] ").append(cmd); } catch(Exception ex) { sb.append("[failure reading internal cmd] ").append(ex); } return sb.toString(); } ByteArrayDataInputStream in=new ByteArrayDataInputStream(e.command(), e.offset(), e.length()); try { Command cmd=Command.values()[in.readByte()]; String name=Bits.readAsciiString(in).toString(); switch(cmd) { case create: case set: case addAndGet: sb.append(print(cmd, name, 1, in)); break; case delete: case get: case incrementAndGet: case decrementAndGet: sb.append(print(cmd, name, 0, in)); break; case compareAndSet: case compareAndSwap: sb.append(print(cmd, name, 2, in)); break; default: throw new IllegalArgumentException("command " + cmd + " is unknown"); } } catch(Throwable t) { sb.append(t); } return sb.toString(); } @Override public void roleChanged(Role role) { System.out.println("-- changed role to " + role); } public String toString() { return printCounters(); } protected Object invoke(Command command, String name, boolean ignore_return_value, long ... values) throws Exception { ByteArrayDataOutputStream out=new ByteArrayDataOutputStream(256); try { out.writeByte(command.ordinal()); Bits.writeAsciiString(new AsciiString(name), out); for(long val: values) Bits.writeLongCompressed(val, out); } catch(Exception ex) { throw new Exception("serialization failure (cmd=" + command + ", name=" + name + "): " + ex); } byte[] buf=out.buffer(); byte[] rsp=raft.set(buf, 0, out.position(), repl_timeout, TimeUnit.MILLISECONDS); return ignore_return_value? null: Util.objectFromByteBuffer(rsp); } /* Async operations */ /** * Returns an {@link AsyncCounter} instance of the counter. 
* <p> * This is local operation, and it does not create the counter in the raft log. * * @param name Name of the counter, different counters have to have different names. * @return The {@link AsyncCounter} instance */ public AsyncCounter asyncCounter(String name) { return new AsyncCounterImpl(this, name); } /** * Returns an existing counter, or creates a new one if none exists. * <p> * This is a cluster-wide operation which would fail if no leader is elected. * * @param name Name of the counter, different counters have to have different names * @param initialValue The initial value of a new counter if there is no existing counter. Ignored if the counter * already exists * @return The {@link AsyncCounter} implementation. */ public CompletionStage<AsyncCounter> getOrCreateAsyncCounter(String name, long initialValue) { synchronized (counters) { if (counters.containsKey(name)) { return CompletableFuture.completedFuture(asyncCounter(name)); } } return invokeAsync(Command.create, new AsciiString(name), initialValue) .thenApply(__ -> asyncCounter(name)); } protected CompletionStage<Long> asyncGet(AsciiString name) { return invokeAsyncAndGet(Command.get, name); } protected CompletionStage<Void> asyncSet(AsciiString name, long value) { return invokeAsync(Command.set, name, value); } public CompletionStage<Long> asyncIncrementAndGet(AsciiString name) { return invokeAsyncAndGet(Command.incrementAndGet, name); } public CompletionStage<Long> asyncDecrementAndGet(AsciiString name) { return invokeAsyncAndGet(Command.decrementAndGet, name); } protected CompletionStage<Long> asyncAddAndGet(AsciiString name, long delta) { return delta == 0 ? 
asyncGet(name) : invokeAsyncAddAndGet(name, delta); } protected CompletionStage<Long> asyncCompareAndSwap(AsciiString name, long expected, long value) { ByteArrayDataOutputStream out = new ByteArrayDataOutputStream(Bits.size(name) + Global.BYTE_SIZE + Bits.size(expected) + Bits.size(value)); try { writeCommandAndName(out, Command.compareAndSwap.ordinal(), name); Bits.writeLongCompressed(expected, out); Bits.writeLongCompressed(value, out); return setAsyncWithTimeout(out).thenApply(CounterService::readLong); } catch (Exception ex) { return CompletableFutures.completeExceptionally(ex); } } protected CompletionStage<Long> invokeAsyncAndGet(Command command, AsciiString name) { ByteArrayDataOutputStream out = new ByteArrayDataOutputStream(Bits.size(name) + Global.BYTE_SIZE); try { writeCommandAndName(out, command.ordinal(), name); return setAsyncWithTimeout(out).thenApply(CounterService::readLong); } catch (Exception ex) { return CompletableFutures.completeExceptionally(ex); } } protected CompletionStage<Long> invokeAsyncAddAndGet(AsciiString name, long arg) { ByteArrayDataOutputStream out = new ByteArrayDataOutputStream(Bits.size(name) + Global.BYTE_SIZE + Bits.size(arg)); try { writeCommandAndName(out, Command.addAndGet.ordinal(), name); Bits.writeLongCompressed(arg, out); return setAsyncWithTimeout(out).thenApply(CounterService::readLong); } catch (Exception ex) { return CompletableFutures.completeExceptionally(ex); } } protected CompletionStage<Void> invokeAsync(Command command, AsciiString name, long arg) { ByteArrayDataOutputStream out = new ByteArrayDataOutputStream(Bits.size(name) + Global.BYTE_SIZE + Bits.size(arg)); try { writeCommandAndName(out, command.ordinal(), name); Bits.writeLongCompressed(arg, out); return setAsyncWithTimeout(out).thenApply(CompletableFutures.toVoidFunction()); } catch (Exception ex) { return CompletableFutures.completeExceptionally(ex); } } private static void writeCommandAndName(ByteArrayDataOutputStream out, int command, AsciiString 
name) throws IOException { out.writeByte(command); Bits.writeAsciiString(name, out); } private CompletionStage<byte[]> setAsyncWithTimeout(ByteArrayDataOutputStream out) throws Exception { return raft.setAsync(out.buffer(), 0, out.position()) .orTimeout(repl_timeout, TimeUnit.MILLISECONDS); } private static Long readLong(byte[] rsp) { try { return Util.objectFromByteBuffer(rsp); } catch (IOException | ClassNotFoundException e) { throw CompletableFutures.wrapAsCompletionException(e); } } protected static String print(Command command, String name, int num_args, DataInput in) { StringBuilder sb=new StringBuilder(command.toString()).append("(").append(name); for(int i=0; i < num_args; i++) { try { long val=Bits.readLongCompressed(in); sb.append(", ").append(val); } catch(IOException ignored) { break; } } sb.append(")"); return sb.toString(); } protected long _create(String name, long initial_value) { synchronized(counters) { Long val=counters.get(name); if(val != null) return val; counters.put(name, initial_value); return initial_value; } } protected void _delete(String name) { synchronized(counters) { counters.remove(name); } } protected long _get(String name) { synchronized(counters) { Long retval=counters.get(name); return retval != null? 
(long)retval : 0; } } protected void _set(String name, long new_val) { synchronized(counters) { counters.put(name, new_val); } } protected boolean _cas(String name, long expected, long value) { synchronized(counters) { Long existing_value=counters.get(name); if(existing_value == null) return false; if(existing_value == expected) { counters.put(name, value); return true; } return false; } } protected long _add(String name, long delta) { synchronized(counters) { Long val=counters.get(name); if(val == null) val=(long)0; counters.put(name, val+delta); return val+delta; } } protected long _compareAndSwap(String name, long expected, long value) { synchronized (counters) { Long existing = counters.get(name); if (existing == null) { // TODO is it ok to return 0? return expected == 0 ? 1: 0; } if (existing == expected) { counters.put(name, value); } return existing; } } }
package org.lockss.metadata; import static java.sql.Types.BIGINT; import static org.lockss.db.SqlConstants.*; import static org.lockss.metadata.MetadataManager.*; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.lockss.db.DbException; import org.lockss.db.DbManager; import org.lockss.db.PkNamePair; import org.lockss.metadata.MetadataManager.PrioritizedAuId; import org.lockss.plugin.ArchivalUnit; import org.lockss.plugin.AuUtil; import org.lockss.plugin.PluginManager; import org.lockss.util.KeyPair; import org.lockss.util.Logger; import org.lockss.util.StringUtil; import org.lockss.util.TimeBase; public class MetadataManagerSql { private static final Logger log = Logger.getLogger(MetadataManagerSql.class); private static final int UNKNOWN_VERSION = -1; // Query to count enabled pending AUs. private static final String COUNT_ENABLED_PENDING_AUS_QUERY = "select " + "count(*) from " + PENDING_AU_TABLE + " where " + PRIORITY_COLUMN + " >= 0"; // Query to count bibliographic items. private static final String COUNT_BIB_ITEM_QUERY = "select count(*) from " + BIB_ITEM_TABLE; // Query to count publication items that have associated AU_ITEMs // of type 'journal' or 'book' or 'proceedings'. private static final String COUNT_PUBLICATION_QUERY = "select count(distinct " + PUBLICATION_TABLE + "." + MD_ITEM_SEQ_COLUMN + ") from " + PUBLISHER_TABLE + "," + PUBLICATION_TABLE + "," + MD_ITEM_TABLE + "," + MD_ITEM_TYPE_TABLE + " where " + PUBLISHER_TABLE + "." + PUBLISHER_SEQ_COLUMN + "=" + PUBLICATION_TABLE + "." + PUBLISHER_SEQ_COLUMN + " and " + PUBLICATION_TABLE + "." + MD_ITEM_SEQ_COLUMN + "=" + MD_ITEM_TABLE + "." + MD_ITEM_SEQ_COLUMN + " and " + MD_ITEM_TABLE + "." 
+ MD_ITEM_TYPE_SEQ_COLUMN + "=" + MD_ITEM_TYPE_TABLE + "." + MD_ITEM_TYPE_SEQ_COLUMN + " and " + MD_ITEM_TYPE_TABLE + "." + TYPE_NAME_COLUMN + " in ('" + MD_ITEM_TYPE_JOURNAL + "','" + MD_ITEM_TYPE_BOOK + "','" + MD_ITEM_TYPE_PROCEEDINGS + "')"; // Query to count PUBLISHER items that have associated AU_ITEMs private static final String COUNT_PUBLISHER_QUERY = "select count(distinct " + PUBLISHER_TABLE + "." + PUBLISHER_SEQ_COLUMN + ") from " + PUBLISHER_TABLE + "," + PUBLICATION_TABLE + "," + MD_ITEM_TABLE + " where " + PUBLISHER_TABLE + "." + PUBLISHER_SEQ_COLUMN + "=" + PUBLICATION_TABLE + "." + PUBLISHER_SEQ_COLUMN + " and " + PUBLICATION_TABLE + "." + MD_ITEM_SEQ_COLUMN + "=" + MD_ITEM_TABLE + "." + MD_ITEM_SEQ_COLUMN; // Query to count PROVIDER items that have associated AU_ITEMs private static final String COUNT_PROVIDER_QUERY = "select count(distinct " + PROVIDER_TABLE + "." + PROVIDER_SEQ_COLUMN + ") from " + PROVIDER_TABLE + "," + AU_MD_TABLE + "," + MD_ITEM_TABLE + " where " + PROVIDER_TABLE + "." + PROVIDER_SEQ_COLUMN + "=" + AU_MD_TABLE + "." + PROVIDER_SEQ_COLUMN + " and " + AU_MD_TABLE + "." + AU_MD_SEQ_COLUMN + "=" + MD_ITEM_TABLE + "." + AU_MD_SEQ_COLUMN; // Query to find enabled pending AUs sorted by priority. Subsitute "true" // to prioritize indexing new AUs ahead of reindexing existing ones, "false" // to index in the order they were added to the queue. AUs with a priority of // zero (requested from the Debug Panel) are always sorted first. private static final String FIND_PRIORITIZED_ENABLED_PENDING_AUS_QUERY = "select " + PENDING_AU_TABLE + "." + PLUGIN_ID_COLUMN + "," + PENDING_AU_TABLE + "." + AU_KEY_COLUMN + "," + PENDING_AU_TABLE + "." + PRIORITY_COLUMN + ",(" + AU_MD_TABLE + "." + AU_SEQ_COLUMN + " is null) as " + ISNEW_COLUMN + "," + PENDING_AU_TABLE + "." + FULLY_REINDEX_COLUMN + " from " + PENDING_AU_TABLE + " left join " + PLUGIN_TABLE + " on " + PLUGIN_TABLE + "." + PLUGIN_ID_COLUMN + " = " + PENDING_AU_TABLE + "." 
+ PLUGIN_ID_COLUMN + " left join " + AU_TABLE + " on " + AU_TABLE + "." + AU_KEY_COLUMN + " = " + PENDING_AU_TABLE + "." + AU_KEY_COLUMN + " and " + AU_TABLE + "." + PLUGIN_SEQ_COLUMN + " = " + PLUGIN_TABLE + "." + PLUGIN_SEQ_COLUMN + " left join " + AU_MD_TABLE + " on " + AU_MD_TABLE + "." + AU_SEQ_COLUMN + " = " + AU_TABLE + "." + AU_SEQ_COLUMN + " where " + PRIORITY_COLUMN + " >= 0" + " order by (" + PENDING_AU_TABLE + "." + PRIORITY_COLUMN + " > 0)," + "(true = ? and " + AU_MD_TABLE + "." + AU_SEQ_COLUMN + " is not null)," + PENDING_AU_TABLE + "." + PRIORITY_COLUMN; // Query to delete a pending AU by its key and plugin identifier. private static final String DELETE_PENDING_AU_QUERY = "delete from " + PENDING_AU_TABLE + " where " + PLUGIN_ID_COLUMN + " = ?" + " and " + AU_KEY_COLUMN + " = ?"; // Query to delete the metadata items of an Archival Unit. private static final String DELETE_AU_MD_ITEM_QUERY = "delete from " + MD_ITEM_TABLE + " where " + AU_MD_SEQ_COLUMN + " = ?"; // Query to get the identifier of the metadata of an AU in the database. private static final String FIND_AU_MD_BY_AU_ID_QUERY = "select m." + AU_MD_SEQ_COLUMN + " from " + AU_MD_TABLE + " m," + AU_TABLE + " a," + PLUGIN_TABLE + " p" + " where m." + AU_SEQ_COLUMN + " = " + "a." + AU_SEQ_COLUMN + " and a." + PLUGIN_SEQ_COLUMN + " = " + "p." + PLUGIN_SEQ_COLUMN + " and p." + PLUGIN_ID_COLUMN + " = ?" + " and a." + AU_KEY_COLUMN + " = ?"; // Query to delete an AU by Archival Unit key and plugin identifier. private static final String DELETE_AU_QUERY = "delete from " + AU_TABLE + " where " + AU_SEQ_COLUMN + " = ?"; // Query to get the identifier of an AU in the database. private static final String FIND_AU_BY_AU_ID_QUERY = "select a." + AU_SEQ_COLUMN + " from " + AU_TABLE + " a," + PLUGIN_TABLE + " p" + " where a." + PLUGIN_SEQ_COLUMN + " = " + "p." + PLUGIN_SEQ_COLUMN + " and p." + PLUGIN_ID_COLUMN + " = ?" + " and a." + AU_KEY_COLUMN + " = ?"; // Query to find a plugin by its identifier. 
private static final String FIND_PLUGIN_QUERY = "select " + PLUGIN_SEQ_COLUMN + " from " + PLUGIN_TABLE + " where " + PLUGIN_ID_COLUMN + " = ?"; // Query to add a plugin. private static final String INSERT_PLUGIN_QUERY = "insert into " + PLUGIN_TABLE + "(" + PLUGIN_SEQ_COLUMN + "," + PLUGIN_ID_COLUMN + "," + PLATFORM_SEQ_COLUMN + "," + IS_BULK_CONTENT_COLUMN + ") values (default,?,?,?)"; // Query to find an Archival Unit by its plugin and key. private static final String FIND_AU_QUERY = "select " + AU_SEQ_COLUMN + " from " + AU_TABLE + " where " + PLUGIN_SEQ_COLUMN + " = ?" + " and " + AU_KEY_COLUMN + " = ?"; // Query to add an Archival Unit. private static final String INSERT_AU_QUERY = "insert into " + AU_TABLE + "(" + AU_SEQ_COLUMN + "," + PLUGIN_SEQ_COLUMN + "," + AU_KEY_COLUMN + ") values (default,?,?)"; // Query to add an Archival Unit metadata entry. private static final String INSERT_AU_MD_QUERY = "insert into " + AU_MD_TABLE + "(" + AU_MD_SEQ_COLUMN + "," + AU_SEQ_COLUMN + "," + MD_VERSION_COLUMN + "," + EXTRACT_TIME_COLUMN + "," + CREATION_TIME_COLUMN + "," + PROVIDER_SEQ_COLUMN + ") values (default,?,?,?,?,?)"; // Query to update the extraction time of the metadata of an Archival Unit. private static final String UPDATE_AU_MD_EXTRACT_TIME_QUERY = "update " + AU_MD_TABLE + " set " + EXTRACT_TIME_COLUMN + " = ?" + " where " + AU_MD_SEQ_COLUMN + " = ?"; // Query to add a publication. private static final String INSERT_PUBLICATION_QUERY = "insert into " + PUBLICATION_TABLE + "(" + PUBLICATION_SEQ_COLUMN + "," + MD_ITEM_SEQ_COLUMN + "," + PUBLISHER_SEQ_COLUMN + ") values (default,?,?)"; // Query to find the metadata item of a publication. 
private static final String FIND_PUBLICATION_METADATA_ITEM_QUERY = "select " + MD_ITEM_SEQ_COLUMN + " from " + PUBLICATION_TABLE + " where " + PUBLICATION_SEQ_COLUMN + " = ?"; // Query to find the parent metadata item private static final String FIND_PARENT_METADATA_ITEM_QUERY = "select " + PARENT_SEQ_COLUMN + " from " + MD_ITEM_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?"; // Query to add an ISSN. private static final String INSERT_ISSN_QUERY = "insert into " + ISSN_TABLE + "(" + MD_ITEM_SEQ_COLUMN + "," + ISSN_COLUMN + "," + ISSN_TYPE_COLUMN + ") values (?,?,?)"; // Query to add an ISBN. private static final String INSERT_ISBN_QUERY = "insert into " + ISBN_TABLE + "(" + MD_ITEM_SEQ_COLUMN + "," + ISBN_COLUMN + "," + ISBN_TYPE_COLUMN + ") values (?,?,?)"; // Query to find the ISSNs of a metadata item. private static final String FIND_MD_ITEM_ISSN_QUERY = "select " + ISSN_COLUMN + "," + ISSN_TYPE_COLUMN + " from " + ISSN_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?"; private static final String FIND_MD_ITEM_PROPRIETARY_ID_QUERY = "select " + PROPRIETARY_ID_COLUMN + " from " + PROPRIETARY_ID_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?"; // Query to find the ISBNs of a metadata item. private static final String FIND_MD_ITEM_ISBN_QUERY = "select " + ISBN_COLUMN + "," + ISBN_TYPE_COLUMN + " from " + ISBN_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?"; // Query to find a publication by its ISSNs. private static final String FIND_PUBLICATION_BY_ISSNS_QUERY = "select" + " p." + PUBLICATION_SEQ_COLUMN + " from " + PUBLICATION_TABLE + " p," + ISSN_TABLE + " i," + MD_ITEM_TABLE + " m," + MD_ITEM_TYPE_TABLE + " t" + " where p." + PUBLISHER_SEQ_COLUMN + " = ?" + " and m." + AU_MD_SEQ_COLUMN + " is null" + " and p." + MD_ITEM_SEQ_COLUMN + " = i." + MD_ITEM_SEQ_COLUMN + " and (i." + ISSN_COLUMN + " = ?" + " or i." + ISSN_COLUMN + " = ?)" + " and p." + MD_ITEM_SEQ_COLUMN + " = m." + MD_ITEM_SEQ_COLUMN + " and m." + MD_ITEM_TYPE_SEQ_COLUMN + " = t." 
+ MD_ITEM_TYPE_SEQ_COLUMN + " and t." + TYPE_NAME_COLUMN + " = ?"; // Query to find a publication by its ISBNs. private static final String FIND_PUBLICATION_BY_ISBNS_QUERY = "select" + " p." + PUBLICATION_SEQ_COLUMN + " from " + PUBLICATION_TABLE + " p," + ISBN_TABLE + " i," + MD_ITEM_TABLE + " m," + MD_ITEM_TYPE_TABLE + " t" + " where p." + PUBLISHER_SEQ_COLUMN + " = ?" + " and m." + AU_MD_SEQ_COLUMN + " is null" + " and p." + MD_ITEM_SEQ_COLUMN + " = i." + MD_ITEM_SEQ_COLUMN + " and (i." + ISBN_COLUMN + " = ?" + " or i." + ISBN_COLUMN + " = ?)" + " and p." + MD_ITEM_SEQ_COLUMN + " = m." + MD_ITEM_SEQ_COLUMN + " and m." + MD_ITEM_TYPE_SEQ_COLUMN + " = t." + MD_ITEM_TYPE_SEQ_COLUMN + " and t." + TYPE_NAME_COLUMN + " = ?"; // Query to find a publication by its name. private static final String FIND_PUBLICATION_BY_NAME_QUERY = "select p." + PUBLICATION_SEQ_COLUMN + " from " + PUBLICATION_TABLE + " p," + MD_ITEM_TABLE + " m," + MD_ITEM_NAME_TABLE + " n," + MD_ITEM_TYPE_TABLE + " t" + " where p." + PUBLISHER_SEQ_COLUMN + " = ?" + " and m." + AU_MD_SEQ_COLUMN + " is null" + " and p." + MD_ITEM_SEQ_COLUMN + " = m." + MD_ITEM_SEQ_COLUMN + " and p." + MD_ITEM_SEQ_COLUMN + " = n." + MD_ITEM_SEQ_COLUMN + " and n." + NAME_COLUMN + " = ?" + " and p." + MD_ITEM_SEQ_COLUMN + " = m." + MD_ITEM_SEQ_COLUMN + " and m." + MD_ITEM_TYPE_SEQ_COLUMN + " = t." + MD_ITEM_TYPE_SEQ_COLUMN + " and t." + TYPE_NAME_COLUMN + " = ?"; // Query to find a metadata item type by its name. private static final String FIND_MD_ITEM_TYPE_QUERY = "select " + MD_ITEM_TYPE_SEQ_COLUMN + " from " + MD_ITEM_TYPE_TABLE + " where " + TYPE_NAME_COLUMN + " = ?"; // Query to add a metadata item. 
private static final String INSERT_MD_ITEM_QUERY = "insert into " + MD_ITEM_TABLE + "(" + MD_ITEM_SEQ_COLUMN + "," + PARENT_SEQ_COLUMN + "," + MD_ITEM_TYPE_SEQ_COLUMN + "," + AU_MD_SEQ_COLUMN + "," + DATE_COLUMN + "," + COVERAGE_COLUMN + "," + FETCH_TIME_COLUMN + ") values (default,?,?,?,?,?,?)"; // Query to count the ISBNs of a publication. private static final String COUNT_PUBLICATION_ISBNS_QUERY = "select " + "count(*)" + " from " + ISBN_TABLE + " i" + "," + PUBLICATION_TABLE + " p" + " where p." + PUBLICATION_SEQ_COLUMN + " = ?" + " and p." + MD_ITEM_SEQ_COLUMN + " = i." + MD_ITEM_SEQ_COLUMN; // Query to count the ISSNs of a publication. private static final String COUNT_PUBLICATION_ISSNS_QUERY = "select " + "count(*)" + " from " + ISSN_TABLE + " i" + "," + PUBLICATION_TABLE + " p" + " where p." + PUBLICATION_SEQ_COLUMN + " = ?" + " and p." + MD_ITEM_SEQ_COLUMN + " = i." + MD_ITEM_SEQ_COLUMN; // Query to find the secondary names of a metadata item. private static final String FIND_MD_ITEM_NAME_QUERY = "select " + NAME_COLUMN + "," + NAME_TYPE_COLUMN + " from " + MD_ITEM_NAME_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?"; // Query to add a metadata item name. private static final String INSERT_MD_ITEM_NAME_QUERY = "insert into " + MD_ITEM_NAME_TABLE + "(" + MD_ITEM_SEQ_COLUMN + "," + NAME_COLUMN + "," + NAME_TYPE_COLUMN + ") values (?,?,?)"; // Query to add a metadata item URL. private static final String INSERT_URL_QUERY = "insert into " + URL_TABLE + "(" + MD_ITEM_SEQ_COLUMN + "," + FEATURE_COLUMN + "," + URL_COLUMN + ") values (?,?,?)"; // Query to add a metadata item DOI. private static final String INSERT_DOI_QUERY = "insert into " + DOI_TABLE + "(" + MD_ITEM_SEQ_COLUMN + "," + DOI_COLUMN + ") values (?,?)"; // Query to delete a disabled pending AU by its key and plugin identifier. private static final String DELETE_DISABLED_PENDING_AU_QUERY = "delete from " + PENDING_AU_TABLE + " where " + PLUGIN_ID_COLUMN + " = ?" + " and " + AU_KEY_COLUMN + " = ?" 
+ " and " + PRIORITY_COLUMN + " < 0"; // Query to add an enabled pending AU at the bottom of the current priority // list. private static final String INSERT_ENABLED_PENDING_AU_QUERY = "insert into " + PENDING_AU_TABLE + "(" + PLUGIN_ID_COLUMN + "," + AU_KEY_COLUMN + "," + PRIORITY_COLUMN + "," + FULLY_REINDEX_COLUMN + ") values (?,?," + "(select coalesce(max(" + PRIORITY_COLUMN + "), 0) + 1" + " from " + PENDING_AU_TABLE + " where " + PRIORITY_COLUMN + " >= 0),?)"; // Query to add an enabled pending AU at the bottom of the current priority // list using MySQL. private static final String INSERT_ENABLED_PENDING_AU_MYSQL_QUERY = "insert " + "into " + PENDING_AU_TABLE + "(" + PLUGIN_ID_COLUMN + "," + AU_KEY_COLUMN + "," + PRIORITY_COLUMN + "," + FULLY_REINDEX_COLUMN + ") values (?,?," + "(select next_priority from " + "(select coalesce(max(" + PRIORITY_COLUMN + "), 0) + 1 as next_priority" + " from " + PENDING_AU_TABLE + " where " + PRIORITY_COLUMN + " >= 0) as temp_pau_table),?)"; // Query to add an enabled pending AU at the top of the current priority list. private static final String INSERT_HIGHEST_PRIORITY_PENDING_AU_QUERY = "insert into " + PENDING_AU_TABLE + "(" + PLUGIN_ID_COLUMN + "," + AU_KEY_COLUMN + "," + PRIORITY_COLUMN + "," + FULLY_REINDEX_COLUMN + ") values (?,?,0,?)"; // Query to find a pending AU by its key and plugin identifier. private static final String FIND_PENDING_AU_QUERY = "select " + PLUGIN_ID_COLUMN + "," + AU_KEY_COLUMN + " from " + PENDING_AU_TABLE + " where " + PLUGIN_ID_COLUMN + " = ?" + " and " + AU_KEY_COLUMN + " = ?"; // Query to get the version of the metadata of an AU as is recorded in the // database. private static final String FIND_AU_METADATA_VERSION_QUERY = "select m." + MD_VERSION_COLUMN + " from " + AU_MD_TABLE + " m," + AU_TABLE + " a," + PLUGIN_TABLE + " p" + " where m." + AU_SEQ_COLUMN + " = " + " a." + AU_SEQ_COLUMN + " and a." + PLUGIN_SEQ_COLUMN + " = " + " p." + PLUGIN_SEQ_COLUMN + " and p." 
+ PLUGIN_ID_COLUMN + " = ?" + " and a." + AU_KEY_COLUMN + " = ?"; // Query to find the full reindexing flag of an Archival Unit. private static final String FIND_AU_FULL_REINDEXING_BY_AU_QUERY = "select " + FULLY_REINDEX_COLUMN + " from " + PENDING_AU_TABLE + " where " + PLUGIN_ID_COLUMN + " = ?" + " and " + AU_KEY_COLUMN + " = ?"; // Query to update the full reindexing of an Archival Unit. private static final String UPDATE_AU_FULL_REINDEXING_QUERY = "update " + PENDING_AU_TABLE + " set " + FULLY_REINDEX_COLUMN + " = ?" + " where " + PLUGIN_ID_COLUMN + " = ?" + " and " + AU_KEY_COLUMN + " = ?"; // Query to find the extraction time of an Archival Unit. private static final String FIND_AU_MD_EXTRACT_TIME_BY_AUSEQ_QUERY = "select " + EXTRACT_TIME_COLUMN + " from " + AU_MD_TABLE + " where " + AU_SEQ_COLUMN + " = ?"; // Query to find the extraction time of an Archival Unit. private static final String FIND_AU_MD_EXTRACT_TIME_BY_AU_QUERY = "select m." + EXTRACT_TIME_COLUMN + " from " + AU_MD_TABLE + " m," + AU_TABLE + " a," + PLUGIN_TABLE + " p" + " where m." + AU_SEQ_COLUMN + " = " + " a." + AU_SEQ_COLUMN + " and a." + PLUGIN_SEQ_COLUMN + " = " + " p." + PLUGIN_SEQ_COLUMN + " and p." + PLUGIN_ID_COLUMN + " = ?" + " and a." + AU_KEY_COLUMN + " = ?"; // Query to find a platform by its name. private static final String FIND_PLATFORM_QUERY = "select " + PLATFORM_SEQ_COLUMN + " from " + PLATFORM_TABLE + " where " + PLATFORM_NAME_COLUMN + " = ?"; // Query to add a platform. private static final String INSERT_PLATFORM_QUERY = "insert into " + PLATFORM_TABLE + "(" + PLATFORM_SEQ_COLUMN + "," + PLATFORM_NAME_COLUMN + ") values (default,?)"; // Query to add a disabled pending AU. private static final String INSERT_DISABLED_PENDING_AU_QUERY = "insert into " + PENDING_AU_TABLE + "(" + PLUGIN_ID_COLUMN + "," + AU_KEY_COLUMN + "," + PRIORITY_COLUMN + ") values (?,?," + MIN_INDEX_PRIORITY + ")"; // Query to add a pending AU with failed indexing. 
private static final String INSERT_FAILED_INDEXING_PENDING_AU_QUERY = "insert" + " into " + PENDING_AU_TABLE + "(" + PLUGIN_ID_COLUMN + "," + AU_KEY_COLUMN + "," + PRIORITY_COLUMN + ") values (?,?," + FAILED_INDEX_PRIORITY + ")"; // Query to find pending AUs with a given priority. private static final String FIND_PENDING_AUS_WITH_PRIORITY_QUERY = "select " + PLUGIN_ID_COLUMN + "," + AU_KEY_COLUMN + " from " + PENDING_AU_TABLE + " where " + PRIORITY_COLUMN + " = ?"; // Query to find the publisher of an Archival Unit. private static final String FIND_AU_PUBLISHER_QUERY = "select distinct " + "pr." + PUBLISHER_SEQ_COLUMN + " from " + PUBLISHER_TABLE + " pr" + "," + PUBLICATION_TABLE + " p" + "," + MD_ITEM_TABLE + " m" + "," + AU_MD_TABLE + " am" + " where pr." + PUBLISHER_SEQ_COLUMN + " = p." + PUBLISHER_SEQ_COLUMN + " and p." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN + " and m." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN + " and am." + AU_SEQ_COLUMN + " = ?"; // Query to find the authors of a metadata item. private static final String FIND_MD_ITEM_AUTHOR_QUERY = "select " + AUTHOR_NAME_COLUMN + " from " + AUTHOR_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?"; // Query to find the keywords of a metadata item. private static final String FIND_MD_ITEM_KEYWORD_QUERY = "select " + KEYWORD_COLUMN + " from " + KEYWORD_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?"; // Query to add a metadata item author. private static final String INSERT_AUTHOR_QUERY = "insert into " + AUTHOR_TABLE + "(" + MD_ITEM_SEQ_COLUMN + "," + AUTHOR_NAME_COLUMN + "," + AUTHOR_IDX_COLUMN + ") values (?,?," + "(select coalesce(max(" + AUTHOR_IDX_COLUMN + "), 0) + 1" + " from " + AUTHOR_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?))"; // Query to add a metadata item author using MySQL. 
  // MySQL cannot select from the table being inserted into, so the
  // max-index subquery is wrapped in a derived table (temp_author_table).
  private static final String INSERT_AUTHOR_MYSQL_QUERY = "insert into "
      + AUTHOR_TABLE
      + "(" + MD_ITEM_SEQ_COLUMN
      + "," + AUTHOR_NAME_COLUMN
      + "," + AUTHOR_IDX_COLUMN
      + ") values (?,?,"
      + "(select next_idx from "
      + "(select coalesce(max(" + AUTHOR_IDX_COLUMN + "), 0) + 1 as next_idx"
      + " from " + AUTHOR_TABLE
      + " where " + MD_ITEM_SEQ_COLUMN + " = ?) as temp_author_table))";

  // Query to add a metadata item keyword.
  private static final String INSERT_KEYWORD_QUERY = "insert into "
      + KEYWORD_TABLE
      + "(" + MD_ITEM_SEQ_COLUMN
      + "," + KEYWORD_COLUMN
      + ") values (?,?)";

  // Query to add an archival unit to the UNCONFIGURED_AU table.
  private static final String INSERT_UNCONFIGURED_AU_QUERY = "insert into "
      + UNCONFIGURED_AU_TABLE
      + "(" + PLUGIN_ID_COLUMN
      + "," + AU_KEY_COLUMN
      + ") values (?,?)";

  // Query to remove an archival unit from the UNCONFIGURED_AU table.
  private static final String DELETE_UNCONFIGURED_AU_QUERY = "delete from "
      + UNCONFIGURED_AU_TABLE
      + " where " + PLUGIN_ID_COLUMN + " = ?"
      + " and " + AU_KEY_COLUMN + " = ?";

  // Query to count recorded unconfigured archival units.
  private static final String UNCONFIGURED_AU_COUNT_QUERY = "select "
      + "count(*)"
      + " from " + UNCONFIGURED_AU_TABLE;

  // Query to find if an archival unit is in the UNCONFIGURED_AU table
  // (returns a count of 0 or 1).
  private static final String FIND_UNCONFIGURED_AU_COUNT_QUERY = "select "
      + "count(*)"
      + " from " + UNCONFIGURED_AU_TABLE
      + " where " + PLUGIN_ID_COLUMN + " = ?"
      + " and " + AU_KEY_COLUMN + " = ?";

  // Query to delete an Archival Unit child metadata item; the
  // "parent is not null" guard prevents deleting top-level items.
  private static final String DELETE_AU_CHILD_MD_ITEM_QUERY = "delete from "
      + MD_ITEM_TABLE
      + " where " + AU_MD_SEQ_COLUMN + " = ?"
      + " and " + MD_ITEM_SEQ_COLUMN + " = ?"
      + " and " + PARENT_SEQ_COLUMN + " is not null";

  // Query to retrieve all the publisher names.
  private static final String GET_PUBLISHER_NAMES_QUERY = "select "
      + PUBLISHER_NAME_COLUMN
      + " from " + PUBLISHER_TABLE
      + " order by " + PUBLISHER_NAME_COLUMN;

  // Derby query to retrieve all the different DOI prefixes of all the
  // publishers with multiple DOI prefixes.
  // The DOI prefix is the DOI text up to (not including) the first '/';
  // Derby spells this substr(..., 1, locate('/', ...)-1).
  private static final String GET_PUBLISHERS_MULTIPLE_DOI_PREFIXES_DERBY_QUERY =
      "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, locate('/', d." + DOI_COLUMN
      + ")-1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and pr." + PUBLISHER_NAME_COLUMN + " in ("
      + " select subq." + PUBLISHER_NAME_COLUMN
      + " from ("
      + "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, locate('/', d." + DOI_COLUMN
      + ")-1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq." + PUBLISHER_NAME_COLUMN
      + " having count(subq." + PUBLISHER_NAME_COLUMN + ") > 1)"
      + " order by pr." + PUBLISHER_NAME_COLUMN + ", prefix";

  // PostgreSQL query to retrieve all the different DOI prefixes of all the
  // publishers with multiple DOI prefixes (strpos instead of locate).
  private static final String GET_PUBLISHERS_MULTIPLE_DOI_PREFIXES_PG_QUERY =
      "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, strpos(d." + DOI_COLUMN
      + ", '/')-1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and pr." + PUBLISHER_NAME_COLUMN + " in ("
      + " select subq." + PUBLISHER_NAME_COLUMN
      + " from ("
      + "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, strpos(d." + DOI_COLUMN
      + ", '/')-1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq." + PUBLISHER_NAME_COLUMN
      + " having count(subq." + PUBLISHER_NAME_COLUMN + ") > 1)"
      + " order by pr." + PUBLISHER_NAME_COLUMN + ", prefix";

  // MySQL query to retrieve all the different DOI prefixes of all the
  // publishers with multiple DOI prefixes (substring_index instead of
  // substr/locate).
  private static final String GET_PUBLISHERS_MULTIPLE_DOI_PREFIXES_MYSQL_QUERY =
      "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substring_index(d." + DOI_COLUMN + ", '/', 1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and pr." + PUBLISHER_NAME_COLUMN + " in ("
      + " select subq." + PUBLISHER_NAME_COLUMN
      + " from ("
      + "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substring_index(d." + DOI_COLUMN + ", '/', 1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq." + PUBLISHER_NAME_COLUMN
      + " having count(subq." + PUBLISHER_NAME_COLUMN + ") > 1)"
      + " order by pr." + PUBLISHER_NAME_COLUMN + ", prefix";

  // Derby query to retrieve all the different publishers linked to all the DOI
  // prefixes that are linked to multiple publishers.
  private static final String GET_DOI_PREFIXES_MULTIPLE_PUBLISHERS_DERBY_QUERY =
      "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, locate('/', d." + DOI_COLUMN
      + ")-1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and substr(d." + DOI_COLUMN + ", 1, locate('/', d." + DOI_COLUMN
      + ")-1) in ("
      + " select subq.prefix"
      + " from ("
      + "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, locate('/', d." + DOI_COLUMN
      + ")-1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq.prefix"
      + " having count(subq.prefix) > 1)"
      + " order by prefix, pr." + PUBLISHER_NAME_COLUMN;

  // PostgreSql query to retrieve all the different publishers linked to all the
  // DOI prefixes that are linked to multiple publishers.
  private static final String GET_DOI_PREFIXES_MULTIPLE_PUBLISHERS_PG_QUERY =
      "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, strpos(d." + DOI_COLUMN
      + ", '/')-1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and substr(d." + DOI_COLUMN + ", 1, strpos(d." + DOI_COLUMN
      + ", '/')-1) in ("
      + " select subq.prefix"
      + " from ("
      + "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, strpos(d." + DOI_COLUMN
      + ", '/')-1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq.prefix"
      + " having count(subq.prefix) > 1)"
      + " order by prefix, pr." + PUBLISHER_NAME_COLUMN;

  // MySQL query to retrieve all the different publishers linked to all the DOI
  // prefixes that are linked to multiple publishers.
  private static final String GET_DOI_PREFIXES_MULTIPLE_PUBLISHERS_MYSQL_QUERY =
      "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substring_index(d." + DOI_COLUMN + ", '/', 1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and substring_index(d." + DOI_COLUMN + ", '/', 1) in ("
      + " select subq.prefix"
      + " from ("
      + "select distinct pr." + PUBLISHER_NAME_COLUMN
      + ", substring_index(d." + DOI_COLUMN + ", '/', 1) as prefix"
      + " from " + PUBLISHER_TABLE + " pr"
      + ", " + DOI_TABLE + " d"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = pn." + PUBLISHER_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = m." + PARENT_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq.prefix"
      + " having count(subq.prefix) > 1)"
      + " order by prefix, pr." + PUBLISHER_NAME_COLUMN;

  // Derby query to retrieve all the different DOI prefixes of all the Archival
  // Units with multiple DOI prefixes.
  // NOTE(review): AU_TABLE is joined without an explicit alias but referenced
  // as "au." — presumably the table name itself is "au"; verify against the
  // table-name constant.
  private static final String GET_AUS_MULTIPLE_DOI_PREFIXES_DERBY_QUERY =
      "select distinct pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN
      + ", au." + AU_SEQ_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, locate('/', d." + DOI_COLUMN
      + ")-1) as prefix"
      + " from " + PLUGIN_TABLE + " pl"
      + ", " + AU_TABLE
      + ", " + DOI_TABLE + " d"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pl." + PLUGIN_SEQ_COLUMN + " = au." + PLUGIN_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + AU_MD_SEQ_COLUMN + " = m." + AU_MD_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " in ("
      + " select subq." + AU_SEQ_COLUMN
      + " from ("
      + "select distinct au." + AU_SEQ_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, locate('/', d." + DOI_COLUMN
      + ")-1) as prefix"
      + " from " + AU_TABLE
      + ", " + DOI_TABLE + " d"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " m"
      + " where au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + AU_MD_SEQ_COLUMN + " = m." + AU_MD_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq." + AU_SEQ_COLUMN
      + " having count(subq." + AU_SEQ_COLUMN + ") > 1)"
      + " order by pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN + ", prefix";

  // PostgreSQL query to retrieve all the different DOI prefixes of all the
  // Archival Units with multiple DOI prefixes.
  private static final String GET_AUS_MULTIPLE_DOI_PREFIXES_PG_QUERY =
      "select distinct "
      + " pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN
      + ", au." + AU_SEQ_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, strpos(d." + DOI_COLUMN
      + ", '/')-1) as prefix"
      + " from " + PLUGIN_TABLE + " pl"
      + ", " + AU_TABLE
      + ", " + DOI_TABLE + " d"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pl." + PLUGIN_SEQ_COLUMN + " = au." + PLUGIN_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + AU_MD_SEQ_COLUMN + " = m." + AU_MD_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " in ("
      + " select subq." + AU_SEQ_COLUMN
      + " from ("
      + "select distinct au." + AU_SEQ_COLUMN
      + ", substr(d." + DOI_COLUMN + ", 1, strpos(d." + DOI_COLUMN
      + ", '/')-1) as prefix"
      + " from " + AU_TABLE
      + ", " + DOI_TABLE + " d"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " m"
      + " where au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + AU_MD_SEQ_COLUMN + " = m." + AU_MD_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq." + AU_SEQ_COLUMN
      + " having count(subq." + AU_SEQ_COLUMN + ") > 1)"
      + " order by pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN + ", prefix";

  // MySQL query to retrieve all the different DOI prefixes of all the Archival
  // Units with multiple DOI prefixes.
  private static final String GET_AUS_MULTIPLE_DOI_PREFIXES_MYSQL_QUERY =
      "select distinct pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN
      + ", au." + AU_SEQ_COLUMN
      + ", substring_index(d." + DOI_COLUMN + ", '/', 1) as prefix"
      + " from " + PLUGIN_TABLE + " pl"
      + ", " + AU_TABLE
      + ", " + DOI_TABLE + " d"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " m"
      + " where pl." + PLUGIN_SEQ_COLUMN + " = au." + PLUGIN_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + AU_MD_SEQ_COLUMN + " = m." + AU_MD_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " in ("
      + " select subq." + AU_SEQ_COLUMN
      + " from ("
      + "select distinct au." + AU_SEQ_COLUMN
      + ", substring_index(d." + DOI_COLUMN + ", '/', 1) as prefix"
      + " from " + AU_TABLE
      + ", " + DOI_TABLE + " d"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " m"
      + " where au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + AU_MD_SEQ_COLUMN + " = m." + AU_MD_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = d." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq." + AU_SEQ_COLUMN
      + " having count(subq." + AU_SEQ_COLUMN + ") > 1)"
      + " order by pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN + ", prefix";

  // Query to retrieve all the different ISBNs of all the publications with more
  // than 2 ISBNs.
  private static final String GET_PUBLICATIONS_MORE_2_ISBNS_QUERY = "select"
      + " distinct mn." + NAME_COLUMN
      + ", isbn." + ISBN_COLUMN
      + ", isbn." + ISBN_TYPE_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + ISBN_TABLE
      + " where mn." + MD_ITEM_SEQ_COLUMN + " = isbn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + " and mn." + NAME_COLUMN + " in ("
      + "select subq." + NAME_COLUMN
      + " from ("
      + " select distinct mn." + NAME_COLUMN
      + ", isbn." + ISBN_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + ISBN_TABLE
      + " where mn." + MD_ITEM_SEQ_COLUMN + " = isbn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + ") as subq"
      + " group by subq." + NAME_COLUMN
      + " having count(subq." + NAME_COLUMN + ") > 2)"
      + " order by mn." + NAME_COLUMN
      + ", isbn." + ISBN_COLUMN
      + ", isbn." + ISBN_TYPE_COLUMN;

  // Query to retrieve all the different ISSNs of all the publications with more
  // than 2 ISSNs.
  private static final String GET_PUBLICATIONS_MORE_2_ISSNS_QUERY = "select"
      + " distinct mn." + NAME_COLUMN
      + ", issn." + MD_ITEM_SEQ_COLUMN
      + ", issn." + ISSN_COLUMN
      + ", issn." + ISSN_TYPE_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + ISSN_TABLE
      + " where mn." + MD_ITEM_SEQ_COLUMN + " = issn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + " and mn." + NAME_COLUMN + " in ("
      + "select subq." + NAME_COLUMN
      + " from ("
      + " select distinct mn." + NAME_COLUMN
      + ", issn." + ISSN_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + ISSN_TABLE
      + " where mn." + MD_ITEM_SEQ_COLUMN + " = issn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + ") as subq"
      + " group by subq." + NAME_COLUMN
      + " having count(subq." + NAME_COLUMN + ") > 2)"
      + " order by mn." + NAME_COLUMN
      + ", issn." + MD_ITEM_SEQ_COLUMN
      + ", issn." + ISSN_COLUMN
      + ", issn." + ISSN_TYPE_COLUMN;

  // Query to retrieve all the different publications linked to all the ISBNs
  // that are linked to multiple publications.
  private static final String GET_ISBNS_MULTIPLE_PUBLICATIONS_QUERY = "select"
      + " distinct mn." + NAME_COLUMN
      + ", isbn." + ISBN_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + ISBN_TABLE
      + " where mn." + MD_ITEM_SEQ_COLUMN + " = isbn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + " and isbn." + ISBN_COLUMN + " in ("
      + "select subq." + ISBN_COLUMN
      + " from ("
      + " select distinct mn." + NAME_COLUMN
      + ", isbn." + ISBN_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + ISBN_TABLE
      + " where mn." + MD_ITEM_SEQ_COLUMN + " = isbn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + ") as subq"
      + " group by subq." + ISBN_COLUMN
      + " having count(subq." + ISBN_COLUMN + ") > 1)"
      + " order by isbn." + ISBN_COLUMN
      + ", mn." + NAME_COLUMN;

  // Query to retrieve all the different publications linked to all the ISSNs
  // that are linked to multiple publications.
  private static final String GET_ISSNS_MULTIPLE_PUBLICATIONS_QUERY = "select"
      + " distinct mn." + NAME_COLUMN
      + ", issn." + ISSN_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + ISSN_TABLE
      + " where mn." + MD_ITEM_SEQ_COLUMN + " = issn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + " and issn." + ISSN_COLUMN + " in ("
      + "select subq." + ISSN_COLUMN
      + " from ("
      + " select distinct mn." + NAME_COLUMN
      + ", issn." + ISSN_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + ISSN_TABLE
      + " where mn." + MD_ITEM_SEQ_COLUMN + " = issn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + ") as subq"
      + " group by subq." + ISSN_COLUMN
      + " having count(subq." + ISSN_COLUMN + ") > 1)"
      + " order by issn." + ISSN_COLUMN
      + ", mn." + NAME_COLUMN;

  // Query to retrieve all the different ISSNs that are linked to books
  // (i.e. publications whose type is none of book series, journal,
  // proceedings or unknown).
  private static final String GET_BOOKS_WITH_ISSNS_QUERY = "select distinct"
      + " mn." + NAME_COLUMN
      + ", mit." + TYPE_NAME_COLUMN
      + ", issn." + ISSN_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + MD_ITEM_TYPE_TABLE + " mit"
      + ", " + ISSN_TABLE
      + ", " + PUBLICATION_TABLE + " p"
      + ", " + MD_ITEM_TABLE + " m"
      + " where p." + MD_ITEM_SEQ_COLUMN + " = m." + MD_ITEM_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = mn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + " and m." + MD_ITEM_SEQ_COLUMN + " = issn." + MD_ITEM_SEQ_COLUMN
      + " and m." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mit." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mit." + TYPE_NAME_COLUMN + " != '" + MD_ITEM_TYPE_BOOK_SERIES
      + "'"
      + " and mit." + TYPE_NAME_COLUMN + " != '" + MD_ITEM_TYPE_JOURNAL + "'"
      + " and mit." + TYPE_NAME_COLUMN + " != '" + MD_ITEM_TYPE_PROCEEDINGS
      + "'"
      + " and mit." + TYPE_NAME_COLUMN + " != '"
      + MD_ITEM_TYPE_UNKNOWN_PUBLICATION + "'"
      + " order by mn." + NAME_COLUMN
      + ", mit." + TYPE_NAME_COLUMN
      + ", issn." + ISSN_COLUMN;

  // Query to retrieve all the different ISBNs that are linked to periodicals
  // (i.e. publications whose type is not book).
  private static final String GET_PERIODICALS_WITH_ISBNS_QUERY = "select"
      + " distinct mn." + NAME_COLUMN
      + ", mit." + TYPE_NAME_COLUMN
      + ", isbn." + ISBN_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " mn"
      + ", " + MD_ITEM_TYPE_TABLE + " mit"
      + ", " + ISBN_TABLE
      + ", " + PUBLICATION_TABLE + " p"
      + ", " + MD_ITEM_TABLE + " m"
      + " where p." + MD_ITEM_SEQ_COLUMN + " = m." + MD_ITEM_SEQ_COLUMN
      + " and m." + MD_ITEM_SEQ_COLUMN + " = mn." + MD_ITEM_SEQ_COLUMN
      + " and mn." + NAME_TYPE_COLUMN + " = 'primary'"
      + " and m." + MD_ITEM_SEQ_COLUMN + " = isbn." + MD_ITEM_SEQ_COLUMN
      + " and m." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mit." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mit." + TYPE_NAME_COLUMN + " != '" + MD_ITEM_TYPE_BOOK + "'"
      + " order by mn." + NAME_COLUMN
      + ", mit." + TYPE_NAME_COLUMN
      + ", isbn." + ISBN_COLUMN;

  // Query to retrieve all the Archival Units with an unknown provider.
  private static final String GET_UNKNOWN_PROVIDER_AUS_QUERY = "select"
      + " pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN
      + " from " + PLUGIN_TABLE + " pl"
      + ", " + AU_TABLE
      + ", " + AU_MD_TABLE + " am"
      + ", " + PROVIDER_TABLE + " pr"
      + " where pl." + PLUGIN_SEQ_COLUMN + " = au." + PLUGIN_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + PROVIDER_SEQ_COLUMN + " = pr." + PROVIDER_SEQ_COLUMN
      + " and pr." + PROVIDER_NAME_COLUMN + " = '" + UNKNOWN_PROVIDER_NAME
      + "'"
      + " order by pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN;

  // Query to retrieve all the journal articles in the database whose parent
  // is not a journal.
  // First union branch: child items with a mismatched parent type; second
  // branch: journal-article items with no parent at all.
  // NOTE(review): relies on hard-coded metadata-item-type sequence values
  // (mi1 type 5, parent type 4) — verify against the type-table seed data.
  private static final String GET_MISMATCHED_PARENT_JOURNAL_ARTICLES_QUERY =
      "select min1." + NAME_COLUMN + " as \"col1\""
      + ", min2." + NAME_COLUMN + " as \"col2\""
      + ", mit." + TYPE_NAME_COLUMN + " as \"col3\""
      + ", au." + AU_KEY_COLUMN + " as \"col4\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col5\""
      + " from " + MD_ITEM_TYPE_TABLE + " mit"
      + ", " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + ", " + MD_ITEM_TABLE + " mi2"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min2"
      + " on mi2." + MD_ITEM_SEQ_COLUMN + " = min2." + MD_ITEM_SEQ_COLUMN
      + " and min2." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " = mi2." + MD_ITEM_SEQ_COLUMN
      + " and mit." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mi2." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 5"
      + " and mi2." + MD_ITEM_TYPE_SEQ_COLUMN + " != 4"
      + " union "
      + "select min1." + NAME_COLUMN + " as \"col1\""
      + ", '' as \"col2\""
      + ", '' as \"col3\""
      + ", au." + AU_KEY_COLUMN + " as \"col4\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col5\""
      + " from " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " is null"
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 5"
      + " order by \"col5\", \"col4\", \"col2\", \"col1\"";

  // Query to retrieve all the book chapters in the database whose parent is not
  // a book nor a book series.
  // First union branch: chapters whose parent type is neither book (2) nor
  // book series (1); second branch: chapters with no parent at all.
  // NOTE(review): relies on hard-coded metadata-item-type sequence values
  // (3, 2, 1) — verify against the type-table seed data.
  private static final String GET_MISMATCHED_PARENT_BOOK_CHAPTERS_QUERY =
      "select min1." + NAME_COLUMN + " as \"col1\""
      + ", min2." + NAME_COLUMN + " as \"col2\""
      + ", mit." + TYPE_NAME_COLUMN + " as \"col3\""
      + ", au." + AU_KEY_COLUMN + " as \"col4\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col5\""
      + " from " + MD_ITEM_TYPE_TABLE + " mit"
      + ", " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + ", " + MD_ITEM_TABLE + " mi2"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min2"
      + " on mi2." + MD_ITEM_SEQ_COLUMN + " = min2." + MD_ITEM_SEQ_COLUMN
      + " and min2." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " = mi2." + MD_ITEM_SEQ_COLUMN
      + " and mit." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mi2." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 3"
      + " and mi2." + MD_ITEM_TYPE_SEQ_COLUMN + " != 2"
      + " and mi2." + MD_ITEM_TYPE_SEQ_COLUMN + " != 1"
      + " union "
      + "select min1." + NAME_COLUMN + " as \"col1\""
      + ", '' as \"col2\""
      + ", '' as \"col3\""
      + ", au." + AU_KEY_COLUMN + " as \"col4\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col5\""
      + " from " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " is null"
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 3"
      + " order by \"col5\", \"col4\", \"col2\", \"col1\"";

  // Query to retrieve all the book volumes in the database whose parent is not
  // a book nor a book series.
  // Same shape as the book-chapter query above, with item type 6.
  private static final String GET_MISMATCHED_PARENT_BOOK_VOLUMES_QUERY =
      "select min1." + NAME_COLUMN + " as \"col1\""
      + ", min2." + NAME_COLUMN + " as \"col2\""
      + ", mit." + TYPE_NAME_COLUMN + " as \"col3\""
      + ", au." + AU_KEY_COLUMN + " as \"col4\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col5\""
      + " from " + MD_ITEM_TYPE_TABLE + " mit"
      + ", " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + ", " + MD_ITEM_TABLE + " mi2"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min2"
      + " on mi2." + MD_ITEM_SEQ_COLUMN + " = min2." + MD_ITEM_SEQ_COLUMN
      + " and min2." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " = mi2." + MD_ITEM_SEQ_COLUMN
      + " and mit." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mi2." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 6"
      + " and mi2." + MD_ITEM_TYPE_SEQ_COLUMN + " != 2"
      + " and mi2." + MD_ITEM_TYPE_SEQ_COLUMN + " != 1"
      + " union "
      + "select min1." + NAME_COLUMN + " as \"col1\""
      + ", '' as \"col2\""
      + ", '' as \"col3\""
      + ", au." + AU_KEY_COLUMN + " as \"col4\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col5\""
      + " from " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " is null"
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 6"
      + " order by \"col5\", \"col4\", \"col2\", \"col1\"";

  // Query to retrieve all the different publishers of all the Archival Units
  // with multiple publishers.
  private static final String GET_AUS_MULTIPLE_PUBLISHERS_QUERY = "select "
      + "distinct pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN
      + ", pr." + PUBLISHER_NAME_COLUMN
      + " from " + PLUGIN_TABLE + " pl"
      + ", " + AU_TABLE
      + ", " + PUBLISHER_TABLE + " pr"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " m"
      + ", " + PUBLICATION_TABLE + " pn"
      + " where pl." + PLUGIN_SEQ_COLUMN + " = au." + PLUGIN_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + AU_MD_SEQ_COLUMN + " = m." + AU_MD_SEQ_COLUMN
      + " and m." + PARENT_SEQ_COLUMN + " = pn." + MD_ITEM_SEQ_COLUMN
      + " and pn." + PUBLISHER_SEQ_COLUMN + " = pr." + PUBLISHER_SEQ_COLUMN
      + " and au." + AU_SEQ_COLUMN + " in ("
      + " select subq." + AU_SEQ_COLUMN
      + " from ("
      + "select distinct au." + AU_SEQ_COLUMN
      + ", pr." + PUBLISHER_SEQ_COLUMN
      + " from " + AU_TABLE
      + ", " + PUBLISHER_TABLE + " pr"
      + ", " + AU_MD_TABLE + " am"
      + ", " + MD_ITEM_TABLE + " m"
      + ", " + PUBLICATION_TABLE + " pn"
      + " where au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN
      + " and am." + AU_MD_SEQ_COLUMN + " = m." + AU_MD_SEQ_COLUMN
      + " and m." + PARENT_SEQ_COLUMN + " = pn." + MD_ITEM_SEQ_COLUMN
      + " and pn." + PUBLISHER_SEQ_COLUMN + " = pr." + PUBLISHER_SEQ_COLUMN
      + ") as subq"
      + " group by subq." + AU_SEQ_COLUMN
      + " having count(subq." + AU_SEQ_COLUMN + ") > 1)"
      + " order by pl." + PLUGIN_ID_COLUMN
      + ", au." + AU_KEY_COLUMN
      + ", pr." + PUBLISHER_NAME_COLUMN;

  // Query to retrieve all the metadata items that have no name.
  // First union branch: unnamed child items (with their parent's name and
  // type); second branch: unnamed parent items.
  private static final String GET_UNNAMED_ITEMS_QUERY = "select "
      + "count(mi1." + MD_ITEM_SEQ_COLUMN + ") as \"col1\""
      + ", mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " as \"ts1\""
      + ", mit1." + TYPE_NAME_COLUMN + " as \"col2\""
      + ", min2." + NAME_COLUMN + " as \"col3\""
      + ", mit2." + MD_ITEM_TYPE_SEQ_COLUMN + " as \"ts2\""
      + ", mit2." + TYPE_NAME_COLUMN + " as \"col4\""
      + ", au." + AU_KEY_COLUMN + " as \"col5\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col6\""
      + ", pr." + PUBLISHER_NAME_COLUMN + " as \"col7\""
      + " from " + MD_ITEM_TYPE_TABLE + " mit1"
      + ", " + MD_ITEM_TYPE_TABLE + " mit2"
      + ", " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + PUBLISHER_TABLE + " pr"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + ", " + MD_ITEM_TABLE + " mi2"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min2"
      + " on mi2." + MD_ITEM_SEQ_COLUMN + " = min2." + MD_ITEM_SEQ_COLUMN
      + " and min2." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " = mi2." + MD_ITEM_SEQ_COLUMN
      + " and mit1." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mi1." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mit2." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mi2." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi2." + MD_ITEM_SEQ_COLUMN + " = pn." + MD_ITEM_SEQ_COLUMN
      + " and pn." + PUBLISHER_SEQ_COLUMN + " = pr." + PUBLISHER_SEQ_COLUMN
      + " and min1." + NAME_COLUMN + " is null"
      + " group by mit1." + MD_ITEM_TYPE_SEQ_COLUMN
      + ", mit1." + TYPE_NAME_COLUMN
      + ", min2." + NAME_COLUMN
      + ", mit2." + MD_ITEM_TYPE_SEQ_COLUMN
      + ", mit2." + TYPE_NAME_COLUMN
      + ", au." + AU_KEY_COLUMN
      + ", pl." + PLUGIN_ID_COLUMN
      + ", pr." + PUBLISHER_NAME_COLUMN
      + " union "
      + "select count(mi1." + MD_ITEM_SEQ_COLUMN + ") as \"col1\""
      + ", mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " as \"ts1\""
      + ", mit1." + TYPE_NAME_COLUMN + " as \"col2\""
      + ", '' as \"col3\""
      + ", 0 as \"ts2\""
      + ", '' as \"col4\""
      + ", au." + AU_KEY_COLUMN + " as \"col5\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col6\""
      + ", pr." + PUBLISHER_NAME_COLUMN + " as \"col7\""
      + " from " + MD_ITEM_TYPE_TABLE + " mit1"
      + ", " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + PUBLISHER_TABLE + " pr"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " is null"
      + " and mit1." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mi1." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi1." + MD_ITEM_SEQ_COLUMN + " = pn." + MD_ITEM_SEQ_COLUMN
      + " and pn." + PUBLISHER_SEQ_COLUMN + " = pr." + PUBLISHER_SEQ_COLUMN
      + " and min1." + NAME_COLUMN + " is null"
      + " group by mit1." + MD_ITEM_TYPE_SEQ_COLUMN
      + ", mit1." + TYPE_NAME_COLUMN
      + ", au." + AU_KEY_COLUMN
      + ", pl." + PLUGIN_ID_COLUMN
      + ", pr." + PUBLISHER_NAME_COLUMN
      + " order by \"col7\", \"col6\", \"col5\", \"ts2\", \"col3\", \"ts1\"";

  // Query to find the publication date interval of an Archival Unit,
  // identified by its AU key and plugin identifier (parameters in that
  // order).
  private static final String FIND_PUBLICATION_DATE_INTERVAL_QUERY = "select "
      + "min(mi." + DATE_COLUMN + ") as earliest"
      + ", max(mi." + DATE_COLUMN + ") as latest"
      + " from " + MD_ITEM_TABLE + " mi"
      + ", " + AU_MD_TABLE + " am"
      + ", " + AU_TABLE
      + ", " + PLUGIN_TABLE + " p"
      + " where mi." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and " + AU_TABLE + "." + AU_KEY_COLUMN + " = ?"
      + " and " + AU_TABLE + "." + PLUGIN_SEQ_COLUMN
      + " = p." + PLUGIN_SEQ_COLUMN
      + " and p." + PLUGIN_ID_COLUMN + " = ?";

  // Query to retrieve all the publications that are linked to more than one
  // proprietary identifier.
  private static final String GET_PUBLICATIONS_MULTIPLE_PIDS_QUERY =
      "select n." + NAME_COLUMN
      + ", pi." + PROPRIETARY_ID_COLUMN
      + " from " + MD_ITEM_NAME_TABLE + " n"
      + ", " + PROPRIETARY_ID_TABLE + " pi"
      + ", " + PUBLICATION_TABLE + " pn"
      + " where n." + MD_ITEM_SEQ_COLUMN + " = pn." + MD_ITEM_SEQ_COLUMN
      + " and pn." + MD_ITEM_SEQ_COLUMN + " = pi." + MD_ITEM_SEQ_COLUMN
      + " and n." + NAME_TYPE_COLUMN + " = 'primary'"
      + " and pn." + MD_ITEM_SEQ_COLUMN + " in ("
      + " select subq." + MD_ITEM_SEQ_COLUMN
      + " from ("
      + "select pn." + MD_ITEM_SEQ_COLUMN
      + ", pi." + PROPRIETARY_ID_COLUMN
      + " from " + PUBLICATION_TABLE + " pn"
      + ", " + PROPRIETARY_ID_TABLE + " pi"
      + " where pn." + MD_ITEM_SEQ_COLUMN + " = pi." + MD_ITEM_SEQ_COLUMN
      + ") as subq"
      + " group by subq." + MD_ITEM_SEQ_COLUMN
      + " having count(subq." + MD_ITEM_SEQ_COLUMN + ") > 1)"
      + " order by n." + NAME_COLUMN
      + ", pi." + PROPRIETARY_ID_COLUMN;

  // Query to retrieve all the non-parent metadata items that have no DOI.
  // NOTE(review): filters on hard-coded metadata-item-type sequence values
  // (3, 5, 6, 8) — verify against the type-table seed data.
  private static final String GET_NO_DOI_ITEMS_QUERY = "select "
      + "min1." + NAME_COLUMN + " as \"col1\""
      + ", mit1." + TYPE_NAME_COLUMN + " as \"col2\""
      + ", min2." + NAME_COLUMN + " as \"col3\""
      + ", mit2." + TYPE_NAME_COLUMN + " as \"col4\""
      + ", au." + AU_KEY_COLUMN + " as \"col5\""
      + ", pl." + PLUGIN_ID_COLUMN + " as \"col6\""
      + ", pr." + PUBLISHER_NAME_COLUMN + " as \"col7\""
      + " from " + MD_ITEM_TYPE_TABLE + " mit1"
      + ", " + MD_ITEM_TYPE_TABLE + " mit2"
      + ", " + AU_TABLE
      + ", " + PLUGIN_TABLE + " pl"
      + ", " + AU_MD_TABLE + " am"
      + ", " + PUBLICATION_TABLE + " pn"
      + ", " + PUBLISHER_TABLE + " pr"
      + ", " + MD_ITEM_TABLE + " mi1"
      + " left outer join " + DOI_TABLE
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = " + DOI_TABLE + "."
      + MD_ITEM_SEQ_COLUMN
      + " left outer join " + MD_ITEM_NAME_TABLE + " min1"
      + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN
      + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + ", " + MD_ITEM_TABLE + " mi2"
      + " left outer join " + MD_ITEM_NAME_TABLE + " min2"
      + " on mi2." + MD_ITEM_SEQ_COLUMN + " = min2." + MD_ITEM_SEQ_COLUMN
      + " and min2." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'"
      + " where mi1." + PARENT_SEQ_COLUMN + " = mi2." + MD_ITEM_SEQ_COLUMN
      + " and mit1." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mi1." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mit2." + MD_ITEM_TYPE_SEQ_COLUMN
      + " = mi2." + MD_ITEM_TYPE_SEQ_COLUMN
      + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN
      + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN
      + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN
      + " and mi2." + MD_ITEM_SEQ_COLUMN + " = pn." + MD_ITEM_SEQ_COLUMN
      + " and pn." + PUBLISHER_SEQ_COLUMN + " = pr." + PUBLISHER_SEQ_COLUMN
      + " and (mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 3"
      + " or mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 5"
      + " or mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 6"
      + " or mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 8)"
      + " and " + DOI_TABLE + "." + DOI_COLUMN + " is null"
      + " order by \"col7\", \"col6\", \"col5\", \"col3\", \"col1\"";

  // Query to retrieve all the non-parent metadata items that have no Access
  // URL.
  private static final String GET_NO_ACCESS_URL_ITEMS_QUERY = "select "
      + "min1." + NAME_COLUMN + " as \"col1\""
      + ", mit1." + TYPE_NAME_COLUMN + " as \"col2\""
      + ", min2." + NAME_COLUMN + " as \"col3\""
      + ", mit2."
+ TYPE_NAME_COLUMN + " as \"col4\"" + ", au." + AU_KEY_COLUMN + " as \"col5\"" + ", pl." + PLUGIN_ID_COLUMN + " as \"col6\"" + ", pr." + PUBLISHER_NAME_COLUMN + " as \"col7\"" + " from " + MD_ITEM_TYPE_TABLE + " mit1" + ", " + MD_ITEM_TYPE_TABLE + " mit2" + ", " + AU_TABLE + ", " + PLUGIN_TABLE + " pl" + ", " + AU_MD_TABLE + " am" + ", " + PUBLICATION_TABLE + " pn" + ", " + PUBLISHER_TABLE + " pr" + ", " + MD_ITEM_TABLE + " mi1" + " left outer join " + URL_TABLE + " on mi1." + MD_ITEM_SEQ_COLUMN + " = " + URL_TABLE + "." + MD_ITEM_SEQ_COLUMN + " and " + URL_TABLE + "." + FEATURE_COLUMN + " = '" + MetadataManager.ACCESS_URL_FEATURE + "'" + " left outer join " + MD_ITEM_NAME_TABLE + " min1" + " on mi1." + MD_ITEM_SEQ_COLUMN + " = min1." + MD_ITEM_SEQ_COLUMN + " and min1." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'" + ", " + MD_ITEM_TABLE + " mi2" + " left outer join " + MD_ITEM_NAME_TABLE + " min2" + " on mi2." + MD_ITEM_SEQ_COLUMN + " = min2." + MD_ITEM_SEQ_COLUMN + " and min2." + NAME_TYPE_COLUMN + " = '" + PRIMARY_NAME_TYPE + "'" + " where mi1." + PARENT_SEQ_COLUMN + " = mi2." + MD_ITEM_SEQ_COLUMN + " and mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = mi1." + MD_ITEM_TYPE_SEQ_COLUMN + " and mit2." + MD_ITEM_TYPE_SEQ_COLUMN + " = mi2." + MD_ITEM_TYPE_SEQ_COLUMN + " and mi1." + AU_MD_SEQ_COLUMN + " = am." + AU_MD_SEQ_COLUMN + " and am." + AU_SEQ_COLUMN + " = au." + AU_SEQ_COLUMN + " and au." + PLUGIN_SEQ_COLUMN + " = pl." + PLUGIN_SEQ_COLUMN + " and mi2." + MD_ITEM_SEQ_COLUMN + " = pn." + MD_ITEM_SEQ_COLUMN + " and pn." + PUBLISHER_SEQ_COLUMN + " = pr." + PUBLISHER_SEQ_COLUMN + " and (mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 3" + " or mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 5" + " or mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 6" + " or mit1." + MD_ITEM_TYPE_SEQ_COLUMN + " = 8)" + " and " + URL_TABLE + "." + FEATURE_COLUMN + " is null" + " order by \"col7\", \"col6\", \"col5\", \"col3\", \"col1\""; // Query to delete an ISSN linked to a publication. 
private static final String DELETE_ISSN_QUERY = "delete from " + ISSN_TABLE + " where " + MD_ITEM_SEQ_COLUMN + " = ?" + " and " + ISSN_COLUMN + " = ?" + " and " + ISSN_TYPE_COLUMN + " = ?"; // Query to update the unknown provider of an Archival Unit. private static final String UPDATE_AU_MD_UNKNOWN_PROVIDER_QUERY = "update " + AU_MD_TABLE + " set " + PROVIDER_SEQ_COLUMN + " = ?" + " where " + AU_MD_SEQ_COLUMN + " IN (" + "select am." + AU_MD_SEQ_COLUMN + " from " + AU_MD_TABLE + " am" + "," + PROVIDER_TABLE + " p" + " where am." + PROVIDER_SEQ_COLUMN + " = p." + PROVIDER_SEQ_COLUMN + " and p." + PROVIDER_NAME_COLUMN + " = '" + UNKNOWN_PROVIDER_NAME + "'" + " and am." + AU_MD_SEQ_COLUMN + " = ?" + ")"; // Query to retrieve all the Archival Units with no metadata items. private static final String GET_NO_ITEMS_AUS_QUERY = "select" + " pl." + PLUGIN_ID_COLUMN + ", au." + AU_KEY_COLUMN + ", count(mi." + MD_ITEM_SEQ_COLUMN + ")" + " from " + PLUGIN_TABLE + " pl" + ", " + AU_TABLE + ", " + AU_MD_TABLE + " am" + " left outer join " + MD_ITEM_TABLE + " mi" + " on am." + AU_MD_SEQ_COLUMN + " = mi."+ AU_MD_SEQ_COLUMN + " where pl." + PLUGIN_SEQ_COLUMN + " = au." + PLUGIN_SEQ_COLUMN + " and au." + AU_SEQ_COLUMN + " = am." + AU_SEQ_COLUMN + " group by pl." + PLUGIN_ID_COLUMN + ", au." + AU_KEY_COLUMN + " having count(mi." + MD_ITEM_SEQ_COLUMN + ") = 0" + " order by pl." + PLUGIN_ID_COLUMN + ", au." + AU_KEY_COLUMN; // Query to get the metadata information of an Archival Unit. private static final String GET_AU_MD_QUERY = "select " + "m." + AU_MD_SEQ_COLUMN + ", m." + AU_SEQ_COLUMN + ", m." + MD_VERSION_COLUMN + ", m." + EXTRACT_TIME_COLUMN + ", m." + CREATION_TIME_COLUMN + ", m." + PROVIDER_SEQ_COLUMN + " from " + AU_MD_TABLE + " m," + AU_TABLE + " a," + PLUGIN_TABLE + " p" + " where m." + AU_SEQ_COLUMN + " = " + " a." + AU_SEQ_COLUMN + " and a." + PLUGIN_SEQ_COLUMN + " = " + " p." + PLUGIN_SEQ_COLUMN + " and p." + PLUGIN_ID_COLUMN + " = ?" + " and a." 
+ AU_KEY_COLUMN + " = ?"; private DbManager dbManager; private MetadataManager metadataManager; /** * Constructor. * * @param dbManager * A DbManager with the database manager. * @param metadataManager * A MetadataManager with the metadata manager. */ MetadataManagerSql(DbManager dbManager, MetadataManager metadataManager) throws DbException { this.dbManager = dbManager; this.metadataManager = metadataManager; } /** * Provides the number of enabled pending AUs. * * @return a long with the number of enabled pending AUs. * @throws DbException * if any problem occurred accessing the database. */ long getEnabledPendingAusCount() throws DbException { final String DEBUG_HEADER = "getEnabledPendingAusCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; // Get a connection to the database. Connection conn = dbManager.getConnection(); try { rowCount = getEnabledPendingAusCount(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of enabled pending AUs. * * @param conn * A Connection with the database connection to be used. * @return a long with the number of enabled pending AUs. * @throws DbException * if any problem occurred accessing the database. 
*/ long getEnabledPendingAusCount(Connection conn) throws DbException { final String DEBUG_HEADER = "getEnabledPendingAusCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; PreparedStatement stmt = dbManager.prepareStatement(conn, COUNT_ENABLED_PENDING_AUS_QUERY); ResultSet resultSet = null; try { resultSet = dbManager.executeQuery(stmt); resultSet.next(); rowCount = resultSet.getLong(1); } catch (SQLException sqle) { String message = "Cannot get the count of enabled pending AUs"; log.error(message, sqle); log.error("SQL = '" + COUNT_ENABLED_PENDING_AUS_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of articles in the metadata database. * * @return a long with the number of articles in the metadata database. * @throws DbException * if any problem occurred accessing the database. */ long getArticleCount() throws DbException { final String DEBUG_HEADER = "getArticleCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; // Get a connection to the database. Connection conn = dbManager.getConnection(); try { rowCount = getArticleCount(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of articles in the metadata database. * * @param conn * A Connection with the database connection to be used. * @return a long with the number of articles in the metadata database. * @throws DbException * if any problem occurred accessing the database. 
*/ long getArticleCount(Connection conn) throws DbException { final String DEBUG_HEADER = "getArticleCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; PreparedStatement stmt = dbManager.prepareStatement(conn, COUNT_BIB_ITEM_QUERY); ResultSet resultSet = null; try { resultSet = dbManager.executeQuery(stmt); resultSet.next(); rowCount = resultSet.getLong(1); } catch (SQLException sqle) { String message = "Cannot get the count of articles"; log.error(message, sqle); log.error("SQL = '" + COUNT_BIB_ITEM_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of publications in the metadata database. * * @return a long with the number of publications in the metadata database. * @throws DbException * if any problem occurred accessing the database. */ long getPublicationCount() throws DbException { final String DEBUG_HEADER = "getPublicationCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; // Get a connection to the database. Connection conn = dbManager.getConnection(); try { rowCount = getPublicationCount(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of publications in the metadata database. * * @param conn * A Connection with the database connection to be used. * @return a long with the number of publications in the metadata database. * @throws DbException * if any problem occurred accessing the database. 
*/ long getPublicationCount(Connection conn) throws DbException { final String DEBUG_HEADER = "getPublicationCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; PreparedStatement stmt = dbManager.prepareStatement(conn, COUNT_PUBLICATION_QUERY); ResultSet resultSet = null; try { resultSet = dbManager.executeQuery(stmt); resultSet.next(); rowCount = resultSet.getLong(1); } catch (SQLException sqle) { String message = "Cannot get the count of publications"; log.error(message, sqle); log.error("SQL = '" + COUNT_PUBLICATION_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of publishers in the metadata database. * * @return a long with the number of publishers in the metadata database. * @throws DbException * if any problem occurred accessing the database. */ long getPublisherCount() throws DbException { final String DEBUG_HEADER = "getPublisherCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; // Get a connection to the database. Connection conn = dbManager.getConnection(); try { rowCount = getPublisherCount(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of publishers in the metadata database. * * @param conn * A Connection with the database connection to be used. * @return a long with the number of publishers in the metadata database. * @throws DbException * if any problem occurred accessing the database. 
*/ long getPublisherCount(Connection conn) throws DbException { final String DEBUG_HEADER = "getPublisherCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; PreparedStatement stmt = dbManager.prepareStatement(conn, COUNT_PUBLISHER_QUERY); ResultSet resultSet = null; try { resultSet = dbManager.executeQuery(stmt); resultSet.next(); rowCount = resultSet.getLong(1); } catch (SQLException sqle) { String message = "Cannot get the count of publishers"; log.error(message, sqle); log.error("SQL = '" + COUNT_PUBLISHER_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of providers in the metadata database. * * @return a long with the number of providers in the metadata database. * @throws DbException * if any problem occurred accessing the database. */ long getProviderCount() throws DbException { final String DEBUG_HEADER = "getProviderCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; // Get a connection to the database. Connection conn = dbManager.getConnection(); try { rowCount = getProviderCount(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides the number of providers in the metadata database. * * @param conn * A Connection with the database connection to be used. * @return a long with the number of providers in the metadata database. * @throws DbException * if any problem occurred accessing the database. 
*/ long getProviderCount(Connection conn) throws DbException { final String DEBUG_HEADER = "getProviderCount(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; PreparedStatement stmt = dbManager.prepareStatement(conn, COUNT_PROVIDER_QUERY); ResultSet resultSet = null; try { resultSet = dbManager.executeQuery(stmt); resultSet.next(); rowCount = resultSet.getLong(1); } catch (SQLException sqle) { String message = "Cannot get the count of providers"; log.error(message, sqle); log.error("SQL = '" + COUNT_PROVIDER_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides a list of AuIds that require reindexing sorted by priority. * * @param conn * A Connection with the database connection to be used. * @param maxAuIds * An int with the maximum number of AuIds to return. * @param prioritizeIndexingNewAus * A boolean with the indication of whether to prioritize new * Archival Units for indexing purposes. * @return a List<String> with the list of AuIds that require reindexing * sorted by priority. 
*/
  List<PrioritizedAuId> getPrioritizedAuIdsToReindex(Connection conn,
      int maxAuIds, boolean prioritizeIndexingNewAus) {
    final String DEBUG_HEADER = "getPrioritizedAuIdsToReindex(): ";
    if (log.isDebug2()) {
      log.debug2(DEBUG_HEADER + "maxAuIds = " + maxAuIds);
      log.debug2(DEBUG_HEADER + "prioritizeIndexingNewAus = "
	  + prioritizeIndexingNewAus);
    }

    ArrayList<PrioritizedAuId> auIds = new ArrayList<PrioritizedAuId>();

    PreparedStatement selectPendingAus = null;
    ResultSet results = null;
    String sql = FIND_PRIORITIZED_ENABLED_PENDING_AUS_QUERY;

    try {
      selectPendingAus = dbManager.prepareStatement(conn, sql);
      selectPendingAus.setBoolean(1, prioritizeIndexingNewAus);
      results = dbManager.executeQuery(selectPendingAus);

      // Walk the prioritized result set, stopping once maxAuIds eligible
      // AUs have been collected.
      while ((auIds.size() < maxAuIds) && results.next()) {
	String pluginId = results.getString(PLUGIN_ID_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId);

	String auKey = results.getString(AU_KEY_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey);

	String auId = PluginManager.generateAuId(pluginId, auKey);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auId = " + auId);

	// Skip AUs that are not eligible, or that are already being
	// reindexed by an active task.
	if (metadataManager.isEligibleForReindexing(auId)) {
	  if (!metadataManager.activeReindexingTasks.containsKey(auId)) {
	    PrioritizedAuId auToReindex = new PrioritizedAuId();
	    auToReindex.auId = auId;

	    long priority = results.getLong(PRIORITY_COLUMN);
	    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "priority = "
		+ priority);
	    auToReindex.priority = priority;

	    boolean isNew = results.getBoolean(ISNEW_COLUMN);
	    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "isNew = " + isNew);
	    auToReindex.isNew = isNew;

	    boolean needFullReindex = results.getBoolean(FULLY_REINDEX_COLUMN);
	    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "needFullReindex = "
		+ needFullReindex);
	    auToReindex.needFullReindex = needFullReindex;

	    auIds.add(auToReindex);

	    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Added auId = "
		+ auId + " to reindex list");
	  }
	}
      }
    } catch (SQLException sqle) {
      // NOTE(review): errors are logged and swallowed here, so callers
      // receive whatever partial list was collected before the failure.
      String message = "Cannot identify the enabled pending AUs";
      log.error(message, sqle);
      log.error("maxAuIds = " + maxAuIds);
      log.error("SQL = '" + sql + "'.");
      log.error("prioritizeIndexingNewAus = " + prioritizeIndexingNewAus);
    } catch (DbException dbe) {
      String message = "Cannot identify the enabled pending AUs";
      log.error(message, dbe);
      log.error("SQL = '" + sql + "'.");
      log.error("prioritizeIndexingNewAus = " + prioritizeIndexingNewAus);
    } finally {
      DbManager.safeCloseResultSet(results);
      DbManager.safeCloseStatement(selectPendingAus);
    }

    auIds.trimToSize();
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auIds.size() = "
	+ auIds.size());
    return auIds;
  }

  /**
   * Removes an AU from the pending Aus table.
   *
   * @param conn
   *          A Connection with the database connection to be used.
   * @param auId
   *          A String with the AU identifier.
   * @return a long with the number of enabled pending AUs remaining after
   *         the removal.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  long removeFromPendingAus(Connection conn, String auId) throws DbException {
    final String DEBUG_HEADER = "removeFromPendingAus(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId);

    String pluginId = null;
    String auKey = null;
    PreparedStatement deletePendingAu =
	dbManager.prepareStatement(conn, DELETE_PENDING_AU_QUERY);

    try {
      // The pending table is keyed by the plugin identifier and the AU key
      // parsed out of the AU identifier.
      pluginId = PluginManager.pluginIdFromAuId(auId);
      auKey = PluginManager.auKeyFromAuId(auId);

      deletePendingAu.setString(1, pluginId);
      deletePendingAu.setString(2, auKey);
      dbManager.executeUpdate(deletePendingAu);
    } catch (SQLException sqle) {
      String message = "Cannot remove AU from pending table";
      log.error(message, sqle);
      log.error("auId = '" + auId + "'.");
      log.error("SQL = '" + DELETE_PENDING_AU_QUERY + "'.");
      log.error("pluginId = '" + pluginId + "'.");
      log.error("auKey = '" + auKey + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseStatement(deletePendingAu);
    }

    // Report how many enabled pending AUs are left after this removal.
    long enabledPendingAusCount = getEnabledPendingAusCount(conn);
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "enabledPendingAusCount = "
	+ enabledPendingAusCount);
    return enabledPendingAusCount;
  }

  /**
   * Removes all metadata items for an AU.
   *
   * @param conn
   *          A Connection with the database connection to be used.
   * @param auId
   *          A String with the AU identifier.
   * @return an int with the number of metadata items deleted.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  int removeAuMetadataItems(Connection conn, String auId) throws DbException {
    final String DEBUG_HEADER = "removeAuMetadataItems(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId);
    int count = 0;

    // Locate the AU metadata row; if none exists there is nothing to delete.
    Long auMdSeq = findAuMdByAuId(conn, auId);
    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auMdSeq = " + auMdSeq);

    if (auMdSeq != null) {
      PreparedStatement deleteMetadataItems =
	  dbManager.prepareStatement(conn, DELETE_AU_MD_ITEM_QUERY);

      try {
	deleteMetadataItems.setLong(1, auMdSeq);
	count = dbManager.executeUpdate(deleteMetadataItems);
      } catch (SQLException sqle) {
	String message = "Cannot delete AU metadata items";
	log.error(message, sqle);
	log.error("auId = " + auId);
	log.error("SQL = '" + DELETE_AU_MD_ITEM_QUERY + "'.");
	log.error("auMdSeq = " + auMdSeq);
	throw new DbException(message, sqle);
      } finally {
	DbManager.safeCloseStatement(deleteMetadataItems);
      }
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "count = " + count);
    return count;
  }

  /**
   * Provides the identifier of an Archival Unit metadata.
   *
   * @param conn
   *          A Connection with the database connection to be used.
   * @param auId
   *          A String with the AU identifier.
   * @return a Long with the identifier of the Archival Unit metadata.
   * @throws DbException
   *           if any problem occurred accessing the database.
*/ private Long findAuMdByAuId(Connection conn, String auId) throws DbException { final String DEBUG_HEADER = "findAuMdByAuId(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); String pluginId = null; String auKey = null; Long auMdSeq = null; PreparedStatement findAuMd = dbManager.prepareStatement(conn, FIND_AU_MD_BY_AU_ID_QUERY); ResultSet resultSet = null; try { pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId() = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); findAuMd.setString(1, pluginId); findAuMd.setString(2, auKey); resultSet = dbManager.executeQuery(findAuMd); if (resultSet.next()) { auMdSeq = resultSet.getLong(AU_MD_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auMdSeq = " + auMdSeq); } } catch (SQLException sqle) { String message = "Cannot find AU metadata identifier"; log.error(message, sqle); log.error("auId = " + auId); log.error("SQL = '" + FIND_AU_MD_BY_AU_ID_QUERY + "'."); log.error("pluginId = " + pluginId); log.error("auKey = " + auKey); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findAuMd); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auMdSeq = " + auMdSeq); return auMdSeq; } /** * Removes an AU. * * @param conn * A Connection with the database connection to be used. * @param auId * A String with the AU identifier. * @return an int with the number of rows deleted. * @throws DbException * if any problem occurred accessing the database. 
*/ int removeAu(Connection conn, String auId) throws DbException { final String DEBUG_HEADER = "removeAu(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); int count = 0; Long auSeq = findAuByAuId(conn, auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auSeq = " + auSeq); if (auSeq != null) { PreparedStatement deleteAu = dbManager.prepareStatement(conn, DELETE_AU_QUERY); try { deleteAu.setLong(1, auSeq); count = dbManager.executeUpdate(deleteAu); } catch (SQLException sqle) { String message = "Cannot delete AU"; log.error(message, sqle); log.error("auId = " + auId); log.error("SQL = '" + DELETE_AU_QUERY + "'."); log.error("auSeq = " + auSeq); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(deleteAu); } } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "count = " + count); return count; } /** * Provides the identifier of an Archival Unit. * * @param conn * A Connection with the database connection to be used. * @param auId * A String with the AU identifier. * @return a Long with the identifier of the Archival Unit. * @throws DbException * if any problem occurred accessing the database. 
*/ Long findAuByAuId(Connection conn, String auId) throws DbException { final String DEBUG_HEADER = "findAuByAuId(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); String pluginId = PluginManager.pluginIdFromAuId(auId); String auKey = PluginManager.auKeyFromAuId(auId); Long auSeq = null; PreparedStatement findAu = dbManager.prepareStatement(conn, FIND_AU_BY_AU_ID_QUERY); ResultSet resultSet = null; try { pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId() = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); findAu.setString(1, pluginId); findAu.setString(2, auKey); resultSet = dbManager.executeQuery(findAu); if (resultSet.next()) { auSeq = resultSet.getLong(AU_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auSeq = " + auSeq); } } catch (SQLException sqle) { String message = "Cannot find AU identifier"; log.error(message, sqle); log.error("auId = " + auId); log.error("SQL = '" + FIND_AU_BY_AU_ID_QUERY + "'."); log.error("pluginId = " + pluginId); log.error("auKey = " + auKey); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findAu); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auSeq = " + auSeq); return auSeq; } /** * Provides the identifier of a plugin if existing or after creating it * otherwise. * * @param conn * A Connection with the database connection to be used. * @param pluginId * A String with the plugin identifier. * @param platformSeq * A Long with the publishing platform identifier. * @param isBulkContent * A boolean with the indication of bulk content for the plugin. * @return a Long with the identifier of the plugin. * @throws DbException * if any problem occurred accessing the database. 
*/ Long findOrCreatePlugin(Connection conn, String pluginId, Long platformSeq, boolean isBulkContent) throws DbException { final String DEBUG_HEADER = "findOrCreatePlugin(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "pluginId = " + pluginId); log.debug2(DEBUG_HEADER + "platformSeq = " + platformSeq); log.debug2(DEBUG_HEADER + "isBulkContent = " + isBulkContent); } Long pluginSeq = findPlugin(conn, pluginId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginSeq = " + pluginSeq); // Check whether it is a new plugin. if (pluginSeq == null) { // Yes: Add to the database the new plugin. pluginSeq = addPlugin(conn, pluginId, platformSeq, isBulkContent); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "new pluginSeq = " + pluginSeq); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "pluginSeq = " + pluginSeq); return pluginSeq; } /** * Provides the identifier of a plugin. * * @param conn * A Connection with the database connection to be used. * @param pluginId * A String with the plugin identifier. * @return a Long with the identifier of the plugin. * @throws DbException * if any problem occurred accessing the database. 
*/ Long findPlugin(Connection conn, String pluginId) throws DbException { final String DEBUG_HEADER = "findPlugin(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "pluginId = " + pluginId); Long pluginSeq = null; ResultSet resultSet = null; PreparedStatement findPlugin = dbManager.prepareStatement(conn, FIND_PLUGIN_QUERY); try { findPlugin.setString(1, pluginId); resultSet = dbManager.executeQuery(findPlugin); if (resultSet.next()) { pluginSeq = resultSet.getLong(PLUGIN_SEQ_COLUMN); } } catch (SQLException sqle) { String message = "Cannot find plugin"; log.error(message, sqle); log.error("SQL = '" + FIND_PLUGIN_QUERY + "'."); log.error("pluginId = " + pluginId); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findPlugin); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "pluginSeq = " + pluginSeq); return pluginSeq; } /** * Adds a plugin to the database. * * @param conn * A Connection with the database connection to be used. * @param pluginId * A String with the plugin identifier. * @param platformSeq * A Long with the publishing platform identifier. * @param isBulkContent * A boolean with the indication of bulk content for the plugin. * @return a Long with the identifier of the plugin just added. * @throws DbException * if any problem occurred accessing the database. 
*/ private Long addPlugin(Connection conn, String pluginId, Long platformSeq, boolean isBulkContent) throws DbException { final String DEBUG_HEADER = "addPlugin(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "pluginId = " + pluginId); log.debug2(DEBUG_HEADER + "platformSeq = " + platformSeq); log.debug2(DEBUG_HEADER + "isBulkContent = " + isBulkContent); } Long pluginSeq = null; ResultSet resultSet = null; PreparedStatement insertPlugin = dbManager.prepareStatement(conn, INSERT_PLUGIN_QUERY, Statement.RETURN_GENERATED_KEYS); try { // skip auto-increment key field insertPlugin.setString(1, pluginId); if (platformSeq != null) { insertPlugin.setLong(2, platformSeq); } else { insertPlugin.setNull(2, BIGINT); } insertPlugin.setBoolean(3, isBulkContent); dbManager.executeUpdate(insertPlugin); resultSet = insertPlugin.getGeneratedKeys(); if (!resultSet.next()) { log.error("Unable to create plugin table row."); return null; } pluginSeq = resultSet.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Added pluginSeq = " + pluginSeq); } catch (SQLException sqle) { String message = "Cannot add plugin"; log.error(message, sqle); log.error("SQL = '" + INSERT_PLUGIN_QUERY + "'."); log.error("pluginId = " + pluginId); log.error("platformSeq = " + platformSeq); log.error("isBulkContent = " + isBulkContent); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(insertPlugin); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "pluginSeq = " + pluginSeq); return pluginSeq; } /** * Provides the identifier of an Archival Unit. * * @param conn * A Connection with the database connection to be used. * @param pluginSeq * A Long with the identifier of the plugin. * @param auKey * A String with the Archival Unit key. * @return a Long with the identifier of the Archival Unit. * @throws DbException * if any problem occurred accessing the database. 
*/ Long findAu(Connection conn, Long pluginSeq, String auKey) throws DbException { final String DEBUG_HEADER = "findAu(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "pluginSeq = " + pluginSeq); log.debug2(DEBUG_HEADER + "auKey = " + auKey); } ResultSet resultSet = null; Long auSeq = null; PreparedStatement findAu = dbManager.prepareStatement(conn, FIND_AU_QUERY); try { findAu.setLong(1, pluginSeq); findAu.setString(2, auKey); resultSet = dbManager.executeQuery(findAu); if (resultSet.next()) { auSeq = resultSet.getLong(AU_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Found auSeq = " + auSeq); } } catch (SQLException sqle) { String message = "Cannot find AU"; log.error(message, sqle); log.error("SQL = '" + FIND_AU_QUERY + "'."); log.error("pluginSeq = " + pluginSeq); log.error("auKey = " + auKey); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findAu); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auSeq = " + auSeq); return auSeq; } /** * Adds an Archival Unit to the database. * * @param conn * A Connection with the database connection to be used. * @param pluginSeq * A Long with the identifier of the plugin. * @param auKey * A String with the Archival Unit key. * @return a Long with the identifier of the Archival Unit just added. * @throws DbException * if any problem occurred accessing the database. 
*/ Long addAu(Connection conn, Long pluginSeq, String auKey) throws DbException { final String DEBUG_HEADER = "addAu(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "pluginSeq = " + pluginSeq); log.debug2(DEBUG_HEADER + "auKey = " + auKey); } ResultSet resultSet = null; Long auSeq = null; PreparedStatement insertAu = dbManager.prepareStatement(conn, INSERT_AU_QUERY, Statement.RETURN_GENERATED_KEYS); try { // skip auto-increment key field insertAu.setLong(1, pluginSeq); insertAu.setString(2, auKey); dbManager.executeUpdate(insertAu); resultSet = insertAu.getGeneratedKeys(); if (!resultSet.next()) { log.error("Unable to create AU table row for AU key " + auKey); return null; } auSeq = resultSet.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Added auSeq = " + auSeq); } catch (SQLException sqle) { String message = "Cannot add AU"; log.error(message, sqle); log.error("SQL = '" + INSERT_AU_QUERY + "'."); log.error("pluginSeq = " + pluginSeq); log.error("auKey = " + auKey); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(insertAu); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auSeq = " + auSeq); return auSeq; } /** * Adds an Archival Unit metadata to the database. * * @param conn * A Connection with the database connection to be used. * @param auSeq * A Long with the identifier of the Archival Unit. * @param version * An int with the metadata version. * @param extractTime * A long with the extraction time of the metadata. * @param creationTime * A long with the creation time of the archival unit. * @param providerSeq * A Long with the identifier of the Archival Unit provider. * @return a Long with the identifier of the Archival Unit metadata just * added. * @throws DbException * if any problem occurred accessing the database. 
*/ Long addAuMd(Connection conn, Long auSeq, int version, long extractTime, long creationTime, Long providerSeq) throws DbException { final String DEBUG_HEADER = "addAuMd(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "auSeq = " + auSeq); log.debug2(DEBUG_HEADER + "version = " + version); log.debug2(DEBUG_HEADER + "extractTime = " + extractTime); log.debug2(DEBUG_HEADER + "creationTime = " + creationTime); log.debug2(DEBUG_HEADER + "providerSeq = " + providerSeq); } ResultSet resultSet = null; Long auMdSeq = null; PreparedStatement insertAuMd = dbManager.prepareStatement(conn, INSERT_AU_MD_QUERY, Statement.RETURN_GENERATED_KEYS); try { // skip auto-increment key field insertAuMd.setLong(1, auSeq); insertAuMd.setShort(2, (short) version); insertAuMd.setLong(3, extractTime); insertAuMd.setLong(4, creationTime); insertAuMd.setLong(5, providerSeq); dbManager.executeUpdate(insertAuMd); resultSet = insertAuMd.getGeneratedKeys(); if (!resultSet.next()) { log.error("Unable to create AU_MD table row for auSeq " + auSeq); return null; } auMdSeq = resultSet.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Added auMdSeq = " + auMdSeq); } catch (SQLException sqle) { String message = "Cannot add AU metadata"; log.error(message, sqle); log.error("sql = " + INSERT_AU_MD_QUERY); log.error("auSeq = " + auSeq); log.error("version = " + version); log.error("extractTime = " + extractTime); log.error("creationTime = " + creationTime); log.error("providerSeq = " + providerSeq); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(insertAuMd); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auMdSeq = " + auMdSeq); return auMdSeq; } /** * Updates the timestamp of the last extraction of an Archival Unit metadata. * * @param au * The ArchivalUnit whose time to update. * @param conn * A Connection with the database connection to be used. 
* @param auMdSeq * A Long with the identifier of the Archival Unit metadata. * @throws DbException * if any problem occurred accessing the database. */ void updateAuLastExtractionTime(ArchivalUnit au, Connection conn, Long auMdSeq) throws DbException { final String DEBUG_HEADER = "updateAuLastExtractionTime(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "au = " + au); log.debug2(DEBUG_HEADER + "auMdSeq = " + auMdSeq); } long now = TimeBase.nowMs(); AuUtil.getAuState(au).setLastMetadataIndex(now); PreparedStatement updateAuLastExtractionTime = dbManager.prepareStatement(conn, UPDATE_AU_MD_EXTRACT_TIME_QUERY); try { updateAuLastExtractionTime.setLong(1, now); updateAuLastExtractionTime.setLong(2, auMdSeq); dbManager.executeUpdate(updateAuLastExtractionTime); } catch (SQLException sqle) { String message = "Cannot update the AU extraction time"; log.error(message, sqle); log.error("au = '" + au + "'."); log.error("SQL = '" + UPDATE_AU_MD_EXTRACT_TIME_QUERY + "'."); log.error("now = " + now + "."); log.error("auMdSeq = '" + auMdSeq + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(updateAuLastExtractionTime); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Adds a publication to the database. * * @param conn * A Connection with the database connection to be used. * @param publisherSeq * A Long with the publisher identifier. * @param parentMdItemSeq * A Long with the publication parent metadata item parent * identifier. * @param mdItemType * A String with the type of publication. * @param title * A String with the title of the publication. * @return a Long with the identifier of the publication just added. * @throws DbException * if any problem occurred accessing the database. 
*/ Long addPublication(Connection conn, Long publisherSeq, Long parentMdItemSeq, String mdItemType, String title) throws DbException { final String DEBUG_HEADER = "addPublication(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "publisherSeq = " + publisherSeq); log.debug2(DEBUG_HEADER + "parentMdItemSeq = " + parentMdItemSeq); log.debug2(DEBUG_HEADER + "mdItemType = " + mdItemType); log.debug2(DEBUG_HEADER + "title = " + title); } Long publicationSeq = null; Long mdItemTypeSeq = findMetadataItemType(conn, mdItemType); if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemTypeSeq = " + mdItemTypeSeq); if (mdItemTypeSeq == null) { log.error("Unable to find the metadata item type " + mdItemType); return null; } Long mdItemSeq = addMdItem(conn, parentMdItemSeq, mdItemTypeSeq, null, null, null, -1); if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); if (mdItemSeq == null) { log.error("Unable to create metadata item table row."); return null; } addMdItemName(conn, mdItemSeq, title, PRIMARY_NAME_TYPE); ResultSet resultSet = null; PreparedStatement insertPublication = dbManager.prepareStatement(conn, INSERT_PUBLICATION_QUERY, Statement.RETURN_GENERATED_KEYS); try { // skip auto-increment key field insertPublication.setLong(1, mdItemSeq); insertPublication.setLong(2, publisherSeq); dbManager.executeUpdate(insertPublication); resultSet = insertPublication.getGeneratedKeys(); if (!resultSet.next()) { log.error("Unable to create publication table row."); return null; } publicationSeq = resultSet.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Added publicationSeq = " + publicationSeq); } catch (SQLException sqle) { String message = "Cannot insert publication"; log.error(message, sqle); log.error("parentMdItemSeq = " + parentMdItemSeq); log.error("mdItemType = " + mdItemType); log.error("title = " + title); log.error("SQL = '" + INSERT_PUBLICATION_QUERY + "'."); log.error("mdItemSeq = '" + mdItemSeq + "'."); log.error("publisherSeq = '" + 
publisherSeq + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(insertPublication); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationSeq = " + publicationSeq); return publicationSeq; } /** * Provides the identifier of the metadata item of a publication. * * @param conn * A Connection with the database connection to be used. * @param publicationSeq * A Long with the identifier of the publication. * @return a Long with the identifier of the metadata item of the publication. * @throws DbException * if any problem occurred accessing the database. */ Long findPublicationMetadataItem(Connection conn, Long publicationSeq) throws DbException { final String DEBUG_HEADER = "findPublicationMetadataItem(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationSeq = " + publicationSeq); Long mdItemSeq = null; PreparedStatement findMdItem = dbManager.prepareStatement(conn, FIND_PUBLICATION_METADATA_ITEM_QUERY); ResultSet resultSet = null; try { findMdItem.setLong(1, publicationSeq); resultSet = dbManager.executeQuery(findMdItem); if (resultSet.next()) { mdItemSeq = resultSet.getLong(MD_ITEM_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); } } catch (SQLException sqle) { String message = "Cannot find publication metadata item"; log.error(message, sqle); log.error("SQL = '" + FIND_PUBLICATION_METADATA_ITEM_QUERY + "'."); log.error("publicationSeq = " + publicationSeq + "."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findMdItem); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); return mdItemSeq; } /** * Provides the identifier of the parent of a metadata item. * * @param conn * A Connection with the database connection to be used. * @param mditemSeq * A Long with the identifier of the metadata item. 
* @return a Long with the identifier of the parent of the metadata item. * @throws DbException * if any problem occurred accessing the database. */ Long findParentMetadataItem(Connection conn, Long mditemSeq) throws DbException { final String DEBUG_HEADER = "findParentMetadataItem(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mditemSeq = " + mditemSeq); Long mdParentItemSeq = null; PreparedStatement findParentMdItem = dbManager.prepareStatement(conn, FIND_PARENT_METADATA_ITEM_QUERY); ResultSet resultSet = null; try { findParentMdItem.setLong(1, mditemSeq); resultSet = dbManager.executeQuery(findParentMdItem); if (resultSet.next()) { mdParentItemSeq = resultSet.getLong(MD_ITEM_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "mdParentItemSeq = " + mdParentItemSeq); } } catch (SQLException sqle) { String message = "Cannot find parent metadata item"; log.error(message, sqle); log.error("SQL = '" + FIND_PARENT_METADATA_ITEM_QUERY + "'."); log.error("mditemSeq = " + mditemSeq + "."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findParentMdItem); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdParentItemSeq = " + mdParentItemSeq); return mdParentItemSeq; } /** * Adds to the database the ISSNs of a metadata item. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @param pIssn * A String with the print ISSN of the metadata item. * @param eIssn * A String with the electronic ISSN of the metadata item. * @throws DbException * if any problem occurred accessing the database. 
*/ void addMdItemIssns(Connection conn, Long mdItemSeq, String pIssn, String eIssn) throws DbException { final String DEBUG_HEADER = "addMdItemIssns(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); log.debug2(DEBUG_HEADER + "pIssn = " + pIssn); log.debug2(DEBUG_HEADER + "eIssn = " + eIssn); } if (pIssn == null && eIssn == null) { return; } PreparedStatement insertIssn = dbManager.prepareStatement(conn, INSERT_ISSN_QUERY); try { if (pIssn != null) { insertIssn.setLong(1, mdItemSeq); insertIssn.setString(2, pIssn); insertIssn.setString(3, P_ISSN_TYPE); int count = dbManager.executeUpdate(insertIssn); if (log.isDebug3()) { log.debug3(DEBUG_HEADER + "count = " + count); log.debug3(DEBUG_HEADER + "Added PISSN = " + pIssn); } insertIssn.clearParameters(); } if (eIssn != null) { insertIssn.setLong(1, mdItemSeq); insertIssn.setString(2, eIssn); insertIssn.setString(3, E_ISSN_TYPE); int count = dbManager.executeUpdate(insertIssn); if (log.isDebug3()) { log.debug3(DEBUG_HEADER + "count = " + count); log.debug3(DEBUG_HEADER + "Added EISSN = " + eIssn); } } } catch (SQLException sqle) { String message = "Cannot add metadata item ISSNs"; log.error(message, sqle); log.error("SQL = '" + INSERT_ISSN_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq); log.error("pIssn = " + pIssn); log.error("eIssn = " + eIssn); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(insertIssn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Adds to the database the ISBNs of a metadata item. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @param pIsbn * A String with the print ISBN of the metadata item. * @param eIsbn * A String with the electronic ISBN of the metadata item. * @throws DbException * if any problem occurred accessing the database. 
*/ void addMdItemIsbns(Connection conn, Long mdItemSeq, String pIsbn, String eIsbn) throws DbException { final String DEBUG_HEADER = "addMdItemIsbns(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); log.debug2(DEBUG_HEADER + "pIsbn = " + pIsbn); log.debug2(DEBUG_HEADER + "eIsbn = " + eIsbn); } if (pIsbn == null && eIsbn == null) { return; } PreparedStatement insertIsbn = dbManager.prepareStatement(conn, INSERT_ISBN_QUERY); try { if (pIsbn != null) { insertIsbn.setLong(1, mdItemSeq); insertIsbn.setString(2, pIsbn); insertIsbn.setString(3, P_ISBN_TYPE); int count = dbManager.executeUpdate(insertIsbn); if (log.isDebug3()) { log.debug3(DEBUG_HEADER + "count = " + count); log.debug3(DEBUG_HEADER + "Added PISBN = " + pIsbn); } insertIsbn.clearParameters(); } if (eIsbn != null) { insertIsbn.setLong(1, mdItemSeq); insertIsbn.setString(2, eIsbn); insertIsbn.setString(3, E_ISBN_TYPE); int count = dbManager.executeUpdate(insertIsbn); if (log.isDebug3()) { log.debug3(DEBUG_HEADER + "count = " + count); log.debug3(DEBUG_HEADER + "Added EISBN = " + eIsbn); } } } catch (SQLException sqle) { String message = "Cannot add metadata item ISBNs"; log.error(message, sqle); log.error("SQL = '" + INSERT_ISBN_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq); log.error("pIssn = " + pIsbn); log.error("eIssn = " + eIsbn); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(insertIsbn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Provides the ISSNs of a metadata item. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @return a Set<Issn> with the ISSNs. * @throws DbException * if any problem occurred accessing the database. 
*/ Set<Issn> getMdItemIssns(Connection conn, Long mdItemSeq) throws DbException { final String DEBUG_HEADER = "getMdItemIssns(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); Set<Issn> issns = new HashSet<Issn>(); PreparedStatement findIssns = dbManager.prepareStatement(conn, FIND_MD_ITEM_ISSN_QUERY); ResultSet resultSet = null; Issn issn; try { // Get the metadata item ISSNs. findIssns.setLong(1, mdItemSeq); resultSet = dbManager.executeQuery(findIssns); // Loop through the results. while (resultSet.next()) { // Get the next ISSN. issn = new Issn(resultSet.getString(ISSN_COLUMN), resultSet.getString(ISSN_TYPE_COLUMN)); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Found " + issn); // Add it to the results. issns.add(issn); } } catch (SQLException sqle) { String message = "Cannot find metadata item ISSNs"; log.error(message, sqle); log.error("SQL = '" + FIND_MD_ITEM_ISSN_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findIssns); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "issns = " + issns); return issns; } Collection<String> getMdItemProprietaryIds(Connection conn, Long mdItemSeq) throws DbException { final String DEBUG_HEADER = "getMdItemProprietaryIds(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); List<String> proprietaryIds = new ArrayList<String>(); PreparedStatement findMdItemProprietaryId = dbManager.prepareStatement(conn, FIND_MD_ITEM_PROPRIETARY_ID_QUERY); ResultSet resultSet = null; try { findMdItemProprietaryId.setLong(1, mdItemSeq); resultSet = dbManager.executeQuery(findMdItemProprietaryId); while (resultSet.next()) { proprietaryIds.add(resultSet.getString(PROPRIETARY_ID_COLUMN)); } } catch (SQLException sqle) { String message = "Cannot get the proprietary identifiers of a metadata item"; log.error(message, sqle); log.error("SQL = '" + 
FIND_MD_ITEM_PROPRIETARY_ID_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findMdItemProprietaryId); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "proprietaryIds = " + proprietaryIds); return proprietaryIds; } /** * Provides the ISBNs of a metadata item. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @return a Set<Isbn> with the ISBNs. * @throws DbException * if any problem occurred accessing the database. */ Set<Isbn> getMdItemIsbns(Connection conn, Long mdItemSeq) throws DbException { final String DEBUG_HEADER = "getMdItemIsbns(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); Set<Isbn> isbns = new HashSet<Isbn>(); PreparedStatement findIsbns = dbManager.prepareStatement(conn, FIND_MD_ITEM_ISBN_QUERY); ResultSet resultSet = null; Isbn isbn; try { // Get the metadata item ISBNs. findIsbns.setLong(1, mdItemSeq); resultSet = dbManager.executeQuery(findIsbns); // Loop through the results. while (resultSet.next()) { // Get the next ISBN. isbn = new Isbn(resultSet.getString(ISBN_COLUMN), resultSet.getString(ISBN_TYPE_COLUMN)); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Found " + isbn); // Add it to the results. isbns.add(isbn); } } catch (SQLException sqle) { String message = "Cannot find metadata item ISBNs"; log.error(message, sqle); log.error("SQL = '" + FIND_MD_ITEM_ISBN_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findIsbns); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "isbns = " + isbns); return isbns; } /** * Provides the identifier of a publication by its publisher and ISSNs. * * @param conn * A Connection with the database connection to be used. 
* @param publisherSeq * A Long with the publisher identifier. * @param pIssn * A String with the print ISSN of the publication. * @param eIssn * A String with the electronic ISSN of the publication. * @param mdItemType * A String with the type of publication to be identified. * @return a Long with the identifier of the publication. * @throws DbException * if any problem occurred accessing the database. */ Long findPublicationByIssns(Connection conn, Long publisherSeq, String pIssn, String eIssn, String mdItemType) throws DbException { final String DEBUG_HEADER = "findPublicationByIssns(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "publisherSeq = " + publisherSeq); log.debug2(DEBUG_HEADER + "pIssn = " + pIssn); log.debug2(DEBUG_HEADER + "eIssn = " + eIssn); log.debug2(DEBUG_HEADER + "mdItemType = " + mdItemType); } Long publicationSeq = null; ResultSet resultSet = null; PreparedStatement findPublicationByIssns = dbManager.prepareStatement(conn, FIND_PUBLICATION_BY_ISSNS_QUERY); try { findPublicationByIssns.setLong(1, publisherSeq); findPublicationByIssns.setString(2, pIssn); findPublicationByIssns.setString(3, eIssn); findPublicationByIssns.setString(4, mdItemType); resultSet = dbManager.executeQuery(findPublicationByIssns); if (resultSet.next()) { publicationSeq = resultSet.getLong(PUBLICATION_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publicationSeq = " + publicationSeq); } } catch (SQLException sqle) { String message = "Cannot find publication"; log.error(message, sqle); log.error("SQL = '" + FIND_PUBLICATION_BY_ISSNS_QUERY + "'."); log.error("publisherSeq = " + publisherSeq + "."); log.error("pIssn = " + pIssn); log.error("eIssn = " + eIssn); log.error("mdItemType = " + mdItemType); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findPublicationByIssns); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationSeq = " + publicationSeq); return publicationSeq; } 
/** * Provides the identifier of a publication by its publisher and ISBNs. * * @param conn * A Connection with the database connection to be used. * @param publisherSeq * A Long with the publisher identifier. * @param pIsbn * A String with the print ISBN of the publication. * @param eIsbn * A String with the electronic ISBN of the publication. * @param mdItemType * A String with the type of publication to be identified. * @return a Long with the identifier of the publication. * @throws DbException * if any problem occurred accessing the database. */ Long findPublicationByIsbns(Connection conn, Long publisherSeq, String pIsbn, String eIsbn, String mdItemType) throws DbException { final String DEBUG_HEADER = "findPublicationByIsbns(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "publisherSeq = " + publisherSeq); log.debug2(DEBUG_HEADER + "pIsbn = " + pIsbn); log.debug2(DEBUG_HEADER + "eIsbn = " + eIsbn); log.debug2(DEBUG_HEADER + "mdItemType = " + mdItemType); } Long publicationSeq = null; ResultSet resultSet = null; PreparedStatement findPublicationByIsbns = dbManager.prepareStatement(conn, FIND_PUBLICATION_BY_ISBNS_QUERY); try { findPublicationByIsbns.setLong(1, publisherSeq); findPublicationByIsbns.setString(2, pIsbn); findPublicationByIsbns.setString(3, eIsbn); findPublicationByIsbns.setString(4, mdItemType); resultSet = dbManager.executeQuery(findPublicationByIsbns); if (resultSet.next()) { publicationSeq = resultSet.getLong(PUBLICATION_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publicationSeq = " + publicationSeq); } } catch (SQLException sqle) { String message = "Cannot find publication"; log.error(message, sqle); log.error("SQL = '" + FIND_PUBLICATION_BY_ISBNS_QUERY + "'."); log.error("publisherSeq = " + publisherSeq); log.error("pIsbn = " + pIsbn); log.error("eIsbn = " + eIsbn); log.error("mdItemType = " + mdItemType); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); 
DbManager.safeCloseStatement(findPublicationByIsbns); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationSeq = " + publicationSeq); return publicationSeq; } /** * Provides the identifier of a publication by its title and publisher. * * @param conn * A Connection with the database connection to be used. * @param title * A String with the title of the publication. * @param publisherSeq * A Long with the publisher identifier. * @param mdItemType * A String with the type of publication to be identified. * @return a Long with the identifier of the publication. * @throws DbException * if any problem occurred accessing the database. */ Long findPublicationByName(Connection conn, Long publisherSeq, String title, String mdItemType) throws DbException { final String DEBUG_HEADER = "findPublicationByName(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "publisherSeq = " + publisherSeq); log.debug2(DEBUG_HEADER + "title = " + title); log.debug2(DEBUG_HEADER + "mdItemType = " + mdItemType); } Long publicationSeq = null; ResultSet resultSet = null; PreparedStatement findPublicationByName = dbManager.prepareStatement(conn, FIND_PUBLICATION_BY_NAME_QUERY); try { findPublicationByName.setLong(1, publisherSeq); findPublicationByName.setString(2, title); findPublicationByName.setString(3, mdItemType); resultSet = dbManager.executeQuery(findPublicationByName); if (resultSet.next()) { publicationSeq = resultSet.getLong(PUBLICATION_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publicationSeq = " + publicationSeq); } } catch (SQLException sqle) { String message = "Cannot find publication"; log.error(message, sqle); log.error("SQL = '" + FIND_PUBLICATION_BY_NAME_QUERY + "'."); log.error("publisherSeq = '" + publisherSeq + "'."); log.error("title = " + title); log.error("mdItemType = " + mdItemType); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findPublicationByName); } if 
(log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationSeq = " + publicationSeq); return publicationSeq; } /** * Provides an indication of whether a publication has ISBNs in the database. * * @param conn * A Connection with the database connection to be used. * @param publicationSeq * A Long with the publication identifier. * @return a boolean with <code>true</code> if the publication has ISBNs, * <code>false</code> otherwise. * @throws DbException * if any problem occurred accessing the database. */ boolean publicationHasIsbns(Connection conn, Long publicationSeq) throws DbException { final String DEBUG_HEADER = "publicationHasIsbns(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationSeq = " + publicationSeq); long rowCount = -1; ResultSet results = null; PreparedStatement countIsbns = dbManager.prepareStatement(conn, COUNT_PUBLICATION_ISBNS_QUERY); try { countIsbns.setLong(1, publicationSeq); // Find the ISBNs. results = dbManager.executeQuery(countIsbns); results.next(); rowCount = results.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "rowCount = " + rowCount); } catch (SQLException sqle) { String message = "Cannot count publication ISBNs"; log.error(message, sqle); log.error("SQL = '" + COUNT_PUBLICATION_ISBNS_QUERY + "'."); log.error("publicationSeq = " + publicationSeq); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(results); DbManager.safeCloseStatement(countIsbns); } boolean result = rowCount > 0; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "result = " + result); return result; } /** * Provides an indication of whether a publication has ISSNs in the database. * * @param conn * A Connection with the database connection to be used. * @param publicationSeq * A Long with the publication identifier. * @return a boolean with <code>true</code> if the publication has ISSNs, * <code>false</code> otherwise. * @throws DbException * if any problem occurred accessing the database. 
*/ boolean publicationHasIssns(Connection conn, Long publicationSeq) throws DbException { final String DEBUG_HEADER = "publicationHasIssns(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationSeq = " + publicationSeq); long rowCount = -1; ResultSet results = null; PreparedStatement countIssns = dbManager.prepareStatement(conn, COUNT_PUBLICATION_ISSNS_QUERY); try { countIssns.setLong(1, publicationSeq); // Find the ISSNs. results = dbManager.executeQuery(countIssns); results.next(); rowCount = results.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "rowCount = " + rowCount); } catch (SQLException sqle) { String message = "Cannot count publication ISSNs"; log.error(message, sqle); log.error("SQL = '" + COUNT_PUBLICATION_ISSNS_QUERY + "'."); log.error("publicationSeq = " + publicationSeq); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(results); DbManager.safeCloseStatement(countIssns); } boolean result = rowCount > 0; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "result = " + result); return result; } /** * Provides the identifier of a metadata item type by its name. * * @param conn * A Connection with the database connection to be used. * @param typeName * A String with the name of the metadata item type. * @return a Long with the identifier of the metadata item type. * @throws DbException * if any problem occurred accessing the database. 
*/ Long findMetadataItemType(Connection conn, String typeName) throws DbException { final String DEBUG_HEADER = "findMetadataItemType(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "typeName = " + typeName); Long mdItemTypeSeq = null; ResultSet resultSet = null; PreparedStatement findMdItemType = dbManager.prepareStatement(conn, FIND_MD_ITEM_TYPE_QUERY); try { findMdItemType.setString(1, typeName); resultSet = dbManager.executeQuery(findMdItemType); if (resultSet.next()) { mdItemTypeSeq = resultSet.getLong(MD_ITEM_TYPE_SEQ_COLUMN); } } catch (SQLException sqle) { String message = "Cannot find metadata item type"; log.error(message, sqle); log.error("SQL = '" + FIND_MD_ITEM_TYPE_QUERY + "'."); log.error("typeName = '" + typeName + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findMdItemType); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemTypeSeq = " + mdItemTypeSeq); return mdItemTypeSeq; } /** * Adds a metadata item to the database. * * @param conn * A Connection with the database connection to be used. * @param parentSeq * A Long with the metadata item parent identifier. * @param auMdSeq * A Long with the identifier of the Archival Unit metadata. * @param mdItemTypeSeq * A Long with the identifier of the type of metadata item. * @param date * A String with the publication date of the metadata item. * @param coverage * A String with the metadata item coverage. * @param fetchTime * A long with the fetch time of metadata item. * @return a Long with the identifier of the metadata item just added. * @throws DbException * if any problem occurred accessing the database. 
*/ Long addMdItem(Connection conn, Long parentSeq, Long mdItemTypeSeq, Long auMdSeq, String date, String coverage, long fetchTime) throws DbException { final String DEBUG_HEADER = "addMdItem(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "parentSeq = " + parentSeq); log.debug2(DEBUG_HEADER + "mdItemTypeSeq = " + mdItemTypeSeq); log.debug2(DEBUG_HEADER + "auMdSeq = " + auMdSeq); log.debug2(DEBUG_HEADER + "date = " + date); log.debug2(DEBUG_HEADER + "coverage = " + coverage); log.debug2(DEBUG_HEADER + "fetchTime = " + fetchTime); } PreparedStatement insertMdItem = dbManager.prepareStatement(conn, INSERT_MD_ITEM_QUERY, Statement.RETURN_GENERATED_KEYS); ResultSet resultSet = null; Long mdItemSeq = null; try { // skip auto-increment key field if (parentSeq != null) { insertMdItem.setLong(1, parentSeq); } else { insertMdItem.setNull(1, BIGINT); } insertMdItem.setLong(2, mdItemTypeSeq); if (auMdSeq != null) { insertMdItem.setLong(3, auMdSeq); } else { insertMdItem.setNull(3, BIGINT); } insertMdItem.setString(4, date); insertMdItem.setString(5, coverage); insertMdItem.setLong(6, fetchTime); dbManager.executeUpdate(insertMdItem); resultSet = insertMdItem.getGeneratedKeys(); if (!resultSet.next()) { log.error("Unable to create metadata item table row."); return null; } mdItemSeq = resultSet.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Added mdItemSeq = " + mdItemSeq); } catch (SQLException sqle) { String message = "Cannot insert metadata item"; log.error(message, sqle); log.error("SQL = '" + INSERT_MD_ITEM_QUERY + "'."); log.error("parentSeq = " + parentSeq + "."); log.error("mdItemTypeSeq = " + mdItemTypeSeq + "."); log.error("auMdSeq = " + auMdSeq + "."); log.error("date = '" + date + "'."); log.error("coverage = '" + coverage + "'."); log.error("fetchTime = " + fetchTime); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(insertMdItem); } if (log.isDebug2()) log.debug2(DEBUG_HEADER 
+ "mdItemSeq = " + mdItemSeq); return mdItemSeq; } /** * Provides the names of a metadata item. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @return a Map<String, String> with the names and name types of the metadata * item. * @throws DbException * if any problem occurred accessing the database. */ Map<String, String> getMdItemNames(Connection conn, Long mdItemSeq) throws DbException { final String DEBUG_HEADER = "getMdItemNames(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); Map<String, String> names = new HashMap<String, String>(); PreparedStatement getNames = dbManager.prepareStatement(conn, FIND_MD_ITEM_NAME_QUERY); ResultSet resultSet = null; try { getNames.setLong(1, mdItemSeq); resultSet = dbManager.executeQuery(getNames); while (resultSet.next()) { names.put(resultSet.getString(NAME_COLUMN), resultSet.getString(NAME_TYPE_COLUMN)); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Found metadata item name = '" + resultSet.getString(NAME_COLUMN) + "' of type '" + resultSet.getString(NAME_TYPE_COLUMN) + "'."); } } catch (SQLException sqle) { String message = "Cannot get the names of a metadata item"; log.error(message, sqle); log.error("SQL = '" + FIND_MD_ITEM_NAME_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq + "."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(getNames); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "names = " + names); return names; } /** * Adds a metadata item name to the database. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @param name * A String with the name of the metadata item. * @param type * A String with the type of name of the metadata item. * @throws DbException * if any problem occurred accessing the database. 
*/ void addMdItemName(Connection conn, Long mdItemSeq, String name, String type) throws DbException { final String DEBUG_HEADER = "addMdItemName(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); log.debug2(DEBUG_HEADER + "name = " + name); log.debug2(DEBUG_HEADER + "type = " + type); } if (name == null || type == null) { return; } PreparedStatement insertMdItemName = dbManager.prepareStatement(conn, INSERT_MD_ITEM_NAME_QUERY); try { insertMdItemName.setLong(1, mdItemSeq); insertMdItemName.setString(2, name); insertMdItemName.setString(3, type); int count = dbManager.executeUpdate(insertMdItemName); if (log.isDebug3()) { log.debug3(DEBUG_HEADER + "count = " + count); log.debug3(DEBUG_HEADER + "Added metadata item name = " + name); } } catch (SQLException sqle) { String message = "Cannot add a metadata item name"; log.error(message, sqle); log.error("SQL = '" + INSERT_MD_ITEM_NAME_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq + "."); log.error("name = " + name + "."); log.error("type = " + type + "."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(insertMdItemName); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Adds to the database a metadata item URL. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @param feature * A String with the feature of the metadata item URL. * @param url * A String with the metadata item URL. * @throws DbException * if any problem occurred accessing the database. 
*/ void addMdItemUrl(Connection conn, Long mdItemSeq, String feature, String url) throws DbException { final String DEBUG_HEADER = "addMdItemUrl(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); log.debug2(DEBUG_HEADER + "feature = " + feature); log.debug2(DEBUG_HEADER + "url = " + url); } PreparedStatement insertMdItemUrl = dbManager.prepareStatement(conn, INSERT_URL_QUERY); try { insertMdItemUrl.setLong(1, mdItemSeq); insertMdItemUrl.setString(2, feature); insertMdItemUrl.setString(3, url); int count = dbManager.executeUpdate(insertMdItemUrl); if (log.isDebug3()) { log.debug3(DEBUG_HEADER + "count = " + count); log.debug3(DEBUG_HEADER + "Added URL = " + url); } } catch (SQLException sqle) { String message = "Cannot add a metadata item URL"; log.error(message, sqle); log.error("SQL = '" + INSERT_URL_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq + "."); log.error("feature = " + feature + "."); log.error("url = " + url + "."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(insertMdItemUrl); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Adds to the database a metadata item DOI. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @param doi * A String with the DOI of the metadata item. * @throws DbException * if any problem occurred accessing the database. 
*/ void addMdItemDoi(Connection conn, Long mdItemSeq, String doi) throws DbException { final String DEBUG_HEADER = "addMdItemDoi(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); log.debug2(DEBUG_HEADER + "doi = " + doi); } if (StringUtil.isNullString(doi)) { return; } PreparedStatement insertMdItemDoi = dbManager.prepareStatement(conn, INSERT_DOI_QUERY); try { insertMdItemDoi.setLong(1, mdItemSeq); insertMdItemDoi.setString(2, doi); int count = dbManager.executeUpdate(insertMdItemDoi); if (log.isDebug3()) { log.debug3(DEBUG_HEADER + "count = " + count); log.debug3(DEBUG_HEADER + "Added DOI = " + doi); } } catch (SQLException sqle) { String message = "Cannot add a metadata item DOI"; log.error(message, sqle); log.error("SQL = '" + INSERT_DOI_QUERY + "'."); log.error("mdItemSeq = " + mdItemSeq + "."); log.error("doi = " + doi + "."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(insertMdItemDoi); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Removes an AU with disabled indexing from the table of pending AUs. * * @param conn * A Connection with the database connection to be used. * @param auId * A String with the Archiva lUnit identifier. * @throws DbException * if any problem occurred accessing the database. 
*/ void removeDisabledFromPendingAus(Connection conn, String auId) throws DbException { final String DEBUG_HEADER = "removeDisabledFromPendingAus(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); String pluginId = null; String auKey = null; PreparedStatement deletePendingAu = dbManager.prepareStatement(conn, DELETE_DISABLED_PENDING_AU_QUERY); try { pluginId = PluginManager.pluginIdFromAuId(auId); auKey = PluginManager.auKeyFromAuId(auId); deletePendingAu.setString(1, pluginId); deletePendingAu.setString(2, auKey); dbManager.executeUpdate(deletePendingAu); } catch (SQLException sqle) { String message = "Cannot remove disabled AU from pending table"; log.error(message, sqle); log.error("auId = '" + auId + "'."); log.error("SQL = '" + DELETE_DISABLED_PENDING_AU_QUERY + "'."); log.error("pluginId = '" + pluginId + "'."); log.error("auKey = '" + auKey + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(deletePendingAu); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Provides the prepared statement used to insert pending AUs. * * @param conn * A Connection with the database connection to be used. * @return a PreparedStatement with the prepared statement used to insert * pending AUs. */ PreparedStatement getInsertPendingAuBatchStatement(Connection conn) throws DbException { final String DEBUG_HEADER = "getInsertPendingAuBatchStatement(): "; if (dbManager.isTypeMysql()) { if (log.isDebug3()) log.debug3(DEBUG_HEADER + "SQL = " + INSERT_ENABLED_PENDING_AU_MYSQL_QUERY); return dbManager.prepareStatement(conn, INSERT_ENABLED_PENDING_AU_MYSQL_QUERY); } if (log.isDebug3()) log.debug3(DEBUG_HEADER + "SQL = " + INSERT_ENABLED_PENDING_AU_QUERY); return dbManager.prepareStatement(conn, INSERT_ENABLED_PENDING_AU_QUERY); } /** * Provides the prepared statement used to insert pending AUs with the * highest priority. * * @param conn * A Connection with the database connection to be used. 
 * @return a PreparedStatement with the prepared statement used to insert
 *         pending AUs with the highest priority.
 */
PreparedStatement getPrioritizedInsertPendingAuBatchStatement(Connection conn)
    throws DbException {
  final String DEBUG_HEADER =
      "getPrioritizedInsertPendingAuBatchStatement(): ";
  if (log.isDebug3()) log.debug3(DEBUG_HEADER + "SQL = "
      + INSERT_HIGHEST_PRIORITY_PENDING_AU_QUERY);
  return dbManager.prepareStatement(conn,
      INSERT_HIGHEST_PRIORITY_PENDING_AU_QUERY);
}

/**
 * Provides an indication of whether an Archival Unit is pending reindexing.
 *
 * @param conn
 *          A Connection with the database connection to be used.
 * @param pluginId
 *          A String with the plugin identifier.
 * @param auKey
 *          A String with the Archival Unit key.
 * @return a boolean with <code>true</code> if the Archival Unit is pending
 *         reindexing, <code>false</code> otherwise.
 * @throws DbException
 *           if any problem occurred accessing the database.
 */
boolean isAuPending(Connection conn, String pluginId, String auKey)
    throws DbException {
  final String DEBUG_HEADER = "isAuPending(): ";
  if (log.isDebug2()) {
    log.debug2(DEBUG_HEADER + "pluginId = " + pluginId);
    log.debug2(DEBUG_HEADER + "auKey = " + auKey);
  }

  boolean result = false;
  PreparedStatement selectPendingAu = null;
  ResultSet results = null;

  try {
    selectPendingAu = dbManager.prepareStatement(conn, FIND_PENDING_AU_QUERY);

    // Find the AU in the table.
    selectPendingAu.setString(1, pluginId);
    selectPendingAu.setString(2, auKey);
    results = dbManager.executeQuery(selectPendingAu);

    // Pending iff at least one row matches.
    result = results.next();
  } catch (SQLException sqle) {
    String message = "Cannot find pending AU";
    log.error(message, sqle);
    log.error("SQL = '" + FIND_PENDING_AU_QUERY + "'.");
    log.error("pluginId = '" + pluginId + "'.");
    log.error("auKey = '" + auKey + "'.");
    throw new DbException(message, sqle);
  } finally {
    DbManager.safeCloseResultSet(results);
    DbManager.safeCloseStatement(selectPendingAu);
  }

  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "result = " + result);
  return result;
}

/**
 * Adds an Archival Unit to the batch of Archival Units to be added to the
 * pending Archival Units table in the database.
 *
 * @param pluginId
 *          A String with the plugin identifier.
 * @param auKey
 *          A String with the Archival Unit key.
 * @param fullReindex
 *          A boolean indicating whether a full reindex of the Archival Unit
 *          is required.
 * @param insertPendingAuBatchStatement
 *          A PreparedStatement with the SQL statement used to add Archival
 *          Units to the pending Archival Units table in the database.
 * @throws SQLException
 *           if any problem occurred accessing the database.
 */
void addAuToPendingAusBatch(String pluginId, String auKey,
    boolean fullReindex, PreparedStatement insertPendingAuBatchStatement)
    throws SQLException {
  // Accumulates only; the batch is executed by addAuBatchToPendingAus().
  insertPendingAuBatchStatement.setString(1, pluginId);
  insertPendingAuBatchStatement.setString(2, auKey);
  insertPendingAuBatchStatement.setBoolean(3, fullReindex);
  insertPendingAuBatchStatement.addBatch();
}

/**
 * Adds a batch of Archival Units to the pending Archival Units table in the
 * database.
 *
 * @param insertPendingAuBatchStatement
 *          A PreparedStatement with the SQL statement used to add Archival
 *          Units to the pending Archival Units table in the database.
 * @throws SQLException
 *           if any problem occurred accessing the database.
*/ void addAuBatchToPendingAus(PreparedStatement insertPendingAuBatchStatement) throws SQLException { final String DEBUG_HEADER = "addAuBatchToPendingAus(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); insertPendingAuBatchStatement.executeBatch(); if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Provides the version of the metadata of an AU stored in the database. * * @param au * An ArchivalUnit with the AU involved. * @return an int with the version of the metadata of the AU stored in the * database. */ int getAuMetadataVersion(ArchivalUnit au) { final String DEBUG_HEADER = "getAuMetadataVersion(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "au = " + au); int version = UNKNOWN_VERSION; Connection conn = null; try { // Get a connection to the database. conn = dbManager.getConnection(); // Get the version. version = getAuMetadataVersion(conn, au); } catch (DbException dbe) { log.error("Cannot get AU metadata version - Using " + version + ": " + dbe); log.error("au = '" + au + "'."); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "version = " + version); return version; } /** * Provides the version of the metadata of an AU stored in the database. * * @param conn * A Connection with the database connection to be used. * @param au * An ArchivalUnit with the AU involved. * @return an int with the version of the metadata of the AU stored in the * database. * @throws DbException * if any problem occurred accessing the database. 
*/ int getAuMetadataVersion(Connection conn, ArchivalUnit au) throws DbException { final String DEBUG_HEADER = "getAuMetadataVersion(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "au = " + au); String pluginId = null; String auKey = null; int version = UNKNOWN_VERSION; PreparedStatement selectMetadataVersion = null; ResultSet resultSet = null; try { String auId = au.getAuId(); pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId() = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); selectMetadataVersion = dbManager.prepareStatement(conn, FIND_AU_METADATA_VERSION_QUERY); selectMetadataVersion.setString(1, pluginId); selectMetadataVersion.setString(2, auKey); resultSet = dbManager.executeQuery(selectMetadataVersion); if (resultSet.next()) { version = resultSet.getShort(MD_VERSION_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "version = " + version); } } catch (SQLException sqle) { String message = "Cannot get AU metadata version"; log.error(message, sqle); log.error("au = '" + au + "'."); log.error("SQL = '" + FIND_AU_METADATA_VERSION_QUERY + "'."); log.error("pluginId = '" + pluginId + "'."); log.error("auKey = '" + auKey + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(selectMetadataVersion); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "version = " + version); return version; } /** * Provides an indication of whether an Archival Unit requires full * reindexing. * * @param conn * A Connection with the database connection to be used. * @param au * An ArchivalUnit with the AU involved. * @return an boolean indicating whether the Archival Unit requires full * reindexing. * @throws DbException * if any problem occurred accessing the database. 
*/ boolean needAuFullReindexing(Connection conn, ArchivalUnit au) throws DbException { final String DEBUG_HEADER = "needAuFullReindexing(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "au = " + au); String auId = au.getAuId(); String pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId() = " + pluginId); String auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); boolean fullReindexing = false; PreparedStatement selectFullReindexing = null; ResultSet resultSet = null; try { selectFullReindexing = dbManager.prepareStatement(conn, FIND_AU_FULL_REINDEXING_BY_AU_QUERY); selectFullReindexing.setString(1, pluginId); selectFullReindexing.setString(2, auKey); resultSet = dbManager.executeQuery(selectFullReindexing); if (resultSet.next()) { fullReindexing = resultSet.getBoolean(FULLY_REINDEX_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "full reindexing = " + fullReindexing); } } catch (SQLException sqle) { String message = "Cannot get AU fully reindexing flag"; log.error(message, sqle); log.error("au = '" + au + "'."); log.error("SQL = '" + FIND_AU_FULL_REINDEXING_BY_AU_QUERY + "'."); log.error("pluginId = '" + pluginId + "'."); log.error("auKey = '" + auKey + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(selectFullReindexing); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "fullReindexing = " + fullReindexing); return fullReindexing; } /** * Sets whether AU stored in the database requires full reindexing. * @param conn * A Connection with the database connection to be used. * @param au * An ArchivalUnit with the AU involved. * @param fullReindexing the new value of full_reindexing for the AU * in the database * @throws DbException * if any problem occurred accessing the database. 
*/ void updateAuFullReindexing(Connection conn, ArchivalUnit au, boolean fullReindexing) throws DbException { final String DEBUG_HEADER = "updateAuFullReindexing(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "au = " + au); log.debug2(DEBUG_HEADER + "fullReindexing = " + fullReindexing); } PreparedStatement updateFullReindexing = null; String auId = au.getAuId(); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auId = " + auId); String pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId() = " + pluginId); String auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); try { updateFullReindexing = dbManager.prepareStatement(conn, UPDATE_AU_FULL_REINDEXING_QUERY); updateFullReindexing.setBoolean(1, fullReindexing); updateFullReindexing.setString(2, pluginId); updateFullReindexing.setString(3, auKey); dbManager.executeUpdate(updateFullReindexing); } catch (SQLException sqle) { String message = "Cannot set AU fully reindex flag"; log.error(message, sqle); log.error("au = '" + au + "'."); log.error("SQL = '" + UPDATE_AU_FULL_REINDEXING_QUERY + "'."); log.error("fullReindexing = '" + fullReindexing + "'."); log.error("pluginId = '" + pluginId + "'."); log.error("auKey = '" + auKey + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(updateFullReindexing); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Provides the extraction time of an Archival Unit metadata. * * @param conn * A Connection with the database connection to be used. * @param auSeq * A Long with the identifier of the Archival Unit. * @return a long with the extraction time of the Archival Unit metadata. * @throws DbException * if any problem occurred accessing the database. 
*/ long getAuExtractionTime(Connection conn, Long auSeq) throws DbException { final String DEBUG_HEADER = "getAuExtractionTime(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auSeq = " + auSeq); long timestamp = NEVER_EXTRACTED_EXTRACTION_TIME; PreparedStatement selectLastExtractionTime = null; ResultSet resultSet = null; try { selectLastExtractionTime = dbManager.prepareStatement(conn, FIND_AU_MD_EXTRACT_TIME_BY_AUSEQ_QUERY); selectLastExtractionTime.setLong(1, auSeq); resultSet = dbManager.executeQuery(selectLastExtractionTime); if (resultSet.next()) { timestamp = resultSet.getLong(EXTRACT_TIME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "timestamp = " + timestamp); } } catch (SQLException sqle) { String message = "Cannot get AU extraction time"; log.error(message, sqle); log.error("SQL = '" + FIND_AU_MD_EXTRACT_TIME_BY_AUSEQ_QUERY + "'."); log.error("auSeq = '" + auSeq + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(selectLastExtractionTime); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "timestamp = " + timestamp); return timestamp; } /** * Provides the extraction time of an Archival Unit metadata. * * @param conn * A Connection with the database connection to be used. * @param au * An ArchivalUnit with the AU involved. * @return a long with the extraction time of the Archival Unit metadata. * @throws DbException * if any problem occurred accessing the database. 
*/ long getAuExtractionTime(Connection conn, ArchivalUnit au) throws DbException { final String DEBUG_HEADER = "getAuExtractionTime(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "au = " + au); String pluginId = null; String auKey = null; long timestamp = NEVER_EXTRACTED_EXTRACTION_TIME; PreparedStatement selectLastExtractionTime = null; ResultSet resultSet = null; try { String auId = au.getAuId(); pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId() = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); selectLastExtractionTime = dbManager.prepareStatement(conn, FIND_AU_MD_EXTRACT_TIME_BY_AU_QUERY); selectLastExtractionTime.setString(1, pluginId); selectLastExtractionTime.setString(2, auKey); resultSet = dbManager.executeQuery(selectLastExtractionTime); if (resultSet.next()) { timestamp = resultSet.getLong(EXTRACT_TIME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "timestamp = " + timestamp); } } catch (SQLException sqle) { String message = "Cannot get AU extraction time"; log.error(message, sqle); log.error("au = '" + au + "'."); log.error("SQL = '" + FIND_AU_MD_EXTRACT_TIME_BY_AU_QUERY + "'."); log.error("pluginId = '" + pluginId + "'."); log.error("auKey = '" + auKey + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(selectLastExtractionTime); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "timestamp = " + timestamp); return timestamp; } /** * Provides the identifier of a platform. * * @param conn * A Connection with the database connection to be used. * @param platformName * A String with the platform identifier. * @return a Long with the identifier of the platform. * @throws DbException * if any problem occurred accessing the database. 
*/ Long findPlatform(Connection conn, String platformName) throws DbException { final String DEBUG_HEADER = "findPlatform(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "platformName = " + platformName); Long platformSeq = null; ResultSet resultSet = null; PreparedStatement findPlatform = dbManager.prepareStatement(conn, FIND_PLATFORM_QUERY); try { findPlatform.setString(1, platformName); resultSet = dbManager.executeQuery(findPlatform); if (resultSet.next()) { platformSeq = resultSet.getLong(PLATFORM_SEQ_COLUMN); } } catch (SQLException sqle) { String message = "Cannot find platform"; log.error(message, sqle); log.error("SQL = '" + FIND_PLATFORM_QUERY + "'."); log.error("platformName = '" + platformName + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(findPlatform); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "platformSeq = " + platformSeq); return platformSeq; } /** * Adds a platform to the database. * * @param conn * A Connection with the database connection to be used. * @param platformName * A String with the platform name. * @return a Long with the identifier of the platform just added. * @throws DbException * if any problem occurred accessing the database. 
*/ Long addPlatform(Connection conn, String platformName) throws DbException { final String DEBUG_HEADER = "addPlatform(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "platformName = " + platformName); Long platformSeq = null; ResultSet resultSet = null; PreparedStatement insertPlatform = dbManager.prepareStatement(conn, INSERT_PLATFORM_QUERY, Statement.RETURN_GENERATED_KEYS); try { // Skip auto-increment key field insertPlatform.setString(1, platformName); dbManager.executeUpdate(insertPlatform); resultSet = insertPlatform.getGeneratedKeys(); if (!resultSet.next()) { log.error("Unable to create platform table row."); return null; } platformSeq = resultSet.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "Added platformSeq = " + platformSeq); } catch (SQLException sqle) { String message = "Cannot add platform"; log.error(message, sqle); log.error("SQL = '" + INSERT_PLATFORM_QUERY + "'."); log.error("platformName = '" + platformName + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(insertPlatform); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "platformSeq = " + platformSeq); return platformSeq; } /** * Adds a disabled AU to the list of pending AUs to reindex. * * @param conn * A Connection with the database connection to be used. * @param auId * A String with the Archival Unit identifier. * @throws DbException * if any problem occurred accessing the database. 
*/ void addDisabledAuToPendingAus(Connection conn, String auId) throws DbException { final String DEBUG_HEADER = "addDisabledAuToPendingAus(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); String pluginId = null; String auKey = null; PreparedStatement addPendingAuStatement = dbManager.prepareStatement(conn, INSERT_DISABLED_PENDING_AU_QUERY); try { pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); addPendingAuStatement.setString(1, pluginId); addPendingAuStatement.setString(2, auKey); int count = dbManager.executeUpdate(addPendingAuStatement); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "count = " + count); } catch (SQLException sqle) { String message = "Cannot add disabled pending AU"; log.error(message, sqle); log.error("auId = '" + auId + "'."); log.error("SQL = '" + INSERT_PLATFORM_QUERY + "'."); log.error("pluginId = '" + pluginId + "'."); log.error("auKey = '" + auKey + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(addPendingAuStatement); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Adds an AU with failed indexing to the list of pending AUs to reindex. * * @param conn * A Connection with the database connection to be used. * @param auId * A String with the Archival Unit identifier. * @throws DbException * if any problem occurred accessing the database. 
*/ void addFailedIndexingAuToPendingAus(Connection conn, String auId) throws DbException { final String DEBUG_HEADER = "addFailedIndexingAuToPendingAus(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); String pluginId = null; String auKey = null; PreparedStatement addPendingAuStatement = dbManager.prepareStatement(conn, INSERT_FAILED_INDEXING_PENDING_AU_QUERY); try { pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); addPendingAuStatement.setString(1, pluginId); addPendingAuStatement.setString(2, auKey); int count = dbManager.executeUpdate(addPendingAuStatement); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "count = " + count); } catch (SQLException sqle) { String message = "Cannot add failed pending AU"; log.error(message, sqle); log.error("auId = '" + auId + "'."); log.error("SQL = '" + INSERT_PLATFORM_QUERY + "'."); log.error("pluginId = '" + pluginId + "'."); log.error("auKey = '" + auKey + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(addPendingAuStatement); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Provides the identifiers of pending Archival Units with a given priority. * * @param conn * A Connection with the database connection to be used. * @param priority * An int with the priority of the requested Archival Units. * @return a Collection<String> with the identifiers of pending Archival Units * with the given priority. * @throws DbException * if any problem occurred accessing the database. 
*/ Collection<String> findPendingAusWithPriority(Connection conn, int priority) throws DbException { final String DEBUG_HEADER = "findPendingAusWithPriority(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "priority = " + priority); Collection<String> aus = new ArrayList<String>(); String pluginId; String auKey; String auId; ResultSet results = null; PreparedStatement selectAus = dbManager.prepareStatement(conn, FIND_PENDING_AUS_WITH_PRIORITY_QUERY); try { selectAus.setInt(1, priority); results = dbManager.executeQuery(selectAus); while (results.next()) { pluginId = results.getString(PLUGIN_ID_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId); auKey = results.getString(AU_KEY_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); auId = PluginManager.generateAuId(pluginId, auKey); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auId = " + auId); aus.add(auId); } } catch (SQLException sqle) { String message = "Cannot find pending AUs"; log.error(message, sqle); log.error("SQL = '" + INSERT_PLATFORM_QUERY + "'."); log.error("priority = '" + priority + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(results); DbManager.safeCloseStatement(selectAus); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "aus.size() = " + aus.size()); return aus; } /** * Provides the identifier of the publisher of an Archival Unit. * * @param conn * A Connection with the database connection to be used. * @param auSeq * A Long with the identifier of the Archival Unit. * @return a Long with the identifier of the publisher. * @throws DbException * if any problem occurred accessing the database. 
*/
Long findAuPublisher(Connection conn, Long auSeq) throws DbException {
  final String DEBUG_HEADER = "findAuPublisher(): ";
  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auSeq = " + auSeq);

  Long publisherSeq = null;
  ResultSet resultSet = null;
  PreparedStatement findPublisher =
      dbManager.prepareStatement(conn, FIND_AU_PUBLISHER_QUERY);

  try {
    findPublisher.setLong(1, auSeq);
    resultSet = dbManager.executeQuery(findPublisher);

    // Returns null when the AU has no publisher row.
    if (resultSet.next()) {
      publisherSeq = resultSet.getLong(PUBLISHER_SEQ_COLUMN);
    }
  } catch (SQLException sqle) {
    String message = "Cannot find the publisher of an AU";
    log.error(message, sqle);
    log.error("SQL = '" + FIND_AU_PUBLISHER_QUERY + "'.");
    log.error("auSeq = '" + auSeq + "'.");
    throw new DbException(message, sqle);
  } finally {
    DbManager.safeCloseResultSet(resultSet);
    DbManager.safeCloseStatement(findPublisher);
  }

  if (log.isDebug2())
    log.debug2(DEBUG_HEADER + "publisherSeq = " + publisherSeq);
  return publisherSeq;
}

/**
 * Provides the authors of a metadata item.
 *
 * @param conn
 *          A Connection with the database connection to be used.
 * @param mdItemSeq
 *          A Long with the metadata item identifier.
 * @return a Collection<String> with the authors of the metadata item.
 * @throws DbException
 *           if any problem occurred accessing the database.
 */
Collection<String> getMdItemAuthors(Connection conn, Long mdItemSeq)
    throws DbException {
  final String DEBUG_HEADER = "getMdItemAuthors(): ";
  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq);

  List<String> authors = new ArrayList<String>();
  PreparedStatement findMdItemAuthor =
      dbManager.prepareStatement(conn, FIND_MD_ITEM_AUTHOR_QUERY);
  ResultSet resultSet = null;

  try {
    // Get the existing authors.
    findMdItemAuthor.setLong(1, mdItemSeq);
    resultSet = dbManager.executeQuery(findMdItemAuthor);

    while (resultSet.next()) {
      authors.add(resultSet.getString(AUTHOR_NAME_COLUMN));
    }
  } catch (SQLException sqle) {
    String message = "Cannot get the authors of a metadata item";
    log.error(message, sqle);
    log.error("SQL = '" + FIND_MD_ITEM_AUTHOR_QUERY + "'.");
    log.error("mdItemSeq = '" + mdItemSeq + "'.");
    throw new DbException(message, sqle);
  } finally {
    DbManager.safeCloseResultSet(resultSet);
    DbManager.safeCloseStatement(findMdItemAuthor);
  }

  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "authors = " + authors);
  return authors;
}

/**
 * Provides the keywords of a metadata item.
 *
 * @param conn
 *          A Connection with the database connection to be used.
 * @param mdItemSeq
 *          A Long with the metadata item identifier.
 * @return A Collection<String> with the keywords of the metadata item.
 * @throws DbException
 *           if any problem occurred accessing the database.
 */
Collection<String> getMdItemKeywords(Connection conn, Long mdItemSeq)
    throws DbException {
  final String DEBUG_HEADER = "getMdItemKeywords(): ";
  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq);

  List<String> keywords = new ArrayList<String>();
  PreparedStatement findMdItemKeyword =
      dbManager.prepareStatement(conn, FIND_MD_ITEM_KEYWORD_QUERY);
  ResultSet resultSet = null;

  try {
    // Get the existing keywords.
    findMdItemKeyword.setLong(1, mdItemSeq);
    resultSet = dbManager.executeQuery(findMdItemKeyword);

    while (resultSet.next()) {
      keywords.add(resultSet.getString(KEYWORD_COLUMN));
    }
  } catch (SQLException sqle) {
    String message = "Cannot get the keywords of a metadata item";
    log.error(message, sqle);
    log.error("SQL = '" + FIND_MD_ITEM_KEYWORD_QUERY + "'.");
    log.error("mdItemSeq = '" + mdItemSeq + "'.");
    throw new DbException(message, sqle);
  } finally {
    DbManager.safeCloseResultSet(resultSet);
    DbManager.safeCloseStatement(findMdItemKeyword);
  }

  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "keywords = " + keywords);
  return keywords;
}

/**
 * Adds to the database the authors of a metadata item.
 *
 * @param conn
 *          A Connection with the database connection to be used.
 * @param mdItemSeq
 *          A Long with the metadata item identifier.
 * @param authors
 *          A Collection<String> with the authors of the metadata item.
 * @throws DbException
 *           if any problem occurred accessing the database.
 */
void addMdItemAuthors(Connection conn, Long mdItemSeq,
    Collection<String> authors) throws DbException {
  final String DEBUG_HEADER = "addMdItemAuthors(): ";
  if (log.isDebug2()) {
    log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq);
    log.debug2(DEBUG_HEADER + "authors = " + authors);
  }

  // Nothing to do without authors.
  if (authors == null || authors.size() == 0) {
    return;
  }

  // The SQL text differs by database type (MySQL vs. others).
  String sql = getInsertMdItemAuthorSql();
  PreparedStatement insertMdItemAuthor =
      dbManager.prepareStatement(conn, sql);

  try {
    for (String author : authors) {
      insertMdItemAuthor.setLong(1, mdItemSeq);
      insertMdItemAuthor.setString(2, author);
      // NOTE(review): mdItemSeq is bound twice — the insert query apparently
      // references it again in a subquery; confirm against INSERT_AUTHOR_QUERY.
      insertMdItemAuthor.setLong(3, mdItemSeq);
      int count = dbManager.executeUpdate(insertMdItemAuthor);

      if (log.isDebug3()) {
        log.debug3(DEBUG_HEADER + "count = " + count);
        log.debug3(DEBUG_HEADER + "Added author = " + author);
      }
    }
  } catch (SQLException sqle) {
    String message = "Cannot add metadata item authors";
    log.error(message, sqle);
    log.error("SQL = '" + sql + "'.");
    log.error("mdItemSeq = '" + mdItemSeq + "'.");
    log.error("authors = " + authors + ".");
    throw new DbException(message, sqle);
  } finally {
    DbManager.safeCloseStatement(insertMdItemAuthor);
  }

  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done.");
}

/**
 * Provides the SQL query used to insert a metadata item author.
 *
 * @return a String with the SQL query used to insert a metadata item author.
 */
private String getInsertMdItemAuthorSql() {
  if (dbManager.isTypeMysql()) {
    return INSERT_AUTHOR_MYSQL_QUERY;
  }

  return INSERT_AUTHOR_QUERY;
}

/**
 * Adds to the database the keywords of a metadata item.
 *
 * @param conn
 *          A Connection with the database connection to be used.
 * @param mdItemSeq
 *          A Long with the metadata item identifier.
 * @param keywords
 *          A Collection<String> with the keywords of the metadata item.
 * @throws DbException
 *           if any problem occurred accessing the database.
 */
void addMdItemKeywords(Connection conn, Long mdItemSeq,
    Collection<String> keywords) throws DbException {
  final String DEBUG_HEADER = "addMdItemKeywords(): ";
  if (log.isDebug2()) {
    log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq);
    log.debug2(DEBUG_HEADER + "keywords = " + keywords);
  }

  // Nothing to do without keywords.
  if (keywords == null || keywords.size() == 0) {
    return;
  }

  PreparedStatement insertMdItemKeyword =
      dbManager.prepareStatement(conn, INSERT_KEYWORD_QUERY);

  try {
    for (String keyword : keywords) {
      insertMdItemKeyword.setLong(1, mdItemSeq);
      insertMdItemKeyword.setString(2, keyword);
      int count = dbManager.executeUpdate(insertMdItemKeyword);

      if (log.isDebug3()) {
        log.debug3(DEBUG_HEADER + "count = " + count);
        log.debug3(DEBUG_HEADER + "Added keyword = " + keyword);
      }
    }
  } catch (SQLException sqle) {
    String message = "Cannot add metadata item keywords";
    log.error(message, sqle);
    log.error("SQL = '" + INSERT_KEYWORD_QUERY + "'.");
    log.error("mdItemSeq = '" + mdItemSeq + "'.");
    log.error("keywords = " + keywords + ".");
    throw new DbException(message, sqle);
  } finally {
    DbManager.safeCloseStatement(insertMdItemKeyword);
  }

  if (log.isDebug2())
    log.debug2(DEBUG_HEADER + "Done.");
}

/**
 * Adds an Archival Unit to the table of unconfigured Archival Units.
 *
 * @param conn
 *          A Connection with the database connection to be used.
 * @param auId
 *          A String with the Archival Unit identifier.
 * @throws DbException
 *           if any problem occurred accessing the database.
 */
void persistUnconfiguredAu(Connection conn, String auId) throws DbException {
  final String DEBUG_HEADER = "persistUnconfiguredAu(): ";
  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId);

  PreparedStatement insertUnconfiguredAu = null;
  String pluginId = null;
  String auKey = null;

  try {
    insertUnconfiguredAu =
        dbManager.prepareStatement(conn, INSERT_UNCONFIGURED_AU_QUERY);

    // The unconfigured AU row is keyed by plugin identifier and AU key.
    pluginId = PluginManager.pluginIdFromAuId(auId);
    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId);
    auKey = PluginManager.auKeyFromAuId(auId);
    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey);

    insertUnconfiguredAu.setString(1, pluginId);
    insertUnconfiguredAu.setString(2, auKey);
    int count = dbManager.executeUpdate(insertUnconfiguredAu);
    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "count = " + count);
  } catch (SQLException sqle) {
    String message = "Cannot insert archival unit in unconfigured table";
    log.error(message, sqle);
    log.error("auId = " + auId);
    log.error("SQL = '" + INSERT_UNCONFIGURED_AU_QUERY + "'.");
    log.error("pluginId = " + pluginId);
    log.error("auKey = " + auKey);
    throw new DbException(message, sqle);
  } catch (DbException dbe) {
    // prepareStatement() may throw DbException; log context before rethrowing
    // unchanged.
    String message = "Cannot insert archival unit in unconfigured table";
    log.error(message, dbe);
    log.error("auId = " + auId);
    log.error("SQL = '" + INSERT_UNCONFIGURED_AU_QUERY + "'.");
    log.error("pluginId = " + pluginId);
    log.error("auKey = " + auKey);
    throw dbe;
  } finally {
    DbManager.safeCloseStatement(insertUnconfiguredAu);
  }

  if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done.");
}

/**
 * Removes an Archival Unit from the table of unconfigured Archival Units.
* * @param conn * A Connection with the database connection to be used. * @param auId * A String with the AU identifier. * @throws DbException * if any problem occurred accessing the database. */ void removeFromUnconfiguredAus(Connection conn, String auId) { final String DEBUG_HEADER = "removeFromUnconfiguredAus(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); PreparedStatement deleteUnconfiguredAu = null; String pluginId = null; String auKey = null; try { if (isAuInUnconfiguredAuTable(conn, auId)) { deleteUnconfiguredAu = dbManager.prepareStatement(conn, DELETE_UNCONFIGURED_AU_QUERY); pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); deleteUnconfiguredAu.setString(1, pluginId); deleteUnconfiguredAu.setString(2, auKey); int count = dbManager.executeUpdate(deleteUnconfiguredAu); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "count = " + count); DbManager.commitOrRollback(conn, log); } } catch (SQLException sqle) { String message = "Cannot delete archival unit from unconfigured table"; log.error(message, sqle); log.error("auId = " + auId); log.error("SQL = '" + DELETE_UNCONFIGURED_AU_QUERY + "'."); log.error("pluginId = " + pluginId); log.error("auKey = " + auKey); } catch (DbException dbe) { String message = "Cannot delete archival unit from unconfigured table"; log.error(message, dbe); log.error("auId = " + auId); log.error("SQL = '" + DELETE_UNCONFIGURED_AU_QUERY + "'."); log.error("pluginId = " + pluginId); log.error("auKey = " + auKey); } finally { DbManager.safeCloseStatement(deleteUnconfiguredAu); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Done."); } /** * Provides the count of recorded unconfigured archival units. * * @param conn * A Connection with the database connection to be used. 
* @return a long with the count of recorded unconfigured archival units. * @throws DbException * if any problem occurred accessing the database. */ long countUnconfiguredAus(Connection conn) throws DbException { final String DEBUG_HEADER = "countUnconfiguredAus(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); long rowCount = -1; ResultSet results = null; PreparedStatement unconfiguredAu = dbManager.prepareStatement(conn, UNCONFIGURED_AU_COUNT_QUERY); try { // Count the rows in the table. results = dbManager.executeQuery(unconfiguredAu); results.next(); rowCount = results.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "rowCount = " + rowCount); } catch (SQLException sqle) { String message = "Cannot count unconfigured archival units"; log.error(message, sqle); log.error("SQL = '" + UNCONFIGURED_AU_COUNT_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(results); DbManager.safeCloseStatement(unconfiguredAu); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "rowCount = " + rowCount); return rowCount; } /** * Provides an indication of whether an Archival Unit is in the table of * unconfigured Archival Units. * * @param conn * A Connection with the database connection to be used. * @param auId * A String with the Archival Unit identifier. * @return a boolean with <code>true</code> if the Archival Unit is in the * UNCONFIGURED_AU table, <code>false</code> otherwise. * @throws DbException * if any problem occurred accessing the database. 
*/ boolean isAuInUnconfiguredAuTable(Connection conn, String auId) throws DbException { final String DEBUG_HEADER = "isAuInUnconfiguredAuTable(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); String pluginId = null; String auKey = null; long rowCount = -1; ResultSet results = null; PreparedStatement unconfiguredAu = dbManager.prepareStatement(conn, FIND_UNCONFIGURED_AU_COUNT_QUERY); try { pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); unconfiguredAu.setString(1, pluginId); unconfiguredAu.setString(2, auKey); // Find the archival unit in the table. results = dbManager.executeQuery(unconfiguredAu); results.next(); rowCount = results.getLong(1); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "rowCount = " + rowCount); } catch (SQLException sqle) { String message = "Cannot find archival unit in unconfigured table"; log.error(message, sqle); log.error("auId = " + auId); log.error("SQL = '" + FIND_UNCONFIGURED_AU_COUNT_QUERY + "'."); log.error("pluginId = " + pluginId); log.error("auKey = " + auKey); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(results); DbManager.safeCloseStatement(unconfiguredAu); } boolean result = rowCount > 0; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "result = " + result); return result; } /** * Removes an Archival Unit child metadata item from the database. * * @param conn * A Connection with the database connection to be used. * @param auMdSeq * A Long with the identifier of the Archival Unit metadata. * @param mdItemSeq * A Long with the metadata identifier. * @return an int with the number of metadata items deleted. * @throws DbException * if any problem occurred accessing the database. 
*/ int removeAuChildMetadataItem(Connection conn, Long auMdSeq, Long mdItemSeq) throws DbException { final String DEBUG_HEADER = "removeAuChildMetadataItem(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "auMdSeq = " + auMdSeq); log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); } int count = 0; // Do nothing if any of the parameters are null. if (auMdSeq != null && mdItemSeq != null) { PreparedStatement deleteMetadataItem = dbManager.prepareStatement(conn, DELETE_AU_CHILD_MD_ITEM_QUERY); try { deleteMetadataItem.setLong(1, auMdSeq); deleteMetadataItem.setLong(2, mdItemSeq); count = dbManager.executeUpdate(deleteMetadataItem); } catch (SQLException sqle) { String message = "Cannot delete child metadata item"; log.error(message, sqle); log.error("mdItemSeq = " + mdItemSeq); log.error("SQL = '" + DELETE_AU_CHILD_MD_ITEM_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(deleteMetadataItem); } } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "count = " + count); return count; } /** * Provides the names of the publishers in the database. * * @return a Collection<String> with the publisher names. * @throws DbException * if any problem occurred accessing the database. */ Collection<String> getPublisherNames() throws DbException { final String DEBUG_HEADER = "getPublisherNames(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Collection<String> publisherNames = null; Connection conn = null; try { // Get a connection to the database. conn = dbManager.getConnection(); // Get the publisher names. publisherNames = getPublisherNames(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publisherNames.size() = " + publisherNames.size()); return publisherNames; } /** * Provides the names of the publishers in the database. * * @param conn * A Connection with the database connection to be used. * @return a Collection<String> with the publisher names. 
* @throws DbException * if any problem occurred accessing the database. */ Collection<String> getPublisherNames(Connection conn) throws DbException { final String DEBUG_HEADER = "getPublisherNames(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Collection<String> publisherNames = new ArrayList<String>(); PreparedStatement stmt = null; ResultSet resultSet = null; try { // Get the publisher names. stmt = dbManager.prepareStatement(conn, GET_PUBLISHER_NAMES_QUERY); resultSet = dbManager.executeQuery(stmt); // Loop through the publisher names. while (resultSet.next()) { String publisherName = resultSet.getString(PUBLISHER_NAME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publisherName = " + publisherName); publisherNames.add(publisherName); } } catch (SQLException sqle) { String message = "Cannot get the publisher names"; log.error(message, sqle); log.error("SQL = '" + GET_PUBLISHER_NAMES_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publisherNames.size() = " + publisherNames.size()); return publisherNames; } /** * Provides the DOI prefixes for the publishers in the database with multiple * DOI prefixes. * * @return a Map<String, Collection<String>> with the DOI prefixes keyed by * the publisher name. * @throws DbException * if any problem occurred accessing the database. */ Map<String, Collection<String>> getPublishersWithMultipleDoiPrefixes() throws DbException { final String DEBUG_HEADER = "getPublishersWithMultipleDoiPrefixes(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> publishersDoiPrefixes = null; Connection conn = null; try { // Get a connection to the database. conn = dbManager.getConnection(); // Get the publisher DOI prefixes. 
publishersDoiPrefixes = getPublishersWithMultipleDoiPrefixes(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publishersDoiPrefixes.size() = " + publishersDoiPrefixes.size()); return publishersDoiPrefixes; } /** * Provides the DOI prefixes for the publishers in the database with multiple * DOI prefixes. * * @param conn * A Connection with the database connection to be used. * @return a Map<String, Collection<String>> with the DOI prefixes keyed by * the publisher name. * @throws DbException * if any problem occurred accessing the database. */ Map<String, Collection<String>> getPublishersWithMultipleDoiPrefixes( Connection conn) throws DbException { final String DEBUG_HEADER = "getPublishersWithMultipleDoiPrefixes(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> publishersDoiPrefixes = new TreeMap<String, Collection<String>>(); PreparedStatement stmt = null; ResultSet resultSet = null; String sql = null; try { String previousPublisherName = null; // Get the publisher DOI prefixes. sql = GET_PUBLISHERS_MULTIPLE_DOI_PREFIXES_DERBY_QUERY; if (dbManager.isTypePostgresql()) { sql = GET_PUBLISHERS_MULTIPLE_DOI_PREFIXES_PG_QUERY; } else if (dbManager.isTypeMysql()) { sql = GET_PUBLISHERS_MULTIPLE_DOI_PREFIXES_MYSQL_QUERY; } stmt = dbManager.prepareStatement(conn, sql); resultSet = dbManager.executeQuery(stmt); // Loop through the publisher DOI prefixes. 
while (resultSet.next()) { String publisherName = resultSet.getString(PUBLISHER_NAME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publisherName = " + publisherName); String prefix = resultSet.getString("prefix"); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "prefix = " + prefix); if (publisherName.equals(previousPublisherName)) { publishersDoiPrefixes.get(publisherName).add(prefix); } else { Collection<String> publisherPrefixes = new ArrayList<String>(); publisherPrefixes.add(prefix); publishersDoiPrefixes.put(publisherName, publisherPrefixes); previousPublisherName = publisherName; } } } catch (SQLException sqle) { String message = "Cannot get the publishers DOI prefixes"; log.error(message, sqle); log.error("SQL = '" + sql + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publishersDoiPrefixes.size() = " + publishersDoiPrefixes.size()); return publishersDoiPrefixes; } /** * Provides the publisher names linked to DOI prefixes in the database that * are linked to multiple publishers. * * @return a Map<String, Collection<String>> with the publisher names keyed by * the DOI prefixes to which they are linked. * @throws DbException * if any problem occurred accessing the database. */ public Map<String, Collection<String>> getDoiPrefixesWithMultiplePublishers() throws DbException { final String DEBUG_HEADER = "getDoiPrefixesWithMultiplePublishers(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> doiPrefixesPublishers = null; Connection conn = null; try { // Get a connection to the database. conn = dbManager.getConnection(); // Get the DOI prefix publishers. 
doiPrefixesPublishers = getDoiPrefixesWithMultiplePublishers(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publishersDoiPrefixes.size() = " + doiPrefixesPublishers.size()); return doiPrefixesPublishers; } /** * Provides the publisher names linked to DOI prefixes in the database that * are linked to multiple publishers. * * @param conn * A Connection with the database connection to be used. * @return a Map<String, Collection<String>> with the publisher names keyed by * the DOI prefixes to which they are linked. * @throws DbException * if any problem occurred accessing the database. */ public Map<String, Collection<String>> getDoiPrefixesWithMultiplePublishers( Connection conn) throws DbException { final String DEBUG_HEADER = "getDoiPrefixesWithMultiplePublishers(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> doiPrefixesPublishers = new TreeMap<String, Collection<String>>(); PreparedStatement stmt = null; ResultSet resultSet = null; String sql = null; try { String previousDoiPrefix = null; // Get the DOI prefix publishers. sql = GET_DOI_PREFIXES_MULTIPLE_PUBLISHERS_DERBY_QUERY; if (dbManager.isTypePostgresql()) { sql = GET_DOI_PREFIXES_MULTIPLE_PUBLISHERS_PG_QUERY; } else if (dbManager.isTypeMysql()) { sql = GET_DOI_PREFIXES_MULTIPLE_PUBLISHERS_MYSQL_QUERY; } stmt = dbManager.prepareStatement(conn, sql); resultSet = dbManager.executeQuery(stmt); // Loop through the DOI prefix publishers. 
while (resultSet.next()) { String prefix = resultSet.getString("prefix"); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "prefix = " + prefix); String publisherName = resultSet.getString(PUBLISHER_NAME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publisherName = " + publisherName); if (prefix.equals(previousDoiPrefix)) { doiPrefixesPublishers.get(prefix).add(publisherName); } else { Collection<String> prefixPublishers = new ArrayList<String>(); prefixPublishers.add(publisherName); doiPrefixesPublishers.put(prefix, prefixPublishers); previousDoiPrefix = prefix; } } } catch (SQLException sqle) { String message = "Cannot get the DOI prefixes publishers"; log.error(message, sqle); log.error("SQL = '" + sql + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publishersDoiPrefixes.size() = " + doiPrefixesPublishers.size()); return doiPrefixesPublishers; } /** * Provides the DOI prefixes linked to the Archival Unit identifier for the * Archival Units in the database with multiple DOI prefixes. * * @return a Map<String, Collection<String>> with the DOI prefixes keyed by * the Archival Unit identifier. * @throws DbException * if any problem occurred accessing the database. */ Map<String, Collection<String>> getAuIdsWithMultipleDoiPrefixes() throws DbException { final String DEBUG_HEADER = "getAuIdsWithMultipleDoiPrefixes(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> ausDoiPrefixes = null; Connection conn = null; try { // Get a connection to the database. conn = dbManager.getConnection(); // Get the Archival Unit DOI prefixes. 
ausDoiPrefixes = getAuIdsWithMultipleDoiPrefixes(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "ausDoiPrefixes.size() = " + ausDoiPrefixes.size()); return ausDoiPrefixes; } /** * Provides the DOI prefixes linked to the Archival Unit identifier for the * Archival Units in the database with multiple DOI prefixes. * * @param conn * A Connection with the database connection to be used. * @return a Map<String, Collection<String>> with the DOI prefixes keyed by * the Archival Unit identifier. * @throws DbException * if any problem occurred accessing the database. */ Map<String, Collection<String>> getAuIdsWithMultipleDoiPrefixes( Connection conn) throws DbException { final String DEBUG_HEADER = "getAuIdsWithMultipleDoiPrefixes(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> ausDoiPrefixes = new TreeMap<String, Collection<String>>(); PreparedStatement stmt = null; ResultSet resultSet = null; String sql = null; try { String previousAuId = null; // Get the Archival Unit DOI prefixes. sql = GET_AUS_MULTIPLE_DOI_PREFIXES_DERBY_QUERY; if (dbManager.isTypePostgresql()) { sql = GET_AUS_MULTIPLE_DOI_PREFIXES_PG_QUERY; } else if (dbManager.isTypeMysql()) { sql = GET_AUS_MULTIPLE_DOI_PREFIXES_MYSQL_QUERY; } stmt = dbManager.prepareStatement(conn, sql); resultSet = dbManager.executeQuery(stmt); // Loop through the Archival Unit DOI prefixes. 
while (resultSet.next()) { String pluginId = resultSet.getString(PLUGIN_ID_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId); String auKey = resultSet.getString(AU_KEY_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); String auId = PluginManager.generateAuId(pluginId, auKey); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auId = " + auId); String prefix = resultSet.getString("prefix"); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "prefix = " + prefix); if (auId.equals(previousAuId)) { ausDoiPrefixes.get(auId).add(prefix); } else { Collection<String> auPrefixes = new ArrayList<String>(); auPrefixes.add(prefix); ausDoiPrefixes.put(auId, auPrefixes); previousAuId = auId; } } } catch (SQLException sqle) { String message = "Cannot get the Archival Units DOI prefixes"; log.error(message, sqle); log.error("SQL = '" + sql + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "ausDoiPrefixes.size() = " + ausDoiPrefixes.size()); return ausDoiPrefixes; } /** * Provides the ISBNs for the publications in the database with more than two * ISBNS. * * @return a Map<String, Collection<Isbn>> with the ISBNs keyed by the * publication name. * @throws DbException * if any problem occurred accessing the database. */ public Map<String, Collection<Isbn>> getPublicationsWithMoreThan2Isbns() throws DbException { final String DEBUG_HEADER = "getPublicationsWithMoreThan2Isbns(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<Isbn>> publicationsIsbns = null; Connection conn = null; try { // Get a connection to the database. conn = dbManager.getConnection(); // Get the publication ISBNs. 
publicationsIsbns = getPublicationsWithMoreThan2Isbns(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationsIsbns.size() = " + publicationsIsbns.size()); return publicationsIsbns; } /** * Provides the ISBNs for the publications in the database with more than two * ISBNS. * * @param conn * A Connection with the database connection to be used. * @return a Map<String, Collection<Isbn>> with the ISBNs keyed by the * publication name. * @throws DbException * if any problem occurred accessing the database. */ public Map<String, Collection<Isbn>> getPublicationsWithMoreThan2Isbns( Connection conn) throws DbException { final String DEBUG_HEADER = "getPublicationsWithMoreThan2Isbns(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<Isbn>> publicationsIsbns = new TreeMap<String, Collection<Isbn>>(); PreparedStatement stmt = null; ResultSet resultSet = null; try { String previousPublicationName = null; // Get the publication ISBNs. stmt = dbManager.prepareStatement(conn, GET_PUBLICATIONS_MORE_2_ISBNS_QUERY); resultSet = dbManager.executeQuery(stmt); // Loop through the publication ISBNs. 
while (resultSet.next()) { String publicationName = resultSet.getString(NAME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publicationName = " + publicationName); String isbn = resultSet.getString(ISBN_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "isbn = " + isbn); String isbnType = resultSet.getString(ISBN_TYPE_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "isbnType = " + isbnType); if (publicationName.equals(previousPublicationName)) { publicationsIsbns.get(publicationName).add(new Isbn(isbn, isbnType)); } else { Collection<Isbn> publicationIsbns = new ArrayList<Isbn>(); publicationIsbns.add(new Isbn(isbn, isbnType)); publicationsIsbns.put(publicationName, publicationIsbns); previousPublicationName = publicationName; } } } catch (SQLException sqle) { String message = "Cannot get the publication ISBNs"; log.error(message, sqle); log.error("SQL = '" + GET_PUBLICATIONS_MORE_2_ISBNS_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationsIsbns.size() = " + publicationsIsbns.size()); return publicationsIsbns; } /** * Provides the ISSNs for the publications in the database with more than two * ISSNS. * * @return a Map<PkNamePair, Collection<Issn>> with the ISSNs keyed by the * publication PK/name pair. * @throws DbException * if any problem occurred accessing the database. */ public Map<PkNamePair, Collection<Issn>> getPublicationsWithMoreThan2Issns() throws DbException { final String DEBUG_HEADER = "getPublicationsWithMoreThan2Issns(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<PkNamePair, Collection<Issn>> publicationsIssns = null; Connection conn = null; try { // Get a connection to the database. conn = dbManager.getConnection(); // Get the publication ISSNs. 
publicationsIssns = getPublicationsWithMoreThan2Issns(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationsIssns.size() = " + publicationsIssns.size()); return publicationsIssns; } /** * Provides the ISSNs for the publications in the database with more than two * ISSNS. * * @param conn * A Connection with the database connection to be used. * @return a Map<PkNamePair, Collection<Issn>> with the ISSNs keyed by the * publication PK/name pair. * @throws DbException * if any problem occurred accessing the database. */ public Map<PkNamePair, Collection<Issn>> getPublicationsWithMoreThan2Issns( Connection conn) throws DbException { final String DEBUG_HEADER = "getPublicationsWithMoreThan2Issns(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<PkNamePair, Collection<Issn>> publicationsIssns = new TreeMap<PkNamePair, Collection<Issn>>(); PreparedStatement stmt = null; ResultSet resultSet = null; try { PkNamePair previousPair = null; // Get the publication ISSNs. stmt = dbManager.prepareStatement(conn, GET_PUBLICATIONS_MORE_2_ISSNS_QUERY); resultSet = dbManager.executeQuery(stmt); // Loop through the publication ISSNs. 
while (resultSet.next()) { String publicationName = resultSet.getString(NAME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publicationName = " + publicationName); Long pk = resultSet.getLong(MD_ITEM_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pk = " + pk); PkNamePair pair = new PkNamePair(pk, publicationName); String issn = resultSet.getString(ISSN_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "issn = " + issn); String issnType = resultSet.getString(ISSN_TYPE_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "issnType = " + issnType); if (pair.equals(previousPair)) { publicationsIssns.get(pair).add(new Issn(issn, issnType)); } else { Collection<Issn> publicationIssns = new ArrayList<Issn>(); publicationIssns.add(new Issn(issn, issnType)); publicationsIssns.put(pair, publicationIssns); previousPair = pair; } } } catch (SQLException sqle) { String message = "Cannot get the publication ISSNs"; log.error(message, sqle); log.error("SQL = '" + GET_PUBLICATIONS_MORE_2_ISSNS_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationsIssns.size() = " + publicationsIssns.size()); return publicationsIssns; } /** * Provides the publication names linked to ISBNs in the database that are * linked to multiple publications. * * @return a Map<String, Collection<String>> with the publication names keyed * by the ISBNs to which they are linked. * @throws DbException * if any problem occurred accessing the database. */ public Map<String, Collection<String>> getIsbnsWithMultiplePublications() throws DbException { final String DEBUG_HEADER = "getIsbnsWithMultiplePublications(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> isbnsPublications = null; Connection conn = null; try { // Get a connection to the database. 
conn = dbManager.getConnection(); // Get the ISBN publications. isbnsPublications = getIsbnsWithMultiplePublications(conn); } finally { DbManager.safeRollbackAndClose(conn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "isbnsPublications.size() = " + isbnsPublications.size()); return isbnsPublications; } /** * Provides the publication names linked to ISBNs in the database that are * linked to multiple publications. * * @param conn * A Connection with the database connection to be used. * @return a Map<String, Collection<String>> with the publication names keyed * by the ISBNs to which they are linked. * @throws DbException * if any problem occurred accessing the database. */ public Map<String, Collection<String>> getIsbnsWithMultiplePublications( Connection conn) throws DbException { final String DEBUG_HEADER = "getIsbnsWithMultiplePublications(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> isbnsPublications = new TreeMap<String, Collection<String>>(); PreparedStatement stmt = null; ResultSet resultSet = null; try { String previousIsbn = null; // Get the ISBN publications. stmt = dbManager.prepareStatement(conn, GET_ISBNS_MULTIPLE_PUBLICATIONS_QUERY); resultSet = dbManager.executeQuery(stmt); // Loop through the ISBN publications. 
while (resultSet.next()) { String isbn = resultSet.getString(ISBN_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "isbn = " + isbn); String publicationName = resultSet.getString(NAME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "publicationName = " + publicationName); if (isbn.equals(previousIsbn)) { isbnsPublications.get(isbn).add(publicationName); } else { Collection<String> isbnPublications = new ArrayList<String>(); isbnPublications.add(publicationName); isbnsPublications.put(isbn, isbnPublications); previousIsbn = isbn; } } } catch (SQLException sqle) { String message = "Cannot get the ISBN publications"; log.error(message, sqle); log.error("SQL = '" + GET_ISBNS_MULTIPLE_PUBLICATIONS_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "isbnsPublications.size() = " + isbnsPublications.size()); return isbnsPublications; } /** * Provides the publication names linked to ISSNs in the database that are * linked to multiple publications. * * @return a Map<String, Collection<String>> with the publication names keyed * by the ISSNs to which they are linked. * @throws DbException * if any problem occurred accessing the database. */ public Map<String, Collection<String>> getIssnsWithMultiplePublications() throws DbException { final String DEBUG_HEADER = "getIssnsWithMultiplePublications(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> issnsPublications = null; Connection conn = null; try { // Get a connection to the database. conn = dbManager.getConnection(); // Get the ISSN publications. 
issnsPublications = getIssnsWithMultiplePublications(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "issnsPublications.size() = " + issnsPublications.size());
    return issnsPublications;
  }

  /**
   * Provides the publication names linked to ISSNs in the database that are
   * linked to multiple publications.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Map<String, Collection<String>> with the publication names keyed
   *         by the ISSNs to which they are linked.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  public Map<String, Collection<String>> getIssnsWithMultiplePublications(
      Connection conn) throws DbException {
    final String DEBUG_HEADER = "getIssnsWithMultiplePublications(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Map<String, Collection<String>> issnsPublications =
	new TreeMap<String, Collection<String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      // Tracks the ISSN of the previous row: grouping relies on rows with the
      // same ISSN arriving adjacent to each other (i.e. the query sorts by
      // ISSN).
      String previousIssn = null;

      // Get the ISSN publications.
      stmt = dbManager.prepareStatement(conn,
	  GET_ISSNS_MULTIPLE_PUBLICATIONS_QUERY);

      resultSet = dbManager.executeQuery(stmt);

      // Loop through the ISSN publications.
      while (resultSet.next()) {
	String issn = resultSet.getString(ISSN_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "issn = " + issn);

	String publicationName = resultSet.getString(NAME_COLUMN);
	if (log.isDebug3())
	  log.debug3(DEBUG_HEADER + "publicationName = " + publicationName);

	if (issn.equals(previousIssn)) {
	  // Same ISSN as the previous row: append to its existing collection.
	  issnsPublications.get(issn).add(publicationName);
	} else {
	  // New ISSN: start a fresh collection for it.
	  Collection<String> issnPublications = new ArrayList<String>();
	  issnPublications.add(publicationName);
	  issnsPublications.put(issn, issnPublications);
	  previousIssn = issn;
	}
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the ISSN publications";
      log.error(message, sqle);
      log.error("SQL = '" + GET_ISSNS_MULTIPLE_PUBLICATIONS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "issnsPublications.size() = " + issnsPublications.size());
    return issnsPublications;
  }

  /**
   * Provides the ISSNs for books in the database.
   * 
   * @return a Map<String, Collection<String>> with the ISSNs keyed by the
   *         publication name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  public Map<String, Collection<String>> getBooksWithIssns()
      throws DbException {
    final String DEBUG_HEADER = "getBooksWithIssns(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Map<String, Collection<String>> booksWithIssns = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the books with ISSNs.
      booksWithIssns = getBooksWithIssns(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "booksWithIssns.size() = " + booksWithIssns.size());
    return booksWithIssns;
  }

  /**
   * Provides the ISSNs for books in the database.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
* @return a Map<String, Collection<String>> with the ISSNs keyed by the
   *         publication name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  public Map<String, Collection<String>> getBooksWithIssns(Connection conn)
      throws DbException {
    final String DEBUG_HEADER = "getBooksWithIssns(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Map<String, Collection<String>> booksWithIssns =
	new TreeMap<String, Collection<String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      // Tracks the display name of the previous row: grouping relies on rows
      // for the same publication arriving adjacent to each other.
      String previousDisplayPublicationName = null;

      // Get the publication ISSNs.
      stmt = dbManager.prepareStatement(conn, GET_BOOKS_WITH_ISSNS_QUERY);
      resultSet = dbManager.executeQuery(stmt);

      // Loop through the book ISSNs.
      while (resultSet.next()) {
	String publicationName = resultSet.getString(NAME_COLUMN);
	if (log.isDebug3())
	  log.debug3(DEBUG_HEADER + "publicationName = " + publicationName);

	String publicationTypeName = resultSet.getString(TYPE_NAME_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER
	    + "publicationTypeName = " + publicationTypeName);

	// Disambiguate the publication name with the first letter of its type
	// name, e.g. "Some Title [b]".
	String displayPublicationName = publicationName + " ["
	    + publicationTypeName.substring(0, 1) + "]";

	String issn = resultSet.getString(ISSN_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "issn = " + issn);

	if (displayPublicationName.equals(previousDisplayPublicationName)) {
	  // Same publication as the previous row: append to its collection.
	  booksWithIssns.get(displayPublicationName).add(issn);
	} else {
	  // New publication: start a fresh collection for it.
	  Collection<String> publicationIssns = new ArrayList<String>();
	  publicationIssns.add(issn);
	  booksWithIssns.put(displayPublicationName, publicationIssns);
	  previousDisplayPublicationName = displayPublicationName;
	}
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the book ISSNs";
      log.error(message, sqle);
      log.error("SQL = '" + GET_BOOKS_WITH_ISSNS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "booksWithIssns.size() = " + booksWithIssns.size());
    return booksWithIssns;
  }

  /**
   * Provides the ISBNs for periodicals in the database.
   * 
   * @return a Map<String, Collection<String>> with the ISBNs keyed by the
   *         publication name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  public Map<String, Collection<String>> getPeriodicalsWithIsbns()
      throws DbException {
    final String DEBUG_HEADER = "getPeriodicalsWithIsbns(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Map<String, Collection<String>> periodicalsWithIsbns = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the periodicals with ISBNs.
      periodicalsWithIsbns = getPeriodicalsWithIsbns(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "periodicalsWithIsbns.size() = " + periodicalsWithIsbns.size());
    return periodicalsWithIsbns;
  }

  /**
   * Provides the ISBNs for periodicals in the database.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Map<String, Collection<String>> with the ISBNs keyed by the
   *         publication name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  public Map<String, Collection<String>> getPeriodicalsWithIsbns(
      Connection conn) throws DbException {
    final String DEBUG_HEADER = "getPeriodicalsWithIsbns(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Map<String, Collection<String>> periodicalsWithIsbns =
	new TreeMap<String, Collection<String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      // Tracks the display name of the previous row: grouping relies on rows
      // for the same publication arriving adjacent to each other.
      String previousDisplayPublicationName = null;

      // Get the publication ISBNs.
      stmt = dbManager.prepareStatement(conn, GET_PERIODICALS_WITH_ISBNS_QUERY);
      resultSet = dbManager.executeQuery(stmt);

      // Loop through the periodical ISBNs.
while (resultSet.next()) {
	String publicationName = resultSet.getString(NAME_COLUMN);
	if (log.isDebug3())
	  log.debug3(DEBUG_HEADER + "publicationName = " + publicationName);

	String publicationTypeName = resultSet.getString(TYPE_NAME_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER
	    + "publicationTypeName = " + publicationTypeName);

	// Disambiguate the publication name with the first letter of its type
	// name, e.g. "Some Title [j]".
	String displayPublicationName = publicationName + " ["
	    + publicationTypeName.substring(0, 1) + "]";

	String isbn = resultSet.getString(ISBN_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "isbn = " + isbn);

	if (displayPublicationName.equals(previousDisplayPublicationName)) {
	  // Same publication as the previous row: append to its collection.
	  periodicalsWithIsbns.get(displayPublicationName).add(isbn);
	} else {
	  // New publication: start a fresh collection for it.
	  Collection<String> publicationIsbns = new ArrayList<String>();
	  publicationIsbns.add(isbn);
	  periodicalsWithIsbns.put(displayPublicationName, publicationIsbns);
	  previousDisplayPublicationName = displayPublicationName;
	}
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the periodical ISBNs";
      log.error(message, sqle);
      log.error("SQL = '" + GET_PERIODICALS_WITH_ISBNS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "periodicalsWithIsbns.size() = " + periodicalsWithIsbns.size());
    return periodicalsWithIsbns;
  }

  /**
   * Provides the Archival Units in the database with an unknown provider.
   * 
   * @return a Collection<String> with the sorted Archival Unit names.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<String> getUnknownProviderAuIds() throws DbException {
    final String DEBUG_HEADER = "getUnknownProviderAus(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<String> unknownProviderAuIds = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the identifiers of the Archival Units with an unknown provider.
      unknownProviderAuIds = getUnknownProviderAuIds(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "unknownProviderAuIds.size() = " + unknownProviderAuIds.size());
    return unknownProviderAuIds;
  }

  /**
   * Provides the Archival Units in the database with an unknown provider.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Collection<String> with the sorted Archival Unit names.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<String> getUnknownProviderAuIds(Connection conn)
      throws DbException {
    final String DEBUG_HEADER = "getUnknownProviderAuIds(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<String> unknownProviderAuIds = new ArrayList<String>();
    PreparedStatement stmt = null;
    ResultSet resultSet = null;
    String sql = GET_UNKNOWN_PROVIDER_AUS_QUERY;

    try {
      stmt = dbManager.prepareStatement(conn, sql);
      resultSet = dbManager.executeQuery(stmt);

      // Loop through the Archival Units with an unknown provider.
      while (resultSet.next()) {
	String pluginId = resultSet.getString(PLUGIN_ID_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId);

	String auKey = resultSet.getString(AU_KEY_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey);

	// Rebuild the Archival Unit identifier from its plugin and key parts.
	String auId = PluginManager.generateAuId(pluginId, auKey);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auId = " + auId);

	unknownProviderAuIds.add(auId);
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the Archival Units with unknown provider";
      log.error(message, sqle);
      log.error("SQL = '" + sql + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "unknownProviderAuIds.size() = " + unknownProviderAuIds.size());
    return unknownProviderAuIds;
  }

  /**
   * Provides the journal articles in the database whose parent is not a
   * journal.
   * 
   * @return a Collection<Map<String, String>> with the mismatched journal
   *         articles sorted by Archival Unit, parent name and child name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<Map<String, String>> getMismatchedParentJournalArticles()
      throws DbException {
    return getMismatchedParentChildren(
	GET_MISMATCHED_PARENT_JOURNAL_ARTICLES_QUERY);
  }

  /**
   * Provides the book chapters in the database whose parent is not a book or a
   * book series.
   * 
   * @return a Collection<Map<String, String>> with the mismatched book chapters
   *         sorted by Archival Unit, parent name and child name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<Map<String, String>> getMismatchedParentBookChapters()
      throws DbException {
    return getMismatchedParentChildren(
	GET_MISMATCHED_PARENT_BOOK_CHAPTERS_QUERY);
  }

  /**
   * Provides the book volumes in the database whose parent is not a book or a
   * book series.
*
   * @return a Collection<Map<String, String>> with the mismatched book volumes
   *         sorted by Archival Unit, parent name and child name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<Map<String, String>> getMismatchedParentBookVolumes()
      throws DbException {
    return getMismatchedParentChildren(
	GET_MISMATCHED_PARENT_BOOK_VOLUMES_QUERY);
  }

  /**
   * Provides the children in the database with a mismatched parent.
   * 
   * @param query
   *          A String with the database query to be used.
   * @return a Collection<Map<String, String>> with the mismatched children
   *         sorted by Archival Unit, parent name and child name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  private Collection<Map<String, String>> getMismatchedParentChildren(
      String query) throws DbException {
    final String DEBUG_HEADER = "getMismatchedParentChildren(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "query = " + query);

    Collection<Map<String, String>> mismatchedChildren = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the children in the database with a mismatched parent.
      mismatchedChildren = getMismatchedParentChildren(conn, query);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "mismatchedChildren.size() = " + mismatchedChildren.size());
    return mismatchedChildren;
  }

  /**
   * Provides the children in the database with a mismatched parent.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @param query
   *          A String with the database query to be used.
   * @return a Collection<Map<String, String>> with the mismatched children
   *         sorted by Archival Unit, parent name and child name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  private Collection<Map<String, String>> getMismatchedParentChildren(
      Connection conn, String query) throws DbException {
    final String DEBUG_HEADER = "getMismatchedParentChildren(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "query = " + query);

    Collection<Map<String, String>> mismatchedChildren =
	new ArrayList<Map<String, String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      stmt = dbManager.prepareStatement(conn, query);
      resultSet = dbManager.executeQuery(stmt);

      // Loop through the mismatched children. Each row is copied into a map
      // keyed by the generic result column aliases "col1".."col5".
      while (resultSet.next()) {
	Map<String, String> mismatchedChild = new HashMap<String, String>();

	String col1 = resultSet.getString("col1");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col1 = " + col1);
	mismatchedChild.put("col1", col1);

	String col2 = resultSet.getString("col2");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col2 = " + col2);
	mismatchedChild.put("col2", col2);

	String col3 = resultSet.getString("col3");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col3 = " + col3);
	mismatchedChild.put("col3", col3);

	String col4 = resultSet.getString("col4");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col4 = " + col4);
	mismatchedChild.put("col4", col4);

	String col5 = resultSet.getString("col5");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col5 = " + col5);
	mismatchedChild.put("col5", col5);

	mismatchedChildren.add(mismatchedChild);
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the children with mismatched parents";
      log.error(message, sqle);
      log.error("SQL = '" + query + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "mismatchedChildren.size() = " + mismatchedChildren.size());
    return mismatchedChildren;
  }

  /**
   * Provides the publishers linked to the Archival Unit identifier for the
   * Archival Units in the database with multiple publishers.
*
   * @return a Map<String, Collection<String>> with the publishers keyed by the
   *         Archival Unit identifier.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Map<String, Collection<String>> getAuIdsWithMultiplePublishers()
      throws DbException {
    final String DEBUG_HEADER = "getAuIdsWithMultiplePublishers(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Map<String, Collection<String>> ausPublishers = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the Archival Unit publishers.
      ausPublishers = getAuIdsWithMultiplePublishers(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "ausPublishers.size() = " + ausPublishers.size());
    return ausPublishers;
  }

  /**
   * Provides the publishers linked to the Archival Unit identifier for the
   * Archival Units in the database with multiple publishers.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Map<String, Collection<String>> with the publishers keyed by the
   *         Archival Unit identifier.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Map<String, Collection<String>> getAuIdsWithMultiplePublishers(
      Connection conn) throws DbException {
    final String DEBUG_HEADER = "getAuIdsWithMultiplePublishers(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Map<String, Collection<String>> ausPublishers =
	new TreeMap<String, Collection<String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      // Tracks the Archival Unit identifier of the previous row: grouping
      // relies on rows for the same Archival Unit arriving adjacent to each
      // other.
      String previousAuId = null;

      // Get the Archival Unit publishers.
      stmt = dbManager.prepareStatement(conn,
	  GET_AUS_MULTIPLE_PUBLISHERS_QUERY);

      resultSet = dbManager.executeQuery(stmt);

      // Loop through the Archival Unit publishers.
      while (resultSet.next()) {
	String pluginId = resultSet.getString(PLUGIN_ID_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId);

	String auKey = resultSet.getString(AU_KEY_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey);

	// Rebuild the Archival Unit identifier from its plugin and key parts.
	String auId = PluginManager.generateAuId(pluginId, auKey);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auId = " + auId);

	String publisherName = resultSet.getString(PUBLISHER_NAME_COLUMN);
	if (log.isDebug3())
	  log.debug3(DEBUG_HEADER + "publisherName = " + publisherName);

	if (auId.equals(previousAuId)) {
	  // Same Archival Unit as the previous row: append to its collection.
	  ausPublishers.get(auId).add(publisherName);
	} else {
	  // New Archival Unit: start a fresh collection for it.
	  Collection<String> auPublishers = new ArrayList<String>();
	  auPublishers.add(publisherName);
	  ausPublishers.put(auId, auPublishers);
	  previousAuId = auId;
	}
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the Archival Units publishers";
      log.error(message, sqle);
      log.error("SQL = '" + GET_AUS_MULTIPLE_PUBLISHERS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "ausPublishers.size() = " + ausPublishers.size());
    return ausPublishers;
  }

  /**
   * Provides the metadata items in the database that have no name.
   * 
   * @return a Collection<Map<String, String>> with the unnamed metadata items
   *         sorted by publisher, parent type, parent title and item type.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<Map<String, String>> getUnnamedItems() throws DbException {
    final String DEBUG_HEADER = "getUnnamedItems(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<Map<String, String>> unnamedItems = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the metadata items in the database that have no name.
unnamedItems = getUnnamedItems(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "unnamedItems.size() = " + unnamedItems.size());
    return unnamedItems;
  }

  /**
   * Provides the metadata items in the database that have no name.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Collection<Map<String, String>> with the unnamed metadata items
   *         articles sorted by publisher, parent type, parent title and item
   *         type.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  private Collection<Map<String, String>> getUnnamedItems(Connection conn)
      throws DbException {
    final String DEBUG_HEADER = "getUnnamedItems(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<Map<String, String>> unnamedItems =
	new ArrayList<Map<String, String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      stmt = dbManager.prepareStatement(conn, GET_UNNAMED_ITEMS_QUERY);
      resultSet = dbManager.executeQuery(stmt);

      // Loop through the unnamed items. Each row is copied into a map keyed
      // by the generic result column aliases "col1".."col7".
      while (resultSet.next()) {
	Map<String, String> unnamedItem = new HashMap<String, String>();

	// "col1" is an integer count; convert it to text for the map.
	String col1 = "" + resultSet.getInt("col1");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col1 = " + col1);
	unnamedItem.put("col1", col1);

	String col2 = resultSet.getString("col2");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col2 = " + col2);
	unnamedItem.put("col2", col2);

	String col3 = resultSet.getString("col3");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col3 = " + col3);
	unnamedItem.put("col3", col3);

	String col4 = resultSet.getString("col4");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col4 = " + col4);
	unnamedItem.put("col4", col4);

	String col5 = resultSet.getString("col5");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col5 = " + col5);
	unnamedItem.put("col5", col5);

	String col6 = resultSet.getString("col6");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col6 = " + col6);
	unnamedItem.put("col6", col6);

	String col7 = resultSet.getString("col7");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col7 = " + col7);
	unnamedItem.put("col7", col7);

	unnamedItems.add(unnamedItem);
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the unnamed items";
      log.error(message, sqle);
      log.error("SQL = '" + GET_UNNAMED_ITEMS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "unnamedItems.size() = " + unnamedItems.size());
    return unnamedItems;
  }

  /**
   * Provides the earliest and latest publication dates of all the metadata
   * items included in an Archival Unit.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @param pluginId
   *          A String with the plugin identifier.
   * @param auKey
   *          A String with the Archival Unit key.
   * @return a KeyPair with the earliest and latest publication dates.
   * @throws DbException
   *           if any problem occurred accessing the database.
*/ KeyPair findPublicationDateInterval(Connection conn, String pluginId, String auKey) throws DbException { final String DEBUG_HEADER = "findPublicationDateInterval(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "pluginId = " + pluginId); log.debug2(DEBUG_HEADER + "auKey = " + auKey); } KeyPair publicationInterval = null; PreparedStatement stmt = null; ResultSet resultSet = null; try { stmt = dbManager.prepareStatement(conn, FIND_PUBLICATION_DATE_INTERVAL_QUERY); stmt.setString(1, auKey); stmt.setString(2, pluginId); resultSet = dbManager.executeQuery(stmt); // Get the single result. if (resultSet.next()) { String earliest = resultSet.getString("earliest"); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "earliest = " + earliest); String latest = resultSet.getString("latest"); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "latest = " + latest); // Handle the case where the earliest value is wider than the latest. if (latest.startsWith(earliest)) { latest = earliest; } publicationInterval = new KeyPair(earliest, latest); } } catch (SQLException sqle) { String message = "Cannot find publication date interval"; log.error(message, sqle); log.error("SQL = '" + FIND_PUBLICATION_DATE_INTERVAL_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(stmt); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "publicationInterval = '" + publicationInterval.car + "' - '" + publicationInterval.cdr + "'"); return publicationInterval; } Map<String, Collection<String>> getPublicationsWithMultiplePids() throws DbException { final String DEBUG_HEADER = "getPublicationsWithMultiplePids(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting..."); Map<String, Collection<String>> publicationsPids = null; Connection conn = null; try { // Get a connection to the database. 
conn = dbManager.getConnection();

      publicationsPids = getPublicationsWithMultiplePids(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "publicationsPids.size() = " + publicationsPids.size());
    return publicationsPids;
  }

  /**
   * Provides the proprietary identifiers linked to publications in the
   * database that have more than one of them, keyed by publication name.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Map<String, Collection<String>> with the proprietary identifiers
   *         keyed by the publication name.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Map<String, Collection<String>> getPublicationsWithMultiplePids(
      Connection conn) throws DbException {
    final String DEBUG_HEADER = "getPublicationsWithMultiplePids(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Map<String, Collection<String>> publicationsPids =
	new TreeMap<String, Collection<String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      // Tracks the publication name of the previous row: grouping relies on
      // rows for the same publication arriving adjacent to each other.
      String previousPublicationName = null;

      stmt = dbManager.prepareStatement(conn,
	  GET_PUBLICATIONS_MULTIPLE_PIDS_QUERY);

      resultSet = dbManager.executeQuery(stmt);

      while (resultSet.next()) {
	String publicationName = resultSet.getString(NAME_COLUMN);
	if (log.isDebug3())
	  log.debug3(DEBUG_HEADER + "publicationName = " + publicationName);

	String proprietaryId = resultSet.getString(PROPRIETARY_ID_COLUMN);
	if (log.isDebug3())
	  log.debug3(DEBUG_HEADER + "proprietaryId = " + proprietaryId);

	if (publicationName.equals(previousPublicationName)) {
	  // Same publication as the previous row: append to its collection.
	  publicationsPids.get(publicationName).add(proprietaryId);
	} else {
	  // New publication: start a fresh collection for it.
	  Collection<String> publicationPids = new ArrayList<String>();
	  publicationPids.add(proprietaryId);
	  publicationsPids.put(publicationName, publicationPids);
	  previousPublicationName = publicationName;
	}
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the publications proprietary identifiers";
      log.error(message, sqle);
      log.error("SQL = '" + GET_PUBLICATIONS_MULTIPLE_PIDS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "publicationsPids.size() = " + publicationsPids.size());
    return publicationsPids;
  }

  /**
   * Provides the non-parent metadata items in the database that have no DOI.
   * 
   * @return a Collection<Map<String, String>> with the non-parent metadata
   *         items that have no DOI sorted by publisher, parent type, parent
   *         title and item type.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<Map<String, String>> getNoDoiItems() throws DbException {
    final String DEBUG_HEADER = "getNoDoiItems(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<Map<String, String>> noDoiItems = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the non-parent metadata items in the database that have no DOI.
      noDoiItems = getNoDoiItems(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "noDoiItems.size() = " + noDoiItems.size());
    return noDoiItems;
  }

  /**
   * Provides the non-parent metadata items in the database that have no DOI.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Collection<Map<String, String>> with the non-parent metadata
   *         items that have no DOI sorted by publisher, parent type, parent
   *         title and item type.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  private Collection<Map<String, String>> getNoDoiItems(Connection conn)
      throws DbException {
    final String DEBUG_HEADER = "getNoDoiItems(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<Map<String, String>> noDoiItems =
	new ArrayList<Map<String, String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      stmt = dbManager.prepareStatement(conn, GET_NO_DOI_ITEMS_QUERY);
      resultSet = dbManager.executeQuery(stmt);

      // Loop through the non-parent items with no DOI.
while (resultSet.next()) {
	// Each row is copied into a map keyed by the generic result column
	// aliases "col1".."col7".
	Map<String, String> noDoiItem = new HashMap<String, String>();

	String col1 = resultSet.getString("col1");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col1 = " + col1);
	noDoiItem.put("col1", col1);

	String col2 = resultSet.getString("col2");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col2 = " + col2);
	noDoiItem.put("col2", col2);

	String col3 = resultSet.getString("col3");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col3 = " + col3);
	noDoiItem.put("col3", col3);

	String col4 = resultSet.getString("col4");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col4 = " + col4);
	noDoiItem.put("col4", col4);

	String col5 = resultSet.getString("col5");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col5 = " + col5);
	noDoiItem.put("col5", col5);

	String col6 = resultSet.getString("col6");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col6 = " + col6);
	noDoiItem.put("col6", col6);

	String col7 = resultSet.getString("col7");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col7 = " + col7);
	noDoiItem.put("col7", col7);

	noDoiItems.add(noDoiItem);
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the non-parent items with no DOI";
      log.error(message, sqle);
      log.error("SQL = '" + GET_NO_DOI_ITEMS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "noDoiItems.size() = " + noDoiItems.size());
    return noDoiItems;
  }

  /**
   * Provides the non-parent metadata items in the database that have no Access
   * URL.
   * 
   * @return a Collection<Map<String, String>> with the non-parent metadata
   *         items that have no Access URL sorted by publisher, parent type,
   *         parent title and item type.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<Map<String, String>> getNoAccessUrlItems() throws DbException {
    final String DEBUG_HEADER = "getNoAccessUrlItems(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<Map<String, String>> noAccessUrlItems = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the non-parent metadata items in the database that have no Access
      // URL.
      noAccessUrlItems = getNoAccessUrlItems(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and release
      // the connection.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "noAccessUrlItems.size() = " + noAccessUrlItems.size());
    return noAccessUrlItems;
  }

  /**
   * Provides the non-parent metadata items in the database that have no Access
   * URL.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Collection<Map<String, String>> with the non-parent metadata
   *         items that have no Access URL sorted by publisher, parent type,
   *         parent title and item type.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  private Collection<Map<String, String>> getNoAccessUrlItems(Connection conn)
      throws DbException {
    final String DEBUG_HEADER = "getNoAccessUrlItems(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<Map<String, String>> noAccessUrlItems =
	new ArrayList<Map<String, String>>();

    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      stmt = dbManager.prepareStatement(conn, GET_NO_ACCESS_URL_ITEMS_QUERY);
      resultSet = dbManager.executeQuery(stmt);

      // Loop through the non-parent items with no Access URL.
while (resultSet.next()) {
	// Each row is copied into a map keyed by the generic result column
	// aliases "col1".."col7".
	Map<String, String> noAccessUrlItem = new HashMap<String, String>();

	String col1 = resultSet.getString("col1");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col1 = " + col1);
	noAccessUrlItem.put("col1", col1);

	String col2 = resultSet.getString("col2");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col2 = " + col2);
	noAccessUrlItem.put("col2", col2);

	String col3 = resultSet.getString("col3");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col3 = " + col3);
	noAccessUrlItem.put("col3", col3);

	String col4 = resultSet.getString("col4");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col4 = " + col4);
	noAccessUrlItem.put("col4", col4);

	String col5 = resultSet.getString("col5");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col5 = " + col5);
	noAccessUrlItem.put("col5", col5);

	String col6 = resultSet.getString("col6");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col6 = " + col6);
	noAccessUrlItem.put("col6", col6);

	String col7 = resultSet.getString("col7");
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "col7 = " + col7);
	noAccessUrlItem.put("col7", col7);

	noAccessUrlItems.add(noAccessUrlItem);
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the non-parent items with no Access URL";
      log.error(message, sqle);
      log.error("SQL = '" + GET_NO_ACCESS_URL_ITEMS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER
	+ "noAccessUrlItems.size() = " + noAccessUrlItems.size());
    return noAccessUrlItems;
  }

  /**
   * Deletes an ISSN linked to a publication.
   * 
   * @param mdItemSeq
   *          A Long with the publication metadata identifier.
   * @param issn
   *          A String with the ISSN.
   * @param issnType
   *          A String with the ISSN type.
   * @return a boolean with <code>true</code> if the ISSN was deleted,
   *         <code>false</code> otherwise.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  boolean deletePublicationIssn(Long mdItemSeq, String issn, String issnType)
      throws DbException {
    final String DEBUG_HEADER = "deletePublicationIssn(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    boolean deleted = false;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Delete the ISSN.
      deleted = deletePublicationIssn(conn, mdItemSeq, issn, issnType);

      // This is a write operation: commit it (or roll back on failure).
      DbManager.commitOrRollback(conn, log);
    } finally {
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "deleted = " + deleted);
    return deleted;
  }

  /**
   * Deletes an ISSN linked to a publication.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @param mdItemSeq
   *          A Long with the publication metadata identifier.
   * @param issn
   *          A String with the ISSN.
   * @param issnType
   *          A String with the ISSN type.
   * @return a boolean with <code>true</code> if the ISSN was deleted,
   *         <code>false</code> otherwise.
   * @throws DbException
   *           if any problem occurred accessing the database.
*/ private boolean deletePublicationIssn(Connection conn, Long mdItemSeq, String issn, String issnType) throws DbException { final String DEBUG_HEADER = "deletePublicationIssn(): "; if (log.isDebug2()) { log.debug2(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq); log.debug2(DEBUG_HEADER + "issn = " + issn); log.debug2(DEBUG_HEADER + "issnType = " + issnType); } int deletedCount = -1; PreparedStatement deleteIssn = dbManager.prepareStatement(conn, DELETE_ISSN_QUERY); try { deleteIssn.setLong(1, mdItemSeq); deleteIssn.setString(2, issn); deleteIssn.setString(3, issnType); deletedCount = dbManager.executeUpdate(deleteIssn); } catch (SQLException sqle) { String message = "Cannot delete ISSN"; log.error(message, sqle); log.error("mdItemSeq = '" + mdItemSeq + "'."); log.error("issn = '" + issn + "'."); log.error("issnType = '" + issnType + "'."); log.error("SQL = '" + DELETE_ISSN_QUERY + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseStatement(deleteIssn); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "result = " + (deletedCount > 0)); return deletedCount > 0; } /** * Updates the unknown provider of an archival unit. * * @param conn * A Connection with the database connection to be used. * @param auMdSeq * A Long with the archival unit metadata identifier. * @param providerSeq * A Long with the provider identifier. * @return a boolean with <code>true</code> if the unknown provider was * updated, <code>false</code> otherwise. * @throws DbException * if any problem occurred accessing the database. 
 */
  boolean updateAuUnknownProvider(Connection conn, Long auMdSeq,
      Long providerSeq) throws DbException {
    final String DEBUG_HEADER = "updateAuUnknownProvider(): ";
    if (log.isDebug2()) {
      log.debug2(DEBUG_HEADER + "auMdSeq = " + auMdSeq);
      log.debug2(DEBUG_HEADER + "providerSeq = " + providerSeq);
    }

    int updatedCount = -1;

    // Created outside the try: if preparation fails, the DbException
    // propagates and there is nothing to close yet.
    PreparedStatement updateUnknownProvider =
      dbManager.prepareStatement(conn, UPDATE_AU_MD_UNKNOWN_PROVIDER_QUERY);

    try {
      // Bind the replacement provider and the target AU metadata row.
      updateUnknownProvider.setLong(1, providerSeq);
      updateUnknownProvider.setLong(2, auMdSeq);
      updatedCount = dbManager.executeUpdate(updateUnknownProvider);
      if (log.isDebug3())
	log.debug3(DEBUG_HEADER + "updatedCount = " + updatedCount);
    } catch (SQLException sqle) {
      String message = "Cannot update unknown provider";
      log.error(message, sqle);
      log.error("auMdSeq = '" + auMdSeq + "'.");
      log.error("providerSeq = '" + providerSeq + "'.");
      log.error("SQL = '" + UPDATE_AU_MD_UNKNOWN_PROVIDER_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseStatement(updateUnknownProvider);
    }

    // NOTE(review): no commit here — presumably the caller commits the
    // transaction on the passed-in connection; verify against callers.
    if (log.isDebug2())
      log.debug2(DEBUG_HEADER + "result = " + (updatedCount > 0));
    return updatedCount > 0;
  }

  /**
   * Provides the Archival Units in the database with no metadata items.
   * 
   * @return a Collection<String> with the sorted Archival Unit identifiers.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<String> getNoItemsAuIds() throws DbException {
    final String DEBUG_HEADER = "getNoItemsAuIds(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<String> noItemsAuIds = null;
    Connection conn = null;

    try {
      // Get a connection to the database.
      conn = dbManager.getConnection();

      // Get the identifiers of the Archival Units with no metadata items.
      noItemsAuIds = getNoItemsAuIds(conn);
    } finally {
      // Read-only operation: roll back any implicit transaction and close.
      DbManager.safeRollbackAndClose(conn);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "noItemsAuIds.size() = "
	+ noItemsAuIds.size());
    return noItemsAuIds;
  }

  /**
   * Provides the Archival Units in the database with no metadata items.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
   * @return a Collection<String> with the sorted Archival Unit identifiers.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  Collection<String> getNoItemsAuIds(Connection conn) throws DbException {
    final String DEBUG_HEADER = "getNoItemsAuIds(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "Starting...");

    Collection<String> noItemsAuIds = new ArrayList<String>();
    PreparedStatement stmt = null;
    ResultSet resultSet = null;

    try {
      stmt = dbManager.prepareStatement(conn, GET_NO_ITEMS_AUS_QUERY);
      resultSet = dbManager.executeQuery(stmt);

      // Loop through the Archival Units with no metadata items, rebuilding
      // each AU identifier from its plugin identifier and AU key.
      while (resultSet.next()) {
	String pluginId = resultSet.getString(PLUGIN_ID_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId = " + pluginId);

	String auKey = resultSet.getString(AU_KEY_COLUMN);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey);

	String auId = PluginManager.generateAuId(pluginId, auKey);
	if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auId = " + auId);

	noItemsAuIds.add(auId);
      }
    } catch (SQLException sqle) {
      String message = "Cannot get the Archival Units with no metadata items";
      log.error(message, sqle);
      log.error("SQL = '" + GET_NO_ITEMS_AUS_QUERY + "'.");
      throw new DbException(message, sqle);
    } finally {
      DbManager.safeCloseResultSet(resultSet);
      DbManager.safeCloseStatement(stmt);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "noItemsAuIds.size() = "
	+ noItemsAuIds.size());
    return noItemsAuIds;
  }

  /**
   * Provides the metadata information of an Archival Unit.
   * 
   * @param conn
   *          A Connection with the database connection to be used.
* @param auId * A String with the Archival Unit identifier. * @return a Map<String, Object> with the metadata information of the Archival * Unit. * @throws DbException * if any problem occurred accessing the database. */ Map<String, Object> getAuMetadata(Connection conn, String auId) throws DbException { final String DEBUG_HEADER = "getAuMetadata(): "; if (log.isDebug2()) log.debug2(DEBUG_HEADER + "auId = " + auId); Map<String, Object> result = null; String pluginId = null; String auKey = null; PreparedStatement getAuMetadata = dbManager.prepareStatement(conn, GET_AU_MD_QUERY); ResultSet resultSet = null; try { pluginId = PluginManager.pluginIdFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "pluginId() = " + pluginId); auKey = PluginManager.auKeyFromAuId(auId); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auKey = " + auKey); getAuMetadata.setString(1, pluginId); getAuMetadata.setString(2, auKey); resultSet = dbManager.executeQuery(getAuMetadata); if (resultSet.next()) { result = new HashMap<String, Object>(); Long auMdSeq = resultSet.getLong(AU_MD_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auMdSeq = " + auMdSeq); if (!resultSet.wasNull()) { result.put(AU_MD_SEQ_COLUMN, auMdSeq); } Long auSeq = resultSet.getLong(AU_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "auSeq = " + auSeq); if (!resultSet.wasNull()) { result.put(AU_SEQ_COLUMN, auSeq); } Integer mdVersion = resultSet.getInt(MD_VERSION_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "mdVersion = " + mdVersion); if (!resultSet.wasNull()) { result.put(MD_VERSION_COLUMN, mdVersion); } Long extractTime = resultSet.getLong(EXTRACT_TIME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "extractTime = " + extractTime); if (!resultSet.wasNull()) { result.put(EXTRACT_TIME_COLUMN, extractTime); } Long creationTime = resultSet.getLong(CREATION_TIME_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "creationTime = " + creationTime); if (!resultSet.wasNull()) { 
result.put(CREATION_TIME_COLUMN, creationTime); } Long providerSeq = resultSet.getLong(PROVIDER_SEQ_COLUMN); if (log.isDebug3()) log.debug3(DEBUG_HEADER + "providerSeq = " + providerSeq); if (!resultSet.wasNull()) { result.put(PROVIDER_SEQ_COLUMN, providerSeq); } } } catch (SQLException sqle) { String message = "Cannot get AU extraction time"; log.error(message, sqle); log.error("auId = '" + auId + "'."); log.error("SQL = '" + GET_AU_MD_QUERY + "'."); log.error("pluginId = '" + pluginId + "'."); log.error("auKey = '" + auKey + "'."); throw new DbException(message, sqle); } finally { DbManager.safeCloseResultSet(resultSet); DbManager.safeCloseStatement(getAuMetadata); } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "result = " + result); return result; } }
// modification, are permitted provided that the following conditions are met:
//    documentation and/or other materials provided with the distribution.
//    * Neither the name of the <organization> nor the
//      names of its contributors may be used to endorse or promote products
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL DAVID J. PEARCE BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package wyc.stages;

import java.util.*;

import static wyil.util.SyntaxError.*;
import wyil.ModuleLoader;
import wyil.util.*;
import wyil.lang.*;
import wyc.lang.*;
import wyc.lang.WhileyFile.*;
import wyc.lang.Stmt;
import wyc.lang.Stmt.*;
import wyc.lang.Expr.*;
// NOTE(review): the Ampersand import below appears unused in the visible code;
// confirm before removing.
import wyc.stages.WhileyLexer.Ampersand;
import wyc.util.*;

/**
 * Resolves names used in a Whiley source file against the set of imported
 * packages, rewriting the AST in place (e.g. tagging expressions and types
 * with the module that defines them). Resolution failures are reported as
 * syntax errors against this file.
 */
public class NameResolution {
	// Used to resolve names against the modules visible via imports.
	private final ModuleLoader loader;
	// Name of the file currently being resolved; used in error reporting.
	private String filename;
	// Identifier of the module currently being resolved.
	private ModuleID module;

	public NameResolution(ModuleLoader loader) {
		this.loader = loader;
	}

	/**
	 * Resolves every declaration in the given file. The import list is built
	 * up as declarations are visited, so an import only affects declarations
	 * that follow it.
	 */
	public void resolve(WhileyFile wf) {
		ArrayList<PkgID> imports = new ArrayList<PkgID>();
		module = wf.module;
		filename = wf.filename;
		// Implicit imports: this module itself, its own package, and the
		// whiley.lang standard package.
		imports.add(module.pkg().append(module.module()));
		imports.add(module.pkg().append("*"));
		imports.add(new PkgID(new String[]{"whiley","lang"}).append("*"));

		for(Decl d : wf.declarations) {
			try {
				if(d instanceof ImportDecl) {
					ImportDecl impd = (ImportDecl) d;
					// Later imports take priority over earlier ones.
					imports.add(0,new PkgID(impd.pkg));
				} else if(d instanceof FunDecl) {
					resolve((FunDecl)d,imports);
				} else if(d instanceof TypeDecl) {
					resolve((TypeDecl)d,imports);
				} else if(d instanceof ConstDecl) {
					resolve((ConstDecl)d,imports);
				}
			} catch(ResolveError ex) {
				syntaxError(ex.getMessage(),filename,d);
			}
		}
	}

	/** Resolves a constant declaration; constants see no local variables. */
	protected void resolve(ConstDecl td, ArrayList<PkgID> imports) {
		resolve(td.constant,new HashMap<String,Set<Expr>>(), imports);
	}

	/**
	 * Resolves a type declaration and, if present, its constraint. Within the
	 * constraint the special variable "$" denotes the constrained value.
	 */
	protected void resolve(TypeDecl td, ArrayList<PkgID> imports) throws ResolveError {
		try {
			resolve(td.type, imports);
			if (td.constraint != null) {
				HashMap<String, Set<Expr>> environment = new HashMap<String, Set<Expr>>();
				environment.put("$", Collections.EMPTY_SET);
				// Record fields of the declared type are visible by name
				// inside the constraint, as aliases of "$".
				addExposedNames(new Expr.UnknownVariable("$", td.constraint
						.attribute(Attribute.Source.class),
						new Attributes.Alias(null)), td.type, environment);
				resolve(td.constraint, environment, imports);
			}
		} catch (ResolveError e) {
			// Ok, we've hit a resolution error.
			syntaxError(e.getMessage(), filename, td);
		}
	}

	/**
	 * Resolves a function (or method) declaration: parameter/return/throw
	 * types, pre- and post-conditions, and the statement body.
	 */
	protected void resolve(FunDecl fd, ArrayList<PkgID> imports) {
		HashMap<String,Set<Expr>> environment = new HashMap<String,Set<Expr>>();

		// method parameter types
		for (WhileyFile.Parameter p : fd.parameters) {
			try {
				resolve(p.type, imports);
				environment.put(p.name(),Collections.EMPTY_SET);
			} catch (ResolveError e) {
				// Ok, we've hit a resolution error.
				syntaxError(e.getMessage(), filename, p, e);
			}
		}

		if (fd instanceof MethDecl) {
			MethDecl md = (MethDecl) fd;
			if(md.receiver != null) {
				// Methods with a receiver also see "this".
				environment.put("this",Collections.EMPTY_SET);
			}
		}

		// method return and throw types
		try {
			resolve(fd.ret, imports);
			resolve(fd.throwType, imports);
		} catch (ResolveError e) {
			// Ok, we've hit a resolution error.
			syntaxError(e.getMessage(), filename, fd.ret);
		}

		// method receiver type (if applicable)
		if(fd instanceof MethDecl) {
			MethDecl md = (MethDecl) fd;
			try {
				resolve(md.receiver, imports);
			} catch (ResolveError e) {
				// Ok, we've hit a resolution error.
syntaxError(e.getMessage(),filename,md.receiver); } } if (fd.precondition != null) { resolve(fd.precondition, environment, imports); } if (fd.postcondition != null) { environment.put("$", Collections.EMPTY_SET); resolve(fd.postcondition, environment, imports); environment.remove("$"); } List<Stmt> stmts = fd.statements; for (int i=0;i!=stmts.size();++i) { resolve(stmts.get(i), environment, imports); } } public void resolve(Stmt s, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { try { if(s instanceof Assign) { resolve((Assign)s, environment, imports); } else if(s instanceof Assert) { resolve((Assert)s, environment, imports); } else if(s instanceof Return) { resolve((Return)s, environment, imports); } else if(s instanceof Debug) { resolve((Debug)s, environment, imports); } else if(s instanceof Skip || s instanceof Break) { // do nothing } else if(s instanceof Throw) { resolve((Throw)s, environment, imports); } else if(s instanceof IfElse) { resolve((IfElse)s, environment, imports); } else if(s instanceof Switch) { resolve((Switch)s, environment, imports); } else if(s instanceof While) { resolve((While)s, environment, imports); } else if(s instanceof For) { resolve((For)s, environment, imports); } else if(s instanceof Invoke) { resolve((Invoke)s, environment, imports); } else if(s instanceof Spawn) { resolve((UnOp)s, environment, imports); } else { syntaxError("unknown statement encountered: " + s.getClass().getName(), filename, s); } } catch (ResolveError e) { // Ok, we've hit a resolution error. 
syntaxError(e.getMessage(), filename, s); } } protected void resolve(Assign s, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { if(s.lhs instanceof UnknownVariable) { UnknownVariable v = (UnknownVariable) s.lhs; environment.put(v.var, Collections.EMPTY_SET); s.lhs = new LocalVariable(v.var,v.attributes()); } else if(s.lhs instanceof TupleGen) { TupleGen tg = (TupleGen) s.lhs; for(int i=0;i!=tg.fields.size();++i) { Expr e = tg.fields.get(i); if(e instanceof UnknownVariable) { UnknownVariable v = (UnknownVariable) e; tg.fields.set(i,new LocalVariable(v.var,e.attributes())); environment.put(v.var, Collections.EMPTY_SET); } else { syntaxError("variable expected",filename,e); } } } else { s.lhs = (LVal) resolve(s.lhs, environment, imports); } s.rhs = resolve(s.rhs, environment, imports); } protected void resolve(Assert s, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { s.expr = resolve(s.expr, environment, imports); } protected void resolve(Return s, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { if(s.expr != null) { s.expr = resolve(s.expr, environment, imports); } } protected void resolve(Debug s, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { resolve(s.expr, environment, imports); } protected void resolve(Throw s, HashMap<String, Set<Expr>> environment, ArrayList<PkgID> imports) { s.expr = resolve(s.expr, environment, imports); } protected void resolve(IfElse s, HashMap<String, Set<Expr>> environment, ArrayList<PkgID> imports) { s.condition = resolve(s.condition, environment, imports); for (Stmt st : s.trueBranch) { resolve(st, environment, imports); } if (s.falseBranch != null) { for (Stmt st : s.falseBranch) { resolve(st, environment, imports); } } } protected void resolve(Switch s, HashMap<String, Set<Expr>> environment, ArrayList<PkgID> imports) { s.expr = resolve(s.expr, environment, imports); for(Stmt.Case c : s.cases){ if(c.value != null) { c.value = 
resolve(c.value,environment,imports); } for (Stmt st : c.stmts) { resolve(st, environment, imports); } } } protected void resolve(While s, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { s.condition = resolve(s.condition, environment, imports); if (s.invariant != null) { s.invariant = resolve(s.invariant, environment, imports); } environment = new HashMap<String,Set<Expr>>(environment); for (Stmt st : s.body) { resolve(st, environment, imports); } } protected void resolve(For s, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { s.source = resolve(s.source, environment, imports); if (s.invariant != null) { s.invariant = resolve(s.invariant, environment, imports); } environment = new HashMap<String,Set<Expr>>(environment); for(String var : s.variables) { if (environment.containsKey(var)) { syntaxError("variable " + var + " is alreaded defined", filename, s); } environment.put(var, Collections.EMPTY_SET); } for (Stmt st : s.body) { resolve(st, environment, imports); } } protected Expr resolve(Expr e, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { try { if (e instanceof Constant) { } else if (e instanceof UnknownVariable) { e = resolve((UnknownVariable)e, environment, imports); } else if (e instanceof NaryOp) { e = resolve((NaryOp)e, environment, imports); } else if (e instanceof Comprehension) { e = resolve((Comprehension) e, environment, imports); } else if (e instanceof BinOp) { e = resolve((BinOp)e, environment, imports); } else if (e instanceof Convert) { e = resolve((Convert)e, environment, imports); } else if (e instanceof ListAccess) { e = resolve((ListAccess)e, environment, imports); } else if (e instanceof UnOp) { e = resolve((UnOp)e, environment, imports); } else if (e instanceof Invoke) { e = resolve((Invoke)e, environment, imports); } else if (e instanceof RecordAccess) { e = resolve((RecordAccess) e, environment, imports); } else if (e instanceof RecordGen) { e = resolve((RecordGen) e, environment, 
imports); } else if (e instanceof TupleGen) { e = resolve((TupleGen) e, environment, imports); } else if (e instanceof DictionaryGen) { e = resolve((DictionaryGen) e, environment, imports); } else if(e instanceof TypeConst) { e = resolve((TypeConst) e, environment, imports); } else if(e instanceof FunConst) { e = resolve((FunConst) e, environment, imports); } else { syntaxError("unknown expression encountered: " + e.getClass().getName(), filename, e); } } catch(ResolveError re) { syntaxError(re.getMessage(),filename,e,re); } catch(SyntaxError se) { throw se; } catch(Exception ex) { syntaxError("internal failure", filename, e, ex); } return e; } protected Expr resolve(Invoke ivk, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { for(int i=0;i!=ivk.arguments.size();++i) { Expr e = ivk.arguments.get(i); e = resolve(e, environment, imports); ivk.arguments.set(i,e); } if(!environment.containsKey(ivk.name)) { // only look for non-local function binding if there is not a local // variable with the same name. Expr target = ivk.receiver; if(target != null) { ivk.receiver = resolve(target,environment,imports); try { NameID nid = loader.resolve(ivk.name,imports); ivk.attributes().add(new Attributes.Module(nid.module())); } catch(ResolveError e) { // in this case, we've been unable to resolve the method // being called. However, this does not necessarily indicate // by an indirect function call. } } else { NameID nid = loader.resolve(ivk.name,imports); // Ok, resolve the module for this invoke ivk.attributes().add(new Attributes.Module(nid.module())); } } else if(ivk.receiver != null) { ivk.receiver = resolve(ivk.receiver,environment,imports); } return ivk; } protected Expr resolve(UnknownVariable v, HashMap<String, Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { Set<Expr> aliases = environment.get(v.var); if (aliases == null) { // This variable access may correspond to an external access. 
// Therefore, we must determine which module this // is, and update the tree accordingly. try { NameID nid = loader.resolve(v.var, imports); return new ExternalAccess(nid,v.attributes()); } catch(ResolveError err) { // In this case, we may still be OK as this name could be part // of a dereference expression which will actually form a proper // name. return v; } } else if (aliases.size() == 1) { v.attributes().add(new Attributes.Alias(aliases.iterator().next())); System.out.println("GOT HERE"); } else if (aliases.size() > 1) { syntaxError("ambigous variable name", filename, v); } else { // following signals a local variable return new LocalVariable(v.var,v.attributes()); } return v; } protected Expr resolve(UnOp v, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { v.mhs = resolve(v.mhs, environment, imports); return v; } protected Expr resolve(BinOp v, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { v.lhs = resolve(v.lhs, environment, imports); v.rhs = resolve(v.rhs, environment, imports); return v; } protected Expr resolve(Convert c, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { resolve(c.type, imports); c.expr = resolve(c.expr, environment, imports); return c; } protected Expr resolve(ListAccess v, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) { v.src = resolve(v.src, environment, imports); v.index = resolve(v.index, environment, imports); return v; } protected Expr resolve(NaryOp v, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { for(int i=0;i!=v.arguments.size();++i) { Expr e = v.arguments.get(i); e = resolve(e, environment, imports); v.arguments.set(i,e); } return v; } protected Expr resolve(Comprehension e, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { HashMap<String,Set<Expr>> nenv = new HashMap<String,Set<Expr>>(environment); for(int i=0;i!=e.sources.size();++i) { 
Pair<String,Expr> me = e.sources.get(i); if (environment.containsKey(me.first())) { syntaxError("variable " + me.first() + " is alreaded defined", filename, e); } Expr me_second = resolve(me.second(),nenv,imports); e.sources.set(i, new Pair(me.first(),me_second)); nenv.put(me.first(),Collections.EMPTY_SET); } if(e.value != null) { e.value = resolve(e.value,nenv,imports); } if(e.condition != null) { e.condition = resolve(e.condition,nenv,imports); } return e; } protected Expr resolve(RecordGen sg, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { ArrayList<String> keys = new ArrayList<String>(sg.fields.keySet()); for(String key : keys) { Expr val = sg.fields.get(key); val = resolve(val,environment,imports); sg.fields.put(key,val); } return sg; } protected Expr resolve(TupleGen sg, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { for(int i=0;i!=sg.fields.size();++i) { Expr e = sg.fields.get(i); e = resolve(e,environment,imports); sg.fields.set(i,e); } return sg; } protected Expr resolve(DictionaryGen sg, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { for(int i=0;i!=sg.pairs.size();++i) { Pair<Expr,Expr> e = sg.pairs.get(i); Expr e_first = resolve(e.first(),environment,imports); Expr e_second = resolve(e.second(),environment,imports); sg.pairs.set(i, new Pair(e_first,e_second)); } return sg; } protected Expr resolve(TypeConst tc, HashMap<String,Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { resolve(tc.type,imports); return tc; } protected Expr resolve(FunConst tc, HashMap<String, Set<Expr>> environment, ArrayList<PkgID> imports) throws ResolveError { if (tc.paramTypes != null) { for (UnresolvedType t : tc.paramTypes) { resolve(t, imports); } } NameID nid = loader.resolve(tc.name, imports); tc.attributes().add(new Attributes.Module(nid.module())); return tc; } protected Expr resolve(RecordAccess sg, HashMap<String, Set<Expr>> 
environment, ArrayList<PkgID> imports) throws ResolveError { sg.lhs = resolve(sg.lhs, environment, imports); return sg; } protected void resolve(UnresolvedType t, ArrayList<PkgID> imports) throws ResolveError { if(t instanceof UnresolvedType.List) { UnresolvedType.List lt = (UnresolvedType.List) t; resolve(lt.element,imports); } else if(t instanceof UnresolvedType.Set) { UnresolvedType.Set st = (UnresolvedType.Set) t; resolve(st.element,imports); } else if(t instanceof UnresolvedType.Dictionary) { UnresolvedType.Dictionary st = (UnresolvedType.Dictionary) t; resolve(st.key,imports); resolve(st.value,imports); } else if(t instanceof UnresolvedType.Record) { UnresolvedType.Record tt = (UnresolvedType.Record) t; for(Map.Entry<String,UnresolvedType> e : tt.types.entrySet()) { resolve(e.getValue(),imports); } } else if(t instanceof UnresolvedType.Tuple) { UnresolvedType.Tuple tt = (UnresolvedType.Tuple) t; for(UnresolvedType e : tt.types) { resolve(e,imports); } } else if(t instanceof UnresolvedType.Named) { // This case corresponds to a user-defined type. This will be // defined in some module (possibly ours), and we need to identify // what module that is here, and save it for future use. 
UnresolvedType.Named dt = (UnresolvedType.Named) t; NameID nid = loader.resolve(dt.name, imports); t.attributes().add(new Attributes.Module(nid.module())); } else if(t instanceof UnresolvedType.Existential) { UnresolvedType.Existential dt = (UnresolvedType.Existential) t; t.attributes().add(new Attributes.Module(module)); } else if(t instanceof UnresolvedType.Union) { UnresolvedType.Union ut = (UnresolvedType.Union) t; for(UnresolvedType b : ut.bounds) { resolve(b,imports); } } else if(t instanceof UnresolvedType.Process) { UnresolvedType.Process ut = (UnresolvedType.Process) t; resolve(ut.element,imports); } else if(t instanceof UnresolvedType.Fun) { UnresolvedType.Fun ut = (UnresolvedType.Fun) t; resolve(ut.ret,imports); if(ut.receiver != null) { resolve(ut.receiver,imports); } for(UnresolvedType p : ut.paramTypes) { resolve(p,imports); } } } private static void addExposedNames(Expr src, UnresolvedType t, HashMap<String, Set<Expr>> environment) { // Extended this method to handle lists and sets etc, is very difficult. // The primary problem is that we need to expand expressions involved // names exposed in this way into quantified // expressions. if(t instanceof UnresolvedType.Record) { UnresolvedType.Record tt = (UnresolvedType.Record) t; for(Map.Entry<String,UnresolvedType> e : tt.types.entrySet()) { Expr s = new Expr.RecordAccess(src, e .getKey(), src.attribute(Attribute.Source.class)); addExposedNames(s,e.getValue(),environment); Set<Expr> aliases = environment.get(e.getKey()); if(aliases == null) { aliases = new HashSet<Expr>(); environment.put(e.getKey(),aliases); } aliases.add(s); } } else if (t instanceof UnresolvedType.Process) { UnresolvedType.Process ut = (UnresolvedType.Process) t; addExposedNames(new Expr.UnOp(Expr.UOp.PROCESSACCESS, src), ut.element, environment); } } }
package org.matheusdev.ror.entity; import net.indiespot.continuations.VirtualThread; import org.matheusdev.ror.entity.component.ComponentMovement; import org.matheusdev.util.Dir; import org.matheusdev.util.FloatUtils; import org.matheusdev.util.SpriteAnimation; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Input.Keys; import com.badlogic.gdx.controllers.Controller; import com.badlogic.gdx.controllers.Controllers; import com.badlogic.gdx.graphics.g2d.Sprite; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.physics.box2d.Contact; import com.badlogic.gdx.physics.box2d.Fixture; import com.badlogic.gdx.physics.box2d.Manifold; import de.matthiasmann.continuations.SuspendExecution; /** * @author matheusdev * */ public class EntityPlayer extends Entity { private static final long serialVersionUID = 9012418973465053432L; private final float speed = 16f; private final SpriteAnimation[] walk; private final TextureRegion[] stand; private final Sprite sprite; private final ComponentMovement movement; private final Controller gamepad; /** * @param body * @param sprites */ public EntityPlayer(float x, float y, EntityManager entityManager) { super(createCircularBody(x, y, 0.30f, 0.1f, 0.9f, 1f, entityManager.getPhysics())); uploadAsUserData(body); body.setFixedRotation(true); walk = new SpriteAnimation[4]; walk[Dir.DOWN ] = entityManager.getResources().getAnimation("walking-down"); walk[Dir.LEFT ] = entityManager.getResources().getAnimation("walking-left"); walk[Dir.RIGHT] = entityManager.getResources().getAnimation("walking-right"); walk[Dir.UP ] = entityManager.getResources().getAnimation("walking-up"); stand = new TextureRegion[4]; stand[Dir.DOWN ] = entityManager.getResources().getRegion("standing-down"); stand[Dir.LEFT ] = entityManager.getResources().getRegion("standing-left"); stand[Dir.RIGHT] = entityManager.getResources().getRegion("standing-right"); stand[Dir.UP ] = 
entityManager.getResources().getRegion("standing-up"); sprite = new Sprite(stand[Dir.DOWN]); movement = new ComponentMovement(Dir.DOWN); if (Controllers.getControllers().size == 0) { System.err.println("Couldn't find controllers!"); gamepad = null; } else { gamepad = Controllers.getControllers().get(0); } } /* (non-Javadoc) * @see org.matheusdev.ddm.collision.Collidable#collide(com.badlogic.gdx.physics.box2d.Fixture, com.badlogic.gdx.physics.box2d.Contact, com.badlogic.gdx.physics.box2d.Manifold) */ @Override public void collide(Fixture other, Contact contact, Manifold manifold) { } @Override public void run() throws SuspendExecution { while (true) { long time = VirtualThread.currentThread().getProcessor().getCurrentTime(); float xsteer = 0f; float ysteer = 0f; if (Gdx.input.isKeyPressed(Keys.W)) { ysteer += 1f; } if (Gdx.input.isKeyPressed(Keys.S)) { ysteer -= 1f; } if (Gdx.input.isKeyPressed(Keys.D)) { xsteer += 1f; } if (Gdx.input.isKeyPressed(Keys.A)) { xsteer -= 1f; } float xGamepad = gamepad.getAxis(0); float yGamepad = gamepad.getAxis(1); if (FloatUtils.equalsEpsilon(xGamepad, 0f, 0.1f)) { xGamepad = 0f; } if (FloatUtils.equalsEpsilon(yGamepad, 0f, 0.1f)) { yGamepad = 0f; } // Temporarily disabled: // xsteer = xGamepad; // ysteer = -yGamepad; movement.apply(body, speed, 3f, xsteer, ysteer); for (SpriteAnimation anim : walk) { anim.tick(movement.isMoving() ? 0.016f : 0f); } VirtualThread.wakeupAt(time + 16); } } /* (non-Javadoc) * @see org.matheusdev.ddm.entity.Entity#draw(org.matheusdev.ddm.entity.EntityHandler, com.badlogic.gdx.graphics.g2d.SpriteBatch) */ @Override public void draw(EntityManager manager, SpriteBatch batch) { if (movement.isMoving()) { sprite.setRegion(walk[movement.getDirection()].getCurrentKeyframe()); } else { sprite.setRegion(stand[movement.getDirection()]); } draw(sprite, body, 1f, 0f, 0.2f, batch); } @Override public String toString() { return "Player Entity at " + body.getPosition(); } }
package org.torproject.onionoo; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.regex.Pattern; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; public class ResourceServlet extends HttpServlet { private static final long serialVersionUID = 7236658979947465319L; public void init() { this.readSummaryFile(); } long summaryFileLastModified = 0L; boolean readSummaryFile = false; private String versionLine = null, validAfterLine = null, freshUntilLine = null, relaysPublishedLine = null, bridgesPublishedLine = null; private List<String> relayLines = new ArrayList<String>(), bridgeLines = new ArrayList<String>(); private void readSummaryFile() { File summaryFile = new File("/srv/onionoo/out/summary.json"); if (!summaryFile.exists()) { readSummaryFile = false; return; } if (summaryFile.lastModified() > this.summaryFileLastModified) { this.versionLine = this.validAfterLine = this.freshUntilLine = null; this.relayLines.clear(); this.bridgeLines.clear(); try { BufferedReader br = new BufferedReader(new FileReader( summaryFile)); String line; while ((line = br.readLine()) != null) { if (line.startsWith("{\"version\":")) { this.versionLine = line; } else if (line.startsWith("\"valid_after\":")) { this.validAfterLine = line; } else if (line.startsWith("\"fresh_until\":")) { this.freshUntilLine = line; } else if (line.startsWith("\"relays_published\":")) { this.relaysPublishedLine = line; } else if (line.startsWith("\"bridges_published\":")) { this.bridgesPublishedLine = line; } else if (line.startsWith("\"relays\":")) { while ((line = br.readLine()) != null && !line.equals("],")) { this.relayLines.add(line); } } else if (line.startsWith("\"bridges\":")) { while 
((line = br.readLine()) != null && !line.equals("]}")) { this.bridgeLines.add(line); } } } br.close(); } catch (IOException e) { return; } } this.summaryFileLastModified = summaryFile.lastModified(); this.readSummaryFile = true; } public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { this.readSummaryFile(); if (!this.readSummaryFile) { response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return; } String uri = request.getRequestURI(); String resourceType = null; if (uri.startsWith("/summary/")) { resourceType = "summary"; } else if (uri.startsWith("/details/")) { resourceType = "details"; } else if (uri.startsWith("/bandwidth/")) { resourceType = "bandwidth"; } else { response.sendError(HttpServletResponse.SC_BAD_REQUEST); return; } /* Handle any errors resulting from invalid requests. */ if (uri.equals("/" + resourceType + "/all")) { } else if (uri.equals("/" + resourceType + "/running")) { } else if (uri.equals("/" + resourceType + "/relays")) { } else if (uri.equals("/" + resourceType + "/bridges")) { } else if (uri.startsWith("/" + resourceType + "/search/")) { String searchParameter = this.parseSearchParameter(uri.substring( ("/" + resourceType + "/search/").length())); if (searchParameter == null) { response.sendError(HttpServletResponse.SC_BAD_REQUEST); return; } } else if (uri.startsWith("/" + resourceType + "/lookup/")) { Set<String> fingerprintParameters = this.parseFingerprintParameters( uri.substring(("/" + resourceType + "/lookup/").length())); if (fingerprintParameters == null) { response.sendError(HttpServletResponse.SC_BAD_REQUEST); return; } } else { response.sendError(HttpServletResponse.SC_BAD_REQUEST); return; } /* Set response headers and start writing the response. 
*/ response.setHeader("Access-Control-Allow-Origin", "*"); response.setContentType("application/json"); PrintWriter pw = response.getWriter(); if (uri.equals("/" + resourceType + "/all")) { this.writeHeader(pw); pw.print(this.relaysPublishedLine + "\n"); this.writeAllRelays(pw, resourceType); pw.print(this.bridgesPublishedLine + "\n"); this.writeAllBridges(pw, resourceType); } else if (uri.equals("/" + resourceType + "/running")) { this.writeHeader(pw); pw.print(this.relaysPublishedLine + "\n"); this.writeRunningRelays(pw, resourceType); pw.print(this.bridgesPublishedLine + "\n"); this.writeRunningBridges(pw, resourceType); } else if (uri.equals("/" + resourceType + "/relays")) { this.writeHeader(pw); pw.print(this.relaysPublishedLine + "\n"); this.writeAllRelays(pw, resourceType); pw.print(this.bridgesPublishedLine + "\n"); this.writeNoBridges(pw); } else if (uri.equals("/" + resourceType + "/bridges")) { this.writeHeader(pw); pw.print(this.relaysPublishedLine + "\n"); this.writeNoRelays(pw); pw.print(this.bridgesPublishedLine + "\n"); this.writeAllBridges(pw, resourceType); } else if (uri.startsWith("/" + resourceType + "/search/")) { String searchParameter = this.parseSearchParameter(uri.substring( ("/" + resourceType + "/search/").length())); this.writeHeader(pw); pw.print(this.relaysPublishedLine + "\n"); this.writeMatchingRelays(pw, searchParameter, resourceType); pw.print(this.bridgesPublishedLine + "\n"); this.writeMatchingBridges(pw, searchParameter, resourceType); } else if (uri.startsWith("/" + resourceType + "/lookup/")) { Set<String> fingerprintParameters = this.parseFingerprintParameters( uri.substring(("/" + resourceType + "/lookup/").length())); this.writeHeader(pw); pw.print(this.relaysPublishedLine + "\n"); this.writeRelaysWithFingerprints(pw, fingerprintParameters, resourceType); pw.print(this.bridgesPublishedLine + "\n"); this.writeBridgesWithFingerprints(pw, fingerprintParameters, resourceType); } pw.flush(); pw.close(); } private static 
Pattern searchParameterPattern = Pattern.compile("^\\$?[0-9a-fA-F]{1,40}$|^[0-9a-zA-Z\\.]{1,19}$"); private String parseSearchParameter(String parameter) { if (!searchParameterPattern.matcher(parameter).matches()) { return null; } return parameter; } private static Pattern fingerprintParameterPattern = Pattern.compile("^\\$[0-9a-zA-Z]{1,40}$"); private Set<String> parseFingerprintParameters(String parameter) { if (!fingerprintParameterPattern.matcher(parameter).matches()) { return null; } Set<String> parsedFingerprints = new HashSet<String>(); if (parameter.length() != 40) { return null; } parsedFingerprints.add(parameter); return parsedFingerprints; } private void writeHeader(PrintWriter pw) { pw.print(this.versionLine + "\n"); pw.print(this.validAfterLine + "\n"); pw.print(this.freshUntilLine + "\n"); } private void writeAllRelays(PrintWriter pw, String resourceType) { pw.print("\"relays\":["); int written = 0; for (String line : this.relayLines) { String lines = this.getFromSummaryLine(line, resourceType); if (lines.length() > 0) { pw.print((written++ > 0 ? ",\n" : "\n") + lines); } } pw.print("],\n"); } private void writeRunningRelays(PrintWriter pw, String resourceType) { pw.print("\"relays\":["); int written = 0; for (String line : this.relayLines) { if (line.contains("\"r\":true")) { String lines = this.getFromSummaryLine(line, resourceType); if (lines.length() > 0) { pw.print((written++ > 0 ? ",\n" : "\n") + lines); } } } pw.print("\n],\n"); } private void writeNoRelays(PrintWriter pw) { pw.print("\"relays\":[\n"); pw.print("],\n"); } private void writeMatchingRelays(PrintWriter pw, String searchTerm, String resourceType) { pw.print("\"relays\":["); int written = 0; for (String line : this.relayLines) { boolean lineMatches = false; if (searchTerm.startsWith("$")) { /* Search is for $-prefixed fingerprint. */ if (line.contains("\"f\":\"" + searchTerm.substring(1).toUpperCase())) { /* $-prefixed fingerprint matches. 
*/ lineMatches = true; } } else if (line.toLowerCase().contains("\"n\":\"" + searchTerm.toLowerCase())) { /* Nickname matches. */ lineMatches = true; } else if ("unnamed".startsWith(searchTerm.toLowerCase()) && line.startsWith("{\"f\":")) { /* Nickname "Unnamed" matches. */ lineMatches = true; } else if (line.contains("\"f\":\"" + searchTerm.toUpperCase())) { /* Non-$-prefixed fingerprint matches. */ lineMatches = true; } else if (line.substring(line.indexOf("\"a\":[")).contains("\"" + searchTerm.toLowerCase())) { /* Address matches. */ lineMatches = true; } if (lineMatches) { String lines = this.getFromSummaryLine(line, resourceType); if (lines.length() > 0) { pw.print((written++ > 0 ? ",\n" : "\n") + lines); } } } pw.print("\n],\n"); } private void writeRelaysWithFingerprints(PrintWriter pw, Set<String> fingerprints, String resourceType) { pw.print("\"relays\":["); int written = 0; for (String line : this.relayLines) { for (String fingerprint : fingerprints) { if (line.contains("\"f\":\"" + fingerprint.toUpperCase() + "\",")) { String lines = this.getFromSummaryLine(line, resourceType); if (lines.length() > 0) { pw.print((written++ > 0 ? ",\n" : "\n") + lines); } break; } } } pw.print("\n],\n"); } private void writeAllBridges(PrintWriter pw, String resourceType) { pw.print("\"bridges\":["); int written = 0; for (String line : this.bridgeLines) { String lines = this.getFromSummaryLine(line, resourceType); if (lines.length() > 0) { pw.print((written++ > 0 ? ",\n" : "\n") + lines); } } pw.print("\n]}\n"); } private void writeRunningBridges(PrintWriter pw, String resourceType) { pw.print("\"bridges\":["); int written = 0; for (String line : this.bridgeLines) { if (line.contains("\"r\":true")) { String lines = this.getFromSummaryLine(line, resourceType); if (lines.length() > 0) { pw.print((written++ > 0 ? 
",\n" : "\n") + lines); } } } pw.print("\n]}\n"); } private void writeNoBridges(PrintWriter pw) { pw.print("\"bridges\":[\n"); pw.print("]}\n"); } private void writeMatchingBridges(PrintWriter pw, String searchTerm, String resourceType) { if (searchTerm.startsWith("$")) { searchTerm = searchTerm.substring(1); } pw.print("\"bridges\":["); int written = 0; for (String line : this.bridgeLines) { if (line.contains("\"h\":\"" + searchTerm.toUpperCase())) { String lines = this.getFromSummaryLine(line, resourceType); if (lines.length() > 0) { pw.print((written++ > 0 ? ",\n" : "\n") + lines); } } } pw.print("\n]}\n"); } private void writeBridgesWithFingerprints(PrintWriter pw, Set<String> fingerprints, String resourceType) { pw.print("\"bridges\":["); int written = 0; for (String line : this.bridgeLines) { for (String fingerprint : fingerprints) { if (line.contains("\"h\":\"" + fingerprint.toUpperCase() + "\",")) { String lines = this.getFromSummaryLine(line, resourceType); if (lines.length() > 0) { pw.print((written++ > 0 ? ",\n" : "\n") + lines); } break; } } } pw.print("\n]}\n"); } private String getFromSummaryLine(String summaryLine, String resourceType) { if (resourceType.equals("summary")) { return this.writeSummaryLine(summaryLine); } else if (resourceType.equals("details")) { return this.writeDetailsLines(summaryLine); } else if (resourceType.equals("bandwidth")) { return this.writeBandwidthLines(summaryLine); } else { return ""; } } private String writeSummaryLine(String summaryLine) { return (summaryLine.endsWith(",") ? 
summaryLine.substring(0, summaryLine.length() - 1) : summaryLine); } private String writeDetailsLines(String summaryLine) { String fingerprint = null; if (summaryLine.contains("\"f\":\"")) { fingerprint = summaryLine.substring(summaryLine.indexOf( "\"f\":\"") + "\"f\":\"".length()); } else if (summaryLine.contains("\"h\":\"")) { fingerprint = summaryLine.substring(summaryLine.indexOf( "\"h\":\"") + "\"h\":\"".length()); } else { return ""; } fingerprint = fingerprint.substring(0, 40); File detailsFile = new File("/srv/onionoo/out/details/" + fingerprint); StringBuilder sb = new StringBuilder(); String detailsLines = null; if (detailsFile.exists()) { try { BufferedReader br = new BufferedReader(new FileReader( detailsFile)); String line = br.readLine(); if (line != null) { sb.append("{"); while ((line = br.readLine()) != null) { sb.append(line + "\n"); } } br.close(); detailsLines = sb.toString(); if (detailsLines.length() > 1) { detailsLines = detailsLines.substring(0, detailsLines.length() - 1); } } catch (IOException e) { } } if (detailsLines != null) { return detailsLines; } else { return ""; } } private String writeBandwidthLines(String summaryLine) { String fingerprint = null; if (summaryLine.contains("\"f\":\"")) { fingerprint = summaryLine.substring(summaryLine.indexOf( "\"f\":\"") + "\"f\":\"".length()); } else if (summaryLine.contains("\"h\":\"")) { fingerprint = summaryLine.substring(summaryLine.indexOf( "\"h\":\"") + "\"h\":\"".length()); } else { return ""; } fingerprint = fingerprint.substring(0, 40); File detailsFile = new File("/srv/onionoo/out/bandwidth/" + fingerprint); StringBuilder sb = new StringBuilder(); String bandwidthLines = null; if (detailsFile.exists()) { try { BufferedReader br = new BufferedReader(new FileReader( detailsFile)); String line; while ((line = br.readLine()) != null) { sb.append(line + "\n"); } br.close(); bandwidthLines = sb.toString(); } catch (IOException e) { } } if (bandwidthLines != null) { bandwidthLines = 
bandwidthLines.substring(0, bandwidthLines.length() - 1); return bandwidthLines; } else { return ""; } } }
package zab.atomics.bag; import java.util.concurrent.atomic.AtomicLong; /** * An atomic, lock-free and wait-free storage of items. It can store up to 32 items. * * The bag may keep references to up to 32 items after they have been removed. Use it * only for objects you do not expect to be garbage-collected. * * @author zlatinb * * @param <T> type of the items stored */ public class AtomicBag<T> { private static final int FREE = 0; private static final int CLAIM = 1; private static final int FULL = 2; private final Object[] storage = new Object[32]; private final AtomicLong state = new AtomicLong(); private static long freeMask(int i) { long mask = 1 << (i << 1); mask |= (mask << 1); return ~mask; } private static long free(long state, int i) { return state & freeMask(i); } private static long claim(long state, int i) { long freed = free(state,i); return freed | (CLAIM << (i <<1)); } private static long full(long state, int i) { long freed = free(state,i); return freed | (FULL << (i <<1)); } private static int get(long state, final int i) { state &= (~freeMask(i)); return (int)(state >>> (i << 1)); } /** * @param item to store * @return true if stored, false if there was no space. 
*/ public boolean store(T item) { // find a free slot int slot; while(true) { final long s = state.get(); slot = -1; for (int i = 0; i < 32; i++) { if (get(s,i) != FREE) continue; slot = i; break; } if (slot < 0) return false; // try to claim it long claimState = claim(s,slot); if (state.compareAndSet(s,claimState)) break; } // write storage[slot] = item; while(true) { final long s = state.get(); long fullState = full(s,slot); if (state.compareAndSet(s,fullState)) return true; } } /** * @return number of items in the bag */ public int size() { final long s = state.get(); int size = 0; for (int i = 0; i < 32; i++) { if (get(s,i) == FULL) size++; } return size; } /** * @return an arbitrary item from the bag, null if empty */ @SuppressWarnings("unchecked") public T get() { while(true) { final long s = state.get(); int slot = -1; for (int i = 0; i < 32; i++) { if (get(s,i) != FULL) continue; slot = i; break; } if (slot == -1) return null; T item = (T)storage[slot]; long newState = free(s,slot); if (state.compareAndSet(s,newState)) return item; } } /** * Puts the items currently in the bag in the destination array, in arbitrary order. * More efficient than calling get() repeatedly. * * @param dest to store items * @return number of items stored */ @SuppressWarnings("unchecked") public int get(T[] dest) { while(true) { final long s = state.get(); long newState = s; int idx = 0; for(int i = 0; i < 32; i++) { if (get(s,i) != FULL) continue; dest[idx++] = (T)storage[i]; newState = free(newState,i); } if (idx == 0) return 0; if (state.compareAndSet(s,newState)) return idx; } } }
package pp.block5.cc.test; import org.antlr.v4.runtime.tree.ParseTree; import org.junit.Test; import pp.block5.cc.ParseException; import pp.block5.cc.SimplePascalCompiler; import pp.block5.cc.simple.Result; import pp.block5.cc.simple.Type; import java.io.File; import java.io.IOException; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; @SuppressWarnings("javadoc") public class SimpleCheckerTest { private final static String BASE_DIR = "block-5/pp/block5/cc/sample"; private final static String EXT = ".pascal"; private final SimplePascalCompiler compiler = SimplePascalCompiler .instance(); @Test public void testBasic() throws IOException, ParseException { ParseTree tree = parse("basic"); Result result = check(tree); ParseTree body = tree.getChild(3).getChild(1); ParseTree assX = body.getChild(1); assertEquals(0, result.getOffset(assX.getChild(0))); assertEquals(Type.INT, result.getType(assX.getChild(0))); assertEquals(Type.INT, result.getType(assX.getChild(0))); assertEquals(Type.INT, result.getType(assX.getChild(2))); assertEquals(assX.getChild(2), result.getEntry(body)); assertEquals(assX.getChild(2), result.getEntry(assX)); } @Test public void testGCD() throws IOException, ParseException { check(parse("gcd")); } @Test public void testPrime() throws IOException, ParseException { check(parse("prime")); } @Test public void testTypeErr() throws IOException { checkFail("typeErr1"); checkFail("typeErr2"); } private void checkFail(String filename) throws IOException { try { check(parse(filename)); fail(filename + " shouldn't check but did"); } catch (ParseException exc) { // this is the expected behaviour } } private ParseTree parse(String filename) throws IOException, ParseException { return this.compiler.parse(new File(BASE_DIR, filename + EXT)); } private Result check(ParseTree tree) throws ParseException { return this.compiler.check(tree); } }