answer
stringlengths
17
10.2M
package mrtim.sasscompiler; import mrtim.sasscompiler.expr.ExpressionValue; import mrtim.sasscompiler.expr.StringExpressionValue; import mrtim.sasscompiler.grammar.SassParser; import org.antlr.v4.runtime.tree.ParseTree; import java.util.*; public class MixinScopeInitialiser { private final SassParser.Parameter_def_listContext mixinDefParameters; private final SassParser.Parameter_listContext includeParameters; public MixinScopeInitialiser(SassParser.Parameter_def_listContext mixinDefParameters, SassParser.Parameter_listContext includeParameters) { this.mixinDefParameters = mixinDefParameters; this.includeParameters = includeParameters; } public void initialiseScope(Scope scope) { List<String> paramNames = positionalParamNames(mixinDefParameters.variable_def()); int positionalIndex = 0; Map<String, ExpressionValue> evaluatedParams = new HashMap<>(); if (includeParameters != null) { for (SassParser.ParameterContext includeParam: includeParameters.parameter()) { if (isKeywordParam(includeParam)) { String paramName = includeParam.named_parameter().VARIABLE().getText(); paramNames.remove(paramName); evaluatedParams.put(paramName, extractExpressionValue(includeParam, scope)); } } for (SassParser.ParameterContext includeParam: includeParameters.parameter()) { if (!isKeywordParam(includeParam)) { evaluatedParams.put(paramNames.get(positionalIndex++), extractExpressionValue(includeParam, scope)); } } } for (SassParser.Variable_defContext variableDef: mixinDefParameters.variable_def()) { String variableName = variableDef.VARIABLE().getText(); if (!evaluatedParams.containsKey(variableName)) { evaluatedParams.put(variableName, evaluateInScope(scope, variableDef.expression_list())); } } for (Map.Entry<String, ExpressionValue> e: evaluatedParams.entrySet()) { scope.define(e.getKey(), e.getValue()); } } private boolean isKeywordParam(SassParser.ParameterContext includeParam) { return includeParam.named_parameter() != null; } private List<String> positionalParamNames(List<SassParser.Variable_defContext> variableDefs) { List<String> paramNames = new ArrayList<>(); for (SassParser.Variable_defContext variableDef: variableDefs) { paramNames.add(variableDef.VARIABLE().getText()); } return paramNames; } private ExpressionValue extractExpressionValue(SassParser.ParameterContext includeParam, Scope scope) { if (includeParam.IDENTIFIER() != null) { return new StringExpressionValue(includeParam.IDENTIFIER().getText()); } else if (includeParam.value() != null) { return evaluateInScope(scope, includeParam.value()); } else if (includeParam.named_parameter() != null) { return evaluateInScope(scope, includeParam.named_parameter().expression_list()); } else { throw new IllegalArgumentException("Could not extract parameter value from ParameterContext: " + includeParam.getText()); } } private ExpressionValue evaluateInScope(Scope scope, ParseTree expression) { return new ExpressionVisitor(scope).visit(expression); } }
package com.netflix.genie.client; import com.netflix.genie.common.dto.Cluster; import com.netflix.genie.common.dto.ClusterStatus; import com.netflix.genie.common.dto.Command; import com.netflix.genie.common.dto.CommandStatus; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; /** * Integration Tests for Cluster Client. * * @author amsharma */ public class ClusterClientIntegrationTests extends GenieClientsIntegrationTestsBase { private static ClusterClient clusterClient; private static CommandClient commandClient; /** * Setup for tests. * * @throws Exception If there is an error. */ @Before public void setup() throws Exception { clusterClient = new ClusterClient(getBaseUrl()); commandClient = new CommandClient(getBaseUrl()); } /** * Delete all clusters and commands between tests. * * @throws Exception If there is any problem. */ @After public void cleanUp() throws Exception { clusterClient.deleteAllClusters(); commandClient.deleteAllCommands(); } /** * Integration test to get all applications from Genie. * * @throws Exception If there is any problem. */ @Test public void testCanCreateAndGetCluster() throws Exception { final String id = UUID.randomUUID().toString(); final Cluster cluster = constructClusterDTO(id); final String clusterId = clusterClient.createCluster(cluster); Assert.assertEquals(clusterId, id); // Make sure Genie Not Found Exception is not thrown for this call. final Cluster cstr = clusterClient.getCluster(id); // Make sure the object returned is exactly what was sent to be created Assert.assertEquals(cluster.getId(), cstr.getId()); Assert.assertEquals(cluster.getName(), cstr.getName()); Assert.assertEquals(cluster.getDescription(), cstr.getDescription()); Assert.assertEquals(cluster.getConfigs(), cstr.getConfigs()); Assert.assertEquals(cluster.getSetupFile(), cstr.getSetupFile()); Assert.assertEquals(cstr.getTags().contains("foo"), true); Assert.assertEquals(cstr.getTags().contains("bar"), true); Assert.assertEquals(cluster.getStatus(), cstr.getStatus()); } /** * Test getting the clusters using the various query parameters. * * @throws Exception If there is any problem. 
*/ @Test public void testGetClustersUsingParams() throws Exception { final String cluster1Id = UUID.randomUUID().toString(); final String cluster2Id = UUID.randomUUID().toString(); final Set<String> cluster1Tags = new HashSet<>(); cluster1Tags.add("foo"); cluster1Tags.add("pi"); final Set<String> cluster2Tags = new HashSet<>(); cluster2Tags.add("bar"); cluster2Tags.add("pi"); final Cluster cluster1 = new Cluster.Builder("cluster1name", "cluster1user", "1.0", ClusterStatus.UP) .withId(cluster1Id) .withTags(cluster1Tags) .build(); final Cluster cluster2 = new Cluster.Builder("cluster2name", "cluster2user", "2.0", ClusterStatus.OUT_OF_SERVICE) .withId(cluster2Id) .withTags(cluster2Tags) .build(); clusterClient.createCluster(cluster1); clusterClient.createCluster(cluster2); // Test get by tags List<Cluster> clusterList = clusterClient.getClusters( null, null, Arrays.asList("foo"), null, null ); Assert.assertEquals(1, clusterList.size()); Assert.assertEquals(cluster1Id, clusterList.get(0).getId()); clusterList = clusterClient.getClusters( null, null, Arrays.asList("pi"), null, null ); Assert.assertEquals(2, clusterList.size()); Assert.assertEquals(cluster2Id, clusterList.get(0).getId()); Assert.assertEquals(cluster1Id, clusterList.get(1).getId()); // Test get by name clusterList = clusterClient.getClusters( "cluster1name", null, null, null, null ); Assert.assertEquals(1, clusterList.size()); // Test get by status clusterList = clusterClient.getClusters( null, Arrays.asList(ClusterStatus.UP.toString()), null, null, null ); Assert.assertEquals(1, clusterList.size()); clusterList = clusterClient.getClusters( null, Arrays.asList(ClusterStatus.UP.toString(), ClusterStatus.OUT_OF_SERVICE.toString()), null, null, null ); Assert.assertEquals(2, clusterList.size()); } /** * Test to confirm getting an exception for non existent cluster. * * @throws Exception If there is a problem. */ @Test(expected = IOException.class) public void testClusterNotExist() throws Exception { clusterClient.getCluster("foo"); } /** * Test get all clusters. * * @throws Exception If there is problem. */ @Test public void testGetAllAndDeleteAllClusters() throws Exception { final List<Cluster> initialClusterList = clusterClient.getClusters(); Assert.assertEquals(initialClusterList.size(), 0); final Cluster cluster1 = constructClusterDTO(null); final Cluster cluster2 = constructClusterDTO(null); clusterClient.createCluster(cluster1); clusterClient.createCluster(cluster2); final List<Cluster> finalClusterList = clusterClient.getClusters(); Assert.assertEquals(finalClusterList.size(), 2); Assert.assertEquals(cluster1.getId(), finalClusterList.get(1).getId()); Assert.assertEquals(cluster2.getId(), finalClusterList.get(0).getId()); clusterClient.deleteAllClusters(); Assert.assertEquals(clusterClient.getClusters().size(), 0); } /** * Test whether we can delete a cluster in Genie. * * @throws Exception If there is any problem. */ @Test(expected = IOException.class) public void testDeleteCluster() throws Exception { final Cluster cluster1 = constructClusterDTO(null); clusterClient.createCluster(cluster1); final Cluster cluster2 = clusterClient.getCluster(cluster1.getId()); Assert.assertEquals(cluster2.getId(), cluster1.getId()); clusterClient.deleteCluster(cluster1.getId()); clusterClient.getCluster(cluster1.getId()); } /** * Test to verify if the update cluster method is working correctly. * * @throws Exception If there is any problem. 
*/ @Test public void testUpdateCluster() throws Exception { final Cluster cluster1 = constructClusterDTO(null); clusterClient.createCluster(cluster1); final Cluster cluster2 = clusterClient.getCluster(cluster1.getId()); Assert.assertEquals(cluster2.getName(), cluster1.getName()); final Cluster cluster3 = new Cluster.Builder("newname", "newuser", "new version", ClusterStatus.OUT_OF_SERVICE) .withId(cluster1.getId()) .build(); clusterClient.updateCluster(cluster1.getId(), cluster3); final Cluster cluster4 = clusterClient.getCluster(cluster1.getId()); Assert.assertEquals("newname", cluster4.getName()); Assert.assertEquals("newuser", cluster4.getUser()); Assert.assertEquals("new version", cluster4.getVersion()); Assert.assertEquals(ClusterStatus.OUT_OF_SERVICE, cluster4.getStatus()); Assert.assertEquals(null, cluster4.getSetupFile()); Assert.assertEquals(null, cluster4.getDescription()); Assert.assertEquals(Collections.emptySet(), cluster4.getConfigs()); Assert.assertEquals(cluster4.getTags().contains("foo"), false); } /** * Test all the methods that manipulate tags for a cluster in genie. * * @throws Exception If there is any problem. */ @Test public void testTagsMethods() throws Exception { final Set<String> initialTags = new HashSet<>(); initialTags.add("foo"); initialTags.add("bar"); final Set<String> configList = new HashSet<>(); configList.add("config1"); configList.add("configs2"); final Cluster cluster = new Cluster.Builder("name", "user", "1.0", ClusterStatus.UP) .withId("cluster1") .withDescription("client Test") .withSetupFile("path to set up file") .withTags(initialTags) .withConfigs(configList) .build(); clusterClient.createCluster(cluster); // Test getTags for cluster Set<String> tags = clusterClient.getTagsForCluster("cluster1"); Assert.assertEquals(4, tags.size()); Assert.assertEquals(tags.contains("foo"), true); Assert.assertEquals(tags.contains("bar"), true); // Test adding a tag for cluster Set<String> moreTags = new HashSet<>(); moreTags.add("pi"); clusterClient.addTagsToCluster("cluster1", moreTags); tags = clusterClient.getTagsForCluster("cluster1"); Assert.assertEquals(5, tags.size()); Assert.assertEquals(tags.contains("foo"), true); Assert.assertEquals(tags.contains("bar"), true); Assert.assertEquals(tags.contains("pi"), true); // Test removing a tag for cluster clusterClient.removeTagFromCluster("cluster1", "bar"); tags = clusterClient.getTagsForCluster("cluster1"); Assert.assertEquals(4, tags.size()); Assert.assertEquals(tags.contains("foo"), true); Assert.assertEquals(tags.contains("pi"), true); // Test update tags for a cluster clusterClient.updateTagsForCluster("cluster1", initialTags); tags = clusterClient.getTagsForCluster("cluster1"); Assert.assertEquals(4, tags.size()); Assert.assertEquals(tags.contains("foo"), true); Assert.assertEquals(tags.contains("bar"), true); // Test delete all tags in a cluster clusterClient.removeAllTagsForCluster("cluster1"); tags = clusterClient.getTagsForCluster("cluster1"); Assert.assertEquals(2, tags.size()); } /** * Test all the methods that manipulate configs for a cluster in genie. * * @throws Exception If there is any problem. 
*/ @Test public void testConfigsMethods() throws Exception { final Set<String> initialConfigs = new HashSet<>(); initialConfigs.add("foo"); initialConfigs.add("bar"); final Cluster cluster = new Cluster.Builder("name", "user", "1.0", ClusterStatus.UP) .withId("cluster1") .withDescription("client Test") .withSetupFile("path to set up file") .withConfigs(initialConfigs) .build(); clusterClient.createCluster(cluster); // Test getConfigs for cluster Set<String> configs = clusterClient.getConfigsForCluster("cluster1"); Assert.assertEquals(2, configs.size()); Assert.assertEquals(configs.contains("foo"), true); Assert.assertEquals(configs.contains("bar"), true); // Test adding a config for cluster Set<String> moreConfigs = new HashSet<>(); moreConfigs.add("pi"); clusterClient.addConfigsToCluster("cluster1", moreConfigs); configs = clusterClient.getConfigsForCluster("cluster1"); Assert.assertEquals(3, configs.size()); Assert.assertEquals(configs.contains("foo"), true); Assert.assertEquals(configs.contains("bar"), true); Assert.assertEquals(configs.contains("pi"), true); // Test update configs for a cluster clusterClient.updateConfigsForCluster("cluster1", initialConfigs); configs = clusterClient.getConfigsForCluster("cluster1"); Assert.assertEquals(2, configs.size()); Assert.assertEquals(configs.contains("foo"), true); Assert.assertEquals(configs.contains("bar"), true); // Test delete all configs in a cluster clusterClient.removeAllConfigsForCluster("cluster1"); configs = clusterClient.getConfigsForCluster("cluster1"); Assert.assertEquals(0, configs.size()); } /** * Test all the methods that manipulate commands for a cluster in genie. * * @throws Exception If there is any problem. */ @Test public void testCommandsMethods() throws Exception { final Command foo = new Command.Builder( "name", "user", "version", CommandStatus.ACTIVE, "exec", 5 ).withId("foo") .build(); commandClient.createCommand(foo); final Command bar = new Command.Builder( "name", "user", "version", CommandStatus.ACTIVE, "exec", 5 ).withId("bar") .build(); commandClient.createCommand(bar); final Command pi = new Command.Builder( "name", "user", "version", CommandStatus.ACTIVE, "exec", 5 ).withId("pi") .build(); commandClient.createCommand(pi); final Cluster cluster = new Cluster.Builder("name", "user", "1.0", ClusterStatus.UP) .withId("cluster1") .withDescription("client Test") .withSetupFile("path to set up file") .build(); clusterClient.createCluster(cluster); // Test add Commands to cluster final List<String> initialCommands = new ArrayList<>(); initialCommands.add("foo"); initialCommands.add("bar"); initialCommands.add("pi"); clusterClient.addCommandsToCluster("cluster1", initialCommands); List<Command> commands = clusterClient.getCommandsForCluster("cluster1"); Assert.assertEquals(3, commands.size()); Assert.assertEquals("foo", commands.get(0).getId()); Assert.assertEquals("bar", commands.get(1).getId()); Assert.assertEquals("pi", commands.get(2).getId()); // Test removing a command for cluster clusterClient.removeCommandFromCluster("cluster1", "pi"); commands = clusterClient.getCommandsForCluster("cluster1"); Assert.assertEquals(2, commands.size()); Assert.assertEquals("foo", commands.get(0).getId()); Assert.assertEquals("bar", commands.get(1).getId()); final List<String> updatedCommands = new ArrayList<>(); updatedCommands.add("foo"); updatedCommands.add("pi"); // Test update commands for a cluster clusterClient.updateCommandsForCluster("cluster1", updatedCommands); commands = clusterClient.getCommandsForCluster("cluster1"); 
Assert.assertEquals(2, commands.size()); Assert.assertEquals("foo", commands.get(0).getId()); Assert.assertEquals("pi", commands.get(1).getId()); // Test delete all commands in a cluster clusterClient.removeAllTagsForCluster("cluster1"); commands = clusterClient.getCommandsForCluster("cluster1"); Assert.assertEquals(0, commands.size()); } }
package net.catchpole.B9.math; import net.catchpole.B9.spacial.Location; public class DistanceCalculator { public double degreesDistance(Location location, Location target) { target = getWrappedLocation(location, target); return Math.sqrt(Math.pow((location.getLatitude() - target.getLatitude()), 2.0) + Math.pow((location.getLongitude() - target.getLongitude()), 2.0)); } public boolean isWithinDistance(Location location, Location target, double meters) { return distanceMeters(location, target) <= meters; } public double metersToDegreesLatitude(double meters, Location location) { return 1.0D / (distanceMeters(location, new Location(location.getLatitude()+1.0D, location.getLongitude())) / meters); } public double metersToDegreesLongitude(double meters, Location location) { return 1.0D / (distanceMeters(location, new Location(location.getLatitude(), location.getLongitude() + 1.0D)) / meters); } public double distanceMeters(Location location1, Location location2) { double latitude1 = location1.getLatitude(); double latitude2 = location2.getLatitude(); double longitude1 = location1.getLongitude(); double longitude2 = location2.getLongitude(); double elevation1 = location1.getAltitude(); double elevation2 = location2.getAltitude(); double latDistance = Math.toRadians(latitude2 - latitude1); double lonDistance = Math.toRadians(longitude2 - longitude1); double a = Math.sin(latDistance / 2) * Math.sin(latDistance / 2) + Math.cos(Math.toRadians(latitude1)) * Math.cos(Math.toRadians(latitude2)) * Math.sin(lonDistance / 2) * Math.sin(lonDistance / 2); double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)); double distanceMeters = HeadingCalculator.EARTH_RADIUS * c * 1000; double height = elevation1 - elevation2; return Math.sqrt(Math.pow(distanceMeters, 2) + Math.pow(height, 2)); } public Location getWrappedLocation(Location location, Location target) { boolean lat = (location.getLatitude() - target.getLatitude()) > 180.0d || (location.getLatitude() - target.getLatitude()) < -180.d; boolean lon = (location.getLongitude() - target.getLongitude()) > 180.0d || (location.getLongitude() - target.getLongitude()) < -180.0d; if (lat || lon) { return new Location( lat ? target.getLatitude() - 360.0d : target.getLatitude(), lon ? target.getLongitude() - 360.0d : target.getLongitude(), target.getAltitude()); } return target; } }
package net.darkhax.bookshelf.lib.util; import java.util.Arrays; import java.util.HashMap; import java.util.List; import org.apache.commons.lang3.SystemUtils; import org.apache.commons.lang3.text.WordUtils; import net.minecraft.block.Block; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.init.Blocks; import net.minecraft.inventory.EntityEquipmentSlot; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.util.EnumParticleTypes; import net.minecraft.util.text.TextFormatting; import net.minecraft.world.World; import net.minecraftforge.fluids.IFluidBlock; import net.minecraftforge.fml.common.Loader; import net.minecraftforge.fml.common.ModContainer; import net.minecraftforge.fml.common.registry.EntityRegistry; import net.minecraftforge.fml.common.registry.EntityRegistry.EntityRegistration; import net.minecraftforge.fml.common.registry.IForgeRegistryEntry; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; public final class Utilities { /** * An array of all the LWJGL numeric key codes. */ public static final int[] NUMERIC_KEYS = new int[] { 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 71, 72, 73, 75, 76, 77, 79, 80, 81 }; /** * A hashmap which links domains to their ModContainer. */ private static final HashMap<String, ModContainer> MODS; /** * An array of armor equipment slots. */ private static final EntityEquipmentSlot[] EQUIPMENT_SLOTS = new EntityEquipmentSlot[] { EntityEquipmentSlot.HEAD, EntityEquipmentSlot.CHEST, EntityEquipmentSlot.LEGS, EntityEquipmentSlot.FEET }; /** * This method will take a string and break it down into multiple lines based on a provided * line length. The separate strings are then added to the list provided. This method is * useful for adding a long description to an item tool tip and having it wrap. This method * is similar to wrap in Apache WordUtils however it uses a List making it easier to use * when working with Minecraft. * * @param string: The string being split into multiple lines. It's recommended to use * StatCollector.translateToLocal() for this so multiple languages will be * supported. * @param lnLength: The ideal size for each line of text. * @param wrapLongWords: If true the ideal size will be exact, potentially splitting words * on the end of each line. * @param list: A list to add each line of text to. An good example of such list would be * the list of tooltips on an item. * @return List: The same List instance provided however the string provided will be * wrapped to the ideal line length and then added. */ public static List<String> wrapStringToList (String string, int lnLength, boolean wrapLongWords, List<String> list) { String lines[] = WordUtils.wrap(string, lnLength, null, wrapLongWords).split(SystemUtils.LINE_SEPARATOR); list.addAll(Arrays.asList(lines)); return list; } /** * This method will take a string and break it down into multiple lines based on a provided * line length. The separate strings are then added to the list provided. This method is * useful for adding a long description to an item tool tip and having it wrap. This method * is similar to wrap in Apache WordUtils however it uses a List making it easier to use * when working with Minecraft. * * @param string: The string being split into multiple lines. It's recommended to use * StatCollector.translateToLocal() for this so multiple languages will be * supported. 
* @param lnLength: The ideal size for each line of text. * @param wrapLongWords: If true the ideal size will be exact, potentially splitting words * on the end of each line. * @param format: A list to add each line of text to. An good example of such list would be * the list of tooltips on an item. * @param color: An TextFormatting to apply to all lines added to the list. * @return List: The same List instance provided however the string provided will be * wrapped to the ideal line length and then added. */ public static List<String> wrapStringToListWithFormat (String string, int lnLength, boolean wrapLongWords, List<String> list, TextFormatting format) { String lines[] = WordUtils.wrap(string, lnLength, null, wrapLongWords).split(SystemUtils.LINE_SEPARATOR); for (String line : lines) list.add(format + line); return list; } /** * Checks if a block is a fluid or not. * * @param block: An instance of the block being checked. * @return boolean: If the block is a fluid, true will be returned. If not, false will be * returned. */ public static boolean isFluid (Block block) { return (block == Blocks.LAVA || block == Blocks.WATER || block instanceof IFluidBlock); } /** * A blend between the itemRegistry.getObject and bockRegistry.getObject methods. Used for * grabbing something from an ID, when you have no clue what it might be. * * @param name: The ID of the thing you're looking for. Domains are often preferred. * @return Object: Hopefully the thing you're looking for. */ public static Object getThingByName (String name) { Object thing = Item.getByNameOrId(name); if (thing != null) return thing; thing = Block.getBlockFromName(name); if (thing != null) return thing; return null; } /** * A basic check to see if two classes are the same. For the classes to be the same, * neither can be null, and they must share the same name. * * @param class1: The first class to compare. * @param class2: The second class to compare. * @return boolean: True if neither class is null, and both share the same name. */ public static boolean compareClasses (Class<?> class1, Class<?> class2) { return (class1 != null && class2 != null && class1.getName().equalsIgnoreCase(class2.getName())); } /** * Compares the class of an Object with another class. Useful for comparing a TileEntity or * Item. * * @param obj: The Object to compare. * @param clazz: The class to compare the Object to. * @return boolean: True if the Object is of the same class as the one provided. */ public static boolean compareObjectToClass (Object obj, Class<?> clazz) { return compareClasses(obj.getClass(), clazz); } /** * Makes the first character of a string upper case. Useful for taking raw text data and * turning it into part of a sentence or other display data. * * @param text: The text to convert. * @return String: The same string that was passed, however the first character has been * made upper case. */ public static String makeUpperCased (String text) { return Character.toString(text.charAt(0)).toUpperCase() + text.substring(1); } /** * Provides a safe way to get a class by its name. This is essentially the same as * Class.forName however it will handle any ClassNotFoundException automatically. * * @param name: The name of the class you are trying to get. Example: java.lang.String * @return Class: If a class could be found, it will be returned. Otherwise, null. 
*/ public static Class<?> getClassFromString (String name) { try { return Class.forName(name); } catch (ClassNotFoundException e) { e.printStackTrace(); return null; } } /** * A check to see if an entity is wearing a full suit of the armor. This check is based on * the class names of armor. * * @param living: The living entity to check the armor of. * @param armorClass: The class of the armor to check against. * @return boolean: True if every piece of armor the entity is wearing are the same class * as the provied armor class. */ public static boolean isWearingFullSet (EntityLivingBase living, Class<Item> armorClass) { for (EntityEquipmentSlot slot : EntityEquipmentSlot.values()) { if (slot.getSlotType().equals(EntityEquipmentSlot.Type.ARMOR)) { ItemStack armor = living.getItemStackFromSlot(slot); if (armor == null || !armor.getItem().getClass().equals(armorClass)) return false; } } return true; } /** * Gets the name of a mod that registered the passed object. Has support for a wide range * of registerable objects such as blocks, items, enchantments, potions, sounds, villagers, * biomes, and so on. * * @param registerable The registerable object. Accepts anything that extends * IForgeRegistryEntry.Impl. Current list includes BiomeGenBase, Block, * Enchantment, Item, Potion, PotionType, SoundEvent and VillagerProfession. * @return String The name of the mod that registered the object. */ public static String getModName (IForgeRegistryEntry.Impl<?> registerable) { final String modID = registerable.getRegistryName().getResourceDomain(); final ModContainer mod = MODS.get(modID); return mod != null ? mod.getName() : modID.equalsIgnoreCase("minecraft") ? "Minecraft" : "Unknown"; } /** * Gets the name of a mod that registered the entity. Due to Entity not using * IForgeRegistryEntry.Impl a special method is required. * * @param entity The entity to get the mod name for. * @return String The name of the mod that registered the entity. */ public static String getModName (Entity entity) { if (entity == null) return "Unknown"; final EntityRegistration reg = EntityRegistry.instance().lookupModSpawn(entity.getClass(), false); if (reg != null) { final ModContainer mod = reg.getContainer(); if (mod != null) return mod.getName(); return "Unknown"; } return "Minecraft"; } /** * Spawns a particle into the world, in a basic ring pattern. The center of the ring is * focused around the provided XYZ coordinates. * * @param world: The world to spawn the particles in. * @param particle: The type of particle to spawn. * @param x: The x position to spawn the particle around. * @param y: The y position to spawn the particle around. * @param z: The z position to spawn the particle around. * @param velocityX: The velocity of the particle, in the x direction. * @param velocityY: The velocity of the particle, in the y direction. * @param velocityZ: The velocity of the particle, in the z direction. * @param step: The distance in degrees, between each particle. The maximum is 2 * PI, * which will create 1 particle per ring. 0.15 is a nice value. */ public static void spawnParticleRing (World world, EnumParticleTypes particle, double x, double y, double z, double velocityX, double velocityY, double velocityZ, double step) { for (double degree = 0.0d; degree < 2 * Math.PI; degree += step) world.spawnParticle(particle, x + Math.cos(degree), y, z + Math.sin(degree), velocityX, velocityY, velocityZ); } /** * Spawns a particle into the world, in a basic ring pattern. 
The center of the ring is * focused around the provided XYZ coordinates. * * @param world: The world to spawn the particles in. * @param particle: The type of particle to spawn. * @param percent: The percentage of the ring to render. * @param x: The x position to spawn the particle around. * @param y: The y position to spawn the particle around. * @param z: The z position to spawn the particle around. * @param velocityX: The velocity of the particle, in the x direction. * @param velocityY: The velocity of the particle, in the y direction. * @param velocityZ: The velocity of the particle, in the z direction. * @param step: The distance in degrees, between each particle. The maximum is 2 * PI, * which will create 1 particle per ring. 0.15 is a nice value. */ public static void spawnParticleRing (World world, EnumParticleTypes particle, float percentage, double x, double y, double z, double velocityX, double velocityY, double velocityZ, double step) { for (double degree = 0.0d; degree < (2 * Math.PI * percentage); degree += step) world.spawnParticle(particle, x + Math.cos(degree), y, z + Math.sin(degree), velocityX, velocityY, velocityZ); } public static String getTicksAstime (int timeInTicks) { float time = (float) timeInTicks / 20f; return MathsUtils.round(time, 2) + ((time == 1f) ? " Second " : "Seconds"); } /** * Checks if a keyCode is numeric, meaning 0-9 on the keyboard or number pad. * * @param keyCode: The key code to test. * @return boolean: True, if the key is a number key. */ public static boolean isKeyCodeNumeric (int keyCode) { for (int validKey : NUMERIC_KEYS) if (validKey == keyCode) return true; return false; } /** * Gets the type of equipment for slot index. * * @param index The index of the slot. * @return EntityEquipmentSlot The slot for the index. */ public static EntityEquipmentSlot getEquipmentSlot (int index) { if (index >= 0 && index < EQUIPMENT_SLOTS.length) return EQUIPMENT_SLOTS[index]; return null; } /** * Searches through the array of CreativeTabs and finds the first tab with the same label * as the one passed. * * @param label: The label of the tab you are looking for. * @return CreativeTabs: A CreativeTabs with the same label as the one passed. If this is * not found, you will get null. */ @SideOnly(Side.CLIENT) public static CreativeTabs getTabFromLabel (String label) { for (CreativeTabs tab : CreativeTabs.CREATIVE_TAB_ARRAY) if (tab.getTabLabel().equalsIgnoreCase(label)) return tab; return null; } static { MODS = new HashMap<String, ModContainer>(); final Loader loader = Loader.instance(); for (final String key : loader.getIndexedModList().keySet()) MODS.put(key, loader.getIndexedModList().get(key)); } }
package net.iaeste.iws.persistence.entities; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import net.iaeste.iws.api.enums.Currency; import net.iaeste.iws.api.enums.FieldOfStudy; import net.iaeste.iws.api.enums.Language; import net.iaeste.iws.api.enums.LanguageLevel; import net.iaeste.iws.api.enums.LanguageOperator; import net.iaeste.iws.api.enums.PaymentFrequency; import net.iaeste.iws.api.enums.Specialization; import net.iaeste.iws.api.enums.StudyLevel; import net.iaeste.iws.api.enums.TypeOfWork; import net.iaeste.iws.persistence.Authentication; import net.iaeste.iws.persistence.OfferDao; import net.iaeste.iws.persistence.jpa.OfferJpaDao; import net.iaeste.iws.persistence.setup.SpringConfig; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.support.AnnotationConfigContextLoader; import org.springframework.transaction.annotation.Transactional; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceException; import java.math.BigDecimal; import java.util.Date; import java.util.List; @SuppressWarnings("ClassWithTooManyFields") @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(loader = AnnotationConfigContextLoader.class, classes = { SpringConfig.class }) public class OfferEntityTest { private static final String REF_NO = "AT-2012-1234-AB"; private static final String REF_NO_2 = "AT-2012-5678-AB"; private static final Date NOMINATION_DEADLINE = new Date(); private static final String EMPLOYER_NAME = "Test_Employer_1"; private static final String EMPLOYER_NAME_LIKE = EMPLOYER_NAME.substring(3, 3); private static final String EMPLOYER_NAME_LIKE_NONEXISTING = "XxXxX"; private static final String WORK_DESCRIPTION = "nothing"; private static final Integer MAXIMUM_WEEKS = 12; private static final Integer MINIMUM_WEEKS = 12; private static final Float WEEKLY_HOURS = 40.0f; private static final Float DAILY_HOURS = 8.0f; private static final Date FROM_DATE = new Date(); private static final Date TO_DATE = new Date(FROM_DATE.getTime() + 3600 * 24 * 90); private static final Date FROM_DATE2 = new Date(TO_DATE.getTime() + 3600 * 24 * 90); private static final Date TO_DATE2 = new Date(FROM_DATE2.getTime() + 3600 * 24 * 90); private static final Date UNAVAIABLE_FROM = new Date(TO_DATE.getTime()); private static final Date UNAVAIABLE_TO = new Date(FROM_DATE2.getTime()); private static final BigDecimal PAYMENT = new BigDecimal(3000); private static final BigDecimal LODGING_COST = new BigDecimal(1000); private static final BigDecimal LIVING_COST = new BigDecimal(2000); private static final String FIELDS_OF_STUDY = String.format("%s|%s", FieldOfStudy.IT, FieldOfStudy.CHEMISTRY); private static final String SPECIALIZATIONS = String.format("%s|%s", Specialization.INFORMATION_TECHNOLOGY, "Custom"); private static final String STUDY_LEVELS = String.format("%s|%s", StudyLevel.E, StudyLevel.M); private static final String TYPE_OF_WORK = TypeOfWork.R.toString(); private static final String EMPLOYER_ADDRESS = "test address 30"; private static final String EMPLOYER_ADDRESS2 = "test address 31"; private static final String EMPLOYER_BUSINESS = 
"test business"; private static final Integer EMPLOYER_EMPLOYEES_COUNT = 10; private static final String EMPLOYER_WEBSITE = "www.website.at"; private static final String OTHER_REQUIREMENTS = "cooking"; private static final String WORKING_PLACE = "Vienna"; private static final String NEAREST_AIRPORT = "VIE"; private static final String NEAREST_PUBLIC_TRANSPORT = "U4"; private static final Currency CURRENCY = Currency.EUR; private static final PaymentFrequency PAYMENT_FREQUENCY = PaymentFrequency.W; private static final Integer DEDUCTION = 20; private static final String LODGING_BY = "IAESTE"; private static final PaymentFrequency LODGING_COST_FREQUENCY = PaymentFrequency.M; private static final PaymentFrequency LIVING_COST_FREQUENCY = PaymentFrequency.M; private static final Boolean CANTEEN = true; private OfferDao dao; @PersistenceContext private EntityManager entityManager; private OfferEntity offer; private Authentication authentication; @Before public void before() { dao = new OfferJpaDao(entityManager); offer = getMinimalOffer(); // watch out! // setting arguments to null may result in NullPointerException if auditing implementation changes authentication = new Authentication(null, null); } private static OfferEntity getMinimalOffer() { final OfferEntity offer = new OfferEntity(); offer.setRefNo(REF_NO); offer.setEmployerName(EMPLOYER_NAME); offer.setStudyLevels(STUDY_LEVELS); offer.setFieldOfStudies(FIELDS_OF_STUDY); offer.setLanguage1(Language.ENGLISH); offer.setLanguage1Level(LanguageLevel.E); offer.setWorkDescription(WORK_DESCRIPTION); offer.setMaximumWeeks(MAXIMUM_WEEKS); offer.setMinimumWeeks(MINIMUM_WEEKS); offer.setWeeklyHours(WEEKLY_HOURS); offer.setFromDate(FROM_DATE); offer.setToDate(TO_DATE); return offer; } public static OfferEntity getFullOffer() { final OfferEntity offer = getMinimalOffer(); offer.setNominationDeadline(NOMINATION_DEADLINE); offer.setEmployerAddress(EMPLOYER_ADDRESS); offer.setEmployerAddress2(EMPLOYER_ADDRESS2); offer.setEmployerBusiness(EMPLOYER_BUSINESS); offer.setEmployerEmployeesCount(EMPLOYER_EMPLOYEES_COUNT); offer.setEmployerWebsite(EMPLOYER_WEBSITE); offer.setPrevTrainingRequired(true); offer.setOtherRequirements(OTHER_REQUIREMENTS); offer.setLanguage1Operator(LanguageOperator.A); offer.setLanguage2(Language.FRENCH); offer.setLanguage2Level(LanguageLevel.E); offer.setLanguage2Operator(LanguageOperator.O); offer.setLanguage3(Language.GERMAN); offer.setLanguage3Level(LanguageLevel.E); offer.setTypeOfWork(TYPE_OF_WORK); offer.setFromDate2(FROM_DATE2); offer.setToDate2(TO_DATE2); offer.setUnavailableFrom(UNAVAIABLE_FROM); offer.setUnavailableTo(UNAVAIABLE_TO); offer.setWorkingPlace(WORKING_PLACE); offer.setNearestAirport(NEAREST_AIRPORT); offer.setNearestPubTransport(NEAREST_PUBLIC_TRANSPORT); offer.setDailyHours(DAILY_HOURS); offer.setCurrency(CURRENCY); offer.setPaymentFrequency(PAYMENT_FREQUENCY); offer.setDeduction(DEDUCTION); offer.setLodgingBy(LODGING_BY); offer.setLodgingCost(LODGING_COST); offer.setLodgingCostFrequency(LODGING_COST_FREQUENCY); offer.setLivingCost(LIVING_COST); offer.setLivingCostFrequency(LIVING_COST_FREQUENCY); offer.setCanteen(CANTEEN); offer.setSpecializations(SPECIALIZATIONS); return offer; } @Test @Transactional public void testMinimalOffer() { dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); offer = entityManager.find(OfferEntity.class, offer.getId()); assertThat(offer.getRefNo(), is(REF_NO)); assertThat(offer.getEmployerName(), is(EMPLOYER_NAME)); assertThat(offer.getStudyLevels(), 
is(STUDY_LEVELS)); assertThat(offer.getFieldOfStudies(), is(FIELDS_OF_STUDY)); assertThat(offer.getLanguage1(), is(Language.ENGLISH)); assertThat(offer.getLanguage1Level(), is(LanguageLevel.E)); assertThat(offer.getWorkDescription(), is(WORK_DESCRIPTION)); assertThat(offer.getMaximumWeeks(), is(MAXIMUM_WEEKS)); assertThat(offer.getMinimumWeeks(), is(MINIMUM_WEEKS)); assertThat(offer.getWeeklyHours(), is(WEEKLY_HOURS)); assertThat(offer.getFromDate(), is(FROM_DATE)); assertThat(offer.getToDate(), is(TO_DATE)); final OfferEntity persisted = dao.findOffer(offer.getId()); assertThat(offer, is(persisted)); } @SuppressWarnings("OverlyLongMethod") @Test @Transactional public void testFullOffer() { offer = getFullOffer(); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); offer = entityManager.find(OfferEntity.class, offer.getId()); assertThat(offer.getRefNo(), is(REF_NO)); assertThat(offer.getEmployerName(), is(EMPLOYER_NAME)); assertThat(offer.getStudyLevels(), is(STUDY_LEVELS)); assertThat(offer.getFieldOfStudies(), is(FIELDS_OF_STUDY)); assertThat(offer.getLanguage1(), is(Language.ENGLISH)); assertThat(offer.getLanguage1Level(), is(LanguageLevel.E)); assertThat(offer.getWorkDescription(), is(WORK_DESCRIPTION)); assertThat(offer.getMaximumWeeks(), is(MAXIMUM_WEEKS)); assertThat(offer.getMinimumWeeks(), is(MINIMUM_WEEKS)); assertThat(offer.getWeeklyHours(), is(WEEKLY_HOURS)); assertThat(offer.getFromDate(), is(FROM_DATE)); assertThat(offer.getToDate(), is(TO_DATE)); assertThat(offer.getNominationDeadline(), is(NOMINATION_DEADLINE)); assertThat(offer.getEmployerAddress(), is(EMPLOYER_ADDRESS)); assertThat(offer.getEmployerAddress2(), is(EMPLOYER_ADDRESS2)); assertThat(offer.getEmployerBusiness(), is(EMPLOYER_BUSINESS)); assertThat(offer.getEmployerEmployeesCount(), is(EMPLOYER_EMPLOYEES_COUNT)); assertThat(offer.getEmployerWebsite(), is(EMPLOYER_WEBSITE)); assertThat(offer.getPrevTrainingRequired(), is(true)); assertThat(offer.getOtherRequirements(), is(OTHER_REQUIREMENTS)); assertThat(offer.getLanguage1Operator(), is(LanguageOperator.A)); assertThat(offer.getLanguage2(), is(Language.FRENCH)); assertThat(offer.getLanguage2Level(), is(LanguageLevel.E)); assertThat(offer.getLanguage2Operator(), is(LanguageOperator.O)); assertThat(offer.getLanguage3(), is(Language.GERMAN)); assertThat(offer.getLanguage3Level(), is(LanguageLevel.E)); assertThat(offer.getTypeOfWork(), is(TYPE_OF_WORK)); assertThat(offer.getFromDate2(), is(FROM_DATE2)); assertThat(offer.getToDate2(), is(TO_DATE2)); assertThat(offer.getUnavailableFrom(), is(UNAVAIABLE_FROM)); assertThat(offer.getUnavailableTo(), is(UNAVAIABLE_TO)); assertThat(offer.getWorkingPlace(), is(WORKING_PLACE)); assertThat(offer.getNearestAirport(), is(NEAREST_AIRPORT)); assertThat(offer.getNearestPubTransport(), is(NEAREST_PUBLIC_TRANSPORT)); assertThat(offer.getDailyHours(), is(DAILY_HOURS)); assertThat(offer.getCurrency(), is(CURRENCY)); assertThat(offer.getPaymentFrequency(), is(PAYMENT_FREQUENCY)); assertThat(offer.getDeduction(), is(DEDUCTION)); assertThat(offer.getLodgingBy(), is(LODGING_BY)); assertThat(offer.getLodgingCost(), is(LODGING_COST)); assertThat(offer.getLodgingCostFrequency(), is(LODGING_COST_FREQUENCY)); assertThat(offer.getLivingCost(), is(LIVING_COST)); assertThat(offer.getLivingCostFrequency(), is(LIVING_COST_FREQUENCY)); assertThat(offer.getCanteen(), is(CANTEEN)); assertThat(offer.getSpecializations(), is(SPECIALIZATIONS)); final OfferEntity persisted = dao.findOffer(offer.getId()); assertThat(offer, 
is(persisted)); } @Test(expected = PersistenceException.class) @Transactional public void testUniqueRefNo() { final String refNo = "CZ-2012-1001"; offer.setRefNo(refNo); offer.setId(null); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); offer = getMinimalOffer(); offer.setRefNo(refNo); offer.setId(null); dao.persist(authentication, offer); } @Test(expected = PersistenceException.class) @Transactional public void testNullRefNo() { offer.setRefNo(null); dao.persist(authentication, offer); } @Test @Transactional public void testNullNominationDeadline() { offer.setNominationDeadline(null); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); assertThat(dao.findOffer(offer.getId()), is(offer)); } @Test(expected = PersistenceException.class) @Transactional public void testNullEmployerName() { offer.setEmployerName(null); dao.persist(authentication, offer); } @Test(expected = PersistenceException.class) @Transactional public void testNullLang1() { offer.setLanguage1(null); dao.persist(authentication, offer); } @Test(expected = PersistenceException.class) @Transactional public void testNullLang1Level() { offer.setLanguage1Level(null); dao.persist(authentication, offer); } @Test(expected = PersistenceException.class) @Transactional public void testNullWorkDescription() { offer.setWorkDescription(null); dao.persist(authentication, offer); } @Test(expected = PersistenceException.class) @Transactional public void testNullMaxWeeks() { offer.setMaximumWeeks(null); dao.persist(authentication, offer); } @Test(expected = PersistenceException.class) @Transactional public void testNullMinWeeks() { offer.setMinimumWeeks(null); dao.persist(authentication, offer); } @Test(expected = PersistenceException.class) @Transactional public void testNullWeeklyHours() { offer.setWeeklyHours(null); dao.persist(authentication, offer); } @Test @Transactional public void testNullDailyHours() { offer.setDailyHours(null); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); final OfferEntity foundOffer = dao.findOffer(offer.getId()); assertThat(foundOffer, is(offer)); } @Test @Transactional public void testOtherRequirements() { final StringBuilder sb = new StringBuilder(500); for (int i = 0; i < 500; i++) { sb.append('a'); } offer.setOtherRequirements(sb.toString()); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); assertThat(dao.findOffer(offer.getId()), is(offer)); } @Test(expected = PersistenceException.class) @Transactional public void testTooLongOtherRequirements() { final StringBuilder sb = new StringBuilder(501); for (int i = 0; i < 501; i++) { sb.append('a'); } offer.setOtherRequirements(sb.toString()); dao.persist(authentication, offer); } @Test @Transactional public void testWorkDescription() { final StringBuilder sb = new StringBuilder(1000); for (int i = 0; i < 1000; i++) { sb.append('a'); } offer.setWorkDescription(sb.toString()); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); assertThat(dao.findOffer(offer.getId()), is(offer)); } @Test(expected = PersistenceException.class) @Transactional public void testTooLongWorkDescription() { final StringBuilder sb = new StringBuilder(1001); for (int i = 0; i < 1001; i++) { sb.append('a'); } offer.setWorkDescription(sb.toString()); dao.persist(authentication, offer); } @Test @Transactional public void testWeeklyHoursPrecision() { offer.setWeeklyHours(0.999f); dao.persist(authentication, offer); offer = 
entityManager.find(OfferEntity.class, offer.getId()); assertThat(offer.getWeeklyHours(), is(Float.valueOf("0.999"))); } /* TODO for some reason the precision does not work with hsqldb @Test(expected = PersistenceException.class) @Transactional public void testWeeklyHoursPrecision2() { offer.setWeeklyHours(10.9999f); dao.persist(authentication, offer); offer = entityManager.find(OfferEntity.class, offer.getId()); Assert.assertEquals("10.9999", offer.getWeeklyHours().toString()); }*/ @Test(expected = PersistenceException.class) @Transactional public void testWeeklyHoursPrecision3() { offer.setWeeklyHours(100.0f); dao.persist(authentication, offer); } @Test @Transactional public void testDailyHoursPrecision() { offer.setDailyHours(0.999f); dao.persist(authentication, offer); offer = entityManager.find(OfferEntity.class, offer.getId()); assertThat(offer.getDailyHours(), is(Float.valueOf("0.999"))); } /* TODO for some reason the precision does not work with hsqldb @Test(expected = PersistenceException.class) @Transactional public void testDailyHoursPrecision2() { offer.setDailyHours(10.9999f); dao.persist(authentication, offer); offer = entityManager.find(OfferEntity.class, offer.getId()); Assert.assertEquals("10.9999", offer.getDailyHours().toString()); }*/ @Test(expected = PersistenceException.class) @Transactional public void testDailyHoursPrecision3() { offer.setDailyHours(100.0f); dao.persist(authentication, offer); } @Test @Transactional public void testPayment() { offer.setPayment(BigDecimal.valueOf(1234567890.12)); offer.setPaymentFrequency(PaymentFrequency.M); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); assertThat(dao.findOffer(offer.getId()), is(offer)); } @Test(expected = PersistenceException.class) @Transactional public void testPayment2() { offer.setPayment(BigDecimal.valueOf(12345678901.0)); dao.persist(authentication, offer); } /* TODO for some reason the precision does not work with hsqldb @Test(expected = PersistenceException.class) @Transactional public void testPayment3() { offer.setPayment(BigDecimal.valueOf(1234567890.123)); dao.persist(authentication, offer); }*/ @Test @Transactional public void testDecuction() { offer.setDeduction(99); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); assertThat(dao.findOffer(offer.getId()), is(offer)); } @Test(expected = PersistenceException.class) @Transactional public void testDecuction2() { offer.setDeduction(100); dao.persist(authentication, offer); } @Test @Transactional public void testLodgingCost() { offer.setLodgingCost(BigDecimal.valueOf(1234567890.12)); offer.setLodgingCostFrequency(PaymentFrequency.M); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); assertThat(dao.findOffer(offer.getId()), is(offer)); } @Test(expected = PersistenceException.class) @Transactional public void testLodgingCost2() { offer.setLodgingCost(BigDecimal.valueOf(12345678901.0)); dao.persist(authentication, offer); } /* TODO for some reason the precision does not work with hsqldb @Test(expected = PersistenceException.class) @Transactional public void testLodgingCost3() { offer.setLodgingCost(BigDecimal.valueOf(1234567890.123)); dao.persist(authentication, offer); }*/ @Test @Transactional public void testLivingCost() { offer.setLivingCost(BigDecimal.valueOf(1234567890.12)); offer.setLivingCostFrequency(PaymentFrequency.M); dao.persist(authentication, offer); assertThat(offer.getId(), is(notNullValue())); assertThat(dao.findOffer(offer.getId()), 
is(offer)); } @Test(expected = PersistenceException.class) @Transactional public void testLivingCost2() { offer.setLivingCost(BigDecimal.valueOf(12345678901.0)); dao.persist(authentication, offer); } /* TODO for some reason the precision does not work with hsqldb @Test(expected = PersistenceException.class) @Transactional public void testLivingCost3() { offer.setLivingCost(BigDecimal.valueOf(1234567890.123)); dao.persist(authentication, offer); }*/ @Test(expected = PersistenceException.class) @Transactional public void testNullFromDate() { offer.setFromDate(null); dao.persist(authentication, offer); } @Test(expected = PersistenceException.class) @Transactional public void testNullToDate() { offer.setToDate(null); dao.persist(authentication, offer); } @Test @Transactional public void testNullPaymentFrequency() { offer.setPayment(null); offer.setPaymentFrequency(null); dao.persist(authentication, offer); final OfferEntity persistedOffer = dao.findOffer(offer.getId()); assertThat(persistedOffer, is(offer)); assertThat(persistedOffer.getPayment(), is(nullValue())); assertThat(persistedOffer.getPaymentFrequency(), is(nullValue())); } @Transactional @Test public void testNullLodgingCostFrequency() { offer.setLodgingCostFrequency(null); offer.setLodgingCost(null); dao.persist(authentication, offer); final OfferEntity persistedOffer = dao.findOffer(offer.getId()); assertThat(persistedOffer, is(offer)); assertThat(persistedOffer.getLodgingCostFrequency(), is(nullValue())); assertThat(persistedOffer.getLodgingCost(), is(nullValue())); } @Test @Transactional public void testNullLivingCostFrequency() { offer.setLivingCostFrequency(null); offer.setLivingCost(null); dao.persist(authentication, offer); final OfferEntity persistedOffer = dao.findOffer(offer.getId()); assertThat(persistedOffer, is(offer)); assertThat(persistedOffer.getLivingCostFrequency(), is(nullValue())); assertThat(persistedOffer.getLivingCost(), is(nullValue())); } @Test @Transactional public void testTypeOfWork() { offer.setTypeOfWork(TYPE_OF_WORK); offer.setId(null); dao.persist(authentication, offer); assertThat(offer, is(notNullValue())); assertThat(offer.getId(), is(notNullValue())); assertThat(offer.getTypeOfWork(), is(TYPE_OF_WORK)); } @Test @Transactional public void testFind() { assertThat(dao.findAll().size(), is(0)); dao.persist(authentication, offer); final OfferEntity offerFoundByRefNo = dao.findOffer(offer.getRefNo()); assertThat(offerFoundByRefNo, is(notNullValue())); assertThat(offerFoundByRefNo, is(offer)); final OfferEntity offerFoundById = dao.findOffer(offer.getId()); assertThat(offerFoundById, is(notNullValue())); assertThat(offerFoundById, is(offer)); assertThat(dao.findOffersByEmployerName(EMPLOYER_NAME_LIKE_NONEXISTING).size(), is(0)); final List<OfferEntity> offersFoundByEmployerName = dao.findOffersByEmployerName(offer.getEmployerName()); if (offersFoundByEmployerName == null || offersFoundByEmployerName.isEmpty()) { fail("This should not happen!"); } final OfferEntity offerFoundByEmployerName = offersFoundByEmployerName.get(0); assertThat(offerFoundByEmployerName, is(offer)); final OfferEntity offer2 = getFullOffer(); offer2.setRefNo(REF_NO_2); dao.persist(authentication, offer2); assertThat(dao.findAll().size(), is(2)); final List<OfferEntity> offersFoundByLikeEmployerName = dao.findOffersByLikeEmployerName(EMPLOYER_NAME_LIKE); if (offersFoundByLikeEmployerName == null || offersFoundByLikeEmployerName.isEmpty()) { fail("This should not happen!"); } assertThat(offersFoundByLikeEmployerName.size(), is(2)); 
assertThat(dao.findOffersByLikeEmployerName(EMPLOYER_NAME_LIKE_NONEXISTING).size(), is(0)); } @Test @Transactional public void testDeleteOffer() { final Long id = offer.getId(); final String refNo = offer.getRefNo(); assert id == null; dao.persist(authentication, offer); // make sure that offer was persisted final Long newId = offer.getId(); assertThat(newId, is(notNullValue())); final OfferEntity found = dao.findOffer(newId); assertThat(found.getId(), is(newId)); assertThat(found.getRefNo(), is(refNo)); // try to delete offer dao.delete(found.getId()); // make sure that offer was deleted final OfferEntity notFound = dao.findOffer(newId); assertThat(notFound, is(nullValue())); } @After public void cleanUp() { } }
package net.minecraftforge.oredict; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.RandomAccess; import java.util.Map.Entry; import java.util.Set; import net.minecraft.block.Block; import net.minecraft.init.Blocks; import net.minecraft.init.Items; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.item.crafting.CraftingManager; import net.minecraft.item.crafting.IRecipe; import net.minecraft.item.crafting.ShapedRecipes; import net.minecraft.item.crafting.ShapelessRecipes; import net.minecraftforge.common.MinecraftForge; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import net.minecraftforge.fml.common.FMLLog; import net.minecraftforge.fml.common.eventhandler.Event; public class OreDictionary { private static boolean hasInit = false; private static List<String> idToName = new ArrayList<String>(); private static Map<String, Integer> nameToId = new HashMap<String, Integer>(); private static List<List<ItemStack>> idToStack = Lists.newArrayList(); private static List<List<ItemStack>> idToStackUn = Lists.newArrayList(); private static Map<Integer, List<Integer>> stackToId = Maps.newHashMap(); public static final ImmutableList<ItemStack> EMPTY_LIST = ImmutableList.of(); /** * Minecraft changed from -1 to Short.MAX_VALUE in 1.5 release for the "block wildcard". Use this in case it * changes again. */ public static final int WILDCARD_VALUE = Short.MAX_VALUE; static { initVanillaEntries(); } @SuppressWarnings("unchecked") public static void initVanillaEntries() { if (!hasInit) { registerOre("logWood", new ItemStack(Blocks.log, 1, WILDCARD_VALUE)); registerOre("logWood", new ItemStack(Blocks.log2, 1, WILDCARD_VALUE)); registerOre("plankWood", new ItemStack(Blocks.planks, 1, WILDCARD_VALUE)); registerOre("slabWood", new ItemStack(Blocks.wooden_slab, 1, WILDCARD_VALUE)); registerOre("stairWood", Blocks.oak_stairs); registerOre("stairWood", Blocks.spruce_stairs); registerOre("stairWood", Blocks.birch_stairs); registerOre("stairWood", Blocks.jungle_stairs); registerOre("stairWood", Blocks.acacia_stairs); registerOre("stairWood", Blocks.dark_oak_stairs); registerOre("stickWood", Items.stick); registerOre("treeSapling", new ItemStack(Blocks.sapling, 1, WILDCARD_VALUE)); registerOre("treeLeaves", new ItemStack(Blocks.leaves, 1, WILDCARD_VALUE)); registerOre("treeLeaves", new ItemStack(Blocks.leaves2, 1, WILDCARD_VALUE)); registerOre("oreGold", Blocks.gold_ore); registerOre("oreIron", Blocks.iron_ore); registerOre("oreLapis", Blocks.lapis_ore); registerOre("oreDiamond", Blocks.diamond_ore); registerOre("oreRedstone", Blocks.redstone_ore); registerOre("oreEmerald", Blocks.emerald_ore); registerOre("oreQuartz", Blocks.quartz_ore); registerOre("oreCoal", Blocks.coal_ore); registerOre("blockGold", Blocks.gold_block); registerOre("blockIron", Blocks.iron_block); registerOre("blockLapis", Blocks.lapis_block); registerOre("blockDiamond", Blocks.diamond_block); registerOre("blockRedstone", Blocks.redstone_block); registerOre("blockEmerald", Blocks.emerald_block); registerOre("blockQuartz", Blocks.quartz_block); registerOre("blockCoal", Blocks.coal_block); registerOre("blockGlassColorless", Blocks.glass); registerOre("blockGlass", Blocks.glass); registerOre("blockGlass", new 
ItemStack(Blocks.stained_glass, 1, WILDCARD_VALUE)); //blockGlass{Color} is added below with dyes registerOre("paneGlassColorless", Blocks.glass_pane); registerOre("paneGlass", Blocks.glass_pane); registerOre("paneGlass", new ItemStack(Blocks.stained_glass_pane, 1, WILDCARD_VALUE)); //paneGlass{Color} is added below with dyes registerOre("ingotIron", Items.iron_ingot); registerOre("ingotGold", Items.gold_ingot); registerOre("ingotBrick", Items.brick); registerOre("ingotBrickNether", Items.netherbrick); registerOre("nuggetGold", Items.gold_nugget); registerOre("gemDiamond", Items.diamond); registerOre("gemEmerald", Items.emerald); registerOre("gemQuartz", Items.quartz); registerOre("dustRedstone", Items.redstone); registerOre("dustGlowstone", Items.glowstone_dust); registerOre("gemLapis", new ItemStack(Items.dye, 1, 4)); registerOre("slimeball", Items.slime_ball); registerOre("glowstone", Blocks.glowstone); registerOre("cropWheat", Items.wheat); registerOre("cropPotato", Items.potato); registerOre("cropCarrot", Items.carrot); registerOre("stone", Blocks.stone); registerOre("cobblestone", Blocks.cobblestone); registerOre("sandstone", new ItemStack(Blocks.sandstone, 1, WILDCARD_VALUE)); registerOre("sand", new ItemStack(Blocks.sand, 1, WILDCARD_VALUE)); registerOre("dye", new ItemStack(Items.dye, 1, WILDCARD_VALUE)); registerOre("record", Items.record_13); registerOre("record", Items.record_cat); registerOre("record", Items.record_blocks); registerOre("record", Items.record_chirp); registerOre("record", Items.record_far); registerOre("record", Items.record_mall); registerOre("record", Items.record_mellohi); registerOre("record", Items.record_stal); registerOre("record", Items.record_strad); registerOre("record", Items.record_ward); registerOre("record", Items.record_11); registerOre("record", Items.record_wait); registerOre("chest", Blocks.chest); registerOre("chest", Blocks.ender_chest); registerOre("chest", Blocks.trapped_chest); registerOre("chestWood", Blocks.chest); registerOre("chestEnder", Blocks.ender_chest); registerOre("chestTrapped", Blocks.trapped_chest); } // Build our list of items to replace with ore tags Map<ItemStack, String> replacements = new HashMap<ItemStack, String>(); replacements.put(new ItemStack(Items.stick), "stickWood"); replacements.put(new ItemStack(Blocks.planks), "plankWood"); replacements.put(new ItemStack(Blocks.planks, 1, WILDCARD_VALUE), "plankWood"); replacements.put(new ItemStack(Blocks.wooden_slab, 1, WILDCARD_VALUE), "slabWood"); replacements.put(new ItemStack(Blocks.stone), "stone"); replacements.put(new ItemStack(Blocks.stone, 1, WILDCARD_VALUE), "stone"); replacements.put(new ItemStack(Blocks.cobblestone), "cobblestone"); replacements.put(new ItemStack(Blocks.cobblestone, 1, WILDCARD_VALUE), "cobblestone"); replacements.put(new ItemStack(Items.gold_ingot), "ingotGold"); replacements.put(new ItemStack(Items.iron_ingot), "ingotIron"); replacements.put(new ItemStack(Items.diamond), "gemDiamond"); replacements.put(new ItemStack(Items.emerald), "gemEmerald"); replacements.put(new ItemStack(Items.redstone), "dustRedstone"); replacements.put(new ItemStack(Items.glowstone_dust), "dustGlowstone"); replacements.put(new ItemStack(Blocks.glowstone), "glowstone"); replacements.put(new ItemStack(Items.slime_ball), "slimeball"); replacements.put(new ItemStack(Blocks.glass), "blockGlassColorless"); replacements.put(new ItemStack(Blocks.chest), "chestWood"); replacements.put(new ItemStack(Blocks.ender_chest), "chestEnder"); replacements.put(new 
ItemStack(Blocks.trapped_chest), "chestTrapped"); // Register dyes String[] dyes = { "Black", "Red", "Green", "Brown", "Blue", "Purple", "Cyan", "LightGray", "Gray", "Pink", "Lime", "Yellow", "LightBlue", "Magenta", "Orange", "White" }; for(int i = 0; i < 16; i++) { ItemStack dye = new ItemStack(Items.dye, 1, i); ItemStack block = new ItemStack(Blocks.stained_glass, 1, 15 - i); ItemStack pane = new ItemStack(Blocks.stained_glass_pane, 1, 15 - i); if (!hasInit) { registerOre("dye" + dyes[i], dye); registerOre("blockGlass" + dyes[i], block); registerOre("paneGlass" + dyes[i], pane); } replacements.put(dye, "dye" + dyes[i]); replacements.put(block, "blockGlass" + dyes[i]); replacements.put(pane, "paneGlass" + dyes[i]); } hasInit = true; ItemStack[] replaceStacks = replacements.keySet().toArray(new ItemStack[replacements.keySet().size()]); // Ignore recipes for the following items ItemStack[] exclusions = new ItemStack[] { new ItemStack(Blocks.lapis_block), new ItemStack(Items.cookie), new ItemStack(Blocks.stonebrick), new ItemStack(Blocks.stone_slab, 1, WILDCARD_VALUE), new ItemStack(Blocks.stone_stairs), new ItemStack(Blocks.cobblestone_wall), new ItemStack(Blocks.oak_stairs), new ItemStack(Blocks.spruce_stairs), new ItemStack(Blocks.birch_stairs), new ItemStack(Blocks.jungle_stairs), new ItemStack(Blocks.acacia_stairs), new ItemStack(Blocks.dark_oak_stairs), new ItemStack(Blocks.wooden_slab), new ItemStack(Blocks.glass_pane) }; List<IRecipe> recipes = CraftingManager.getInstance().getRecipeList(); List<IRecipe> recipesToRemove = new ArrayList<IRecipe>(); List<IRecipe> recipesToAdd = new ArrayList<IRecipe>(); // Search vanilla recipes for recipes to replace for(Object obj : recipes) { if(obj instanceof ShapedRecipes) { ShapedRecipes recipe = (ShapedRecipes)obj; ItemStack output = recipe.getRecipeOutput(); if (output != null && containsMatch(false, exclusions, output)) { continue; } if(containsMatch(true, recipe.recipeItems, replaceStacks)) { recipesToRemove.add(recipe); recipesToAdd.add(new ShapedOreRecipe(recipe, replacements)); } } else if(obj instanceof ShapelessRecipes) { ShapelessRecipes recipe = (ShapelessRecipes)obj; ItemStack output = recipe.getRecipeOutput(); if (output != null && containsMatch(false, exclusions, output)) { continue; } if(containsMatch(true, (ItemStack[])recipe.recipeItems.toArray(new ItemStack[recipe.recipeItems.size()]), replaceStacks)) { recipesToRemove.add((IRecipe)obj); IRecipe newRecipe = new ShapelessOreRecipe(recipe, replacements); recipesToAdd.add(newRecipe); } } } recipes.removeAll(recipesToRemove); recipes.addAll(recipesToAdd); if (recipesToRemove.size() > 0) { FMLLog.info("Replaced %d ore recipies", recipesToRemove.size()); } } /** * Gets the integer ID for the specified ore name. * If the name does not have a ID it assigns it a new one. * * @param name The unique name for this ore 'oreIron', 'ingotIron', etc.. * @return A number representing the ID for this ore type */ public static int getOreID(String name) { Integer val = nameToId.get(name); if (val == null) { idToName.add(name); val = idToName.size() - 1; //0 indexed nameToId.put(name, val); List<ItemStack> back = Lists.newArrayList(); idToStack.add(back); idToStackUn.add(Collections.unmodifiableList(back)); } return val; } /** * Reverse of getOreID, will not create new entries. * * @param id The ID to translate to a string * @return The String name, or "Unknown" if not found. */ public static String getOreName(int id) { return (id >= 0 && id < idToName.size()) ? 
idToName.get(id) : "Unknown"; } /** * Gets all the integer ID for the ores that the specified item stakc is registered to. * If the item stack is not linked to any ore, this will return an empty array and no new entry will be created. * * @param stack The item stack of the ore. * @return An array of ids that this ore is registerd as. */ public static int[] getOreIDs(ItemStack stack) { if (stack == null || stack.getItem() == null) throw new IllegalArgumentException("Stack can not be null!"); Set<Integer> set = new HashSet<Integer>(); int id = Item.getIdFromItem(stack.getItem()); List<Integer> ids = stackToId.get(id); if (ids != null) set.addAll(ids); ids = stackToId.get(id | ((stack.getItemDamage() + 1) << 16)); if (ids != null) set.addAll(ids); Integer[] tmp = set.toArray(new Integer[set.size()]); int[] ret = new int[tmp.length]; for (int x = 0; x < tmp.length; x++) ret[x] = tmp[x]; return ret; } /** * Retrieves the ArrayList of items that are registered to this ore type. * Creates the list as empty if it did not exist. * * The returned List is unmodifiable, but will be updated if a new ore * is registered using registerOre * * @param name The ore name, directly calls getOreID * @return An arrayList containing ItemStacks registered for this ore */ public static List<ItemStack> getOres(String name) { return getOres(getOreID(name)); } /** * Retrieves a list of all unique ore names that are already registered. * * @return All unique ore names that are currently registered. */ public static String[] getOreNames() { return idToName.toArray(new String[idToName.size()]); } /** * Retrieves the List of items that are registered to this ore type. * Creates the list as empty if it did not exist. * * @param id The ore ID, see getOreID * @return An List containing ItemStacks registered for this ore */ private static List<ItemStack> getOres(int id) { return idToStackUn.size() > id ? idToStackUn.get(id) : EMPTY_LIST; } private static boolean containsMatch(boolean strict, ItemStack[] inputs, ItemStack... targets) { for (ItemStack input : inputs) { for (ItemStack target : targets) { if (itemMatches(target, input, strict)) { return true; } } } return false; } private static boolean containsMatch(boolean strict, List<ItemStack> inputs, ItemStack... targets) { for (ItemStack input : inputs) { for (ItemStack target : targets) { if (itemMatches(target, input, strict)) { return true; } } } return false; } public static boolean itemMatches(ItemStack target, ItemStack input, boolean strict) { if (input == null && target != null || input != null && target == null) { return false; } return (target.getItem() == input.getItem() && ((target.getItemDamage() == WILDCARD_VALUE && !strict) || target.getItemDamage() == input.getItemDamage())); } //Convenience functions that make for cleaner code mod side. They all drill down to registerOre(String, int, ItemStack) public static void registerOre(String name, Item ore){ registerOre(name, new ItemStack(ore)); } public static void registerOre(String name, Block ore){ registerOre(name, new ItemStack(ore)); } public static void registerOre(String name, ItemStack ore){ registerOreImpl(name, ore); } /** * Registers a ore item into the dictionary. * Raises the registerOre function in all registered handlers. * * @param name The name of the ore * @param id The ID of the ore * @param ore The ore's ItemStack */ private static void registerOreImpl(String name, ItemStack ore) { if ("Unknown".equals(name)) return; //prevent bad IDs. 
int oreID = getOreID(name); int hash = Item.getIdFromItem(ore.getItem()); if (ore.getItemDamage() != WILDCARD_VALUE) { hash |= ((ore.getItemDamage() + 1) << 16); // +1 so 0 is significant } //Add things to the baked version, and prevent duplicates List<Integer> ids = stackToId.get(hash); if (ids != null && ids.contains(oreID)) return; if (ids == null) { ids = Lists.newArrayList(); stackToId.put(hash, ids); } ids.add(oreID); //Add to the unbaked version ore = ore.copy(); idToStack.get(oreID).add(ore); MinecraftForge.EVENT_BUS.post(new OreRegisterEvent(name, ore)); } public static class OreRegisterEvent extends Event { public final String Name; public final ItemStack Ore; public OreRegisterEvent(String name, ItemStack ore) { this.Name = name; this.Ore = ore; } } public static void rebakeMap() { //System.out.println("Baking OreDictionary:"); stackToId.clear(); for (int id = 0; id < idToStack.size(); id++) { List<ItemStack> ores = idToStack.get(id); if (ores == null) continue; for (ItemStack ore : ores) { int hash = Item.getIdFromItem(ore.getItem()); if (ore.getItemDamage() != WILDCARD_VALUE) { hash |= ((ore.getItemDamage() + 1) << 16); // +1 so meta 0 is significant } List<Integer> ids = stackToId.get(hash); if (ids == null) { ids = Lists.newArrayList(); stackToId.put(hash, ids); } ids.add(id); //System.out.println(id + " " + getOreName(id) + " " + Integer.toHexString(hash) + " " + ore); } } } }
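/*
 * Minimal usage sketch for the dictionary above (not part of Forge itself). ModBlocks.copper_ore
 * and ModItems.copper_ingot are hypothetical mod fields used only for illustration; every
 * OreDictionary call below is part of the API defined above.
 */
package com.example.mod; // hypothetical package

import java.util.List;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraftforge.oredict.OreDictionary;

public final class OreDictionaryUsageSketch {
    private OreDictionaryUsageSketch() { }

    public static void registerAndQuery() {
        // Register a modded block/item pair under the conventional ore names.
        OreDictionary.registerOre("oreCopper", ModBlocks.copper_ore);                    // hypothetical block
        OreDictionary.registerOre("ingotCopper", new ItemStack(ModItems.copper_ingot));  // hypothetical item

        // The returned list is unmodifiable but keeps reflecting later registrations.
        List<ItemStack> copperIngots = OreDictionary.getOres("ingotCopper");

        // Reverse lookup: which ore names is a stack registered under?
        ItemStack ingot = new ItemStack(Items.iron_ingot);
        for (int id : OreDictionary.getOreIDs(ingot)) {
            System.out.println(OreDictionary.getOreName(id)); // prints "ingotIron"
        }

        // Wildcard-aware comparison: a WILDCARD_VALUE target matches any damage value when strict is false.
        ItemStack anyLog = new ItemStack(Blocks.log, 1, OreDictionary.WILDCARD_VALUE);
        boolean matches = OreDictionary.itemMatches(anyLog, new ItemStack(Blocks.log, 1, 2), false); // true
    }
}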
package com.github.javaparser.symbolsolver; import com.github.javaparser.ParseException; import com.github.javaparser.ast.CompilationUnit; import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration; import com.github.javaparser.ast.expr.Expression; import com.github.javaparser.ast.expr.MethodCallExpr; import com.github.javaparser.ast.stmt.ExpressionStmt; import com.github.javaparser.symbolsolver.javaparser.Navigator; import com.github.javaparser.symbolsolver.javaparsermodel.JavaParserFacade; import com.github.javaparser.symbolsolver.model.resolution.TypeSolver; import com.github.javaparser.symbolsolver.model.resolution.UnsolvedSymbolException; import com.github.javaparser.symbolsolver.resolution.AbstractResolutionTest; import com.github.javaparser.symbolsolver.resolution.typesolvers.CombinedTypeSolver; import com.github.javaparser.symbolsolver.resolution.typesolvers.JavaParserTypeSolver; import com.github.javaparser.symbolsolver.resolution.typesolvers.ReflectionTypeSolver; import org.junit.Before; import org.junit.Test; import java.io.File; import java.io.IOException; import static org.junit.Assert.assertEquals; public class Issue144 extends AbstractResolutionTest { private TypeSolver typeSolver; @Before public void setup() throws IOException { File srcDir = adaptPath(new File("src/test/resources/issue144")); typeSolver = new JavaParserTypeSolver(srcDir); } @Test(expected = UnsolvedSymbolException.class) public void issue144() throws ParseException { CompilationUnit cu = parseSampleWithStandardExtension("issue144/HelloWorld"); ClassOrInterfaceDeclaration clazz = Navigator.demandClass(cu, "HelloWorld"); ExpressionStmt expressionStmt = (ExpressionStmt)clazz.getMethodsByName("main").get(0).getBody().get().getStatement(0); MethodCallExpr methodCallExpr = (MethodCallExpr) expressionStmt.getExpression(); Expression firstParameter = methodCallExpr.getArgument(0); JavaParserFacade javaParserFacade = JavaParserFacade.get(typeSolver); javaParserFacade.solve(firstParameter).isSolved(); } @Test public void issue144WithReflectionTypeSolver() throws ParseException { CompilationUnit cu = parseSampleWithStandardExtension("issue144/HelloWorld"); ClassOrInterfaceDeclaration clazz = Navigator.demandClass(cu, "HelloWorld"); ExpressionStmt expressionStmt = (ExpressionStmt)clazz.getMethodsByName("main").get(0).getBody().get().getStatement(0); MethodCallExpr methodCallExpr = (MethodCallExpr) expressionStmt.getExpression(); Expression firstParameter = methodCallExpr.getArgument(0); JavaParserFacade javaParserFacade = JavaParserFacade.get(new ReflectionTypeSolver(true)); assertEquals(true, javaParserFacade.solve(firstParameter).isSolved()); } @Test public void issue144WithCombinedTypeSolver() throws ParseException { CompilationUnit cu = parseSampleWithStandardExtension("issue144/HelloWorld"); ClassOrInterfaceDeclaration clazz = Navigator.demandClass(cu, "HelloWorld"); ExpressionStmt expressionStmt = (ExpressionStmt)clazz.getMethodsByName("main").get(0).getBody().get().getStatement(0); MethodCallExpr methodCallExpr = (MethodCallExpr) expressionStmt.getExpression(); Expression firstParameter = methodCallExpr.getArgument(0); JavaParserFacade javaParserFacade = JavaParserFacade.get(new CombinedTypeSolver(typeSolver, new ReflectionTypeSolver(true))); assertEquals(true, javaParserFacade.solve(firstParameter).isSolved()); } }
package net.thauvin.erik.semver; import com.github.mustachejava.DefaultMustacheFactory; import com.github.mustachejava.Mustache; import com.github.mustachejava.MustacheFactory; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import javax.annotation.processing.*; import javax.lang.model.SourceVersion; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.PackageElement; import javax.lang.model.element.TypeElement; import javax.tools.Diagnostic; import javax.tools.FileObject; import java.io.*; import java.nio.charset.StandardCharsets; import java.util.HashSet; import java.util.Properties; import java.util.Set; /** * The <code>VersionProcessor</code> class implements a semantic version annotation processor. * * @author <a href="mailto:erik@thauvin.net" target="_blank">Erik C. Thauvin</a> * @created 2016-01-13 * @since 1.0 */ public class VersionProcessor extends AbstractProcessor { private Filer filer; private Messager messager; private void error(final String s) { log(Diagnostic.Kind.ERROR, s); } private void error(final String s, final Throwable t) { log(Diagnostic.Kind.ERROR, (t != null ? t.toString() : s)); } @SuppressFBWarnings(value = "PATH_TRAVERSAL_IN") private VersionInfo findValues(final Version version) throws IOException { final VersionInfo versionInfo = new VersionInfo(version); if (version.properties().length() > 0) { final File propsFile = new File(version.properties()); if (propsFile.isFile() && propsFile.canRead()) { note("Found properties: " + propsFile + " (" + propsFile.getAbsoluteFile().getParent() + ')'); final Properties p = new Properties(); try (final InputStreamReader reader = new InputStreamReader(new FileInputStream(propsFile), StandardCharsets.UTF_8)) { p.load(reader); versionInfo.setProject( p.getProperty(version.keysPrefix() + version.projectKey(), version.project())); versionInfo.setMajor( parseIntProperty(p, version.keysPrefix() + version.majorKey(), version.major())); versionInfo.setMinor( parseIntProperty(p, version.keysPrefix() + version.minorKey(), version.minor())); versionInfo.setPatch( parseIntProperty(p, version.keysPrefix() + version.patchKey(), version.patch())); versionInfo.setBuildMeta( p.getProperty(version.keysPrefix() + version.buildMetaKey(), version.buildMeta())); versionInfo.setBuildMetaPrefix( p.getProperty(version.keysPrefix() + version.buildMetaPrefixKey(), version.buildMetaPrefix())); versionInfo.setPreRelease( p.getProperty(version.keysPrefix() + version.preReleaseKey(), version.preRelease())); versionInfo.setPreReleasePrefix( p.getProperty(version.keysPrefix() + version.preReleasePrefixKey(), version.preReleasePrefix())); versionInfo.setSeparator( p.getProperty(version.keysPrefix() + version.separatorKey(), version.separator())); } } else { final String findOrRead; if (propsFile.canRead()) { findOrRead = "find"; } else { findOrRead = "read"; } error("Could not " + findOrRead + ": " + propsFile); throw new FileNotFoundException("Could not " + findOrRead + " the specified file: `" + propsFile.getAbsolutePath() + '`'); } } return versionInfo; } /** * {@inheritDoc} */ @Override public Set<String> getSupportedAnnotationTypes() { final Set<String> result = new HashSet<>(); result.add(Version.class.getCanonicalName()); return result; } /** * {@inheritDoc} */ @Override public SourceVersion getSupportedSourceVersion() { return SourceVersion.latestSupported(); } /** * {@inheritDoc} */ @Override public synchronized void init(final ProcessingEnvironment processingEnv) { 
super.init(processingEnv); filer = processingEnv.getFiler(); messager = processingEnv.getMessager(); } private void log(final Diagnostic.Kind kind, final String s) { if (messager != null) { messager.printMessage(kind, '[' + VersionProcessor.class.getSimpleName() + "] " + s + System.lineSeparator()); } } private void note(final String s) { log(Diagnostic.Kind.NOTE, s); } private int parseIntProperty(final Properties p, final String property, final int defaultValue) { try { return Integer.parseInt(p.getProperty(property, Integer.toString(defaultValue)).trim()); } catch (NumberFormatException ignore) { warn("Invalid property value: " + property); return defaultValue; } } /** * {@inheritDoc} */ @Override public boolean process(final Set<? extends TypeElement> annotations, final RoundEnvironment roundEnv) { for (final Element element : roundEnv.getElementsAnnotatedWith(Version.class)) { final Version version = element.getAnnotation(Version.class); if (element.getKind() == ElementKind.CLASS) { final Element enclosingElement = element.getEnclosingElement(); if (enclosingElement.getKind() == ElementKind.PACKAGE) { final PackageElement packageElement = (PackageElement) enclosingElement; try { final VersionInfo versionInfo = findValues(version); if (version.packageName().equals(Constants.EMPTY)) { versionInfo.setPackageName(packageElement.getQualifiedName().toString()); } note("Found version: " + versionInfo.getVersion()); final String template; if (version.template().equals(Constants.DEFAULT_JAVA_TEMPLATE) && new File(Constants.DEFAULT_TEMPLATE_NAME).exists()) { template = Constants.DEFAULT_TEMPLATE_NAME; } else if (version.template().equals(Constants.DEFAULT_JAVA_TEMPLATE) && version.type().equals(Constants.KOTLIN_TYPE)) { template = Constants.DEFAULT_KOTLIN_TEMPLATE; } else { template = version.template(); } writeTemplate(version.type(), versionInfo, template); } catch (IOException e) { error("IOException occurred while running the annotation processor: " + e.getMessage(), e); } } } } return true; } private void warn(final String s) { log(Diagnostic.Kind.WARNING, s); } @SuppressFBWarnings(value = "PATH_TRAVERSAL_IN") private void writeTemplate(final String type, final VersionInfo versionInfo, final String template) throws IOException { final MustacheFactory mf = new DefaultMustacheFactory(); final Mustache mustache = mf.compile(template); final String templateName; switch (mustache.getName()) { case Constants.DEFAULT_JAVA_TEMPLATE: templateName = "default (Java)"; break; case Constants.DEFAULT_KOTLIN_TEMPLATE: templateName = "default (Kotlin)"; break; default: templateName = mustache.getName(); break; } note("Loaded template: " + templateName); final String fileName = versionInfo.getClassName() + '.' 
+ type; if (type.equalsIgnoreCase(Constants.KOTLIN_TYPE)) { final String kaptGenDir = processingEnv.getOptions().get(Constants.KAPT_KOTLIN_GENERATED_OPTION_NAME); if (kaptGenDir == null) { throw new IOException("Could not find the target directory for generated Kotlin files."); } final File versionFile = new File(kaptGenDir, fileName); if (!versionFile.getParentFile().exists() && !versionFile.getParentFile().mkdirs()) { note("Could not create target directory: " + versionFile.getParentFile().getAbsolutePath()); } try (final OutputStreamWriter osw = new OutputStreamWriter(new FileOutputStream(versionFile), StandardCharsets.UTF_8)) { mustache.execute(osw, versionInfo).flush(); } note("Generated source: " + fileName + " (" + versionFile.getParentFile().getAbsolutePath() + ')'); } else { final FileObject jfo = filer.createSourceFile(versionInfo.getPackageName() + '.' + versionInfo.getClassName()); try (final Writer writer = jfo.openWriter()) { mustache.execute(writer, versionInfo).flush(); } note("Generated source: " + fileName + " (" + new File(jfo.getName()).getAbsoluteFile().getParent() + ')'); } } }
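/*
 * Hedged usage sketch for the processor above. The @Version element names used here
 * (properties) appear in the processor code; the generated class name "GeneratedVersion",
 * its fields, and the property keys are assumptions chosen for illustration rather than
 * documented defaults.
 */
package com.example; // hypothetical package

import net.thauvin.erik.semver.Version;

@Version(properties = "version.properties")
public class MyApp {
    public static void main(final String[] args) {
        // At compile time the processor reads version.properties (see findValues above),
        // fills a VersionInfo, and renders the Mustache template into a source file.
        // With the assumed defaults that file might expose constants such as:
        //     GeneratedVersion.PROJECT  -> "MyApp"
        //     GeneratedVersion.VERSION  -> "1.2.3"
    }
}

// version.properties -- keys assume a keysPrefix of "version."
//   version.project=MyApp
//   version.major=1
//   version.minor=2
//   version.patch=3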
package nl.basjes.parse.useragent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class DebugUserAgent extends UserAgent { private static final Logger LOG = LoggerFactory.getLogger(DebugUserAgent.class); List<UserAgent> appliedMatcherResults = new ArrayList<>(32); @Override public void set(UserAgent newValuesUserAgent) { appliedMatcherResults.add(new UserAgent(newValuesUserAgent)); super.set(newValuesUserAgent); } @Override public void reset() { appliedMatcherResults.clear(); super.reset(); } @Override public String toString() { return super.toString(); } public String toMatchTrace() { StringBuilder sb = new StringBuilder(4096); sb.append("+==========================================\n"); sb.append("| Matcher results that have been combined \n"); for (UserAgent result: appliedMatcherResults) { sb.append("+------------------------------------------\n"); for (String fieldName : result.getAvailableFieldNamesSorted()) { AgentField field = result.get(fieldName); if (field.confidence >= 0) { sb.append('|').append(fieldName).append('(').append(field.confidence).append(") = ").append(field.getValue()).append('\n'); } } } sb.append("+==========================================\n"); return sb.toString(); } public boolean analyzeMatchersResult() { boolean passed = true; for (String fieldName : getAvailableFieldNamesSorted()) { String value = getValue(fieldName); Map<Long, String> receivedValues = new HashMap<>(32); for (UserAgent result: appliedMatcherResults) { AgentField partialField = result.get(fieldName); if (partialField != null && partialField.confidence >= 0) { String previousValue = receivedValues.get(partialField.confidence); if (previousValue != null) { if (!previousValue.equals(partialField.getValue())) { if (passed) { LOG.error("***********************************************************"); LOG.error("*** REALLY IMPORTANT ERRORS IN THE RULESET ***"); LOG.error("*** YOU MUST CHANGE THE CONFIDENCE LEVELS OF YOUR RULES ***"); LOG.error("***********************************************************"); } passed = false; LOG.error("Found different value for \"{}\" with SAME confidence {}: \"{}\" and \"{}\"", fieldName, partialField.confidence, previousValue, partialField.getValue()); } } else { receivedValues.put(partialField.confidence, partialField.getValue()); } } } } return passed; } }
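/*
 * Hedged sketch of the intended debugging flow (illustrative only). How the analyzer feeds
 * matcher results into set(...) is not shown in this class; "analyzer.parse(debugAgent)" is a
 * hypothetical call standing in for that step, and the no-argument constructor is assumed to
 * be inherited from UserAgent.
 */
DebugUserAgent debugAgent = new DebugUserAgent();
// analyzer.parse(debugAgent);                  // hypothetical: each matcher that fires calls set(...)
System.out.println(debugAgent.toMatchTrace());  // every partial result that was combined, per field
if (!debugAgent.analyzeMatchersResult()) {
    System.err.println("Two matchers set the same field to different values at the same confidence");
}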
package nl.tudelft.lifetiles.graph.view; import java.util.LinkedList; import java.util.List; import java.util.PriorityQueue; import javafx.scene.Group; import javafx.scene.paint.Color; import nl.tudelft.lifetiles.graph.models.Graph; import nl.tudelft.lifetiles.graph.models.sequence.SequenceSegment; /** * The TileView is responsible for displaying the graph given from * the TileController. * */ public class TileView { /** * Default color of a tile element. */ private static Color defaultColor = Color.web("a1d3ff");; /** * The edges contains all EdgeLines to be displayed. */ private Group edges; /** * The nodes contains all Vertices to be displayed. */ private Group nodes; /** * The root contains all the to be displayed * elements. */ private Group root; /** * The lanes list which contains the occupation of the lanes inside the * tileview. */ private List<Long> lanes; /** * Create the TileView by initializing the groups where the to be drawn * vertices and edges are stored. */ public TileView() { root = new Group(); nodes = new Group(); edges = new Group(); lanes = new LinkedList<Long>(); } /** * Change Vertex colour. * * @param v * vertex to be changed. * @param color * the new colour */ public final void changeVertexColor(final VertexView v, final Color color) { v.setColor(color); } /** * Draw the given graph. * * @param gr * Graph to be drawn * @return the elements that must be displayed on the screen */ public final Group drawGraph(final Graph<SequenceSegment> gr) { PriorityQueue<SequenceSegment> it = sortStartVar(gr); while (!it.isEmpty()) { SequenceSegment segment = it.poll(); drawVertexLane(segment); } root.getChildren().addAll(edges, nodes); return root; } /** * Draws a given segment to an available position in the graph. * * @param segment * segment to be drawn */ private void drawVertexLane(final SequenceSegment segment) { String text = segment.getContent().toString(); long start = segment.getUnifiedStart(); long width = segment.getContent().getLength(); long height = segment.getSources().size(); Color color = sequenceColor(segment.getMutation()); for (int index = 0; index < lanes.size(); index++) { if (lanes.get(index) <= segment.getUnifiedStart() && segmentFree(index, segment)) { drawVertex(text, start, index, width, height, color); segmentInsert(index, segment); return; } } drawVertex(text, start, lanes.size(), width, height, color); segmentInsert(lanes.size(), segment); } /** * Returns the mutation color of a given mutation. Default if no mutation. * * @param mutation * mutation to return color from. * @return color of the mutation */ private Color sequenceColor(final Mutation mutation) { if (mutation == null) { return defaultColor; } else { return mutation.getColor(); } } /** * Create a Vertex that can be displayed on the screen. * * @param text * text of the dna segment * @param x * top left x coordinate * @param y * top left y coordinate * @param width * the width of the vertex * @param height * the height of the vertex * @param color * the colour of the vertex */ private void drawVertex(final String text, final double x, final double y, final double width, final double height, final Color color) { VertexView v = new VertexView(text, x, y, width, height, color); nodes.getChildren().add(v); } /** * Check if there is a free spot to draw the segment at this location. 
* * @param ind * location in the linked list of already drawn segments * @param segment * segment to be drawn * @return Boolean indicating if there is a free spot */ private boolean segmentFree(final int ind, final SequenceSegment segment) { for (int height = 0; height < segment.getSources().size(); height++) { int position = ind + height; if ((position < lanes.size()) && (lanes.get(position) > segment.getUnifiedStart())) { return false; } } return true; } /** * Insert a segment in the linked list. * * @param index * location in the linked list of already drawn segments * @param segment * segment to be inserted */ private void segmentInsert(final int index, final SequenceSegment segment) { for (int height = 0; height < segment.getSources().size(); height++) { int position = index + height; if (position < lanes.size()) { lanes.set(position, segment.getUnifiedEnd()); } else { lanes.add(position, segment.getUnifiedEnd()); } } } /** * This will sort the nodes based on the * starting position. * Beware: temporary code which will be obsolete with #56 Internal * sorting of edges on destination starting position * * @param gr * the graph that contains the to be sorted nodes * @return Iterator of the sorted list */ @Deprecated private PriorityQueue<SequenceSegment> sortStartVar( final Graph<SequenceSegment> gr) { PriorityQueue<SequenceSegment> it; it = new PriorityQueue<SequenceSegment>(); for (SequenceSegment segment : gr.getAllVertices()) { it.add(segment); } return it; } }
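/*
 * Standalone sketch of the lane-packing strategy used by drawVertexLane above, stripped of the
 * JavaFX drawing so the greedy first-fit idea is visible on its own. Segments are assumed to
 * arrive sorted by start position; lanes.get(i) holds the end coordinate currently occupying
 * lane i, and a segment of height h needs h consecutive free lanes. The Segment class here is
 * illustrative and not part of the view code.
 */
package nl.tudelft.lifetiles.graph.view;

import java.util.ArrayList;
import java.util.List;

class LanePackingSketch {
    static final class Segment {
        final long start;
        final long end;
        final int height;
        Segment(final long start, final long end, final int height) {
            this.start = start;
            this.end = end;
            this.height = height;
        }
    }

    private final List<Long> lanes = new ArrayList<Long>();

    /** Returns the first lane index where the segment fits, extending the lane list if needed. */
    int place(final Segment s) {
        for (int index = 0; index < lanes.size(); index++) {
            if (lanes.get(index) <= s.start && free(index, s)) {
                occupy(index, s);
                return index;
            }
        }
        int index = lanes.size();
        occupy(index, s);
        return index;
    }

    /** A spot is free when every lane the segment would cover ends before the segment starts. */
    private boolean free(final int index, final Segment s) {
        for (int h = 0; h < s.height; h++) {
            int pos = index + h;
            if (pos < lanes.size() && lanes.get(pos) > s.start) {
                return false;
            }
        }
        return true;
    }

    /** Marks the covered lanes as occupied up to the segment's end coordinate. */
    private void occupy(final int index, final Segment s) {
        for (int h = 0; h < s.height; h++) {
            int pos = index + h;
            if (pos < lanes.size()) {
                lanes.set(pos, s.end);
            } else {
                lanes.add(pos, s.end);
            }
        }
    }
}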
package fm.jiecao.jcvideoplayer_lib; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.support.v7.app.AppCompatActivity; import android.text.TextUtils; import android.util.AttributeSet; import android.view.Gravity; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.view.WindowManager; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.ProgressBar; import android.widget.SeekBar; import android.widget.TextView; import android.widget.Toast; import java.lang.reflect.Constructor; import java.util.Objects; import java.util.Timer; import java.util.TimerTask; public class JCVideoPlayerStandard extends JCVideoPlayer { protected static Timer DISSMISS_CONTROL_VIEW_TIMER; public ImageView backButton; public ProgressBar bottomProgressBar, loadingProgressBar; public TextView titleTextView; public ImageView thumbImageView; public ImageView coverImageView; public ImageView tinyBackImageView; protected DismissControlViewTimerTask mDismissControlViewTimerTask; public JCVideoPlayerStandard(Context context) { super(context); } public JCVideoPlayerStandard(Context context, AttributeSet attrs) { super(context, attrs); } @Override public void init(Context context) { super.init(context); bottomProgressBar = (ProgressBar) findViewById(R.id.bottom_progressbar); titleTextView = (TextView) findViewById(R.id.title); backButton = (ImageView) findViewById(R.id.back); thumbImageView = (ImageView) findViewById(R.id.thumb); coverImageView = (ImageView) findViewById(R.id.cover); loadingProgressBar = (ProgressBar) findViewById(R.id.loading); tinyBackImageView = (ImageView) findViewById(R.id.back_tiny); thumbImageView.setOnClickListener(this); backButton.setOnClickListener(this); tinyBackImageView.setOnClickListener(this); } @Override public boolean setUp(String url, int screen, Object... 
objects) { if (objects.length == 0) return false; if (super.setUp(url, screen, objects)) { titleTextView.setText(objects[0].toString()); if (currentScreen == SCREEN_WINDOW_FULLSCREEN) { fullscreenButton.setImageResource(R.drawable.jc_shrink); backButton.setVisibility(View.VISIBLE); tinyBackImageView.setVisibility(View.INVISIBLE); } else if (currentScreen == SCREEN_LAYOUT_LIST) { fullscreenButton.setImageResource(R.drawable.jc_enlarge); backButton.setVisibility(View.GONE); tinyBackImageView.setVisibility(View.INVISIBLE); } else if (currentScreen == SCREEN_WINDOW_TINY) { tinyBackImageView.setVisibility(View.VISIBLE); setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); } return true; } return false; } @Override public int getLayoutId() { return R.layout.jc_layout_standard; } @Override public void setUiWitStateAndScreen(int state) { super.setUiWitStateAndScreen(state); switch (currentState) { case CURRENT_STATE_NORMAL: changeUiToNormal(); break; case CURRENT_STATE_PREPAREING: changeUiToPrepareingShow(); startDismissControlViewTimer(); break; case CURRENT_STATE_PLAYING: changeUiToPlayingShow(); startDismissControlViewTimer(); break; case CURRENT_STATE_PAUSE: changeUiToPauseShow(); cancelDismissControlViewTimer(); break; case CURRENT_STATE_ERROR: changeUiToError(); break; case CURRENT_STATE_AUTO_COMPLETE: changeUiToCompleteShow(); cancelDismissControlViewTimer(); bottomProgressBar.setProgress(100); break; case CURRENT_STATE_PLAYING_BUFFERING_START: changeUiToPlayingBufferingShow(); break; } } @Override public boolean onTouch(View v, MotionEvent event) { int id = v.getId(); if (id == R.id.surface_container) { switch (event.getAction()) { case MotionEvent.ACTION_DOWN: break; case MotionEvent.ACTION_MOVE: break; case MotionEvent.ACTION_UP: startDismissControlViewTimer(); if (mChangePosition) { int duration = getDuration(); int progress = mSeekTimePosition * 100 / (duration == 0 ? 
1 : duration); bottomProgressBar.setProgress(progress); } if (!mChangePosition && !mChangeVolume) { onEvent(JCBuriedPointStandard.ON_CLICK_BLANK); onClickUiToggle(); } break; } } else if (id == R.id.progress) { switch (event.getAction()) { case MotionEvent.ACTION_DOWN: cancelDismissControlViewTimer(); break; case MotionEvent.ACTION_UP: startDismissControlViewTimer(); break; } } return super.onTouch(v, event); } @Override public void onClick(View v) { super.onClick(v); int i = v.getId(); if (i == R.id.thumb) { if (TextUtils.isEmpty(url)) { Toast.makeText(getContext(), getResources().getString(R.string.no_url), Toast.LENGTH_SHORT).show(); return; } if (currentState == CURRENT_STATE_NORMAL) { if (!url.startsWith("file") && !JCUtils.isWifiConnected(getContext()) && !WIFI_TIP_DIALOG_SHOWED) { showWifiDialog(); return; } startPlayLocic(); } else if (currentState == CURRENT_STATE_AUTO_COMPLETE) { onClickUiToggle(); } } else if (i == R.id.surface_container) { startDismissControlViewTimer(); } else if (i == R.id.back) { backPress(); } else if (i == R.id.back_tiny) { backPress(); } } @Override public void showWifiDialog() { super.showWifiDialog(); AlertDialog.Builder builder = new AlertDialog.Builder(getContext()); builder.setMessage(getResources().getString(R.string.tips_not_wifi)); builder.setPositiveButton(getResources().getString(R.string.tips_not_wifi_confirm), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); startPlayLocic(); WIFI_TIP_DIALOG_SHOWED = true; } }); builder.setNegativeButton(getResources().getString(R.string.tips_not_wifi_cancel), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); } }); builder.create().show(); } @Override public void onStartTrackingTouch(SeekBar seekBar) { super.onStartTrackingTouch(seekBar); cancelDismissControlViewTimer(); } @Override public void onStopTrackingTouch(SeekBar seekBar) { super.onStopTrackingTouch(seekBar); startDismissControlViewTimer(); } public void startPlayLocic() { prepareVideo(); startDismissControlViewTimer(); onEvent(JCBuriedPointStandard.ON_CLICK_START_THUMB); } public void onClickUiToggle() { if (currentState == CURRENT_STATE_PREPAREING) { if (bottomContainer.getVisibility() == View.VISIBLE) { changeUiToPrepareingClear(); } else { changeUiToPrepareingShow(); } } else if (currentState == CURRENT_STATE_PLAYING) { if (bottomContainer.getVisibility() == View.VISIBLE) { changeUiToPlayingClear(); } else { changeUiToPlayingShow(); } } else if (currentState == CURRENT_STATE_PAUSE) { if (bottomContainer.getVisibility() == View.VISIBLE) { changeUiToPauseClear(); } else { changeUiToPauseShow(); } } else if (currentState == CURRENT_STATE_AUTO_COMPLETE) { if (bottomContainer.getVisibility() == View.VISIBLE) { changeUiToCompleteClear(); } else { changeUiToCompleteShow(); } } else if (currentState == CURRENT_STATE_PLAYING_BUFFERING_START) { if (bottomContainer.getVisibility() == View.VISIBLE) { changeUiToPlayingBufferingClear(); } else { changeUiToPlayingBufferingShow(); } } } @Override public void setProgressAndTime(int progress, int secProgress, int currentTime, int totalTime) { super.setProgressAndTime(progress, secProgress, currentTime, totalTime); if (progress != 0) bottomProgressBar.setProgress(progress); if (secProgress != 0) bottomProgressBar.setSecondaryProgress(secProgress); } @Override public void resetProgressAndTime() { super.resetProgressAndTime(); bottomProgressBar.setProgress(0); 
bottomProgressBar.setSecondaryProgress(0); } //Unified management Ui public void changeUiToNormal() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.VISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.VISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToPrepareingShow() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToPrepareingClear() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToPlayingShow() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToPlayingClear() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToPauseShow() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToPauseClear() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToPlayingBufferingShow() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.INVISIBLE); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, 
View.INVISIBLE, View.INVISIBLE); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToPlayingBufferingClear() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE); updateStartImage(); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE); updateStartImage(); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToCompleteShow() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.VISIBLE, View.VISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToCompleteClear() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE); updateStartImage(); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.VISIBLE); updateStartImage(); break; case SCREEN_WINDOW_TINY: break; } } public void changeUiToError() { switch (currentScreen) { case SCREEN_LAYOUT_LIST: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_FULLSCREEN: setAllControlsVisible(View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE, View.INVISIBLE, View.VISIBLE, View.INVISIBLE); updateStartImage(); break; case SCREEN_WINDOW_TINY: break; } } public void setAllControlsVisible(int topCon, int bottomCon, int startBtn, int loadingPro, int thumbImg, int coverImg, int bottomPro) { topContainer.setVisibility(topCon); bottomContainer.setVisibility(bottomCon); startButton.setVisibility(startBtn); loadingProgressBar.setVisibility(loadingPro); thumbImageView.setVisibility(thumbImg); coverImageView.setVisibility(coverImg); bottomProgressBar.setVisibility(bottomPro); } public void updateStartImage() { if (currentState == CURRENT_STATE_PLAYING) { startButton.setImageResource(R.drawable.jc_click_pause_selector); } else if (currentState == CURRENT_STATE_ERROR) { startButton.setImageResource(R.drawable.jc_click_error_selector); } else { startButton.setImageResource(R.drawable.jc_click_play_selector); } } protected Dialog mProgressDialog; protected ProgressBar mDialogProgressBar; protected TextView mDialogSeekTime; protected TextView mDialogTotalTime; protected ImageView mDialogIcon; @Override public void showProgressDialog(float deltaX, String seekTime, int seekTimePosition, String totalTime, int totalTimeDuration) { super.showProgressDialog(deltaX, seekTime, seekTimePosition, totalTime, totalTimeDuration); if (mProgressDialog == null) { View localView = LayoutInflater.from(getContext()).inflate(fm.jiecao.jcvideoplayer_lib.R.layout.jc_progress_dialog, null); View content = localView.findViewById(R.id.content); content.setRotation(90); mDialogProgressBar = ((ProgressBar) localView.findViewById(fm.jiecao.jcvideoplayer_lib.R.id.duration_progressbar)); mDialogSeekTime = ((TextView) 
localView.findViewById(fm.jiecao.jcvideoplayer_lib.R.id.tv_current)); mDialogTotalTime = ((TextView) localView.findViewById(fm.jiecao.jcvideoplayer_lib.R.id.tv_duration)); mDialogIcon = ((ImageView) localView.findViewById(fm.jiecao.jcvideoplayer_lib.R.id.duration_image_tip)); mProgressDialog = new Dialog(getContext(), fm.jiecao.jcvideoplayer_lib.R.style.jc_style_dialog_progress); mProgressDialog.setContentView(localView); mProgressDialog.getWindow().addFlags(Window.FEATURE_ACTION_BAR); mProgressDialog.getWindow().addFlags(32); mProgressDialog.getWindow().addFlags(16); mProgressDialog.getWindow().setLayout(-2, -2); WindowManager.LayoutParams localLayoutParams = mProgressDialog.getWindow().getAttributes(); localLayoutParams.gravity = Gravity.CENTER_VERTICAL | Gravity.RIGHT; localLayoutParams.x = getResources().getDimensionPixelOffset(fm.jiecao.jcvideoplayer_lib.R.dimen.jc_progress_dialog_margin_top) / 2; mProgressDialog.getWindow().setAttributes(localLayoutParams); } if (!mProgressDialog.isShowing()) { mProgressDialog.show(); } mDialogSeekTime.setText(seekTime); mDialogTotalTime.setText(" / " + totalTime); mDialogProgressBar.setProgress(totalTimeDuration <= 0 ? 0 : (seekTimePosition * 100 / totalTimeDuration)); if (deltaX > 0) { mDialogIcon.setBackgroundResource(R.drawable.jc_forward_icon); } else { mDialogIcon.setBackgroundResource(R.drawable.jc_backward_icon); } } @Override public void dismissProgressDialog() { super.dismissProgressDialog(); if (mProgressDialog != null) { mProgressDialog.dismiss(); } } protected Dialog mVolumeDialog; protected ProgressBar mDialogVolumeProgressBar; @Override public void showVolumDialog(float deltaY, int volumePercent) { super.showVolumDialog(deltaY, volumePercent); if (mVolumeDialog == null) { View localView = LayoutInflater.from(getContext()).inflate(R.layout.jc_volume_dialog, null); View content = localView.findViewById(R.id.content); content.setRotation(90); mDialogVolumeProgressBar = ((ProgressBar) localView.findViewById(R.id.volume_progressbar)); mVolumeDialog = new Dialog(getContext(), R.style.jc_style_dialog_progress); mVolumeDialog.setContentView(localView); mVolumeDialog.getWindow().addFlags(8); mVolumeDialog.getWindow().addFlags(32); mVolumeDialog.getWindow().addFlags(16); mVolumeDialog.getWindow().setLayout(-2, -2); WindowManager.LayoutParams localLayoutParams = mVolumeDialog.getWindow().getAttributes(); localLayoutParams.gravity = Gravity.CENTER_HORIZONTAL | Gravity.TOP; // localLayoutParams.y = getContext().getResources().getDimensionPixelOffset(R.dimen.jc_volume_dialog_margin_left); mVolumeDialog.getWindow().setAttributes(localLayoutParams); } if (!mVolumeDialog.isShowing()) { mVolumeDialog.show(); } mDialogVolumeProgressBar.setProgress(volumePercent); } @Override public void dismissVolumDialog() { super.dismissVolumDialog(); if (mVolumeDialog != null) { mVolumeDialog.dismiss(); } } public void startDismissControlViewTimer() { cancelDismissControlViewTimer(); DISSMISS_CONTROL_VIEW_TIMER = new Timer(); mDismissControlViewTimerTask = new DismissControlViewTimerTask(); DISSMISS_CONTROL_VIEW_TIMER.schedule(mDismissControlViewTimerTask, 2500); } public void cancelDismissControlViewTimer() { if (DISSMISS_CONTROL_VIEW_TIMER != null) { DISSMISS_CONTROL_VIEW_TIMER.cancel(); } if (mDismissControlViewTimerTask != null) { mDismissControlViewTimerTask.cancel(); } } public class DismissControlViewTimerTask extends TimerTask { @Override public void run() { if (currentState != CURRENT_STATE_NORMAL && currentState != CURRENT_STATE_ERROR && currentState != 
CURRENT_STATE_AUTO_COMPLETE) { if (getContext() != null && getContext() instanceof Activity) { ((Activity) getContext()).runOnUiThread(new Runnable() { @Override public void run() { bottomContainer.setVisibility(View.INVISIBLE); topContainer.setVisibility(View.INVISIBLE); startButton.setVisibility(View.INVISIBLE); if (currentScreen != SCREEN_WINDOW_TINY) { bottomProgressBar.setVisibility(View.VISIBLE); } } }); } } } } }
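/*
 * Hedged usage sketch for the player above, as it might appear in an Activity. The layout id
 * R.id.video_player, the sample URL/title and the drawable are placeholders; setUp(...) and the
 * public thumbImageView field are defined in the class above, and SCREEN_LAYOUT_LIST is assumed
 * to be a publicly accessible constant on the JCVideoPlayer parent.
 */
package fm.jiecao.jcvideoplayer_lib; // hypothetical placement for the sketch

import android.app.Activity;
import android.os.Bundle;

public class PlayerActivitySketch extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_player); // assumed layout containing the player view
        JCVideoPlayerStandard player = (JCVideoPlayerStandard) findViewById(R.id.video_player);
        // setUp(...) requires at least one extra argument: objects[0] becomes the title text.
        player.setUp("http://example.com/video.mp4", JCVideoPlayerStandard.SCREEN_LAYOUT_LIST, "Demo video");
        // The thumbnail ImageView is public, so any image loader can populate it directly.
        player.thumbImageView.setImageResource(R.drawable.demo_thumb); // assumed drawable
    }
}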
package nl.wur.plantbreeding.gff2RDF.object; import java.util.ArrayList; /** * This class represents Gene information. * @author Pierre-Yves Chibon -- py@chibon.fr */ public class Gene { /** Locus of the gene. */ private String locus = ""; /** Type of the gene. */ private String type = ""; /** Description of the gene. */ private String description = ""; /** Gene synonym. */ private String synonym = ""; /** Name of the gene. */ private String name = ""; /** List of GO terms associated with the gene. */ private ArrayList<String> goterms = new ArrayList<String>(); /** Start position of the gene on the genome. */ private int start; /** Stop position of the gene on the genome. */ private int stop; /** The Chromosome of the gene on the genome. */ private String chromosome; /** The orientation of the gene on the genome. */ private String orientation; /** * Return the type of the gene. * @return String describing the type of the gene */ public final String getType() { return type; } /** * Set the type of the gene. * @param typetmp String describing the type of the gene */ public final void setType(final String typetmp) { this.type = typetmp; } /** * Return a String describing the function of the gene. * @return a String describing the gene. */ public String getDescription() { return description; } /** * Set the description of the gene (its function). * @param description a String representing the description of the gene. */ public void setDescription(String description) { this.description = description; } /** * Get the list of GO terms associated with this gene. * @return ArrayList of String representing the GO ID */ public final ArrayList<String> getGoterms() { return goterms; } /** * Set the list of GO terms (GO ID) associated with this gene. * @param tmpgoterms ArrayList of String representing the GO ID */ public final void setGoterms(final ArrayList<String> tmpgoterms) { this.goterms = tmpgoterms; } /** * Retrieve the locus of the gene on the Arabidopsis genome. * @return a String of the locus of the gene */ public final String getLocus() { return locus; } /** * Set the locus of the gene on the Arabidopsis genome. * @param tmplocus a String of the locus of the gene */ public final void setLocus(final String tmplocus) { this.locus = tmplocus; } /** * Retrieve the name of the gene. * @return a String of the name of the gene */ public final String getName() { return name; } /** * Set the name of the gene. * @param tmpname a String of the name of the gene */ public final void setName(final String tmpname) { this.name = tmpname; } /** * Retrieve the start position of the gene on the genome. * @return an int of the start position */ public final int getStart() { return start; } /** * Set the start position of the gene on the genome. * @param tmpstart an int of the start position */ public final void setStart(final int tmpstart) { this.start = tmpstart; } /** * Set the start position of the gene on the genome. * @param tmpstart a String of the start position */ public final void setStart(final String tmpstart) { this.start = Integer.parseInt(tmpstart); } /** * Retrieve the stop position of the gene on the genome. * @return an int of the stop position */ public final int getStop() { return stop; } /** * Set the stop position of the gene on the genome. * @param tmpstop an int of the stop position */ public final void setStop(final int tmpstop) { this.stop = tmpstop; } /** * Set the stop position of the gene on the genome. 
* @param tmpstop a String of the stop position */ public final void setStop(final String tmpstop) { this.stop = Integer.parseInt(tmpstop); } /** * Return the chromosome of the gene. * @return a String representing the chromosome of the gene */ public final String getChromosome() { return chromosome; } /** * Set the chromosome of the gene. * @param tmpchromosome a String of the chromosome of the gene */ public final void setChromosome(final String tmpchromosome) { this.chromosome = tmpchromosome; } /** * Get the synonym gene name for this gene. * @return the synonym gene name for this gene. */ public final String getSynonym() { return synonym; } /** * Set a synonym name for the gene. * @param tmpsynonym a String of a synonym name */ public final void setSynonym(final String tmpsynonym) { this.synonym = tmpsynonym; } /** * Retrieve the orientation of this gene. * @return a String representing the orientation of this gene * (should be either + or -) */ public String getOrientation() { return orientation; } /** * Set the orientation of this gene. * @param orientation a String which gives the information about the * orientation of this gene on the genome (should be either + or -). */ public void setOrientation(String orientation) { this.orientation = orientation; } /* * Non-basic functions */ /** * Add a GO ID to the list of GO terms associated with the gene * if the GO ID is not in the list yet. * @param tmpgo a String of the GO ID. */ public final void addGoTerm(final String tmpgo) { if (!goterms.contains(tmpgo)) { goterms.add(tmpgo); } } /** * Add the start and stop position of the gene on the genome. * Invert start and stop if start is higher than stop. * @param tmpstart an int of the start position * @param tmpstop an int of the stop position */ public final void addPosition(final int tmpstart, final int tmpstop) { if (tmpstart > tmpstop) { start = tmpstop; stop = tmpstart; } else { start = tmpstart; stop = tmpstop; } } /** * Add the start and stop position of the gene on the genome. * Invert start and stop if start is higher than stop. * @param tmpstart a String of the start position * @param tmpstop a String of the stop position */ public final void addPosition(final String tmpstart, final String tmpstop) { final int intstart = Integer.parseInt(tmpstart); final int intstop = Integer.parseInt(tmpstop); this.addPosition(intstart, intstop); } /** * Output the content of the object. */ @Override public final String toString() { String newline = System.getProperty("line.separator"); StringBuilder result = new StringBuilder(); result.append(this.getClass().getName()).append(" Object {").append(newline); result.append("Name: ").append(this.name).append(newline); result.append("Description: ").append(this.description).append(newline); result.append("Synonym: ").append(this.synonym).append(newline); result.append("Type: ").append(this.type).append(newline); result.append("Locus: ").append(this.locus).append(newline); result.append("Chromosome: ").append(this.chromosome).append(newline); result.append("Start: ").append(this.start).append(newline); result.append("Stop: ").append(this.stop).append(newline); result.append("}"); return result.toString(); } }
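/*
 * Small illustrative sketch (not part of the converter) exercising the two behaviours that are
 * easy to miss above: addPosition(...) swaps the coordinates when they arrive reversed, and
 * addGoTerm(...) silently ignores duplicates. The locus, name and GO values are example data.
 */
package nl.wur.plantbreeding.gff2RDF.object;

public final class GeneUsageSketch {
    private GeneUsageSketch() { }

    public static void main(final String[] args) {
        final Gene gene = new Gene();
        gene.setLocus("AT1G01010");       // example Arabidopsis locus
        gene.setName("NAC001");
        gene.setChromosome("1");
        gene.addPosition("5899", "3631"); // reversed on purpose: start becomes 3631, stop 5899
        gene.addGoTerm("GO:0003677");
        gene.addGoTerm("GO:0003677");     // duplicate, ignored: getGoterms().size() == 1
        System.out.println(gene);         // uses the toString() defined above
    }
}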
package org.asteriskjava.util.internal; import org.asteriskjava.util.Log; import org.asteriskjava.util.LogFactory; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.net.Socket; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.channels.FileChannel; import java.nio.charset.Charset; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; /** * Writes a trace file to the file system. */ public class FileTrace implements Trace { public static final String TRACE_DIRECTORY_PROPERTY = "org.asteriskjava.trace.directory"; protected static final String FILE_PREFIX = "aj-trace"; protected static final String FILE_SUFFIX = ".txt"; private final Log logger = LogFactory.getLog(FileTrace.class); // ok to share instance as access to this object is synchronized anyway private final DateFormat df = new SimpleDateFormat("yyyyMMddHHmmsszzz"); private Charset charset = Charset.forName("UTF-8"); private FileChannel channel; private boolean exceptionLogged = false; private RandomAccessFile randomAccessFile; public FileTrace(Socket socket) throws IOException { randomAccessFile = new RandomAccessFile(getFile(socket), "rw"); channel = randomAccessFile.getChannel(); print(getHeader(socket)); } private String getHeader(Socket socket) { final StringBuilder sb = new StringBuilder(); sb.append("Local: "); sb.append(socket.getLocalAddress()); sb.append(":"); sb.append(socket.getLocalPort()); sb.append("\n"); sb.append("Remote: "); sb.append(socket.getInetAddress()); sb.append(":"); sb.append(socket.getPort()); sb.append("\n"); sb.append("\n"); return sb.toString(); } private File getFile(Socket socket) { final String directory = System.getProperty(TRACE_DIRECTORY_PROPERTY, System.getProperty("java.io.tmpdir")); final String fileName = getFileName(socket); logger.info("Writing trace to " + directory + File.separator + fileName); return new File(directory, fileName); } private String getFileName(Socket socket) { final StringBuilder sb = new StringBuilder(FILE_PREFIX); sb.append("_"); sb.append(df.format(new Date())); sb.append("_"); sb.append(socket.getLocalAddress().getHostAddress()); sb.append("_"); sb.append(socket.getLocalPort()); sb.append("_"); sb.append(socket.getInetAddress().getHostAddress()); sb.append("_"); sb.append(socket.getPort()); sb.append(FILE_SUFFIX); return sb.toString(); } public synchronized void received(String s) { try { print(format("<<< ", s)); } catch (IOException e) { logException(e); } } public synchronized void sent(String s) { try { print(format(">>> ", s)); } catch (IOException e) { logException(e); } } private void logException(IOException e) { // avoid excessive failure logging if (exceptionLogged) { return; } logger.warn("Unable to write trace to disk", e); exceptionLogged = true; } protected String format(String prefix, String s) { final StringBuilder sb = new StringBuilder(df.format(new Date())); final String filler = String.format("%" + sb.length() + "s", ""); String[] lines = s.split("\n"); for (int i = 0; i < lines.length; i++) { if (i != 0) { sb.append(filler); } sb.append(" "); sb.append(prefix); sb.append(lines[i]); sb.append("\n"); } return sb.toString(); } protected void print(String s) throws IOException { final CharBuffer charBuffer = CharBuffer.allocate(s.length()); charBuffer.put(s); charBuffer.flip(); print(charset.encode(charBuffer)); } private void print(ByteBuffer byteBuffer) throws IOException { int bytesWritten = 0; while (bytesWritten < byteBuffer.remaining()) { // Loop if 
only part of the buffer contents get written. bytesWritten = channel.write(byteBuffer); if (bytesWritten == 0) { throw new IOException("Unable to write trace to channel. Media may be full."); } } } public void close() { try { randomAccessFile.close(); } catch (IOException e) { logException(e); } } }
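/*
 * Hedged usage sketch: how a connection class might attach this tracer to its socket. The host,
 * port and protocol lines are placeholders; every FileTrace member used here
 * (TRACE_DIRECTORY_PROPERTY, the constructor, sent, received, close) is defined above.
 */
package org.asteriskjava.util.internal;

import java.io.IOException;
import java.net.Socket;

public final class FileTraceUsageSketch {
    public static void main(String[] args) throws IOException {
        // Optionally redirect trace files away from java.io.tmpdir before connecting.
        System.setProperty(FileTrace.TRACE_DIRECTORY_PROPERTY, "/var/log/asterisk-java");

        Socket socket = new Socket("pbx.example.com", 5038); // example Asterisk Manager address
        FileTrace trace = new FileTrace(socket);
        trace.sent("Action: Login\r\n");         // record an outbound protocol line
        trace.received("Response: Success\r\n"); // record an inbound protocol line
        trace.close();
        socket.close();
    }
}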
package org.bychan.core.basic; import org.bychan.core.utils.StringUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class PositionTracerImpl<N> implements PositionTracer<N> { private String originalInputString; public PositionTracerImpl(@NotNull String originalInputString) { this.originalInputString = originalInputString; } @NotNull @Override public ParsingPosition getParsingPosition(@NotNull TokenStack<N> tokenStack) { Token<N> previous = tokenStack.previous(); final int startPosition = getStartPosition(previous); return new ParsingPosition(StringUtils.getTextPosition(originalInputString, startPosition), tokenStack); } private int getStartPosition(@Nullable Token<N> previous) { if (previous == null) { return 0; } LexingMatch match = previous.getMatch(); if (previous.getType().equals(EndTokenType.get())) { //Since the end token is past the end of the input text we use the position directly before //the end token. return match.getStartPosition() - 1; } return match.getStartPosition(); } }
package org.crygier.graphql; import graphql.Scalars; import graphql.schema.*; import org.crygier.graphql.annotation.SchemaDocumentation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.persistence.EntityManager; import javax.persistence.metamodel.*; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Field; import java.lang.reflect.Member; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; public class GraphQLSchemaBuilder { public static final String PAGINATION_REQUEST_PARAM_NAME = "paginationRequest"; private static final Logger log = LoggerFactory.getLogger(GraphQLSchemaBuilder.class); private EntityManager entityManager; public GraphQLSchemaBuilder(EntityManager entityManager) { this.entityManager = entityManager; } public GraphQLSchema getGraphQLSchema() { GraphQLSchema.Builder schemaBuilder = GraphQLSchema.newSchema(); schemaBuilder.query(getQueryType()); return schemaBuilder.build(); } private GraphQLObjectType getQueryType() { GraphQLObjectType.Builder queryType = GraphQLObjectType.newObject().name("QueryType_JPA").description("All encompassing schema for this JPA environment"); queryType.fields(entityManager.getMetamodel().getEntities().stream().map(this::getQueryFieldDefinition).collect(Collectors.toList())); queryType.fields(entityManager.getMetamodel().getEntities().stream().map(this::getQueryFieldPageableDefinition).collect(Collectors.toList())); return queryType.build(); } private GraphQLFieldDefinition getQueryFieldDefinition(EntityType<?> entityType) { return GraphQLFieldDefinition.newFieldDefinition() .name(entityType.getName()) .description(getSchemaDocumentation( entityType.getJavaType())) .type(new GraphQLList(getObjectType(entityType))) .dataFetcher(new JpaDataFetcher(entityManager, entityType)) .argument(entityType.getAttributes().stream().filter(this::isValidInput).map(this::getArgument).collect(Collectors.toList())) .build(); } private GraphQLFieldDefinition getQueryFieldPageableDefinition(EntityType<?> entityType) { GraphQLObjectType pageType = GraphQLObjectType.newObject() .name(entityType.getName() + "Connection") .description("'Connection' response wrapper object for " + entityType.getName() + ". When pagination or aggregation is requested, this object will be returned with metadata about the query.") .field(GraphQLFieldDefinition.newFieldDefinition().name("totalPages").description("Total number of pages calculated on the database for this pageSize.").type(JavaScalars.GraphQLLong).build()) .field(GraphQLFieldDefinition.newFieldDefinition().name("totalElements").description("Total number of results on the database for this query.").type(JavaScalars.GraphQLLong).build()) .field(GraphQLFieldDefinition.newFieldDefinition().name("content").description("The actual object results").type(new GraphQLList(getObjectType(entityType))).build()) .build(); return GraphQLFieldDefinition.newFieldDefinition() .name(entityType.getName() + "Connection") .description("'Connection' request wrapper object for " + entityType.getName() + ". Use this object in a query to request things like pagination or aggregation in an argument. 
Use the 'content' field to request actual fields ") .type(pageType) .dataFetcher(new ExtendedJpaDataFetcher(entityManager, entityType)) .argument(paginationArgument) .build(); } private GraphQLArgument getArgument(Attribute attribute) { GraphQLType type = getAttributeType(attribute); if (type instanceof GraphQLInputType) { return GraphQLArgument.newArgument() .name(attribute.getName()) .type((GraphQLInputType) type) .build(); } throw new IllegalArgumentException("Attribute " + attribute + " cannot be mapped as an Input Argument"); } private GraphQLObjectType getObjectType(EntityType<?> entityType) { return GraphQLObjectType.newObject() .name(entityType.getName()) .description(getSchemaDocumentation( entityType.getJavaType())) .fields(entityType.getAttributes().stream().map(this::getObjectField).collect(Collectors.toList())) .build(); } private GraphQLFieldDefinition getObjectField(Attribute attribute) { GraphQLType type = getAttributeType(attribute); if (type instanceof GraphQLOutputType) { List<GraphQLArgument> arguments = new ArrayList<>(); arguments.add(GraphQLArgument.newArgument().name("orderBy").type(orderByDirectionEnum).build()); // Always add the orderBy argument // Get the fields that can be queried on (i.e. Simple Types, no Sub-Objects) if (attribute instanceof SingularAttribute && attribute.getPersistentAttributeType() != Attribute.PersistentAttributeType.BASIC) { EntityType foreignType = (EntityType) ((SingularAttribute) attribute).getType(); Stream<Attribute> attributes = findBasicAttributes(foreignType.getAttributes()); attributes.forEach(it -> { arguments.add(GraphQLArgument.newArgument().name(it.getName()).type((GraphQLInputType) getAttributeType(it)).build()); }); } return GraphQLFieldDefinition.newFieldDefinition() .name(attribute.getName()) .description(getSchemaDocumentation(attribute.getJavaMember())) .type((GraphQLOutputType) type) .argument(arguments) .build(); } throw new IllegalArgumentException("Attribute " + attribute + " cannot be mapped as an Output Argument"); } private Stream<Attribute> findBasicAttributes(Collection<Attribute> attributes) { return attributes.stream().filter(it -> it.getPersistentAttributeType() == Attribute.PersistentAttributeType.BASIC); } private GraphQLType getAttributeType(Attribute attribute) { if (attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.BASIC) { if (String.class.isAssignableFrom(attribute.getJavaType())) return Scalars.GraphQLString; else if (Integer.class.isAssignableFrom(attribute.getJavaType()) || int.class.isAssignableFrom(attribute.getJavaType())) return Scalars.GraphQLInt; else if (Float.class.isAssignableFrom(attribute.getJavaType()) || float.class.isAssignableFrom(attribute.getJavaType())) return Scalars.GraphQLFloat; else if (Long.class.isAssignableFrom(attribute.getJavaType()) || long.class.isAssignableFrom(attribute.getJavaType())) return Scalars.GraphQLLong; else if (Boolean.class.isAssignableFrom(attribute.getJavaType()) || boolean.class.isAssignableFrom(attribute.getJavaType())) return Scalars.GraphQLBoolean; else if (attribute.getJavaType().isEnum()) { return getTypeFromJavaType(attribute.getJavaType()); } } else if (attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.ONE_TO_MANY || attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.MANY_TO_MANY) { EntityType foreignType = (EntityType) ((PluralAttribute) attribute).getElementType(); return new GraphQLList(new GraphQLTypeReference(foreignType.getName())); } else if 
(attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.MANY_TO_ONE || attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.ONE_TO_ONE) { EntityType foreignType = (EntityType) ((SingularAttribute) attribute).getType(); return new GraphQLTypeReference(foreignType.getName()); } else if (attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.ELEMENT_COLLECTION) { Type foreignType = ((PluralAttribute) attribute).getElementType(); return new GraphQLList(getTypeFromJavaType(foreignType.getJavaType())); } throw new UnsupportedOperationException("Attribute could not be mapped to GraphQL: " + attribute); } private boolean isValidInput(Attribute attribute) { return attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.BASIC || attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.ELEMENT_COLLECTION; } private String getSchemaDocumentation(Member member) { if (member instanceof AnnotatedElement) { return getSchemaDocumentation((AnnotatedElement) member); } return null; } private String getSchemaDocumentation(AnnotatedElement annotatedElement) { if (annotatedElement != null) { SchemaDocumentation schemaDocumentation = annotatedElement.getAnnotation(SchemaDocumentation.class); return schemaDocumentation != null ? schemaDocumentation.value() : null; } return null; } private GraphQLType getTypeFromJavaType(Class clazz) { if (clazz.isEnum()) { GraphQLEnumType.Builder enumBuilder = GraphQLEnumType.newEnum().name(clazz.getSimpleName()); int ordinal = 0; for (Enum enumValue : ((Class<Enum>)clazz).getEnumConstants()) enumBuilder.value(enumValue.name(), ordinal++); GraphQLType answer = enumBuilder.build(); setIdentityCoercing(answer); return answer; } return null; } /** * A bit of a hack, since JPA will deserialize our Enum's for us...we don't want GraphQL doing it. * * @param type */ private void setIdentityCoercing(GraphQLType type) { try { Field coercing = type.getClass().getDeclaredField("coercing"); coercing.setAccessible(true); coercing.set(type, new IdentityCoercing()); } catch (Exception e) { log.error("Unable to set coercing for " + type, e); } } private static final GraphQLArgument paginationArgument = GraphQLArgument.newArgument() .name(PAGINATION_REQUEST_PARAM_NAME) .type(GraphQLInputObjectType.newInputObject() .name("PaginationObject") .description("Query object for Pagination Requests, specifying the requested page, and that page's size.\n\nNOTE: 'page' parameter is 1-indexed, NOT 0-indexed.\n\nExample: paginationRequest { page: 1, size: 20 }") .field(GraphQLInputObjectField.newInputObjectField().name("page").description("Which page should be returned, starting with 1 (1-indexed)").type(Scalars.GraphQLInt).build()) .field(GraphQLInputObjectField.newInputObjectField().name("size").description("How many results should this page contain").type(Scalars.GraphQLInt).build()) .build() ).build(); private static final GraphQLEnumType orderByDirectionEnum = GraphQLEnumType.newEnum() .name("OrderByDirection") .description("Describes the direction (Ascending / Descending) to sort a field.") .value("ASC", 0, "Ascending") .value("DESC", 1, "Descending") .build(); }
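package org.crygier.graphql;

import graphql.schema.GraphQLSchema;

import javax.persistence.EntityManager;
import javax.persistence.Persistence;

/*
 * A minimal usage sketch for the schema builder above. The persistence unit name
 * "example-unit" is a hypothetical placeholder; obtaining the EntityManager is otherwise
 * standard JPA bootstrap code, and the schema construction uses only the public
 * GraphQLSchemaBuilder API defined in this file.
 */
public class GraphQLSchemaBuilderUsageSketch {

    public static GraphQLSchema buildSchema() {
        // Create an EntityManager from a JPA persistence unit (the unit name is an assumption).
        EntityManager entityManager = Persistence
                .createEntityManagerFactory("example-unit")
                .createEntityManager();
        // Derive the GraphQL schema (query fields, pagination wrappers, enums) from the JPA metamodel.
        return new GraphQLSchemaBuilder(entityManager).getGraphQLSchema();
    }
}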
package org.g_node.crawler.LKTLogbook; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import org.jopendocument.dom.spreadsheet.Sheet; import org.jopendocument.dom.spreadsheet.SpreadSheet; /** * Parser for the main ODS metadata file used in the lab of Kay Thurley. * * @author Michael Sonntag (sonntag@bio.lmu.de) */ public class LKTLogbook { /** * Line within the ODS file where the header of the * experiment description section is found. */ private final Integer sheetHeaderLine = 23; /** * String value of the first field of the header * of the experiment description section. This string is * required to check, if the header line and subsequently * the actual data entry lines are properly aligned for * the next parsing steps. */ private final String firstHeaderEntry = "ImportID"; /** * Boolean value stating, if errors during the parsing of the ODS file * have occurred. */ private boolean hasParserError; /** * ArrayList containing all messages that occurred while parsing the ODS file. * All parser errors connected to missing values or incorrect value formats should * be collected and written to a logfile, so that users can correct these * mistakes ideally all at once before running the crawler again. */ private final ArrayList<String> parserErrorMessages = new ArrayList<>(0); /** * Method for parsing the contents of a provided ODS input file. * @param inputFile ODS file specific to Kay Thurleys usecase. */ public final void parseFile(final String inputFile) { this.hasParserError = false; // TODO is this a good way to end the execution of the app // TODO if the input file is not available? if (!this.checkInputFile(inputFile)) { return; } // TODO check if a robuster solution exists. Also check with Kay, // TODO if multiple backup files e.g. with a timestamp should exist. // TODO will fail for sure, if the file contains more than one period; should be addressed as well. // Create file backup final String backupFile = String.join("", inputFile.split("\\.")[0], "_backup.ods"); try { Files.copy(Paths.get(inputFile), Paths.get(backupFile), StandardCopyOption.REPLACE_EXISTING); } catch (final IOException exc) { System.err.println(String.join(" ", "Error creating backup file:", exc.getMessage())); exc.printStackTrace(); // TODO is this in this case a valid way to end the function? 
return; } try { final File odsFile = new File(inputFile); // TODO remove later System.out.println( String.join( " ", "File has # sheets:", String.valueOf(SpreadSheet.createFromFile(odsFile).getSheetCount())) ); if (!(SpreadSheet.createFromFile(odsFile).getSheetCount() > 0)) { this.hasParserError = true; this.parserErrorMessages.add( String.join( " ", "Provided labbook", inputFile, "does not contain valid data sheets" ) ); } else { final ArrayList<LKTLogbookSheet> allSheets = this.parseSheets(odsFile); allSheets.forEach( s -> System.out.println( String.join( " ", "CurrSheet:", s.getAnimalID(), ", number of entries:", String.valueOf(s.getEntries().size()) ) ) ); if (!this.hasParserError) { // TODO remove later allSheets.stream().forEach( s -> { System.out.println( String.join( " ", "AnimalID:", s.getAnimalID(), ", AnimalSex:", s.getAnimalSex() ) ); s.getEntries().stream().forEach( e -> System.out.println( String.join( " ", "CurrRow:", e.getProject(), e.getExperiment(), e.getExperimentDate().toString() ) ) ); }); } else { // TODO write to logfile this.parserErrorMessages.forEach(System.err::println); } } } catch (final IOException exp) { System.err.println(String.join(" ", "Error reading from input file:", exp.getMessage())); exp.printStackTrace(); } } /** * Method for validating that the input file actually exists. * @param inputFile Path and filename of the provided input file. * @return True if the file exists, false otherwise. */ private boolean checkInputFile(final String inputFile) { boolean correctFile = true; if (!Files.exists(Paths.get(inputFile)) && (!Files.exists(Paths.get(inputFile).toAbsolutePath()))) { System.err.println(String.join(" ", "Invalid input file:", inputFile)); correctFile = false; } return correctFile; } /** * Method parsing all sheets of the current ODS file. * If parsing errors occur, {@hasParserError} will be set to true, * the corresponding message will be added to {@parserErrorMessages}. * Parsing will continue to collect further possible parser errors. * @param odsFile Input file. * @return ArrayList containing parsed {@LKTLogbookSheet}. 
*/ private ArrayList<LKTLogbookSheet> parseSheets(final File odsFile) { final ArrayList<LKTLogbookSheet> allSheets = new ArrayList<>(0); Sheet currSheet; LKTLogbookSheet currLKTLSheet; try { for (int i = 0; i < SpreadSheet.createFromFile(odsFile).getSheetCount(); i = i + 1) { currSheet = SpreadSheet.createFromFile(odsFile).getSheet(i); final String sheetName = currSheet.getName(); // TODO remove later System.out.println( String.join( " ", "Sheet name:", currSheet.getName(), ", RowCount:", String.valueOf(currSheet.getRowCount()), ", ColCount:", String.valueOf(currSheet.getColumnCount()) ) ); currLKTLSheet = new LKTLogbookSheet(); currLKTLSheet = this.parseSheetVariables(currSheet, currLKTLSheet); // TODO come up with a more robust solution // TODO check that line 23 contains the header and that the information start at line 24 final String startCell = String.join("", "A", String.valueOf(this.sheetHeaderLine)); if (currSheet.getCellAt(startCell).getTextValue() == null || !currSheet.getCellAt(startCell).getTextValue().equals(this.firstHeaderEntry)) { this.parserErrorMessages.add( String.join( " ", "Parser error sheet", sheetName, "HeaderEntry ImportID does not start at required line", String.valueOf(this.sheetHeaderLine) ) ); } else { currLKTLSheet = this.parseSheetEntries(currSheet, currLKTLSheet); allSheets.add(currLKTLSheet); } } } catch (final IOException exp) { System.err.println(String.join(" ", "Error reading from input file:", exp.getMessage())); exp.printStackTrace(); } return allSheets; } /** * Method for retrieving all sheet specific data from the current ODS sheet. * @param currSheet The current sheet from the ODS file. * @param currLKTLSheet The current {@LKTLogbookSheet}. * @return The current {@LKTLogbookSheet} containing all parsed values. */ private LKTLogbookSheet parseSheetVariables(final Sheet currSheet, final LKTLogbookSheet currLKTLSheet) { final String sheetName = currSheet.getName(); ArrayList<String> parseSheetMessage; String checkDB; String checkDW; currLKTLSheet.setAnimalID(currSheet.getCellAt("C2").getTextValue()); currLKTLSheet.setAnimalSex(currSheet.getCellAt("C3").getTextValue()); checkDB = currLKTLSheet.setDateOfBirth(currSheet.getCellAt("C4").getTextValue()); checkDW = currLKTLSheet.setDateOfWithdrawal(currSheet.getCellAt("C5").getTextValue()); currLKTLSheet.setPermitNumber(currSheet.getCellAt("C6").getTextValue()); currLKTLSheet.setSpecies(currSheet.getCellAt("C7").getTextValue()); currLKTLSheet.setScientificName(currSheet.getCellAt("C8").getTextValue()); // TODO come up with a better way to deal with date errors parseSheetMessage = currLKTLSheet.isValidSheet(); if (!parseSheetMessage.isEmpty() || !checkDB.isEmpty() || !checkDW.isEmpty()) { this.hasParserError = true; if (!parseSheetMessage.isEmpty()) { parseSheetMessage.forEach( m -> this.parserErrorMessages.add( String.join(" ", "Parser error sheet", sheetName, ",", m) ) ); } // check valid date of birth if (!checkDB.isEmpty()) { this.parserErrorMessages.add(String.join(" ", "Parser error sheet", sheetName, ",", checkDB)); } // check valid date of withdrawal if (!checkDW.isEmpty()) { this.parserErrorMessages.add(String.join(" ", "Parser error sheet", sheetName, ",", checkDW)); } } return currLKTLSheet; } /** * Method for parsing the experiment entries of an animal sheet. * If parsing errors occur, {@hasParserError} will be set to true, * the corresponding message will be added to {@parserErrorMessages}. * Parsing will continue to collect further possible parser errors. 
* @param currSheet The current sheet of the parsed ODS file. * @param currLKTLSheet The current {@LKTLogbookSheet}. * @return The current {@LKTLogbookSheet} containing the parsed * experiment entries. */ private LKTLogbookSheet parseSheetEntries(final Sheet currSheet, final LKTLogbookSheet currLKTLSheet) { String parseEntryMessage; for (int i = this.sheetHeaderLine + 1; i < currSheet.getRowCount(); i = i + 1) { final LKTLogbookEntry currEntry = this.parseSheetEntriesVariables(currSheet, i); parseEntryMessage = currEntry.isValidEntry(); if (!currEntry.getIsEmptyLine() && parseEntryMessage.isEmpty()) { currLKTLSheet.addEntry(currEntry); } else if (!currEntry.getIsEmptyLine() && (!currEntry.getProject().isEmpty() || !currEntry.getExperiment().isEmpty() || currEntry.getExperimentDate() != null || !currEntry.getLastName().isEmpty())) { this.hasParserError = true; this.parserErrorMessages.add( String.join( " ", "Parser error sheet", currSheet.getName(), "row", String.valueOf(i), String.join("", ", missing value:", parseEntryMessage) ) ); } } return currLKTLSheet; } /** * Method parsing all variables of a single entry of the current * ODS sheet. * @param currSheet The current ODS sheet. * @param currLine Number of the current line in the current ODS sheet. * @return The {@LKTLogbookEntry} containing the parsed values from * the current single entry. */ private LKTLogbookEntry parseSheetEntriesVariables(final Sheet currSheet, final int currLine) { String[] handleName; String checkExperimentDate; final LKTLogbookEntry currEntry = new LKTLogbookEntry(); currEntry.setExistingImportID(currSheet.getCellAt( String.join("", "A", String.valueOf(currLine))).getTextValue() ); currEntry.setProject(currSheet.getCellAt( String.join("", "K", String.valueOf(currLine))).getTextValue() ); currEntry.setExperiment(currSheet.getCellAt( String.join("", "L", String.valueOf(currLine))).getTextValue() ); currEntry.setParadigm(currSheet.getCellAt( String.join("", "C", String.valueOf(currLine))).getTextValue() ); currEntry.setParadigmSpecifics(currSheet.getCellAt( String.join("", "D", String.valueOf(currLine))).getTextValue() ); // TODO check if the experimentDate parser error and the empty line messages all still work! checkExperimentDate = currEntry.setExperimentDate(currSheet.getCellAt( String.join("", "B", String.valueOf(currLine))).getTextValue() ); if (!checkExperimentDate.isEmpty()) { this.hasParserError = true; this.parserErrorMessages.add( String.join( " ", "Parser error sheet", currSheet.getName(), "row", String.valueOf(currLine), checkExperimentDate ) ); } // TODO solve this better, add middle name handleName = currSheet.getCellAt( String.join("", "M", String.valueOf(currLine)) ).getTextValue().trim().split("\\s+"); currEntry.setFirstName(handleName[0]); currEntry.setLastName(handleName[handleName.length - 1]); currEntry.setCommentExperiment(currSheet.getCellAt( String.join("", "H", String.valueOf(currLine))).getTextValue() ); currEntry.setCommentAnimal(currSheet.getCellAt( String.join("", "I", String.valueOf(currLine))).getTextValue() ); currEntry.setFeed(currSheet.getCellAt( String.join("", "J", String.valueOf(currLine))).getTextValue() ); currEntry.setIsOnDiet(currSheet.getCellAt( String.join("", "E", String.valueOf(currLine))).getTextValue() ); currEntry.setIsInitialWeight(currSheet.getCellAt( String.join("", "F", String.valueOf(currLine))).getTextValue() ); currEntry.setWeight(currSheet.getCellAt( String.join("", "G", String.valueOf(currLine))).getTextValue() ); return currEntry; } }
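package org.g_node.crawler.LKTLogbook;

/*
 * A minimal usage sketch for the parser above, assuming an ODS logbook file is available at
 * the given path ("labbook.ods" is a hypothetical file name). parseFile() creates a backup
 * copy of the input, parses all sheets and prints any collected parser errors to stderr, as
 * implemented in LKTLogbook.
 */
public class LKTLogbookUsageSketch {

    public static void main(final String[] args) {
        final LKTLogbook parser = new LKTLogbook();
        parser.parseFile("labbook.ods");
    }
}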
package org.grobid.core.engines; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterables; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.grobid.core.GrobidModels; import org.grobid.core.analyzers.GrobidAnalyzer; import org.grobid.core.data.*; import org.grobid.core.document.Document; import org.grobid.core.document.DocumentPiece; import org.grobid.core.document.DocumentSource; import org.grobid.core.engines.config.GrobidAnalysisConfig; import org.grobid.core.engines.label.SegmentationLabels; import org.grobid.core.engines.label.TaggingLabels; import org.grobid.core.layout.LayoutToken; import org.grobid.core.layout.LayoutTokenization; import org.grobid.core.tokenization.LabeledTokensContainer; import org.grobid.core.tokenization.TaggingTokenCluster; import org.grobid.core.tokenization.TaggingTokenClusteror; import org.grobid.core.utilities.GrobidProperties; import org.grobid.core.utilities.IOUtilities; import org.grobid.core.utilities.UnitUtilities; import org.grobid.service.exceptions.GrobidServiceException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; import javax.inject.Singleton; import javax.ws.rs.core.Response; import java.io.*; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.NoSuchElementException; import java.util.SortedSet; import java.util.stream.Collectors; import java.util.stream.Stream; import static java.nio.charset.StandardCharsets.UTF_8; /** * This class represent the aggregated processing applying multiple parsers or combining PDF extraction */ @Singleton public class QuantitiesEngine { private static final Logger LOGGER = LoggerFactory.getLogger(QuantitiesEngine.class); private QuantityParser quantityParser; private UnitParser unitParser; private EngineParsers parsers; private static QuantitiesEngine instance; @Inject public QuantitiesEngine() { GrobidProperties.getInstance(); this.quantityParser = QuantityParser.getInstance(); this.unitParser = UnitParser.getInstance(); this.parsers = new EngineParsers(); instance = this; } public static QuantitiesEngine getInstance() { if (instance == null) { instance = getNewInstance(); } return instance; } private static synchronized QuantitiesEngine getNewInstance() { return new QuantitiesEngine(); } public List<UnitBlock> parseUnits(String text) { return unitParser.tagUnit(text); } public MeasurementsResponse processPdf(InputStream inputStream) { long start = System.currentTimeMillis(); List<Measurement> measurements = new ArrayList<>(); Document doc; File originFile = null; try { originFile = IOUtilities.writeInputFile(inputStream); if (originFile == null) { throw new GrobidServiceException("Input file is empty or null", Response.Status.BAD_REQUEST); } GrobidAnalysisConfig config = new GrobidAnalysisConfig.GrobidAnalysisConfigBuilder() .analyzer(GrobidAnalyzer.getInstance()) .consolidateHeader(0) .consolidateCitations(0) .build(); DocumentSource documentSource = DocumentSource.fromPdf(originFile); doc = parsers.getSegmentationParser().processing(documentSource, config); // In the following, we process the relevant textual content of the document // for refining the process based on structures, we need to filter // 
segment of interest (e.g. header, body, annex) and possibly apply // the corresponding model to further filter by structure types // from the header, we are interested in title, abstract and keywords SortedSet<DocumentPiece> documentParts = doc.getDocumentPart(SegmentationLabels.HEADER); if (documentParts != null) { Pair<String, List<LayoutToken>> headerStruct = parsers.getHeaderParser().getSectionHeaderFeatured(doc, documentParts); List<LayoutToken> tokenizationHeader = headerStruct.getRight();//doc.getTokenizationParts(documentParts, doc.getTokenizations()); String header = headerStruct.getLeft(); String labeledResult = null; if ((header != null) && (header.trim().length() > 0)) { labeledResult = parsers.getHeaderParser().label(header); BiblioItem resHeader = new BiblioItem(); //parsers.getHeaderParser().processingHeaderSection(false, doc, resHeader); resHeader.generalResultMapping(doc, labeledResult, tokenizationHeader); // title List<LayoutToken> titleTokens = resHeader.getLayoutTokens(TaggingLabels.HEADER_TITLE); if (titleTokens != null) { measurements.addAll(quantityParser.process(titleTokens)); } // abstract List<LayoutToken> abstractTokens = resHeader.getLayoutTokens(TaggingLabels.HEADER_ABSTRACT); if (abstractTokens != null) { measurements.addAll(quantityParser.process(abstractTokens)); } // keywords List<LayoutToken> keywordTokens = resHeader.getLayoutTokens(TaggingLabels.HEADER_KEYWORD); if (keywordTokens != null) { measurements.addAll(quantityParser.process(keywordTokens)); } } } // we can process all the body, in the future figure and table could be the // object of more refined processing documentParts = doc.getDocumentPart(SegmentationLabels.BODY); if (documentParts != null) { Pair<String, LayoutTokenization> featSeg = parsers.getFullTextParser().getBodyTextFeatured(doc, documentParts); String fulltextTaggedRawResult = null; if (featSeg != null) { String featureText = featSeg.getLeft(); LayoutTokenization layoutTokenization = featSeg.getRight(); if (StringUtils.isNotEmpty(featureText)) { fulltextTaggedRawResult = parsers.getFullTextParser().label(featureText); } TaggingTokenClusteror clusteror = new TaggingTokenClusteror(GrobidModels.FULLTEXT, fulltextTaggedRawResult, layoutTokenization.getTokenization(), true); //Iterate and exclude figures and tables for (TaggingTokenCluster cluster : Iterables.filter(clusteror.cluster(), new TaggingTokenClusteror .LabelTypeExcludePredicate(TaggingLabels.TABLE_MARKER, TaggingLabels.EQUATION, TaggingLabels.CITATION_MARKER, TaggingLabels.FIGURE_MARKER, TaggingLabels.EQUATION_MARKER, TaggingLabels.EQUATION_LABEL))) { if (cluster.getTaggingLabel().equals(TaggingLabels.FIGURE)) { //apply the figure model to only get the caption final Figure processedFigure = parsers.getFigureParser() .processing(cluster.concatTokens(), cluster.getFeatureBlock()); measurements.addAll(quantityParser.process(processedFigure.getCaptionLayoutTokens())); } else if (cluster.getTaggingLabel().equals(TaggingLabels.TABLE)) { //apply the table model to only get the caption/description final Table processedTable = parsers.getTableParser().processing(cluster.concatTokens(), cluster.getFeatureBlock()); measurements.addAll(quantityParser.process(processedTable.getFullDescriptionTokens())); } else { final List<LabeledTokensContainer> labeledTokensContainers = cluster.getLabeledTokensContainers(); // extract all the layout tokens from the cluster as a list List<LayoutToken> tokens = labeledTokensContainers.stream() .map(LabeledTokensContainer::getLayoutTokens) 
.flatMap(List::stream) .collect(Collectors.toList()); measurements.addAll(quantityParser.process(tokens)); } } } } // we don't process references (although reference titles could be relevant) // acknowledgement? // we can process annexes documentParts = doc.getDocumentPart(SegmentationLabels.ANNEX); if (documentParts != null) { measurements.addAll(processDocumentPart(documentParts, doc)); } } catch (NoSuchElementException nseExp) { throw new GrobidServiceException("Could not get an instance of parser. ", Response.Status.SERVICE_UNAVAILABLE); } finally { IOUtilities.removeTempFile(originFile); } // for next line, comparable measurement needs to be implemented //Collections.sort(measurements); MeasurementsResponse measurementsResponse = new MeasurementsResponse(measurements, doc.getPages()); long end = System.currentTimeMillis(); measurementsResponse.setRuntime(end - start); return measurementsResponse; } /** * Process with the quantity model a segment coming from the segmentation model */ private List<Measurement> processDocumentPart(SortedSet<DocumentPiece> documentParts, Document doc) { // List<LayoutToken> for the selected segment List<LayoutToken> layoutTokens = doc.getTokenizationParts(documentParts, doc.getTokenizations()); return quantityParser.process(layoutTokens); } public List<Measurement> parseMeasurement(String json) { ObjectMapper mapper = new ObjectMapper(); mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true); JsonNode jsonAnnotation = null; try { jsonAnnotation = mapper.readTree(json); } catch (IOException ex) { throw new GrobidServiceException("Cannot parse input JSON. ", Response.Status.BAD_REQUEST); } if ((jsonAnnotation == null) || (jsonAnnotation.isMissingNode())) { throw new GrobidServiceException("The request is invalid or malformed.", Response.Status.BAD_REQUEST); } String fromValue = null; String toValue = null; String unitValue = null; String typeValue = null; JsonNode from = jsonAnnotation.findPath("from"); if ((from != null) && (!from.isMissingNode())) fromValue = from.textValue(); JsonNode to = jsonAnnotation.findPath("to"); if ((to != null) && (!to.isMissingNode())) toValue = to.textValue(); JsonNode unit = jsonAnnotation.findPath("unit"); if ((unit != null) && (!unit.isMissingNode())) unitValue = unit.textValue(); JsonNode type = jsonAnnotation.findPath("type"); if ((type != null) && (!type.isMissingNode())) typeValue = type.textValue(); UnitUtilities.Measurement_Type theType = null; String atomicValue = null; if (((fromValue == null) || (fromValue.length() == 0)) && ((toValue == null) || (toValue.length() == 0))) { throw new GrobidServiceException("The input JSON is empty or null.", Response.Status.NO_CONTENT); } else if ((fromValue == null) || (fromValue.length() == 0)) { atomicValue = toValue; theType = UnitUtilities.Measurement_Type.VALUE; } else if ((toValue == null) || (toValue.length() == 0)) { atomicValue = fromValue; theType = UnitUtilities.Measurement_Type.VALUE; } else theType = UnitUtilities.Measurement_Type.INTERVAL_MIN_MAX; Measurement measurement = new Measurement(theType); if (theType == UnitUtilities.Measurement_Type.VALUE) { Quantity quantity = new Quantity(); quantity.setRawValue(atomicValue); quantity.setRawUnit(new Unit(unitValue)); // note: there is no way to enforce the measurement type here // it will be inferred from the raw unit measurement.setAtomicQuantity(quantity); } else { Quantity quantityLeast = new Quantity(); Quantity quantityMost = new Quantity(); quantityLeast.setRawValue(fromValue); quantityLeast.setRawUnit(new 
Unit(unitValue)); quantityMost.setRawValue(toValue); quantityMost.setRawUnit(new Unit(unitValue)); measurement.setQuantityLeast(quantityLeast); measurement.setQuantityMost(quantityMost); } List<Measurement> measurements = new ArrayList<>(); measurements.add(measurement); return measurements; } public MeasurementsResponse processJson(String json) { try { long start = System.currentTimeMillis(); List<Measurement> measurements = parseMeasurement(json); measurements = quantityParser.normalizeMeasurements(measurements); long end = System.currentTimeMillis(); MeasurementsResponse response = new MeasurementsResponse(measurements); response.setRuntime(end - start); return response; } catch (NoSuchElementException e) { throw new GrobidServiceException("Could not get an engine from the pool within configured time. Sending service unavailable.", e, Response.Status.SERVICE_UNAVAILABLE); } catch (Exception e) { throw new GrobidServiceException("An unexpected exception occurs. ", e, Response.Status.INTERNAL_SERVER_ERROR); } } public MeasurementsResponse processText(String text) { try { long start = System.currentTimeMillis(); MeasurementsResponse response = new MeasurementsResponse(quantityParser.process(text)); long end = System.currentTimeMillis(); response.setRuntime(end - start); return response; } catch (NoSuchElementException e) { throw new GrobidServiceException("Could not get an engine from the pool within configured time. Sending service unavailable.", e, Response.Status.SERVICE_UNAVAILABLE); } catch (Exception e) { throw new GrobidServiceException("An unexpected exception occurs. ", e, Response.Status.INTERNAL_SERVER_ERROR); } } public void batchProcess(String inputDirectory, String outputDirectory, boolean isRecursive) { throw new NotImplementedException("Not yet implemented"); } /** * Processes a file with units */ public void unitBatchProcess(String inputDirectory, String outputDirectory, boolean isRecursive) { Path inputPath = Paths.get(inputDirectory); File[] refFiles = inputPath.toFile().listFiles(new FilenameFilter() { public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".txt"); } }); if (refFiles == null) { return; } LOGGER.info(refFiles.length + " files"); for (File inputFile : refFiles) { Writer outputWriter = null; try { // the file for writing the training data OutputStream os2 = null; if (outputDirectory != null) { os2 = new FileOutputStream(outputDirectory + File.separator + inputFile.getName() + ".xml"); outputWriter = new OutputStreamWriter(os2, UTF_8); } else { return; } outputWriter.write("<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n"); outputWriter.write("<units>\n"); try (Stream<String> stream = Files.lines(Paths.get(inputFile.getAbsolutePath()))) { List<String> processedUnits = stream.map( s -> unitParser.tagUnit(s).stream().map(UnitBlock::getRawTaggedValue).collect(Collectors.joining()) ).collect(Collectors.toList()); for (String processedUnit : processedUnits) { outputWriter.write("<unit>"); outputWriter.write(processedUnit); outputWriter.write("</unit>"); outputWriter.write("\n"); } } catch (IOException e) { e.printStackTrace(); } outputWriter.write("</units>\n"); } catch (IOException e) { e.printStackTrace(); } finally { IOUtils.closeQuietly(outputWriter); } } } }
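package org.grobid.core.engines;

import org.grobid.core.data.MeasurementsResponse;

/*
 * A minimal usage sketch for the engine above, assuming GROBID and the grobid-quantities
 * models are already configured (the QuantitiesEngine constructor triggers
 * GrobidProperties.getInstance() itself). The example sentence is purely illustrative.
 */
public class QuantitiesEngineUsageSketch {

    public static void main(String[] args) {
        // Tag quantities/measurements in free text; the response wraps the extracted measurements.
        MeasurementsResponse response = QuantitiesEngine.getInstance()
                .processText("The sample was heated to 300 K for 2 hours.");
        System.out.println(response);
    }
}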
package org.jenkinsci.plugins.p4.groovy; import com.perforce.p4java.exception.P4JavaException; import com.perforce.p4java.server.IOptionsServer; import hudson.FilePath; import hudson.model.TaskListener; import org.jenkinsci.plugins.p4.client.ClientHelper; import org.jenkinsci.plugins.p4.credentials.P4BaseCredentials; import org.jenkinsci.plugins.p4.workspace.Workspace; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; public class P4Groovy implements Serializable { private static final long serialVersionUID = 1L; private final P4BaseCredentials credential; private final Workspace workspace; private final FilePath buildWorkspace; private transient TaskListener listener = null; protected P4Groovy(P4BaseCredentials credential, TaskListener listener, Workspace workspace, FilePath buildWorkspace) { this.credential = credential; this.workspace = workspace; this.listener = listener; this.buildWorkspace = buildWorkspace; } public String getClientName() { return workspace.getFullName(); } public String getUserName() throws Exception { IOptionsServer p4 = getConnection(); String user = p4.getUserName(); p4.disconnect(); return user; } @Deprecated public Map<String, Object>[] runString(String cmd, String args) throws P4JavaException, InterruptedException, IOException { List<String> argList = new ArrayList<String>(); for (String arg : args.split(",")) { arg = arg.trim(); argList.add(arg); } String[] array = argList.toArray(new String[0]); return run(cmd, array); } public Map<String, Object>[] run(String cmd, String... args) throws P4JavaException, InterruptedException, IOException { P4GroovyTask task = new P4GroovyTask(credential, listener, cmd, args); task.setWorkspace(workspace); return buildWorkspace.act(task); } public Map<String, Object>[] run(String cmd, List<String> args) throws P4JavaException, InterruptedException, IOException { String[] array = args.toArray(new String[0]); return run(cmd, array); } public Map<String, Object>[] save(String type, Map<String, Object> spec) throws P4JavaException, InterruptedException, IOException { return save(type, spec, new ArrayList()); } public Map<String, Object>[] save(String type, Map<String, Object> spec, List<String> list) throws P4JavaException, InterruptedException, IOException { // add '-i' to user provided args list if (!list.contains("-i")) { list.add("-i"); } String[] args = list.toArray(new String[0]); P4GroovyTask task = new P4GroovyTask(credential, listener, type, args, spec); task.setWorkspace(workspace); return buildWorkspace.act(task); } public Map<String, Object>[] save(String type, Map<String, Object> spec, String... args) throws P4JavaException, InterruptedException, IOException { ArrayList<String> list = new ArrayList<>(Arrays.asList(args)); return save(type, spec, list); } public Map<String, Object> fetch(String type, String id) throws P4JavaException, InterruptedException, IOException { String[] array = {"-o", id}; Map<String, Object>[] maps = run(type, array); if (maps.length == 0) return null; maps[0].remove("specFormatted"); return maps[0]; } private IOptionsServer getConnection() throws IOException { ClientHelper p4 = new ClientHelper(credential, listener, workspace); return p4.getConnection(); } }
package org.lantern; import java.io.IOException; import java.net.InetSocketAddress; import java.net.URI; import java.util.concurrent.Executors; import javax.net.ssl.SSLEngine; import org.apache.commons.lang.StringUtils; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelFutureListener; import org.jboss.netty.channel.ChannelHandler; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.group.DefaultChannelGroup; import org.jboss.netty.channel.socket.ClientSocketChannelFactory; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import org.jboss.netty.handler.codec.http.DefaultHttpResponse; import org.jboss.netty.handler.codec.http.HttpChunk; import org.jboss.netty.handler.codec.http.HttpHeaders; import org.jboss.netty.handler.codec.http.HttpHeaders.Names; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpRequestEncoder; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.codec.http.HttpVersion; import org.jboss.netty.handler.ssl.SslHandler; import org.littleshoot.proxy.DefaultRelayPipelineFactoryFactory; import org.littleshoot.proxy.HttpConnectRelayingHandler; import org.littleshoot.proxy.HttpFilter; import org.littleshoot.proxy.HttpRequestHandler; import org.littleshoot.proxy.HttpResponseFilters; import org.littleshoot.proxy.KeyStoreManager; import org.littleshoot.proxy.ProxyUtils; import org.littleshoot.proxy.RelayPipelineFactoryFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Handler that relays traffic to another proxy, dispatching between * appropriate proxies depending on the type of request. */ public class DispatchingProxyRelayHandler extends SimpleChannelUpstreamHandler { private final Logger log = LoggerFactory.getLogger(getClass()); private volatile long messagesReceived = 0L; /** * Outgoing channel that handles incoming HTTP Connect requests. 
*/ private ChannelFuture httpConnectChannelFuture; private Channel browserToProxyChannel; // "Each incoming HTTP request can be no larger than 32MB" private static final long REQUEST_SIZE_LIMIT = 1024 * 1024 * 32 - 4096; private static final ClientSocketChannelFactory clientSocketChannelFactory = new NioClientSocketChannelFactory( Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); static { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { //clientSocketChannelFactory.releaseExternalResources(); } })); } private final HttpRequestProcessor unproxiedRequestProcessor = new HttpRequestProcessor() { final RelayPipelineFactoryFactory pf = new DefaultRelayPipelineFactoryFactory(null, new HttpResponseFilters() { @Override public HttpFilter getFilter(String arg0) { return null; } }, null, new DefaultChannelGroup("HTTP-Proxy-Server")); private final HttpRequestHandler requestHandler = new HttpRequestHandler(clientSocketChannelFactory, pf); @Override public boolean processRequest(final Channel browserChannel, final ChannelHandlerContext ctx, final MessageEvent me) throws IOException { requestHandler.messageReceived(ctx, me); return true; } @Override public boolean processChunk(final ChannelHandlerContext ctx, final MessageEvent me) throws IOException { requestHandler.messageReceived(ctx, me); return true; } @Override public void close() { } }; private final HttpRequestProcessor proxyRequestProcessor; //private final HttpRequestProcessor anonymousPeerRequestProcessor; //private final HttpRequestProcessor trustedPeerRequestProcessor; private final HttpRequestProcessor laeRequestProcessor; private HttpRequestProcessor currentRequestProcessor; private boolean readingChunks; /** * Specifies whether or not we're currently proxying requests. This is * necessary because we don't have all the initial HTTP request data, * such as the referer or the URI, when we're processing HTTP chunks. */ private boolean proxying; private final KeyStoreManager keyStoreManager; /** * Creates a new handler that reads incoming HTTP requests and dispatches * them to proxies as appropriate. * * @param keyStoreManager Keeps track of all trusted keys. */ public DispatchingProxyRelayHandler(final KeyStoreManager keyStoreManager) { //this.proxyProvider = proxyProvider; //this.proxyStatusListener = proxyStatusListener; this.keyStoreManager = keyStoreManager; // This uses the raw p2p client because all traffic sent over these // connections already uses end-to-end encryption. /* this.anonymousPeerRequestProcessor = new PeerHttpConnectRequestProcessor(new Proxy() { @Override public InetSocketAddress getProxy() { throw new UnsupportedOperationException( "Peer proxy required"); } @Override public URI getPeerProxy() { // For CONNECT we can use either an anonymous peer or a // trusted peer. 
final URI lantern = proxyProvider.getAnonymousProxy(); if (lantern == null) { return proxyProvider.getPeerProxy(); } return lantern; } }, proxyStatusListener, encryptingP2pClient); this.trustedPeerRequestProcessor = new PeerHttpRequestProcessor(new Proxy() { @Override public InetSocketAddress getProxy() { throw new UnsupportedOperationException( "Peer proxy required"); } @Override public URI getPeerProxy() { return proxyProvider.getPeerProxy(); } }, proxyStatusListener, encryptingP2pClient, this.keyStoreManager); */ this.proxyRequestProcessor = new DefaultHttpRequestProcessor(LanternHub.getProxyStatusListener(), new HttpRequestTransformer() { @Override public void transform(final HttpRequest request, final InetSocketAddress proxyAddress) { // Does nothing. } }, false, new Proxy() { @Override public URI getPeerProxy() { throw new UnsupportedOperationException( "Peer proxy not supported here."); } @Override public InetSocketAddress getProxy() { return LanternHub.getProxyProvider().getProxy(); } }, this.keyStoreManager); this.laeRequestProcessor = new DefaultHttpRequestProcessor(LanternHub.getProxyStatusListener(), new LaeHttpRequestTransformer(), true, new Proxy() { @Override public URI getPeerProxy() { throw new UnsupportedOperationException( "Peer proxy not supported here."); } @Override public InetSocketAddress getProxy() { return LanternHub.getProxyProvider().getLaeProxy(); } }, null); } @Override public void messageReceived(final ChannelHandlerContext ctx, final MessageEvent me) { messagesReceived++; log.info("Received {} total messages", messagesReceived); if (!readingChunks) { log.info("Reading HTTP request (not a chunk)..."); this.currentRequestProcessor = dispatchRequest(ctx, me); } else { log.info("Reading chunks..."); try { final HttpChunk chunk = (HttpChunk) me.getMessage(); // Remember this will typically be a persistent connection, // so we'll get another request after we're read the last // chunk. So we need to reset it back to no longer read in // chunk mode. if (chunk.isLast()) { this.readingChunks = false; } this.currentRequestProcessor.processChunk(ctx, me); } catch (final IOException e) { // Unclear what to do here. If we couldn't connect to a remote // peer, for example, we don't want to close the connection // to the browser. If the other end closed the connection, // it could have been due to connection close rules, or it // could have been because they simply went offline. log.info("Exception processing chunk", e); } } log.info("Done processing HTTP request...."); } private HttpRequestProcessor dispatchRequest( final ChannelHandlerContext ctx, final MessageEvent me) { final HttpRequest request = (HttpRequest)me.getMessage(); final String uri = request.getUri(); log.info("URI is: {}", uri); // We need to set this outside of proxying rules because we first // send incoming messages down chunked versus unchunked paths and // then send them down proxied versus unproxied paths. if (request.isChunked()) { readingChunks = true; } else { readingChunks = false; } this.proxying = LanternUtils.shouldProxy(request); if (proxying) { // If it's an HTTP request, see if we can redirect it to HTTPS. 
final String https = LanternHub.httpsEverywhere().toHttps(uri); if (!https.equals(uri)) { final HttpResponse response = new DefaultHttpResponse(request.getProtocolVersion(), HttpResponseStatus.MOVED_PERMANENTLY); response.setProtocolVersion(HttpVersion.HTTP_1_0); response.setHeader(HttpHeaders.Names.LOCATION, https); response.setHeader(HttpHeaders.Names.CONTENT_LENGTH, "0"); log.info("Sending redirect response!!"); browserToProxyChannel.write(response); ProxyUtils.closeOnFlush(browserToProxyChannel); // Note this redirect should result in a new HTTPS request // coming in on this connection or a new connection -- in fact // this redirect should always result in an HTTP CONNECT // request as a result of the redirect. That new request // will not attempt to use the existing processor, so it's // not an issue to return null here. return null; } log.info("Not converting to HTTPS"); LanternHub.statsTracker().incrementProxiedRequests(); return dispatchProxyRequest(ctx, me); } else { log.info("Not proxying!"); LanternHub.statsTracker().incrementDirectRequests(); try { this.unproxiedRequestProcessor.processRequest( browserToProxyChannel, ctx, me); } catch (final IOException e) { // This should not happen because the underlying Netty handler // does not throw an exception. log.warn("Could not handle unproxied request - should never happen", e); } return this.unproxiedRequestProcessor; } } private HttpRequestProcessor dispatchProxyRequest( final ChannelHandlerContext ctx, final MessageEvent me) { final HttpRequest request = (HttpRequest) me.getMessage(); log.info("Dispatching request"); if (request.getMethod() == HttpMethod.CONNECT) { try { if (LanternHub.settings().isUseAnonymousPeers() && LanternHub.getProxyProvider().getAnonymousPeerProxyManager().processRequest( browserToProxyChannel, ctx, me) != null) { log.info("Processed CONNECT on peer...returning"); return null; } else if (useStandardProxies()){ // We need to forward the CONNECT request from this proxy to an // external proxy that can handle it. We effectively want to // relay all traffic in this case without doing anything on // our own other than direct the CONNECT request to the correct // proxy. centralConnect(request); return null; } } catch (final IOException e) { log.warn("Could not send CONNECT to anonymous proxy", e); // This will happen whenever the server's giving us bad // anonymous proxies, which could happen quite often. // We should fall back to central. if (useStandardProxies()) { centralConnect(request); } return null; } } try { if (LanternHub.settings().isUseTrustedPeers()) { final PeerProxyManager provider = LanternHub.getProxyProvider().getTrustedPeerProxyManager(); if (provider != null) { final HttpRequestProcessor rp = provider.processRequest( browserToProxyChannel, ctx, me); if (rp != null) { return rp; } } } } catch (final IOException e) { log.info("Caught exception processing request", e); } try { if (useLae() && isLae(request) && this.laeRequestProcessor.processRequest(browserToProxyChannel, ctx, me)) { return this.laeRequestProcessor; } } catch (final IOException e) { log.info("Caught exception processing request", e); } try { if (useStandardProxies() && this.proxyRequestProcessor.processRequest( browserToProxyChannel, ctx, me)) { log.info("Used standard proxy"); return this.proxyRequestProcessor; } } catch (final IOException e) { log.info("Caught exception processing request", e); } log.warn("No proxy could process the request {}", me.getMessage()); // Not much we can do if no proxy can handle it. 
return null; } private boolean useStandardProxies() { return LanternHub.settings().isUseCentralProxies() && LanternHub.settings().isUseCloudProxies(); } private boolean useLae() { return LanternHub.settings().isUseLaeProxies() && LanternHub.settings().isUseCloudProxies(); } private void centralConnect(final HttpRequest request) { if (this.httpConnectChannelFuture == null) { log.info("Opening HTTP CONNECT tunnel"); try { this.httpConnectChannelFuture = openOutgoingRelayChannel(request); } catch (final IOException e) { log.error("Could not open CONNECT channel", e); } } else { log.error("Outbound channel already assigned?"); } } private boolean isLae(final HttpRequest request) { final String uri = request.getUri(); if (uri.contains("youtube.com")) { log.info("NOT USING LAE FOR YOUTUBE"); return false; } final HttpMethod method = request.getMethod(); if (method == HttpMethod.GET) { return true; } if (method == HttpMethod.CONNECT) { return false; } if (LanternUtils.isTransferEncodingChunked(request)) { return false; } if (method == HttpMethod.POST) { final String contentLength = request.getHeader(Names.CONTENT_LENGTH); if (StringUtils.isBlank(contentLength)) { // If it's a post without a content length, we want to be // cautious. return false; } final long cl = Long.parseLong(contentLength); if (cl > REQUEST_SIZE_LIMIT) { return false; } return true; } return false; } @Override public void channelOpen(final ChannelHandlerContext ctx, final ChannelStateEvent e) { log.info("Got incoming channel"); this.browserToProxyChannel = e.getChannel(); } private ChannelFuture openOutgoingRelayChannel(final HttpRequest request) throws IOException { this.browserToProxyChannel.setReadable(false); // Start the connection attempt. final ClientBootstrap cb = new ClientBootstrap(LanternUtils.clientSocketChannelFactory); final ChannelPipeline pipeline = cb.getPipeline(); // This is slightly odd, as we tunnel SSL inside SSL, but we'd // otherwise just be running an open CONNECT proxy. // It's also necessary to use our own engine here, as we need to trust // the cert from the proxy. 
final LanternClientSslContextFactory sslFactory = new LanternClientSslContextFactory(this.keyStoreManager); final SSLEngine engine = sslFactory.getClientContext().createSSLEngine(); engine.setUseClientMode(true); ChannelHandler stats = new StatsTrackingHandler() { @Override public void addDownBytes(long bytes, Channel channel) { // global bytes proxied statistic //log.info("Recording proxied bytes through HTTP CONNECT: {}", bytes); statsTracker().addBytesProxied(bytes, channel); // contributes to local download rate statsTracker().addDownBytesViaProxies(bytes, channel); } @Override public void addUpBytes(long bytes, Channel channel) { statsTracker().addUpBytesViaProxies(bytes, channel); } }; pipeline.addLast("stats", stats); pipeline.addLast("ssl", new SslHandler(engine)); pipeline.addLast("encoder", new HttpRequestEncoder()); pipeline.addLast("handler", new HttpConnectRelayingHandler(this.browserToProxyChannel, null)); log.info("Connecting to relay proxy"); final InetSocketAddress isa = LanternHub.getProxyProvider().getProxy(); if (isa == null) { log.error("NO PROXY AVAILABLE?"); ProxyUtils.closeOnFlush(browserToProxyChannel); throw new IOException("No proxy to use for CONNECT?"); } final ChannelFuture cf = cb.connect(isa); log.info("Got an outbound channel on: {}", hashCode()); final ChannelPipeline browserPipeline = browserToProxyChannel.getPipeline(); browserPipeline.remove("encoder"); browserPipeline.remove("decoder"); browserPipeline.remove("handler"); browserPipeline.addLast("handler", new HttpConnectRelayingHandler(cf.getChannel(), null)); // This is handy, as set readable to false while the channel is // connecting ensures we won't get any incoming messages until // we're fully connected. cf.addListener(new ChannelFutureListener() { @Override public void operationComplete(final ChannelFuture future) throws Exception { if (future.isSuccess()) { cf.getChannel().write(request).addListener( new ChannelFutureListener() { @Override public void operationComplete( final ChannelFuture channelFuture) throws Exception { // we're using HTTP connect here, so we need // to remove the encoder and start reading // from the inbound channel only when we've // used the original encoder to properly encode // the CONNECT request. pipeline.remove("encoder"); // Begin to accept incoming traffic. browserToProxyChannel.setReadable(true); } }); } else { // Close the connection if the connection attempt has failed. browserToProxyChannel.close(); LanternHub.getProxyStatusListener().onCouldNotConnect(isa); } } }); return cf; } @Override public void channelClosed(final ChannelHandlerContext ctx, final ChannelStateEvent e) { log.info("Got inbound channel closed. Closing outbound."); //this.trustedPeerRequestProcessor.close(); //this.anonymousPeerRequestProcessor.close(); if (this.currentRequestProcessor != null) { this.currentRequestProcessor.close(); } this.proxyRequestProcessor.close(); this.laeRequestProcessor.close(); } @Override public void exceptionCaught(final ChannelHandlerContext ctx, final ExceptionEvent e) throws Exception { log.error("Caught exception on INBOUND channel", e.getCause()); ProxyUtils.closeOnFlush(this.browserToProxyChannel); } }
package org.lightmare.jpa.datasource; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Properties; import java.util.concurrent.CountDownLatch; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.log4j.Logger; import org.lightmare.deploy.BeanLoader; import org.lightmare.jpa.datasource.Initializer.ConnectionConfig; import org.lightmare.utils.NamingUtils; import org.lightmare.utils.ObjectUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; public class FileParsers { // Tag names for XML file parser public static final String JBOSS_TAG_NAME = "urn:jboss:domain:datasources:1.0"; private static final String DATA_SURCE_TAG = "datasource"; private static final String USER_TAG = "user-name"; private static final String PASSWORD_TAG = "password"; private static final String DRIVER_TAG = "driver"; private static final String MAX_POOL_TAG = "max-pool-size"; private static final String MIN_POOL_TAG = "min-pool-size"; private static final String INITIAL_POOL_TAG = "prefill"; private static final String JNDI_NAME_TAG = "jndi-name"; private static final String CONNECTION_URL_TAG = "connection-url"; private static final String SECURITY_TAG = "security"; private static final String POOL_TAG = "pool"; private static final Logger LOG = Logger.getLogger(FileParsers.class); public static Document document(File file) throws IOException { return document(file.toURI().toURL()); } public static Document document(URL url) throws IOException { Document document; URLConnection connection = url.openConnection(); InputStream stream = connection.getInputStream(); try { document = parse(stream); } finally { ObjectUtils.close(stream); } return document; } /** * Gets item with first index from passed {@link NodeList} instance * * @param list * @return {@link Node} */ private static Node getFirst(NodeList list) { return list.item(ObjectUtils.FIRST_INDEX); } /** * To get text from tag depended on JRE installation * * @param element * @return {@link String} */ public static String getContext(Element element) { NodeList textList = element.getChildNodes(); Node firstNode = getFirst(textList); String data = firstNode.getNodeValue().trim(); return data; } /** * Parses XML document to initialize {@link javax.sql.DataSource}s * configuration properties * * @param stream * @return {@link Document} * @throws IOException */ public static Document parse(InputStream stream) throws IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder; Document document; try { builder = factory.newDocumentBuilder(); document = builder.parse(stream); } catch (ParserConfigurationException ex) { throw new IOException(ex); } catch (SAXException ex) { throw new IOException(ex); } return document; } public void setDataFromJBossDriver(NodeList nodeList, Properties properties) { Element thisElement = (Element) getFirst(nodeList); String name = getContext(thisElement); String driverName = DriverConfig.getDriverName(name); properties.setProperty(ConnectionConfig.DRIVER_PROPERTY.name, driverName); } /** * Gets security information from {@link javax.sql.DataSource} meta data * * @param nodeList * @param 
properties */ public void setDataFromJBossSecurity(NodeList nodeList, Properties properties) { for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); NodeList userList = thisElement.getElementsByTagName(USER_TAG); int elementLength = userList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element userElement = (Element) getFirst(userList); String user = getContext(userElement); properties.setProperty(ConnectionConfig.USER_PROPERTY.name, user); NodeList passList = thisElement.getElementsByTagName(PASSWORD_TAG); elementLength = passList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element passElement = (Element) getFirst(passList); String password = getContext(passElement); properties.setProperty(ConnectionConfig.PASSWORD_PROPERTY.name, password); } } /** * Gets security information from {@link javax.sql.DataSource} meta data * * @param nodeList * @param properties */ public void setDataFromJBossPool(NodeList nodeList, Properties properties) { for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); NodeList minPoolSizeList = thisElement .getElementsByTagName(MIN_POOL_TAG); int elementLength = minPoolSizeList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element minPoolSizeElement = (Element) getFirst(minPoolSizeList); String minPoolSize = getContext(minPoolSizeElement); properties.setProperty(PoolConfig.Defaults.MIN_POOL_SIZE.key, minPoolSize); NodeList maxPoolSizeList = thisElement .getElementsByTagName(MAX_POOL_TAG); elementLength = maxPoolSizeList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element maxPoolSizeElement = (Element) getFirst(maxPoolSizeList); String maxPoolSize = getContext(maxPoolSizeElement); properties.setProperty(PoolConfig.Defaults.MAX_POOL_SIZE.key, maxPoolSize); NodeList initPoolSizeList = thisElement .getElementsByTagName(INITIAL_POOL_TAG); elementLength = initPoolSizeList.getLength(); if (elementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element initPoolSizeElement = (Element) getFirst(initPoolSizeList); String prefill = getContext(initPoolSizeElement); if (Boolean.valueOf(prefill)) { properties.setProperty( PoolConfig.Defaults.INITIAL_POOL_SIZE.key, minPoolSize); } } } /** * Gets {@link javax.sql.DataSource}s configuration properties as * {@link List} of {@link Properties} * * @param nodeList * @return */ public List<Properties> getDataFromJBoss(NodeList nodeList) { List<Properties> properties = new ArrayList<Properties>(); String jndiName; String clearName; for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); Properties props = new Properties(); jndiName = thisElement.getAttribute(JNDI_NAME_TAG); clearName = NamingUtils.clearDataSourceName(jndiName); props.setProperty(ConnectionConfig.JNDI_NAME_PROPERTY.name, jndiName); props.setProperty(ConnectionConfig.NAME_PROPERTY.name, clearName); NodeList urlList = thisElement .getElementsByTagName(CONNECTION_URL_TAG); int urlElementLength = urlList.getLength(); if (urlElementLength == ObjectUtils.EMPTY_ARRAY_LENGTH) { continue; } Element urlElement = (Element) getFirst(urlList); String url = getContext(urlElement); props.setProperty(ConnectionConfig.URL_PROPERTY.name, url); NodeList securityList = thisElement .getElementsByTagName(SECURITY_TAG); setDataFromJBossSecurity(securityList, props); NodeList poolList = 
thisElement.getElementsByTagName(POOL_TAG); setDataFromJBossPool(poolList, props); NodeList driverList = thisElement.getElementsByTagName(DRIVER_TAG); setDataFromJBossDriver(driverList, props); properties.add(props); } return properties; } private static NodeList getDataSourceTags(Document document) { NodeList nodeList = document.getElementsByTagName(DATA_SURCE_TAG); return nodeList; } private static NodeList getDataSourceTags(File file) throws IOException { Document document = document(file); NodeList nodeList = getDataSourceTags(document); return nodeList; } private static NodeList getDataSourceTags(String dataSourcePath) throws IOException { File file = new File(dataSourcePath); NodeList nodeList = getDataSourceTags(file); return nodeList; } /** * Retrieves data source JNDI names from passed file * * @param dataSourcePath * @return * @throws IOException */ public static Collection<String> dataSourceNames(String dataSourcePath) throws IOException { Collection<String> jndiNames = new HashSet<String>(); NodeList nodeList = getDataSourceTags(dataSourcePath); String jndiName; for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); jndiName = thisElement.getAttribute(JNDI_NAME_TAG); jndiNames.add(jndiName); } return jndiNames; } /** * Parses standalone.xml file and initializes {@link javax.sql.DataSource}s * and binds them to JNDI context * * @param dataSourcePath * @throws IOException */ public void parseStandaloneXml(String dataSourcePath) throws IOException { NodeList nodeList = getDataSourceTags(dataSourcePath); List<Properties> properties = getDataFromJBoss(nodeList); // Blocking semaphore before all data source initialization finished CountDownLatch blocker = new CountDownLatch(properties.size()); BeanLoader.DataSourceParameters parameters; for (Properties props : properties) { try { // Initializes and fills BeanLoader.DataSourceParameters class // to deploy data source parameters = new BeanLoader.DataSourceParameters(); parameters.properties = props; parameters.blocker = blocker; BeanLoader.initializeDatasource(parameters); } catch (IOException ex) { LOG.error(InitMessages.INITIALIZING_ERROR, ex); } } try { blocker.await(); } catch (InterruptedException ex) { throw new IOException(ex); } Initializer.setDsAsInitialized(dataSourcePath); } }
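// Hedged sketch: driving the FileParsers API above end to end. The standalone.xml
// location is an assumption, and the sketch assumes it sits in (or can see) the
// org.lightmare.jpa.datasource package; dataSourceNames(...) and parseStandaloneXml(...)
// are the public entry points defined in the class itself.
import java.io.IOException;
import java.util.Collection;

final class FileParsersUsageSketch {

    public static void main(String[] args) throws IOException {
        String dataSourcePath = "config/standalone.xml"; // assumed location

        // List the JNDI names declared in the file.
        Collection<String> jndiNames = FileParsers.dataSourceNames(dataSourcePath);
        System.out.println("Found data sources: " + jndiNames);

        // Parse the same file, initialize each data source and bind it to JNDI.
        new FileParsers().parseStandaloneXml(dataSourcePath);
    }
}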
package org.lightmare.jpa.datasource; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Properties; import java.util.concurrent.CountDownLatch; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.log4j.Logger; import org.lightmare.deploy.BeanLoader; import org.lightmare.utils.NamingUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import com.mchange.v2.codegen.bean.Property; public class FileParsers { public static final String JBOSS_TAG_NAME = "urn:jboss:domain:datasources:1.0"; private static final String DATA_SURCE_TAG = "datasource"; private static final String USER_TAG = "user-name"; private static final String PASSWORD_TAG = "password"; private static final String DRIVER_TAG = "driver"; private static final String MAX_POOL_TAG = "max-pool-size"; private static final String MIN_POOL_TAG = "min-pool-size"; private static final String INITIAL_POOL_TAG = "prefill"; private static final String JNDI_NAME_TAG = "jndi-name"; private static final String CONNECTION_URL_TAG = "connection-url"; private static final String SECURITY_TAG = "security"; private static final String POOL_TAG = "pool"; private static final Logger LOG = Logger.getLogger(FileParsers.class); public static Document document(File file) throws IOException { return document(file.toURI().toURL()); } public static Document document(URL url) throws IOException { URLConnection connection = url.openConnection(); InputStream stream = connection.getInputStream(); try { return parse(stream); } finally { stream.close(); } } /** * To get text from tag depended on jre installation * * @param element * @return {@link String} */ public static String getContext(Element element) { NodeList textList = element.getChildNodes(); String data = ((Node) textList.item(0)).getNodeValue().trim(); return data; } /** * Parses xml document to initialize {@link javax.sql.DataSource}s * configuration properties * * @param stream * @return {@link Document} * @throws IOException */ public static Document parse(InputStream stream) throws IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder; Document document; try { builder = factory.newDocumentBuilder(); document = builder.parse(stream); } catch (ParserConfigurationException ex) { throw new IOException(ex); } catch (SAXException ex) { throw new IOException(ex); } return document; } public void setDataFromJBossDriver(NodeList nodeList, Properties properties) { Element thisElement = (Element) nodeList.item(0); String name = getContext(thisElement); String driverName = DriverConfig.getDriverName(name); properties.setProperty(DataSourceInitializer.DRIVER_PROPERTY, driverName); } /** * Gets security information from {@link javax.sql.DataSource} meta data * * @param nodeList * @param properties */ public void setDataFromJBossSecurity(NodeList nodeList, Properties properties) { for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); NodeList userList = thisElement.getElementsByTagName(USER_TAG); int elementLength = userList.getLength(); if (elementLength == 0) { continue; } Element userElement = (Element) 
userList.item(0); String user = getContext(userElement); properties.setProperty(DataSourceInitializer.USER_PROPERTY, user); NodeList passList = thisElement.getElementsByTagName(PASSWORD_TAG); elementLength = passList.getLength(); if (elementLength == 0) { continue; } Element passElement = (Element) passList.item(0); String password = getContext(passElement); properties.setProperty(DataSourceInitializer.PASSWORD_PROPERTY, password); } } /** * Gets security information from {@link javax.sql.DataSource} meta data * * @param nodeList * @param properties */ public void setDataFromJBossPool(NodeList nodeList, Properties properties) { for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); NodeList minPoolSizeList = thisElement .getElementsByTagName(MIN_POOL_TAG); int elementLength = minPoolSizeList.getLength(); if (elementLength == 0) { continue; } Element minPoolSizeElement = (Element) minPoolSizeList.item(0); String minPoolSize = getContext(minPoolSizeElement); properties.setProperty(PoolConfig.MIN_POOL_SIZE, minPoolSize); NodeList maxPoolSizeList = thisElement .getElementsByTagName(MAX_POOL_TAG); elementLength = maxPoolSizeList.getLength(); if (elementLength == 0) { continue; } Element maxPoolSizeElement = (Element) maxPoolSizeList.item(0); String maxPoolSize = getContext(maxPoolSizeElement); properties.setProperty(PoolConfig.MAX_POOL_SIZE, maxPoolSize); NodeList initPoolSizeList = thisElement .getElementsByTagName(INITIAL_POOL_TAG); elementLength = initPoolSizeList.getLength(); if (elementLength == 0) { continue; } Element initPoolSizeElement = (Element) initPoolSizeList.item(0); String prefill = getContext(initPoolSizeElement); if (Boolean.valueOf(prefill)) { properties.setProperty(PoolConfig.INITIAL_POOL_SIZE, minPoolSize); } } } /** * Gets {@link javax.sql.DataSource}s configuration properties as * {@link List} of {@link Property} * * @param nodeList * @return */ public List<Properties> getDataFromJBoss(NodeList nodeList) { List<Properties> properties = new ArrayList<Properties>(); String jndiName; String clearName; for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); Properties props = new Properties(); jndiName = thisElement.getAttribute(JNDI_NAME_TAG); clearName = NamingUtils.clearDataSourceName(jndiName); props.setProperty(DataSourceInitializer.JNDI_NAME_PROPERTY, jndiName); props.setProperty(DataSourceInitializer.NAME_PROPERTY, clearName); NodeList urlList = thisElement .getElementsByTagName(CONNECTION_URL_TAG); int urlElementLength = urlList.getLength(); if (urlElementLength == 0) { continue; } Element urlElement = (Element) urlList.item(0); String url = getContext(urlElement); props.setProperty(DataSourceInitializer.URL_PROPERTY, url); NodeList securityList = thisElement .getElementsByTagName(SECURITY_TAG); setDataFromJBossSecurity(securityList, props); NodeList poolList = thisElement.getElementsByTagName(POOL_TAG); setDataFromJBossPool(poolList, props); NodeList driverList = thisElement.getElementsByTagName(DRIVER_TAG); setDataFromJBossDriver(driverList, props); properties.add(props); } return properties; } private static NodeList getDataSourceTags(Document document) { NodeList nodeList = document.getElementsByTagName(DATA_SURCE_TAG); return nodeList; } private static NodeList getDataSourceTags(File file) throws IOException { Document document = document(file); NodeList nodeList = getDataSourceTags(document); return nodeList; } private static NodeList getDataSourceTags(String dataSourcePath) 
throws IOException { File file = new File(dataSourcePath); NodeList nodeList = getDataSourceTags(file); return nodeList; } /** * Retrieves data source jndi names from passed file * * @param dataSourcePath * @return * @throws IOException */ public static Collection<String> dataSourceNames(String dataSourcePath) throws IOException { Collection<String> jndiNames = new HashSet<String>(); NodeList nodeList = getDataSourceTags(dataSourcePath); String jndiName; for (int i = 0; i < nodeList.getLength(); i++) { Element thisElement = (Element) nodeList.item(i); jndiName = thisElement.getAttribute(JNDI_NAME_TAG); jndiNames.add(jndiName); } return jndiNames; } /** * Parses standalone.xml file and initializes {@link javax.sql.DataSource}s * and binds them to jndi context * * @param dataSourcePath * @throws IOException */ public void parseStandaloneXml(String dataSourcePath) throws IOException { NodeList nodeList = getDataSourceTags(dataSourcePath); List<Properties> properties = getDataFromJBoss(nodeList); DataSourceInitializer initializer = new DataSourceInitializer(); // Blocking semaphore before all data source initialization finished CountDownLatch blocker = new CountDownLatch(properties.size()); BeanLoader.DataSourceParameters parameters; for (Properties props : properties) { try { // Initializes and fills BeanLoader.DataSourceParameters class // to deploy data source parameters = new BeanLoader.DataSourceParameters(); parameters.initializer = initializer; parameters.properties = props; parameters.blocker = blocker; BeanLoader.initializeDatasource(parameters); } catch (IOException ex) { LOG.error("Could not initialize datasource", ex); } } try { blocker.await(); } catch (InterruptedException ex) { throw new IOException(ex); } DataSourceInitializer.setDsAsInitialized(dataSourcePath); } }
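// Hedged sketch: a minimal in-memory fragment containing the tags both FileParsers
// variants read (jndi-name, connection-url, driver, security/user-name/password,
// pool/min-pool-size/max-pool-size/prefill). The XML values are illustrative only,
// and the sketch assumes access to the org.lightmare.jpa.datasource package.
import java.io.ByteArrayInputStream;
import java.util.List;
import java.util.Properties;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;

final class DataSourceXmlSketch {

    public static void main(String[] args) throws Exception {
        String xml = "<datasources>"
                + "<datasource jndi-name=\"java:/jdbc/exampleDS\">"
                + "<connection-url>jdbc:h2:mem:example</connection-url>"
                + "<driver>h2</driver>"
                + "<security><user-name>sa</user-name><password>sa</password></security>"
                + "<pool><min-pool-size>1</min-pool-size>"
                + "<max-pool-size>5</max-pool-size><prefill>true</prefill></pool>"
                + "</datasource></datasources>";

        // parse(...) is the same entry point parseStandaloneXml(...) uses internally.
        Document document = FileParsers.parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        NodeList dataSources = document.getElementsByTagName("datasource");

        // Each <datasource> element becomes one Properties instance.
        List<Properties> configs = new FileParsers().getDataFromJBoss(dataSources);
        System.out.println(configs);
    }
}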
package org.lightmare.jpa.jta; import java.io.IOException; import java.util.Collection; import javax.ejb.TransactionAttributeType; import javax.persistence.EntityManager; import javax.persistence.EntityTransaction; import javax.transaction.HeuristicMixedException; import javax.transaction.HeuristicRollbackException; import javax.transaction.RollbackException; import javax.transaction.SystemException; import javax.transaction.UserTransaction; import org.lightmare.ejb.handlers.BeanHandler; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.ObjectUtils; /** * Class to manager {@link UserTransaction} creation and closure * * @author levan * */ public class TransactionManager { // Error messages private static final String ISNANTIATING_ERROR = "Class TransactionManager can not be instntiate"; private TransactionManager() { throw new InstantiationError(ISNANTIATING_ERROR); } /** * Adds {@link EntityTransaction} to passed {@link UserTransaction} instance * * @param userTransaction * @param entityTransaction * @param em */ protected static void addEntityTransaction(UserTransaction userTransaction, EntityTransaction entityTransaction, EntityManager em) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); if (ObjectUtils.notNull(entityTransaction)) { transaction.addTransaction(entityTransaction); } if (ObjectUtils.notNull(em)) { transaction.addEntityManager(em); } } } /** * Adds {@link EntityTransaction} for each * {@link BeanTransactions.TransactionData} to passed * {@link UserTransaction} instance * * @param userTransaction * @param entityTransactions */ protected static void addEntityTransactions( UserTransaction userTransaction, Collection<BeanTransactions.TransactionData> entityTransactions) { if (userTransaction instanceof UserTransactionImpl && CollectionUtils.valid(entityTransactions)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); for (BeanTransactions.TransactionData transactionData : entityTransactions) { addEntityTransaction(transaction, transactionData.entityTransaction, transactionData.em); } } } /** * Adds {@link EntityManager} to passed {@link UserTransaction} instance * * @param userTransaction * @param em */ protected static void addEntityManager(UserTransaction userTransaction, EntityManager em) { if (userTransaction instanceof UserTransactionImpl && ObjectUtils.notNull(em)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); transaction.addEntityManager(em); } } /** * Adds each {@link EntityManager} from {@link Collection} to passed * {@link UserTransaction} instance * * @param userTransaction * @param ems */ protected static void addEntityManagers(UserTransaction userTransaction, Collection<EntityManager> ems) { if (userTransaction instanceof UserTransactionImpl && CollectionUtils.valid(ems)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); for (EntityManager em : ems) { addEntityManager(transaction, em); } } } /** * Adds {@link EntityTransaction} to requires new stack in passed * {@link UserTransaction} instance * * @param userTransaction * @param entityTransaction * @param em */ private static void addReqNewTransaction(UserTransaction userTransaction, EntityTransaction entityTransaction, EntityManager em) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = 
ObjectUtils.cast(userTransaction, UserTransactionImpl.class); if (ObjectUtils.notNull(entityTransaction)) { transaction.pushReqNew(entityTransaction); } if (ObjectUtils.notNull(em)) { transaction.pushReqNewEm(em); } } } /** * Adds {@link EntityTransaction} for each * {@link BeanTransactions.TransactionData} to requires new stack in passed * {@link UserTransaction} instance * * @param userTransaction * @param entityTransactions */ protected static void addReqNewTransactions( UserTransaction userTransaction, Collection<BeanTransactions.TransactionData> entityTransactions) { if (userTransaction instanceof UserTransactionImpl && CollectionUtils.valid(entityTransactions)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); for (BeanTransactions.TransactionData transactionData : entityTransactions) { addReqNewTransaction(transaction, transactionData.entityTransaction, transactionData.em); } } } /** * Adds caller to passed {@link UserTransaction} instance * * @param userTransaction * @param handler */ protected static void addCaller(UserTransaction userTransaction, BeanHandler handler) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); Object caller = transaction.getCaller(); if (caller == null) { transaction.setCaller(handler); } } } /** * Commits passed {@link UserTransaction} with {@link IOException} throw * * @param transaction * @throws IOException */ protected static void commit(UserTransaction transaction) throws IOException { try { transaction.commit(); } catch (SecurityException ex) { throw new IOException(ex); } catch (IllegalStateException ex) { throw new IOException(ex); } catch (RollbackException ex) { throw new IOException(ex); } catch (HeuristicMixedException ex) { throw new IOException(ex); } catch (HeuristicRollbackException ex) { throw new IOException(ex); } catch (SystemException ex) { throw new IOException(ex); } } /** * Commits all {@link TransactionAttributeType.REQUIRES_NEW} transactions * for passed {@link UserTransactionImpl} with {@link IOException} throw * * @param transaction * @throws IOException */ protected static void commitReqNew(UserTransaction userTransaction) throws IOException { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); try { transaction.commitReqNew(); } catch (SecurityException ex) { throw new IOException(ex); } catch (IllegalStateException ex) { throw new IOException(ex); } catch (RollbackException ex) { throw new IOException(ex); } catch (HeuristicMixedException ex) { throw new IOException(ex); } catch (HeuristicRollbackException ex) { throw new IOException(ex); } catch (SystemException ex) { throw new IOException(ex); } } } /** * Calls {@link UserTransaction#rollback()} method of passed * {@link UserTransaction} with {@link IOException} throw * * @param transaction * @throws IOException */ private static void rollback(UserTransaction transaction) throws IOException { try { transaction.rollback(); } catch (IllegalStateException ex) { throw new IOException(ex); } catch (SecurityException ex) { throw new IOException(ex); } catch (SystemException ex) { throw new IOException(ex); } } /** * Calls {@link UserTransactionImpl#rollbackReqNews()} method of passed * {@link UserTransaction} with {@link IOException} throw * * @param transaction * @throws IOException */ protected static void 
rollbackReqNew(UserTransaction userTransaction) throws IOException { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); try { transaction.rollbackReqNews(); } catch (IllegalStateException ex) { throw new IOException(ex); } catch (SecurityException ex) { throw new IOException(ex); } catch (SystemException ex) { throw new IOException(ex); } } } /** * Checks if passed {@link BeanHandler} is first caller / beginner of passed * {@link UserTransaction} instance * * @param userTransaction * @param handler * @return <code>boolean</code> */ protected static boolean checkCaller(UserTransaction userTransaction, BeanHandler handler) { boolean check = (userTransaction instanceof UserTransactionImpl); if (check) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); check = transaction.checkCaller(handler); } return check; } /** * Closes cached {@link EntityManager}s after method call * * @param userTransaction */ public static void closeEntityManagers(UserTransaction userTransaction) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); transaction.closeEntityManagers(); } } }
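// Hedged sketch of the idiom TransactionManager.commit(...) above relies on: every
// checked or runtime failure from the JTA API is re-thrown as an IOException so callers
// handle a single exception type. This is a restatement, not part of the class; on
// Java 7+ the six separate catch blocks collapse into one multi-catch as shown here.
import java.io.IOException;
import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;

final class JtaCommitSketch {

    // Commits the transaction, translating all JTA failures into IOException.
    static void commit(UserTransaction transaction) throws IOException {
        try {
            transaction.commit();
        } catch (SecurityException | IllegalStateException | RollbackException
                | HeuristicMixedException | HeuristicRollbackException
                | SystemException ex) {
            throw new IOException(ex);
        }
    }
}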
package org.lightmare.jpa.jta; import java.io.IOException; import java.util.Collection; import javax.ejb.TransactionAttributeType; import javax.persistence.EntityManager; import javax.persistence.EntityTransaction; import javax.transaction.HeuristicMixedException; import javax.transaction.HeuristicRollbackException; import javax.transaction.RollbackException; import javax.transaction.SystemException; import javax.transaction.UserTransaction; import org.lightmare.cache.TransactionHolder; import org.lightmare.ejb.handlers.BeanHandler; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.ObjectUtils; /** * Class to manager {@link UserTransaction} creation and closure * * @author levan * @since 0.0.82-SNAPSHOT */ public class TransactionManager { // Error messages private static final String ISNANTIATING_ERROR = "Class TransactionManager can not be instntiate"; private TransactionManager() { throw new InstantiationError(ISNANTIATING_ERROR); } protected static boolean isTransactionalType(TransactionAttributeType type) { return type.equals(TransactionAttributeType.REQUIRED) || type.equals(TransactionAttributeType.MANDATORY) || type.equals(TransactionAttributeType.SUPPORTS); } protected static boolean isFreeType(TransactionAttributeType type) { } /** * Adds {@link EntityTransaction} to passed {@link UserTransaction} instance * * @param userTransaction * @param entityTransaction * @param em */ protected static void addEntityTransaction(UserTransaction userTransaction, EntityTransaction entityTransaction, EntityManager em) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); if (ObjectUtils.notNull(entityTransaction)) { transaction.addTransaction(entityTransaction); } if (ObjectUtils.notNull(em)) { transaction.addEntityManager(em); } } } /** * Adds {@link EntityTransaction} for each * {@link BeanTransactions.TransactionData} to passed * {@link UserTransaction} instance * * @param userTransaction * @param entityTransactions */ protected static void addEntityTransactions( UserTransaction userTransaction, Collection<BeanTransactions.TransactionData> entityTransactions) { if (userTransaction instanceof UserTransactionImpl && CollectionUtils.valid(entityTransactions)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); for (BeanTransactions.TransactionData transactionData : entityTransactions) { addEntityTransaction(transaction, transactionData.entityTransaction, transactionData.em); } } } /** * Adds {@link EntityManager} to passed {@link UserTransaction} instance * * @param userTransaction * @param em */ protected static void addEntityManager(UserTransaction userTransaction, EntityManager em) { if (userTransaction instanceof UserTransactionImpl && ObjectUtils.notNull(em)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); transaction.addEntityManager(em); } } /** * Adds each {@link EntityManager} from {@link Collection} to passed * {@link UserTransaction} instance * * @param userTransaction * @param ems */ protected static void addEntityManagers(UserTransaction userTransaction, Collection<EntityManager> ems) { if (userTransaction instanceof UserTransactionImpl && CollectionUtils.valid(ems)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); for (EntityManager em : ems) { addEntityManager(transaction, em); } } } /** * Adds {@link EntityManager}s 
without transaction scope * * @param userTransaction * @param ems */ protected static void addNotTransactionalEntityManagers( UserTransaction userTransaction, Collection<EntityManager> ems) { if (userTransaction instanceof UserTransactionImpl && CollectionUtils.valid(ems)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); for (EntityManager em : ems) { transaction.pushFreeEntityManager(em); } } } /** * Adds {@link EntityTransaction} to requires new stack in passed * {@link UserTransaction} instance * * @param userTransaction * @param entityTransaction * @param em */ private static void addReqNewTransaction(UserTransaction userTransaction, EntityTransaction entityTransaction, EntityManager em) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); if (ObjectUtils.notNull(entityTransaction)) { transaction.pushReqNew(entityTransaction); } if (ObjectUtils.notNull(em)) { transaction.pushReqNewEm(em); } } } /** * Adds {@link EntityTransaction} for each * {@link BeanTransactions.TransactionData} to requires new stack in passed * {@link UserTransaction} instance * * @param userTransaction * @param entityTransactions */ protected static void addReqNewTransactions( UserTransaction userTransaction, Collection<BeanTransactions.TransactionData> entityTransactions) { if (userTransaction instanceof UserTransactionImpl && CollectionUtils.valid(entityTransactions)) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); for (BeanTransactions.TransactionData transactionData : entityTransactions) { addReqNewTransaction(transaction, transactionData.entityTransaction, transactionData.em); } } } /** * Adds caller to passed {@link UserTransaction} instance * * @param userTransaction * @param handler */ protected static void addCaller(UserTransaction userTransaction, BeanHandler handler) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); Object caller = transaction.getCaller(); if (caller == null) { transaction.setCaller(handler); } } } /** * Commits passed {@link UserTransaction} with {@link IOException} throw * * @param transaction * @throws IOException */ protected static void commit(UserTransaction transaction) throws IOException { try { transaction.commit(); } catch (SecurityException ex) { throw new IOException(ex); } catch (IllegalStateException ex) { throw new IOException(ex); } catch (RollbackException ex) { throw new IOException(ex); } catch (HeuristicMixedException ex) { throw new IOException(ex); } catch (HeuristicRollbackException ex) { throw new IOException(ex); } catch (SystemException ex) { throw new IOException(ex); } } /** * Commits all {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} * transactions from requires new stack in passed {@link UserTransaction} * with {@link IOException} throw * * @param transaction * @throws IOException */ protected static void commitReqNew(UserTransaction userTransaction) throws IOException { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); try { transaction.commitReqNew(); } catch (SecurityException ex) { throw new IOException(ex); } catch (IllegalStateException ex) { throw new IOException(ex); } catch (RollbackException ex) { throw new IOException(ex); } catch 
(HeuristicMixedException ex) { throw new IOException(ex); } catch (HeuristicRollbackException ex) { throw new IOException(ex); } catch (SystemException ex) { throw new IOException(ex); } } } /** * Rollbacks passed {@link UserTransaction} with {@link IOException} throw * * @param transaction * @throws IOException */ protected static void rollback(UserTransaction transaction) throws IOException { try { transaction.rollback(); } catch (IllegalStateException ex) { throw new IOException(ex); } catch (SecurityException ex) { throw new IOException(ex); } catch (SystemException ex) { throw new IOException(ex); } } /** * Rollbacks all {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} * transactions from requires new stack in method of passed * {@link UserTransaction} with {@link IOException} throw * * @param transaction * @throws IOException */ protected static void rollbackReqNew(UserTransaction userTransaction) throws IOException { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); try { transaction.rollbackReqNews(); } catch (IllegalStateException ex) { throw new IOException(ex); } catch (SecurityException ex) { throw new IOException(ex); } catch (SystemException ex) { throw new IOException(ex); } } } /** * Checks if passed {@link BeanHandler} is first caller / beginner of passed * {@link UserTransaction} instance * * @param userTransaction * @param handler * @return <code>boolean</code> */ protected static boolean checkCaller(UserTransaction userTransaction, BeanHandler handler) { boolean check = (userTransaction instanceof UserTransactionImpl); if (check) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); check = transaction.checkCaller(handler); } return check; } /** * Closes all cached {@link EntityManager}s from stack in passed * {@link UserTransaction} instance * * @param userTransaction */ public static void closeEntityManagers(UserTransaction userTransaction) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); transaction.closeEntityManagers(); } } /** * Closes all cached {@link EntityManager}s which are not in transaction * scope * * @param userTransaction */ public static void closeFreeEntityManagers(UserTransaction userTransaction) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); transaction.closeFreeEntityManagers(); } } /** * Closes all cached {@link EntityManager} instances * * @param userTransaction */ public static void close(UserTransaction userTransaction) { if (userTransaction instanceof UserTransactionImpl) { UserTransactionImpl transaction = ObjectUtils.cast(userTransaction, UserTransactionImpl.class); transaction.close(); } } /** * Removes transaction from {@link Thread} cache * * @param transaction */ public static void remove(UserTransaction transaction) { try { close(transaction); } finally { TransactionHolder.removeTransaction(); } } }
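// The isFreeType(...) method in the class above is declared without a body. A plausible
// complement to isTransactionalType(...) is sketched here; treating NOT_SUPPORTED and
// NEVER as the "free" (non-transactional) attribute types is an assumption, not taken
// from the source.
import javax.ejb.TransactionAttributeType;

final class TransactionTypeSketch {

    // Assumed: attribute types that run outside any container-managed transaction.
    static boolean isFreeType(TransactionAttributeType type) {
        return type.equals(TransactionAttributeType.NOT_SUPPORTED)
                || type.equals(TransactionAttributeType.NEVER);
    }
}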
package org.littleshoot.proxy; import static org.jboss.netty.channel.Channels.pipeline; import java.lang.management.ManagementFactory; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.nio.channels.ClosedChannelException; import java.nio.charset.Charset; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import javax.management.InstanceAlreadyExistsException; import javax.management.InstanceNotFoundException; import javax.management.MBeanRegistrationException; import javax.management.MBeanServer; import javax.management.MalformedObjectNameException; import javax.management.NotCompliantMBeanException; import javax.management.ObjectName; import org.apache.commons.lang3.StringUtils; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelFutureListener; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.group.ChannelGroup; import org.jboss.netty.channel.socket.ClientSocketChannelFactory; import org.jboss.netty.handler.codec.http.DefaultHttpResponse; import org.jboss.netty.handler.codec.http.HttpChunk; import org.jboss.netty.handler.codec.http.HttpHeaders; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpRequestEncoder; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.codec.http.HttpVersion; import org.littleshoot.dnssec4j.VerifiedAddressFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Class for handling all HTTP requests from the browser to the proxy. * * Note this class only ever handles a single connection from the browser. * The browser can and will, however, send requests to multiple hosts using * that same connection, i.e. it will send a request to host B once a request * to host A has completed. 
*/ public class HttpRequestHandler extends SimpleChannelUpstreamHandler implements RelayListener, ConnectionData { private final static Logger log = LoggerFactory.getLogger(HttpRequestHandler.class); private volatile boolean readingChunks; private static final AtomicInteger totalBrowserToProxyConnections = new AtomicInteger(0); private final AtomicInteger browserToProxyConnections = new AtomicInteger(0); private final Map<String, Queue<ChannelFuture>> externalHostsToChannelFutures = new ConcurrentHashMap<String, Queue<ChannelFuture>>(); private final AtomicInteger messagesReceived = new AtomicInteger(0); private final AtomicInteger unansweredRequestCount = new AtomicInteger(0); private final AtomicInteger requestsSent = new AtomicInteger(0); private final AtomicInteger responsesReceived = new AtomicInteger(0); private final ProxyAuthorizationManager authorizationManager; private final Set<String> answeredRequests = new HashSet<String>(); private final Set<String> unansweredRequests = new HashSet<String>(); private final Set<HttpRequest> unansweredHttpRequests = new HashSet<HttpRequest>(); private ChannelFuture currentChannelFuture; /** * This lock is necessary for when a second chunk arrives in a request * before we've even created the current channel future. */ private final Object channelFutureLock = new Object(); private final ChainProxyManager chainProxyManager; private final ChannelGroup channelGroup; private final ProxyCacheManager cacheManager; private final AtomicBoolean browserChannelClosed = new AtomicBoolean(false); private volatile boolean receivedChannelClosed = false; private final RelayPipelineFactoryFactory relayPipelineFactoryFactory; private ClientSocketChannelFactory clientChannelFactory; /** * This flag is necessary for edge cases where we prematurely halt request * processing but where there may be more incoming chunks for the request * (in cases where the request is chunked). This happens, for example, with * proxy authentication and chunked posts or when the external host just * does not resolve to an IP address. In those cases we prematurely return * pre-packaged responses and halt request processing but still need to * handle any future chunks associated with the request coming in on the * client channel. */ private boolean pendingRequestChunks = false; private ObjectName mxBeanName; /** * Creates a new class for handling HTTP requests with no frills. * * @param relayPipelineFactoryFactory The factory for creating factories * for channels to relay data from external sites back to clients. * @param clientChannelFactory The factory for creating outgoing channels * to external sites. */ public HttpRequestHandler( final RelayPipelineFactoryFactory relayPipelineFactoryFactory, final ClientSocketChannelFactory clientChannelFactory) { this(null, null, null, null, relayPipelineFactoryFactory, clientChannelFactory); } /** * Creates a new class for handling HTTP requests with the specified * authentication manager. * * @param cacheManager The manager for the cache. * @param authorizationManager The class that handles any * proxy authentication requirements. * @param channelGroup The group of channels for keeping track of all * channels we've opened. * @param relayPipelineFactoryFactory The factory for creating factories * for channels to relay data from external sites back to clients. * @param clientChannelFactory The factory for creating outgoing channels * to external sites. 
*/ public HttpRequestHandler(final ProxyCacheManager cacheManager, final ProxyAuthorizationManager authorizationManager, final ChannelGroup channelGroup, final RelayPipelineFactoryFactory relayPipelineFactoryFactory, final ClientSocketChannelFactory clientChannelFactory) { this(cacheManager, authorizationManager, channelGroup, null, relayPipelineFactoryFactory, clientChannelFactory); } /** * Creates a new class for handling HTTP requests with the specified * authentication manager. * * @param cacheManager The manager for the cache. * @param authorizationManager The class that handles any * proxy authentication requirements. * @param channelGroup The group of channels for keeping track of all * channels we've opened. * @param chainProxyManager upstream proxy server host and port or null * if none used. * @param relayPipelineFactoryFactory The relay pipeline factory. * @param clientChannelFactory The factory for creating outgoing channels * to external sites. */ public HttpRequestHandler(final ProxyCacheManager cacheManager, final ProxyAuthorizationManager authorizationManager, final ChannelGroup channelGroup, final ChainProxyManager chainProxyManager, final RelayPipelineFactoryFactory relayPipelineFactoryFactory, final ClientSocketChannelFactory clientChannelFactory) { log.info("Creating new request handler..."); this.clientChannelFactory = clientChannelFactory; this.cacheManager = cacheManager; this.authorizationManager = authorizationManager; this.channelGroup = channelGroup; this.chainProxyManager = chainProxyManager; this.relayPipelineFactoryFactory = relayPipelineFactoryFactory; if (LittleProxyConfig.isUseJmx()) { setupJmx(); } } private void setupJmx() { final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { final Class<? extends SimpleChannelUpstreamHandler> clazz = getClass(); final String pack = clazz.getPackage().getName(); final String oName = pack+":type="+clazz.getSimpleName()+"-"+clazz.getSimpleName() + "-"+hashCode(); log.info("Registering MBean with name: {}", oName); mxBeanName = new ObjectName(oName); if(!mbs.isRegistered(mxBeanName)) { mbs.registerMBean(this, mxBeanName); } } catch (final MalformedObjectNameException e) { log.error("Could not set up JMX", e); } catch (final InstanceAlreadyExistsException e) { log.error("Could not set up JMX", e); } catch (final MBeanRegistrationException e) { log.error("Could not set up JMX", e); } catch (final NotCompliantMBeanException e) { log.error("Could not set up JMX", e); } } protected void cleanupJmx() { final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { mbs.unregisterMBean(mxBeanName); } catch (MBeanRegistrationException e) { //that's OK, because we won't leak } catch (InstanceNotFoundException e) { //ditto } } @Override public void messageReceived(final ChannelHandlerContext ctx, final MessageEvent me) { if (browserChannelClosed.get()) { log.info("Ignoring message since the connection to the browser " + "is about to close"); return; } messagesReceived.incrementAndGet(); log.debug("Received "+messagesReceived+" total messages"); if (!readingChunks) { processRequest(ctx, me); } else { processChunk(me); } } private void processChunk(final MessageEvent me) { log.info("Processing chunk..."); final HttpChunk chunk = (HttpChunk) me.getMessage(); // Remember this will typically be a persistent connection, so we'll // get another request after we're read the last chunk. So we need to // reset it back to no longer read in chunk mode. 
if (chunk.isLast()) { this.readingChunks = false; } // It's possible to receive a chunk before a channel future has even // been set. It's also possible for this to happen for requests that // require proxy authentication or in cases where we get a DNS lookup // error trying to reach the remote site. if (this.currentChannelFuture == null) { // First deal with the case where a proxy authentication manager // is active and we've received an HTTP POST requiring // authentication. In that scenario, we've returned a // 407 Proxy Authentication Required response, but we still need // to handle any incoming chunks from the original request. We // basically just drop them on the floor because the client will // issue a new POST request with the appropriate credentials // (assuming they have them) and associated chunks in the new // request body. if (pendingRequestChunks) { if (chunk.isLast()) { log.info("Received last chunk -- setting proxy auth " + "chunking to false"); this.pendingRequestChunks = false; //me.getChannel().close(); } log.info("Ignoring chunk with chunked post for edge case"); return; } else { // Note this can happen quite often in tests when requests are // arriving very quickly on the same JVM but is less likely // to occur in deployed servers. log.error("NO CHANNEL FUTURE!!"); synchronized (this.channelFutureLock) { if (this.currentChannelFuture == null) { try { log.debug("Waiting for channel future!"); channelFutureLock.wait(4000); } catch (final InterruptedException e) { log.info("Interrupted!!", e); } } } } } // We don't necessarily know the channel is connected yet!! This can // happen if the client sends a chunk directly after the initial // request. if (this.currentChannelFuture.getChannel().isConnected()) { this.currentChannelFuture.getChannel().write(chunk); } else { this.currentChannelFuture.addListener(new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) throws Exception { currentChannelFuture.getChannel().write(chunk); } }); } } private void processRequest(final ChannelHandlerContext ctx, final MessageEvent me) { final HttpRequest request = (HttpRequest) me.getMessage(); final Channel inboundChannel = me.getChannel(); if (this.cacheManager != null && this.cacheManager.returnCacheHit((HttpRequest)me.getMessage(), inboundChannel)) { log.info("Found cache hit! Cache wrote the response."); return; } this.unansweredRequestCount.incrementAndGet(); log.info("Got request: {} on channel: "+inboundChannel, request); if (this.authorizationManager != null && !this.authorizationManager.handleProxyAuthorization(request, ctx)) { log.info("Not authorized!!"); // We need to do a few things here. First, if the request is // chunked, we need to make sure we read the full request/POST // message body. 
handleFutureChunksIfNecessary(request); return; } else { this.pendingRequestChunks = false; } String hostAndPort = null; if (this.chainProxyManager != null) { hostAndPort = this.chainProxyManager.getChainProxy(request); } if (hostAndPort == null) { hostAndPort = ProxyUtils.parseHostAndPort(request); if (StringUtils.isBlank(hostAndPort)) { log.warn("No host and port found in {}", request.getUri()); badGateway(request, inboundChannel); handleFutureChunksIfNecessary(request); return; } } final class OnConnect { public ChannelFuture onConnect(final ChannelFuture cf) { if (request.getMethod() != HttpMethod.CONNECT) { final ChannelFuture writeFuture = cf.getChannel().write(request); writeFuture.addListener(new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) throws Exception { if (LittleProxyConfig.isUseJmx()) { unansweredRequests.add(request.toString()); } unansweredHttpRequests.add(request); requestsSent.incrementAndGet(); } }); return writeFuture; } else { writeConnectResponse(ctx, request, cf.getChannel()); return cf; } } } final OnConnect onConnect = new OnConnect(); final ChannelFuture curFuture = getChannelFuture(hostAndPort); if (curFuture != null) { log.info("Using existing connection..."); // We don't notify here because the current channel future will not // have been null before this assignment. if (this.currentChannelFuture == null) { log.error("Should not be null here"); } this.currentChannelFuture = curFuture; if (curFuture.getChannel().isConnected()) { onConnect.onConnect(curFuture); } else { final ChannelFutureListener cfl = new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) throws Exception { onConnect.onConnect(curFuture); } }; curFuture.addListener(cfl); } } else { log.info("Establishing new connection"); final ChannelFuture cf; ctx.getChannel().setReadable(false); try { cf = newChannelFuture(request, inboundChannel, hostAndPort); } catch (final UnknownHostException e) { log.warn("Could not resolve host?", e); badGateway(request, inboundChannel); handleFutureChunksIfNecessary(request); ctx.getChannel().setReadable(true); return; } final class LocalChannelFutureListener implements ChannelFutureListener { private final String copiedHostAndPort; LocalChannelFutureListener(final String copiedHostAndPort) { this.copiedHostAndPort = copiedHostAndPort; } public void operationComplete(final ChannelFuture future) throws Exception { final Channel channel = future.getChannel(); if (channelGroup != null) { channelGroup.add(channel); } if (future.isSuccess()) { log.info("Connected successfully to: {}", channel); log.info("Writing message on channel..."); final ChannelFuture wf = onConnect.onConnect(cf); wf.addListener(new ChannelFutureListener() { public void operationComplete(final ChannelFuture wcf) throws Exception { log.info("Finished write: "+wcf+ " to: "+ request.getMethod()+" "+ request.getUri()); ctx.getChannel().setReadable(true); } }); currentChannelFuture = wf; synchronized(channelFutureLock) { channelFutureLock.notifyAll(); } } else { log.info("Could not connect to " + copiedHostAndPort, future.getCause()); final String nextHostAndPort; if (chainProxyManager == null) { nextHostAndPort = copiedHostAndPort; } else { chainProxyManager.onCommunicationError(copiedHostAndPort); nextHostAndPort = chainProxyManager.getChainProxy(request); } if (copiedHostAndPort.equals(nextHostAndPort)) { // We call the relay channel closed event handler // with one associated unanswered request. 
onRelayChannelClose(inboundChannel, copiedHostAndPort, 1, true); } else { // TODO I am not sure about this removeProxyToWebConnection(copiedHostAndPort); // try again with different hostAndPort processRequest(ctx, me); } } cleanupJmx(); } } cf.addListener(new LocalChannelFutureListener(hostAndPort)); } if (request.isChunked()) { readingChunks = true; } } private void badGateway(final HttpRequest request, final Channel inboundChannel) { final HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.BAD_GATEWAY); response.setHeader(HttpHeaders.Names.CONNECTION, "close"); final String body = "Bad Gateway: "+request.getUri(); response.setContent(ChannelBuffers.copiedBuffer(body, Charset.forName("UTF-8"))); response.setHeader(HttpHeaders.Names.CONTENT_LENGTH, body.length()); inboundChannel.write(response); } private void handleFutureChunksIfNecessary(final HttpRequest request) { if (request.isChunked()) { this.pendingRequestChunks = true; readingChunks = true; } } public void onChannelAvailable(final String hostAndPortKey, final ChannelFuture cf) { synchronized (this.externalHostsToChannelFutures) { final Queue<ChannelFuture> futures = this.externalHostsToChannelFutures.get(hostAndPortKey); final Queue<ChannelFuture> toUse; if (futures == null) { toUse = new LinkedList<ChannelFuture>(); this.externalHostsToChannelFutures.put(hostAndPortKey, toUse); } else { toUse = futures; } toUse.add(cf); } } private ChannelFuture getChannelFuture(final String hostAndPort) { synchronized (this.externalHostsToChannelFutures) { final Queue<ChannelFuture> futures = this.externalHostsToChannelFutures.get(hostAndPort); if (futures == null) { return null; } if (futures.isEmpty()) { return null; } final ChannelFuture cf = futures.remove(); if (cf != null && cf.isSuccess() && !cf.getChannel().isConnected()) { // In this case, the future successfully connected at one // time, but we're no longer connected. We need to remove the // channel and open a new one. removeProxyToWebConnection(hostAndPort); return null; } return cf; } } private void writeConnectResponse(final ChannelHandlerContext ctx, final HttpRequest httpRequest, final Channel outgoingChannel) { final int port = ProxyUtils.parsePort(httpRequest); final Channel browserToProxyChannel = ctx.getChannel(); // TODO: We should really only allow access on 443, but this breaks // what a lot of browsers do in practice. if (port != 443) { log.warn("Connecting on port other than 443: "+httpRequest.getUri()); } if (port < 0) { log.warn("Connecting on port other than 443!!"); final String statusLine = "HTTP/1.1 502 Proxy Error\r\n"; ProxyUtils.writeResponse(browserToProxyChannel, statusLine, ProxyUtils.PROXY_ERROR_HEADERS); ProxyUtils.closeOnFlush(browserToProxyChannel); } else { browserToProxyChannel.setReadable(false); // We need to modify both the pipeline encoders and decoders for the // browser to proxy channel -- the outgoing channel already has // the correct handlers and such set at this point. ctx.getPipeline().remove("encoder"); ctx.getPipeline().remove("decoder"); ctx.getPipeline().remove("handler"); // Note there are two HttpConnectRelayingHandler for each HTTP // CONNECT tunnel -- one writing to the browser, and one writing // to the remote host. 
ctx.getPipeline().addLast("handler", new HttpConnectRelayingHandler(outgoingChannel, this.channelGroup)); } log.debug("Sending response to CONNECT request..."); // This is sneaky -- thanks to Emil Goicovici from the list -- // We temporarily add in a request encoder if we're chaining, allowing // us to forward along the HTTP CONNECT request. We then remove that // encoder as soon as it's written since past that point we simply // want to relay all data. String chainProxy = null; if (chainProxyManager != null) { chainProxy = chainProxyManager.getChainProxy(httpRequest); if (chainProxy != null) { // forward the CONNECT request to the upstream proxy server // which will return a HTTP response outgoingChannel.getPipeline().addBefore("handler", "encoder", new HttpRequestEncoder()); outgoingChannel.write(httpRequest).addListener(new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) throws Exception { outgoingChannel.getPipeline().remove("encoder"); } }); } } if (chainProxy == null) { final String statusLine = "HTTP/1.1 200 Connection established\r\n"; ProxyUtils.writeResponse(browserToProxyChannel, statusLine, ProxyUtils.CONNECT_OK_HEADERS); } browserToProxyChannel.setReadable(true); } private ChannelFuture newChannelFuture(final HttpRequest httpRequest, final Channel browserToProxyChannel, final String hostAndPort) throws UnknownHostException { final String host; final int port; if (hostAndPort.contains(":")) { host = StringUtils.substringBefore(hostAndPort, ":"); final String portString = StringUtils.substringAfter(hostAndPort, ":"); port = Integer.parseInt(portString); } else { host = hostAndPort; port = 80; } // Configure the client. final ClientBootstrap cb = new ClientBootstrap(this.clientChannelFactory); final ChannelPipelineFactory cpf; if (httpRequest.getMethod() == HttpMethod.CONNECT) { // In the case of CONNECT, we just want to relay all data in both // directions. We SHOULD make sure this is traffic on a reasonable // port, however, such as 80 or 443, to reduce security risks. cpf = new ChannelPipelineFactory() { public ChannelPipeline getPipeline() throws Exception { // Create a default pipeline implementation. final ChannelPipeline pipeline = pipeline(); pipeline.addLast("handler", new HttpConnectRelayingHandler(browserToProxyChannel, channelGroup)); return pipeline; } }; } else { cpf = relayPipelineFactoryFactory.getRelayPipelineFactory( httpRequest, browserToProxyChannel, this); } cb.setPipelineFactory(cpf); cb.setOption("connectTimeoutMillis", 40*1000); log.debug("Starting new connection to: {}", hostAndPort); if (LittleProxyConfig.isUseDnsSec()) { return cb.connect(VerifiedAddressFactory.newInetSocketAddress(host, port, LittleProxyConfig.isUseDnsSec())); } else { final InetAddress ia = InetAddress.getByName(host); final String address = ia.getHostAddress(); //final InetSocketAddress address = new InetSocketAddress(host, port); return cb.connect(new InetSocketAddress(address, port)); } } @Override public void channelOpen(final ChannelHandlerContext ctx, final ChannelStateEvent cse) throws Exception { final Channel inboundChannel = cse.getChannel(); log.debug("New channel opened: {}", inboundChannel); totalBrowserToProxyConnections.incrementAndGet(); browserToProxyConnections.incrementAndGet(); log.debug("Now "+totalBrowserToProxyConnections+ " browser to proxy channels..."); log.debug("Now this class has "+browserToProxyConnections+ " browser to proxy channels..."); // We need to keep track of the channel so we can close it at the end. 
if (this.channelGroup != null) { this.channelGroup.add(inboundChannel); } } @Override public void channelClosed(final ChannelHandlerContext ctx, final ChannelStateEvent cse) { log.info("Channel closed: {}", cse.getChannel()); totalBrowserToProxyConnections.decrementAndGet(); browserToProxyConnections.decrementAndGet(); log.debug("Now "+totalBrowserToProxyConnections+ " total browser to proxy channels..."); log.debug("Now this class has "+browserToProxyConnections+ " browser to proxy channels..."); // The following should always be the case with // @ChannelPipelineCoverage("one") if (browserToProxyConnections.get() == 0) { log.debug("Closing all proxy to web channels for this browser " + "to proxy connection!!!"); final Collection<Queue<ChannelFuture>> allFutures = this.externalHostsToChannelFutures.values(); for (final Queue<ChannelFuture> futures : allFutures) { for (final ChannelFuture future : futures) { final Channel ch = future.getChannel(); if (ch.isOpen()) { future.getChannel().close(); } } } } } /** * This is called when a relay channel to a remote server is closed in order * for this class to perform any necessary cleanup. Note that this is * called on the same thread as the incoming request processing. */ public void onRelayChannelClose(final Channel browserToProxyChannel, final String key, final int unansweredRequestsOnChannel, final boolean closedEndsResponseBody) { if (closedEndsResponseBody) { log.debug("Close ends response body"); this.receivedChannelClosed = true; } log.debug("this.receivedChannelClosed: "+this.receivedChannelClosed); removeProxyToWebConnection(key); // The closed channel may have had outstanding requests we haven't // properly accounted for. The channel closing effectively marks those // requests as "answered" when the responses didn't contain any other // markers for complete responses, such as Content-Length or the the // last chunk of a chunked encoding. All of this potentially results // in the closing of the client/browser connection here. this.unansweredRequestCount.set( this.unansweredRequestCount.get() - unansweredRequestsOnChannel); //this.unansweredRequestCount -= unansweredRequestsOnChannel; if (this.receivedChannelClosed && (this.externalHostsToChannelFutures.isEmpty() || this.unansweredRequestCount.get() == 0)) { if (!browserChannelClosed.getAndSet(true)) { log.info("Closing browser to proxy channel"); ProxyUtils.closeOnFlush(browserToProxyChannel); } } else { log.info("Not closing browser to proxy channel. Received channel " + "closed is "+this.receivedChannelClosed+" and we have {} " + "connections and awaiting {} responses", this.externalHostsToChannelFutures.size(), this.unansweredRequestCount ); } } private void removeProxyToWebConnection(final String key) { // It's probably already been removed at this point, but just in case. this.externalHostsToChannelFutures.remove(key); } public void onRelayHttpResponse(final Channel browserToProxyChannel, final String key, final HttpRequest httpRequest) { if (LittleProxyConfig.isUseJmx()) { this.answeredRequests.add(httpRequest.toString()); this.unansweredRequests.remove(httpRequest.toString()); } this.unansweredHttpRequests.remove(httpRequest); this.unansweredRequestCount.decrementAndGet(); this.responsesReceived.incrementAndGet(); // If we've received responses to all outstanding requests and one // of those outgoing channels has been closed, we should close the // connection to the browser. 
if (this.unansweredRequestCount.get() == 0 && this.receivedChannelClosed) { if (!browserChannelClosed.getAndSet(true)) { log.info("Closing browser to proxy channel on HTTP response"); ProxyUtils.closeOnFlush(browserToProxyChannel); } } else { log.info("Not closing browser to proxy channel. Still "+ "awaiting " + this.unansweredRequestCount+" responses..." + "receivedChannelClosed="+this.receivedChannelClosed); } } @Override public void exceptionCaught(final ChannelHandlerContext ctx, final ExceptionEvent e) throws Exception { final Channel channel = e.getChannel(); final Throwable cause = e.getCause(); if (cause instanceof ClosedChannelException) { log.warn("Caught an exception on browser to proxy channel: "+ channel, cause); } else { log.info("Caught an exception on browser to proxy channel: "+ channel, cause); } ProxyUtils.closeOnFlush(channel); } public int getClientConnections() { return this.browserToProxyConnections.get(); } public int getTotalClientConnections() { return totalBrowserToProxyConnections.get(); } public int getOutgoingConnections() { return this.externalHostsToChannelFutures.size(); } public int getRequestsSent() { return this.requestsSent.get(); } public int getResponsesReceived() { return this.responsesReceived.get(); } public String getUnansweredRequests() { return this.unansweredRequests.toString(); } public Set<HttpRequest> getUnansweredHttpRequests() { return unansweredHttpRequests; } public String getAnsweredReqeusts() { return this.answeredRequests.toString(); } }
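// Hedged sketch: one way to mount HttpRequestHandler in a Netty 3.x server pipeline.
// The handler names "decoder", "encoder" and "handler" match the names the class itself
// removes when switching to CONNECT tunneling. The RelayPipelineFactoryFactory instance,
// the listen port, and package visibility (org.littleshoot.proxy) are assumptions; the
// real wiring lives elsewhere in LittleProxy.
import java.net.InetSocketAddress;
import java.util.concurrent.Executors;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.socket.ClientSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
import org.jboss.netty.handler.codec.http.HttpResponseEncoder;

final class RequestHandlerWiringSketch {

    static void start(final RelayPipelineFactoryFactory relayPipelineFactoryFactory) {
        final ClientSocketChannelFactory clientChannelFactory =
                new NioClientSocketChannelFactory(Executors.newCachedThreadPool(),
                        Executors.newCachedThreadPool());

        ServerBootstrap bootstrap = new ServerBootstrap(
                new NioServerSocketChannelFactory(Executors.newCachedThreadPool(),
                        Executors.newCachedThreadPool()));

        bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
            public ChannelPipeline getPipeline() {
                ChannelPipeline pipeline = Channels.pipeline();
                // Decode inbound HTTP requests and encode outbound HTTP responses.
                pipeline.addLast("decoder", new HttpRequestDecoder());
                pipeline.addLast("encoder", new HttpResponseEncoder());
                // One stateful HttpRequestHandler per browser connection.
                pipeline.addLast("handler",
                        new HttpRequestHandler(relayPipelineFactoryFactory, clientChannelFactory));
                return pipeline;
            }
        });

        bootstrap.bind(new InetSocketAddress(8080)); // port is an assumption
    }
}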
package org.littleshoot.proxy; import static org.jboss.netty.channel.Channels.pipeline; import java.lang.management.ManagementFactory; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.nio.channels.ClosedChannelException; import java.nio.charset.Charset; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import javax.management.InstanceAlreadyExistsException; import javax.management.InstanceNotFoundException; import javax.management.MBeanRegistrationException; import javax.management.MBeanServer; import javax.management.MalformedObjectNameException; import javax.management.NotCompliantMBeanException; import javax.management.ObjectName; import org.apache.commons.lang3.StringUtils; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelFutureListener; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.group.ChannelGroup; import org.jboss.netty.channel.socket.ClientSocketChannelFactory; import org.jboss.netty.handler.codec.http.DefaultHttpResponse; import org.jboss.netty.handler.codec.http.HttpChunk; import org.jboss.netty.handler.codec.http.HttpHeaders; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpRequestEncoder; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.codec.http.HttpVersion; import org.littleshoot.dnssec4j.VerifiedAddressFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Class for handling all HTTP requests from the browser to the proxy. * * Note this class only ever handles a single connection from the browser. * The browser can and will, however, send requests to multiple hosts using * that same connection, i.e. it will send a request to host B once a request * to host A has completed. 
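 * <p>
 * To serve those requests the handler keeps, per external host, a queue of channel
 * futures for proxy-to-web connections (see onChannelAvailable and getChannelFuture
 * below), so an idle outgoing connection can be reused rather than opened fresh for
 * every request on the same browser connection.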
*/ public class HttpRequestHandler extends SimpleChannelUpstreamHandler implements RelayListener, ConnectionData { private final static Logger log = LoggerFactory.getLogger(HttpRequestHandler.class); private volatile boolean readingChunks; private static final AtomicInteger totalBrowserToProxyConnections = new AtomicInteger(0); private final AtomicInteger browserToProxyConnections = new AtomicInteger(0); private final Map<String, Queue<ChannelFuture>> externalHostsToChannelFutures = new ConcurrentHashMap<String, Queue<ChannelFuture>>(); private final AtomicInteger messagesReceived = new AtomicInteger(0); private final AtomicInteger unansweredRequestCount = new AtomicInteger(0); private final AtomicInteger requestsSent = new AtomicInteger(0); private final AtomicInteger responsesReceived = new AtomicInteger(0); private final ProxyAuthorizationManager authorizationManager; private final Set<String> answeredRequests = new HashSet<String>(); private final Set<String> unansweredRequests = new HashSet<String>(); private final Set<HttpRequest> unansweredHttpRequests = new HashSet<HttpRequest>(); private ChannelFuture currentChannelFuture; /** * This lock is necessary for when a second chunk arrives in a request * before we've even created the current channel future. */ private final Object channelFutureLock = new Object(); private final ChainProxyManager chainProxyManager; private final ChannelGroup channelGroup; private final ProxyCacheManager cacheManager; private final AtomicBoolean browserChannelClosed = new AtomicBoolean(false); private volatile boolean receivedChannelClosed = false; private final RelayPipelineFactoryFactory relayPipelineFactoryFactory; private ClientSocketChannelFactory clientChannelFactory; /** * This flag is necessary for edge cases where we prematurely halt request * processing but where there may be more incoming chunks for the request * (in cases where the request is chunked). This happens, for example, with * proxy authentication and chunked posts or when the external host just * does not resolve to an IP address. In those cases we prematurely return * pre-packaged responses and halt request processing but still need to * handle any future chunks associated with the request coming in on the * client channel. */ private boolean pendingRequestChunks = false; private ObjectName mxBeanName; /** * Creates a new class for handling HTTP requests with no frills. * * @param relayPipelineFactoryFactory The factory for creating factories * for channels to relay data from external sites back to clients. * @param clientChannelFactory The factory for creating outgoing channels * to external sites. */ public HttpRequestHandler( final RelayPipelineFactoryFactory relayPipelineFactoryFactory, final ClientSocketChannelFactory clientChannelFactory) { this(null, null, null, null, relayPipelineFactoryFactory, clientChannelFactory); } /** * Creates a new class for handling HTTP requests with the specified * authentication manager. * * @param cacheManager The manager for the cache. * @param authorizationManager The class that handles any * proxy authentication requirements. * @param channelGroup The group of channels for keeping track of all * channels we've opened. * @param relayPipelineFactoryFactory The factory for creating factories * for channels to relay data from external sites back to clients. * @param clientChannelFactory The factory for creating outgoing channels * to external sites. 
*/ public HttpRequestHandler(final ProxyCacheManager cacheManager, final ProxyAuthorizationManager authorizationManager, final ChannelGroup channelGroup, final RelayPipelineFactoryFactory relayPipelineFactoryFactory, final ClientSocketChannelFactory clientChannelFactory) { this(cacheManager, authorizationManager, channelGroup, null, relayPipelineFactoryFactory, clientChannelFactory); } /** * Creates a new class for handling HTTP requests with the specified * authentication manager. * * @param cacheManager The manager for the cache. * @param authorizationManager The class that handles any * proxy authentication requirements. * @param channelGroup The group of channels for keeping track of all * channels we've opened. * @param chainProxyManager upstream proxy server host and port or null * if none used. * @param relayPipelineFactoryFactory The relay pipeline factory. * @param clientChannelFactory The factory for creating outgoing channels * to external sites. */ public HttpRequestHandler(final ProxyCacheManager cacheManager, final ProxyAuthorizationManager authorizationManager, final ChannelGroup channelGroup, final ChainProxyManager chainProxyManager, final RelayPipelineFactoryFactory relayPipelineFactoryFactory, final ClientSocketChannelFactory clientChannelFactory) { log.info("Creating new request handler..."); this.clientChannelFactory = clientChannelFactory; this.cacheManager = cacheManager; this.authorizationManager = authorizationManager; this.channelGroup = channelGroup; this.chainProxyManager = chainProxyManager; this.relayPipelineFactoryFactory = relayPipelineFactoryFactory; if (LittleProxyConfig.isUseJmx()) { setupJmx(); } } private void setupJmx() { final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { final Class<? extends SimpleChannelUpstreamHandler> clazz = getClass(); final String pack = clazz.getPackage().getName(); final String oName = pack+":type="+clazz.getSimpleName()+"-"+clazz.getSimpleName() + "-"+hashCode(); log.info("Registering MBean with name: {}", oName); mxBeanName = new ObjectName(oName); if(!mbs.isRegistered(mxBeanName)) { mbs.registerMBean(this, mxBeanName); } } catch (final MalformedObjectNameException e) { log.error("Could not set up JMX", e); } catch (final InstanceAlreadyExistsException e) { log.error("Could not set up JMX", e); } catch (final MBeanRegistrationException e) { log.error("Could not set up JMX", e); } catch (final NotCompliantMBeanException e) { log.error("Could not set up JMX", e); } } protected void cleanupJmx() { if (this.mxBeanName == null) { log.debug("JMX not setup"); return; } final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { mbs.unregisterMBean(mxBeanName); } catch (MBeanRegistrationException e) { //that's OK, because we won't leak } catch (InstanceNotFoundException e) { //ditto } } @Override public void messageReceived(final ChannelHandlerContext ctx, final MessageEvent me) { if (browserChannelClosed.get()) { log.info("Ignoring message since the connection to the browser " + "is about to close"); return; } messagesReceived.incrementAndGet(); log.debug("Received "+messagesReceived+" total messages"); if (!readingChunks) { processRequest(ctx, me); } else { processChunk(me); } } private void processChunk(final MessageEvent me) { log.info("Processing chunk..."); final HttpChunk chunk = (HttpChunk) me.getMessage(); // Remember this will typically be a persistent connection, so we'll // get another request after we're read the last chunk. 
So we need to // reset it back to no longer read in chunk mode. if (chunk.isLast()) { this.readingChunks = false; } // It's possible to receive a chunk before a channel future has even // been set. It's also possible for this to happen for requests that // require proxy authentication or in cases where we get a DNS lookup // error trying to reach the remote site. if (this.currentChannelFuture == null) { // First deal with the case where a proxy authentication manager // is active and we've received an HTTP POST requiring // authentication. In that scenario, we've returned a // 407 Proxy Authentication Required response, but we still need // to handle any incoming chunks from the original request. We // basically just drop them on the floor because the client will // issue a new POST request with the appropriate credentials // (assuming they have them) and associated chunks in the new // request body. if (pendingRequestChunks) { if (chunk.isLast()) { log.info("Received last chunk -- setting proxy auth " + "chunking to false"); this.pendingRequestChunks = false; //me.getChannel().close(); } log.info("Ignoring chunk with chunked post for edge case"); return; } else { // Note this can happen quite often in tests when requests are // arriving very quickly on the same JVM but is less likely // to occur in deployed servers. log.error("NO CHANNEL FUTURE!!"); synchronized (this.channelFutureLock) { if (this.currentChannelFuture == null) { try { log.debug("Waiting for channel future!"); channelFutureLock.wait(4000); } catch (final InterruptedException e) { log.info("Interrupted!!", e); } } } } } // We don't necessarily know the channel is connected yet!! This can // happen if the client sends a chunk directly after the initial // request. if (this.currentChannelFuture.getChannel().isConnected()) { this.currentChannelFuture.getChannel().write(chunk); } else { this.currentChannelFuture.addListener(new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) throws Exception { currentChannelFuture.getChannel().write(chunk); } }); } } private void processRequest(final ChannelHandlerContext ctx, final MessageEvent me) { final HttpRequest request = (HttpRequest) me.getMessage(); final Channel inboundChannel = me.getChannel(); if (this.cacheManager != null && this.cacheManager.returnCacheHit((HttpRequest)me.getMessage(), inboundChannel)) { log.info("Found cache hit! Cache wrote the response."); return; } this.unansweredRequestCount.incrementAndGet(); log.info("Got request: {} on channel: "+inboundChannel, request); if (this.authorizationManager != null && !this.authorizationManager.handleProxyAuthorization(request, ctx)) { log.info("Not authorized!!"); // We need to do a few things here. First, if the request is // chunked, we need to make sure we read the full request/POST // message body. 
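            // handleFutureChunksIfNecessary() flips pendingRequestChunks for chunked
            // requests, so the remaining body chunks of this rejected request are
            // silently dropped in processChunk() instead of being forwarded upstream.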
handleFutureChunksIfNecessary(request); return; } else { this.pendingRequestChunks = false; } String hostAndPort = null; if (this.chainProxyManager != null) { hostAndPort = this.chainProxyManager.getChainProxy(request); } if (hostAndPort == null) { hostAndPort = ProxyUtils.parseHostAndPort(request); if (StringUtils.isBlank(hostAndPort)) { log.warn("No host and port found in {}", request.getUri()); badGateway(request, inboundChannel); handleFutureChunksIfNecessary(request); return; } } final class OnConnect { public ChannelFuture onConnect(final ChannelFuture cf) { if (request.getMethod() != HttpMethod.CONNECT) { final ChannelFuture writeFuture = cf.getChannel().write(request); writeFuture.addListener(new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) throws Exception { if (LittleProxyConfig.isUseJmx()) { unansweredRequests.add(request.toString()); } unansweredHttpRequests.add(request); requestsSent.incrementAndGet(); } }); return writeFuture; } else { writeConnectResponse(ctx, request, cf.getChannel()); return cf; } } } final OnConnect onConnect = new OnConnect(); final ChannelFuture curFuture = getChannelFuture(hostAndPort); if (curFuture != null) { log.info("Using existing connection..."); // We don't notify here because the current channel future will not // have been null before this assignment. if (this.currentChannelFuture == null) { log.error("Should not be null here"); } this.currentChannelFuture = curFuture; if (curFuture.getChannel().isConnected()) { onConnect.onConnect(curFuture); } else { final ChannelFutureListener cfl = new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) throws Exception { onConnect.onConnect(curFuture); } }; curFuture.addListener(cfl); } } else { log.info("Establishing new connection"); final ChannelFuture cf; ctx.getChannel().setReadable(false); try { cf = newChannelFuture(request, inboundChannel, hostAndPort); } catch (final UnknownHostException e) { log.warn("Could not resolve host?", e); badGateway(request, inboundChannel); handleFutureChunksIfNecessary(request); ctx.getChannel().setReadable(true); return; } final class LocalChannelFutureListener implements ChannelFutureListener { private final String copiedHostAndPort; LocalChannelFutureListener(final String copiedHostAndPort) { this.copiedHostAndPort = copiedHostAndPort; } public void operationComplete(final ChannelFuture future) throws Exception { final Channel channel = future.getChannel(); if (channelGroup != null) { channelGroup.add(channel); } if (future.isSuccess()) { log.info("Connected successfully to: {}", channel); log.info("Writing message on channel..."); final ChannelFuture wf = onConnect.onConnect(cf); wf.addListener(new ChannelFutureListener() { public void operationComplete(final ChannelFuture wcf) throws Exception { log.info("Finished write: "+wcf+ " to: "+ request.getMethod()+" "+ request.getUri()); ctx.getChannel().setReadable(true); } }); currentChannelFuture = wf; synchronized(channelFutureLock) { channelFutureLock.notifyAll(); } } else { log.info("Could not connect to " + copiedHostAndPort, future.getCause()); final String nextHostAndPort; if (chainProxyManager == null) { nextHostAndPort = copiedHostAndPort; } else { chainProxyManager.onCommunicationError(copiedHostAndPort); nextHostAndPort = chainProxyManager.getChainProxy(request); } if (copiedHostAndPort.equals(nextHostAndPort)) { // We call the relay channel closed event handler // with one associated unanswered request. 
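                        // The count of 1 covers the request that just failed to connect,
                        // and passing true marks the close as ending the response body,
                        // so onRelayChannelClose() may flush and close the browser
                        // connection once nothing else is outstanding.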
onRelayChannelClose(inboundChannel, copiedHostAndPort, 1, true); } else { // TODO I am not sure about this removeProxyToWebConnection(copiedHostAndPort); // try again with different hostAndPort processRequest(ctx, me); } } if (LittleProxyConfig.isUseJmx()) { cleanupJmx(); } } } cf.addListener(new LocalChannelFutureListener(hostAndPort)); } if (request.isChunked()) { readingChunks = true; } } private void badGateway(final HttpRequest request, final Channel inboundChannel) { final HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.BAD_GATEWAY); response.setHeader(HttpHeaders.Names.CONNECTION, "close"); final String body = "Bad Gateway: "+request.getUri(); response.setContent(ChannelBuffers.copiedBuffer(body, Charset.forName("UTF-8"))); response.setHeader(HttpHeaders.Names.CONTENT_LENGTH, body.length()); inboundChannel.write(response); } private void handleFutureChunksIfNecessary(final HttpRequest request) { if (request.isChunked()) { this.pendingRequestChunks = true; readingChunks = true; } } public void onChannelAvailable(final String hostAndPortKey, final ChannelFuture cf) { synchronized (this.externalHostsToChannelFutures) { final Queue<ChannelFuture> futures = this.externalHostsToChannelFutures.get(hostAndPortKey); final Queue<ChannelFuture> toUse; if (futures == null) { toUse = new LinkedList<ChannelFuture>(); this.externalHostsToChannelFutures.put(hostAndPortKey, toUse); } else { toUse = futures; } toUse.add(cf); } } private ChannelFuture getChannelFuture(final String hostAndPort) { synchronized (this.externalHostsToChannelFutures) { final Queue<ChannelFuture> futures = this.externalHostsToChannelFutures.get(hostAndPort); if (futures == null) { return null; } if (futures.isEmpty()) { return null; } final ChannelFuture cf = futures.remove(); if (cf != null && cf.isSuccess() && !cf.getChannel().isConnected()) { // In this case, the future successfully connected at one // time, but we're no longer connected. We need to remove the // channel and open a new one. removeProxyToWebConnection(hostAndPort); return null; } return cf; } } private void writeConnectResponse(final ChannelHandlerContext ctx, final HttpRequest httpRequest, final Channel outgoingChannel) { final int port = ProxyUtils.parsePort(httpRequest); final Channel browserToProxyChannel = ctx.getChannel(); // TODO: We should really only allow access on 443, but this breaks // what a lot of browsers do in practice. if (port != 443) { log.warn("Connecting on port other than 443: "+httpRequest.getUri()); } if (port < 0) { log.warn("Connecting on port other than 443!!"); final String statusLine = "HTTP/1.1 502 Proxy Error\r\n"; ProxyUtils.writeResponse(browserToProxyChannel, statusLine, ProxyUtils.PROXY_ERROR_HEADERS); ProxyUtils.closeOnFlush(browserToProxyChannel); } else { browserToProxyChannel.setReadable(false); // We need to modify both the pipeline encoders and decoders for the // browser to proxy channel -- the outgoing channel already has // the correct handlers and such set at this point. ctx.getPipeline().remove("encoder"); ctx.getPipeline().remove("decoder"); ctx.getPipeline().remove("handler"); // Note there are two HttpConnectRelayingHandler for each HTTP // CONNECT tunnel -- one writing to the browser, and one writing // to the remote host. 
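            // With the HTTP codec handlers removed above, the browser-to-proxy pipeline
            // now carries raw bytes only; this handler simply relays them to the
            // outgoing channel for the CONNECT tunnel.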
ctx.getPipeline().addLast("handler", new HttpConnectRelayingHandler(outgoingChannel, this.channelGroup)); } log.debug("Sending response to CONNECT request..."); // This is sneaky -- thanks to Emil Goicovici from the list -- // We temporarily add in a request encoder if we're chaining, allowing // us to forward along the HTTP CONNECT request. We then remove that // encoder as soon as it's written since past that point we simply // want to relay all data. String chainProxy = null; if (chainProxyManager != null) { chainProxy = chainProxyManager.getChainProxy(httpRequest); if (chainProxy != null) { // forward the CONNECT request to the upstream proxy server // which will return a HTTP response outgoingChannel.getPipeline().addBefore("handler", "encoder", new HttpRequestEncoder()); outgoingChannel.write(httpRequest).addListener(new ChannelFutureListener() { public void operationComplete(final ChannelFuture future) throws Exception { outgoingChannel.getPipeline().remove("encoder"); } }); } } if (chainProxy == null) { final String statusLine = "HTTP/1.1 200 Connection established\r\n"; ProxyUtils.writeResponse(browserToProxyChannel, statusLine, ProxyUtils.CONNECT_OK_HEADERS); } browserToProxyChannel.setReadable(true); } private ChannelFuture newChannelFuture(final HttpRequest httpRequest, final Channel browserToProxyChannel, final String hostAndPort) throws UnknownHostException { final String host; final int port; if (hostAndPort.contains(":")) { host = StringUtils.substringBefore(hostAndPort, ":"); final String portString = StringUtils.substringAfter(hostAndPort, ":"); port = Integer.parseInt(portString); } else { host = hostAndPort; port = 80; } // Configure the client. final ClientBootstrap cb = new ClientBootstrap(this.clientChannelFactory); final ChannelPipelineFactory cpf; if (httpRequest.getMethod() == HttpMethod.CONNECT) { // In the case of CONNECT, we just want to relay all data in both // directions. We SHOULD make sure this is traffic on a reasonable // port, however, such as 80 or 443, to reduce security risks. cpf = new ChannelPipelineFactory() { public ChannelPipeline getPipeline() throws Exception { // Create a default pipeline implementation. final ChannelPipeline pipeline = pipeline(); pipeline.addLast("handler", new HttpConnectRelayingHandler(browserToProxyChannel, channelGroup)); return pipeline; } }; } else { cpf = relayPipelineFactoryFactory.getRelayPipelineFactory( httpRequest, browserToProxyChannel, this); } cb.setPipelineFactory(cpf); cb.setOption("connectTimeoutMillis", 40*1000); log.debug("Starting new connection to: {}", hostAndPort); if (LittleProxyConfig.isUseDnsSec()) { return cb.connect(VerifiedAddressFactory.newInetSocketAddress(host, port, LittleProxyConfig.isUseDnsSec())); } else { final InetAddress ia = InetAddress.getByName(host); final String address = ia.getHostAddress(); //final InetSocketAddress address = new InetSocketAddress(host, port); return cb.connect(new InetSocketAddress(address, port)); } } @Override public void channelOpen(final ChannelHandlerContext ctx, final ChannelStateEvent cse) throws Exception { final Channel inboundChannel = cse.getChannel(); log.debug("New channel opened: {}", inboundChannel); totalBrowserToProxyConnections.incrementAndGet(); browserToProxyConnections.incrementAndGet(); log.debug("Now "+totalBrowserToProxyConnections+ " browser to proxy channels..."); log.debug("Now this class has "+browserToProxyConnections+ " browser to proxy channels..."); // We need to keep track of the channel so we can close it at the end. 
if (this.channelGroup != null) { this.channelGroup.add(inboundChannel); } } @Override public void channelClosed(final ChannelHandlerContext ctx, final ChannelStateEvent cse) { log.info("Channel closed: {}", cse.getChannel()); totalBrowserToProxyConnections.decrementAndGet(); browserToProxyConnections.decrementAndGet(); log.debug("Now "+totalBrowserToProxyConnections+ " total browser to proxy channels..."); log.debug("Now this class has "+browserToProxyConnections+ " browser to proxy channels..."); // The following should always be the case with // @ChannelPipelineCoverage("one") if (browserToProxyConnections.get() == 0) { log.debug("Closing all proxy to web channels for this browser " + "to proxy connection!!!"); final Collection<Queue<ChannelFuture>> allFutures = this.externalHostsToChannelFutures.values(); for (final Queue<ChannelFuture> futures : allFutures) { for (final ChannelFuture future : futures) { final Channel ch = future.getChannel(); if (ch.isOpen()) { future.getChannel().close(); } } } } } /** * This is called when a relay channel to a remote server is closed in order * for this class to perform any necessary cleanup. Note that this is * called on the same thread as the incoming request processing. */ public void onRelayChannelClose(final Channel browserToProxyChannel, final String key, final int unansweredRequestsOnChannel, final boolean closedEndsResponseBody) { if (closedEndsResponseBody) { log.debug("Close ends response body"); this.receivedChannelClosed = true; } log.debug("this.receivedChannelClosed: "+this.receivedChannelClosed); removeProxyToWebConnection(key); // The closed channel may have had outstanding requests we haven't // properly accounted for. The channel closing effectively marks those // requests as "answered" when the responses didn't contain any other // markers for complete responses, such as Content-Length or the the // last chunk of a chunked encoding. All of this potentially results // in the closing of the client/browser connection here. this.unansweredRequestCount.set( this.unansweredRequestCount.get() - unansweredRequestsOnChannel); //this.unansweredRequestCount -= unansweredRequestsOnChannel; if (this.receivedChannelClosed && (this.externalHostsToChannelFutures.isEmpty() || this.unansweredRequestCount.get() == 0)) { if (!browserChannelClosed.getAndSet(true)) { log.info("Closing browser to proxy channel"); ProxyUtils.closeOnFlush(browserToProxyChannel); } } else { log.info("Not closing browser to proxy channel. Received channel " + "closed is "+this.receivedChannelClosed+" and we have {} " + "connections and awaiting {} responses", this.externalHostsToChannelFutures.size(), this.unansweredRequestCount ); } } private void removeProxyToWebConnection(final String key) { // It's probably already been removed at this point, but just in case. this.externalHostsToChannelFutures.remove(key); } public void onRelayHttpResponse(final Channel browserToProxyChannel, final String key, final HttpRequest httpRequest) { if (LittleProxyConfig.isUseJmx()) { this.answeredRequests.add(httpRequest.toString()); this.unansweredRequests.remove(httpRequest.toString()); } this.unansweredHttpRequests.remove(httpRequest); this.unansweredRequestCount.decrementAndGet(); this.responsesReceived.incrementAndGet(); // If we've received responses to all outstanding requests and one // of those outgoing channels has been closed, we should close the // connection to the browser. 
if (this.unansweredRequestCount.get() == 0 && this.receivedChannelClosed) { if (!browserChannelClosed.getAndSet(true)) { log.info("Closing browser to proxy channel on HTTP response"); ProxyUtils.closeOnFlush(browserToProxyChannel); } } else { log.info("Not closing browser to proxy channel. Still "+ "awaiting " + this.unansweredRequestCount+" responses..." + "receivedChannelClosed="+this.receivedChannelClosed); } } @Override public void exceptionCaught(final ChannelHandlerContext ctx, final ExceptionEvent e) throws Exception { final Channel channel = e.getChannel(); final Throwable cause = e.getCause(); if (cause instanceof ClosedChannelException) { log.warn("Caught an exception on browser to proxy channel: "+ channel, cause); } else { log.info("Caught an exception on browser to proxy channel: "+ channel, cause); } ProxyUtils.closeOnFlush(channel); } public int getClientConnections() { return this.browserToProxyConnections.get(); } public int getTotalClientConnections() { return totalBrowserToProxyConnections.get(); } public int getOutgoingConnections() { return this.externalHostsToChannelFutures.size(); } public int getRequestsSent() { return this.requestsSent.get(); } public int getResponsesReceived() { return this.responsesReceived.get(); } public String getUnansweredRequests() { return this.unansweredRequests.toString(); } public Set<HttpRequest> getUnansweredHttpRequests() { return unansweredHttpRequests; } public String getAnsweredReqeusts() { return this.answeredRequests.toString(); } }
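package org.littleshoot.proxy;

import java.net.InetSocketAddress;
import java.util.concurrent.Executors;

import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.socket.ClientSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
import org.jboss.netty.handler.codec.http.HttpResponseEncoder;

/**
 * Illustrative sketch only -- not part of the LittleProxy sources above. It shows one
 * plausible way to install {@link HttpRequestHandler} on the browser-facing pipeline,
 * assuming a {@link RelayPipelineFactoryFactory} is supplied by the caller. The handler
 * names "decoder", "encoder" and "handler" are deliberate: writeConnectResponse() in
 * HttpRequestHandler removes exactly those entries when it switches a connection to raw
 * CONNECT tunneling. The class name and the port/start signature are purely hypothetical.
 */
public final class ExampleProxyBootstrap {

    private ExampleProxyBootstrap() {
    }

    public static Channel start(final int port,
        final RelayPipelineFactoryFactory relayPipelineFactoryFactory) {
        // One client channel factory is shared by every HttpRequestHandler for its
        // outgoing proxy-to-web connections.
        final ClientSocketChannelFactory clientChannelFactory =
            new NioClientSocketChannelFactory(
                Executors.newCachedThreadPool(), Executors.newCachedThreadPool());

        final ServerBootstrap bootstrap = new ServerBootstrap(
            new NioServerSocketChannelFactory(
                Executors.newCachedThreadPool(), Executors.newCachedThreadPool()));

        bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
            public ChannelPipeline getPipeline() throws Exception {
                final ChannelPipeline pipeline = Channels.pipeline();
                // Decode inbound HTTP requests from the browser and encode responses back.
                pipeline.addLast("decoder", new HttpRequestDecoder());
                pipeline.addLast("encoder", new HttpResponseEncoder());
                // One handler instance per browser connection, as described in the
                // HttpRequestHandler class Javadoc.
                pipeline.addLast("handler", new HttpRequestHandler(
                    relayPipelineFactoryFactory, clientChannelFactory));
                return pipeline;
            }
        });
        return bootstrap.bind(new InetSocketAddress(port));
    }
}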
package org.mockito.internal.exceptions; import org.mockito.exceptions.base.MockitoAssertionError; import org.mockito.exceptions.base.MockitoException; import org.mockito.exceptions.misusing.*; import org.mockito.exceptions.verification.MoreThanAllowedActualInvocations; import org.mockito.exceptions.verification.NeverWantedButInvoked; import org.mockito.exceptions.verification.NoInteractionsWanted; import org.mockito.exceptions.verification.SmartNullPointerException; import org.mockito.exceptions.verification.TooLittleActualInvocations; import org.mockito.exceptions.verification.TooManyActualInvocations; import org.mockito.exceptions.verification.VerificationInOrderFailure; import org.mockito.exceptions.verification.WantedButNotInvoked; import org.mockito.internal.debugging.LocationImpl; import org.mockito.internal.exceptions.util.ScenarioPrinter; import org.mockito.internal.junit.ExceptionFactory; import org.mockito.internal.matchers.LocalizedMatcher; import org.mockito.internal.util.MockUtil; import org.mockito.invocation.DescribedInvocation; import org.mockito.invocation.Invocation; import org.mockito.invocation.InvocationOnMock; import org.mockito.invocation.Location; import org.mockito.listeners.InvocationListener; import org.mockito.mock.SerializableMode; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.List; import static org.mockito.internal.reporting.Pluralizer.pluralize; import static org.mockito.internal.reporting.Pluralizer.were_exactly_x_interactions; import static org.mockito.internal.util.StringUtil.join; /** * Reports verification and misusing errors. * <p> * One of the key points of mocking library is proper verification/exception * messages. All messages in one place makes it easier to tune and amend them. * <p> * Reporter can be injected and therefore is easily testable. * <p> * Generally, exception messages are full of line breaks to make them easy to * read (xunit plugins take only fraction of screen on modern IDEs). */ public class Reporter { private final static String NON_PUBLIC_PARENT = "Mocking methods declared on non-public parent classes is not supported."; private Reporter() { } public static MockitoException checkedExceptionInvalid(Throwable t) { return new MockitoException(join( "Checked exception is invalid for this method!", "Invalid: " + t )); } public static MockitoException cannotStubWithNullThrowable() { return new MockitoException(join( "Cannot stub with null throwable!" )); } public static MockitoException unfinishedStubbing(Location location) { return new UnfinishedStubbingException(join( "Unfinished stubbing detected here:", location, "", "E.g. thenReturn() may be missing.", "Examples of correct stubbing:", " when(mock.isOk()).thenReturn(true);", " when(mock.isOk()).thenThrow(exception);", " doThrow(exception).when(mock).someVoidMethod();", "Hints:", " 1. missing thenReturn()", " 2. 
you are trying to stub a final method, which is not supported", " 3: you are stubbing the behaviour of another mock inside before 'thenReturn' instruction if completed", "" )); } public static MockitoException incorrectUseOfApi() { return new MockitoException(join( "Incorrect use of API detected here:", new LocationImpl(), "", "You probably stored a reference to OngoingStubbing returned by when() and called stubbing methods like thenReturn() on this reference more than once.", "Examples of correct usage:", " when(mock.isOk()).thenReturn(true).thenReturn(false).thenThrow(exception);", " when(mock.isOk()).thenReturn(true, false).thenThrow(exception);", "" )); } public static MockitoException missingMethodInvocation() { return new MissingMethodInvocationException(join( "when() requires an argument which has to be 'a method call on a mock'.", "For example:", " when(mock.getArticles()).thenReturn(articles);", "", "Also, this error might show up because:", "1. you stub either of: final/private/equals()/hashCode() methods.", " Those methods *cannot* be stubbed/verified.", " " + NON_PUBLIC_PARENT, "2. inside when() you don't call method on mock but on some other object.", "" )); } public static MockitoException unfinishedVerificationException(Location location) { return new UnfinishedVerificationException(join( "Missing method call for verify(mock) here:", location, "", "Example of correct verification:", " verify(mock).doSomething()", "", "Also, this error might show up because you verify either of: final/private/equals()/hashCode() methods.", "Those methods *cannot* be stubbed/verified.", NON_PUBLIC_PARENT, "" )); } public static MockitoException notAMockPassedToVerify(Class<?> type) { return new NotAMockException(join( "Argument passed to verify() is of type " + type.getSimpleName() + " and is not a mock!", "Make sure you place the parenthesis correctly!", "See the examples of correct verifications:", " verify(mock).someMethod();", " verify(mock, times(10)).someMethod();", " verify(mock, atLeastOnce()).someMethod();" )); } public static MockitoException nullPassedToVerify() { return new NullInsteadOfMockException(join( "Argument passed to verify() should be a mock but is null!", "Examples of correct verifications:", " verify(mock).someMethod();", " verify(mock, times(10)).someMethod();", " verify(mock, atLeastOnce()).someMethod();", " not: verify(mock.someMethod());", "Also, if you use @Mock annotation don't miss initMocks()" )); } public static MockitoException notAMockPassedToWhenMethod() { return new NotAMockException(join( "Argument passed to when() is not a mock!", "Example of correct stubbing:", " doThrow(new RuntimeException()).when(mock).someMethod();" )); } public static MockitoException nullPassedToWhenMethod() { return new NullInsteadOfMockException(join( "Argument passed to when() is null!", "Example of correct stubbing:", " doThrow(new RuntimeException()).when(mock).someMethod();", "Also, if you use @Mock annotation don't miss initMocks()" )); } public static MockitoException mocksHaveToBePassedToVerifyNoMoreInteractions() { return new MockitoException(join( "Method requires argument(s)!", "Pass mocks that should be verified, e.g:", " verifyNoMoreInteractions(mockOne, mockTwo);", " verifyZeroInteractions(mockOne, mockTwo);", "" )); } public static MockitoException notAMockPassedToVerifyNoMoreInteractions() { return new NotAMockException(join( "Argument(s) passed is not a mock!", "Examples of correct verifications:", " verifyNoMoreInteractions(mockOne, mockTwo);", " 
verifyZeroInteractions(mockOne, mockTwo);", "" )); } public static MockitoException nullPassedToVerifyNoMoreInteractions() { return new NullInsteadOfMockException(join( "Argument(s) passed is null!", "Examples of correct verifications:", " verifyNoMoreInteractions(mockOne, mockTwo);", " verifyZeroInteractions(mockOne, mockTwo);" )); } public static MockitoException notAMockPassedWhenCreatingInOrder() { return new NotAMockException(join( "Argument(s) passed is not a mock!", "Pass mocks that require verification in order.", "For example:", " InOrder inOrder = inOrder(mockOne, mockTwo);" )); } public static MockitoException nullPassedWhenCreatingInOrder() { return new NullInsteadOfMockException(join( "Argument(s) passed is null!", "Pass mocks that require verification in order.", "For example:", " InOrder inOrder = inOrder(mockOne, mockTwo);" )); } public static MockitoException mocksHaveToBePassedWhenCreatingInOrder() { return new MockitoException(join( "Method requires argument(s)!", "Pass mocks that require verification in order.", "For example:", " InOrder inOrder = inOrder(mockOne, mockTwo);" )); } public static MockitoException inOrderRequiresFamiliarMock() { return new MockitoException(join( "InOrder can only verify mocks that were passed in during creation of InOrder.", "For example:", " InOrder inOrder = inOrder(mockOne);", " inOrder.verify(mockOne).doStuff();" )); } public static MockitoException invalidUseOfMatchers(int expectedMatchersCount, List<LocalizedMatcher> recordedMatchers) { return new InvalidUseOfMatchersException(join( "Invalid use of argument matchers!", expectedMatchersCount + " matchers expected, " + recordedMatchers.size() + " recorded:" + locationsOf(recordedMatchers), "", "This exception may occur if matchers are combined with raw values:", " //incorrect:", " someMethod(anyObject(), \"raw String\");", "When using matchers, all arguments have to be provided by matchers.", "For example:", " //correct:", " someMethod(anyObject(), eq(\"String by matcher\"));", "", "For more info see javadoc for Matchers class.", "" )); } public static MockitoException incorrectUseOfAdditionalMatchers(String additionalMatcherName, int expectedSubMatchersCount, Collection<LocalizedMatcher> matcherStack) { return new InvalidUseOfMatchersException(join( "Invalid use of argument matchers inside additional matcher " + additionalMatcherName + " !", new LocationImpl(), "", expectedSubMatchersCount + " sub matchers expected, " + matcherStack.size() + " recorded:", locationsOf(matcherStack), "", "This exception may occur if matchers are combined with raw values:", " //incorrect:", " someMethod(AdditionalMatchers.and(isNotNull(), \"raw String\");", "When using matchers, all arguments have to be provided by matchers.", "For example:", " //correct:", " someMethod(AdditionalMatchers.and(isNotNull(), eq(\"raw String\"));", "", "For more info see javadoc for Matchers and AdditionalMatchers classes.", "" )); } public static MockitoException stubPassedToVerify(Object mock) { return new CannotVerifyStubOnlyMock(join( "Argument \"" + MockUtil.getMockName(mock) + "\" passed to verify is a stubOnly() mock, not a full blown mock!", "If you intend to verify invocations on a mock, don't use stubOnly() in its MockSettings." 
)); } public static MockitoException reportNoSubMatchersFound(String additionalMatcherName) { return new InvalidUseOfMatchersException(join( "No matchers found for additional matcher " + additionalMatcherName, new LocationImpl(), "" )); } private static Object locationsOf(Collection<LocalizedMatcher> matchers) { List<String> description = new ArrayList<String>(); for (LocalizedMatcher matcher : matchers) description.add(matcher.getLocation().toString()); return join(description.toArray()); } public static AssertionError argumentsAreDifferent(String wanted, String actual, Location actualLocation) { String message = join("Argument(s) are different! Wanted:", wanted, new LocationImpl(), "Actual invocation has different arguments:", actual, actualLocation, "" ); return ExceptionFactory.createArgumentsAreDifferentException(message, wanted, actual); } public static MockitoAssertionError wantedButNotInvoked(DescribedInvocation wanted) { return new WantedButNotInvoked(createWantedButNotInvokedMessage(wanted)); } public static MockitoAssertionError wantedButNotInvoked(DescribedInvocation wanted, List<? extends DescribedInvocation> invocations) { String allInvocations; if (invocations.isEmpty()) { allInvocations = "Actually, there were zero interactions with this mock.\n"; } else { StringBuilder sb = new StringBuilder( "\nHowever, there " + were_exactly_x_interactions(invocations.size()) + " with this mock:\n"); for (DescribedInvocation i : invocations) { sb.append(i.toString()) .append("\n") .append(i.getLocation()) .append("\n\n"); } allInvocations = sb.toString(); } String message = createWantedButNotInvokedMessage(wanted); return new WantedButNotInvoked(message + allInvocations); } private static String createWantedButNotInvokedMessage(DescribedInvocation wanted) { return join( "Wanted but not invoked:", wanted.toString(), new LocationImpl(), "" ); } public static MockitoAssertionError wantedButNotInvokedInOrder(DescribedInvocation wanted, DescribedInvocation previous) { return new VerificationInOrderFailure(join( "Verification in order failure", "Wanted but not invoked:", wanted.toString(), new LocationImpl(), "Wanted anywhere AFTER following interaction:", previous.toString(), previous.getLocation(), "" )); } public static MockitoAssertionError tooManyActualInvocations(int wantedCount, int actualCount, DescribedInvocation wanted, List<Location> locations) { String message = createTooManyInvocationsMessage(wantedCount, actualCount, wanted, locations); return new TooManyActualInvocations(message); } private static String createTooManyInvocationsMessage(int wantedCount, int actualCount, DescribedInvocation wanted, List<Location> invocations) { return join( wanted.toString(), "Wanted " + pluralize(wantedCount) + ":", new LocationImpl(), "But was " + pluralize(actualCount) + ":", createAllLocationsMessage(invocations), "" ); } public static MockitoAssertionError neverWantedButInvoked(DescribedInvocation wanted, List<Location> invocations) { return new NeverWantedButInvoked(join( wanted.toString(), "Never wanted here:", new LocationImpl(), "But invoked here:", createAllLocationsMessage(invocations) )); } public static MockitoAssertionError tooManyActualInvocationsInOrder(int wantedCount, int actualCount, DescribedInvocation wanted, List<Location> invocations) { String message = createTooManyInvocationsMessage(wantedCount, actualCount, wanted, invocations); return new VerificationInOrderFailure(join( "Verification in order failure:" + message )); } private static String 
createAllLocationsMessage(List<Location> locations) { if (locations == null) { return "\n"; } StringBuilder sb = new StringBuilder(); for (Location location : locations) { sb.append(location).append("\n"); } return sb.toString(); } private static String createTooLittleInvocationsMessage(org.mockito.internal.reporting.Discrepancy discrepancy, DescribedInvocation wanted, List<Location> locations) { return join( wanted.toString(), "Wanted " + discrepancy.getPluralizedWantedCount() + (discrepancy.getWantedCount() == 0 ? "." : ":"), new LocationImpl(), "But was " + discrepancy.getPluralizedActualCount() + (discrepancy.getActualCount() == 0 ? "." : ":"), createAllLocationsMessage(locations) ); } public static MockitoAssertionError tooLittleActualInvocations(org.mockito.internal.reporting.Discrepancy discrepancy, DescribedInvocation wanted, List<Location> allLocations) { String message = createTooLittleInvocationsMessage(discrepancy, wanted, allLocations); return new TooLittleActualInvocations(message); } public static MockitoAssertionError tooLittleActualInvocationsInOrder(org.mockito.internal.reporting.Discrepancy discrepancy, DescribedInvocation wanted, List<Location> locations) { String message = createTooLittleInvocationsMessage(discrepancy, wanted, locations); return new VerificationInOrderFailure(join( "Verification in order failure:" + message )); } public static MockitoAssertionError noMoreInteractionsWanted(Invocation undesired, List<VerificationAwareInvocation> invocations) { ScenarioPrinter scenarioPrinter = new ScenarioPrinter(); String scenario = scenarioPrinter.print(invocations); return new NoInteractionsWanted(join( "No interactions wanted here:", new LocationImpl(), "But found this interaction on mock '" + MockUtil.getMockName(undesired.getMock()) + "':", undesired.getLocation(), scenario )); } public static MockitoAssertionError noMoreInteractionsWantedInOrder(Invocation undesired) { return new VerificationInOrderFailure(join( "No interactions wanted here:", new LocationImpl(), "But found this interaction on mock '" + MockUtil.getMockName(undesired.getMock()) + "':", undesired.getLocation() )); } public static MockitoException cannotMockClass(Class<?> clazz, String reason) { return new MockitoException(join( "Cannot mock/spy " + clazz.toString(), "Mockito cannot mock/spy because :", " - " + reason )); } public static MockitoException cannotStubVoidMethodWithAReturnValue(String methodName) { return new CannotStubVoidMethodWithReturnValue(join( "'" + methodName + "' is a *void method* and it *cannot* be stubbed with a *return value*!", "Voids are usually stubbed with Throwables:", " doThrow(exception).when(mock).someVoidMethod();", "If you need to set the void method to do nothing you can use:", " doNothing().when(mock).someVoidMethod();", "For more information, check out the javadocs for Mockito.doNothing().", "***", "If you're unsure why you're getting above error read on.", "Due to the nature of the syntax above problem might occur because:", "1. The method you are trying to stub is *overloaded*. Make sure you are calling the right overloaded version.", "2. Somewhere in your test you are stubbing *final methods*. Sorry, Mockito does not verify/stub final methods.", "3. A spy is stubbed using when(spy.foo()).then() syntax. It is safer to stub spies - ", " - with doReturn|Throw() family of methods. More in javadocs for Mockito.spy() method.", "4. 
" + NON_PUBLIC_PARENT, "" )); } public static MockitoException onlyVoidMethodsCanBeSetToDoNothing() { return new MockitoException(join( "Only void methods can doNothing()!", "Example of correct use of doNothing():", " doNothing().", " doThrow(new RuntimeException())", " .when(mock).someVoidMethod();", "Above means:", "someVoidMethod() does nothing the 1st time but throws an exception the 2nd time is called" )); } public static MockitoException wrongTypeOfReturnValue(String expectedType, String actualType, String methodName) { return new WrongTypeOfReturnValue(join( actualType + " cannot be returned by " + methodName + "()", methodName + "() should return " + expectedType, "***", "If you're unsure why you're getting above error read on.", "Due to the nature of the syntax above problem might occur because:", "1. This exception *might* occur in wrongly written multi-threaded tests.", " Please refer to Mockito FAQ on limitations of concurrency testing.", "2. A spy is stubbed using when(spy.foo()).then() syntax. It is safer to stub spies - ", " - with doReturn|Throw() family of methods. More in javadocs for Mockito.spy() method.", "" )); } public static MockitoException wrongTypeReturnedByDefaultAnswer(Object mock, String expectedType, String actualType, String methodName) { return new WrongTypeOfReturnValue(join( "Default answer returned a result with the wrong type:", actualType + " cannot be returned by " + methodName + "()", methodName + "() should return " + expectedType, "", "The default answer of " + MockUtil.getMockName(mock) + " that was configured on the mock is probably incorrectly implemented.", "" )); } public static MoreThanAllowedActualInvocations wantedAtMostX(int maxNumberOfInvocations, int foundSize) { return new MoreThanAllowedActualInvocations(join("Wanted at most " + pluralize(maxNumberOfInvocations) + " but was " + foundSize)); } public static MockitoException misplacedArgumentMatcher(List<LocalizedMatcher> lastMatchers) { return new InvalidUseOfMatchersException(join( "Misplaced or misused argument matcher detected here:", locationsOf(lastMatchers), "", "You cannot use argument matchers outside of verification or stubbing.", "Examples of correct usage of argument matchers:", " when(mock.get(anyInt())).thenReturn(null);", " doThrow(new RuntimeException()).when(mock).someVoidMethod(anyObject());", " verify(mock).someMethod(contains(\"foo\"))", "", "This message may appear after an NullPointerException if the last matcher is returning an object ", "like any() but the stubbed method signature expect a primitive argument, in this case,", "use primitive alternatives.", " when(mock.get(any())); // bad use, will raise NPE", " when(mock.get(anyInt())); // correct usage use", "", "Also, this error might show up because you use argument matchers with methods that cannot be mocked.", "Following methods *cannot* be stubbed/verified: final/private/equals()/hashCode().", NON_PUBLIC_PARENT, "" )); } public static MockitoException smartNullPointerException(String invocation, Location location) { return new SmartNullPointerException(join( "You have a NullPointerException here:", new LocationImpl(), "because this method call was *not* stubbed correctly:", location, invocation, "" )); } public static MockitoException noArgumentValueWasCaptured() { return new MockitoException(join( "No argument value was captured!", "You might have forgotten to use argument.capture() in verify()...", "...or you used capture() in stubbing but stubbed method was not called.", "Be aware that it is recommended to 
use capture() only with verify()", "", "Examples of correct argument capturing:", " ArgumentCaptor<Person> argument = ArgumentCaptor.forClass(Person.class);", " verify(mock).doSomething(argument.capture());", " assertEquals(\"John\", argument.getValue().getName());", "" )); } public static MockitoException extraInterfacesDoesNotAcceptNullParameters() { return new MockitoException(join( "extraInterfaces() does not accept null parameters." )); } public static MockitoException extraInterfacesAcceptsOnlyInterfaces(Class<?> wrongType) { return new MockitoException(join( "extraInterfaces() accepts only interfaces.", "You passed following type: " + wrongType.getSimpleName() + " which is not an interface." )); } public static MockitoException extraInterfacesCannotContainMockedType(Class<?> wrongType) { return new MockitoException(join( "extraInterfaces() does not accept the same type as the mocked type.", "You mocked following type: " + wrongType.getSimpleName(), "and you passed the same very interface to the extraInterfaces()" )); } public static MockitoException extraInterfacesRequiresAtLeastOneInterface() { return new MockitoException(join( "extraInterfaces() requires at least one interface." )); } public static MockitoException mockedTypeIsInconsistentWithSpiedInstanceType(Class<?> mockedType, Object spiedInstance) { return new MockitoException(join( "Mocked type must be the same as the type of your spied instance.", "Mocked type must be: " + spiedInstance.getClass().getSimpleName() + ", but is: " + mockedType.getSimpleName(), " //correct spying:", " spy = mock( ->ArrayList.class<- , withSettings().spiedInstance( ->new ArrayList()<- );", " //incorrect - types don't match:", " spy = mock( ->List.class<- , withSettings().spiedInstance( ->new ArrayList()<- );" )); } public static MockitoException cannotCallAbstractRealMethod() { return new MockitoException(join( "Cannot call abstract real method on java object!", "Calling real methods is only possible when mocking non abstract method.", " //correct example:", " when(mockOfConcreteClass.nonAbstractMethod()).thenCallRealMethod();" )); } public static MockitoException cannotVerifyToString() { return new MockitoException(join( "Mockito cannot verify toString()", "toString() is too often used behind of scenes (i.e. during String concatenation, in IDE debugging views). " + "Verifying it may give inconsistent or hard to understand results. " + "Not to mention that verifying toString() most likely hints awkward design (hard to explain in a short exception message. Trust me...)", "However, it is possible to stub toString(). Stubbing toString() smells a bit funny but there are rare, legitimate use cases." 
)); } public static MockitoException moreThanOneAnnotationNotAllowed(String fieldName) { return new MockitoException("You cannot have more than one Mockito annotation on a field!\n" + "The field '" + fieldName + "' has multiple Mockito annotations.\n" + "For info how to use annotations see examples in javadoc for MockitoAnnotations class."); } public static MockitoException unsupportedCombinationOfAnnotations(String undesiredAnnotationOne, String undesiredAnnotationTwo) { return new MockitoException("This combination of annotations is not permitted on a single field:\n" + "@" + undesiredAnnotationOne + " and @" + undesiredAnnotationTwo); } public static MockitoException cannotInitializeForSpyAnnotation(String fieldName, Exception details) { return new MockitoException(join("Cannot instantiate a @Spy for '" + fieldName + "' field.", "You haven't provided the instance for spying at field declaration so I tried to construct the instance.", "However, I failed because: " + details.getMessage(), "Examples of correct usage of @Spy:", " @Spy List mock = new LinkedList();", " @Spy Foo foo; //only if Foo has parameterless constructor", " //also, don't forget about MockitoAnnotations.initMocks();", ""), details); } public static MockitoException cannotInitializeForInjectMocksAnnotation(String fieldName, String causeMessage) { return new MockitoException(join("Cannot instantiate @InjectMocks field named '" + fieldName + "'! Cause: "+causeMessage, "You haven't provided the instance at field declaration so I tried to construct the instance.", "Examples of correct usage of @InjectMocks:", " @InjectMocks Service service = new Service();", " @InjectMocks Service service;", " //and... don't forget about some @Mocks for injection :)", "")); } public static MockitoException atMostAndNeverShouldNotBeUsedWithTimeout() { return new FriendlyReminderException(join("", "Don't panic! 
I'm just a friendly reminder!", "timeout() should not be used with atMost() or never() because...", "...it does not make much sense - the test would have passed immediately in concurrency", "We kept this method only to avoid compilation errors when upgrading Mockito.", "In future release we will remove timeout(x).atMost(y) from the API.", "If you want to find out more please refer to issue 235", "")); } public static MockitoException fieldInitialisationThrewException(Field field, Throwable details) { return new InjectMocksException(join( "Cannot instantiate @InjectMocks field named '" + field.getName() + "' of type '" + field.getType() + "'.", "You haven't provided the instance at field declaration so I tried to construct the instance.", "However the constructor or the initialization block threw an exception : " + details.getMessage(), ""), details); } public static MockitoException methodDoesNotAcceptParameter(String method, String parameter) { return new MockitoException(method + "() does not accept " + parameter + " See the Javadoc."); } public static MockitoException invocationListenersRequiresAtLeastOneListener() { return new MockitoException("invocationListeners() requires at least one listener"); } public static MockitoException invocationListenerThrewException(InvocationListener listener, Throwable listenerThrowable) { return new MockitoException(join( "The invocation listener with type " + listener.getClass().getName(), "threw an exception : " + listenerThrowable.getClass().getName() + listenerThrowable.getMessage()), listenerThrowable); } public static MockitoException cannotInjectDependency(Field field, Object matchingMock, Exception details) { return new MockitoException(join( "Mockito couldn't inject mock dependency '" + MockUtil.getMockName(matchingMock) + "' on field ", "'" + field + "'", "whose type '" + field.getDeclaringClass().getCanonicalName() + "' was annotated by @InjectMocks in your test.", "Also I failed because: " + exceptionCauseMessageIfAvailable(details), "" ), details); } private static String exceptionCauseMessageIfAvailable(Exception details) { if (details.getCause() == null) { return details.getMessage(); } return details.getCause().getMessage(); } public static MockitoException mockedTypeIsInconsistentWithDelegatedInstanceType(Class<?> mockedType, Object delegatedInstance) { return new MockitoException(join( "Mocked type must be the same as the type of your delegated instance.", "Mocked type must be: " + delegatedInstance.getClass().getSimpleName() + ", but is: " + mockedType.getSimpleName(), " //correct delegate:", " spy = mock( ->List.class<- , withSettings().delegatedInstance( ->new ArrayList()<- );", " //incorrect - types don't match:", " spy = mock( ->List.class<- , withSettings().delegatedInstance( ->new HashSet()<- );" )); } public static MockitoException spyAndDelegateAreMutuallyExclusive() { return new MockitoException(join( "Settings should not define a spy instance and a delegated instance at the same time." 
        ));
    }

    public static MockitoException invalidArgumentRangeAtIdentityAnswerCreationTime() {
        return new MockitoException(join(
                "Invalid argument index.",
                "The index needs to be a positive number that indicates the position of the argument to return.",
                "However it is possible to use the -1 value to indicate that the last argument should be",
                "returned."));
    }

    public static MockitoException invalidArgumentPositionRangeAtInvocationTime(InvocationOnMock invocation, boolean willReturnLastParameter, int argumentIndex) {
        return new MockitoException(join(
                "Invalid argument index for the current invocation of method : ",
                " -> " + MockUtil.getMockName(invocation.getMock()) + "." + invocation.getMethod().getName() + "()",
                "",
                (willReturnLastParameter ? "Last parameter wanted" : "Wanted parameter at position " + argumentIndex) + " but " + possibleArgumentTypesOf(invocation),
                "The index needs to be a positive number that indicates a valid position of the argument in the invocation.",
                "However it is possible to use the -1 value to indicate that the last argument should be returned.",
                ""
        ));
    }

    private static StringBuilder possibleArgumentTypesOf(InvocationOnMock invocation) {
        Class<?>[] parameterTypes = invocation.getMethod().getParameterTypes();
        if (parameterTypes.length == 0) {
            return new StringBuilder("the method has no arguments.\n");
        }
        StringBuilder stringBuilder = new StringBuilder("the possible argument indexes for this method are :\n");
        for (int i = 0, parameterTypesLength = parameterTypes.length; i < parameterTypesLength; i++) {
            stringBuilder.append(" [").append(i);
            if (invocation.getMethod().isVarArgs() && i == parameterTypesLength - 1) {
                stringBuilder.append("+] ").append(parameterTypes[i].getComponentType().getSimpleName()).append(" <- Vararg").append("\n");
            } else {
                stringBuilder.append("] ").append(parameterTypes[i].getSimpleName()).append("\n");
            }
        }
        return stringBuilder;
    }

    public static MockitoException wrongTypeOfArgumentToReturn(InvocationOnMock invocation, String expectedType, Class<?> actualType, int argumentIndex) {
        return new WrongTypeOfReturnValue(join(
                "The argument of type '" + actualType.getSimpleName() + "' cannot be returned because the following ",
                "method should return the type '" + expectedType + "'",
                " -> " + MockUtil.getMockName(invocation.getMock()) + "." + invocation.getMethod().getName() + "()",
                "",
                "The reasons for this error can be:",
                "1. The wanted argument position is incorrect.",
                "2. The answer is used on the wrong interaction.",
                "",
                "Position of the wanted argument is " + argumentIndex + " and " + possibleArgumentTypesOf(invocation),
                "***",
                "However, if you're still unsure why you're getting the above error, read on.",
                "Due to the nature of the syntax, the above problem might occur because:",
                "1. This exception *might* occur in wrongly written multi-threaded tests.",
                " Please refer to Mockito FAQ on limitations of concurrency testing.",
                "2. A spy is stubbed using when(spy.foo()).then() syntax. It is safer to stub spies - ",
                " - with doReturn|Throw() family of methods. More in javadocs for Mockito.spy() method.",
                ""
        ));
    }

    public static MockitoException defaultAnswerDoesNotAcceptNullParameter() {
        return new MockitoException("defaultAnswer() does not accept null parameter");
    }

    public static MockitoException serializableWontWorkForObjectsThatDontImplementSerializable(Class<?> classToMock) {
        return new MockitoException(join(
                "You are using the setting 'withSettings().serializable()' however the type you are trying to mock '" + classToMock.getSimpleName() + "'",
                "does not implement Serializable AND does not have a no-arg constructor.",
                "This combination is required, otherwise you will get a 'java.io.InvalidClassException' when the mock is serialized",
                "",
                "Also note that, as required by the Java serialization specification, the whole hierarchy needs to implement Serializable,",
                "i.e. the top-most superclass has to implement Serializable.",
                ""
        ));
    }

    public static MockitoException delegatedMethodHasWrongReturnType(Method mockMethod, Method delegateMethod, Object mock, Object delegate) {
        return new MockitoException(join(
                "Methods called on delegated instance must have compatible return types with the mock.",
                "When calling: " + mockMethod + " on mock: " + MockUtil.getMockName(mock),
                "return type should be: " + mockMethod.getReturnType().getSimpleName() + ", but was: " + delegateMethod.getReturnType().getSimpleName(),
                "Check that the instance passed to delegatesTo() is of the correct type or contains compatible methods",
                "(delegate instance had type: " + delegate.getClass().getSimpleName() + ")"
        ));
    }

    public static MockitoException delegatedMethodDoesNotExistOnDelegate(Method mockMethod, Object mock, Object delegate) {
        return new MockitoException(join(
                "Methods called on mock must exist in delegated instance.",
                "When calling: " + mockMethod + " on mock: " + MockUtil.getMockName(mock),
                "no such method was found.",
                "Check that the instance passed to delegatesTo() is of the correct type or contains compatible methods",
                "(delegate instance had type: " + delegate.getClass().getSimpleName() + ")"
        ));
    }

    public static MockitoException usingConstructorWithFancySerializable(SerializableMode mode) {
        return new MockitoException("Mocks instantiated with constructor cannot be combined with " + mode + " serialization mode.");
    }

    public static MockitoException cannotCreateTimerWithNegativeDurationTime(long durationMillis) {
        return new FriendlyReminderException(join(
                "",
                "Don't panic! I'm just a friendly reminder!",
                "It is impossible for time to go backward, therefore...",
                "You cannot put a negative value of duration: (" + durationMillis + ")",
                "as an argument of timer methods (after(), timeout())",
                ""
        ));
    }

    public static MockitoException notAnException() {
        return new MockitoException(join(
                "Exception type cannot be null.",
                "This may happen with doThrow(Class)|thenThrow(Class) family of methods if passing a null parameter."));
    }

    public static UnnecessaryStubbingException formatUnncessaryStubbingException(Class<?> testClass, Collection<Invocation> unnecessaryStubbings) {
        StringBuilder stubbings = new StringBuilder();
        int count = 1;
        for (Invocation u : unnecessaryStubbings) {
            stubbings.append("\n ").append(count++).append(". ").append(u.getLocation());
        }
        String heading = (testClass != null) ?
"Unnecessary stubbings detected in test class: " + testClass.getSimpleName() : "Unnecessary stubbings detected."; return new UnnecessaryStubbingException(join( heading, "Clean & maintainable test code requires zero unnecessary code.", "Following stubbings are unnecessary (click to navigate to relevant line of code):" + stubbings, "Please remove unnecessary stubbings or use 'lenient' strictness. More info: javadoc for UnnecessaryStubbingException class." )); } public static void unncessaryStubbingException(List<Invocation> unused) { throw formatUnncessaryStubbingException(null, unused); } public static void potentialStubbingProblem( Invocation actualInvocation, Collection<Invocation> argMismatchStubbings) { StringBuilder stubbings = new StringBuilder(); int count = 1; for (Invocation s : argMismatchStubbings) { stubbings.append(" ").append(count++).append(". ").append(s); stubbings.append("\n ").append(s.getLocation()).append("\n"); } stubbings.deleteCharAt(stubbings.length()-1); //remove trailing end of line throw new PotentialStubbingProblem(join( "Strict stubbing argument mismatch. Please check:", " - this invocation of '" + actualInvocation.getMethod().getName() + "' method:", " " + actualInvocation, " " + actualInvocation.getLocation(), " - has following stubbing(s) with different arguments:", stubbings, "Typically, stubbing argument mismatch indicates user mistake when writing tests.", "Mockito fails early so that you can debug potential problem easily.", "However, there are legit scenarios when this exception generates false negative signal:", " - stubbing the same method multiple times using 'given().will()' or 'when().then()' API", " Please use 'will().given()' or 'doReturn().when()' API for stubbing.", " - stubbed method is intentionally invoked with different arguments by code under test", " Please use default or 'silent' JUnit Rule (equivalent of Strictness.LENIENT).", "For more information see javadoc for PotentialStubbingProblem class.")); } public static void redundantMockitoListener(String listenerType) { throw new RedundantListenerException(join( "Problems adding Mockito listener.", "Listener of type '" + listenerType + "' has already been added and not removed.", "It indicates that previous listener was not removed according to the API.", "When you add a listener, don't forget to remove the listener afterwards:", " Mockito.framework().removeListener(myListener);", "For more information, see the javadoc for RedundantListenerException class.")); } public static void unfinishedMockingSession() { throw new UnfinishedMockingSessionException(join( "Unfinished mocking session detected.", "Previous MockitoSession was not concluded with 'finishMocking()'.", "For examples of correct usage see javadoc for MockitoSession class.")); } }
package org.n52.sensorweb.wdc; import java.util.TimerTask; import java.util.concurrent.locks.ReentrantLock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DataCollectionTask extends TimerTask { private static Logger LOG = LoggerFactory.getLogger(DataCollectionTask.class); private final DataCollector dataCollector; private static final ReentrantLock oneCollectorLock = new ReentrantLock(true); public DataCollectionTask(final DataCollector checkerP) { dataCollector = checkerP; } @Override public void run() { LOG.info("*** Run dataCollector {}", dataCollector); // used to sync access to lastUsedDateFile and to not have more than one collector at a time. oneCollectorLock.lock(); // TODO here we should handle the file writing and appending on a global level try { dataCollector.collectWeatherData(); LOG.info("*** Ran dataCollector. Next run in '{}' minutes.",dataCollector.getParseIntervalMillis()/60000); } finally { oneCollectorLock.unlock(); } } @Override public boolean cancel() { LOG.info("Cancelling {}",this); return super.cancel(); } @Override protected void finalize() throws Throwable { LOG.debug("Finalizing {}",this); super.finalize(); } @Override public String toString() { return String.format("DataCollectionTask [dataCollector=%s]", dataCollector); } }
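// --- Usage sketch (illustrative only, not part of DataCollectionTask) ---
// Schedules the task with a plain java.util.Timer, reusing getParseIntervalMillis() (already
// referenced in the log message above) as the repeat period. The collector instance and the
// timer name are assumptions made for illustration; requires `import java.util.Timer;`.
static void scheduleCollection(final DataCollector collector) {
    final Timer timer = new Timer("wdc-data-collection", true);   // daemon timer thread
    // Overlapping runs are already serialised by the ReentrantLock inside DataCollectionTask.
    timer.scheduleAtFixedRate(new DataCollectionTask(collector), 0L, collector.getParseIntervalMillis());
}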
package org.pentaho.mondrian.tck; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import mondrian.olap.MondrianProperties; import mondrian.rolap.RolapConnection; import mondrian.rolap.RolapUtil; import org.olap4j.CellSet; import org.olap4j.OlapConnection; import org.olap4j.OlapStatement; import java.io.IOException; import java.io.Writer; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.sql.Connection; import java.sql.DriverManager; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutionException; public class MondrianContext extends Context { private static final LoadingCache<String, MondrianContext> instances = CacheBuilder.newBuilder().build( new CacheLoader<String, MondrianContext>() { @Override public MondrianContext load( String key ) throws Exception { Connection connection = DriverManager.getConnection( key ); OlapConnection olapConnection = connection.unwrap( OlapConnection.class ); return new MondrianContext( olapConnection ); } } ); private static final LoadingCache<String, Path> catalogs = CacheBuilder.newBuilder().build( new CacheLoader<String, Path>() { @Override public Path load( String key ) throws Exception { Path catalogFile = Files.createTempFile( "temp", ".xml" ); catalogFile.toFile().deleteOnExit(); try ( Writer writer = Files.newBufferedWriter( catalogFile, Charset.defaultCharset() ) ) { writer.write( key ); } return catalogFile; } } ); private OlapConnection olapConnection; private MondrianContext( final OlapConnection olapConnection ) { this.olapConnection = olapConnection; } public static MondrianContext forConnection( String connectionString ) throws ExecutionException, IOException { return instances.get( connectionString ); } public static MondrianContext forCatalog( String catalog ) throws IOException, ExecutionException { return forConnection( replaceCatalog( MondrianProperties.instance().TestConnectString.get(), catalogs.get( catalog ) ) ); } public static MondrianContext defaultContext() throws IOException, ExecutionException { return forConnection( MondrianProperties.instance().TestConnectString.get() ); } public void verify( MondrianExpectation expectation ) throws Exception { final List<String> sqls = new ArrayList<>(); RolapUtil.ExecuteQueryHook existingHook = RolapUtil.getHook(); RolapUtil.setHook( sqlCollector( sqls ) ); OlapStatement statement = olapConnection.createStatement(); CellSet cellSet = statement.executeOlapQuery( expectation.getQuery() ); RolapUtil.setHook( existingHook ); expectation.verify( cellSet, sqls, olapConnection.unwrap( RolapConnection.class ).getSchema().getDialect() ); } private RolapUtil.ExecuteQueryHook sqlCollector( final List<String> sqls ) { return new RolapUtil.ExecuteQueryHook() { @Override public void onExecuteQuery( String sql ) { sqls.add( sql ); } }; } private static String replaceCatalog( final String connectString, final Path catalogFile ) { return connectString.replaceFirst( "Catalog=[^;]+;", "Catalog=" + catalogFile.toString() .replaceAll( "\\\\", "/" ) + ";" ); } }
package org.smoothbuild.lang.object.db; import static org.smoothbuild.SmoothConstants.CHARSET; import static org.smoothbuild.lang.base.Location.unknownLocation; import static org.smoothbuild.lang.object.db.ObjectsDbException.corruptedObjectException; import static org.smoothbuild.lang.object.db.ObjectsDbException.objectsDbException; import static org.smoothbuild.lang.object.type.TypeNames.BLOB; import static org.smoothbuild.lang.object.type.TypeNames.BOOL; import static org.smoothbuild.lang.object.type.TypeNames.NOTHING; import static org.smoothbuild.lang.object.type.TypeNames.STRING; import static org.smoothbuild.lang.object.type.TypeNames.TYPE; import java.io.EOFException; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.inject.Inject; import org.smoothbuild.db.hashed.DecodingStringException; import org.smoothbuild.db.hashed.Hash; import org.smoothbuild.db.hashed.HashedDb; import org.smoothbuild.db.hashed.HashingBufferedSink; import org.smoothbuild.lang.base.Field; import org.smoothbuild.lang.object.base.ArrayBuilder; import org.smoothbuild.lang.object.base.BlobBuilder; import org.smoothbuild.lang.object.base.Bool; import org.smoothbuild.lang.object.base.SObject; import org.smoothbuild.lang.object.base.SString; import org.smoothbuild.lang.object.base.StructBuilder; import org.smoothbuild.lang.object.type.BlobType; import org.smoothbuild.lang.object.type.BoolType; import org.smoothbuild.lang.object.type.ConcreteArrayType; import org.smoothbuild.lang.object.type.ConcreteType; import org.smoothbuild.lang.object.type.NothingType; import org.smoothbuild.lang.object.type.StringType; import org.smoothbuild.lang.object.type.StructType; import org.smoothbuild.lang.object.type.TypeType; import okio.BufferedSource; public class ObjectsDb { private final HashedDb hashedDb; private final Map<Hash, ConcreteType> typesCache; private TypeType typeType; private BoolType boolType; private StringType stringType; private BlobType blobType; private NothingType nothingType; @Inject public ObjectsDb(HashedDb hashedDb) { this.hashedDb = hashedDb; this.typesCache = new HashMap<>(); } public ArrayBuilder arrayBuilder(ConcreteType elementType) { return new ArrayBuilder(arrayType(elementType), hashedDb); } public StructBuilder structBuilder(StructType type) { return new StructBuilder(type, hashedDb); } public BlobBuilder blobBuilder() { try { return new BlobBuilder(blobType(), hashedDb); } catch (IOException e) { throw objectsDbException(e); } } public SString string(String string) { try { return new SString(hashedDb.writeString(string), stringType(), hashedDb); } catch (IOException e) { throw objectsDbException(e); } } public Bool bool(boolean value) { return new Bool(writeBool(value), boolType(), hashedDb); } private Hash writeBool(boolean value) { try (HashingBufferedSink sink = hashedDb.sink()) { sink.writeByte(value ? 1 : 0); sink.close(); return sink.hash(); } catch (IOException e) { throw objectsDbException(e); } } public SObject get(Hash hash) { List<Hash> hashes = readHashes(hash); switch (hashes.size()) { case 1: // If Merkle tree root has only one child then it must // be Type("Type") smooth object. getType() will verify it. 
return getType(hash); case 2: ConcreteType type = getType(hashes.get(0)); if (type.equals(typeType())) { return getType(hash); } else { return type.newInstance(hashes.get(1)); } default: throw corruptedObjectException( hash, "Its Merkle tree root has " + hashes.size() + " children."); } } private List<Hash> readHashes(Hash hash) { try { return hashedDb.readHashes(hash); } catch (EOFException e) { throw corruptedObjectException(hash, "Its Merkle tree root is hash of byte sequence which size is not multiple of hash size."); } catch (IOException e) { throw objectsDbException(e); } } public TypeType typeType() { if (typeType == null) { typeType = new TypeType(writeBasicTypeData(TYPE), this, hashedDb); typesCache.put(typeType.hash(), typeType); } return typeType; } public BoolType boolType() { if (boolType == null) { boolType = new BoolType(writeBasicTypeData(BOOL), typeType(), hashedDb, this); typesCache.put(boolType.hash(), boolType); } return boolType; } public StringType stringType() { if (stringType == null) { stringType = new StringType(writeBasicTypeData(STRING), typeType(), hashedDb, this); typesCache.put(stringType.hash(), stringType); } return stringType; } public BlobType blobType() { if (blobType == null) { blobType = new BlobType(writeBasicTypeData(BLOB), typeType(), hashedDb, this); typesCache.put(blobType.hash(), blobType); } return blobType; } public NothingType nothingType() { if (nothingType == null) { nothingType = new NothingType(writeBasicTypeData(NOTHING), typeType(), hashedDb, this); typesCache.put(nothingType.hash(), nothingType); } return nothingType; } private Hash writeBasicTypeData(String name) { try { return hashedDb.writeHashes(hashedDb.writeString(name)); } catch (IOException e) { throw objectsDbException(e); } } public ConcreteArrayType arrayType(ConcreteType elementType) { Hash dataHash = writeArray(elementType); ConcreteArrayType superType = possiblyNullArrayType(elementType.superType()); return cacheType( new ConcreteArrayType(dataHash, typeType(), superType, elementType, hashedDb, this)); } private Hash writeArray(ConcreteType elementType) { try { return hashedDb.writeHashes(hashedDb.writeString(""), elementType.hash()); } catch (IOException e) { throw objectsDbException(e); } } private ConcreteArrayType possiblyNullArrayType(ConcreteType elementType) { return elementType == null ? 
null : arrayType(elementType); } public StructType structType(String name, Iterable<Field> fields) { Hash hash = writeStruct(name, fields); return cacheType(new StructType(hash, typeType(), name, fields, hashedDb, this)); } private Hash writeStruct(String name, Iterable<Field> fields) { try { return hashedDb.writeHashes(hashedDb.writeString(name), writeFields(fields)); } catch (IOException e) { throw objectsDbException(e); } } private Hash writeFields(Iterable<Field> fields) throws IOException { List<Hash> fieldHashes = new ArrayList<>(); for (Field field : fields) { fieldHashes.add(writeField(field.name(), field.type())); } return hashedDb.writeHashes(fieldHashes.toArray(new Hash[0])); } private Hash writeField(String name, ConcreteType type) throws IOException { return hashedDb.writeHashes(hashedDb.writeString(name), type.hash()); } private ConcreteType getType(Hash hash) { if (typesCache.containsKey(hash)) { return typesCache.get(hash); } else { try { return getTypeImpl(hash); } catch (IOException e) { throw objectsDbException(e); } } } private ConcreteType getTypeImpl(Hash hash) throws IOException { List<Hash> hashes = hashedDb.readHashes(hash); switch (hashes.size()) { case 1: if (!typeType().hash().equals(hash)) { throw corruptedObjectException(hash, "Expected object which is instance of 'Type' type " + "but its Merkle tree has only one child (so it should be Type type) but " + "it has different hash."); } return typeType(); case 2: Hash typeHash = hashes.get(0); if (!typeType().hash().equals(typeHash)) { throw corruptedObjectException(hash, "Expected object which is instance of 'Type' " + "type but its Merkle tree's first child is not Type type."); } Hash dataHash = hashes.get(1); return readFromDataHash(dataHash, hash); default: throw corruptedObjectException( hash, "Its Merkle tree root has " + hashes.size() + " children."); } } public ConcreteType readFromDataHash(Hash typeDataHash, Hash typeHash) throws IOException { try (BufferedSource source = hashedDb.source(typeDataHash)) { Hash nameHash = Hash.read(source); String name = decodeName(typeHash, nameHash); switch (name) { case BOOL: assertNoMoreData(typeHash, source, name); return boolType(); case STRING: assertNoMoreData(typeHash, source, name); return stringType(); case BLOB: assertNoMoreData(typeHash, source, name); return blobType(); case NOTHING: assertNoMoreData(typeHash, source, name); return nothingType(); case "": ConcreteType elementType = getType(Hash.read(source)); ConcreteArrayType superType = possiblyNullArrayType(elementType.superType()); return cacheType(new ConcreteArrayType(typeDataHash, typeType(), superType, elementType, hashedDb, this)); default: } Iterable<Field> fields = readFields(Hash.read(source), typeHash); assertNoMoreData(typeHash, source, "struct"); return cacheType(new StructType(typeDataHash, typeType(), name, fields, hashedDb, this)); } } private String decodeName(Hash typeHash, Hash nameHash) throws IOException { try { return hashedDb.readString(nameHash); } catch (DecodingStringException e) { throw corruptedObjectException(typeHash, "It is an instance of a Type which name cannot be " + "decoded using " + CHARSET + " encoding."); } } private static void assertNoMoreData(Hash typeHash, BufferedSource source, String typeName) throws IOException { if (!source.exhausted()) { throw corruptedObjectException(typeHash, "It is " + typeName + " type but its Merkle tree has unnecessary children."); } } private Iterable<Field> readFields(Hash hash, Hash typeHash) throws IOException { List<Field> result = 
new ArrayList<>(); for (Hash fieldHash : hashedDb.readHashes(hash)) { List<Hash> hashes = hashedDb.readHashes(fieldHash); if (hashes.size() != 2) { throw corruptedObjectException(typeHash, "It is struct type but one of its field hashes doesn't have two children but " + hashes.size() + "."); } String name = decodeFieldName(typeHash, hashes.get(0)); ConcreteType type = getType(hashes.get(1)); result.add(new Field(type, name, unknownLocation())); } return result; } private String decodeFieldName(Hash typeHash, Hash nameHash) throws IOException { try { return hashedDb.readString(nameHash); } catch (DecodingStringException e) { throw corruptedObjectException(typeHash, "It is an instance of a struct Type which field " + "name cannot be decoded using " + CHARSET + " encoding."); } } private <T extends ConcreteType> T cacheType(T type) { Hash hash = type.hash(); if (typesCache.containsKey(hash)) { return (T) typesCache.get(hash); } else { typesCache.put(hash, type); return type; } } }
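// --- Usage sketch (illustrative only, not part of ObjectsDb) ---
// Round-trips a simple value through the store, following the Merkle layout described in get():
// a value's root has two children, [type hash, data hash]. It assumes SString/SObject expose a
// hash() accessor like the ConcreteType instances used above; that accessor is an assumption.
static void roundTrip(ObjectsDb objectsDb) {
    SString written = objectsDb.string("hello");   // writes "hello" and obtains the String type
    SObject read = objectsDb.get(written.hash());  // two-child root, so get() returns type.newInstance(dataHash)
}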
package org.testng.reporters; import org.testng.IReporter; import org.testng.ISuite; import org.testng.ISuiteResult; import org.testng.ITestContext; import org.testng.ITestNGMethod; import org.testng.ITestResult; import org.testng.collections.ListMultiMap; import org.testng.collections.Lists; import org.testng.collections.Maps; import org.testng.internal.Utils; import org.testng.internal.annotations.Sets; import org.testng.xml.XmlSuite; import java.io.File; import java.io.PrintWriter; import java.io.StringWriter; import java.net.InetAddress; import java.net.UnknownHostException; import java.text.DecimalFormat; import java.util.Calendar; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; public class JUnitReportReporter implements IReporter { @Override public void generateReport(List<XmlSuite> xmlSuites, List<ISuite> suites, String defaultOutputDirectory) { Map<Class<?>, Set<ITestResult>> results = Maps.newHashMap(); Map<Class<?>, Set<ITestResult>> failedConfigurations = Maps.newHashMap(); ListMultiMap<Object, ITestResult> befores = Maps.newListMultiMap(); ListMultiMap<Object, ITestResult> afters = Maps.newListMultiMap(); for (ISuite suite : suites) { Map<String, ISuiteResult> suiteResults = suite.getResults(); for (ISuiteResult sr : suiteResults.values()) { ITestContext tc = sr.getTestContext(); addResults(tc.getPassedTests().getAllResults(), results); addResults(tc.getFailedTests().getAllResults(), results); addResults(tc.getSkippedTests().getAllResults(), results); addResults(tc.getFailedConfigurations().getAllResults(), failedConfigurations); for (ITestResult tr : tc.getPassedConfigurations().getAllResults()) { if (tr.getMethod().isBeforeMethodConfiguration()) { befores.put(tr.getInstance(), tr); } if (tr.getMethod().isAfterMethodConfiguration()) { afters.put(tr.getInstance(), tr); } } } } // A list of iterators for all the passed configuration, explanation below // ListMultiMap<Class<?>, ITestResult> beforeConfigurations = Maps.newListMultiMap(); // ListMultiMap<Class<?>, ITestResult> afterConfigurations = Maps.newListMultiMap(); // for (Map.Entry<Class<?>, Set<ITestResult>> es : passedConfigurations.entrySet()) { // for (ITestResult tr : es.getValue()) { // ITestNGMethod method = tr.getMethod(); // if (method.isBeforeMethodConfiguration()) { // beforeConfigurations.put(method.getRealClass(), tr); // if (method.isAfterMethodConfiguration()) { // afterConfigurations.put(method.getRealClass(), tr); // Map<Object, Iterator<ITestResult>> befores = Maps.newHashMap(); // for (Map.Entry<Class<?>, List<ITestResult>> es : beforeConfigurations.getEntrySet()) { // List<ITestResult> tr = es.getValue(); // for (ITestResult itr : es.getValue()) { // Map<Class<?>, Iterator<ITestResult>> afters = Maps.newHashMap(); // for (Map.Entry<Class<?>, List<ITestResult>> es : afterConfigurations.getEntrySet()) { // afters.put(es.getKey(), es.getValue().iterator()); for (Map.Entry<Class<?>, Set<ITestResult>> entry : results.entrySet()) { Class<?> cls = entry.getKey(); Properties p1 = new Properties(); p1.setProperty("name", cls.getName()); Date timeStamp = Calendar.getInstance().getTime(); p1.setProperty(XMLConstants.ATTR_TIMESTAMP, timeStamp.toGMTString()); List<TestTag> testCases = Lists.newArrayList(); int failures = 0; int errors = 0; int testCount = 0; float totalTime = 0; for (ITestResult tr: entry.getValue()) { TestTag testTag = new TestTag(); boolean isSuccess = tr.getStatus() == ITestResult.SUCCESS; if (! 
isSuccess) { if (tr.getThrowable() instanceof AssertionError) { errors++; } else { failures++; } } Properties p2 = new Properties(); p2.setProperty("classname", cls.getName()); p2.setProperty("name", getTestName(tr)); long time = tr.getEndMillis() - tr.getStartMillis(); time += getNextConfiguration(befores, tr); time += getNextConfiguration(afters, tr); p2.setProperty("time", "" + formatTime(time)); Throwable t = getThrowable(tr, failedConfigurations); if (! isSuccess && t != null) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); t.printStackTrace(pw); testTag.message = t.getMessage(); testTag.type = t.getClass().getName(); testTag.stackTrace = sw.toString(); testTag.errorTag = tr.getThrowable() instanceof AssertionError ? "error" : "failure"; } totalTime += time; testCount++; testTag.properties = p2; testCases.add(testTag); } p1.setProperty("failures", "" + failures); p1.setProperty("errors", "" + errors); p1.setProperty("name", cls.getName()); p1.setProperty("tests", "" + testCount); p1.setProperty("time", "" + formatTime(totalTime)); try { p1.setProperty(XMLConstants.ATTR_HOSTNAME, InetAddress.getLocalHost().getHostName()); } catch (UnknownHostException e) { // ignore } // Now that we have all the information we need, generate the file XMLStringBuffer xsb = new XMLStringBuffer(); xsb.addComment("Generated by " + getClass().getName()); xsb.push("testsuite", p1); for (TestTag testTag : testCases) { if (testTag.stackTrace == null) { xsb.addEmptyElement("testcase", testTag.properties); } else { xsb.push("testcase", testTag.properties); Properties p = new Properties(); if (testTag.message != null) { p.setProperty("message", testTag.message); } p.setProperty("type", testTag.type); xsb.push(testTag.errorTag, p); xsb.addCDATA(testTag.stackTrace); xsb.pop(testTag.errorTag); xsb.pop("testcase"); } } xsb.pop("testsuite"); String outputDirectory = defaultOutputDirectory + File.separator + "junitreports"; Utils.writeFile(outputDirectory, getFileName(cls), xsb.toXML()); } // System.out.println(xsb.toXML()); // System.out.println(""); } /** * Add the time of the configuration method to this test method. * * The only problem with this method is that the timing of a test method * might not be added to the time of the same configuration method that ran before * it but since they should all be equivalent, this should never be an issue. */ private long getNextConfiguration(ListMultiMap<Object, ITestResult> configurations, ITestResult tr) { long result = 0; List<ITestResult> confResults = configurations.get(tr.getInstance()); Map<ITestNGMethod, ITestResult> seen = Maps.newHashMap(); if (confResults != null) { for (ITestResult r : confResults) { if (! 
seen.containsKey(r.getMethod())) {
          result += r.getEndMillis() - r.getStartMillis();
          seen.put(r.getMethod(), r);
        }
      }
      confResults.removeAll(seen.values());
    }
    return result;
  }

  protected String getFileName(Class cls) {
    return "TEST-" + cls.getName() + ".xml";
  }

  protected String getTestName(ITestResult tr) {
    return tr.getMethod().getMethodName();
  }

  private String formatTime(float time) {
    // Format milliseconds as seconds with exactly three fraction digits (e.g. 1234 -> "1.234").
    DecimalFormat format = new DecimalFormat("#.###");
    format.setMinimumFractionDigits(3);
    return format.format(time / 1000.0f);
  }

  private Throwable getThrowable(ITestResult tr, Map<Class<?>, Set<ITestResult>> failedConfigurations) {
    Throwable result = tr.getThrowable();
    if (result == null && tr.getStatus() == ITestResult.SKIP) {
      // Attempt to grab the stack trace from the configuration failure
      for (Set<ITestResult> failures : failedConfigurations.values()) {
        for (ITestResult failure : failures) {
          // Naive implementation for now: eventually, we need to try to find
          // out if it's this failure that caused the skip (maybe by
          // seeing if the class of the configuration method is assignable to
          // the class of the test method), although that's not 100% foolproof.
          if (failure.getThrowable() != null) {
            return failure.getThrowable();
          }
        }
      }
    }
    return result;
  }

  class TestTag {
    public Properties properties;
    public String message;
    public String type;
    public String stackTrace;
    public String errorTag;
  }

  private void addResults(Set<ITestResult> allResults, Map<Class<?>, Set<ITestResult>> out) {
    for (ITestResult tr : allResults) {
      Class<?> cls = tr.getMethod().getTestClass().getRealClass();
      Set<ITestResult> l = out.get(cls);
      if (l == null) {
        l = Sets.newHashSet();
        out.put(cls, l);
      }
      l.add(tr);
    }
  }
}
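// --- Usage sketch (illustrative only, not part of JUnitReportReporter) ---
// Registers the reporter with a programmatic TestNG run so that a TEST-<class>.xml file is
// written under <outputDirectory>/junitreports for every test class. MyTest is a placeholder
// test class; the exact addListener overload available depends on the TestNG version.
static void runWithJUnitReports() {
    org.testng.TestNG testng = new org.testng.TestNG();
    testng.setTestClasses(new Class[] { MyTest.class });   // hypothetical test class
    testng.setOutputDirectory("target/testng-output");     // becomes defaultOutputDirectory above
    testng.addListener(new JUnitReportReporter());
    testng.run();
}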
package org.vaadin.viritin.button;

import com.vaadin.server.FileDownloader;
import com.vaadin.server.Resource;
import com.vaadin.server.StreamResource;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

/**
 * A helper class to implement typical file downloads.
 * <p>
 * With this class you'll get rid of lots of boilerplate code from your
 * application. It also inverts the somewhat cumbersome input-output stream API in
 * Vaadin, so normally you just "wire" this button to your backend method that
 * writes your resource to an OutputStream (instead of playing around with piped
 * streams or storing resources temporarily in memory). Example of usage:
 * <pre><code>
 * new DownloadButton(invoice::toPdf).setFileName("invoice.pdf")
 * </code></pre>
 * <p>
 * The button extension hooks FileDownloader extension internally and inverts
 * the cumbersome default Vaadin API.
 * <p>
 * The writing of the response is also spawned to a separate thread, so in case your
 * resource generation takes lots of time, the UI won't block.
 */
public class DownloadButton extends MButton {

    public interface ContentWriter {
        void write(OutputStream stream);
    }

    private ContentWriter writer;

    private final StreamResource streamResource = new StreamResource(
            new StreamResource.StreamSource() {

                @Override
                public InputStream getStream() {
                    try {
                        // Pipe the writer's output back to Vaadin as an InputStream.
                        final PipedOutputStream out = new PipedOutputStream();
                        final PipedInputStream in = new PipedInputStream(out);
                        writeResponce(out);
                        return in;
                    } catch (IOException ex) {
                        throw new RuntimeException(ex);
                    }
                }
            }, "file");

    /**
     * Constructs a new Download button without ContentWriter. Be sure to set
     * the ContentWriter or override its getter, before the instance is actually
     * used.
     */
    public DownloadButton() {
        new FileDownloader(streamResource).extend(this);
    }

    public DownloadButton(ContentWriter writer) {
        this();
        this.writer = writer;
    }

    /**
     * By default this just spawns a new raw thread to produce the input. For strict
     * Java EE environments this might not suit, so override it and use an executor
     * service.
     *
     * @param out the output stream where the output is targeted
     */
    protected void writeResponce(final PipedOutputStream out) {
        new Thread() {

            @Override
            public void run() {
                try {
                    getWriter().write(out);
                    out.close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        }.start();
    }

    public ContentWriter getWriter() {
        return writer;
    }

    public String getMimeType() {
        return streamResource.getMIMEType();
    }

    public DownloadButton setMimeType(String mimeType) {
        streamResource.setMIMEType(mimeType);
        return this;
    }

    public DownloadButton setCacheTime(long cacheTime) {
        streamResource.setCacheTime(cacheTime);
        return this;
    }

    public DownloadButton setWriter(ContentWriter writer) {
        this.writer = writer;
        return this;
    }

    public String getFileName() {
        return streamResource.getFilename();
    }

    public DownloadButton setFileName(String fileName) {
        streamResource.setFilename(fileName);
        return this;
    }

    @Override
    public DownloadButton withIcon(Resource icon) {
        setIcon(icon);
        return this;
    }

}
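// --- Usage sketch (illustrative only, not part of DownloadButton) ---
// Mirrors the javadoc example above: the button is wired straight to a backend method that
// writes the content to the given OutputStream. `ReportService`, its writeCsv(OutputStream)
// method and the `layout` parameter are hypothetical names used only for illustration.
void addCsvDownload(com.vaadin.ui.VerticalLayout layout, ReportService reportService) {
    DownloadButton csvButton = new DownloadButton(out -> reportService.writeCsv(out))
            .setFileName("report.csv")   // name offered to the browser
            .setMimeType("text/csv");    // content type of the generated stream
    layout.addComponent(csvButton);
}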
package picard.sam.markduplicates; import htsjdk.samtools.*; import htsjdk.samtools.DuplicateScoringStrategy.ScoringStrategy; import htsjdk.samtools.util.*; import org.broadinstitute.barclay.argparser.Argument; import org.broadinstitute.barclay.argparser.CommandLineProgramProperties; import org.broadinstitute.barclay.help.DocumentedFeature; import picard.PicardException; import picard.cmdline.programgroups.ReadDataManipulationProgramGroup; import picard.sam.DuplicationMetrics; import picard.sam.markduplicates.util.*; import picard.sam.util.RepresentativeReadIndexer; import java.io.File; import java.util.Objects; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.Map; /** * A better duplication marking algorithm that handles all cases including clipped * and gapped alignments. * * @author Tim Fennell */ @CommandLineProgramProperties( summary = MarkDuplicates.USAGE_SUMMARY + MarkDuplicates.USAGE_DETAILS, oneLineSummary = MarkDuplicates.USAGE_SUMMARY, programGroup = ReadDataManipulationProgramGroup.class) @DocumentedFeature public class MarkDuplicates extends AbstractMarkDuplicatesCommandLineProgram { static final String USAGE_SUMMARY = "Identifies duplicate reads. "; static final String USAGE_DETAILS = "<p>This tool locates and tags duplicate reads in a SAM, BAM or CRAM file, where duplicate reads are " + "defined as originating from a single fragment of DNA. Duplicates can arise during sample preparation e.g. library " + "construction using PCR. See also " + "<a href='https://broadinstitute.github.io/picard/command-line-overview.html#EstimateLibraryComplexity'>EstimateLibraryComplexity</a>" + " for additional notes on PCR duplication artifacts. Duplicate reads can also result from a single amplification cluster, " + "incorrectly detected as multiple clusters by the optical sensor of the sequencing instrument. These duplication artifacts are " + "referred to as optical duplicates.</p>" + "" + "<p>The MarkDuplicates tool works by comparing sequences in the 5 prime positions of both reads and read-pairs in a SAM/BAM file. " + "A BARCODE_TAG option is available to facilitate duplicate marking using molecular barcodes. After duplicate reads are" + " collected, the tool differentiates the primary and duplicate reads using an algorithm that ranks reads by the sums " + "of their base-quality scores (default method). Note that this is different from directly checking if the sequences match, which " + "MarkDuplicates does not do.</p> " + "" + "<p>The tool's main output is a new SAM, BAM or CRAM file, in which duplicates have been identified in the SAM flags field for each" + " read. Duplicates are marked with the hexadecimal value of 0x0400, which corresponds to a decimal value of 1024. " + "If you are not familiar with this type of annotation, please see the following " + "<a href='https: "" + "<p>Although the bitwise flag annotation indicates whether a read was marked as a duplicate, it does not identify the type of " + "duplicate. To do this, a new tag called the duplicate type (DT) tag was recently added as an optional output in " + "the 'optional field' section of a SAM/BAM/CRAM file. Invoking the TAGGING_POLICY option," + " you can instruct the program to mark all the duplicates (All), only the optical duplicates (OpticalOnly), or no " + "duplicates (DontTag). 
The records within the output of a SAM/BAM/CRAM file will have values for the 'DT' tag (depending on the invoked " + "TAGGING_POLICY), as either library/PCR-generated duplicates (LB), or sequencing-platform artifact duplicates (SQ). " + "This tool uses the READ_NAME_REGEX and the OPTICAL_DUPLICATE_PIXEL_DISTANCE options as the primary methods to identify " + "and differentiate duplicate types. Set READ_NAME_REGEX to null to skip optical duplicate detection, e.g. for RNA-seq " + "or other data where duplicate sets are extremely large and estimating library complexity is not an aim. " + "Note that without optical duplicate counts, library size estimation will be inaccurate.</p> " + "<p>MarkDuplicates also produces a metrics file indicating the numbers of duplicates for both single- and paired-end reads.</p> " + "<p>The program can take either coordinate-sorted or query-sorted inputs, however the behavior is slightly different. " + "When the input is coordinate-sorted, unmapped mates of mapped records and supplementary/secondary alignments are not " + "marked as duplicates. However, when the input is query-sorted (actually query-grouped), " + "then unmapped mates and secondary/supplementary reads are not excluded from the duplication test and can be" + " marked as duplicate reads.</p> " + "<p>If desired, duplicates can be removed using the REMOVE_DUPLICATE and REMOVE_SEQUENCING_DUPLICATES options.</p>" + "" + "<h4>Usage example:</h4>" + "<pre>" + "java -jar picard.jar MarkDuplicates \\<br />" + " I=input.bam \\<br />" + " O=marked_duplicates.bam \\<br />" + " M=marked_dup_metrics.txt" + "</pre>" + "" + "Please see " + "<a href='http://broadinstitute.github.io/picard/picard-metric-definitions.html#DuplicationMetrics'>MarkDuplicates</a> " + "for detailed explanations of the output metrics." + "<hr />"; /** * Enum used to control how duplicates are flagged in the DT optional tag on each read. */ public enum DuplicateTaggingPolicy { DontTag, OpticalOnly, All } /** * The optional attribute in SAM/BAM/CRAM files used to store the duplicate type. */ public static final String DUPLICATE_TYPE_TAG = "DT"; /** * The duplicate type tag value for duplicate type: library. */ public static final String DUPLICATE_TYPE_LIBRARY = "LB"; /** * The duplicate type tag value for duplicate type: sequencing (optical & pad-hopping, or "co-localized"). */ public static final String DUPLICATE_TYPE_SEQUENCING = "SQ"; /** * The attribute in the SAM/BAM file used to store which read was selected as representative out of a duplicate set */ public static final String DUPLICATE_SET_INDEX_TAG = "DI"; /** * The attribute in the SAM/BAM file used to store the size of a duplicate set */ public static final String DUPLICATE_SET_SIZE_TAG = "DS"; /** * Enum for the possible values that a duplicate read can be tagged with in the DT attribute. */ public enum DuplicateType { LIBRARY(DUPLICATE_TYPE_LIBRARY), SEQUENCING(DUPLICATE_TYPE_SEQUENCING); private final String code; DuplicateType(final String code) { this.code = code; } public String code() { return this.code; } } private final Log log = Log.getInstance(MarkDuplicates.class); /** * If more than this many sequences in SAM file, don't spill to disk because there will not * be enough file handles. */ @Argument(shortName = "MAX_SEQS", doc = "This option is obsolete. 
ReadEnds will always be spilled to disk.") public int MAX_SEQUENCES_FOR_DISK_READ_ENDS_MAP = 50000; @Argument(shortName = "MAX_FILE_HANDLES", doc = "Maximum number of file handles to keep open when spilling read ends to disk. " + "Set this number a little lower than the per-process maximum number of file that may be open. " + "This number can be found by executing the 'ulimit -n' command on a Unix system.") public int MAX_FILE_HANDLES_FOR_READ_ENDS_MAP = 8000; @Argument(doc = "This number, plus the maximum RAM available to the JVM, determine the memory footprint used by " + "some of the sorting collections. If you are running out of memory, try reducing this number.") public double SORTING_COLLECTION_SIZE_RATIO = 0.25; @Argument(doc = "Barcode SAM tag (ex. BC for 10X Genomics)", optional = true) public String BARCODE_TAG = null; @Argument(doc = "Read one barcode SAM tag (ex. BX for 10X Genomics)", optional = true) public String READ_ONE_BARCODE_TAG = null; @Argument(doc = "Read two barcode SAM tag (ex. BX for 10X Genomics)", optional = true) public String READ_TWO_BARCODE_TAG = null; @Argument(doc = "If a read appears in a duplicate set, add two tags. The first tag, DUPLICATE_SET_SIZE_TAG (DS), " + "indicates the size of the duplicate set. The smallest possible DS value is 2 which occurs when two " + "reads map to the same portion of the reference only one of which is marked as duplicate. The second " + "tag, DUPLICATE_SET_INDEX_TAG (DI), represents a unique identifier for the duplicate set to which the " + "record belongs. This identifier is the index-in-file of the representative read that was selected out " + "of the duplicate set.", optional = true) public boolean TAG_DUPLICATE_SET_MEMBERS = false; @Argument(doc = "If true remove 'optical' duplicates and other duplicates that appear to have arisen from the " + "sequencing process instead of the library preparation process, even if REMOVE_DUPLICATES is false. " + "If REMOVE_DUPLICATES is true, all duplicates are removed and this option is ignored.") public boolean REMOVE_SEQUENCING_DUPLICATES = false; @Argument(doc = "Determines how duplicate types are recorded in the DT optional attribute.") public DuplicateTaggingPolicy TAGGING_POLICY = DuplicateTaggingPolicy.DontTag; @Argument(doc = "Clear DT tag from input SAM records. Should be set to false if input SAM doesn't have this tag. Default true") public boolean CLEAR_DT = true; @Argument(doc = "Treat UMIs as being duplex stranded. This option requires that the UMI consist of two equal length " + "strings that are separated by a hyphen (e.g. 'ATC-GTC'). Reads are considered duplicates if, in addition to standard " + "definition, have identical normalized UMIs. A UMI from the 'bottom' strand is normalized by swapping its content " + "around the hyphen (eg. ATC-GTC becomes GTC-ATC). A UMI from the 'top' strand is already normalized as it is. " + "Both reads from a read pair considered top strand if the read 1 unclipped 5' coordinate is less than the read " + "2 unclipped 5' coordinate. All chimeric reads and read fragments are treated as having come from the top strand. " + "With this option is it required that the BARCODE_TAG hold non-normalized UMIs. Default false.") public boolean DUPLEX_UMI = false; @Argument(doc = "SAM tag to uniquely identify the molecule from which a read was derived. Use of this option requires that " + "the BARCODE_TAG option be set to a non null value. 
Default null.", optional = true) public String MOLECULAR_IDENTIFIER_TAG = null; private SortingCollection<ReadEndsForMarkDuplicates> pairSort; private SortingCollection<ReadEndsForMarkDuplicates> fragSort; private SortingLongCollection duplicateIndexes; private SortingLongCollection opticalDuplicateIndexes; private SortingCollection<RepresentativeReadIndexer> representativeReadIndicesForDuplicates; private int numDuplicateIndices = 0; static private final long NO_SUCH_INDEX = Long.MAX_VALUE; // needs to be large so that that >= test fails for query-sorted traversal protected LibraryIdGenerator libraryIdGenerator = null; // this is initialized in buildSortedReadEndLists private int getReadOneBarcodeValue(final SAMRecord record) { return EstimateLibraryComplexity.getReadBarcodeValue(record, READ_ONE_BARCODE_TAG); } private int getReadTwoBarcodeValue(final SAMRecord record) { return EstimateLibraryComplexity.getReadBarcodeValue(record, READ_TWO_BARCODE_TAG); } public MarkDuplicates() { DUPLICATE_SCORING_STRATEGY = ScoringStrategy.SUM_OF_BASE_QUALITIES; } /** * Main work method. Reads the SAM file once and collects sorted information about * the 5' ends of both ends of each read (or just one end in the case of pairs). * Then makes a pass through those determining duplicates before re-reading the * input file and writing it out with duplication flags set correctly. */ protected int doWork() { IOUtil.assertInputsAreValid(INPUT); IOUtil.assertFileIsWritable(OUTPUT); IOUtil.assertFileIsWritable(METRICS_FILE); final boolean useBarcodes = (null != BARCODE_TAG || null != READ_ONE_BARCODE_TAG || null != READ_TWO_BARCODE_TAG); reportMemoryStats("Start of doWork"); log.info("Reading input file and constructing read end information."); buildSortedReadEndLists(useBarcodes); reportMemoryStats("After buildSortedReadEndLists"); generateDuplicateIndexes(useBarcodes, this.REMOVE_SEQUENCING_DUPLICATES || this.TAGGING_POLICY != DuplicateTaggingPolicy.DontTag); reportMemoryStats("After generateDuplicateIndexes"); log.info("Marking " + this.numDuplicateIndices + " records as duplicates."); if (this.READ_NAME_REGEX == null) { log.warn("Skipped optical duplicate cluster discovery; library size estimation may be inaccurate!"); } else { log.info("Found " + (this.libraryIdGenerator.getNumberOfOpticalDuplicateClusters()) + " optical duplicate clusters."); } final SamHeaderAndIterator headerAndIterator = openInputs(false); final SAMFileHeader header = headerAndIterator.header; final SAMFileHeader.SortOrder sortOrder = header.getSortOrder(); final SAMFileHeader outputHeader = header.clone(); log.info("Reads are assumed to be ordered by: " + sortOrder); // Setting the ASSUME_SORT_ORDER to equal queryname is understood to mean that the input is // queryname **grouped**. So that's what we set the output order to be, so that the validation will pass if (ASSUME_SORT_ORDER == SAMFileHeader.SortOrder.queryname) { outputHeader.setGroupOrder(SAMFileHeader.GroupOrder.query); outputHeader.setSortOrder(SAMFileHeader.SortOrder.unknown); log.info("Output will not be re-sorted. 
Output header will state SO:unknown GO:query"); } if (ASSUME_SORT_ORDER == null && sortOrder != SAMFileHeader.SortOrder.coordinate && sortOrder != SAMFileHeader.SortOrder.queryname || ASSUME_SORT_ORDER != null && ASSUME_SORT_ORDER != SAMFileHeader.SortOrder.coordinate && ASSUME_SORT_ORDER != SAMFileHeader.SortOrder.queryname) { throw new PicardException("This program requires input that are either coordinate or query sorted (according to the header, or at least ASSUME_SORT_ORDER and the content.) " + "Found ASSUME_SORT_ORDER=" + ASSUME_SORT_ORDER + " and header sortorder=" + sortOrder); } COMMENT.forEach(outputHeader::addComment); // Key: previous PG ID on a SAM Record (or null). Value: New PG ID to replace it. final Map<String, String> chainedPgIds = getChainedPgIds(outputHeader); try (SAMFileWriter out = new SAMFileWriterFactory().makeWriter(outputHeader, true, OUTPUT,REFERENCE_SEQUENCE)){ // Now copy over the file while marking all the necessary indexes as duplicates long recordInFileIndex = 0; long nextOpticalDuplicateIndex = this.opticalDuplicateIndexes != null && this.opticalDuplicateIndexes.hasNext() ? this.opticalDuplicateIndexes.next() : NO_SUCH_INDEX; long nextDuplicateIndex = (this.duplicateIndexes.hasNext() ? this.duplicateIndexes.next() : NO_SUCH_INDEX); // initialize variables for optional representative read tagging CloseableIterator<RepresentativeReadIndexer> representativeReadIterator = null; RepresentativeReadIndexer rri = null; int representativeReadIndexInFile = -1; int duplicateSetSize = -1; int nextRepresentativeIndex = -1; if (TAG_DUPLICATE_SET_MEMBERS) { representativeReadIterator = this.representativeReadIndicesForDuplicates.iterator(); if (representativeReadIterator.hasNext()) { rri = representativeReadIterator.next(); nextRepresentativeIndex = rri.readIndexInFile; representativeReadIndexInFile = rri.representativeReadIndexInFile; duplicateSetSize = rri.setSize; } } final ProgressLogger progress = new ProgressLogger(log, (int) 1e7, "Written"); final CloseableIterator<SAMRecord> iterator = headerAndIterator.iterator; String duplicateQueryName = null; while (iterator.hasNext()) { final SAMRecord rec = iterator.next(); DuplicationMetrics metrics = AbstractMarkDuplicatesCommandLineProgram.addReadToLibraryMetrics(rec, header, libraryIdGenerator); // Now try and figure out the next duplicate index (if going by coordinate. if going by query name, only do this // if the query name has changed. 
                nextDuplicateIndex = nextIndexIfNeeded(sortOrder, recordInFileIndex, nextDuplicateIndex, duplicateQueryName, rec, this.duplicateIndexes);

                final boolean isDuplicate = recordInFileIndex == nextDuplicateIndex ||
                        (sortOrder == SAMFileHeader.SortOrder.queryname &&
                                recordInFileIndex > nextDuplicateIndex &&
                                rec.getReadName().equals(duplicateQueryName));

                if (isDuplicate) {
                    rec.setDuplicateReadFlag(true);
                    AbstractMarkDuplicatesCommandLineProgram.addDuplicateReadToMetrics(rec, metrics);
                } else {
                    rec.setDuplicateReadFlag(false);
                }

                nextOpticalDuplicateIndex = nextIndexIfNeeded(sortOrder, recordInFileIndex, nextOpticalDuplicateIndex, duplicateQueryName, rec, this.opticalDuplicateIndexes);

                final boolean isOpticalDuplicate = sortOrder == SAMFileHeader.SortOrder.queryname &&
                        recordInFileIndex > nextOpticalDuplicateIndex &&
                        rec.getReadName().equals(duplicateQueryName) ||
                        recordInFileIndex == nextOpticalDuplicateIndex;

                if (CLEAR_DT) {
                    rec.setAttribute(DUPLICATE_TYPE_TAG, null);
                }

                if (this.TAGGING_POLICY != DuplicateTaggingPolicy.DontTag && rec.getDuplicateReadFlag()) {
                    if (isOpticalDuplicate) {
                        rec.setAttribute(DUPLICATE_TYPE_TAG, DuplicateType.SEQUENCING.code());
                    } else if (this.TAGGING_POLICY == DuplicateTaggingPolicy.All) {
                        rec.setAttribute(DUPLICATE_TYPE_TAG, DuplicateType.LIBRARY.code());
                    }
                }

                // Tag any read pair that was in a duplicate set with the duplicate set size and a representative read name
                if (TAG_DUPLICATE_SET_MEMBERS) {
                    final boolean needNextRepresentativeIndex = recordInFileIndex > nextRepresentativeIndex;
                    if (needNextRepresentativeIndex && representativeReadIterator.hasNext()) {
                        rri = representativeReadIterator.next();
                        nextRepresentativeIndex = rri.readIndexInFile;
                        representativeReadIndexInFile = rri.representativeReadIndexInFile;
                        duplicateSetSize = rri.setSize;
                    }
                    final boolean isInDuplicateSet = recordInFileIndex == nextRepresentativeIndex ||
                            (sortOrder == SAMFileHeader.SortOrder.queryname &&
                                    recordInFileIndex > nextDuplicateIndex);
                    if (isInDuplicateSet) {
                        if (!rec.isSecondaryOrSupplementary() && !rec.getReadUnmappedFlag()) {
                            if (TAG_DUPLICATE_SET_MEMBERS) {
                                rec.setAttribute(DUPLICATE_SET_INDEX_TAG, representativeReadIndexInFile);
                                rec.setAttribute(DUPLICATE_SET_SIZE_TAG, duplicateSetSize);
                            }
                        }
                    }
                }

                // Set MOLECULAR_IDENTIFIER_TAG for SAMRecord rec
                if (BARCODE_TAG != null) {
                    UmiUtil.setMolecularIdentifier(rec, "", MOLECULAR_IDENTIFIER_TAG, DUPLEX_UMI);
                }

                // Note, duplicateQueryName must be incremented after we have marked both optical and sequencing duplicates for queryname sorted files.
if (isDuplicate) { duplicateQueryName = rec.getReadName(); } // Output the record if desired and bump the record index recordInFileIndex++; if (this.REMOVE_DUPLICATES && rec.getDuplicateReadFlag()) { continue; } if (this.REMOVE_SEQUENCING_DUPLICATES && isOpticalDuplicate) { continue; } if (PROGRAM_RECORD_ID != null && pgTagArgumentCollection.ADD_PG_TAG_TO_READS) { rec.setAttribute(SAMTag.PG.name(), chainedPgIds.get(rec.getStringAttribute(SAMTag.PG.name()))); } out.addAlignment(rec); progress.record(rec); } // remember to close the inputs log.info("Writing complete. Closing input iterator."); iterator.close(); log.info("Duplicate Index cleanup."); this.duplicateIndexes.cleanup(); if (TAG_DUPLICATE_SET_MEMBERS) { log.info("Representative read Index cleanup."); this.representativeReadIndicesForDuplicates.cleanup(); } log.info("Getting Memory Stats."); reportMemoryStats("Before output close"); } log.info("Closed outputs. Getting more Memory Stats."); reportMemoryStats("After output close"); // Write out the metrics finalizeAndWriteMetrics(libraryIdGenerator, getMetricsFile(), METRICS_FILE); return 0; } /** * package-visible for testing */ long numOpticalDuplicates() { return ((long) this.libraryIdGenerator.getOpticalDuplicatesByLibraryIdMap().getSumOfValues()); } // cast as long due to returning a double /** * Print out some quick JVM memory stats. */ private void reportMemoryStats(final String stage) { System.gc(); final Runtime runtime = Runtime.getRuntime(); log.info(stage + " freeMemory: " + runtime.freeMemory() + "; totalMemory: " + runtime.totalMemory() + "; maxMemory: " + runtime.maxMemory()); } /** * Goes through all the records in a file and generates a set of ReadEndsForMarkDuplicates objects that * hold the necessary information (reference sequence, 5' read coordinate) to do * duplication, caching to disk as necessary to sort them. */ private void buildSortedReadEndLists(final boolean useBarcodes) { final int sizeInBytes; if (useBarcodes) { sizeInBytes = ReadEndsForMarkDuplicatesWithBarcodes.getSizeOf(); } else { sizeInBytes = ReadEndsForMarkDuplicates.getSizeOf(); } MAX_RECORDS_IN_RAM = (int) (Runtime.getRuntime().maxMemory() / sizeInBytes) / 2; final int maxInMemory = (int) ((Runtime.getRuntime().maxMemory() * SORTING_COLLECTION_SIZE_RATIO) / sizeInBytes); log.info("Will retain up to " + maxInMemory + " data points before spilling to disk."); final ReadEndsForMarkDuplicatesCodec fragCodec, pairCodec, diskCodec; if (useBarcodes) { fragCodec = new ReadEndsForMarkDuplicatesWithBarcodesCodec(); pairCodec = new ReadEndsForMarkDuplicatesWithBarcodesCodec(); diskCodec = new ReadEndsForMarkDuplicatesWithBarcodesCodec(); } else { fragCodec = new ReadEndsForMarkDuplicatesCodec(); pairCodec = new ReadEndsForMarkDuplicatesCodec(); diskCodec = new ReadEndsForMarkDuplicatesCodec(); } this.pairSort = SortingCollection.newInstance(ReadEndsForMarkDuplicates.class, pairCodec, new ReadEndsMDComparator(useBarcodes), maxInMemory, TMP_DIR); this.fragSort = SortingCollection.newInstance(ReadEndsForMarkDuplicates.class, fragCodec, new ReadEndsMDComparator(useBarcodes), maxInMemory, TMP_DIR); final SamHeaderAndIterator headerAndIterator = openInputs(true); final SAMFileHeader.SortOrder assumedSortOrder = headerAndIterator.header.getSortOrder(); final SAMFileHeader header = headerAndIterator.header; final ReadEndsForMarkDuplicatesMap tmp = assumedSortOrder == SAMFileHeader.SortOrder.queryname ? 
new MemoryBasedReadEndsForMarkDuplicatesMap() : new DiskBasedReadEndsForMarkDuplicatesMap(MAX_FILE_HANDLES_FOR_READ_ENDS_MAP, diskCodec); long index = 0; final ProgressLogger progress = new ProgressLogger(log, (int) 1e6, "Read"); final CloseableIterator<SAMRecord> iterator = headerAndIterator.iterator; if (null == this.libraryIdGenerator) { this.libraryIdGenerator = new LibraryIdGenerator(header); } String duplicateQueryName = null; long duplicateIndex = NO_SUCH_INDEX; while (iterator.hasNext()) { final SAMRecord rec = iterator.next(); // This doesn't have anything to do with building sorted ReadEnd lists, but it can be done in the same pass // over the input if (PROGRAM_RECORD_ID != null) { // Gather all PG IDs seen in merged input files in first pass. These are gathered for two reasons: // - to know how many different PG records to create to represent this program invocation. // - to know what PG IDs are already used to avoid collisions when creating new ones. // Note that if there are one or more records that do not have a PG tag, then a null value // will be stored in this set. pgIdsSeen.add(rec.getStringAttribute(SAMTag.PG.name())); } // If working in query-sorted, need to keep index of first record with any given query-name. if (assumedSortOrder == SAMFileHeader.SortOrder.queryname && !rec.getReadName().equals(duplicateQueryName)) { duplicateQueryName = rec.getReadName(); duplicateIndex = index; } if (rec.getReadUnmappedFlag()) { if (rec.getReferenceIndex() == -1 && assumedSortOrder == SAMFileHeader.SortOrder.coordinate) { // When we hit the unmapped reads with no coordinate, no reason to continue (only in coordinate sort). break; } // If this read is unmapped but sorted with the mapped reads, just skip it. } else if (!rec.isSecondaryOrSupplementary()) { final long indexForRead = assumedSortOrder == SAMFileHeader.SortOrder.queryname ? duplicateIndex : index; final ReadEndsForMarkDuplicates fragmentEnd = buildReadEnds(header, indexForRead, rec, useBarcodes); this.fragSort.add(fragmentEnd); if (rec.getReadPairedFlag() && !rec.getMateUnmappedFlag()) { final StringBuilder key = new StringBuilder(); key.append(ReservedTagConstants.READ_GROUP_ID); key.append(rec.getReadName()); ReadEndsForMarkDuplicates pairedEnds = tmp.remove(rec.getReferenceIndex(), key.toString()); // See if we've already seen the first end or not if (pairedEnds == null) { // at this point pairedEnds and fragmentEnd are the same, but we need to make // a copy since pairedEnds will be modified when the mate comes along. pairedEnds = fragmentEnd.clone(); tmp.put(pairedEnds.read2ReferenceIndex, key.toString(), pairedEnds); } else { final int matesRefIndex = fragmentEnd.read1ReferenceIndex; final int matesCoordinate = fragmentEnd.read1Coordinate; // Set orientationForOpticalDuplicates, which always goes by the first then the second end for the strands. NB: must do this // before updating the orientation later. 
if (rec.getFirstOfPairFlag()) { pairedEnds.orientationForOpticalDuplicates = ReadEnds.getOrientationByte(rec.getReadNegativeStrandFlag(), pairedEnds.orientation == ReadEnds.R); if (useBarcodes) { ((ReadEndsForMarkDuplicatesWithBarcodes) pairedEnds).readOneBarcode = getReadOneBarcodeValue(rec); } } else { pairedEnds.orientationForOpticalDuplicates = ReadEnds.getOrientationByte(pairedEnds.orientation == ReadEnds.R, rec.getReadNegativeStrandFlag()); if (useBarcodes) { ((ReadEndsForMarkDuplicatesWithBarcodes) pairedEnds).readTwoBarcode = getReadTwoBarcodeValue(rec); } } // If the other read is actually later, simply add the other read's data as read2, else flip the reads if (matesRefIndex > pairedEnds.read1ReferenceIndex || (matesRefIndex == pairedEnds.read1ReferenceIndex && matesCoordinate >= pairedEnds.read1Coordinate)) { pairedEnds.read2ReferenceIndex = matesRefIndex; pairedEnds.read2Coordinate = matesCoordinate; pairedEnds.read2IndexInFile = indexForRead; pairedEnds.orientation = ReadEnds.getOrientationByte(pairedEnds.orientation == ReadEnds.R, rec.getReadNegativeStrandFlag()); // if the two read ends are in the same position, pointing in opposite directions, // the orientation is undefined and the procedure above // will depend on the order of the reads in the file. // To avoid this, we set it explicitly (to FR): if (pairedEnds.read2ReferenceIndex == pairedEnds.read1ReferenceIndex && pairedEnds.read2Coordinate == pairedEnds.read1Coordinate && pairedEnds.orientation == ReadEnds.RF) { pairedEnds.orientation = ReadEnds.FR; } } else { pairedEnds.read2ReferenceIndex = pairedEnds.read1ReferenceIndex; pairedEnds.read2Coordinate = pairedEnds.read1Coordinate; pairedEnds.read2IndexInFile = pairedEnds.read1IndexInFile; pairedEnds.read1ReferenceIndex = matesRefIndex; pairedEnds.read1Coordinate = matesCoordinate; pairedEnds.read1IndexInFile = indexForRead; pairedEnds.orientation = ReadEnds.getOrientationByte(rec.getReadNegativeStrandFlag(), pairedEnds.orientation == ReadEnds.R); } pairedEnds.score += DuplicateScoringStrategy.computeDuplicateScore(rec, this.DUPLICATE_SCORING_STRATEGY); this.pairSort.add(pairedEnds); } } } // Print out some stats every 1m reads ++index; if (progress.record(rec)) { log.info("Tracking " + tmp.size() + " as yet unmatched pairs. " + tmp.sizeInRam() + " records in RAM."); } } log.info("Read " + index + " records. " + tmp.size() + " pairs never matched."); iterator.close(); // Tell these collections to free up memory if possible. this.pairSort.doneAdding(); this.fragSort.doneAdding(); } /** * Builds a read ends object that represents a single read. */ private ReadEndsForMarkDuplicates buildReadEnds(final SAMFileHeader header, final long index, final SAMRecord rec, final boolean useBarcodes) { final ReadEndsForMarkDuplicates ends; if (useBarcodes) { ends = new ReadEndsForMarkDuplicatesWithBarcodes(); } else { ends = new ReadEndsForMarkDuplicates(); } ends.read1ReferenceIndex = rec.getReferenceIndex(); ends.read1Coordinate = rec.getReadNegativeStrandFlag() ? rec.getUnclippedEnd() : rec.getUnclippedStart(); ends.orientation = rec.getReadNegativeStrandFlag() ? 
ReadEnds.R : ReadEnds.F; ends.read1IndexInFile = index; ends.score = DuplicateScoringStrategy.computeDuplicateScore(rec, this.DUPLICATE_SCORING_STRATEGY); // Doing this lets the ends object know that it's part of a pair if (rec.getReadPairedFlag() && !rec.getMateUnmappedFlag()) { ends.read2ReferenceIndex = rec.getMateReferenceIndex(); } // Fill in the library ID ends.libraryId = libraryIdGenerator.getLibraryId(rec); // Fill in the location information for optical duplicates if (this.opticalDuplicateFinder.addLocationInformation(rec.getReadName(), ends)) { // calculate the RG number (nth in list) ends.readGroup = 0; final String rg = (String) rec.getAttribute(ReservedTagConstants.READ_GROUP_ID); final List<SAMReadGroupRecord> readGroups = header.getReadGroups(); if (rg != null && readGroups != null) { for (final SAMReadGroupRecord readGroup : readGroups) { if (readGroup.getReadGroupId().equals(rg)) { break; } else { ends.readGroup++; } } } } if (useBarcodes) { final ReadEndsForMarkDuplicatesWithBarcodes endsWithBarcode = (ReadEndsForMarkDuplicatesWithBarcodes) ends; final String topStrandNormalizedUmi = UmiUtil.getTopStrandNormalizedUmi(rec, BARCODE_TAG, DUPLEX_UMI); endsWithBarcode.barcode = Objects.hash(topStrandNormalizedUmi); if (!rec.getReadPairedFlag() || rec.getFirstOfPairFlag()) { endsWithBarcode.readOneBarcode = getReadOneBarcodeValue(rec); } else { endsWithBarcode.readTwoBarcode = getReadTwoBarcodeValue(rec); } } return ends; } /** * Goes through the accumulated ReadEndsForMarkDuplicates objects and determines which of them are * to be marked as duplicates. */ private void generateDuplicateIndexes(final boolean useBarcodes, final boolean indexOpticalDuplicates) { final int entryOverhead; if (TAG_DUPLICATE_SET_MEMBERS) { // Memory requirements for RepresentativeReadIndexer: // three int entries + overhead: (3 * 4) + 4 = 16 bytes entryOverhead = 16; } else { entryOverhead = SortingLongCollection.SIZEOF; } // Keep this number from getting too large even if there is a huge heap. 
int maxInMemory = (int) Math.min((Runtime.getRuntime().maxMemory() * 0.25) / entryOverhead, (double) (Integer.MAX_VALUE - 5)); // If we're also tracking optical duplicates, reduce maxInMemory, since we'll need two sorting collections if (indexOpticalDuplicates) { maxInMemory /= ((entryOverhead + SortingLongCollection.SIZEOF) / entryOverhead); this.opticalDuplicateIndexes = new SortingLongCollection(maxInMemory, TMP_DIR.toArray(new File[TMP_DIR.size()])); } log.info("Will retain up to " + maxInMemory + " duplicate indices before spilling to disk."); this.duplicateIndexes = new SortingLongCollection(maxInMemory, TMP_DIR.toArray(new File[TMP_DIR.size()])); if (TAG_DUPLICATE_SET_MEMBERS) { final RepresentativeReadIndexerCodec representativeIndexCodec = new RepresentativeReadIndexerCodec(); this.representativeReadIndicesForDuplicates = SortingCollection.newInstance(RepresentativeReadIndexer.class, representativeIndexCodec, Comparator.comparing(read -> read.readIndexInFile), maxInMemory, TMP_DIR); } ReadEndsForMarkDuplicates firstOfNextChunk = null; final List<ReadEndsForMarkDuplicates> nextChunk = new ArrayList<>(200); // First just do the pairs log.info("Traversing read pair information and detecting duplicates."); for (final ReadEndsForMarkDuplicates next : this.pairSort) { if (firstOfNextChunk != null && areComparableForDuplicates(firstOfNextChunk, next, true, useBarcodes)) { nextChunk.add(next); } else { handleChunk(nextChunk); nextChunk.clear(); nextChunk.add(next); firstOfNextChunk = next; } } handleChunk(nextChunk); this.pairSort.cleanup(); this.pairSort = null; // Now deal with the fragments log.info("Traversing fragment information and detecting duplicates."); boolean containsPairs = false; boolean containsFrags = false; firstOfNextChunk = null; for (final ReadEndsForMarkDuplicates next : this.fragSort) { if (firstOfNextChunk != null && areComparableForDuplicates(firstOfNextChunk, next, false, useBarcodes)) { nextChunk.add(next); containsPairs = containsPairs || next.isPaired(); containsFrags = containsFrags || !next.isPaired(); } else { if (nextChunk.size() > 1 && containsFrags) { markDuplicateFragments(nextChunk, containsPairs); } nextChunk.clear(); nextChunk.add(next); firstOfNextChunk = next; containsPairs = next.isPaired(); containsFrags = !next.isPaired(); } } markDuplicateFragments(nextChunk, containsPairs); this.fragSort.cleanup(); this.fragSort = null; log.info("Sorting list of duplicate records."); this.duplicateIndexes.doneAddingStartIteration(); if (this.opticalDuplicateIndexes != null) { this.opticalDuplicateIndexes.doneAddingStartIteration(); } if (TAG_DUPLICATE_SET_MEMBERS) { this.representativeReadIndicesForDuplicates.doneAdding(); } } private void handleChunk(List<ReadEndsForMarkDuplicates> nextChunk) { if (nextChunk.size() > 1) { markDuplicatePairs(nextChunk); if (TAG_DUPLICATE_SET_MEMBERS) { addRepresentativeReadIndex(nextChunk); } } else if (nextChunk.size() == 1) { addSingletonToCount(libraryIdGenerator); } } private boolean areComparableForDuplicates(final ReadEndsForMarkDuplicates lhs, final ReadEndsForMarkDuplicates rhs, final boolean compareRead2, final boolean useBarcodes) { boolean areComparable = lhs.libraryId == rhs.libraryId; if (useBarcodes && areComparable) { // areComparable is useful here to avoid the casts below final ReadEndsForMarkDuplicatesWithBarcodes lhsWithBarcodes = (ReadEndsForMarkDuplicatesWithBarcodes) lhs; final ReadEndsForMarkDuplicatesWithBarcodes rhsWithBarcodes = (ReadEndsForMarkDuplicatesWithBarcodes) rhs; areComparable = 
            lhsWithBarcodes.barcode == rhsWithBarcodes.barcode &&
                    lhsWithBarcodes.readOneBarcode == rhsWithBarcodes.readOneBarcode &&
                    lhsWithBarcodes.readTwoBarcode == rhsWithBarcodes.readTwoBarcode;
        }
        if (areComparable) {
            areComparable = lhs.read1ReferenceIndex == rhs.read1ReferenceIndex &&
                    lhs.read1Coordinate == rhs.read1Coordinate &&
                    lhs.orientation == rhs.orientation;
        }
        if (areComparable && compareRead2) {
            areComparable = lhs.read2ReferenceIndex == rhs.read2ReferenceIndex &&
                    lhs.read2Coordinate == rhs.read2Coordinate;
        }
        return areComparable;
    }

    private void addIndexAsDuplicate(final long bamIndex) {
        this.duplicateIndexes.add(bamIndex);
        ++this.numDuplicateIndices;
    }

    private void addRepresentativeReadOfDuplicateSet(final long representativeReadIndexInFile, final int setSize,
                                                     final long read1IndexInFile) {
        final RepresentativeReadIndexer rri = new RepresentativeReadIndexer();
        rri.representativeReadIndexInFile = (int) representativeReadIndexInFile;
        rri.setSize = setSize;
        rri.readIndexInFile = (int) read1IndexInFile;
        this.representativeReadIndicesForDuplicates.add(rri);
    }

    /**
     * Takes a list of ReadEndsForMarkDuplicates objects and identifies the representative read based on
     * quality score. For all members of the duplicate set, add the read1 index-in-file of the representative
     * read to the records of the first and second in a pair. This value is used for
     * the 'DI' tag.
     */
    private void addRepresentativeReadIndex(final List<ReadEndsForMarkDuplicates> list) {
        short maxScore = 0;
        ReadEndsForMarkDuplicates best = null;

        /** All read ends should have orientation FF, FR, RF, or RR **/
        for (final ReadEndsForMarkDuplicates end : list) {
            if (end.score > maxScore || best == null) {
                maxScore = end.score;
                best = end;
            }
        }

        // for read name (for representative read name), add the last of the pair that was examined
        for (final ReadEndsForMarkDuplicates end : list) {
            addRepresentativeReadOfDuplicateSet(best.read1IndexInFile, list.size(), end.read1IndexInFile);
            addRepresentativeReadOfDuplicateSet(best.read1IndexInFile, list.size(), end.read2IndexInFile);
        }
    }

    /**
     * Takes a list of ReadEndsForMarkDuplicates objects and removes from it all objects that should
     * not be marked as duplicates. This assumes that the list contains objects representing pairs.
     */
    private void markDuplicatePairs(final List<ReadEndsForMarkDuplicates> list) {
        short maxScore = 0;
        ReadEndsForMarkDuplicates best = null;

        /** All read ends should have orientation FF, FR, RF, or RR **/
        for (final ReadEndsForMarkDuplicates end : list) {
            if (end.score > maxScore || best == null) {
                maxScore = end.score;
                best = end;
            }
        }

        if (this.READ_NAME_REGEX != null) {
            AbstractMarkDuplicatesCommandLineProgram.trackOpticalDuplicates(list, best, opticalDuplicateFinder, libraryIdGenerator);
        }

        for (final ReadEndsForMarkDuplicates end : list) {
            if (end != best) {
                addIndexAsDuplicate(end.read1IndexInFile);
                // in query-sorted case, these will be the same.
                // TODO: also in coordinate sorted, when one read is unmapped
                if (end.read2IndexInFile != end.read1IndexInFile) {
                    addIndexAsDuplicate(end.read2IndexInFile);
                }
                if (end.isOpticalDuplicate && this.opticalDuplicateIndexes != null) {
                    this.opticalDuplicateIndexes.add(end.read1IndexInFile);
                    // We expect end.read2IndexInFile==read1IndexInFile when we are in queryname sorted files, as the read-pairs
                    // will be sorted together and nextIndexIfNeeded() will only pull one index from opticalDuplicateIndexes.
                    // This means that in queryname sorted order we will only pull from the sorting collection once,
                    // whereas we would pull twice for coordinate sorted files.
                    if (end.read2IndexInFile != end.read1IndexInFile) {
                        this.opticalDuplicateIndexes.add(end.read2IndexInFile);
                    }
                }
            }
        }
    }

    /**
     * Method for deciding when to pull from the SortingLongCollection for the next read based on sorting order.
     * - If file is queryname sorted then we expect one index per pair of reads, so we only want to iterate when we
     * are no longer reading from that read-pair.
     * - If file is coordinate-sorted we want to base our iteration entirely on the indexes of both reads in the pair
     * <p>
     * This logic is applied to both Optical and Library duplicates
     *
     * @param sortOrder          Sort order for the underlying bam file
     * @param recordInFileIndex  Index of the current sam record rec
     * @param nextDuplicateIndex Index of next expected duplicate (optical or otherwise) in the file
     * @param lastQueryName      Name of the last read seen (for keeping queryname sorted groups together)
     * @param rec                Current record to compare against
     * @param duplicateIndexes   DuplicateIndexes collection to iterate over
     * @return the duplicate index after iteration
     */
    private long nextIndexIfNeeded(final SAMFileHeader.SortOrder sortOrder, final long recordInFileIndex,
                                   final long nextDuplicateIndex, final String lastQueryName, final SAMRecord rec,
                                   final SortingLongCollection duplicateIndexes) {
        // Manage the flagging of optical/sequencing duplicates
        // Possibly figure out the next opticalDuplicate index (if going by coordinate, if going by query name, only do this
        // if the query name has changed)
        final boolean needNextDuplicateIndex = recordInFileIndex > nextDuplicateIndex &&
                (sortOrder == SAMFileHeader.SortOrder.coordinate || !rec.getReadName().equals(lastQueryName));

        if (needNextDuplicateIndex) {
            return (duplicateIndexes.hasNext() ? duplicateIndexes.next() : NO_SUCH_INDEX);
        }
        return nextDuplicateIndex;
    }

    /**
     * Takes a list of ReadEndsForMarkDuplicates objects and removes from it all objects that should
     * not be marked as duplicates. This will set the duplicate index only for list items that are fragments.
     *
     * @param containsPairs true if the list also contains objects containing pairs, false otherwise.
     */
    private void markDuplicateFragments(final List<ReadEndsForMarkDuplicates> list, final boolean containsPairs) {
        if (containsPairs) {
            for (final ReadEndsForMarkDuplicates end : list) {
                if (!end.isPaired()) {
                    addIndexAsDuplicate(end.read1IndexInFile);
                }
            }
        } else {
            short maxScore = 0;
            ReadEndsForMarkDuplicates best = null;
            for (final ReadEndsForMarkDuplicates end : list) {
                if (end.score > maxScore || best == null) {
                    maxScore = end.score;
                    best = end;
                }
            }

            for (final ReadEndsForMarkDuplicates end : list) {
                if (end != best) {
                    addIndexAsDuplicate(end.read1IndexInFile);
                }
            }
        }
    }

    /**
     * Comparator for ReadEndsForMarkDuplicates that orders by read1 position then pair orientation then read2 position.
     */
    static class ReadEndsMDComparator implements Comparator<ReadEndsForMarkDuplicates> {

        final boolean useBarcodes;

        public ReadEndsMDComparator(final boolean useBarcodes) {
            this.useBarcodes = useBarcodes;
        }

        public int compare(final ReadEndsForMarkDuplicates lhs, final ReadEndsForMarkDuplicates rhs) {
            int compareDifference = lhs.libraryId - rhs.libraryId;
            if (useBarcodes) {
                final ReadEndsForMarkDuplicatesWithBarcodes lhsWithBarcodes = (ReadEndsForMarkDuplicatesWithBarcodes) lhs;
                final ReadEndsForMarkDuplicatesWithBarcodes rhsWithBarcodes = (ReadEndsForMarkDuplicatesWithBarcodes) rhs;
                if (compareDifference == 0) {
                    compareDifference = Integer.compare(lhsWithBarcodes.barcode, rhsWithBarcodes.barcode);
                }
                if (compareDifference == 0) {
                    compareDifference = Integer.compare(lhsWithBarcodes.readOneBarcode, rhsWithBarcodes.readOneBarcode);
                }
                if (compareDifference == 0) {
                    compareDifference = Integer.compare(lhsWithBarcodes.readTwoBarcode, rhsWithBarcodes.readTwoBarcode);
                }
            }
            if (compareDifference == 0) {
                compareDifference = lhs.read1ReferenceIndex - rhs.read1ReferenceIndex;
            }
            if (compareDifference == 0) {
                compareDifference = lhs.read1Coordinate - rhs.read1Coordinate;
            }
            if (compareDifference == 0) {
                compareDifference = lhs.orientation - rhs.orientation;
            }
            if (compareDifference == 0) {
                compareDifference = lhs.read2ReferenceIndex - rhs.read2ReferenceIndex;
            }
            if (compareDifference == 0) {
                compareDifference = lhs.read2Coordinate - rhs.read2Coordinate;
            }
            if (compareDifference == 0) {
                compareDifference = lhs.getTile() - rhs.getTile();
            }
            if (compareDifference == 0) {
                compareDifference = lhs.getX() - rhs.getX();
            }
            if (compareDifference == 0) {
                compareDifference = lhs.getY() - rhs.getY();
            }
            // The following is arbitrary and is only included for completeness.
            // Other implementations may choose to forgo this tiebreak if they do not have
            // access to the index-in-file of the records (e.g. SPARK implementations)
            if (compareDifference == 0) {
                compareDifference = (int) (lhs.read1IndexInFile - rhs.read1IndexInFile);
            }
            if (compareDifference == 0) {
                compareDifference = (int) (lhs.read2IndexInFile - rhs.read2IndexInFile);
            }

            return compareDifference;
        }
    }
}
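The ReadEndsMDComparator above builds a lexicographic ordering by falling back through successive fields whenever the previous comparison ties. The following is a minimal sketch of that same tie-break pattern using the JDK's Comparator combinators; the Key class and its two fields are hypothetical stand-ins, not part of the file above.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class TieBreakComparatorSketch {
    // Hypothetical stand-in for a read-end record, keeping only two of the many fields.
    static final class Key {
        final int referenceIndex;
        final int coordinate;
        Key(int referenceIndex, int coordinate) {
            this.referenceIndex = referenceIndex;
            this.coordinate = coordinate;
        }
    }

    public static void main(String[] args) {
        // Compare by referenceIndex first; only when that ties, fall back to coordinate,
        // mirroring the chain of "if (compareDifference == 0)" blocks in the comparator above.
        Comparator<Key> order = Comparator
                .comparingInt((Key k) -> k.referenceIndex)
                .thenComparingInt(k -> k.coordinate);

        List<Key> keys = new ArrayList<>();
        keys.add(new Key(1, 500));
        keys.add(new Key(0, 900));
        keys.add(new Key(1, 100));
        keys.sort(order);

        for (Key k : keys) {
            System.out.println(k.referenceIndex + ":" + k.coordinate); // 0:900, 1:100, 1:500
        }
    }
}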
package seedu.address.logic.commands; import java.util.Set; /** * Finds and lists all persons in address book whose name contains any of the argument keywords. * Keyword matching is case sensitive. */ public class FindCommand extends Command { public static final String COMMAND_WORD = "find"; public static final String PREFIX_NAME = "/n"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Finds all tasks whose names contain any of " + "the specified keywords (case-sensitive) and displays them as a list with index numbers.\n" + "Parameters: n/NAME \n" + "Example: " + COMMAND_WORD + " n/meeting"; private final Set<String> keywords; private final String prefix; public FindCommand(String pre, Set<String> keywords) { this.prefix = pre; this.keywords = keywords; } @Override public CommandResult execute() { if (prefix.equals(PREFIX_NAME)) { model.updateFilteredPersonList(keywords); return new CommandResult(getMessageForPersonListShownSummary(model.getFilteredPersonList().size())); } else { model.updateFilteredPersonList(keywords); return new CommandResult(getMessageForPersonListShownSummary(model.getFilteredPersonList().size())); } } }
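For illustration, this is roughly how a parser might construct the command above; the keyword values are made up, and the sketch class is hypothetical, assumed to sit in the same package as FindCommand.

package seedu.address.logic.commands;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class FindCommandUsageSketch {
    public static void main(String[] args) {
        // Keywords extracted from user input such as "find n/meeting report" (example input only).
        Set<String> keywords = new HashSet<>(Arrays.asList("meeting", "report"));

        // The prefix tells the command which field the keywords apply to.
        FindCommand find = new FindCommand(FindCommand.PREFIX_NAME, keywords);
        System.out.println(find.getClass().getSimpleName() + " created for keywords " + keywords);
    }
}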
package seedu.address.logic.commands; import java.util.Date; import java.util.List; import java.util.Optional; import seedu.address.commons.core.Messages; import seedu.address.commons.exceptions.IllegalDateTimeValueException; import seedu.address.commons.exceptions.IllegalValueException; import seedu.address.logic.commands.exceptions.CommandException; import seedu.address.logic.recurrenceparser.RecurrenceManager; import seedu.address.logic.recurrenceparser.RecurrenceParser; import seedu.address.logic.undo.UndoManager; import seedu.address.model.booking.UniqueBookingList; import seedu.address.model.label.UniqueLabelList; import seedu.address.model.task.Deadline; import seedu.address.model.task.ReadOnlyTask; import seedu.address.model.task.Recurrence; import seedu.address.model.task.Task; import seedu.address.model.task.Title; import seedu.address.model.task.UniqueTaskList; //@@author A0105287E public class MarkCommand extends Command { public static final String COMMAND_WORD = "mark"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Marks the task as completed or incomplete, task is " + "identified by the index number used in the last task listing.\n " + "Parameters: INDEX [completed|incomplete]\n" + "Example: mark 1 completed\n"; public static final String MESSAGE_MARK_TASK_SUCCESS = "Marked Task: %1$s"; public static final String MESSAGE_NOT_MARKED = "Status must be provided."; public static final String MESSAGE_DUPLICATE_TASK = "This task already exists in the task manager."; public static final String MESSAGE_TYPE_BOOKING = "Booking type of tasks cannot be marked as completed."; private static final RecurrenceParser recurrenceParser = new RecurrenceManager(); private final int filteredTaskListIndex; private final Boolean isCompleted; /** * @param filteredTaskListIndex the index of the task in the filtered task list to edit * */ public MarkCommand(int filteredTaskListIndex, Boolean isCompleted) { assert filteredTaskListIndex > 0; assert isCompleted != null; this.isCompleted = isCompleted; // converts filteredTaskListIndex from one-based to zero-based. this.filteredTaskListIndex = filteredTaskListIndex - 1; } @Override public CommandResult execute() throws CommandException { List<ReadOnlyTask> lastShownList = model.getFilteredTaskList(); if (filteredTaskListIndex >= lastShownList.size()) { throw new CommandException(Messages.MESSAGE_INVALID_TASKS_DISPLAYED_INDEX); } ReadOnlyTask taskToEdit = lastShownList.get(filteredTaskListIndex); if (!taskToEdit.getBookings().isEmpty()) { throw new CommandException(MESSAGE_TYPE_BOOKING); } try { Task editedTask = createEditedTask(taskToEdit, isCompleted); saveCurrentState(); model.updateTask(filteredTaskListIndex, editedTask); if (taskToEdit.isRecurring()) { Task newTask = createRecurringTask(taskToEdit); model.addTask(newTask); } } catch (UniqueTaskList.DuplicateTaskException dte) { throw new CommandException(MESSAGE_DUPLICATE_TASK); } catch (Exception e) { throw new CommandException(e.getMessage()); } return new CommandResult(String.format(MESSAGE_MARK_TASK_SUCCESS, taskToEdit)); } /** * Creates and returns a {@code Task} with the details of {@code taskToEdit} * edited with {@code editTaskDescriptor}. 
     */
    private static Task createEditedTask(ReadOnlyTask taskToEdit, Boolean isCompleted)
            throws IllegalValueException, IllegalDateTimeValueException, CommandException {
        assert taskToEdit != null;

        Optional<Deadline> updatedStartTime;
        Optional<Deadline> updatedDeadline;

        Title updatedTitle = new Title(taskToEdit.getTitle().toString());
        updatedStartTime = taskToEdit.getStartTime();
        updatedDeadline = taskToEdit.getDeadline();
        UniqueLabelList updatedLabels = taskToEdit.getLabels().clone();
        UniqueBookingList bookingList = taskToEdit.getBookings().clone();
        Boolean isRecurring = taskToEdit.isRecurring();
        Optional<Recurrence> updatedRecurrence = taskToEdit.getRecurrence();

        return new Task(updatedTitle, updatedStartTime, updatedDeadline, isCompleted,
                updatedLabels, bookingList, isRecurring, updatedRecurrence);
    }

    /**
     * Creates and returns a new {@code Task} instance for the next occurrence of the recurring task.
     */
    private static Task createRecurringTask(ReadOnlyTask task)
            throws IllegalValueException, IllegalDateTimeValueException {
        Optional<Deadline> updatedStartTime;
        Optional<Deadline> updatedDeadline;

        if (task.getStartTime().isPresent()) {
            updatedStartTime = Optional.ofNullable(getRecurringDate(task.getStartTime().get(),
                    task.getRecurrence().get()));
            updatedDeadline = Optional.ofNullable(getRecurringDate(task.getDeadline().get(),
                    task.getRecurrence().get()));
        } else {
            updatedStartTime = task.getStartTime();
            updatedDeadline = Optional.ofNullable(getRecurringDate(task.getDeadline().get(),
                    task.getRecurrence().get()));
        }

        Title updatedTitle = task.getTitle();
        UniqueLabelList updatedLabels = task.getLabels();
        UniqueBookingList updatedBookings = task.getBookings().clone();
        Boolean isRecurring = task.isRecurring();
        Optional<Recurrence> updatedRecurrence = task.getRecurrence();
        Boolean isCompleted = AddCommand.DEFAULT_TASK_STATE;

        return new Task(updatedTitle, updatedStartTime, updatedDeadline, isCompleted,
                updatedLabels, updatedBookings, isRecurring, updatedRecurrence);
    }

    /**
     * Creates and returns a new {@code Deadline} instance with the next date of the deadline passed in,
     * based on the given recurrence.
     */
    private static Deadline getRecurringDate(Deadline date, Recurrence recurrence)
            throws IllegalValueException, IllegalDateTimeValueException {
        try {
            Date oldDate = date.getDateTime();
            return new Deadline(recurrenceParser.getNextDate(oldDate, recurrence).toString());
        } catch (IllegalValueException e) {
            throw new IllegalValueException(e.getMessage());
        } catch (IllegalDateTimeValueException e) {
            throw new IllegalDateTimeValueException();
        }
    }

    /**
     * Saves the data in the task manager if the command mutates the data.
     */
    public void saveCurrentState() {
        if (isMutating()) {
            try {
                UndoManager.getInstance().addStorageHistory(model.getTaskManager().getImmutableTaskList(),
                        model.getTaskManager().getImmutableLabelList());
            } catch (CloneNotSupportedException e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public boolean isMutating() {
        return true;
    }
}
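createRecurringTask above relies on RecurrenceManager.getNextDate to push the start and deadline dates forward by one recurrence interval. The Recurrence API itself is not shown in this file, so the following is only a sketch of the general idea using the JDK Calendar class; the field/amount pairing is an assumption for illustration, not the project's actual implementation.

import java.util.Calendar;
import java.util.Date;

public class NextOccurrenceSketch {
    // Advance a date by a fixed interval, e.g. Calendar.DAY_OF_MONTH with amount 7 for a weekly recurrence.
    static Date nextOccurrence(Date current, int calendarField, int amount) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(current);
        cal.add(calendarField, amount);
        return cal.getTime();
    }

    public static void main(String[] args) {
        Date deadline = new Date();
        Date nextDeadline = nextOccurrence(deadline, Calendar.DAY_OF_MONTH, 7);
        System.out.println(deadline + " -> " + nextDeadline);
    }
}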
package seedu.address.logic.commands; import java.util.Date; import java.util.List; import java.util.Optional; import seedu.address.commons.core.Messages; import seedu.address.commons.exceptions.IllegalDateTimeValueException; import seedu.address.commons.exceptions.IllegalValueException; import seedu.address.logic.commands.exceptions.CommandException; import seedu.address.logic.recurrenceparser.RecurrenceManager; import seedu.address.logic.recurrenceparser.RecurrenceParser; import seedu.address.logic.undo.UndoManager; import seedu.address.model.booking.UniqueBookingList; import seedu.address.model.label.UniqueLabelList; import seedu.address.model.task.Deadline; import seedu.address.model.task.ReadOnlyTask; import seedu.address.model.task.Recurrence; import seedu.address.model.task.Task; import seedu.address.model.task.Title; import seedu.address.model.task.UniqueTaskList; //@@author A0105287E public class MarkCommand extends Command { public static final String COMMAND_WORD = "mark"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Marks the task as completed or incomplete, task is " + "identified by the index number used in the last task listing.\n " + "Parameters: INDEX [completed|incomplete]\n" + "Example: mark 1 completed\n"; public static final String MESSAGE_MARK_TASK_SUCCESS = "Marked Task: %1$s"; public static final String MESSAGE_NOT_MARKED = "Status must be provided."; public static final String MESSAGE_DUPLICATE_TASK = "This task already exists in the task manager."; public static final String MESSAGE_TYPE_BOOKING = "Booking type of tasks cannot be marked as completed."; public static final String MESSAGE_RECURRING_INCOMPLETE_DISABLE = "Status of completed recurring task " + "cannot be changed."; private static final RecurrenceParser recurrenceParser = new RecurrenceManager(); private final int filteredTaskListIndex; private final Boolean isCompleted; /** * @param filteredTaskListIndex the index of the task in the filtered task list to edit * */ public MarkCommand(int filteredTaskListIndex, Boolean isCompleted) { assert filteredTaskListIndex > 0; assert isCompleted != null; this.isCompleted = isCompleted; // converts filteredTaskListIndex from one-based to zero-based. this.filteredTaskListIndex = filteredTaskListIndex - 1; } @Override public CommandResult execute() throws CommandException { List<ReadOnlyTask> lastShownList = model.getFilteredTaskList(); if (filteredTaskListIndex >= lastShownList.size()) { throw new CommandException(Messages.MESSAGE_INVALID_TASKS_DISPLAYED_INDEX); } ReadOnlyTask taskToEdit = lastShownList.get(filteredTaskListIndex); if (!taskToEdit.getBookings().isEmpty()) { throw new CommandException(MESSAGE_TYPE_BOOKING); } if (taskToEdit.isCompleted() && taskToEdit.isRecurring()) { throw new CommandException(MESSAGE_RECURRING_INCOMPLETE_DISABLE); } try { Task editedTask = createEditedTask(taskToEdit, isCompleted); saveCurrentState(); model.updateTask(filteredTaskListIndex, editedTask); if (taskToEdit.isRecurring()) { Task newTask = createRecurringTask(taskToEdit); model.addTask(newTask); } } catch (UniqueTaskList.DuplicateTaskException dte) { throw new CommandException(MESSAGE_DUPLICATE_TASK); } catch (Exception e) { throw new CommandException(e.getMessage()); } return new CommandResult(String.format(MESSAGE_MARK_TASK_SUCCESS, taskToEdit)); } /** * Creates and returns a {@code Task} with the details of {@code taskToEdit} * edited with {@code editTaskDescriptor}. 
     */
    private static Task createEditedTask(ReadOnlyTask taskToEdit, Boolean isCompleted)
            throws IllegalValueException, IllegalDateTimeValueException, CommandException {
        assert taskToEdit != null;

        Optional<Deadline> updatedStartTime;
        Optional<Deadline> updatedDeadline;

        Title updatedTitle = new Title(taskToEdit.getTitle().toString());
        updatedStartTime = taskToEdit.getStartTime();
        updatedDeadline = taskToEdit.getDeadline();
        UniqueLabelList updatedLabels = taskToEdit.getLabels().clone();
        UniqueBookingList bookingList = taskToEdit.getBookings().clone();
        Boolean isRecurring = taskToEdit.isRecurring();
        Optional<Recurrence> updatedRecurrence = taskToEdit.getRecurrence();

        return new Task(updatedTitle, updatedStartTime, updatedDeadline, isCompleted,
                updatedLabels, bookingList, isRecurring, updatedRecurrence);
    }

    /**
     * Creates and returns a new {@code Task} instance for the next occurrence of the recurring task.
     */
    private static Task createRecurringTask(ReadOnlyTask task)
            throws IllegalValueException, IllegalDateTimeValueException {
        Optional<Deadline> updatedStartTime;
        Optional<Deadline> updatedDeadline;

        if (task.getStartTime().isPresent()) {
            updatedStartTime = Optional.ofNullable(getRecurringDate(task.getStartTime().get(),
                    task.getRecurrence().get()));
            updatedDeadline = Optional.ofNullable(getRecurringDate(task.getDeadline().get(),
                    task.getRecurrence().get()));
        } else {
            updatedStartTime = task.getStartTime();
            updatedDeadline = Optional.ofNullable(getRecurringDate(task.getDeadline().get(),
                    task.getRecurrence().get()));
        }

        Title updatedTitle = task.getTitle();
        UniqueLabelList updatedLabels = task.getLabels();
        UniqueBookingList updatedBookings = task.getBookings().clone();
        Boolean isRecurring = task.isRecurring();
        Optional<Recurrence> updatedRecurrence = task.getRecurrence();
        Boolean isCompleted = AddCommand.DEFAULT_TASK_STATE;

        return new Task(updatedTitle, updatedStartTime, updatedDeadline, isCompleted,
                updatedLabels, updatedBookings, isRecurring, updatedRecurrence);
    }

    /**
     * Creates and returns a new {@code Deadline} instance with the next date of the deadline passed in,
     * based on the given recurrence.
     */
    private static Deadline getRecurringDate(Deadline date, Recurrence recurrence)
            throws IllegalValueException, IllegalDateTimeValueException {
        try {
            Date oldDate = date.getDateTime();
            return new Deadline(recurrenceParser.getNextDate(oldDate, recurrence).toString());
        } catch (IllegalValueException e) {
            throw new IllegalValueException(e.getMessage());
        } catch (IllegalDateTimeValueException e) {
            throw new IllegalDateTimeValueException();
        }
    }

    /**
     * Saves the data in the task manager if the command mutates the data.
     */
    public void saveCurrentState() {
        if (isMutating()) {
            try {
                UndoManager.getInstance().addStorageHistory(model.getTaskManager().getImmutableTaskList(),
                        model.getTaskManager().getImmutableLabelList());
            } catch (CloneNotSupportedException e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public boolean isMutating() {
        return true;
    }
}
package seedu.jimi.commons.core; import seedu.jimi.commons.events.BaseEvent; /** * Base class for *Manager classes * * Registers the class' event handlers in eventsCenter */ public abstract class ComponentManager { protected EventsCenter eventsCenter; /** * Uses default {@link EventsCenter} */ public ComponentManager() { this(EventsCenter.getInstance()); } public ComponentManager(EventsCenter eventsCenter) { this.eventsCenter = eventsCenter; eventsCenter.registerHandler(this); } protected void raise(BaseEvent event) { eventsCenter.post(event); } }
package seedu.taskell.logic.commands; import java.util.ArrayList; import java.util.logging.Logger; import seedu.taskell.commons.core.LogsCenter; import seedu.taskell.model.CommandHistory; import seedu.taskell.model.task.Task; import seedu.taskell.model.task.UniqueTaskList.DuplicateTaskException; import seedu.taskell.model.task.UniqueTaskList.TaskNotFoundException; /** * Undo previously executed command (add or delete only for now) * Note: only for current session only (meaning after app is closed, history will be cleared) */ public class UndoCommand extends Command { private static final Logger logger = LogsCenter.getLogger(UndoCommand.class.getName()); public static final String COMMAND_WORD = "undo"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Undo most recent command.\n" + "Example: " + COMMAND_WORD; private static final String MESSAGE_DELETE_TASK_SUCCESS = "Deleted Task: %1$s"; private static final String MESSAGE_ADD_TASK_SUCCESS = "Task added back: %1$s"; private static final String MESSAGE_DUPLICATE_TASK = "This task already exists in the task manager"; private static final String MESSAGE_NO_TASK_TO_UNDO = "No add or delete commands available to undo."; private static final String MESSAGE_COMMAND_HISTORY_EMPTY = "No command history available."; private static final String MESSAGE_INVALID_INDEX = "Index is invalid"; private static ArrayList<CommandHistory> commandHistoryList; private int index; private CommandHistory commandHistory; public UndoCommand(int index) { logger.info("Creating UndoCommand with index: " + index); this.index = index; } public static ArrayList<String> getListOfCommandHistoryText() { assert commandHistoryList != null; assert !commandHistoryList.isEmpty(); ArrayList<String> list = new ArrayList<>(); for (CommandHistory history: commandHistoryList) { list.add(history.getCommandText()); } return list; } @Override public CommandResult execute() { if (commandHistoryList.isEmpty()) { return new CommandResult(String.format(MESSAGE_COMMAND_HISTORY_EMPTY)); } else if (index > commandHistoryList.size()) { return new CommandResult(String.format(MESSAGE_INVALID_INDEX)); } commandHistory = commandHistoryList.get(getOffset(index)); switch (commandHistory.getCommandType()) { case AddCommand.COMMAND_WORD: return undoAdd(); case DeleteCommand.COMMAND_WORD: return undoDelete(); default: return new CommandResult(String.format(MESSAGE_NO_TASK_TO_UNDO)); } } private int getOffset(int index) { return index - 1; } private CommandResult undoDelete() { try { model.addTask(commandHistory.getTask()); deleteCommandHistory(); return new CommandResult(String.format(MESSAGE_ADD_TASK_SUCCESS, commandHistory.getTask())); } catch (DuplicateTaskException e) { return new CommandResult(MESSAGE_DUPLICATE_TASK); } } private CommandResult undoAdd() { try { model.deleteTask(commandHistory.getTask()); deleteCommandHistory(); } catch (TaskNotFoundException e) { assert false : "The target task cannot be missing"; } return new CommandResult(String.format(MESSAGE_DELETE_TASK_SUCCESS, commandHistory.getTask())); } private void deleteCommandHistory() { commandHistoryList.remove(commandHistory); } public static void initializeCommandHistory() { if (commandHistoryList==null) { commandHistoryList = new ArrayList<>(); } } public static void addCommandToHistory(String commandText, String commandType, Task task) { assert commandHistoryList != null; commandHistoryList.add(new CommandHistory(commandText, commandType, task)); } public static void addTaskToCommandHistory(Task task) { logger.info("Adding 
task to history"); if (commandHistoryList.isEmpty()) { logger.warning("No command history to add task to"); return; } commandHistoryList.get(commandHistoryList.size()-1).setTask(task); } public static void deletePreviousCommand() { logger.info("Command unsuccessfully executed. Deleting command history."); if (commandHistoryList.isEmpty()) { logger.warning("No command history to delete"); return; } commandHistoryList.remove(commandHistoryList.size()-1); } }
/** @@author A0142130A **/ package seedu.taskell.logic.commands; import java.util.ArrayList; import java.util.logging.Logger; import seedu.taskell.commons.core.EventsCenter; import seedu.taskell.commons.core.LogsCenter; import seedu.taskell.commons.events.model.DisplayListChangedEvent; import seedu.taskell.model.CommandHistory; import seedu.taskell.model.task.Task; import seedu.taskell.model.task.UniqueTaskList.DuplicateTaskException; import seedu.taskell.model.task.UniqueTaskList.TaskNotFoundException; /** * Undo previously executed command (add or delete only for now) * Note: only for current session only (meaning after app is closed, history will be cleared) */ public class UndoCommand extends Command { private static final Logger logger = LogsCenter.getLogger(UndoCommand.class.getName()); public static final String COMMAND_WORD = "undo"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Undo a previously executed command.\n" + "Parameters: INDEX (must be a positive integer)\n" + "Example: " + COMMAND_WORD + " 3"; private static final String MESSAGE_DELETE_TASK_SUCCESS = "Deleted Task: %1$s"; private static final String MESSAGE_ADD_TASK_SUCCESS = "Task added back: %1$s"; private static final String MESSAGE_EDIT_TASK_SUCCESS = "Task edited back to old version: %1$s"; private static final String MESSAGE_DUPLICATE_TASK = "This task already exists in the task manager"; private static final String MESSAGE_NO_TASK_TO_UNDO = "No add or delete commands available to undo."; private static final String MESSAGE_COMMAND_HISTORY_EMPTY = "No command history available for undo."; private static final String MESSAGE_INVALID_INDEX = "Index is invalid"; private static ArrayList<CommandHistory> commandHistoryList; private int index; private CommandHistory commandHistory; public UndoCommand(int index) { logger.info("Creating UndoCommand with index: " + index); this.index = index; } public static ArrayList<String> getListOfCommandHistoryText() { assert commandHistoryList != null; assert !commandHistoryList.isEmpty(); ArrayList<String> list = new ArrayList<>(); for (CommandHistory history: commandHistoryList) { list.add(history.getCommandText()); } return list; } @Override public CommandResult execute() { if (commandHistoryList.isEmpty()) { return new CommandResult(String.format(MESSAGE_COMMAND_HISTORY_EMPTY)); } else if (index > commandHistoryList.size()) { return new CommandResult(String.format(MESSAGE_INVALID_INDEX)); } commandHistory = commandHistoryList.get(getOffset(index)); if (commandHistory.isRedoTrue()) { return redoUndo(); } switch (commandHistory.getCommandType()) { case AddCommand.COMMAND_WORD: return undoAdd(); case DeleteCommand.COMMAND_WORD: return undoDelete(); case EditDateCommand.COMMAND_WORD: return undoEdit(); default: logger.severe("CommandHistory is invalid"); return new CommandResult(String.format(MESSAGE_NO_TASK_TO_UNDO)); } } private CommandResult redoUndo() { switch (commandHistory.getCommandType()) { case AddCommand.COMMAND_WORD: return undoDelete(); case DeleteCommand.COMMAND_WORD: return undoAdd(); case EditDateCommand.COMMAND_WORD: return redoEdit(); default: logger.severe("CommandHistory is invalid"); return new CommandResult(String.format(MESSAGE_NO_TASK_TO_UNDO)); } } private CommandResult undoEdit() { try { model.editTask(commandHistory.getTask(), commandHistory.getOldTask()); deleteCommandHistory(); addUndoCommand(commandHistory); indicateDisplayListChanged(); return new CommandResult(String.format(MESSAGE_EDIT_TASK_SUCCESS, commandHistory.getOldTask())); } 
catch (DuplicateTaskException e) { return new CommandResult(MESSAGE_DUPLICATE_TASK); } catch (TaskNotFoundException e) { assert false : "The target task cannot be missing"; } assert false: "Undo edit should return a command result"; return null; } private CommandResult redoEdit() { try { model.editTask(commandHistory.getOldTask(), commandHistory.getTask()); deleteCommandHistory(); indicateDisplayListChanged(); return new CommandResult(String.format(MESSAGE_EDIT_TASK_SUCCESS, commandHistory.getTask())); } catch (DuplicateTaskException e) { return new CommandResult(MESSAGE_DUPLICATE_TASK); } catch (TaskNotFoundException e) { assert false : "The target task cannot be missing"; } assert false: "Redo edit should return a command result"; return null; } private CommandResult undoDelete() { try { model.addTask(commandHistory.getTask()); deleteCommandHistory(); addUndoCommand(commandHistory); indicateDisplayListChanged(); return new CommandResult(String.format(MESSAGE_ADD_TASK_SUCCESS, commandHistory.getTask())); } catch (DuplicateTaskException e) { return new CommandResult(MESSAGE_DUPLICATE_TASK); } } private CommandResult undoAdd() { try { model.deleteTask(commandHistory.getTask()); deleteCommandHistory(); addUndoCommand(commandHistory); indicateDisplayListChanged(); } catch (TaskNotFoundException e) { assert false : "The target task cannot be missing"; } return new CommandResult(String.format(MESSAGE_DELETE_TASK_SUCCESS, commandHistory.getTask())); } private void deleteCommandHistory() { commandHistoryList.remove(commandHistory); } private void addUndoCommand(CommandHistory commandHistory) { if (commandHistory.isRedoTrue()) { return; } commandHistory.setCommandText("undo " + commandHistory.getCommandText()); commandHistory.setToRedoToTrue(); commandHistoryList.add(commandHistory); } public static void initializeCommandHistory() { if (commandHistoryList==null) { commandHistoryList = new ArrayList<>(); } } private static int getOffset(int index) { return index - 1; } public static void addCommandToHistory(String commandText, String commandType) { assert commandHistoryList != null; commandHistoryList.add(new CommandHistory(commandText, commandType)); } public static void addTaskToCommandHistory(Task task) { logger.info("Adding task to history"); if (commandHistoryList.isEmpty()) { logger.warning("No command history to add task to"); return; } commandHistoryList.get(getOffset(commandHistoryList.size())).setTask(task); } public static void addOldTaskToCommandHistory(Task task) { logger.info("Adding old task to history"); if (commandHistoryList.isEmpty()) { logger.warning("No command history to add task to"); return; } commandHistoryList.get(getOffset(commandHistoryList.size())).setOldTask(task); } public static void deletePreviousCommand() { logger.info("Command unsuccessfully executed. Deleting command history."); if (commandHistoryList.isEmpty()) { logger.warning("No command history to delete"); return; } commandHistoryList.remove(getOffset(commandHistoryList.size())); } public void indicateDisplayListChanged() { EventsCenter.getInstance().post( new DisplayListChangedEvent(getListOfCommandHistoryText())); } }
package skyhussars.terrained; import java.io.File; import java.net.URL; import java.util.ResourceBundle; import javafx.fxml.FXML; import javafx.fxml.Initializable; import javafx.scene.control.Alert; import javafx.scene.control.Alert.AlertType; import javafx.scene.control.TextField; import javafx.stage.FileChooser; import javafx.stage.Stage; import javafx.util.converter.NumberStringConverter; import skyhussars.SkyHussars; /** * Controller class for TerrainEd. The controller is defined from SceneBuilder. * This class is not thread safe */ public class TerrainEdController implements Initializable{ @FXML private TextField terrainName; @FXML private TextField terrainSize; @FXML private TextField terrainLocation; private Stage stage; private final TerrainProperties terrainProperties = new TerrainProperties(); /** * This method handles the event when the user clicks on the About item in the menu */ public void handleAboutAction(){ Alert alert = new Alert(AlertType.INFORMATION); alert.setTitle("SkyHussars - TerrainEd"); alert.setHeaderText("TerrainEd - A terrain editor for SkyHussars"); alert.setContentText("Thank you for using SkyHussars and SkyHussars TerrainEd. \n Greetings from ZoltanTheHun"); alert.showAndWait(); } /** * This method handles the event when the user clicks on the Save item in the menu */ public void handleSaveAction(){ File file = saveAsChooser.showSaveDialog(stage); if(file != null) new Persistence().persist(terrainProperties, file); } /** * This method handles the event when the user clicks on the Open item in the menu */ public void handleOpenAction(){ openChooser.showOpenDialog(stage); } private final FileChooser saveAsChooser = new FileChooser(); {saveAsChooser.setTitle("Save a terrain definition");} {saveAsChooser.setInitialDirectory(new File(SkyHussars.APP_ROOT));} {saveAsChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("json","*.json"));} private final FileChooser openChooser = new FileChooser(); {openChooser.setTitle("Open a terrain definition");} {openChooser.setInitialDirectory(new File(SkyHussars.APP_ROOT));} {openChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("json","*.json"));} public TerrainEdController stage(Stage stage){this.stage = stage; return this;} @Override public void initialize(URL location, ResourceBundle resources) { terrainName.textProperty().bindBidirectional(terrainProperties.name); terrainSize.textProperty().bindBidirectional(terrainProperties.size, new NumberStringConverter()); terrainLocation.textProperty().bindBidirectional(terrainProperties.location); } }
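The initialize method above keeps the text fields and TerrainProperties in sync with bidirectional bindings, using a NumberStringConverter for the numeric size field. Below is a minimal, toolkit-free sketch of that conversion; the two properties are stand-ins for terrainSize.textProperty() and terrainProperties.size, and the class name is hypothetical.

import javafx.beans.binding.Bindings;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.util.converter.NumberStringConverter;

public class BidirectionalBindingSketch {
    public static void main(String[] args) {
        SimpleStringProperty sizeText = new SimpleStringProperty();   // stand-in for the TextField's textProperty()
        SimpleIntegerProperty size = new SimpleIntegerProperty(0);    // stand-in for terrainProperties.size

        // Text edits are parsed into the number property, and numeric updates are formatted back into text.
        Bindings.bindBidirectional(sizeText, size, new NumberStringConverter());

        sizeText.set("2048");
        System.out.println(size.get());     // 2048

        size.set(4096);
        System.out.println(sizeText.get()); // "4,096" or "4096", depending on the default locale's number format
    }
}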
package top.quantic.sentry.event; import com.vdurmont.emoji.Emoji; import com.vdurmont.emoji.EmojiManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import sx.blah.discord.api.internal.json.objects.EmbedObject; import sx.blah.discord.handle.impl.events.guild.member.UserBanEvent; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; public class UserBannedEvent extends SentryEvent { private static final Logger log = LoggerFactory.getLogger(UserBannedEvent.class); public UserBannedEvent(UserBanEvent event) { super(event); } @Override public UserBanEvent getSource() { return (UserBanEvent) source; } @Override public String getContentId() { return "ban:" + getSource().getClient().getOurUser().getID() + ":" + getSource().getGuild().getID() + ":" + getSource().getUser().getID() + "@" + getTimestamp(); } @Override public String asContent(Map<String, Object> dataMap) { String guildSpec = (String) dataMap.get("guilds"); List<String> guilds = null; if (guildSpec != null) { guilds = Arrays.asList(guildSpec.split(",|;")); } if (guilds == null) { log.info("No guilds specified - Add 'guilds' to dataMap"); return null; } if (guilds.contains(getSource().getGuild().getID())) { Emoji hammer = EmojiManager.getForAlias("hammer"); return hammer.getUnicode() + " " + getSource().getUser().getName(); } else { return null; } } @Override public EmbedObject asEmbed(Map<String, Object> dataMap) { return null; } @Override public Map<String, Object> asMap(Map<String, Object> dataMap) { return new LinkedHashMap<>(); } }
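asContent above only emits a message when the ban happened in one of the guilds listed under the 'guilds' key, which arrives as a single string of IDs separated by commas or semicolons. Here is a small standalone sketch of that parsing step; the IDs and class name are made-up values for illustration.

import java.util.Arrays;
import java.util.List;

public class GuildSpecParsingSketch {
    public static void main(String[] args) {
        // Same split pattern as UserBannedEvent: either ',' or ';' separates guild IDs.
        String guildSpec = "111111111111111111;222222222222222222,333333333333333333";
        List<String> guilds = Arrays.asList(guildSpec.split(",|;"));

        System.out.println(guilds);                                // [111..., 222..., 333...]
        System.out.println(guilds.contains("222222222222222222")); // true
    }
}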
package org.metasyntactic.data; import android.os.Parcel; import android.os.Parcelable; import java.io.Serializable; import java.util.ArrayList; import java.util.Date; import java.util.List; /** @author cyrusn@google.com (Cyrus Najmabadi) */ public class Movie implements Parcelable, Serializable { private static final long serialVersionUID = -1715042667960786544L; private final String identifier; private final String canonicalTitle; private final String displayTitle; private final String rating; private final int length; // minutes; private final Date releaseDate; private final String poster; private final String synopsis; private final String studio; private final List<String> directors; private final List<String> cast; private final List<String> genres; private Movie(String identifier, String canonicalTitle, String displayTitle, String rating, int length, Date releaseDate, String poster, String synopsis, String studio, List<String> directors, List<String> cast, List<String> genres) { this.identifier = identifier; this.canonicalTitle = canonicalTitle; this.rating = rating; this.length = length; this.releaseDate = releaseDate; this.poster = poster; this.synopsis = synopsis; this.studio = studio; this.directors = directors; this.cast = cast; this.genres = genres; this.displayTitle = displayTitle; } public Movie(String identifier, String title, String rating, int length, Date releaseDate, String poster, String synopsis, String studio, List<String> directors, List<String> cast, List<String> genres) { this(identifier, makeCanonical(title), makeDisplay(title), rating, length, releaseDate, poster, synopsis, studio, directors, cast, genres); } private final static String[] articles = new String[]{ "Der", "Das", "Ein", "Eine", "The", "A", "An", "La", "Las", "Le", "Les", "Los", "El", "Un", "Une", "Una", "Il", "O", "Het", "De", "Os", "Az", "Den", "Al", "En", "L'" }; public static String makeCanonical(String title) { for (String article : articles) { if (title.endsWith(", " + article)) { return article + title.substring(0, title.length() - article.length() - 2); } } return title; } public static String makeDisplay(String title) { for (String article : articles) { if (title.startsWith(article + " ")) { return title.substring(article.length() + 1) + ", " + article; } } return title; } public int describeContents() { return 0; } public void writeToParcel(Parcel dest, int flags) { dest.writeString(identifier); dest.writeString(canonicalTitle); dest.writeString(displayTitle); dest.writeString(rating); dest.writeInt(length); dest.writeValue(releaseDate); dest.writeString(poster); dest.writeString(synopsis); dest.writeString(studio); dest.writeStringList(directors); dest.writeStringList(cast); dest.writeStringList(genres); } public static final Parcelable.Creator<Movie> CREATOR = new Parcelable.Creator<Movie>() { public Movie createFromParcel(Parcel source) { String identifier = source.readString(); String canonicalTitle = source.readString(); String displayTitle = source.readString(); String rating = source.readString(); int length = source.readInt(); Date releaseDate = (Date)source.readValue(null); String poster = source.readString(); String synopsis = source.readString(); String studio = source.readString(); List<String> directors = new ArrayList<String>(); source.readStringList(directors); List<String> cast = new ArrayList<String>(); source.readStringList(cast); List<String> genres = new ArrayList<String>(); source.readStringList(genres); return new Movie(identifier, canonicalTitle, displayTitle, 
rating, length, releaseDate, poster, synopsis, studio, directors, cast, genres); } public Movie[] newArray(int size) { return new Movie[size]; } }; }
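Movie.makeDisplay moves a recognised leading article to the end of the title, and makeCanonical reverses that transformation. The following standalone sketch re-implements just the display transformation, with a shortened article list, to show the effect; it is an illustration of the idea, not the class's own code.

public class TitleDisplaySketch {
    // A shortened version of the article list used by Movie.
    private static final String[] ARTICLES = { "The", "A", "An", "La", "Le", "Les", "El" };

    // A leading article is moved behind a comma, mirroring Movie.makeDisplay.
    static String makeDisplay(String title) {
        for (String article : ARTICLES) {
            if (title.startsWith(article + " ")) {
                return title.substring(article.length() + 1) + ", " + article;
            }
        }
        return title;
    }

    public static void main(String[] args) {
        System.out.println(makeDisplay("The Matrix"));     // Matrix, The
        System.out.println(makeDisplay("La Dolce Vita"));  // Dolce Vita, La
        System.out.println(makeDisplay("Heat"));           // Heat (no leading article, unchanged)
    }
}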
package uk.ac.ic.wlgitbridge.server; import com.google.api.client.auth.oauth2.*; import com.google.api.client.http.GenericUrl; import org.apache.commons.codec.binary.Base64; import org.eclipse.jetty.server.Request; import uk.ac.ic.wlgitbridge.application.config.Oauth2; import uk.ac.ic.wlgitbridge.bridge.snapshot.SnapshotApi; import uk.ac.ic.wlgitbridge.snapshot.base.MissingRepositoryException; import uk.ac.ic.wlgitbridge.snapshot.base.ForbiddenException; import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocRequest; import uk.ac.ic.wlgitbridge.util.Instance; import uk.ac.ic.wlgitbridge.util.Log; import uk.ac.ic.wlgitbridge.util.Util; import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.util.*; public class Oauth2Filter implements Filter { public static final String ATTRIBUTE_KEY = "oauth2"; private final SnapshotApi snapshotApi; private final Oauth2 oauth2; public Oauth2Filter(SnapshotApi snapshotApi, Oauth2 oauth2) { this.snapshotApi = snapshotApi; this.oauth2 = oauth2; } @Override public void init(FilterConfig filterConfig) {} /** * The original request from git will not contain the Authorization header. * * So, for projects that need auth, we return 401. Git will swallow this * and prompt the user for user/pass, and then make a brand new request. * @param servletRequest * @param servletResponse * @param filterChain * @throws IOException * @throws ServletException */ @Override public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain ) throws IOException, ServletException { String project = Util.removeAllSuffixes( ((Request) servletRequest).getRequestURI().split("/")[1], ".git" ); // Reject v1 ids, the request will be rejected by v1 anyway if (project.matches("^[0-9]+[bcdfghjklmnpqrstvwxyz]{6,12}$") && !project.matches("^[0-9a-f]{24}$")) { Log.info("[{}] Request for v1 project, refusing", project); HttpServletResponse response = ((HttpServletResponse) servletResponse); response.setContentType("text/plain"); response.setStatus(404); PrintWriter w = response.getWriter(); List<String> l = Arrays.asList( "This project has not yet been moved into the new version", "of Overleaf. You will need to move it in order to continue working on it.", "Please visit this project online on www.overleaf.com to do this.", "", "You can find the new git remote url by selecting \"Git\" from", "the left sidebar in the project view.", "", "If this is unexpected, please contact us at support@overleaf.com, or", "see https: ); for (String line : l) { w.println(line); } w.close(); return; } Log.info("[{}] Checking if auth needed", project); GetDocRequest doc = new GetDocRequest(project); doc.request(); try { SnapshotApi.getResult( snapshotApi.getDoc(Optional.empty(), project)); } catch (ForbiddenException e) { Log.info("[{}] Auth needed", project); getAndInjectCredentials( project, servletRequest, servletResponse, filterChain ); return; } catch (MissingRepositoryException e) { handleMissingRepository(project, e, (HttpServletResponse) servletResponse); } Log.info("[{}] Auth not needed", project); filterChain.doFilter(servletRequest, servletResponse); } // TODO: this is ridiculous. 
Check for error cases first, then return/throw // TODO: also, use an Optional credential, since we treat it as optional private void getAndInjectCredentials( String projectName, ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain ) throws IOException, ServletException { HttpServletRequest request = (HttpServletRequest) servletRequest; HttpServletResponse response = (HttpServletResponse) servletResponse; String capturedUsername = "(unknown)"; String authHeader = request.getHeader("Authorization"); if (authHeader != null) { Log.info("[{}] Authorization header present"); StringTokenizer st = new StringTokenizer(authHeader); if (st.hasMoreTokens()) { String basic = st.nextToken(); if (basic.equalsIgnoreCase("Basic")) { try { String credentials = new String( Base64.decodeBase64(st.nextToken()), "UTF-8" ); String[] split = credentials.split(":",2); if (split.length == 2) { String username = split[0]; String password = split[1]; String accessToken = null; if (username.length() > 0) { capturedUsername = username; } try { accessToken = new PasswordTokenRequest( Instance.httpTransport, Instance.jsonFactory, new GenericUrl( oauth2.getOauth2Server() + "/oauth/token" ), username, password ).setClientAuthentication( new ClientParametersAuthentication( oauth2.getOauth2ClientID(), oauth2.getOauth2ClientSecret() ) ).execute().getAccessToken(); } catch (TokenResponseException e) { handleNeedAuthorization(projectName, capturedUsername, e.getStatusCode(), request, response); return; } final Credential cred = new Credential.Builder( BearerToken.authorizationHeaderAccessMethod( ) ).build(); cred.setAccessToken(accessToken); servletRequest.setAttribute(ATTRIBUTE_KEY, cred); filterChain.doFilter( servletRequest, servletResponse ); } else { handleNeedAuthorization(projectName, capturedUsername, 0, request, response); } } catch (UnsupportedEncodingException e) { throw new Error("Couldn't retrieve authentication", e); } } } } else { handleNeedAuthorization(projectName, capturedUsername, 0, request, response); } } @Override public void destroy() {} private void handleNeedAuthorization( String projectName, String userName, int statusCode, HttpServletRequest servletRequest, HttpServletResponse servletResponse ) throws IOException { Log.info( "[{}] Unauthorized, User '{}' status={} ip={}", projectName, userName, statusCode, servletRequest.getRemoteAddr() ); HttpServletResponse response = servletResponse; response.setContentType("text/plain"); response.setHeader("WWW-Authenticate", "Basic realm=\"Git Bridge\""); response.setStatus(401); PrintWriter w = response.getWriter(); w.println( "Please sign in using your email address and Overleaf password." ); w.println(); w.println( "*Note*: if you sign in to Overleaf using another provider, " + "such " ); w.println( "as Google or Twitter, you need to set a password " + "on your Overleaf " ); w.println( "account first. " + "Please see https: ); w.println("more information."); w.close(); } private void handleMissingRepository( String projectName, MissingRepositoryException e, HttpServletResponse response ) throws IOException { Log.info("[{}] Project missing.", projectName); response.setContentType("text/plain"); // git special-cases 404 to give "repository '%s' not found", // rather than displaying the raw status code. response.setStatus(404); PrintWriter w = response.getWriter(); for (String line : e.getDescriptionLines()) { w.println(line); } w.close(); } }
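getAndInjectCredentials above tokenizes the Authorization header, Base64-decodes the credentials, and splits them into username and password before exchanging them for an OAuth token. The following is a minimal sketch of just the header-decoding step; it uses java.util.Base64 rather than the Apache Commons codec used in the filter, and the credentials are made-up example values.

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BasicAuthDecodingSketch {
    public static void main(String[] args) {
        // A header value shaped like the one the filter receives: "Basic base64(user:pass)"
        String header = "Basic " + Base64.getEncoder()
                .encodeToString("alice@example.com:s3cret".getBytes(StandardCharsets.UTF_8));

        String encoded = header.substring("Basic ".length());
        String credentials = new String(Base64.getDecoder().decode(encoded), StandardCharsets.UTF_8);

        // Limit of 2 keeps any ':' characters inside the password intact, as in the filter.
        String[] split = credentials.split(":", 2);
        System.out.println(split[0]); // alice@example.com
        System.out.println(split[1]); // s3cret
    }
}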
package net.nanase.nanasetter.plugin;

import java.util.EnumSet;

/**
 * Permissions that a plugin can request for accessing Twitter
 * (REST reads, writes, streaming reads, direct messages, and so on).
 *
 * @author Tomona Nanase
 * @since Nanasetter 0.1
 */
public enum PluginPermission {
    READ_REST, WRITE, READ_STREAMING, EXTEND, CONFIGURE, ACCESS_DIRECT_MESSAGE, RISK;

    public final static EnumSet<PluginPermission> FULL = EnumSet.allOf(PluginPermission.class);

    public final static EnumSet<PluginPermission> NONE = EnumSet.noneOf(PluginPermission.class);
}
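The FULL and NONE sets above make it straightforward to check a requested permission set against an allowed one using plain EnumSet operations. A small hypothetical usage sketch, assuming the class below lives in the same package as PluginPermission:

package net.nanase.nanasetter.plugin;

import java.util.EnumSet;

public class PermissionCheckSketch {
    public static void main(String[] args) {
        // Hypothetical: the permissions a plugin declares that it needs.
        EnumSet<PluginPermission> requested =
                EnumSet.of(PluginPermission.READ_REST, PluginPermission.WRITE);

        System.out.println(PluginPermission.FULL.containsAll(requested)); // true: FULL holds every constant
        System.out.println(PluginPermission.NONE.containsAll(requested)); // false: NONE holds nothing
    }
}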
package owltools.sim2; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Vector; import org.apache.commons.io.IOUtils; import org.apache.commons.io.LineIterator; import org.apache.commons.math3.stat.descriptive.SummaryStatistics; import org.apache.log4j.Logger; import org.semanticweb.elk.owlapi.ElkReasonerFactory; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.reasoner.Node; import org.semanticweb.owlapi.reasoner.NodeSet; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.impl.OWLClassNode; import owltools.sim2.SimpleOwlSim.Direction; import owltools.sim2.SimpleOwlSim.Metric; import owltools.sim2.SimpleOwlSim.SimConfigurationProperty; import owltools.sim2.io.SimResultRenderer.AttributesSimScores; import owltools.sim2.scores.AttributePairScores; import owltools.sim2.scores.ElementPairScores; import owltools.sim2.scores.ScoreMatrix; import com.googlecode.javaewah.EWAHCompressedBitmap; import com.googlecode.javaewah.IntIterator; /** * Faster implementation of OwlSim * * Makes use of integers to index classes, and bitmaps to represent class sets. * * @author cjm * */ public class FastOwlSim extends AbstractOwlSim implements OwlSim { private Logger LOG = Logger.getLogger(FastOwlSim.class); public OWLClass debugClass; int debugClassMessages = 0; int debugClassMessages2 = 0; private Map<OWLNamedIndividual, Set<OWLClass>> elementToDirectAttributesMap; // CACHES // all direct and inferred classes for al individual // todo: resolve redundancy with inferredTypesMap private Map<OWLNamedIndividual, Set<Node<OWLClass>>> elementToInferredAttributesMap; private Map<OWLClass,Set<Node<OWLClass>>> superclassMap; // cache of Type(i)->Cs private Map<OWLNamedIndividual,Set<Node<OWLClass>>> inferredTypesMap; private Map<OWLClass,Set<Integer>> superclassIntMap; private Map<OWLNamedIndividual,Set<Integer>> inferredTypesIntMap; // BITMAP CACHES // for efficiency, we store sets of classes as bitmaps. // each class is assigned an integer value, resulting // it bitmaps of with |c| bits // given a class index, return superclasses, as a bitmap // e.g. if class C has index 7, then superclassBitmapIndex[7] returns a bitmap, // in which every "1" value is the index of a superclass of C private EWAHCompressedBitmap[] superclassBitmapIndex; private Map<OWLClass,EWAHCompressedBitmap> superclassBitmapMap; // given an individual, return superclasses as a bit map private Map<OWLNamedIndividual, EWAHCompressedBitmap> inferredTypesBitmapMap; // cache of Type(i)->BM Map<OWLClass,EWAHCompressedBitmap> properSuperclassBitmapMap; // CLASS INDICES // Each class is assigned a numeric index. // We can collapse sets of equivalent classes into a node, which // has an arbitrarily assigned representative element. // note that the following are for *all* classes in the ontology. // E.g. may include anatomy classes in a phenotype analysis. // to limit memory usage, first filter ontology to classes with members // (being sure to retain inferred axioms). // in practice this may not be necessary, as 2D arrays are only used // for classes with members. 
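// Worked example of the indexing scheme described above (indices and class
// names are hypothetical): if classIndex maps owl:Thing->0, A->1, B->2, C->3
// and C SubClassOf B SubClassOf A, then the superclass bitmap for C has bits
// {1, 2} set, and the common subsumers of two classes are found by AND-ing
// their bitmaps rather than intersecting HashSets of OWLClass objects.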
// maps a set of equivalent classes to a representative Map<Node<OWLClass>, OWLClass> representativeClassMap; // maps a class to a representative for that class (typically itself) Map<OWLClass, OWLClass> classTorepresentativeClassMap; // maps a class to a unique integer Map<OWLClass,Integer> classIndex; // maps a class index to a class OWLClass[] classArray; // all types used directly. // same as elementToDirectAttributesMap.values() private Set<OWLClass> allTypesDirect = null; // all Types used in Type(e) for all e in E. // note this excludes classes with no (inferred) members // same as elementToInferredAttributesMap.values() private Set<OWLClass> allTypesInferred = null; // cache of information content, by class private Map<OWLClass, Double> icCache = new HashMap<OWLClass,Double>(); // cache of information content, by class index Double[] icClassArray = null; // private Map<ClassIntPair, Set<Integer>> classPairLCSMap; // private Map<ClassIntPair, ScoreAttributeSetPair> classPairICLCSMap; // used for storing IC values as integers final int scaleFactor = 1000; //short[][] ciPairScaledScore; ScoreAttributeSetPair[][] testCache = null; boolean[][] ciPairIsCached = null; int[][] ciPairLCS = null; @Override public void dispose() { showTimings(); } // represents a pair of classes using their indices // NOTE; replaced by arrays @Deprecated private class ClassIntPair { int c; int d; public ClassIntPair(int c, int d) { super(); this.c = c; this.d = d; } @Override public int hashCode() { final int prime = 991; int result = 1; result = prime * result + c; result = prime * result + d; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ClassIntPair other = (ClassIntPair) obj; return c == other.c && d == other.d; } } /** * @param sourceOntology */ public FastOwlSim(OWLOntology sourceOntology) { reasoner = new ElkReasonerFactory().createReasoner(sourceOntology); } /** * @param reasoner */ public FastOwlSim(OWLReasoner reasoner) { this.reasoner = reasoner; } @Override public Set<OWLClass> getAllAttributeClasses() { return allTypesInferred; // note - only attributes used directly or indirectly } private int getClassIndex(OWLClass c) throws UnknownOWLClassException { Integer ix = classIndex.get(c); if (ix == null) { throw new UnknownOWLClassException(c); } return ix; } private int getClassIndex(Node<OWLClass> n) throws UnknownOWLClassException { OWLClass c = n.getRepresentativeElement(); return getClassIndex(c); } // not yet implemented: guaranteed to yield and indexed class private OWLClass getIndexedClass(Node<OWLClass> n) throws UnknownOWLClassException { if (representativeClassMap == null) representativeClassMap = new HashMap<Node<OWLClass>, OWLClass>(); else if (representativeClassMap.containsKey(n)) return representativeClassMap.get(n); for (OWLClass c : n.getEntities()) { if (classIndex.containsKey(c)) { representativeClassMap.put(n,c); return c; } } throw new UnknownOWLClassException(n.getRepresentativeElement()); } /* (non-Javadoc) * @see owltools.sim2.OwlSim#createElementAttributeMapFromOntology() */ @Override public void createElementAttributeMapFromOntology() throws UnknownOWLClassException { getReasoner().flush(); // cache E -> Type(E) elementToDirectAttributesMap = new HashMap<OWLNamedIndividual,Set<OWLClass>>(); elementToInferredAttributesMap = new HashMap<OWLNamedIndividual,Set<Node<OWLClass>>>(); allTypesDirect = new HashSet<OWLClass>(); allTypesInferred = new 
HashSet<OWLClass>(); Set<OWLNamedIndividual> inds = getSourceOntology().getIndividualsInSignature(true); for (OWLNamedIndividual e : inds) { // The attribute classes for an individual are the direct inferred // named types. We assume that grouping classes have already been // generated. NodeSet<OWLClass> nodesetDirect = getReasoner().getTypes(e, true); NodeSet<OWLClass> nodesetInferred = getReasoner().getTypes(e, false); allTypesDirect.addAll(nodesetDirect.getFlattened()); allTypesInferred.addAll(nodesetInferred.getFlattened()); elementToDirectAttributesMap.put(e, nodesetDirect.getFlattened()); elementToInferredAttributesMap.put(e, nodesetInferred.getNodes()); // TODO - remove deprecated classes elementToDirectAttributesMap.get(e).remove(owlThing()); elementToInferredAttributesMap.get(e).remove(owlThing()); } LOG.info("|TypesUsedDirectly|="+allTypesDirect.size()); LOG.info("|TypesUsedInferred|="+allTypesInferred.size()); Set<OWLClass> cset; //cset = getSourceOntology().getClassesInSignature(true); cset = allTypesInferred; LOG.info("|C|="+cset.size()); LOG.info("|I|="+inds.size()); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); for (OWLClass c : cset) { nodes.add(getReasoner().getEquivalentClasses(c)); } LOG.info("|N|="+nodes.size()); // TODO - use thisg // classes are collapsed into nodes. Create a map from node to // class, and a map of every class // to the representative element from its node representativeClassMap = new HashMap<Node<OWLClass>, OWLClass>(); classTorepresentativeClassMap = new HashMap<OWLClass, OWLClass>(); for (Node<OWLClass> n : nodes) { OWLClass c = n.getRepresentativeElement(); representativeClassMap.put(n, c); for (OWLClass c2 : n.getEntities()) { classTorepresentativeClassMap.put(c2, c); } } // Create a bidirectional index, class by number int n=0; classArray = (OWLClass[]) Array.newInstance(OWLClass.class, cset.size()+1); classIndex = new HashMap<OWLClass,Integer>(); // 0th element is always owlThing (root) classArray[n] = owlThing(); classIndex.put(owlThing(), n); n++; // TODO - investigate if ordering elements makes a difference; // e.g. if more frequent classes recieve lower bit indices this // may speed certain BitMap operations? for (OWLClass c : cset) { if (c.equals(owlThing())) continue; classArray[n] = c; classIndex.put(c, n); n++; } // ensure cached cset.add(owlThing()); for (OWLClass c : cset) { ancsCachedModifiable(c); ancsIntsCachedModifiable(c); ancsBitmapCachedModifiable(c); } for (OWLNamedIndividual e : inds) { // force cacheing ancsBitmapCachedModifiable(e); } // cache - this is for ALL classes in signature for (OWLClass c : cset) { getInformationContentForAttribute(c); getInformationContentForAttribute(classIndex.get(c)); } this.computeSystemStats(); } // TODO - change set to be (ordered) List, to avoid sorting each time private EWAHCompressedBitmap convertIntsToBitmap(Set<Integer> bits) { EWAHCompressedBitmap bm = new EWAHCompressedBitmap(); ArrayList<Integer> bitlist = new ArrayList<Integer>(bits); // necessary for EWAH API, otherwise silently fails Collections.sort(bitlist); for (Integer i : bitlist) { bm.set(i.intValue()); } return bm; } // cached proper superclasses (i.e. 
excludes equivalent classes) as BitMap private EWAHCompressedBitmap ancsProperBitmapCachedModifiable(OWLClass c) { if (properSuperclassBitmapMap != null && properSuperclassBitmapMap.containsKey(c)) { return properSuperclassBitmapMap.get(c); } Set<Integer> ancsInts = new HashSet<Integer>(); for (Node<OWLClass> anc : reasoner.getSuperClasses(c, false)) { // TODO - verify robust for non-Rep elements OWLClass ac = anc.getRepresentativeElement(); if (ac.equals(thing)) continue; ancsInts.add(classIndex.get(ac)); } EWAHCompressedBitmap bm = convertIntsToBitmap(ancsInts); if (properSuperclassBitmapMap == null) properSuperclassBitmapMap = new HashMap<OWLClass,EWAHCompressedBitmap>(); properSuperclassBitmapMap.put(c, bm); return bm; } private EWAHCompressedBitmap ancsBitmapCachedModifiable(OWLClass c) throws UnknownOWLClassException { if (superclassBitmapMap != null && superclassBitmapMap.containsKey(c)) { return superclassBitmapMap.get(c); } Set<Integer> caints = ancsIntsCachedModifiable(c); EWAHCompressedBitmap bm = convertIntsToBitmap(caints); if (superclassBitmapMap == null) superclassBitmapMap = new HashMap<OWLClass,EWAHCompressedBitmap>(); superclassBitmapMap.put(c, bm); return bm; } private EWAHCompressedBitmap ancsBitmapCachedModifiable(int cix) throws UnknownOWLClassException { if (superclassBitmapIndex != null && superclassBitmapIndex[cix] != null) { return superclassBitmapIndex[cix]; } Set<Integer> caints = ancsIntsCachedModifiable(classArray[cix]); EWAHCompressedBitmap bm = convertIntsToBitmap(caints); if (superclassBitmapIndex == null) superclassBitmapIndex = new EWAHCompressedBitmap[classArray.length]; superclassBitmapIndex[cix] = bm; return bm; } private EWAHCompressedBitmap ancsBitmapCachedModifiable(OWLNamedIndividual i) throws UnknownOWLClassException { if (inferredTypesBitmapMap != null && inferredTypesBitmapMap.containsKey(i)) { return inferredTypesBitmapMap.get(i); } Set<Integer> caints = ancsIntsCachedModifiable(i); EWAHCompressedBitmap bm = convertIntsToBitmap(caints); if (inferredTypesBitmapMap == null) inferredTypesBitmapMap = new HashMap<OWLNamedIndividual,EWAHCompressedBitmap>(); inferredTypesBitmapMap.put(i, bm); return bm; } private EWAHCompressedBitmap ancsBitmapCached(Set<OWLClass> cset) throws UnknownOWLClassException { Set<Integer> csetInts = new HashSet<Integer>(); for (OWLClass c : cset) { csetInts.add(classIndex.get(c)); } return convertIntsToBitmap(csetInts); } private Set<Integer> ancsIntsCachedModifiable(OWLClass c) throws UnknownOWLClassException { if (superclassIntMap != null && superclassIntMap.containsKey(c)) { return superclassIntMap.get(c); } Set<Integer> a = ancsInts(c); if (superclassIntMap == null) superclassIntMap = new HashMap<OWLClass,Set<Integer>>(); superclassIntMap.put(c, a); return a; } // TODO - make this an ordered list, for faster bitmaps private Set<Integer> ancsIntsCachedModifiable(OWLNamedIndividual i) throws UnknownOWLClassException { if (inferredTypesIntMap != null && inferredTypesIntMap.containsKey(i)) { return inferredTypesIntMap.get(i); } Set<Integer> a = ancsInts(i); if (inferredTypesIntMap == null) inferredTypesIntMap = new HashMap<OWLNamedIndividual,Set<Integer>>(); inferredTypesIntMap.put(i, a); return a; } // all ancestors as IntSet // note that for equivalence sets, the representative element is returned private Set<Integer> ancsInts(OWLClass c) throws UnknownOWLClassException { Set<Node<OWLClass>> ancs = ancsCachedModifiable(c); Set<Integer> ancsInts = new HashSet<Integer>(); OWLClass thing = owlThing(); for (Node<OWLClass> anc 
: ancs) { // TODO - verify robust for non-Rep elements OWLClass ac = anc.getRepresentativeElement(); if (ac.equals(thing)) continue; Integer ix = classIndex.get(ac); if (ix == null) { throw new UnknownOWLClassException(ac); } ancsInts.add(ix.intValue()); } return ancsInts; } private Set<Integer> ancsInts(OWLNamedIndividual i) throws UnknownOWLClassException { Set<Node<OWLClass>> ancs = ancsCachedModifiable(i); Set<Integer> ancsInts = new HashSet<Integer>(); OWLClass thing = owlThing(); for (Node<OWLClass> anc : ancs) { // TODO - verify robust for non-Rep elements OWLClass ac = anc.getRepresentativeElement(); if (ac.equals(thing)) continue; Integer ix = classIndex.get(ac); if (ix == null) { throw new UnknownOWLClassException(ac); } ancsInts.add(ix.intValue()); } return ancsInts; } private Set<Node<OWLClass>> ancsCachedModifiable(OWLClass c) { if (superclassMap != null && superclassMap.containsKey(c)) { return superclassMap.get(c); } Set<Node<OWLClass>> a = ancs(c); if (superclassMap == null) superclassMap = new HashMap<OWLClass,Set<Node<OWLClass>>>(); superclassMap.put(c, a); return a; } private Set<Node<OWLClass>> ancsCachedModifiable(OWLNamedIndividual i) { if (inferredTypesMap != null && inferredTypesMap.containsKey(i)) { return inferredTypesMap.get(i); } Set<Node<OWLClass>> a = ancs(i); if (inferredTypesMap == null) inferredTypesMap = new HashMap<OWLNamedIndividual,Set<Node<OWLClass>>>(); inferredTypesMap.put(i, a); return a; } private Set<Node<OWLClass>> ancs(OWLClass c) { NodeSet<OWLClass> ancs = getReasoner().getSuperClasses(c, false); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(ancs.getNodes()); nodes.add(getReasoner().getEquivalentClasses(c)); nodes.remove(owlThingNode()); return nodes; } private Set<Node<OWLClass>> ancs(OWLNamedIndividual i) { Set<Node<OWLClass>> nodes = getReasoner().getTypes(i, false).getNodes(); nodes.remove(owlThingNode()); return nodes; } @Override public Set<OWLClass> getAttributesForElement(OWLNamedIndividual e) throws UnknownOWLClassException { if (elementToDirectAttributesMap == null) createElementAttributeMapFromOntology(); return new HashSet<OWLClass>(elementToDirectAttributesMap.get(e)); } @Override public Set<OWLNamedIndividual> getElementsForAttribute(OWLClass c) throws UnknownOWLClassException { if (!this.getAllAttributeClasses().contains(c)) { throw new UnknownOWLClassException(c); } return getReasoner().getInstances(c, false).getFlattened(); } @Override public int getNumElementsForAttribute(OWLClass c) { try { return this.getElementsForAttribute(c).size(); } catch (UnknownOWLClassException e) { return 0; } } @Override public Set<OWLNamedIndividual> getAllElements() { // Note: will only return elements that have >=1 attributes return elementToDirectAttributesMap.keySet(); } @Override public Double getInformationContentForAttribute(OWLClass c) throws UnknownOWLClassException { if (icCache.containsKey(c)) return icCache.get(c); int freq = getNumElementsForAttribute(c); Double ic = null ; if (freq > 0) { ic = -Math.log(((double) (freq) / getCorpusSize())) / Math.log(2); // experimental: use depth in graph as tie-breaker. 
// where SF is large enough to make overall increase negligible int numAncs = ancsBitmapCachedModifiable(c).cardinality(); double bump = numAncs / (double) scaleFactor; if (bump > 0.2) { LOG.warn("Bump = "+bump+" for "+c); } ic += bump; } icCache.put(c, ic); return ic; } // gets IC by class index, cacheing if required Double getInformationContentForAttribute(int cix) throws UnknownOWLClassException { // check if present in cache; if so, use cached value if (icClassArray != null && icClassArray[cix] != null) { return icClassArray[cix]; } // not cached - retrieve IC using the class OWLClass c = classArray[cix]; Double ic = getInformationContentForAttribute(c); if (debugClass != null && c.equals(debugClass)) { LOG.info("DEBUG "+c+" IX:"+cix+" IC= "+ic); } // place results in cache, creating a new cache if none exists if (icClassArray == null) { icClassArray = new Double[classArray.length]; } icClassArray[cix] = ic; return ic; } @Override public Set<Node<OWLClass>> getInferredAttributes(OWLNamedIndividual a) { return new HashSet<Node<OWLClass>>(elementToInferredAttributesMap.get(a)); } @Override public Set<Node<OWLClass>> getNamedReflexiveSubsumers(OWLClass a) { return ancs(a); } @Override public Set<Node<OWLClass>> getNamedCommonSubsumers(OWLClass c, OWLClass d) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(c); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(d); EWAHCompressedBitmap cad = bmc.and(bmd); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); for (int ix : cad.toArray()) { OWLClassNode node = new OWLClassNode(classArray[ix]); nodes.add(node); } return nodes; } private Set<Node<OWLClass>> getNamedCommonSubsumers(int cix, int dix) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(cix); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(dix); EWAHCompressedBitmap cad = bmc.and(bmd); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); for (int ix : cad.toArray()) { OWLClassNode node = new OWLClassNode(classArray[ix]); nodes.add(node); } return nodes; } private EWAHCompressedBitmap getNamedCommonSubsumersAsBitmap(int cix, int dix) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(cix); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(dix); EWAHCompressedBitmap cad = bmc.and(bmd); return cad; } //@Override public Set<Node<OWLClass>> getNamedUnionSubsumers(OWLClass c, OWLClass d) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(c); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(d); EWAHCompressedBitmap cud = bmc.or(bmd); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); for (int ix : cud.toArray()) { OWLClassNode node = new OWLClassNode(classArray[ix]); nodes.add(node); } return nodes; } @Override public int getNamedCommonSubsumersCount(OWLClass c, OWLClass d) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(c); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(d); return bmc.andCardinality(bmd); } @Override public Set<Node<OWLClass>> getNamedCommonSubsumers(OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(i); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(j); EWAHCompressedBitmap cad = bmc.and(bmd); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); for (int ix : cad.toArray()) { OWLClassNode node = new OWLClassNode(classArray[ix]); nodes.add(node); } 
return nodes; } private Set<Node<OWLClass>> getNamedUnionSubsumers(OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(i); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(j); EWAHCompressedBitmap cad = bmc.or(bmd); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); for (int ix : cad.toArray()) { OWLClassNode node = new OWLClassNode(classArray[ix]); nodes.add(node); } return nodes; } @Override public Set<Node<OWLClass>> getNamedLowestCommonSubsumers(OWLClass c, OWLClass d) throws UnknownOWLClassException { EWAHCompressedBitmap cad = getNamedLowestCommonSubsumersAsBitmap(c, d); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); // TODO - optimize this & ensure all elements of an equivalence set are included for (int ix : cad.toArray()) { OWLClassNode node = new OWLClassNode(classArray[ix]); nodes.add(node); } return nodes; } private Set<Node<OWLClass>> getNamedLowestCommonSubsumers(int cix, int dix) throws UnknownOWLClassException { EWAHCompressedBitmap cad = getNamedLowestCommonSubsumersAsBitmap(cix, dix); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); // TODO - optimize this & ensure all elements of an equivalence set are included for (int ix : cad.toArray()) { OWLClassNode node = new OWLClassNode(classArray[ix]); nodes.add(node); } return nodes; } // fast bitmap implementation of LCS private EWAHCompressedBitmap getNamedLowestCommonSubsumersAsBitmap(OWLClass c, OWLClass d) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(c); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(d); EWAHCompressedBitmap cad = bmc.and(bmd); int[] csInts = cad.toArray(); for (int ix : csInts) { cad = cad.andNot(ancsProperBitmapCachedModifiable(classArray[ix])); } return cad; } private EWAHCompressedBitmap getNamedLowestCommonSubsumersAsBitmap(int cix, int dix) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(cix); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(dix); EWAHCompressedBitmap cad = bmc.and(bmd); int[] csInts = cad.toArray(); for (int ix : csInts) { cad = cad.andNot(ancsProperBitmapCachedModifiable(classArray[ix])); } return cad; } @Deprecated private Set<Node<OWLClass>> getNamedLowestCommonSubsumersNaive(OWLClass a, OWLClass b) throws UnknownOWLClassException { // currently no need to cache this, as only called from // getLowestCommonSubsumerIC, which does its own caching Set<Node<OWLClass>> commonSubsumerNodes = getNamedCommonSubsumers(a, b); Set<Node<OWLClass>> rNodes = new HashSet<Node<OWLClass>>(); // remove redundant for (Node<OWLClass> node : commonSubsumerNodes) { rNodes.addAll(getReasoner().getSuperClasses( node.getRepresentativeElement(), false).getNodes()); } commonSubsumerNodes.removeAll(rNodes); return commonSubsumerNodes; } @Override public double getAttributeSimilarity(OWLClass c, OWLClass d, Metric metric) throws UnknownOWLClassException { if (metric.equals(Metric.JACCARD)) { return getAttributeJaccardSimilarity(c, d); } else if (metric.equals(Metric.OVERLAP)) { return getNamedCommonSubsumers(c, d).size(); } else if (metric.equals(Metric.NORMALIZED_OVERLAP)) { return getNamedCommonSubsumers(c, d).size() / Math.min(getNamedReflexiveSubsumers(c).size(), getNamedReflexiveSubsumers(d).size()); } else if (metric.equals(Metric.DICE)) { // TODO return -1; } else { return 0; } } @Override public AttributePairScores getPairwiseSimilarity(OWLClass c, OWLClass d) throws UnknownOWLClassException { 
AttributePairScores s = new AttributePairScores(c,d); EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(c); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(d); s.simjScore = bmc.andCardinality(bmd) / (double) bmc.orCardinality(bmd); s.asymmetricSimjScore = bmc.andCardinality(bmd) / (double) bmd.cardinality(); s.inverseAsymmetricSimjScore = bmc.andCardinality(bmd) / (double) bmc.cardinality(); ScoreAttributeSetPair sap = getLowestCommonSubsumerWithIC(c, d); s.lcsIC = sap.score; s.lcsSet = sap.attributeClassSet; return s; } @Override public double getAttributeJaccardSimilarity(OWLClass c, OWLClass d) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(c); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(d); return bmc.andCardinality(bmd) / (double) bmc.orCardinality(bmd); } @Override public int getAttributeJaccardSimilarityAsPercent(OWLClass c, OWLClass d) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(c); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(d); return (bmc.andCardinality(bmd) * 100) / bmc.orCardinality(bmd); } @Override public double getElementJaccardSimilarity(OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(i); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(j); return bmc.andCardinality(bmd) / (double) bmc.orCardinality(bmd); } @Override public int getElementJaccardSimilarityAsPercent(OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(i); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(j); return (bmc.andCardinality(bmd) * 100) / bmc.orCardinality(bmd); } @Override public double getAsymmetricAttributeJaccardSimilarity(OWLClass c, OWLClass d) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(c); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(d); return bmc.andCardinality(bmd) / (double) bmd.cardinality(); } //@Override public double getAsymmetricElementJaccardSimilarity(OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(i); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(j); return bmc.andCardinality(bmd) / (double) bmd.cardinality(); } //@Override public int getAsymmetricElementJaccardSimilarityAsPercent(OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(i); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(j); return (bmc.andCardinality(bmd) * 100) / bmd.cardinality(); } // SimGIC // TODO - optimize @Override public double getElementGraphInformationContentSimilarity( OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { // TODO - optimize long t = System.currentTimeMillis(); EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(i); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(j); EWAHCompressedBitmap cad = bmc.and(bmd); EWAHCompressedBitmap cud = bmc.or(bmd); //Set<Node<OWLClass>> ci = getNamedCommonSubsumers(i, j); //Set<Node<OWLClass>> cu = getNamedUnionSubsumers(i, j); double sumICboth = 0; double sumICunion = 0; // faster than translating to integer list IntIterator it = cud.intIterator(); while (it.hasNext()) { int x = it.next(); double ic = getInformationContentForAttribute(x); // TODO - we can avoid doing this twice by using xor in the bitmap sumICunion 
+= ic; if (cad.get(x)) { sumICboth += ic; } } totalTimeGIC += tdelta(t); this.totalCallsGIC++; return sumICboth / sumICunion; } // TODO - optimize @Override public double getAttributeGraphInformationContentSimilarity( OWLClass c, OWLClass d) throws UnknownOWLClassException { return getAttributeGraphInformationContentSimilarity(classIndex.get(c), classIndex.get(d)); } private double getAttributeGraphInformationContentSimilarity( int cix, int dix) throws UnknownOWLClassException { long t = System.currentTimeMillis(); EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(cix); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(dix); EWAHCompressedBitmap cad = bmc.and(bmd); EWAHCompressedBitmap cud = bmc.or(bmd); double sumICboth = 0; double sumICunion = 0; // faster than translating to integer list IntIterator it = cud.intIterator(); while (it.hasNext()) { int x = it.next(); double ic = getInformationContentForAttribute(x); // TODO - we can avoid doing this twice by using xor in the bitmap sumICunion += ic; if (cad.get(x)) { sumICboth += ic; } } totalTimeGIC += tdelta(t); this.totalCallsGIC++; return sumICboth / sumICunion; } //@Override public double getAsymmetricElementGraphInformationContentSimilarity( OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { // TODO - optimize Set<Node<OWLClass>> ci = getNamedCommonSubsumers(i, j); Set<Node<OWLClass>> cu = this.getInferredAttributes(j); double sumICboth = 0; double sumICunion = 0; for (Node<OWLClass> c : cu) { // TODO - we can avoid doing this twice by using xor in the bitmap sumICunion += getInformationContentForAttribute(c .getRepresentativeElement()); if (ci.contains(c)) { sumICboth += getInformationContentForAttribute(c .getRepresentativeElement()); } } return sumICboth / sumICunion; } // NOTE: current implementation will also return redundant classes if // they rank the same - use groupwise if true maxIC required @Override public ScoreAttributeSetPair getSimilarityMaxIC(OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { Set<Node<OWLClass>> atts = getNamedCommonSubsumers(i,j); ScoreAttributeSetPair best = new ScoreAttributeSetPair(0.0); for (Node<OWLClass> n : atts) { OWLClass c = n.getRepresentativeElement(); Double ic = getInformationContentForAttribute(c); if (Math.abs(ic - best.score) < 0.000001) { // tie for best attribute best.addAttributeClass(c); } if (ic > best.score) { best = new ScoreAttributeSetPair(ic, c); } } return best; } @Override public ScoreAttributeSetPair getSimilarityBestMatchAverageAsym( OWLNamedIndividual i, OWLNamedIndividual j) { // TODO Auto-generated method stub return null; } @Override public ScoreAttributeSetPair getSimilarityBestMatchAverageAsym( OWLNamedIndividual i, OWLNamedIndividual j, Metric metric) { // TODO Auto-generated method stub return null; } @Override public ScoreAttributeSetPair getSimilarityBestMatchAverage( OWLNamedIndividual i, OWLNamedIndividual j, Metric metric, Direction dir) { // TODO Auto-generated method stub return null; } @Override public ElementPairScores getGroupwiseSimilarity(OWLNamedIndividual i, OWLNamedIndividual j) throws UnknownOWLClassException { ElementPairScores s = new ElementPairScores(i,j); populateSimilarityMatrix(i, j, s); s.simGIC = getElementGraphInformationContentSimilarity(i, j); s.combinedScore = (int) (s.simGIC * 100); // default - TODO - combined return s; } protected void populateSimilarityMatrix( OWLNamedIndividual i, OWLNamedIndividual j, ElementPairScores ijscores) throws UnknownOWLClassException { /* 
EWAHCompressedBitmap bmc = ancsBitmapCachedModifiable(i); EWAHCompressedBitmap bmd = ancsBitmapCachedModifiable(j); EWAHCompressedBitmap cad = bmc.and(bmd); EWAHCompressedBitmap cud = bmc.or(bmd); Set<Node<OWLClass>> nodes = new HashSet<Node<OWLClass>>(); for (int ix : cad.toArray()) { OWLClassNode node = new OWLClassNode(classArray[ix]); nodes.add(node); } */ /* ijscores.simGIC = getElementGraphInformationContentSimilarity(i, j); ijscores.asymmetricSimGIC = getAsymmetricElementGraphInformationContentSimilarity(i, j); ijscores.inverseAsymmetricSimGIC = getAsymmetricElementGraphInformationContentSimilarity(j, i); */ ijscores.simjScore = getElementJaccardSimilarity(i, j); ijscores.asymmetricSimjScore = getAsymmetricElementJaccardSimilarity(i, j); ijscores.inverseAsymmetricSimjScore = getAsymmetricElementJaccardSimilarity(j, i); Vector<OWLClass> cs = new Vector<OWLClass>(getAttributesForElement(i)); Vector<OWLClass> ds = new Vector<OWLClass>(getAttributesForElement(j)); populateSimilarityMatrix(cs, ds, ijscores); } private void populateSimilarityMatrix(Vector<OWLClass> cs, Vector<OWLClass> ds, ElementPairScores ijscores) throws UnknownOWLClassException { ScoreAttributeSetPair bestsap = null; int csize = cs.size(); int dsize = ds.size(); ijscores.cs = cs; ijscores.ds = ds; double total = 0.0; double[][] scoreMatrix = new double[csize][dsize]; ScoreAttributeSetPair[][] sapMatrix = new ScoreAttributeSetPair[csize][dsize]; ScoreAttributeSetPair[] bestSapForC = new ScoreAttributeSetPair[csize]; ScoreAttributeSetPair[] bestSapForD = new ScoreAttributeSetPair[dsize]; double bestMatchCTotal = 0; double bestMatchDTotal = 0; // populate matrix for (int cx=0; cx<csize; cx++) { OWLClass c = cs.elementAt(cx); int cix = classIndex.get(c); ScoreAttributeSetPair bestcsap = null; for (int dx=0; dx<dsize; dx++) { OWLClass d = ds.elementAt(dx); int dix = classIndex.get(d); ScoreAttributeSetPair sap = getLowestCommonSubsumerWithIC(cix,dix); if (sap == null) continue; sapMatrix[cx][dx] = sap; double score = sap.score; total += score; if (bestsap == null || score >= bestsap.score) { bestsap = sap; } if (bestcsap == null || score >= bestcsap.score) { bestcsap = sap; } } bestSapForC[cx] = bestcsap; if (bestcsap != null) bestMatchCTotal += bestcsap.score; } // retrieve best values for each D for (int dx=0; dx<dsize; dx++) { ScoreAttributeSetPair bestdsap = null; for (int cx=0; cx<csize; cx++) { ScoreAttributeSetPair sap = sapMatrix[cx][dx]; if (sap != null && (bestdsap == null || sap.score >= bestdsap.score)) { bestdsap = sap; } } if (bestdsap != null) { bestSapForD[dx] = bestdsap; bestMatchDTotal += bestdsap.score; } } // TODO - use these ijscores.avgIC = total / (csize * dsize); ijscores.bmaAsymIC = bestMatchCTotal / (double)csize; ijscores.bmaInverseAsymIC = bestMatchDTotal / (double)dsize; ijscores.bmaSymIC = (bestMatchCTotal + bestMatchDTotal) / (double)(csize+dsize); ijscores.iclcsMatrix = new ScoreMatrix<ScoreAttributeSetPair>(); ijscores.iclcsMatrix.matrix = sapMatrix; ijscores.iclcsMatrix.bestForC = bestSapForC; ijscores.iclcsMatrix.bestForD = bestSapForD; if (bestsap != null) { ijscores.maxIC = bestsap.score; ijscores.maxICwitness = bestsap.attributeClassSet; } else { //LOG.warn("No best S.A.P. 
for "+ijscores); ijscores.maxIC = 0.0; ijscores.maxICwitness = null; } } // uses integer 2D array cache private ScoreAttributeSetPair getLowestCommonSubsumerWithIC(int cix, int dix) throws UnknownOWLClassException { return getLowestCommonSubsumerWithIC(cix, dix, null); } private ScoreAttributeSetPair getLowestCommonSubsumerWithIC(int cix, int dix, Double thresh) throws UnknownOWLClassException { // if cache is disabled altogether, head straight for the implementation if (isDisableLCSCache) { return getLowestCommonSubsumerWithICNoCache(cix, dix); } // if the cache does not exist, initialize it if (ciPairIsCached == null) { // Estimates: 350mb for MP // 5.4Gb for 30k classes //int size = this.getAllAttributeClasses().size(); int size = classArray.length; LOG.info("Creating 2D cache of "+size+" * "+size); ciPairIsCached = new boolean[size][size]; ciPairLCS = new int[size][size]; LOG.info("Created LCS cache"+size+" * "+size); //ciPairScaledScore = new short[size][size]; //LOG.info("Created score cache cache"+size+" * "+size); } // if either (1) an entry exists in the cache or (2) we are // building the cache from fresh, then do a lookup if (!isNoLookupForLCSCache && ciPairIsCached[cix][dix]) { // TODO null vs 0 int lcsix = ciPairLCS[cix][dix]; return new ScoreAttributeSetPair(icClassArray[lcsix], classArray[lcsix]); } if (!isNoLookupForLCSCache && isLCSCacheFullyPopulated) { // true if a pre-generated cache has been loaded and there is no entry for this pair; // a cache excludes certain pairs if they are below threshold (see below) return null; } // use base method and cache results. // if score is below threshold, then nothing is returned or cached. // This reduces the size of the cache. ScoreAttributeSetPair sap = getLowestCommonSubsumerWithICNoCache(cix, dix); if (debugClass != null && sap.attributeClassSet != null && debugClassMessages < 100 && sap.attributeClassSet.contains(debugClass)) { LOG.info("DEBUG1 "+debugClass+" ix="+ classIndex.get(debugClass)+" "+sap.score); debugClassMessages++; } if (thresh != null && sap.score < thresh) { return null; } ciPairIsCached[cix][dix] = true; OWLClass lcsCls = null; if (sap.attributeClassSet != null && !sap.attributeClassSet.isEmpty()) { // we take an arbitrary member lcsCls = sap.attributeClassSet.iterator().next(); int lcsix = classIndex.get(lcsCls); ciPairLCS[cix][dix] = lcsix; if (debugClass != null && lcsCls.equals(debugClass) && debugClassMessages2 < 100) { LOG.info("DEBUG2 "+lcsix+" " +sap.attributeClassSet+" sap.score="+sap.score); debugClassMessages2++; } //icClassArray[lcsix] = sap.score; } else { //TODO - remove obsoletes LOG.warn("uh oh"+classArray[cix] + " "+ classArray[dix]+" "+sap.attributeClassSet); } return sap; } @Override public ScoreAttributeSetPair getLowestCommonSubsumerWithIC(OWLClass c, OWLClass d) throws UnknownOWLClassException { return getLowestCommonSubsumerWithIC(classIndex.get(c), classIndex.get(d)); } @Override public ScoreAttributeSetPair getLowestCommonSubsumerWithIC(OWLClass c, OWLClass d, Double thresh) throws UnknownOWLClassException { return getLowestCommonSubsumerWithIC(classIndex.get(c), classIndex.get(d), thresh); } private ScoreAttributeSetPair getLowestCommonSubsumerWithICNoCache(int cix, int dix) throws UnknownOWLClassException { long t = System.currentTimeMillis(); EWAHCompressedBitmap cad = getNamedLowestCommonSubsumersAsBitmap(cix, dix); Set<OWLClass> lcsClasses = new HashSet<OWLClass>(); double maxScore = 0.0; for (int ix : cad.toArray()) { double score = getInformationContentForAttribute(ix); double 
sdiff = score - maxScore; if (sdiff >= 0) { if (sdiff > 0.01) { lcsClasses= new HashSet<OWLClass>(Collections.singleton(classArray[ix])); maxScore = score; } else { lcsClasses.add(classArray[ix]); maxScore = score; } } // if (score == maxScore) { // lcsClasses.add(classArray[ix]); // maxScore = score; // else if (score >= maxScore) { // lcsClasses= new HashSet<OWLClass>(Collections.singleton(classArray[ix])); // maxScore = score; } if (lcsClasses.size() == 0) { // TODO - remove obsoletes //LOG.warn("Hmmmm "+c+" "+d+" "+lcs); } totalTimeLCSIC += tdelta(t); this.totalCallsLCSIC++; return new ScoreAttributeSetPair(maxScore, lcsClasses); } public List<ElementPairScores> findMatches(Set<OWLClass> atts, String targetIdSpace) throws UnknownOWLClassException { Set<OWLClass> csetFilteredDirect = new HashSet<OWLClass>(); // direct Set<OWLClass> cset = new HashSet<OWLClass>(); // closure Set<OWLClass> redundant = new HashSet<OWLClass>(); // closure boolean isIgnoreUnknownClasses = false; List<ElementPairScores> scoreSets = new ArrayList<ElementPairScores>(); int minSimJPct = (int) (getPropertyAsDouble(SimConfigurationProperty.minimumSimJ, 0.05) * 100); double minMaxIC = getPropertyAsDouble(SimConfigurationProperty.minimumMaxIC, 2.5); // FIND CLOSURE for (OWLClass c : atts) { if (!this.getAllAttributeClasses().contains(c)) { if (isIgnoreUnknownClasses) continue; throw new UnknownOWLClassException(c); } csetFilteredDirect.add(c); for (Node<OWLClass> n : getNamedReflexiveSubsumers(c)) { cset.add(n.getRepresentativeElement()); } for (Node<OWLClass> n :getNamedSubsumers(c)) { redundant.addAll(n.getEntities()); } } csetFilteredDirect.removeAll(redundant); Vector csetV = new Vector<OWLClass>(atts.size()); for (OWLClass c : csetFilteredDirect) { csetV.add(c); } // benchmarking long tSimJ = 0; int nSimJ = 0; long tMaxIC = 0; int nMaxIC = 0; long tSimGIC = 0; int nSimGIC = 0; long tBMA = 0; int nBMA = 0; long startTime = System.currentTimeMillis(); // for calculation of phenodigm score double maxMaxIC = 0.0; double maxBMA = 0.0; EWAHCompressedBitmap searchProfileBM = ancsBitmapCached(cset); for (OWLNamedIndividual j : getAllElements()) { if (targetIdSpace != null && !j.getIRI().toString().contains("/"+targetIdSpace+"_")) { continue; } long t = System.currentTimeMillis(); //LOG.info(" Comparing with:"+j); // SIMJ EWAHCompressedBitmap jAttsBM = ancsBitmapCachedModifiable(j); int cadSize = searchProfileBM.andCardinality(jAttsBM); int cudSize = searchProfileBM.orCardinality(jAttsBM); int simJPct = (cadSize * 100) / cudSize; nSimJ++; tSimJ += tdelta(t); if (nSimJ % 100 == 0) { LOG.info("tSimJ = "+tSimJ +" / "+nSimJ); LOG.info("tMaxIC = "+tMaxIC +" / "+nMaxIC); LOG.info("tSimGIC = "+tSimGIC +" / "+nSimGIC); LOG.info("tBMA = "+tBMA +" / "+nBMA); } if (simJPct < minSimJPct) { //LOG.info("simJ pct too low : "+simJPct+" = "+cadSize+" / "+cudSize); continue; } ElementPairScores s = new ElementPairScores(null, j); s.simjScore = simJPct / (double) 100; EWAHCompressedBitmap cad = searchProfileBM.and(jAttsBM); // COMMON SUBSUMERS (ALL) Set<OWLClass> csSet = new HashSet<OWLClass>(); for (int ix : cad.toArray()) { csSet.add(classArray[ix]); } // MAXIC // TODO - evaluate if this is optimal; // MaxIC falls out of BMA calculation, but it may be useful // to calculate here to test if more expensive AxA is required t = System.currentTimeMillis(); ScoreAttributeSetPair best = new ScoreAttributeSetPair(0.0); double icBest = 0; double icSumCAD = 0; for (int ix : cad.toArray()) { Double ic = getInformationContentForAttribute(ix); 
//OWLClass c = n.getRepresentativeElement(); if (ic > icBest) { icBest = ic; } icSumCAD += ic; } tMaxIC += tdelta(t); nMaxIC++; if (icBest > maxMaxIC) { maxMaxIC = icBest; } if (icBest < minMaxIC) { //LOG.info("maxIC too low : "+icBest); continue; } s.maxIC = icBest; //LOG.info("computing simGIC"); // SIMGIC t = System.currentTimeMillis(); EWAHCompressedBitmap cud = searchProfileBM.or(jAttsBM); double icSumCUD = 0; for (int ix : cud.toArray()) { Double ic = getInformationContentForAttribute(ix); icSumCUD += ic; } s.simGIC = icSumCAD / icSumCUD; tSimGIC += tdelta(t); nSimGIC++; // BEST MATCHES t = System.currentTimeMillis(); Vector dsetV = new Vector<OWLClass>(atts.size()); for (OWLClass d : this.getAttributesForElement(j)) { dsetV.add(d); } populateSimilarityMatrix(csetV, dsetV, s); if (s.bmaSymIC > maxBMA) { maxBMA = s.bmaAsymIC; } tBMA += tdelta(t); nBMA++; scoreSets.add(s); } // calculate combined/phenodigm score // TODO - calculateCombinedScores(scoreSets, maxMaxIC, maxBMA); LOG.info("tSimJ = "+tSimJ +" / "+nSimJ); LOG.info("tSearch = "+tdelta(startTime) +" / "+nSimJ); LOG.info("Sorting "+scoreSets.size()+" matches"); Collections.sort(scoreSets); return scoreSets; } public void calculateCombinedScores(List<ElementPairScores> scoreSets, double maxMaxIC, double maxBMA) { int maxMaxIC100 = (int)(maxMaxIC * 100); int maxBMA100 = (int)(maxBMA * 100); LOG.info("Calculating combinedScores - upper bounds = "+maxMaxIC100+ " " + maxBMA100); // TODO - optimize this by using % scores as inputs for (ElementPairScores s : scoreSets) { int pctMaxScore = ((int) (s.maxIC * 10000)) / maxMaxIC100; int pctAvgScore = ((int) (s.bmaSymIC * 10000)) / maxMaxIC100; s.combinedScore = (pctMaxScore + pctAvgScore)/2; } } /** * * @param c * @param ds * @return scores * @throws UnknownOWLClassException */ // TODO - rewrite @Override public List<AttributesSimScores> compareAllAttributes(OWLClass c, Set<OWLClass> ds) throws UnknownOWLClassException { List<AttributesSimScores> scoresets = new ArrayList<AttributesSimScores>(); EWAHCompressedBitmap bmc = this.ancsBitmapCachedModifiable(c); int cSize = bmc.cardinality(); Set<AttributesSimScores> best = new HashSet<AttributesSimScores>(); Double bestScore = null; for (OWLClass d : ds) { EWAHCompressedBitmap bmd = this.ancsBitmapCachedModifiable(d); int dSize = bmd.cardinality(); int cadSize = bmc.andCardinality(bmd); int cudSize = bmc.orCardinality(bmd); AttributesSimScores s = new AttributesSimScores(c,d); s.simJScore = cadSize / (double)cudSize; s.AsymSimJScore = cadSize / (double) dSize; //ClassExpressionPair pair = new ClassExpressionPair(c, d); //ScoreAttributePair lcs = getLowestCommonSubsumerIC(pair, cad, null); //s.lcsScore = lcs; scoresets.add(s); if (bestScore == null) { best.add(s); bestScore = s.simJScore; } else if (bestScore == s.simJScore) { best.add(s); } else if (s.simJScore > bestScore) { bestScore = s.simJScore; best = new HashSet<AttributesSimScores>(Collections.singleton(s)); } } for (AttributesSimScores s : best) { s.isBestMatch = true; } return scoresets; } // UTIL OWLClass thing = null; Node<OWLClass> thingNode = null; /** * Convenience method. Warning: method name may change * @return owl:Thing (root class) */ public OWLClass owlThing() { if (thing == null) thing = getSourceOntology().getOWLOntologyManager().getOWLDataFactory().getOWLThing(); return thing; } /** * Convenience method. 
Warning: method name may change * @return root class (owl:Thing and anything equivalent) */ public Node<OWLClass> owlThingNode() { if (thingNode == null) thingNode = getReasoner().getTopClassNode(); return thingNode; } /* (non-Javadoc) * @see owltools.sim2.AbstractOwlSim#saveState(java.lang.String) */ public void saveState(String fileName) throws IOException { FileOutputStream fos = new FileOutputStream(fileName); IOUtils.write("classArray:"+classArray.length+"\n", fos); for (int i=0; i<classArray.length; i++) { IOUtils.write("classArray "+i+" "+classArray[i]+"\n", fos); } IOUtils.write("classIndex:"+classIndex.keySet().size()+"\n", fos); for (OWLClass c : classIndex.keySet()) { IOUtils.write("classIndex "+c+" "+classIndex.get(c)+"\n", fos); } IOUtils.write("attributeClass-index: "+getAllAttributeClasses().size()+"\n", fos); for (OWLClass c : this.getAllAttributeClasses()) { IOUtils.write("attributeClass-index "+c+" "+classIndex.get(c)+"\n", fos); } IOUtils.write("icClassArray:\n", fos); for (int i=0; i<icClassArray.length; i++) { IOUtils.write("icClassArray "+i+" "+icClassArray[i]+"\n", fos); } IOUtils.write("icCache:\n", fos); for (OWLClass c : icCache.keySet()) { IOUtils.write("icCache "+c+" "+icCache.get(c)+"\n", fos); } IOUtils.write("classToRepresentativeClassMap:\n", fos); for (OWLClass c : classTorepresentativeClassMap.keySet()) { IOUtils.write("classTorepresentativeClassMap "+c+" "+classTorepresentativeClassMap.get(c)+"\n", fos); } IOUtils.write("representativeClassMap:\n", fos); for (Node<OWLClass> n : representativeClassMap.keySet()) { IOUtils.write("representativeClassMap "+n+" "+representativeClassMap.get(n)+"\n", fos); } fos.close(); } @Override public void saveLCSCache(String fileName, Double thresholdIC) throws IOException { FileOutputStream fos = new FileOutputStream(fileName); // iterate through all classes fetching their ICs, using the class index. // this has the side effect of ensuring that icClassArray is populated. 
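// The cache file written below holds one tab-separated line per cached pair:
// shortId(C), shortId(D), IC of their LCS, shortId(LCS). loadLCSCache() reads
// back the same four-column format.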
int n=0; for (OWLClass c : this.getAllAttributeClasses()) { try { int cix = classIndex.get(c); Double ic = getInformationContentForAttribute(cix); Double icCheck = icClassArray[cix]; // for debugging Double icBase = getInformationContentForAttribute(c); LOG.info("Class "+c+" has ix="+cix+" IC="+ic+ " IC(check)="+icCheck+" IC(base)="+icBase+ " C(check)="+getShortId(classArray[cix])); n++; } catch (UnknownOWLClassException e) { LOG.error("cannot find IC values for class "+c, e); throw new IOException("unknown: "+c); } } // We assume ciPairIsCached is populated for ( int cix = 0; cix< ciPairIsCached.length; cix++) { boolean[] arr = ciPairIsCached[cix]; OWLClass c = classArray[cix]; for ( int dix = 0; dix< arr.length; dix++) { if (arr[dix]) { //double s = ciPairScaledScore[cix][dix] / (double) scaleFactor; int lcsix = ciPairLCS[cix][dix]; Double s = icClassArray[lcsix]; if (s == null || s.isNaN() || s.isInfinite()) { throw new IOException("No IC for "+classArray[lcsix]); } if (thresholdIC == null || s.doubleValue() >= thresholdIC) { OWLClass d = classArray[dix]; OWLClass lcs = classArray[lcsix]; IOUtils.write(getShortId((OWLClass) c) +"\t" + getShortId((OWLClass) d) + "\t" + s + "\t" + getShortId(lcs) + "\n", fos); } } } } fos.close(); } /** * @param fileName * @throws IOException */ @Override public void loadLCSCache(String fileName) throws IOException { try { clearLCSCache(); } catch (UnknownOWLClassException e) { // TODO Auto-generated catch block e.printStackTrace(); throw new IOException("Cannot clear cache"); } LOG.info("Loading LCS cache from "+fileName); FileInputStream s = new FileInputStream(fileName); //List<String> lines = IOUtils.readLines(s); LineIterator itr = IOUtils.lineIterator(s, null); while (itr.hasNext()) { //for (String line : lines) { String line = itr.nextLine(); String[] vals = line.split("\t"); OWLClass c1 = getOWLClassFromShortId(vals[0]); OWLClass c2 = getOWLClassFromShortId(vals[1]); OWLClass a = getOWLClassFromShortId(vals[3]); Integer cix = classIndex.get(c1); Integer dix = classIndex.get(c2); Integer aix = classIndex.get(a); if (cix == null) { LOG.error("Unknown class C: "+c1); } if (dix == null) { LOG.error("Unknown class D: "+c2); } if (aix == null) { LOG.error("Unknown ancestor class: "+a); } ciPairIsCached[cix][dix] = true; //ciPairScaledScore[cix][dix] = (short)(Double.valueOf(vals[2]) * scaleFactor); // TODO - set all IC caches ciPairLCS[cix][dix] = aix; } s.close(); LOG.info("Finished loading LCS cache from "+fileName); isLCSCacheFullyPopulated = true; } @Override protected void setInformtionContectForAttribute(OWLClass c, Double v) { icCache.put(c, v); if (icClassArray == null) icClassArray = new Double[classArray.length]; if (!classIndex.containsKey(c)) { LOG.warn("Non-indexed class: "+c); } icClassArray[classIndex.get(c)] = v; } @Override protected void clearInformationContentCache() { LOG.info("Clearing IC cache"); testCache = null; icCache = new HashMap<OWLClass,Double>(); icClassArray = null; } protected void clearLCSCache() throws UnknownOWLClassException { LOG.info("Clearing LCS cache"); if (classArray == null) { createElementAttributeMapFromOntology(); } ciPairLCS = new int[classArray.length][classArray.length]; //ciPairScaledScore = new short[classArray.length][classArray.length]; ciPairIsCached = new boolean[classArray.length][classArray.length]; } @Override public Map<OWLNamedIndividual, Set<OWLClass>> getElementToAttributesMap() { if (elementToDirectAttributesMap == null) try { createElementAttributeMapFromOntology(); } catch 
(UnknownOWLClassException e) { // TODO Auto-generated catch block e.printStackTrace(); } return elementToDirectAttributesMap; } @Override public SummaryStatistics getSimStatistics(String stat) { // TODO Auto-generated method stub return null; } @Override public void calculateMetricStats(Set<OWLNamedIndividual> iset, Set<OWLNamedIndividual> jset) throws UnknownOWLClassException { LOG.info("Calculating all-by-all summary statistics for all metrics"); for (String m : metrics) { metricStatMeans.put(m, new SummaryStatistics()); metricStatMins.put(m, new SummaryStatistics()); metricStatMaxes.put(m, new SummaryStatistics()); } for (OWLNamedIndividual i : iset) { HashMap<String,SummaryStatistics> metricStatIndividual = new HashMap<String,SummaryStatistics>(); for (String m : metrics) { metricStatIndividual.put(m, new SummaryStatistics()); } for (OWLNamedIndividual j : jset) { ElementPairScores gwsim = this.getGroupwiseSimilarity(i, j); metricStatIndividual.get("bmaAsymIC").addValue(gwsim.bmaAsymIC); metricStatIndividual.get("bmaSymIC").addValue(gwsim.bmaSymIC); metricStatIndividual.get("bmaInverseAsymIC").addValue(gwsim.bmaInverseAsymIC); metricStatIndividual.get("combinedScore").addValue(gwsim.combinedScore); metricStatIndividual.get("simJ").addValue(gwsim.simjScore); metricStatIndividual.get("simGIC").addValue(gwsim.simGIC); metricStatIndividual.get("maxIC").addValue(gwsim.maxIC); } for (String m : metrics) { metricStatMins.get(m).addValue(metricStatIndividual.get(m).getMin()); metricStatMeans.get(m).addValue(metricStatIndividual.get(m).getMean()); metricStatMaxes.get(m).addValue(metricStatIndividual.get(m).getMax()); } } } }
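/*
 * Minimal, self-contained sketch (illustrative only, not part of FastOwlSim)
 * of the bitmap technique the class above is built on: each ancestor set is
 * encoded as an EWAHCompressedBitmap over class indices, and the Jaccard
 * similarity simJ = |anc(C) AND anc(D)| / |anc(C) OR anc(D)| falls out of
 * andCardinality()/orCardinality() without materialising Java Sets. The class
 * name and the index values are assumptions made for the example; only
 * javaewah calls already used above (set, and, andCardinality, orCardinality,
 * toArray) appear here.
 */
import java.util.Arrays;

import com.googlecode.javaewah.EWAHCompressedBitmap;

class BitmapSimJExample {

    // Bits must be set in increasing order, as noted in convertIntsToBitmap() above.
    static EWAHCompressedBitmap bitmapOf(int... sortedBits) {
        EWAHCompressedBitmap bm = new EWAHCompressedBitmap();
        for (int b : sortedBits) {
            bm.set(b);
        }
        return bm;
    }

    public static void main(String[] args) {
        // Hypothetical ancestor indices for two classes C and D.
        EWAHCompressedBitmap ancsC = bitmapOf(1, 2, 5, 9);
        EWAHCompressedBitmap ancsD = bitmapOf(2, 5, 7);

        int both = ancsC.andCardinality(ancsD);   // |intersection| = 2  ({2, 5})
        int either = ancsC.orCardinality(ancsD);  // |union| = 5  ({1, 2, 5, 7, 9})
        double simJ = both / (double) either;     // 0.4

        System.out.println("simJ = " + simJ);
        System.out.println("common indices = " + Arrays.toString(ancsC.and(ancsD).toArray()));
    }
}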
package org.bbop.gui; import java.awt.Component; import java.awt.Cursor; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.awt.event.MouseMotionListener; import java.awt.event.MouseWheelEvent; import java.awt.event.MouseWheelListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import javax.swing.JMenuItem; import javax.swing.JPopupMenu; import javax.swing.SwingUtilities; import org.bbop.graph.DefaultNodeFactory; import org.bbop.graph.DefaultNodeLabelProvider; import org.bbop.graph.DefaultTypeColorManager; import org.bbop.graph.GraphLayout; import org.bbop.graph.HTMLNodeLabelProvider; import org.bbop.graph.LabelBasedNodeSizeProvider; import org.bbop.graph.LinkDatabase; import org.bbop.graph.LinkDatabase.Link; import org.bbop.graph.LinkDatabaseLayoutEngine; import org.bbop.graph.NamedChildProvider; import org.bbop.graph.NodeDecorator; import org.bbop.graph.NodeLabelProvider; import org.bbop.graph.NodeSizeProvider; import org.bbop.graph.OELink; import org.bbop.graph.OENode; import org.bbop.graph.PCNode; import org.bbop.graph.RelayoutListener; import org.bbop.graph.RightClickMenuBehavior; import org.bbop.graph.RightClickMenuFactory; import org.bbop.graph.SingleCameraPanHandler; import org.bbop.graph.bounds.BoundsGuarantor; import org.bbop.graph.bounds.ZoomToAllGuarantor; import org.bbop.graph.collapse.CollapsibleLinkDatabase; import org.bbop.graph.collapse.DefaultLinkDatabase; import org.bbop.graph.collapse.ExpandCollapseListener; import org.bbop.graph.collapse.ExpansionEvent; import org.bbop.graph.collapse.LinkButtonBehavior; import org.bbop.graph.focus.FocusPicker; import org.bbop.graph.focus.FocusedNodeListener; import org.bbop.graph.tooltip.LinkTooltipFactory; import org.bbop.graph.tooltip.TooltipBehavior; import org.bbop.graph.zoom.ZoomWidgetBehavior; import org.bbop.piccolo.ExtensibleCanvas; import org.bbop.piccolo.ExtensibleRoot; import org.bbop.piccolo.FullPaintCamera; import org.bbop.piccolo.NamedChildMorpher; import org.bbop.piccolo.PiccoloUtil; import org.semanticweb.owlapi.model.OWLObject; import org.semanticweb.owlapi.reasoner.OWLReasoner; import owltools.graph.OWLGraphWrapper; import edu.umd.cs.piccolo.PCamera; import edu.umd.cs.piccolo.PNode; import edu.umd.cs.piccolo.PRoot; import edu.umd.cs.piccolo.activities.PActivity; import edu.umd.cs.piccolo.activities.PActivity.PActivityDelegate; import edu.umd.cs.piccolo.activities.PInterpolatingActivity; import edu.umd.cs.piccolo.event.PInputEvent; import edu.umd.cs.piccolo.util.PBounds; import edu.umd.cs.piccolo.util.PPickPath; /*** This class does most of the work for the Graph Editor component */ public class GraphCanvas extends ExtensibleCanvas implements RightClickMenuProvider { // generated private static final long serialVersionUID = 3863061306003913893L; private static final Object CURRENT_DECORATOR_ANIMATIONS = new Object(); private static final long DEFAULT_LAYOUT_DURATION = 750; private static final Comparator<Object> LAYOUT_ORDERING_COMPARATOR = new Comparator<Object>() { @Override public int compare(Object o1, Object o2) { if (o1 instanceof PNode && o2 instanceof PNode) { PNode n1 = (PNode) o1; PNode n2 = (PNode) o2; n1.invalidatePaint(); n2.invalidatePaint(); if 
(o1 instanceof OENode && o2 instanceof OELink) return 1; else if (o2 instanceof OENode && o1 instanceof OELink) return -1; else return 0; } else return 0; } }; @SuppressWarnings("unchecked") static void decorateNode(PRoot root, PNode canvas, Collection<NodeDecorator> decorators, boolean noAnimation, boolean postLayout) { Collection<PActivity> currentActivities = (Collection<PActivity>) canvas.getAttribute(CURRENT_DECORATOR_ANIMATIONS); if (currentActivities == null) { currentActivities = new LinkedList<PActivity>(); canvas.addAttribute(CURRENT_DECORATOR_ANIMATIONS, currentActivities); } else { for (PActivity activity : currentActivities) { activity.terminate(PActivity.TERMINATE_WITHOUT_FINISHING); } } for (int i = 0; i < canvas.getChildrenCount(); i++) { PNode node = canvas.getChild(i); for (NodeDecorator decorator : decorators) { if (postLayout || !decorator.onlyDecorateAfterLayout()) { PActivity activity = decorator.decorate(node, noAnimation); if (activity != null) { root.addActivity(activity); currentActivities.add(activity); } } } } } private boolean isLayingOut = false; private OWLObject focus = null; private PNode newLayer; private PActivity relayoutActivity; private static class CanvasConfig { static boolean useFocusPicker = false; // does not make sense for this application static boolean useToolTip = true; // tooltips are always nice static boolean useZoomWidget = true; // allow to static boolean useBoundsGuarantor = false; // the bounds generator refocuses on the current nodes, // this is a bit too dynamic if you allow also collapsible nodes NodeLabelProvider nodeLabelProvider; NodeSizeProvider nodeSizeProvider; NamedChildMorpher morpher = new NamedChildMorpher(); LinkDatabaseLayoutEngine layoutEngine; KeyListener keyListener; MouseListener mouseListener; MouseMotionListener mouseMotionListener; MouseWheelListener mouseWheelListener; RightClickMenuFactory menuFactory; RightClickMenuBehavior rightClickBehavior = new RightClickMenuBehavior(); List<ViewBehavior> viewBehaviors = new LinkedList<ViewBehavior>(); private long layoutDuration = DEFAULT_LAYOUT_DURATION; boolean disableAnimations = false; Collection<NodeDecorator> decorators = new LinkedList<NodeDecorator>(); DefaultNodeFactory nodeFactory; Collection<FocusedNodeListener> focusedNodeListeners = new ArrayList<FocusedNodeListener>(); Collection<RelayoutListener> layoutListeners = new ArrayList<RelayoutListener>(); LinkDatabase database; CollapsibleLinkDatabase collapsibleDatabase; } private final CanvasConfig config; public GraphCanvas(GraphLayout graphLayout, OWLGraphWrapper graph, OWLReasoner reasoner) { super(); config = new CanvasConfig(); config.nodeLabelProvider = new HTMLNodeLabelProvider("<center><font face='Arial'>$name$</font></center>", new DefaultNodeLabelProvider(graph)); config.nodeSizeProvider = new LabelBasedNodeSizeProvider(config.nodeLabelProvider); if (CanvasConfig.useFocusPicker) { addViewBehavior(new FocusPicker()); } if (CanvasConfig.useToolTip) { addViewBehavior(new TooltipBehavior()); } if (CanvasConfig.useZoomWidget) { addViewBehavior(new ZoomWidgetBehavior(8, 20)); } if (CanvasConfig.useBoundsGuarantor) { addViewBehavior(new BoundsGuarantor() { @Override protected void installDefaultCyclers() { addBoundsGuarantor(new ZoomToAllGuarantor(canvas)); } }); } addViewBehavior(new LinkButtonBehavior()); DefaultTypeColorManager typeManager = new DefaultTypeColorManager(graph); config.nodeFactory = new DefaultNodeFactory(typeManager, typeManager, config.nodeLabelProvider, new LinkTooltipFactory(graph)); 
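// The reasoner-backed link database is wrapped in a CollapsibleLinkDatabase;
// expanding or collapsing a node fires an ExpansionEvent, and the listener
// registered below reacts by calling relayout().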
config.database = new DefaultLinkDatabase(graph, reasoner); config.collapsibleDatabase = new CollapsibleLinkDatabase(config.database); config.collapsibleDatabase.addListener(new ExpandCollapseListener() { @Override public void expandStateChanged(ExpansionEvent e) { relayout(); } }); config.layoutEngine = new LinkDatabaseLayoutEngine(config.collapsibleDatabase , graphLayout, config.nodeFactory, config.nodeSizeProvider, config.nodeLabelProvider); setPanEventHandler(new SingleCameraPanHandler()); getPanEventHandler().setAutopan(false); setAutoscrolls(false); installListeners(); } @Override protected PCamera createCamera() { return new FullPaintCamera(); } public void decorate() { decorateNode(getRoot(), getLayer(), config.decorators, false, false); } public void dim() { setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); } @Override public void fillInMenu(MouseEvent e, JPopupMenu menu) { PInputEvent event = new PInputEvent(getRoot().getDefaultInputManager(), e); event.setPath(getCamera().pick(e.getX(), e.getY(), 1)); Collection<JMenuItem> factories = config.menuFactory.getMenuItems(this, event); if (factories != null) { for (JMenuItem item : factories) { if (item == null) continue; if (item == RightClickMenuFactory.SEPARATOR_ITEM) menu.addSeparator(); else menu.add(item); } } } public PBounds getBounds(Collection<OWLObject> pcs) { PBounds bounds = null; for (OWLObject pc : pcs) { if (pc == null) continue; PNode node = getNode(pc); if (node == null) continue; if (bounds == null) { bounds = new PBounds(node.getXOffset(), node.getYOffset(), node .getWidth(), node.getHeight()); } else bounds.add(new PBounds(node.getXOffset(), node.getYOffset(), node.getWidth(), node.getHeight())); } return bounds; } public boolean getDisableAnimations() { return config.disableAnimations; } public PNode getFinalLayoutVersion(Object key) { if (!isLayingOut()) return null; return config.morpher.getProvider().getNamedChild(key, newLayer); } public long getLayoutDuration() { return config.layoutDuration; } public LinkDatabase getLinkDatabase() { return config.database; } public CollapsibleLinkDatabase getCollapsibleLinkDatabase() { return config.collapsibleDatabase; } public float getMaxZoom() { return 1.5f; } public float getMinZoom() { PBounds zoomDim = getLayer().getFullBounds(); float viewWidth = (float) getCamera().getWidth(); float zoomWidth = (float) zoomDim.getWidth(); float viewHeight = (float) getCamera().getHeight(); float zoomHeight = (float) zoomDim.getHeight(); float minZoom = Math.min(Math.min(viewWidth / zoomWidth, viewHeight / zoomHeight), 1f); return minZoom; } public NamedChildMorpher getMorpher() { return config.morpher; } public NamedChildProvider getNamedChildProvider() { return config.morpher.getProvider(); } public PNode getNewLayer() { if (!isLayingOut()) throw new IllegalStateException( "getNewLayer() can only be called while the canvas is laying out"); return newLayer; } public PCNode<?> getNode(int x, int y) { PPickPath path = getCamera().pick(x, y, 1); PCNode<?> node = (PCNode<?>) PiccoloUtil.getNodeOfClass(path, OENode.class, OELink.class); return node; } public OENode getNode(OWLObject pc) { NamedChildProvider provider = getNamedChildProvider(); return (OENode) provider.getNamedChild(pc, getLayer()); } public NodeLabelProvider getNodeLabelProvider() { return config.nodeLabelProvider; } public RightClickMenuBehavior getRightClickBehavior() { return config.rightClickBehavior; } private void addViewBehavior(ViewBehavior viewBehavior) { config.viewBehaviors.add(viewBehavior); 
viewBehavior.install(this); } public void addDecorator(NodeDecorator decorator) { config.decorators.add(decorator); } public void addFocusedNodeListener(FocusedNodeListener listener) { config.focusedNodeListeners.add(listener); } public void addRelayoutListener(RelayoutListener listener) { config.layoutListeners.add(listener); } public void removeDecorator(NodeDecorator decorator) { config.decorators.remove(decorator); } public void removeFocusedNodeListener(FocusedNodeListener listener) { config.focusedNodeListeners.remove(listener); } public void removeRelayoutListener(RelayoutListener listener) { config.layoutListeners.remove(listener); } /** * This method installs mouse and key listeners on the canvas that forward * those events to piccolo. */ @Override protected void installInputSources() { if (config.mouseListener == null) { config.mouseListener = new MouseListener() { private boolean isButton1Pressed; private boolean isButton2Pressed; private boolean isButton3Pressed; @Override public void mouseClicked(MouseEvent e) { sendInputEventToInputManager(e, MouseEvent.MOUSE_CLICKED); } @Override public void mouseEntered(MouseEvent e) { MouseEvent simulated = null; if ((e.getModifiersEx() & (InputEvent.BUTTON1_DOWN_MASK | InputEvent.BUTTON2_DOWN_MASK | InputEvent.BUTTON3_DOWN_MASK)) != 0) { simulated = new MouseEvent((Component) e.getSource(), MouseEvent.MOUSE_DRAGGED, e.getWhen(), e .getModifiers(), e.getX(), e.getY(), e .getClickCount(), e.isPopupTrigger(), e .getButton()); } else { simulated = new MouseEvent((Component) e.getSource(), MouseEvent.MOUSE_MOVED, e.getWhen(), e .getModifiers(), e.getX(), e.getY(), e .getClickCount(), e.isPopupTrigger(), e .getButton()); } sendInputEventToInputManager(e, MouseEvent.MOUSE_ENTERED); sendInputEventToInputManager(simulated, simulated.getID()); } @Override public void mouseExited(MouseEvent e) { } @Override public void mousePressed(MouseEvent e) { requestFocus(); boolean shouldBalanceEvent = false; if (e.getButton() == MouseEvent.NOBUTTON) { if ((e.getModifiers() & MouseEvent.BUTTON1_MASK) == MouseEvent.BUTTON1_MASK) { e = new MouseEvent((Component) e.getSource(), MouseEvent.MOUSE_PRESSED, e.getWhen(), e .getModifiers(), e.getX(), e.getY(), e.getClickCount(), e .isPopupTrigger(), MouseEvent.BUTTON1); } else if ((e.getModifiers() & MouseEvent.BUTTON2_MASK) == MouseEvent.BUTTON2_MASK) { e = new MouseEvent((Component) e.getSource(), MouseEvent.MOUSE_PRESSED, e.getWhen(), e .getModifiers(), e.getX(), e.getY(), e.getClickCount(), e .isPopupTrigger(), MouseEvent.BUTTON2); } else if ((e.getModifiers() & MouseEvent.BUTTON3_MASK) == MouseEvent.BUTTON3_MASK) { e = new MouseEvent((Component) e.getSource(), MouseEvent.MOUSE_PRESSED, e.getWhen(), e .getModifiers(), e.getX(), e.getY(), e.getClickCount(), e .isPopupTrigger(), MouseEvent.BUTTON3); } } switch (e.getButton()) { case MouseEvent.BUTTON1: if (isButton1Pressed) { shouldBalanceEvent = true; } isButton1Pressed = true; break; case MouseEvent.BUTTON2: if (isButton2Pressed) { shouldBalanceEvent = true; } isButton2Pressed = true; break; case MouseEvent.BUTTON3: if (isButton3Pressed) { shouldBalanceEvent = true; } isButton3Pressed = true; break; } if (shouldBalanceEvent) { MouseEvent balanceEvent = new MouseEvent((Component) e .getSource(), MouseEvent.MOUSE_RELEASED, e .getWhen(), e.getModifiers(), e.getX(), e .getY(), e.getClickCount(), e.isPopupTrigger(), e.getButton()); sendInputEventToInputManager(balanceEvent, MouseEvent.MOUSE_RELEASED); } sendInputEventToInputManager(e, MouseEvent.MOUSE_PRESSED); } 
@Override public void mouseReleased(MouseEvent e) { boolean shouldBalanceEvent = false; if (e.getButton() == MouseEvent.NOBUTTON) { if ((e.getModifiers() & MouseEvent.BUTTON1_MASK) == MouseEvent.BUTTON1_MASK) { e = new MouseEvent((Component) e.getSource(), MouseEvent.MOUSE_RELEASED, e.getWhen(), e .getModifiers(), e.getX(), e.getY(), e.getClickCount(), e .isPopupTrigger(), MouseEvent.BUTTON1); } else if ((e.getModifiers() & MouseEvent.BUTTON2_MASK) == MouseEvent.BUTTON2_MASK) { e = new MouseEvent((Component) e.getSource(), MouseEvent.MOUSE_RELEASED, e.getWhen(), e .getModifiers(), e.getX(), e.getY(), e.getClickCount(), e .isPopupTrigger(), MouseEvent.BUTTON2); } else if ((e.getModifiers() & MouseEvent.BUTTON3_MASK) == MouseEvent.BUTTON3_MASK) { e = new MouseEvent((Component) e.getSource(), MouseEvent.MOUSE_RELEASED, e.getWhen(), e .getModifiers(), e.getX(), e.getY(), e.getClickCount(), e .isPopupTrigger(), MouseEvent.BUTTON3); } } switch (e.getButton()) { case MouseEvent.BUTTON1: if (!isButton1Pressed) { shouldBalanceEvent = true; } isButton1Pressed = false; break; case MouseEvent.BUTTON2: if (!isButton2Pressed) { shouldBalanceEvent = true; } isButton2Pressed = false; break; case MouseEvent.BUTTON3: if (!isButton3Pressed) { shouldBalanceEvent = true; } isButton3Pressed = false; break; } if (shouldBalanceEvent) { MouseEvent balanceEvent = new MouseEvent((Component) e .getSource(), MouseEvent.MOUSE_PRESSED, e .getWhen(), e.getModifiers(), e.getX(), e .getY(), e.getClickCount(), e.isPopupTrigger(), e.getButton()); sendInputEventToInputManager(balanceEvent, MouseEvent.MOUSE_PRESSED); } sendInputEventToInputManager(e, MouseEvent.MOUSE_RELEASED); } }; addMouseListener(config.mouseListener); } if (config.mouseMotionListener == null) { config.mouseMotionListener = new MouseMotionListener() { @Override public void mouseDragged(MouseEvent e) { sendInputEventToInputManager(e, MouseEvent.MOUSE_DRAGGED); } @Override public void mouseMoved(MouseEvent e) { sendInputEventToInputManager(e, MouseEvent.MOUSE_MOVED); } }; addMouseMotionListener(config.mouseMotionListener); } if (config.mouseWheelListener == null) { config.mouseWheelListener = new MouseWheelListener() { @Override public void mouseWheelMoved(MouseWheelEvent e) { sendInputEventToInputManager(e, e.getScrollType()); if (!e.isConsumed() && getParent() != null) { getParent().dispatchEvent(e); } } }; addMouseWheelListener(config.mouseWheelListener); } if (config.keyListener == null) { config.keyListener = new KeyListener() { @Override public void keyPressed(KeyEvent e) { sendInputEventToInputManager(e, KeyEvent.KEY_PRESSED); } @Override public void keyReleased(KeyEvent e) { sendInputEventToInputManager(e, KeyEvent.KEY_RELEASED); } @Override public void keyTyped(KeyEvent e) { sendInputEventToInputManager(e, KeyEvent.KEY_TYPED); } }; addKeyListener(config.keyListener); } } protected void installListeners() { addRelayoutListener(new RelayoutListener() { @Override public void relayoutComplete() { getZoomEventHandler().setMaxScale(getMaxZoom()); float minZoom = getMinZoom(); getZoomEventHandler().setMinScale(minZoom); } @Override public void relayoutStarting() { } }); getCamera().addPropertyChangeListener(PCamera.PROPERTY_VIEW_TRANSFORM, new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { getZoomEventHandler().setMaxScale(getMaxZoom()); float minZoom = getMinZoom(); getZoomEventHandler().setMinScale(minZoom); } }); } public Collection<OENode> getVisibleNodes() { Collection<OENode> out = new ArrayList<OENode>(); 
for(OWLObject obj : config.collapsibleDatabase.getObjects()) { OENode node = getNode(obj); if (node != null) { out.add(node); } } return out; } public boolean isLayingOut() { return isLayingOut; } public void panToObjects() { PBounds centerBounds = getLayer().getFullBoundsReference(); getCamera().animateViewToCenterBounds(centerBounds, false, getLayoutDuration()); } public void relayout() { if (config.collapsibleDatabase == null) return; isLayingOut = true; dim(); if (getDisableAnimations()) { int width = getWidth(); int height = getHeight(); if (width < 1) width = 1; if (height < 1) height = 1; ((ExtensibleRoot) getRoot()).setDisableUpdates(true); repaint(); // and the component still works normally. } if (relayoutActivity != null) { relayoutActivity.terminate(PActivity.TERMINATE_WITHOUT_FINISHING); relayoutActivity = null; } newLayer = config.layoutEngine.getNewLayer(); decorateNode(getRoot(), newLayer, config.decorators, true, true); config.morpher.setNewNodeOriginNode(getFocusedNode()); relayoutActivity = config.morpher.morph(getLayer(), newLayer, getLayoutDuration()); if (relayoutActivity instanceof PInterpolatingActivity) { ((PInterpolatingActivity) relayoutActivity).setSlowInSlowOut(false); } relayoutActivity.setDelegate(new PActivityDelegate() { @SuppressWarnings("unchecked") @Override public void activityFinished(PActivity activity) { isLayingOut = false; newLayer = null; Collections.sort(getLayer().getChildrenReference(), LAYOUT_ORDERING_COMPARATOR); fireRelayoutCompleteEvent(); decorateNode(getRoot(), getLayer(), config.decorators, true, true); undim(); repaint(); // This line stops the graph from appearing in a small box at the bottom righthand corner. } @Override public void activityStarted(PActivity activity) { fireRelayoutStartingEvent(); } @Override public void activityStepped(PActivity activity) { } }); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { if (relayoutActivity == null) return; getRoot().addActivity(relayoutActivity); ((ExtensibleRoot) getRoot()).setDisableUpdates(false); } }); } public OENode getFocusedNode() { if (focus == null) return null; else return getNode(focus); } public void setFocusedObject(OWLObject focusedObject) { OWLObject oldFocused = this.focus; this.focus = focusedObject; fireFocusedNodeChanged(oldFocused, focusedObject); } private void fireFocusedNodeChanged(OWLObject oldNode, OWLObject newNode) { for (FocusedNodeListener listener : config.focusedNodeListeners) { listener.focusedChanged(oldNode, newNode); } } private void fireRelayoutCompleteEvent() { List<RelayoutListener> defensiveCopy = new ArrayList<RelayoutListener>(config.layoutListeners); for (RelayoutListener listener : defensiveCopy) { listener.relayoutComplete(); } } private void fireRelayoutStartingEvent() { List<RelayoutListener> defensiveCopy = new ArrayList<RelayoutListener>(config.layoutListeners); for (RelayoutListener listener : defensiveCopy) { listener.relayoutStarting(); } } public void show(Collection<OWLObject> pcs, boolean zoom) { PBounds b = getBounds(pcs); getCamera().animateViewToCenterBounds(b, zoom, getLayoutDuration()); } public void undim() { setCursor(Cursor.getDefaultCursor()); } public void zoomToObjects() { PBounds centerBounds = getLayer().getFullBoundsReference(); getCamera().animateViewToCenterBounds(centerBounds, true, getLayoutDuration()); } public void redraw() { relayout(); } public void removeVisibleObjects(Collection<OWLObject> visible) { Collection<OWLObject> current = new HashSet<OWLObject>(); for (OWLObject io : 
config.collapsibleDatabase.getObjects()) { current.add(io); } current.removeAll(getLinkedObjectCollection(visible)); config.collapsibleDatabase.setVisibleObjects(current, false); } public void addVisibleObjects(Collection<Link> visible) { Collection<OWLObject> current = new HashSet<OWLObject>(); for (OWLObject io : config.collapsibleDatabase.getObjects()) { current.add(io); } Collection<OWLObject> loCol = getLinkedObjectCollection(visible); current.addAll(loCol); config.collapsibleDatabase.setVisibleObjects(current, false); } protected Collection<OWLObject> getLinkedObjectCollection(Collection<?> pcs) { Collection<OWLObject> out = new HashSet<OWLObject>(); for (Object pc : pcs) { if (pc instanceof OWLObject) { OWLObject node = (OWLObject) pc; out.add(node); } else if (pc instanceof Link) { Link link = (Link) pc; OWLObject source = link.getSource(); if (source != null) { out.add(source); } OWLObject target = link.getTarget(); if (target != null) { out.add(target); } } } return out; } public Collection<OWLObject> getVisibleObjects() { Collection<OWLObject> out = new HashSet<OWLObject>(); for (OWLObject io : config.collapsibleDatabase.getObjects()) { out.add(io); } return out; } public void reset() { config.collapsibleDatabase.setVisibleObjects(config.database.getRoots(), false); relayout(); } }
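// Usage sketch: a minimal, hypothetical wiring of the GraphCanvas above, assuming a
// GraphLayout implementation, an OWLGraphWrapper and an OWLReasoner already exist
// elsewhere in the application; only methods defined in GraphCanvas are used.
class GraphCanvasUsageSketch {
    void wireUp(org.bbop.graph.GraphLayout layout, owltools.graph.OWLGraphWrapper graph,
            org.semanticweb.owlapi.reasoner.OWLReasoner reasoner,
            org.semanticweb.owlapi.model.OWLObject objectOfInterest) {
        final org.bbop.gui.GraphCanvas canvas = new org.bbop.gui.GraphCanvas(layout, graph, reasoner);
        canvas.addRelayoutListener(new org.bbop.graph.RelayoutListener() {
            @Override
            public void relayoutStarting() { /* e.g. disable editing controls */ }
            @Override
            public void relayoutComplete() { canvas.panToObjects(); }
        });
        canvas.reset(); // show the ontology roots and trigger a layout
        canvas.setFocusedObject(objectOfInterest); // new nodes originate from this node on the next relayout
    }
}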
package net.jueb.util4j.test;

import java.util.Scanner;

import org.jctools.queues.MpscChunkedArrayQueue;
import org.jctools.queues.MpscCompoundQueue;
import org.jctools.queues.MpscGrowableArrayQueue;
import org.jctools.queues.MpscLinkedQueue;
import org.jctools.queues.MpscUnboundedArrayQueue;
import org.jctools.queues.atomic.MpscLinkedAtomicQueue;

public class TestJctools {

    {
        MpscCompoundQueue<Runnable> queue1;
        /**
         * Bounded MPSC queue: starts small and grows in chunks up to the given max capacity.
         */
        MpscGrowableArrayQueue<Runnable> queue2 = new MpscGrowableArrayQueue<>(10);
        MpscUnboundedArrayQueue<Runnable> queue3 = new MpscUnboundedArrayQueue<>(8);
        MpscLinkedAtomicQueue<Runnable> queue4 = new MpscLinkedAtomicQueue<>();
        MpscLinkedQueue<Runnable> queue5 = MpscLinkedQueue.newMpscLinkedQueue();
    }

    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        sc.nextLine();
        TestJctools t = new TestJctools();
        t.teste11();
        sc.nextLine();
    }

    public void teste1() {
        MpscCompoundQueue[] qs = new MpscCompoundQueue[65535];
        for (int i = 0; i < qs.length; i++) {
            qs[i] = new MpscCompoundQueue<>(1024);
        }
    }

    public void teste11() {
        MpscChunkedArrayQueue[] qs = new MpscChunkedArrayQueue[65535];
        for (int i = 0; i < qs.length; i++) {
            qs[i] = new MpscChunkedArrayQueue<>(1048576);
        }
    }

    public void teste2() {
        MpscGrowableArrayQueue[] qs = new MpscGrowableArrayQueue[65535];
        for (int i = 0; i < qs.length; i++) {
            qs[i] = new MpscGrowableArrayQueue<>(8);
        }
    }

    public void teste3() {
        MpscUnboundedArrayQueue[] qs = new MpscUnboundedArrayQueue[65535];
        for (int i = 0; i < qs.length; i++) {
            qs[i] = new MpscUnboundedArrayQueue<>(8);
        }
    }

    public void teste5() {
        MpscLinkedQueue[] qs = new MpscLinkedQueue[65535];
        for (int i = 0; i < qs.length; i++) {
            qs[i] = MpscLinkedQueue.newMpscLinkedQueue();
        }
    }

    /**
     * Atomic (non-Unsafe) variant of MpscLinkedQueue.
     */
    public void teste4() {
        MpscLinkedAtomicQueue[] qs = new MpscLinkedAtomicQueue[65535];
        for (int i = 0; i < qs.length; i++) {
            qs[i] = new MpscLinkedAtomicQueue<>();
        }
    }
}
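// Usage sketch: a minimal multi-producer/single-consumer example for the JCTools queues
// probed above. Several producers offer() into one MpscGrowableArrayQueue while a single
// consumer poll()s it; the capacity of 1024 and the counts are arbitrary example values.
class MpscUsageSketch {
    public static void main(String[] args) {
        final org.jctools.queues.MpscGrowableArrayQueue<Runnable> queue =
                new org.jctools.queues.MpscGrowableArrayQueue<>(1024);
        for (int p = 0; p < 4; p++) {
            new Thread(() -> {
                for (int i = 0; i < 100; i++) {
                    // offer() may return false while the queue is at its max capacity
                    while (!queue.offer(() -> { /* work item */ })) {
                        Thread.yield();
                    }
                }
            }).start();
        }
        int consumed = 0;
        while (consumed < 4 * 100) { // single consumer drains everything that was offered
            Runnable task = queue.poll();
            if (task != null) {
                task.run();
                consumed++;
            } else {
                Thread.yield();
            }
        }
    }
}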
package com.zsoft.hubgroupdemo; import java.util.ArrayList; import java.util.List; import org.json.JSONArray; import com.zsoft.SignalA.Hubs.HubConnection; import com.zsoft.SignalA.Hubs.HubInvokeCallback; import com.zsoft.SignalA.Hubs.HubOnDataCallback; import com.zsoft.SignalA.Hubs.IHubProxy; import com.zsoft.SignalA.Transport.StateBase; import com.zsoft.SignalA.transport.longpolling.LongPollingTransport; import android.net.Uri; import android.os.Bundle; import android.app.Activity; import android.content.OperationApplicationException; import android.support.v4.app.Fragment; import android.view.Menu; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; public class MainActivity extends Activity { private EditText mGroupNameTextBox; private EditText mMessageTextBox; private Button mJoinGroupButton; private Button mBroadcastToAllButton; private Button mBroadcastToGroupButton; protected HubConnection con = null; protected IHubProxy hub = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); mGroupNameTextBox = (EditText) this.findViewById(R.id.groupName); mMessageTextBox = (EditText) this.findViewById(R.id.message_text); mJoinGroupButton = (Button) this.findViewById(R.id.joinGroupButton); mJoinGroupButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { JoinGroup(mGroupNameTextBox.getText().toString()); } }); mBroadcastToAllButton = (Button) this.findViewById(R.id.broadcastToAllButton); mBroadcastToAllButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { HubInvokeCallback callback = new HubInvokeCallback() { @Override public void OnResult(boolean succeeded, String response) { Toast.makeText(MainActivity.this, response, Toast.LENGTH_SHORT).show(); } @Override public void OnError(Exception ex) { Toast.makeText(MainActivity.this, "Error: " + ex.getMessage(), Toast.LENGTH_SHORT).show(); } }; List<String> args = new ArrayList<String>(1); args.add(mMessageTextBox.getText().toString()); hub.Invoke("SendMessageToAll", args, callback); } }); mBroadcastToGroupButton = (Button) this.findViewById(R.id.broadcastToGroupButton); mBroadcastToGroupButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { HubInvokeCallback callback = new HubInvokeCallback() { @Override public void OnResult(boolean succeeded, String response) { Toast.makeText(MainActivity.this, response, Toast.LENGTH_SHORT).show(); } @Override public void OnError(Exception ex) { Toast.makeText(MainActivity.this, "Error: " + ex.getMessage(), Toast.LENGTH_SHORT).show(); } }; List<String> args = new ArrayList<String>(2); args.add(mGroupNameTextBox.getText().toString()); args.add(mMessageTextBox.getText().toString()); hub.Invoke("SendMessageToGroup", args, callback); } }); Connect(Uri.parse("http://signalrgrouptest.azurewebsites.net/")); } public void Connect(Uri address) { con = new HubConnection(address.toString(), this, new LongPollingTransport()) { @Override public void OnStateChanged(StateBase oldState, StateBase newState) { //tvStatus.setText(oldState.getState() + " -> " + newState.getState()); switch(newState.getState()) { case Connected: mJoinGroupButton.setEnabled(true); mBroadcastToAllButton.setEnabled(true); mBroadcastToGroupButton.setEnabled(true); JoinGroup("test"); break; default: mJoinGroupButton.setEnabled(false); mBroadcastToAllButton.setEnabled(false); 
mBroadcastToGroupButton.setEnabled(false); break; } } @Override public void OnError(Exception exception) { Toast.makeText(MainActivity.this, "On error: " + exception.getMessage(), Toast.LENGTH_LONG).show(); } }; try { hub = con.CreateHubProxy("testhub"); } catch (OperationApplicationException e) { // TODO Auto-generated catch block e.printStackTrace(); } hub.On("DisplayMessage", new HubOnDataCallback() { @Override public void OnReceived(JSONArray args) { for(int i=0; i<args.length(); i++) { Toast.makeText(MainActivity.this, "New message\n" + args.opt(i).toString(), Toast.LENGTH_SHORT).show(); } } }); con.Start(); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } private void JoinGroup(String groupName) { HubInvokeCallback callback = new HubInvokeCallback() { @Override public void OnResult(boolean succeeded, String response) { if(succeeded) { Toast.makeText(MainActivity.this, "Joined group", Toast.LENGTH_SHORT).show(); } else { Toast.makeText(MainActivity.this, "Failed to join group", Toast.LENGTH_SHORT).show(); } } @Override public void OnError(Exception ex) { Toast.makeText(MainActivity.this, "Error: " + ex.getMessage(), Toast.LENGTH_SHORT).show(); } }; List<String> args = new ArrayList<String>(1); args.add(groupName); hub.Invoke("JoinGroup", args, callback); } }
package com.tolstykh.textviewrichdrawable.helper; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.support.annotation.ColorInt; import android.support.annotation.DrawableRes; import android.support.annotation.NonNull; import android.support.graphics.drawable.VectorDrawableCompat; import android.support.v4.graphics.drawable.DrawableCompat; import android.support.v4.view.ViewCompat; import android.util.AttributeSet; import android.util.Log; import android.widget.TextView; import com.tolstykh.library.R; import com.tolstykh.textviewrichdrawable.DrawableEnriched; public class RichDrawableHelper implements DrawableEnriched { private static final int LEFT_DRAWABLE_INDEX = 0; private static final int TOP_DRAWABLE_INDEX = 1; private static final int RIGHT_DRAWABLE_INDEX = 2; private static final int BOTTOM_DRAWABLE_INDEX = 3; private TextView mView; private int mDrawableWidth; private int mDrawableHeight; @ColorInt private int mDrawableTint; public RichDrawableHelper(@NonNull TextView view, AttributeSet attrs, int defStyleAttr, int defStyleRes) { mView = view; TypedArray array = view.getContext() .obtainStyledAttributes(attrs, R.styleable.TextViewRichDrawable, defStyleAttr, defStyleRes); int drawableStartVectorId; int drawableTopVectorId; int drawableEndVectorId; int drawableBottomVectorId; try { mDrawableWidth = array.getDimensionPixelSize(R.styleable.TextViewRichDrawable_compoundDrawableWidth, UNDEFINED); mDrawableHeight = array.getDimensionPixelSize(R.styleable.TextViewRichDrawable_compoundDrawableHeight, UNDEFINED); drawableStartVectorId = array.getResourceId(R.styleable.TextViewRichDrawable_drawableStartVector, UNDEFINED); drawableTopVectorId = array.getResourceId(R.styleable.TextViewRichDrawable_drawableTopVector, UNDEFINED); drawableEndVectorId = array.getResourceId(R.styleable.TextViewRichDrawable_drawableEndVector, UNDEFINED); drawableBottomVectorId = array.getResourceId(R.styleable.TextViewRichDrawable_drawableBottomVector, UNDEFINED); mDrawableTint = array.getColor(R.styleable.TextViewRichDrawable_drawableTint, UNDEFINED); } finally { array.recycle(); } if (mDrawableWidth > 0 || mDrawableHeight > 0 || drawableStartVectorId > 0 || drawableTopVectorId > 0 || drawableEndVectorId > 0 || drawableBottomVectorId > 0) { initCompoundDrawables(drawableStartVectorId, drawableTopVectorId, drawableEndVectorId, drawableBottomVectorId); } } private void initCompoundDrawables(int drawableStartVectorId, int drawableTopVectorId, int drawableEndVectorId, int drawableBottomVectorId) { Drawable[] drawables = mView.getCompoundDrawables(); boolean rtl = ViewCompat.getLayoutDirection(mView) == ViewCompat.LAYOUT_DIRECTION_RTL; if (drawableStartVectorId != UNDEFINED) { drawables[rtl ? RIGHT_DRAWABLE_INDEX : LEFT_DRAWABLE_INDEX] = getVectorDrawable(drawableStartVectorId); } if (drawableTopVectorId != UNDEFINED) { drawables[TOP_DRAWABLE_INDEX] = getVectorDrawable(drawableTopVectorId); } if (drawableEndVectorId != UNDEFINED) { drawables[rtl ? 
LEFT_DRAWABLE_INDEX : RIGHT_DRAWABLE_INDEX] = getVectorDrawable(drawableEndVectorId); } if (drawableBottomVectorId != UNDEFINED) { drawables[BOTTOM_DRAWABLE_INDEX] = getVectorDrawable(drawableBottomVectorId); } if (mDrawableHeight > 0 || mDrawableWidth > 0) { for (Drawable drawable : drawables) { if (drawable == null) { continue; } Rect realBounds = new Rect(0, 0, drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight()); float actualDrawableWidth = realBounds.width(); float actualDrawableHeight = realBounds.height(); float actualDrawableRatio = actualDrawableHeight / actualDrawableWidth; float scale; // check if both width and height defined then adjust drawable size according to the ratio if (mDrawableHeight > 0 && mDrawableWidth > 0) { float placeholderRatio = mDrawableHeight / (float) mDrawableWidth; if (placeholderRatio > actualDrawableRatio) { scale = mDrawableWidth / actualDrawableWidth; } else { scale = mDrawableHeight / actualDrawableHeight; } } else if (mDrawableHeight > 0) { // only height defined scale = mDrawableHeight / actualDrawableHeight; } else { // only width defined scale = mDrawableWidth / actualDrawableWidth; } actualDrawableWidth = actualDrawableWidth * scale; actualDrawableHeight = actualDrawableHeight * scale; realBounds.right = realBounds.left + Math.round(actualDrawableWidth); realBounds.bottom = realBounds.top + Math.round(actualDrawableHeight); drawable.setBounds(realBounds); } } else { for (Drawable drawable : drawables) { if (drawable == null) { continue; } drawable.setBounds(new Rect(0, 0, drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight())); } } if (mDrawableTint != UNDEFINED) { for (int i = 0; i < drawables.length; i++) { if (drawables[i] == null) { continue; } Drawable tintedDrawable = DrawableCompat.wrap(drawables[i]); DrawableCompat.setTint(tintedDrawable.mutate(), mDrawableTint); drawables[i] = tintedDrawable; } } mView.setCompoundDrawables(drawables[LEFT_DRAWABLE_INDEX], drawables[TOP_DRAWABLE_INDEX], drawables[RIGHT_DRAWABLE_INDEX], drawables[BOTTOM_DRAWABLE_INDEX]); } private Resources getResources() { return mView.getResources(); } private Context getContext() { return mView.getContext(); } private VectorDrawableCompat getVectorDrawable(@DrawableRes int resId) { return VectorDrawableCompat.create(getResources(), resId, getContext().getTheme()); } /** * {@inheritDoc} */ public int getCompoundDrawableHeight() { return mDrawableHeight; } /** * {@inheritDoc} */ public int getCompoundDrawableWidth() { return mDrawableWidth; } }
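// Usage sketch: a hypothetical TextView subclass showing how RichDrawableHelper is meant
// to be composed into a view; the helper reads the custom attributes in its constructor
// and sizes/tints the compound drawables. The class name is illustrative; only the
// helper's constructor and getters shown above are relied upon.
class RichTextViewSketch extends android.widget.TextView {
    private final com.tolstykh.textviewrichdrawable.helper.RichDrawableHelper helper;

    public RichTextViewSketch(android.content.Context context, android.util.AttributeSet attrs) {
        super(context, attrs);
        // defStyleAttr and defStyleRes left at 0 for the sketch
        helper = new com.tolstykh.textviewrichdrawable.helper.RichDrawableHelper(this, attrs, 0, 0);
    }

    public int getCompoundDrawableWidth() {
        return helper.getCompoundDrawableWidth();
    }

    public int getCompoundDrawableHeight() {
        return helper.getCompoundDrawableHeight();
    }
}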
package assignment2;

import java.awt.Point;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.List;
import java.util.Arrays;
import java.util.Scanner;

public class Game {

    private boolean debug;
    private ArrayList<String> history;
    private Scanner sc;

    public Game(boolean debug) {
        this.debug = debug;
        sc = new Scanner(System.in);
    }

    private boolean hadError(Code code, Code guess) {
        // Invalid peg length
        if (guess.getCode().length != code.getCode().length)
            return true;
        // Invalid color
        List<String> validColors = Arrays.asList(GameConfiguration.colors);
        for (String guessColor : guess.getCode())
            if (!(validColors.contains(guessColor)))
                return true;
        return false;
    }

    private void showHistory() {
        System.out.println("\nHistory: ");
        for (String item : history)
            System.out.println(item);
        System.out.println();
    }

    private void runGame() {
        System.out.println("\nGenerating secret code ....\n");
        Code code = new Code(GameConfiguration.pegNumber);
        boolean invalidInput = false;
        history = new ArrayList<String>();
        for (int i = GameConfiguration.guessNumber; i > 0; i--) {
            if (debug)
                System.out.println("Secret Code (debug mode): " + code + "\n");
            if (!invalidInput)
                System.out.println("You have " + i + " guesses left.");
            System.out.println("What is your next guess?");
            System.out.println("Type in the characters for your guess and press enter.");
            System.out.print("Enter guess: ");
            String s = sc.nextLine();
            if (s.equalsIgnoreCase("history")) {
                showHistory();
                i++;
                continue;
            }
            Code guess = new Code(s);
            invalidInput = hadError(code, guess);
            if (invalidInput) {
                System.out.println(guess + " -> " + "INVALID GUESS\n");
                i++;
                continue;
            }
            if (i <= 1) {
                System.out.println("Sorry, you are out of guesses. You lose, boo-hoo.\n");
                break;
            }
            System.out.print("\n" + guess + " -> " + "Result: ");
            Point pegs = code.getPegs(guess);
            String blackPegStr = pegs.x > 0 ? pegs.x + " black peg" : "no black pegs";
            String whitePegStr = pegs.y > 0 ? pegs.y + " white peg" : "no white pegs";
            if (pegs.x > 1)
                blackPegStr += "s";
            if (pegs.y > 1)
                whitePegStr += "s";
            String result = "";
            result = blackPegStr + ", " + whitePegStr;
            if (pegs.x == code.getCode().length)
                result = "4 black pegs. You win!!";
            history.add(guess + "\t\t" + result);
            System.out.println(result + "\n");
            if (pegs.x == code.getCode().length)
                break;
        }
    }

    public static void main(String[] args) {
        try {
            // Print intro text
            Scanner intro = new Scanner(new File("src/assignment2/intro.txt"));
            while (intro.hasNextLine())
                System.out.println(intro.nextLine());
            intro.close();
            // Check if user is ready to play
            System.out.print("\nYou have " + GameConfiguration.guessNumber
                    + " guesses to figure out the secret code or you lose the game. Are you ready to play? (Y/N): ");
            Scanner sc = new Scanner(System.in);
            String s = sc.nextLine();
            if (s.equalsIgnoreCase("y")) {
                // Play the game
                while (s.equalsIgnoreCase("y")) {
                    Game g = new Game(true);
                    g.runGame();
                    System.out.print("Are you ready for another game (Y/N): ");
                    s = sc.nextLine();
                }
                sc.close();
            } else
                System.exit(0);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }
}
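// Companion sketch: Game reads three constants from a GameConfiguration class that is not
// shown here. This is a hypothetical guess at its shape, with assumed example values
// (the "4 black pegs" win message above suggests pegNumber == 4).
class GameConfigurationSketch {
    static final String[] colors = { "R", "G", "B", "Y", "O", "P" }; // assumed one-letter color codes
    static final int pegNumber = 4;    // pegs in the secret code (assumed)
    static final int guessNumber = 12; // guesses per game (assumed)
}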
package org.apache.maven.mercury; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.apache.maven.mercury.artifact.ArtifactMetadata; import org.apache.maven.mercury.artifact.ArtifactScopeEnum; import org.apache.maven.mercury.metadata.DependencyBuilder; import org.apache.maven.mercury.metadata.DependencyBuilderFactory; import org.apache.maven.mercury.repository.api.MetadataResults; import org.apache.maven.mercury.repository.api.Repository; import org.apache.maven.mercury.repository.api.RepositoryReader; import org.apache.maven.mercury.repository.local.m2.LocalRepositoryM2; import org.apache.maven.mercury.repository.remote.m2.RemoteRepositoryM2; import org.apache.maven.mercury.transport.api.Server; import org.apache.maven.mercury.util.FileUtil; import org.junit.After; import org.junit.Before; import org.junit.Test; /** * * * @author Oleg Gusakov * @version $Id$ * */ public class MavenDependencyProcessorTest { LocalRepositoryM2 _localRepo; RemoteRepositoryM2 _remoteRepo; File _localRepoFile; static final String _remoteRepoDir = "./target/test-classes/repo"; File _remoteRepoFile; static String _remoteRepoUrlPrefix = "http://localhost:"; static String _remoteRepoUrlSufix = "/maven2"; // HttpTestServer _jetty; int _port; DependencyBuilder _depBuilder; /** * @throws java.lang.Exception */ @Before public void setUp() throws Exception { MavenDependencyProcessor dp = new MavenDependencyProcessor(); _localRepoFile = File.createTempFile( "maven-mercury-", "-test-repo" ); FileUtil.delete( _localRepoFile ); _localRepoFile.mkdirs(); _localRepoFile.deleteOnExit(); _localRepo = new LocalRepositoryM2( "localRepo", _localRepoFile, dp ); _remoteRepoFile = new File( _remoteRepoDir ); // _jetty = new HttpTestServer( _remoteRepoFile, _remoteRepoUrlSufix ); // FIXME 2009-02-12 Oleg: disabling not to mess with jetty server. Will move to Mercury ITs // _jetty.start(); // _port = _jetty.getPort(); // Server server = new Server( "testRemote", new URL(_remoteRepoUrlPrefix + _port + _remoteRepoUrlSufix) ); _remoteRepoUrlPrefix = "http://repo2.maven.org:"; _port = 80; _remoteRepoUrlSufix = "/maven2"; Server server = new Server( "testRemote", new URL(_remoteRepoUrlPrefix + _port + _remoteRepoUrlSufix) ); _remoteRepo = new RemoteRepositoryM2( server, dp ); ArrayList<Repository> repos = new ArrayList<Repository>(2); repos.add( _localRepo ); repos.add( _remoteRepo ); _depBuilder = DependencyBuilderFactory.create( DependencyBuilderFactory.JAVA_DEPENDENCY_MODEL, repos, null, null, null ); } /** * @throws java.lang.Exception */ @After public void tearDown() throws Exception { // if( _jetty != null ) // FIXME 2009-02-12 Oleg: disabling not to mess with jetty server. Will move to Mercury ITs // _jetty.stop(); // _jetty.destroy(); } @Test public void testDummy() throws Exception { } /** * Test method for {@link org.apache.maven.mercury.MavenDependencyProcessor#getDependencies(org.apache.maven.mercury.artifact.ArtifactMetadata, org.apache.maven.mercury.builder.api.MetadataReader, java.util.Map, java.util.Map)}. 
*/ @Test public void testMavenVersion() throws Exception { RepositoryReader rr = _remoteRepo.getReader(); // String gav = "org.apache.maven.plugins:maven-dependency-plugin:2.0"; String gav = "asm:asm-xml:3.0"; ArtifactMetadata bmd = new ArtifactMetadata( gav ); ArrayList<ArtifactMetadata> query = new ArrayList<ArtifactMetadata>(1); query.add( bmd ); MetadataResults res = rr.readDependencies( query ); assertNotNull( res ); assertFalse( res.hasExceptions() ); assertTrue( res.hasResults() ); List<ArtifactMetadata> deps = res.getResult( bmd ); assertNotNull( deps ); assertFalse( deps.isEmpty() ); ArtifactMetadata md = deps.get(0); System.out.println("found "+gav+" dependencies: "+deps); assertEquals( "3.0", md.getVersion() ); assertEquals( ArtifactScopeEnum.compile, md.getArtifactScope() ); } @Test public void testForNPE() throws Exception { RepositoryReader rr = _remoteRepo.getReader(); // String gav = "org.apache.maven.plugins:maven-dependency-plugin:2.0"; String gav = "org.codehaus.plexus:plexus-compiler-api:1.5.3::jar"; ArtifactMetadata bmd = new ArtifactMetadata( gav ); ArrayList<ArtifactMetadata> query = new ArrayList<ArtifactMetadata>(1); query.add( bmd ); MetadataResults res = rr.readDependencies( query ); assertNotNull( res ); assertFalse( res.hasExceptions() ); assertTrue( res.hasResults() ); List<ArtifactMetadata> deps = res.getResult( bmd ); assertNotNull( deps ); assertFalse( deps.isEmpty() ); ArtifactMetadata md = deps.get(0); System.out.println("found "+gav+" dependencies: "+deps); // assertEquals( "3.0", md.getVersion() ); // assertEquals( ArtifactScopeEnum.compile, md.getArtifactScope() ); } }
package org.navalplanner.web.planner; import static org.navalplanner.web.I18nHelper._; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Map.Entry; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hibernate.Hibernate; import org.joda.time.LocalDate; import org.navalplanner.business.common.IAdHocTransactionService; import org.navalplanner.business.common.IOnTransaction; import org.navalplanner.business.labels.entities.Label; import org.navalplanner.business.orders.daos.IOrderElementDAO; import org.navalplanner.business.orders.entities.OrderElement; import org.navalplanner.business.orders.entities.OrderStatusEnum; import org.navalplanner.business.planner.daos.IResourceAllocationDAO; import org.navalplanner.business.planner.daos.ITaskElementDAO; import org.navalplanner.business.planner.entities.Dependency; import org.navalplanner.business.planner.entities.DerivedAllocation; import org.navalplanner.business.planner.entities.GenericResourceAllocation; import org.navalplanner.business.planner.entities.ResourceAllocation; import org.navalplanner.business.planner.entities.SpecificResourceAllocation; import org.navalplanner.business.planner.entities.StartConstraintType; import org.navalplanner.business.planner.entities.Task; import org.navalplanner.business.planner.entities.TaskElement; import org.navalplanner.business.planner.entities.TaskStartConstraint; import org.navalplanner.business.planner.entities.Dependency.Type; import org.navalplanner.business.resources.daos.ICriterionDAO; import org.navalplanner.business.resources.daos.IResourceDAO; import org.navalplanner.business.resources.entities.Criterion; import org.navalplanner.business.resources.entities.Machine; import org.navalplanner.business.resources.entities.MachineWorkersConfigurationUnit; import org.navalplanner.business.resources.entities.Resource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.zkoss.ganttz.adapters.DomainDependency; import org.zkoss.ganttz.data.DependencyType; import org.zkoss.ganttz.data.ITaskFundamentalProperties; import org.zkoss.ganttz.data.constraint.Constraint; import org.zkoss.ganttz.data.constraint.DateConstraint; @Component @Scope(BeanDefinition.SCOPE_PROTOTYPE) public class TaskElementAdapter implements ITaskElementAdapter { private static final Log LOG = LogFactory.getLog(TaskElementAdapter.class); @Autowired private IAdHocTransactionService transactionService; @Autowired private IOrderElementDAO orderElementDAO; @Autowired private ITaskElementDAO taskDAO; @Autowired private IResourceDAO resourceDAO; @Autowired private ICriterionDAO criterionDAO; @Autowired private IResourceAllocationDAO resourceAllocationDAO; public TaskElementAdapter() { } private class TaskElementWrapper implements ITaskFundamentalProperties { private final TaskElement taskElement; protected TaskElementWrapper(TaskElement taskElement) { this.taskElement = taskElement; } @Override public void setName(String name) { taskElement.setName(name); } @Override public void setNotes(String notes) { 
taskElement.setNotes(notes); } @Override public String getName() { return taskElement.getName(); } @Override public String getNotes() { return taskElement.getNotes(); } @Override public Date getBeginDate() { return taskElement.getStartDate(); } @Override public long getLengthMilliseconds() { return taskElement.getEndDate().getTime() - taskElement.getStartDate().getTime(); } @Override public long setBeginDate(final Date beginDate) { Long runOnReadOnlyTransaction = transactionService .runOnReadOnlyTransaction(new IOnTransaction<Long>() { @Override public Long execute() { stepsBeforePossibleReallocation(); Long result = setBeginDateInsideTransaction(beginDate); return result; } }); return runOnReadOnlyTransaction; } private void reattachAllResourcesForTask() { Set<Resource> resources = resourcesForTask(); for (Resource each : resources) { resourceDAO.reattach(each); } for (Machine machine : Resource.machines(resources)) { Set<MachineWorkersConfigurationUnit> configurationUnits = machine .getConfigurationUnits(); for (MachineWorkersConfigurationUnit eachUnit : configurationUnits) { Hibernate.initialize(eachUnit); } } } private Set<Resource> resourcesForTask() { Set<ResourceAllocation<?>> resourceAllocations = taskElement.getSatisfiedResourceAllocations(); Set<Resource> resources = new HashSet<Resource>(); for (ResourceAllocation<?> each : resourceAllocations) { resources.addAll(each.getAssociatedResources()); for (DerivedAllocation derivedAllocation : each .getDerivedAllocations()) { resources .addAll(derivedAllocation.getResources()); } } return resources; } private Long setBeginDateInsideTransaction(final Date beginDate) { taskElement.moveTo(beginDate); return getLengthMilliseconds(); } @Override public void setLengthMilliseconds(final long lengthMilliseconds) { transactionService .runOnReadOnlyTransaction(new IOnTransaction<Void>() { @Override public Void execute() { stepsBeforePossibleReallocation(); updateEndDate(lengthMilliseconds); return null; } }); } private void updateEndDate(long lengthMilliseconds) { taskElement.resizeTo(new Date(getBeginDate().getTime() + lengthMilliseconds)); } @Override public Date getHoursAdvanceEndDate() { OrderElement orderElement = taskElement.getOrderElement(); Integer assignedHours = orderElementDAO .getAssignedHours(orderElement); LocalDate date = calculateLimitDate(assignedHours); if (date == null) { Integer hours = 0; if (orderElement != null) { hours = orderElement.getWorkHours(); } if (hours == 0) { return getBeginDate(); } else { BigDecimal percentage = new BigDecimal(assignedHours) .setScale(2).divide(new BigDecimal(hours), RoundingMode.DOWN); date = calculateLimitDate(percentage); } } return date.toDateTimeAtStartOfDay().toDate(); } @Override public BigDecimal getHoursAdvancePercentage() { OrderElement orderElement = taskElement.getOrderElement(); if (orderElement != null) { return orderElementDAO.getHoursAdvancePercentage(orderElement); } else { return new BigDecimal(0); } } @Override public Date getAdvanceEndDate() { OrderElement orderElement = taskElement.getOrderElement(); BigDecimal advancePercentage; Integer hours; if (orderElement != null) { advancePercentage = orderElement .getAdvancePercentage(); hours = taskElement.getTotalHoursAssigned(); } else { advancePercentage = new BigDecimal(0); hours = Integer.valueOf(0); } Integer advanceHours = advancePercentage.multiply( new BigDecimal(hours)).intValue(); LocalDate date = calculateLimitDate(advanceHours); if (date == null) { date = calculateLimitDate(advancePercentage); } return 
date.toDateTimeAtStartOfDay().toDate(); } private LocalDate calculateLimitDate(BigDecimal advancePercentage) { Long totalMillis = getLengthMilliseconds(); Long advanceMillis = advancePercentage.multiply( new BigDecimal(totalMillis)).longValue(); return new LocalDate(getBeginDate().getTime() + advanceMillis); } @Override public BigDecimal getAdvancePercentage() { if (taskElement.getOrderElement() != null) { return taskElement.getOrderElement().getAdvancePercentage(); } return new BigDecimal(0); } private LocalDate calculateLimitDate(Integer hours) { if (hours == null || hours == 0) { return null; } boolean limitReached = false; Integer count = 0; LocalDate lastDay = null; Integer hoursLastDay = 0; Map<LocalDate, Integer> daysMap = taskElement .getHoursAssignedByDay(); if (daysMap.isEmpty()) { return null; } for (Entry<LocalDate, Integer> entry : daysMap.entrySet()) { lastDay = entry.getKey(); hoursLastDay = entry.getValue(); count += hoursLastDay; if (count >= hours) { limitReached = true; break; } } if (!limitReached) { while (count < hours) { count += hoursLastDay; lastDay = lastDay.plusDays(1); } } return lastDay.plusDays(1); } @Override public String getTooltipText() { return transactionService .runOnReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO .reattachUnmodifiedEntity(taskElement .getOrderElement()); return buildTooltipText(); } }); } @Override public String getLabelsText() { return transactionService .runOnReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO .reattachUnmodifiedEntity(taskElement .getOrderElement()); return buildLabelsText(); } }); } @Override public String getResourcesText() { if (taskElement.getOrderElement() == null) { return ""; } try { return transactionService .runOnAnotherReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO .reattachUnmodifiedEntity(taskElement .getOrderElement()); return buildResourcesText(); } }); } catch (Exception e) { LOG.error("error calculating resources text", e); return ""; } } private Set<Label> getLabelsFromElementAndPredecesors( OrderElement order) { if (order != null) { if (order.getParent() == null) { return order.getLabels(); } else { HashSet<Label> labels = new HashSet<Label>(order .getLabels()); labels.addAll(getLabelsFromElementAndPredecesors(order .getParent())); return labels; } } return new HashSet<Label>(); } private String buildLabelsText() { StringBuilder result = new StringBuilder(); if (taskElement.getOrderElement() != null) { Set<Label> labels = getLabelsFromElementAndPredecesors(taskElement .getOrderElement()); if (!labels.isEmpty()) { for (Label label : labels) { result.append(label.getName()).append(","); } result.delete(result.length() - 1, result .length()); } } return result.toString(); } private String buildResourcesText() { List<String> result = new ArrayList<String>(); for (ResourceAllocation<?> each : taskElement .getSatisfiedResourceAllocations()) { if (each instanceof SpecificResourceAllocation) { for (Resource r : each.getAssociatedResources()) { String representation = r.getName(); if (!result.contains(representation)) { result.add(representation); } } } else { String representation = extractRepresentationForGeneric((GenericResourceAllocation) each); if (!result.contains(representation)) { result.add(representation); } } } return StringUtils.join(result, ", "); } private String extractRepresentationForGeneric( GenericResourceAllocation generic) { if 
(!generic.isNewObject()) { resourceAllocationDAO.reattach(generic); } Set<Criterion> criterions = generic.getCriterions(); List<String> forCriterionRepresentations = new ArrayList<String>(); if (!criterions.isEmpty()) { for (Criterion c : criterions) { criterionDAO.reattachUnmodifiedEntity(c); forCriterionRepresentations.add(c.getName()); } } else { forCriterionRepresentations.add((_("All workers"))); } return "[" + StringUtils.join(forCriterionRepresentations, ", ") + "]"; } private String buildTooltipText() { StringBuilder result = new StringBuilder(); result.append(_("Name: {0}", getName()) + "<br/>"); result.append(_("Advance") + ": ").append( getAdvancePercentage().multiply(new BigDecimal(100))) .append("% , "); result.append(_("Hours invested") + ": ").append( getHoursAdvancePercentage().multiply(new BigDecimal(100))) .append("% <br/>"); result.append(_("State") +": ").append(getOrderState()); String labels = buildLabelsText(); if (!labels.equals("")) { result.append("<div class='tooltip-labels'>" + _("Labels") + ": " + labels + "</div>"); } return result.toString(); } private String getOrderState() { String cssClass; OrderStatusEnum state = taskElement.getOrderElement().getOrder().getState(); if(Arrays.asList(OrderStatusEnum.ACCEPTED, OrderStatusEnum.OFFERED,OrderStatusEnum.STARTED, OrderStatusEnum.SUBCONTRACTED_PENDING_ORDER) .contains(state)) { if(taskElement.getAssignedStatus() == "assigned") { cssClass="order-open-assigned"; } else { cssClass="order-open-unassigned"; } } else { cssClass="order-closed"; } return "<font class='" + cssClass + "'>" + state.toString() + "</font>"; } @Override public List<Constraint<Date>> getStartConstraints() { if (taskElement instanceof Task) { Task task = (Task) taskElement; TaskStartConstraint startConstraint = task.getStartConstraint(); final StartConstraintType constraintType = startConstraint .getStartConstraintType(); switch (constraintType) { case AS_SOON_AS_POSSIBLE: return Collections.emptyList(); case START_IN_FIXED_DATE: return Collections.singletonList(DateConstraint .equalTo(startConstraint.getConstraintDate())); case START_NOT_EARLIER_THAN: return Collections.singletonList(DateConstraint .biggerOrEqualThan(startConstraint .getConstraintDate())); default: throw new RuntimeException("can't handle " + constraintType); } } else if (taskElement.isMilestone()) { return Collections.singletonList(DateConstraint .biggerOrEqualThan(taskElement.getStartDate())); } else { return Collections.emptyList(); } } @Override public void moveTo(Date date) { setBeginDate(date); if (taskElement instanceof Task) { Task task = (Task) taskElement; task.explicityMoved(date); } } @Override public Date getDeadline() { LocalDate deadline = taskElement.getDeadline(); if (deadline == null) { return null; } return deadline.toDateTimeAtStartOfDay().toDate(); } @Override public Date getConsolidatedline() { if (!taskElement.isLeaf() || !taskElement.hasConsolidations()) { return null; } LocalDate consolidatedline = ((Task) taskElement) .getFirstDayNotConsolidated(); if (consolidatedline == null) { return null; } return consolidatedline.minusDays(1).toDateTimeAtStartOfDay() .toDate(); } @Override public boolean isSubcontracted() { return taskElement.isSubcontracted(); } @Override public boolean isLimiting() { return taskElement.isLimiting(); } @Override public boolean isLimitingAndHasDayAssignments() { return taskElement.isLimitingAndHasDayAssignments(); } public boolean hasConsolidations() { return taskElement.hasConsolidations(); } private void 
stepsBeforePossibleReallocation() { taskDAO.reattach(taskElement); reattachAllResourcesForTask(); } @Override public boolean canBeExplicitlyResized() { return taskElement.canBeExplicitlyResized(); } @Override public String getAssignedStatus() { return taskElement.getAssignedStatus(); } @Override public boolean isFixed() { return taskElement.isLimitingAndHasDayAssignments(); } } @Override public ITaskFundamentalProperties adapt(final TaskElement taskElement) { return new TaskElementWrapper(taskElement); } @Override public List<DomainDependency<TaskElement>> getIncomingDependencies( TaskElement taskElement) { return toDomainDependencies(taskElement .getDependenciesWithThisDestination()); } @Override public List<DomainDependency<TaskElement>> getOutcomingDependencies( TaskElement taskElement) { return toDomainDependencies(taskElement .getDependenciesWithThisOrigin()); } private List<DomainDependency<TaskElement>> toDomainDependencies( Collection<? extends Dependency> dependencies) { List<DomainDependency<TaskElement>> result = new ArrayList<DomainDependency<TaskElement>>(); for (Dependency dependency : dependencies) { result.add(DomainDependency.createDependency( dependency.getOrigin(), dependency.getDestination(), toGanntType(dependency .getType()))); } return result; } private DependencyType toGanntType(Type type) { switch (type) { case END_START: return DependencyType.END_START; case START_START: return DependencyType.START_START; case END_END: return DependencyType.END_END; case START_END: default: throw new RuntimeException(_("{0} not supported yet", type)); } } private Type toDomainType(DependencyType type) { switch (type) { case END_START: return Type.END_START; case START_START: return Type.START_START; case END_END: return Type.END_END; default: throw new RuntimeException(_("{0} not supported yet", type)); } } @Override public void addDependency(DomainDependency<TaskElement> dependency) { TaskElement source = dependency.getSource(); TaskElement destination = dependency.getDestination(); Type domainType = toDomainType(dependency.getType()); Dependency.create(source, destination, domainType); } @Override public boolean canAddDependency(DomainDependency<TaskElement> dependency) { return true; } @Override public void removeDependency(DomainDependency<TaskElement> dependency) { TaskElement source = dependency.getSource(); Type type = toDomainType(dependency.getType()); source.removeDependencyWithDestination(dependency.getDestination(), type); } }
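// Usage sketch: how the Gantt component is expected to consume the adapter above. A
// TaskElement is wrapped once via adapt() and then queried through the
// ITaskFundamentalProperties interface; the adapter instance is assumed to come from the
// Spring context (the bean is declared with prototype scope).
class TaskElementAdapterUsageSketch {
    void readTask(org.navalplanner.web.planner.ITaskElementAdapter adapter,
            org.navalplanner.business.planner.entities.TaskElement taskElement) {
        org.zkoss.ganttz.data.ITaskFundamentalProperties props = adapter.adapt(taskElement);
        String name = props.getName();
        java.util.Date start = props.getBeginDate();
        long lengthMillis = props.getLengthMilliseconds();
        java.util.List<org.zkoss.ganttz.data.constraint.Constraint<java.util.Date>> startConstraints =
                props.getStartConstraints();
        // all of the above delegate to the wrapped TaskElement and its order element
    }
}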
package org.pac4j.core.util; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.HttpURLConnection; import java.net.URL; import java.net.URLConnection; import java.net.URLEncoder; import java.util.Collection; import java.util.Date; import org.pac4j.core.context.HttpConstants; import org.pac4j.core.exception.TechnicalException; import org.pac4j.core.io.Resource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class gathers all the utilities methods. * * @author Jerome Leleu * @since 1.4.0 */ public final class CommonHelper { private static final Logger logger = LoggerFactory.getLogger(CommonHelper.class); public static final String RESOURCE_PREFIX = "resource"; public static final String CLASSPATH_PREFIX = "classpath"; public static final String HTTP_PREFIX = "http"; public static final String HTTPS_PREFIX = "https"; public static final String INVALID_PATH_MESSAGE = "begin with '" + RESOURCE_PREFIX + ":', '" + CLASSPATH_PREFIX + ":', '" + HTTP_PREFIX + ":' or it must be a physical readable non-empty local file " + "at the path specified."; /** * Return if the String is not blank. * * @param s string * @return if the String is not blank */ public static boolean isNotBlank(final String s) { if (s == null) { return false; } return s.trim().length() > 0; } /** * Return if the String is blank. * * @param s string * @return if the String is blank */ public static boolean isBlank(final String s) { return !isNotBlank(s); } /** * Compare two String to see if they are equals (both null is ok). * * @param s1 string * @param s2 string * @return if two String are equals */ public static boolean areEquals(final String s1, final String s2) { return s1 == null ? s2 == null : s1.equals(s2); } /** * Compare two String to see if they are equals ignoring the case and the blank spaces (both null is ok). * * @param s1 string * @param s2 string * @return if two String are equals ignoring the case and the blank spaces */ public static boolean areEqualsIgnoreCaseAndTrim(final String s1, final String s2) { if (s1 == null && s2 == null) { return true; } else if (s1 != null && s2 != null) { return s1.trim().equalsIgnoreCase(s2.trim()); } else { return false; } } /** * Compare two String to see if they are not equals. * * @param s1 string * @param s2 string * @return if two String are not equals */ public static boolean areNotEquals(final String s1, final String s2) { return !areEquals(s1, s2); } /** * Return if a collection is empty. * * @param coll a collection * @return whether it is empty */ public static boolean isEmpty(final Collection<?> coll) { return coll == null || coll.isEmpty(); } /** * Return if a collection is not empty. * * @param coll a collection * @return whether it is not empty */ public static boolean isNotEmpty(final Collection<?> coll) { return !isEmpty(coll); } /** * Verify that a boolean is true otherwise throw a {@link TechnicalException}. * * @param value the value to be checked for truth * @param message the message to include in the exception if the value is false */ public static void assertTrue(final boolean value, final String message) { if (!value) { throw new TechnicalException(message); } } /** * Verify that a String is not blank otherwise throw a {@link TechnicalException}. 
* * @param name name of the string * @param value value of the string */ public static void assertNotBlank(final String name, final String value) { assertTrue(!isBlank(value), name + " cannot be blank"); }
/** * Verify that an Object is not <code>null</code> otherwise throw a {@link TechnicalException}. * * @param name name of the object * @param obj object */ public static void assertNotNull(final String name, final Object obj) { assertTrue(obj != null, name + " cannot be null"); }
/** * Verify that an Object is <code>null</code> otherwise throw a {@link TechnicalException}. * * @param name name of the object * @param obj object */ public static void assertNull(final String name, final Object obj) { assertTrue(obj == null, name + " must be null"); }
/** * Add a new parameter to an url. * * @param url url * @param name name of the parameter * @param value value of the parameter * @return the new url with the parameter appended */ public static String addParameter(final String url, final String name, final String value) { if (url != null) { final StringBuilder sb = new StringBuilder(); sb.append(url); if (name != null) { if (url.indexOf("?") >= 0) { sb.append("&"); } else { sb.append("?"); } sb.append(name); sb.append("="); if (value != null) { sb.append(urlEncode(value)); } } return sb.toString(); } return null; }
/** * URL encode a text using UTF-8. * * @param text text to encode * @return the encoded text */ public static String urlEncode(final String text) { try { return URLEncoder.encode(text, HttpConstants.UTF8_ENCODING); } catch (final UnsupportedEncodingException e) { String message = "Unable to encode text : " + text; throw new TechnicalException(message, e); } }
/** * Build a normalized "toString" text for an object. * * @param clazz class * @param args arguments * @return a normalized "toString" text */ public static String toString(final Class<?> clazz, final Object... args) { final StringBuilder sb = new StringBuilder(); sb.append("#"); sb.append(clazz.getSimpleName()); sb.append("# |"); boolean b = true; for (final Object arg : args) { if (b) { sb.append(" "); sb.append(arg); sb.append(":"); } else { sb.append(" "); sb.append(arg); sb.append(" |"); } b = !b; } return sb.toString(); }
/** * Returns an {@link InputStream} from given name depending on its format: * - loads from the classloader if name starts with "resource:" * - loads as {@link FileInputStream} otherwise * * Caller is responsible for closing inputstream * * @param name name of the resource * @return the input stream */ public static InputStream getInputStreamFromName(String name) { int prefixEnd = name.indexOf(":"); String prefix = null; String path = name; if (prefixEnd != -1) { prefix = name.substring(0, prefixEnd); path = name.substring(prefixEnd + 1); } if (prefix == null) { try { return new FileInputStream(path); } catch (FileNotFoundException e) { throw new TechnicalException(e); } } switch (prefix) { case RESOURCE_PREFIX: if (!path.startsWith("/")) { path = "/" + path; } // The choice here was to keep legacy behavior and remove / prior to // calling classloader.getResourceAsStream.. or make it work exactly // as it did before but have different behavior for resource: and // classpath: // My decision was to keep legacy working the same.
return CommonHelper.class.getResourceAsStream(path); case CLASSPATH_PREFIX: return Thread.currentThread().getContextClassLoader().getResourceAsStream(path); case HTTP_PREFIX: logger.warn("file is retrieved from an insecure http endpoint [{}]", path); return getInputStreamViaHttp(name); case HTTPS_PREFIX: return getInputStreamViaHttp(name); default: throw new TechnicalException("prefix is not handled: " + prefix); } } private static InputStream getInputStreamViaHttp(String name) { URLConnection con = null; try { URL url = new URL(name); con = url.openConnection(); return con.getInputStream(); } catch (IOException ex) { // Close the HTTP connection (if applicable). if (con instanceof HttpURLConnection) { ((HttpURLConnection) con).disconnect(); } throw new TechnicalException(ex); } } public static Resource getResource(final String filePath) { return new Resource() { @Override public InputStream getInputStream() throws IOException { return getInputStreamFromName(filePath); } @Override public String getFilename() { throw new UnsupportedOperationException("not implemented"); } @Override public boolean exists() { throw new UnsupportedOperationException("not implemented"); } }; } /** * Return a random string of a certain size. * * @param size the size * @return the random string */ public static String randomString(final int size) { return java.util.UUID.randomUUID().toString().replace("-", "").substring(0, size); } /** * Copy a date. * * @param original original date * @return date copy */ public static Date newDate(final Date original) { return original != null ? new Date(original.getTime()) : null; } /** * Taken from commons-lang3 */ private static final String EMPTY = ""; private static final int INDEX_NOT_FOUND = -1; public static String substringBetween(String str, String open, String close) { if (str == null || open == null || close == null) { return null; } int start = str.indexOf(open); if (start != INDEX_NOT_FOUND) { int end = str.indexOf(close, start + open.length()); if (end != INDEX_NOT_FOUND) { return str.substring(start + open.length(), end); } } return null; } public static String substringAfter(String str, String separator) { if (isEmpty(str)) { return str; } if (separator == null) { return EMPTY; } int pos = str.indexOf(separator); if (pos == INDEX_NOT_FOUND) { return EMPTY; } return str.substring(pos + separator.length()); } private static boolean isEmpty(CharSequence cs) { return cs == null || cs.length() == 0; } }
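/*
 * Editor's note: the following class is an illustrative usage sketch, not part of pac4j.
 * It only exercises the static helpers defined in CommonHelper above; the class name and
 * the sample values are made up for the example.
 */
import org.pac4j.core.util.CommonHelper;

class CommonHelperUsageSketch {
    public static void main(String[] args) {
        System.out.println(CommonHelper.isNotBlank("   "));               // false: blank after trimming
        System.out.println(CommonHelper.areEquals(null, null));           // true: both null is ok
        // appends "?state=a+b": the URL has no query string yet and the value is URL-encoded
        System.out.println(CommonHelper.addParameter("http://example.com/cb", "state", "a b"));
        // commons-lang3 style substring helpers
        System.out.println(CommonHelper.substringBetween("id=42;", "id=", ";")); // 42
        System.out.println(CommonHelper.substringAfter("a/b/c", "/"));           // b/c
        // assertion helpers throw a TechnicalException when the check fails
        CommonHelper.assertNotBlank("clientName", "facebook");
        CommonHelper.assertNotNull("configuration", new Object());
    }
}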
package istic.gla.groupb.nivimoju.API; import istic.gla.groupb.nivimoju.container.DroneContainer; import istic.gla.groupb.nivimoju.container.InterventionContainer; import istic.gla.groupb.nivimoju.drone.engine.DroneEngine; import istic.gla.groupb.nivimoju.entity.Intervention; import istic.gla.groupb.nivimoju.entity.Resource; import org.apache.log4j.Logger; import javax.ws.rs.*; import javax.ws.rs.Path; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.util.Collection; @Path("intervention") public class InterventionAPI { Logger logger = Logger.getLogger(InterventionAPI.class); /** * Gets all the running interventions * @return A list of interventions */ @GET @Produces(MediaType.APPLICATION_JSON) public Response getInterventions() { Collection<Intervention> inters = InterventionContainer.getInstance().getInterventions(); logger.info("intervention:" + inters); return Response.ok(inters).build(); } /** * Gets an intervention by its id * @param idintervention The id of the intervention * @return An intervention */ @Path("/{idintervention}") @GET @Produces(MediaType.APPLICATION_JSON) public Response getInterventionById( @PathParam("idintervention") long idintervention) { Intervention intervention = InterventionContainer.getInstance().getInterventionById(idintervention); return Response.ok(intervention).build(); } /** * Creates a new intervention with a default list of vehicles based on its sinister code * @param intervention The intervention to create * @return The created intervention */ @Path("/create") @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response createIntervention( Intervention intervention) { Intervention resultat = InterventionContainer.getInstance().createIntervention(intervention); return Response.ok(resultat).build(); } /** * Updates an intervention * @param intervention The intervention to update * @return The updated intervention */ @Path("/update") @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response updateIntervention( Intervention intervention) { Intervention result = InterventionContainer.getInstance().updateIntervention(intervention); return Response.ok(result).build(); } /** * Stops the intervention * @param inter The id of the intervention * @return OK if the intervention has been correctly stopped */ @PUT @Path("{inter}/stopped") public Response stopIntervention(@PathParam("inter") String inter) { return Response.ok().build(); } /** * Gets all the agents of an intervention * @param inter The id of the intervention * @return A list of agents */ @GET @Path("{inter}/agent") public Response getAgents(@PathParam("inter") String inter) { return Response.ok().build(); } /** * Adds an agent on an intervention * @param inter The id of the intervention * @param agent The id of the agent * @return OK if the agent has been correctly added to the intervention */ @PUT @Path("{inter}/agent/{agent}") public Response addAgent( @PathParam("inter") String inter, @PathParam("agent") String agent) { return Response.ok().build(); } /** * Gets all the resources of an intervention * @param inter The id of the intervention * @return A list of resources */ @GET @Path("{inter}/resources") public Response getResources(@PathParam("inter") String inter) { return Response.ok().build(); } /** * Changes the state of a resource * @param inter The id of the intervention * @param res The id of the resource * @param state A String representing the state * @return OK if the state has been correctly updated */ @PUT @Path("{inter}/resources/{res}/{state}") public Response
changeResourceState( @PathParam("inter") Long inter, @PathParam("res") Long res, @PathParam("state") String state) { try { Intervention intervention = InterventionContainer.getInstance().changeResourceState(inter, res, state); return Response.ok(intervention).build(); } catch (Exception ex) { return Response.serverError().build(); } } /** * Requests a vehicle for the intervention * @param inter The id of the intervention * @param vehicle The label of the requested vehicle type * @return The intervention updated with the requested vehicle */ @PUT @Path("{inter}/resources/{vehicle}") public Response requestVehicle( @PathParam("inter") Long inter, @PathParam("vehicle") String vehicle) { Intervention intervention = InterventionContainer.getInstance().addResource(inter, vehicle); return Response.ok(intervention).build(); } /** * Places the vehicle at coordinates with a role * @param inter The id of the intervention * @param newResource The resource to place * @return The updated intervention if the vehicle has been correctly placed */ @POST @Path("{inter}/resources/update") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response placeVehicle( Resource newResource, @PathParam("inter") Long inter) { Intervention intervention = InterventionContainer.getInstance().placeVehicle(inter, newResource); return Response.ok(intervention).build(); } /** * Updates the watch paths of an intervention, requesting or freeing drones accordingly * @param intervention The intervention carrying the new watch paths and areas * @return OK with the updated intervention, or NOT_FOUND if the intervention does not exist */ @POST @Path("update/paths") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response updatePaths(Intervention intervention) { logger.info("updating path for intervention " + intervention.getId()); Intervention oldInter = InterventionContainer.getInstance().getInterventionById(intervention.getId()); if(oldInter == null){ logger.warn("intervention does not seem to exist in db"); return Response.status(Response.Status.NOT_FOUND) .build(); } //request or free drones int neededDroneNumber = intervention.getWatchPath().size() + intervention.getWatchArea().size(); int currentlyAssignedDroneNumber = DroneContainer.getInstance().getDronesAssignedTo(intervention.getId()).size(); int deltaDroneNumber = neededDroneNumber - currentlyAssignedDroneNumber; if(deltaDroneNumber > 0) { logger.info(String.format("the path update is asking for %d new drones", deltaDroneNumber)); DroneContainer.getInstance().requestDrones(intervention.getId(), deltaDroneNumber); } else if(deltaDroneNumber < 0) { logger.info(String.format("the path update is asking for liberation of %d drones", -deltaDroneNumber)); DroneContainer.getInstance().freeDrones(intervention.getId(), -deltaDroneNumber); } else { logger.info("the path update does not need to change drone affectations"); } //alerting the engine DroneEngine.getInstance().computeForIntervention(intervention); return Response.status(Response.Status.OK) .entity(intervention) .build(); } }
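/*
 * Editor's note: hedged client-side sketch, not part of this API. It shows how the JAX-RS
 * endpoints above could be called with the standard javax.ws.rs client. The base URL
 * (http://localhost:8080/rest), the intervention id 42 and the vehicle label "VSAV" are
 * assumptions for illustration only.
 */
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

class InterventionApiClientSketch {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        // GET intervention -> all running interventions as JSON
        Response all = client.target("http://localhost:8080/rest")
                .path("intervention")
                .request(MediaType.APPLICATION_JSON)
                .get();
        System.out.println(all.readEntity(String.class));
        // PUT intervention/{inter}/resources/{vehicle} -> request an extra vehicle
        Response vehicle = client.target("http://localhost:8080/rest")
                .path("intervention/42/resources/VSAV")
                .request(MediaType.APPLICATION_JSON)
                .put(Entity.json(""));
        System.out.println(vehicle.getStatus());
        client.close();
    }
}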
package cc.topicexplorer.web; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.Properties; import java.util.Set; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; import cc.commandmanager.core.Context; /** * Servlet implementation class JsonServlet */ public class JsonServlet extends HttpServlet { private static final long serialVersionUID = 1L; private static final Logger logger = Logger.getLogger(JsonServlet.class); @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String command = request.getParameter("Command"); response.setCharacterEncoding("UTF-8"); PrintWriter writer = response.getWriter(); Context context = new Context(WebChainManagement.getContext()); context.bind("SERVLET_WRITER", writer); int offset = (request.getParameter("offset") != null) ? Integer.parseInt(request.getParameter("offset")) : 0; Set<String> startCommands = new HashSet<String>(); if (command != null) { if (command.contains("getDoc")) { context.bind("SHOW_DOC_ID", request.getParameter("DocId")); startCommands.add("ShowDocCoreCreate"); } else if (command.contains("bestDocs")) { context.bind("TOPIC_ID", request.getParameter("TopicId")); context.bind("OFFSET", offset); @SuppressWarnings("unchecked") Enumeration<String> parameterNames = request.getParameterNames(); while (parameterNames.hasMoreElements()) { String paramName = parameterNames.nextElement(); context.bind(paramName, request.getParameter(paramName)); } startCommands.add("BestDocsCoreCreate"); } else if (command.contains("allTerms")) { startCommands.add("AllTermsCoreCreate"); } else if (command.contains("autocomplete")) { context.bind("SEARCH_WORD", request.getParameter("SearchWord")); startCommands.add("AutocompleteCoreCreate"); } else if (command.contains("search")) { context.bind("SEARCH_WORD", request.getParameter("SearchWord")); context.bind("OFFSET", offset); startCommands.add("SearchCoreCreate"); } else if (command.contains("getBestFrames")) { startCommands.add("BestFrameCreate"); } else if (command.contains("getFrames")) { context.bind("TOPIC_ID", request.getParameter("topicId")); context.bind("OFFSET", offset); startCommands.add("FrameCreate"); } else if (command.contains("getDates")) { startCommands.add("GetDatesTimeCreate"); } WebChainManagement.executeCommands(WebChainManagement.getOrderedCommands(startCommands), context); } else { startCommands.add("InitCoreCreate"); writer.print("{\"FRONTEND_VIEWS\":" + this.getFrontendViews((Properties) context.get("properties")) + ",\"JSON\":"); WebChainManagement.executeCommands(WebChainManagement.getOrderedCommands(startCommands), context); Properties properties = (Properties) context.get("properties"); String plugins = properties.getProperty("plugins"); String[] pluginArray = plugins.split(","); List<String> pluginList = new ArrayList<String>(); for (String element : pluginArray) { pluginList.add("\"" + element + "\""); } writer.print(", \"PLUGINS\":" + pluginList.toString()); writer.print(", \"LIMIT\":" + Integer.parseInt(properties.getProperty("DocBrowserLimit"))); writer.print("}"); } } /** * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response) */ @Override protected void doPost(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException { // TODO Auto-generated method stub } private String getFrontendViews(Properties properties) { String plugins = properties.getProperty("plugins"); String pluginArray[] = plugins.split(","); List<String> frontendViews = new ArrayList<String>(); // init String frontendViewArray[] = properties.get("FrontendViews").toString().split(","); for (int j = 0; j < frontendViewArray.length; j++) { if (!frontendViews.contains("\"" + frontendViewArray[j] + "\"")) { frontendViews.add("\"" + frontendViewArray[j] + "\""); } } for (String element : pluginArray) { if(!element.isEmpty()) { try { frontendViewArray = properties .get(element.substring(0, 1).toUpperCase() + element.substring(1) + "_FrontendViews") .toString().split(","); for (int k = 0; k < frontendViewArray.length; k++) { if (!frontendViews.contains("\"" + frontendViewArray[k] + "\"")) { frontendViews.add("\"" + frontendViewArray[k] + "\""); } } } catch (Exception e) { // TODO Specify exception type! logger.info("Property " + element.substring(0, 1).toUpperCase() + element.substring(1) + "_FrontendViews not found"); } } } logger.info(frontendViews.toString()); return frontendViews.toString(); } }
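/*
 * Editor's note: hedged request sketch, not part of the servlet. It fetches the best
 * documents for a topic over plain HTTP; the host and servlet mapping (/JsonServlet) are
 * assumptions, while the parameter names (Command, TopicId, offset) come from doGet above.
 */
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

class JsonServletRequestSketch {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/topicexplorer/JsonServlet?Command=bestDocs&TopicId=3&offset=0");
        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream(), "UTF-8"))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line); // JSON written by the chained commands
            }
        } finally {
            con.disconnect();
        }
    }
}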
package com.yahoo.vespa.hosted.node.admin.maintenance; import io.airlift.command.Cli; import io.airlift.command.Command; import io.airlift.command.Help; import io.airlift.command.Option; import io.airlift.command.ParseArgumentsUnexpectedException; import io.airlift.command.ParseOptionMissingException; /** * @author valerijf */ public class Maintainer { @SuppressWarnings("unchecked") public static void main(String[] args) { Cli.CliBuilder<Runnable> builder = Cli.<Runnable>builder("maintainer.jar") .withDescription("This tool makes it easy to delete old log files and other node-admin app data.") .withDefaultCommand(Help.class) .withCommands(Help.class, DeleteOldAppDataArguments.class, DeleteOldLogsArguments.class); Cli<Runnable> parser = builder.build(); try { parser.parse(args).run(); } catch (ParseArgumentsUnexpectedException | ParseOptionMissingException e) { System.err.println(e.getMessage()); parser.parse("help").run(); } } @Command(name = "delete-old-app-data", description = "Deletes all data within a folder and its sub-folders which match the criteria") private static class DeleteOldAppDataArguments implements Runnable { @Option(name = {"--path"}, required = true, description = "Path to directory which contains the app data") private String path; @Option(name = {"--max_age"}, description = "Delete files older than (in seconds)") private long maxAge = DeleteOldAppData.DEFAULT_MAX_AGE_IN_SECONDS; @Option(name = {"--prefix"}, description = "Delete files that start with prefix") private String prefix; @Option(name = {"--suffix"}, description = "Delete files that end with suffix") private String suffix; @Override public void run() { DeleteOldAppData.deleteOldAppData(path, maxAge, prefix, suffix, true); } } @Command(name = "delete-old-logs", description = "Deletes all log files that match the criteria in path") private static class DeleteOldLogsArguments implements Runnable { @Option(name = {"--path"}, required = true, description = "Path to directory which contains the log files") private String path; @Option(name = {"--max_age"}, description = "Delete files older than (in seconds)") private long maxAge = DeleteOldAppData.DEFAULT_MAX_AGE_IN_SECONDS; @Option(name = {"--prefix"}, description = "Delete files that start with prefix") private String prefix; @Option(name = {"--suffix"}, description = "Delete files that end with suffix") private String suffix; @Override public void run() { DeleteOldAppData.deleteOldAppData(path, maxAge, prefix, suffix, false); } } }
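/*
 * Editor's note: hedged usage sketch, not shipped with this module. It drives the CLI above
 * programmatically; the directory path is an invented example and 604800 is simply 7 days in
 * seconds. Command and option names are the ones declared in the @Command/@Option annotations.
 */
import com.yahoo.vespa.hosted.node.admin.maintenance.Maintainer;

class MaintainerInvocationSketch {
    public static void main(String[] args) {
        // equivalent to: java -jar maintainer.jar delete-old-logs --path <dir> --max_age 604800 --suffix .log
        Maintainer.main(new String[] {
                "delete-old-logs",
                "--path", "/home/y/logs/vespa",   // assumed example directory
                "--max_age", "604800",            // 7 days in seconds
                "--suffix", ".log"
        });
    }
}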
package org.eclipse.scanning.device.ui.points; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.eclipse.core.resources.IFile; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IAction; import org.eclipse.jface.action.IContributionManager; import org.eclipse.jface.action.IMenuListener; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.Separator; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.util.IPropertyChangeListener; import org.eclipse.jface.util.LocalSelectionTransfer; import org.eclipse.jface.util.PropertyChangeEvent; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.TreeSelection; import org.eclipse.richbeans.widgets.file.FileSelectionDialog; import org.eclipse.richbeans.widgets.internal.GridUtils; import org.eclipse.richbeans.widgets.table.ISeriesItemDescriptor; import org.eclipse.richbeans.widgets.table.SeriesTable; import org.eclipse.scanning.api.event.IEventService; import org.eclipse.scanning.api.points.IPointGenerator; import org.eclipse.scanning.api.points.IPointGeneratorService; import org.eclipse.scanning.api.points.models.IScanPathModel; import org.eclipse.scanning.api.scan.ui.ControlTree; import org.eclipse.scanning.device.ui.Activator; import org.eclipse.scanning.device.ui.DevicePreferenceConstants; import org.eclipse.scanning.device.ui.ServiceHolder; import org.eclipse.scanning.device.ui.device.ControlTreeUtils; import org.eclipse.scanning.device.ui.model.ModelView; import org.eclipse.scanning.device.ui.util.PageUtil; import org.eclipse.scanning.device.ui.util.Stashing; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CLabel; import org.eclipse.swt.dnd.DropTarget; import org.eclipse.swt.dnd.DropTargetAdapter; import org.eclipse.swt.dnd.DropTargetEvent; import org.eclipse.swt.dnd.FileTransfer; import org.eclipse.swt.dnd.TextTransfer; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.MouseAdapter; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.ui.IMemento; import org.eclipse.ui.IViewReference; import org.eclipse.ui.IViewSite; import org.eclipse.ui.PartInitException; import org.eclipse.ui.part.ResourceTransfer; import org.eclipse.ui.part.ViewPart; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This view allows users to build up arbitrary scans * and run them. * * @author Matthew Gerring * * TODO Convert to e4 view. 
* */ public class ScanView extends ViewPart { public static final String ID = "org.eclipse.scanning.device.ui.scanEditor"; private static final Logger logger = LoggerFactory.getLogger(ScanView.class); // Services private IPointGeneratorService pservice; private IEventService eservice; private SeriesTable seriesTable; private GeneratorFilter pointsFilter; // Data private List<GeneratorDescriptor<?>> saved; private ControlTree startTree, endTree; // File private Stashing stash; // Preferences private IPreferenceStore store; public ScanView() { this.pservice = ServiceHolder.getGeneratorService(); this.eservice = ServiceHolder.getEventService(); this.seriesTable = new SeriesTable(); this.pointsFilter = new GeneratorFilter(pservice, eservice.getEventConnectorService(), seriesTable); this.stash = new Stashing("org.eclipse.scanning.device.ui.scan.models.json", ServiceHolder.getEventConnectorService()); this.store = Activator.getDefault().getPreferenceStore(); store.setDefault(DevicePreferenceConstants.START_POSITION, false); store.setDefault(DevicePreferenceConstants.END_POSITION, false); } @Override public void init(IViewSite site, IMemento memento) throws PartInitException { super.init(site, memento); if (stash.isStashed()) { try { final List<IScanPathModel> models = stash.unstash(List.class); this.saved = pointsFilter.createDescriptors(models); } catch (Exception e) { logger.error("Cannot load generators to memento!", e); } } } @Override public void saveState(IMemento memento) { try { final List<Object> models = pointsFilter.getModels(seriesTable.getSeriesItems()); stash.stash(models); Stashing tstash = new Stashing(DevicePreferenceConstants.START_POSITION+".json", ServiceHolder.getEventConnectorService()); tstash.stash(startTree); tstash = new Stashing(DevicePreferenceConstants.END_POSITION+".json", ServiceHolder.getEventConnectorService()); tstash.stash(endTree); } catch (Exception ne) { logger.error("Cannot save generators to memento!", ne); } } @Override public void createPartControl(Composite parent) { final Composite content = new Composite(parent, SWT.NONE); GridLayout layout = new GridLayout(1, false); content.setLayout(layout); GridUtils.removeMargins(content); layout.marginTop = 10; Composite startButton = createPositionButton(content, DevicePreferenceConstants.START_POSITION, "Start Position", "icons/position-start.png"); final GeneratorLabelProvider prov = new GeneratorLabelProvider(0); seriesTable.createControl(content, prov, SWT.FULL_SELECTION | SWT.SINGLE); seriesTable.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true)); seriesTable.setHeaderVisible(false); Composite endButton = createPositionButton(content, DevicePreferenceConstants.END_POSITION, "End Position", "icons/position-end.png"); final IViewSite site = getViewSite(); final DelegatingSelectionProvider selectionProvider = new DelegatingSelectionProvider(seriesTable.getSelectionProvider()); site.setSelectionProvider(selectionProvider); this.startTree = createControlTree(DevicePreferenceConstants.START_POSITION, "Start Position"); this.endTree = createControlTree(DevicePreferenceConstants.END_POSITION, "End Position"); createMouseListener(startButton, endButton, DevicePreferenceConstants.START_POSITION, selectionProvider, startTree); createMouseListener(endButton, startButton, DevicePreferenceConstants.END_POSITION, selectionProvider, endTree); createActions(site); final MenuManager rightClick = new MenuManager("#PopupMenu"); rightClick.setRemoveAllWhenShown(true); //createActions(rightClick); 
rightClick.addMenuListener(new IMenuListener() { @Override public void menuAboutToShow(IMenuManager manager) { setDynamicMenuOptions(manager); } }); // Here's the data, lets show it seriesTable.setMenuManager(rightClick); seriesTable.setInput(saved, pointsFilter); DropTarget dt = seriesTable.getDropTarget(); dt.setTransfer(new Transfer[] { TextTransfer.getInstance(), FileTransfer.getInstance(), ResourceTransfer.getInstance(), LocalSelectionTransfer.getTransfer() }); dt.addDropListener(new DropTargetAdapter() { @Override public void drop(DropTargetEvent event) { Object dropData = event.data; if (dropData instanceof TreeSelection) { TreeSelection selectedNode = (TreeSelection) dropData; Object obj[] = selectedNode.toArray(); for (int i = 0; i < obj.length; i++) { if (obj[i] instanceof IFile) { IFile file = (IFile) obj[i]; readScans(file.getLocation().toOSString()); return; } } } else if (dropData instanceof String[]) { for (String path : (String[])dropData){ readScans(path); return; } } } }); // Try to ensure that the model view and regions view are initialized IViewReference ref = PageUtil.getPage().findViewReference(ScanRegionView.ID); if (ref!=null) ref.getView(true); ref = PageUtil.getPage().findViewReference(ModelView.ID); if (ref!=null) ref.getView(true); } private ControlTree createControlTree(String id, String name) { // TODO FIXME The default control tree for the start and end positions should have their own definitions // or the ability to create them. This code remembers what the user sets for start/end but // the initial fields simply come from the same as the ControlView ones. Stashing stash = new Stashing(id+".json", ServiceHolder.getEventConnectorService()); ControlTree tree = null; try { if (stash.isStashed()) tree = stash.unstash(ControlTree.class); } catch (Exception ne) { logger.warn("Getting tree from "+stash, ne); tree = null; } if (tree == null) { tree = ControlTreeUtils.parseDefaultXML(); try { tree = ControlTreeUtils.clone(tree); } catch (Exception e) { logger.warn("Getting tree from default XML", e); } } if (tree==null) return null; tree.setName(id); tree.setDisplayName(name); tree.build(); return tree; } private void createMouseListener(Composite position, Composite otherPosition, String propName, DelegatingSelectionProvider prov, ControlTree tree) { position.addMouseListener(new MouseAdapter() { public void mouseDown(MouseEvent e) { setPositionSelected(position, otherPosition, prov, tree); } }); store.addPropertyChangeListener(new IPropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent event) { if (position.isDisposed()) { store.removePropertyChangeListener(this); return; } boolean show = store.getBoolean(propName); GridUtils.setVisible(position, show); position.getParent().layout(new Control[]{position}); if (show) { setPositionSelected(position, otherPosition, prov, tree); } } }); } protected void setPositionSelected(Composite position, Composite otherPosition, DelegatingSelectionProvider prov, ControlTree tree) { position.setFocus(); seriesTable.deselectAll(); position.setBackground(position.getDisplay().getSystemColor(SWT.COLOR_TITLE_BACKGROUND)); otherPosition.setBackground(position.getDisplay().getSystemColor(SWT.COLOR_WHITE)); seriesTable.addSelectionListener(new ISelectionChangedListener() { @Override public void selectionChanged(SelectionChangedEvent event) { position.setBackground(position.getDisplay().getSystemColor(SWT.COLOR_WHITE)); seriesTable.removeSelectionListener(this); } }); prov.fireSelection(new 
StructuredSelection(tree)); } private Composite createPositionButton(final Composite content, final String propName, String label, String iconPath) { final CLabel position = new CLabel(content, SWT.LEFT); position.setBackground(content.getDisplay().getSystemColor(SWT.COLOR_WHITE)); position.setImage(Activator.getImageDescriptor(iconPath).createImage()); position.setText(label); position.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false)); GridUtils.setVisible(position, store.getBoolean(propName)); return position; } @Override public Object getAdapter(Class clazz) { if (clazz==IScanPathModel.class) { ISeriesItemDescriptor selected = seriesTable.getSelected(); if (!(selected instanceof GeneratorDescriptor)) return null; return ((GeneratorDescriptor)selected).getModel(); } else if (clazz==IPointGenerator.class || clazz==IPointGenerator[].class) { return getGenerators(); }else if (clazz==Object[].class) { return getModels(); }else if (clazz==List.class) { return getModels(); } return null; } private IAction add; private IAction delete; private IAction clear; private String lastPath = null; private final static String[] extensions = new String[]{"json", "*.*"}; private final static String[] files = new String[]{"Scan files (json)", "All Files"}; private void createActions(final IViewSite site) { IToolBarManager tmanager = site.getActionBars().getToolBarManager(); IMenuManager mmanager = site.getActionBars().getMenuManager(); IAction start = new Action("Set start position\nThis is the position before a scan", IAction.AS_CHECK_BOX) { public void run() { store.setValue(DevicePreferenceConstants.START_POSITION, isChecked()); } }; start.setChecked(store.getBoolean(DevicePreferenceConstants.START_POSITION)); start.setImageDescriptor(Activator.getImageDescriptor("icons/position-start.png")); IAction end = new Action("Set end position\nThe position after a scan", IAction.AS_CHECK_BOX) { public void run() { store.setValue(DevicePreferenceConstants.END_POSITION, isChecked()); } }; end.setChecked(store.getBoolean(DevicePreferenceConstants.END_POSITION)); end.setImageDescriptor(Activator.getImageDescriptor("icons/position-end.png")); addGroup("location", tmanager, start, end); addGroup("location", mmanager, start, end); add = new Action("Insert", Activator.getImageDescriptor("icons/clipboard-list.png")) { public void run() { seriesTable.addNew(); } }; delete = new Action("Delete", Activator.getImageDescriptor("icons/clipboard--minus.png")) { public void run() { seriesTable.delete(); } }; clear = new Action("Clear", Activator.getImageDescriptor("icons/clipboard-empty.png")) { public void run() { boolean ok = MessageDialog.openQuestion(site.getShell(), "Confirm Clear Scan", "Do you want to clear the scan?"); if (!ok) return; seriesTable.clear(); } }; addGroup("manage", tmanager, add, delete, clear); addGroup("manage", mmanager, add, delete, clear); final IAction save = new Action("Save scan", IAction.AS_PUSH_BUTTON) { public void run() { List<IScanPathModel> models = getModels(); if (models == null) return; FileSelectionDialog dialog = new FileSelectionDialog(site.getShell()); if (lastPath != null) dialog.setPath(lastPath); dialog.setExtensions(extensions); dialog.setNewFile(true); dialog.setFolderSelector(false); dialog.create(); if (dialog.open() == Dialog.CANCEL) return; String path = dialog.getPath(); if (!path.endsWith(extensions[0])) { //pipeline should always be saved to .nxs path = path.concat("." 
+ extensions[0]); } saveScans(path, models); lastPath = path; } }; final IAction load = new Action("Load scan", IAction.AS_PUSH_BUTTON) { public void run() { FileSelectionDialog dialog = new FileSelectionDialog(site.getShell()); dialog.setExtensions(extensions); dialog.setFiles(files); dialog.setNewFile(false); dialog.setFolderSelector(false); if (lastPath != null) dialog.setPath(lastPath); dialog.create(); if (dialog.open() == Dialog.CANCEL) return; String path = dialog.getPath(); readScans(path); lastPath = path; } }; save.setImageDescriptor(Activator.getImageDescriptor("icons/mask-import-wiz.png")); load.setImageDescriptor(Activator.getImageDescriptor("icons/mask-export-wiz.png")); addGroup("file", tmanager, save, load); addGroup("file", mmanager, save, load); final IAction lock = new Action("Lock scan editing", IAction.AS_CHECK_BOX) { public void run() { store.setValue(DevicePreferenceConstants.LOCK_SCAN_SEQUENCE, isChecked()); seriesTable.setLockEditing(isChecked()); add.setEnabled(!isChecked()); delete.setEnabled(!isChecked()); clear.setEnabled(!isChecked()); } }; lock.setImageDescriptor(Activator.getImageDescriptor("icons/lock.png")); lock.setChecked(store.getBoolean(DevicePreferenceConstants.LOCK_SCAN_SEQUENCE)); add.setEnabled(!lock.isChecked()); delete.setEnabled(!lock.isChecked()); clear.setEnabled(!lock.isChecked()); seriesTable.setLockEditing(lock.isChecked()); addGroup("lock", tmanager, lock); addGroup("lock", mmanager, lock); } private void addGroup(String id, IContributionManager manager, IAction... actions) { manager.add(new Separator(id)); for (IAction action : actions) { manager.add(action); } } private void saveScans(String filename, List<IScanPathModel> models) { Stashing stash = new Stashing(new File(filename), ServiceHolder.getEventConnectorService()); stash.save(models, getViewSite().getShell()); } private void readScans(String filePath) { Stashing stash = new Stashing(new File(filePath), ServiceHolder.getEventConnectorService()); List<IScanPathModel> models = stash.load(List.class, getViewSite().getShell()); try { this.saved = pointsFilter.createDescriptors(models); this.seriesTable.setInput(saved, pointsFilter); } catch (Exception e) { logger.error("Unexpected error refreshing saved models in "+getClass().getSimpleName(), e); } } private IPointGenerator<?>[] getGenerators() { final List<ISeriesItemDescriptor> desi = seriesTable.getSeriesItems(); if (desi != null) { Iterator<ISeriesItemDescriptor> it = desi.iterator(); while (it.hasNext()) if ((!(it.next() instanceof GeneratorDescriptor))) it.remove(); } if (desi==null || desi.isEmpty()) return null; final IPointGenerator<?>[] pipeline = new IPointGenerator<?>[desi.size()]; for (int i = 0; i < desi.size(); i++) { try { pipeline[i] = (IPointGenerator<?>)desi.get(i).getSeriesObject(); } catch (Exception e) { e.printStackTrace(); return null; } } return pipeline; } private List<IScanPathModel> getModels() { IPointGenerator<?>[] gens = getGenerators(); List<IScanPathModel> mods = new ArrayList<>(gens.length); for (int i = 0; i < gens.length; i++) mods.add((IScanPathModel)gens[i].getModel()); return mods; } private void setDynamicMenuOptions(IMenuManager mm) { mm.add(add); mm.add(delete); mm.add(clear); mm.add(new Separator()); IPointGenerator<?> gen = null; try { ISeriesItemDescriptor selected = seriesTable.getSelected(); if (!(selected instanceof GeneratorDescriptor)) return; gen = ((GeneratorDescriptor)selected).getSeriesObject(); } catch (Exception e1) { } final IAction passUnMod = new Action("Enabled", 
IAction.AS_CHECK_BOX) { public void run() { ISeriesItemDescriptor current = seriesTable.getSelected(); if (current instanceof GeneratorDescriptor) { try { ((GeneratorDescriptor)current).getSeriesObject().setEnabled(isChecked()); seriesTable.refreshTable(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } } }; if (gen != null && !gen.isEnabled()) passUnMod.setChecked(true); mm.add(passUnMod); } @Override public void dispose() { seriesTable.dispose(); } @Override public void setFocus() { seriesTable.setFocus(); } }
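/*
 * Editor's note: hedged usage sketch, not part of the scanning UI plugin. It shows how another
 * part of the workbench could read the scan configured in ScanView through the getAdapter(..)
 * hook above. The workbench lookup is the generic Eclipse pattern; null checks and error
 * handling are omitted for brevity.
 */
import java.util.List;
import org.eclipse.scanning.api.points.models.IScanPathModel;
import org.eclipse.scanning.device.ui.points.ScanView;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.PlatformUI;

class ScanViewAdapterSketch {
    @SuppressWarnings("unchecked")
    static List<IScanPathModel> currentScanModels() {
        IViewPart view = PlatformUI.getWorkbench().getActiveWorkbenchWindow()
                .getActivePage().findView(ScanView.ID);
        // ScanView.getAdapter(List.class) returns the models backing the series table
        return (List<IScanPathModel>) view.getAdapter(List.class);
    }
}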
package org.spoofax.modelware.emf.trans; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import org.eclipse.emf.common.util.BasicEList; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EAnnotation; import org.eclipse.emf.ecore.EAttribute; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EDataType; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.ecore.util.EcoreUtil; import org.spoofax.interpreter.terms.IStrategoAppl; import org.spoofax.interpreter.terms.IStrategoList; import org.spoofax.interpreter.terms.IStrategoString; import org.spoofax.interpreter.terms.IStrategoTerm; import org.spoofax.modelware.emf.utils.Utils; public class Tree2modelConverter { private final EPackage pack; private final List<Reference> references = new LinkedList<Reference>(); private final HashMap<IStrategoTerm, EObject> uriMap = new HashMap<IStrategoTerm, EObject>(); public Tree2modelConverter(EPackage pack) { this.pack = pack; } public EObject convert(IStrategoTerm term) { EObject result = convert((IStrategoAppl) term.getSubterm(0)); setReferences(); return result; } private EObject convert(IStrategoAppl term) { IStrategoList URIs = (IStrategoList) term.getSubterm(0); IStrategoTerm QID = term.getSubterm(1); IStrategoList slots = (IStrategoList) term.getSubterm(2); EClass c = getClass(QID); EObject obj = pack.getEFactoryInstance().create(c); for (IStrategoTerm uri : URIs.getAllSubterms()) { if (!uriMap.containsKey(uri)) { uriMap.put(uri, obj); } } for (int i = 0; i < slots.getAllSubterms().length; i++) { EStructuralFeature f = getFeature(c, i); setFeature(slots.getAllSubterms()[i], obj, f); } return obj; } private void setFeature(IStrategoTerm t, EObject obj, EStructuralFeature f) { if (Utils.isNone(t) || Utils.isEmptyList(t)) { return; } if (Utils.isSome(t)) { t = t.getSubterm(0); // normalization } final boolean isList = t.getSubterm(0).isList(); if (!isList) { IStrategoTerm list = Utils.termFactory.makeList(t.getSubterm(0)); t = Utils.termFactory.makeAppl(((IStrategoAppl) t).getConstructor(), list); // normalization } String featureType = ((IStrategoAppl) t).getConstructor().getName(); if (featureType.equals("Link")) { references.add(new Reference(obj, f, t.getSubterm(0))); } else { List<Object> values = new LinkedList<Object>(); for (IStrategoTerm subTerm : t.getSubterm(0).getAllSubterms()) { if (featureType.equals("Data")) { EDataType type = ((EAttribute) f).getEAttributeType(); values.add(EcoreUtil.createFromString(type, ((IStrategoString) subTerm).stringValue())); } else if (featureType.equals("Contain")) { values.add(convert((IStrategoAppl) subTerm)); } } obj.eSet(f, isList ? values : values.get(0)); } } private EClass getClass(IStrategoTerm QID) { return (EClass) pack.getEClassifier(((IStrategoString) QID.getSubterm(1)).stringValue()); } private EStructuralFeature getFeature(EClass c, int i) { EAnnotation featureIndexes = c.getEAnnotation(Constants.ANNO_FEATURE_INDEX); if (featureIndexes != null) { String featureName = featureIndexes.getDetails().get(Integer.toString(i)); return c.getEStructuralFeature(featureName); } else { return c.getEAllStructuralFeatures().get(i); } } private void setReferences() { for (Reference ref : references) { EList<EObject> results = new BasicEList<EObject>(); for (IStrategoTerm uri : ref.uris.getAllSubterms()) { results.add(uriMap.get(uri)); } ref.object.eSet(ref.feature, ref.feature.isMany() ? 
results : results.get(0)); } } private class Reference { public final EObject object; public final EStructuralFeature feature; public final IStrategoTerm uris; public Reference(EObject object, EStructuralFeature feature, IStrategoTerm uris) { this.object = object; this.feature = feature; this.uris = uris; } } }
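/*
 * Editor's note: hedged usage sketch, not part of spoofax-modelware. It wraps the converter
 * above; obtaining the EPackage and the analyzed Stratego term is left to the caller, so both
 * parameters are placeholders.
 */
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.modelware.emf.trans.Tree2modelConverter;

class Tree2modelConverterSketch {
    static EObject toModel(EPackage metamodel, IStrategoTerm analyzedTree) {
        // convert(..) reads the serialized object tree from subterm 0 and resolves
        // cross-references (Link terms) in a second pass via setReferences()
        return new Tree2modelConverter(metamodel).convert(analyzedTree);
    }
}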
package net.mafro.android.wakeonlan; import android.os.Bundle; import android.content.Context; import android.content.ContentResolver; import android.database.Cursor; import android.content.ContentValues; import android.net.Uri; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.view.ViewParent; import android.widget.TextView; import android.widget.AdapterView; import android.widget.ResourceCursorAdapter; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import net.mafro.android.widget.StarButton; public class HistoryListItemAdapter extends ResourceCursorAdapter implements OnCheckedChangeListener { private static final String TAG = "HistoryListItemAdapter"; private Context context; private ContentResolver content; public HistoryListItemAdapter(Context context, Cursor cursor) { super(context, R.layout.history_row, cursor); this.context = context; this.content = context.getContentResolver(); } @Override public void bindView(View view, Context context, Cursor cursor) { //load our column indexes int idColumn = cursor.getColumnIndex(History.Items._ID); int titleColumn = cursor.getColumnIndex(History.Items.TITLE); int macColumn = cursor.getColumnIndex(History.Items.MAC); int ipColumn = cursor.getColumnIndex(History.Items.IP); int portColumn = cursor.getColumnIndex(History.Items.PORT); int isStarredColumn = cursor.getColumnIndex(History.Items.IS_STARRED); //Log.d(TAG+":bindView", Integer.toString(cursor.getInt(idColumn))); TextView vtitle = (TextView) view.findViewById(R.id.history_row_title); TextView vmac = (TextView) view.findViewById(R.id.history_row_mac); TextView vip = (TextView) view.findViewById(R.id.history_row_ip); TextView vport = (TextView) view.findViewById(R.id.history_row_port); StarButton star = (StarButton) view.findViewById(R.id.history_row_star); //bind the cursor data to the form items vtitle.setText(cursor.getString(titleColumn)); vmac.setText(cursor.getString(macColumn)); vip.setText(cursor.getString(ipColumn)); vport.setText(Integer.toString(cursor.getInt(portColumn))); //remove click handler to prevent recursive calls star.setOnCheckedChangeListener(null); //change the star state if different boolean starred = (cursor.getInt(isStarredColumn) != 0); //non-zero == true star.setChecked(starred); star.render(); //add event listener to star button star.setOnCheckedChangeListener(this); //save our record _ID in the star's tag star.setTag(cursor.getInt(idColumn)); } public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { //extract record's _ID from tag int id = ((Integer) ((StarButton) buttonView).getTag()).intValue(); if(isChecked) { setIsStarred(id, 1); }else{ setIsStarred(id, 0); } } private void setIsStarred(int id, int value) { //update history setting is_starred to value ContentValues values = new ContentValues(1); values.put(History.Items.IS_STARRED, value); Uri itemUri = Uri.withAppendedPath(History.Items.CONTENT_URI, Integer.toString(id)); this.content.update(itemUri, values, null, null); } }
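/*
 * Editor's note: hedged usage sketch, not part of the Wake On Lan app. It wires the adapter
 * above to a plain ListView; the real application resolves its cursor through the History
 * content provider, so the null projection/selection here is an assumption, and History is
 * assumed to be visible from this package.
 */
import android.app.Activity;
import android.database.Cursor;
import android.os.Bundle;
import android.widget.ListView;
import net.mafro.android.wakeonlan.History;
import net.mafro.android.wakeonlan.HistoryListItemAdapter;

class HistoryListSketchActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        ListView list = new ListView(this);
        // default projection and sort order; the provider fills in sensible defaults
        Cursor cursor = getContentResolver().query(History.Items.CONTENT_URI, null, null, null, null);
        list.setAdapter(new HistoryListItemAdapter(this, cursor));
        setContentView(list);
    }
}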
package com.intellij.util.ui; import com.intellij.icons.AllIcons; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.registry.Registry; import com.intellij.ui.ColorUtil; import com.intellij.ui.Gray; import com.intellij.ui.JBColor; import com.intellij.ui.JreHiDpiUtil; import com.intellij.ui.border.CustomLineBorder; import com.intellij.ui.scale.DerivedScaleType; import com.intellij.ui.scale.JBUIScale; import com.intellij.ui.scale.Scale; import com.intellij.ui.scale.UserScaleContext; import com.intellij.util.ui.components.BorderLayoutPanel; import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.Border; import javax.swing.border.CompoundBorder; import javax.swing.plaf.BorderUIResource; import javax.swing.plaf.UIResource; import java.awt.*; import java.awt.font.TextAttribute; import java.lang.ref.WeakReference; import java.util.HashMap; import java.util.Map; /** * @author Konstantin Bulenkov * @author tav */ @SuppressWarnings("UseJBColor") public class JBUI { /** * @deprecated use {@link JBUIScale#sysScale()} */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") public static float sysScale() { return JBUIScale.sysScale(); } /** * Returns the pixel scale factor, corresponding to the default monitor device. */ public static float pixScale() { return JreHiDpiUtil.isJreHiDPIEnabled() ? JBUIScale.sysScale() * JBUIScale.scale(1f) : JBUIScale.scale(1f); } /** * Returns "f" scaled by pixScale(). */ public static float pixScale(float f) { return pixScale() * f; } /** * Returns the pixel scale factor, corresponding to the provided configuration. * In the IDE-managed HiDPI mode defaults to {@link #pixScale()} */ public static float pixScale(@Nullable GraphicsConfiguration gc) { return JreHiDpiUtil.isJreHiDPIEnabled() ? JBUIScale.sysScale(gc) * JBUIScale.scale(1f) : JBUIScale.scale(1f); } /** * Returns the pixel scale factor, corresponding to the device the provided component is tied to. * In the IDE-managed HiDPI mode defaults to {@link #pixScale()} */ public static float pixScale(@Nullable Component comp) { return pixScale(comp != null ? comp.getGraphicsConfiguration() : null); } /** * @deprecated use {@link JBUIScale#scale(float)} */ @Deprecated public static float scale(float f) { return JBUIScale.scale(f); } /** * @return 'i' scaled by the user scale factor */ public static int scale(int i) { return JBUIScale.scale(i); } public static int scaleFontSize(float fontSize) { return JBUIScale.scaleFontSize(fontSize); } @NotNull public static JBValue value(float value) { return new JBValue.Float(value); } @NotNull public static JBValue uiIntValue(@NotNull String key, int defValue) { return new JBValue.UIInteger(key, defValue); } @NotNull public static JBDimension size(int width, int height) { return new JBDimension(width, height); } @NotNull public static JBDimension size(int widthAndHeight) { return new JBDimension(widthAndHeight, widthAndHeight); } @NotNull public static JBDimension size(Dimension size) { if (size instanceof JBDimension) { JBDimension newSize = ((JBDimension)size).newSize(); return size instanceof UIResource ? 
newSize.asUIResource() : newSize; } return new JBDimension(size.width, size.height); } @NotNull public static JBInsets insets(int top, int left, int bottom, int right) { return new JBInsets(top, left, bottom, right); } @NotNull public static JBInsets insets(int all) { return new JBInsets(all, all, all, all); } @NotNull public static JBInsets insets(@NonNls @NotNull String propName, @NotNull JBInsets defaultValue) { Insets i = UIManager.getInsets(propName); return i != null ? JBInsets.create(i) : defaultValue; } @NotNull public static JBInsets insets(int topBottom, int leftRight) { return JBInsets.create(topBottom, leftRight); } @NotNull public static JBInsets emptyInsets() { return new JBInsets(0, 0, 0, 0); } @NotNull public static JBInsets insetsTop(int t) { return new JBInsets(t, 0, 0, 0); } @NotNull public static JBInsets insetsLeft(int l) { return new JBInsets(0, l, 0, 0); } @NotNull public static JBInsets insetsBottom(int b) { return new JBInsets(0, 0, b, 0); } @NotNull public static JBInsets insetsRight(int r) { return new JBInsets(0, 0, 0, r); } /** * @deprecated Use {@link JBUIScale#scaleIcon(JBScalableIcon)}. */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") @NotNull public static <T extends JBScalableIcon> T scale(@NotNull T icon) { return JBUIScale.scaleIcon(icon); } /** * @deprecated Use {@link JBUIScale#scaleIcon(JBScalableIcon)}. */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") @NotNull public static <T extends JBIcon> T scale(@NotNull T icon) { //noinspection unchecked return (T)icon.withIconPreScaled(false); } @NotNull public static JBDimension emptySize() { return new JBDimension(0, 0); } @NotNull public static JBInsets insets(@NotNull Insets insets) { return JBInsets.create(insets); } /** * @deprecated use {@link JBUIScale#isUsrHiDPI()} instead */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") public static boolean isHiDPI() { return JBUIScale.isUsrHiDPI(); } /** * @deprecated use {@link JBUIScale#isUsrHiDPI()} */ @Deprecated public static boolean isUsrHiDPI() { return JBUIScale.isUsrHiDPI(); } /** * Returns whether the {@link DerivedScaleType#PIX_SCALE} scale factor assumes HiDPI-awareness in the provided graphics config. * An equivalent of {@code isHiDPI(pixScale(gc))} */ public static boolean isPixHiDPI(@Nullable GraphicsConfiguration gc) { return JBUIScale.isHiDPI(pixScale(gc)); } /** * Returns whether the {@link DerivedScaleType#PIX_SCALE} scale factor assumes HiDPI-awareness in the provided component's device. * An equivalent of {@code isHiDPI(pixScale(comp))} */ public static boolean isPixHiDPI(@Nullable Component comp) { return JBUIScale.isHiDPI(pixScale(comp)); } public static final class Fonts { @NotNull public static JBFont label() { return JBFont.label(); } @NotNull public static JBFont label(float size) { return JBFont.label().deriveFont(JBUIScale.scale(size)); } @NotNull public static JBFont smallFont() { return JBFont.label().deriveFont(UIUtil.getFontSize(UIUtil.FontSize.SMALL)); } @NotNull public static JBFont miniFont() { return JBFont.label().deriveFont(UIUtil.getFontSize(UIUtil.FontSize.MINI)); } @NotNull public static JBFont create(@NonNls String fontFamily, int size) { return JBFont.create(new Font(fontFamily, Font.PLAIN, size)); } @NotNull public static JBFont toolbarFont() { return SystemInfo.isMac ? 
smallFont() : JBFont.label(); } @NotNull public static JBFont toolbarSmallComboBoxFont() { return label(11); } } private static final JBEmptyBorder SHARED_EMPTY_INSTANCE = new JBEmptyBorder(0); @SuppressWarnings("UseDPIAwareBorders") public static final class Borders { @NotNull public static JBEmptyBorder empty(int top, int left, int bottom, int right) { if (top == 0 && left == 0 && bottom == 0 && right == 0) { return SHARED_EMPTY_INSTANCE; } return new JBEmptyBorder(top, left, bottom, right); } @NotNull public static JBEmptyBorder empty(int topAndBottom, int leftAndRight) { return empty(topAndBottom, leftAndRight, topAndBottom, leftAndRight); } @NotNull public static JBEmptyBorder emptyTop(int offset) { return empty(offset, 0, 0, 0); } @NotNull public static JBEmptyBorder emptyLeft(int offset) { return empty(0, offset, 0, 0); } @NotNull public static JBEmptyBorder emptyBottom(int offset) { return empty(0, 0, offset, 0); } @NotNull public static JBEmptyBorder emptyRight(int offset) { return empty(0, 0, 0, offset); } @NotNull public static JBEmptyBorder empty() { return empty(0, 0, 0, 0); } @NotNull public static Border empty(int offsets) { return empty(offsets, offsets, offsets, offsets); } @NotNull public static Border customLine(Color color, int top, int left, int bottom, int right) { return new CustomLineBorder(color, insets(top, left, bottom, right)); } @NotNull public static Border customLine(Color color, int thickness) { return customLine(color, thickness, thickness, thickness, thickness); } @NotNull public static Border customLine(Color color) { return customLine(color, 1); } @NotNull public static Border merge(@Nullable Border source, @NotNull Border extra, boolean extraIsOutside) { if (source == null) return extra; return new CompoundBorder(extraIsOutside ? extra : source, extraIsOutside? 
source : extra); } } public static final class Panels { @NotNull public static BorderLayoutPanel simplePanel() { return new BorderLayoutPanel(); } @NotNull public static BorderLayoutPanel simplePanel(Component comp) { return simplePanel().addToCenter(comp); } @NotNull public static BorderLayoutPanel simplePanel(int hgap, int vgap) { return new BorderLayoutPanel(hgap, vgap); } } public static Border asUIResource(@NotNull Border border) { if (border instanceof UIResource) return border; return new BorderUIResource(border); } @SuppressWarnings("UnregisteredNamedColor") public static final class CurrentTheme { public interface Component { Color FOCUSED_BORDER_COLOR = JBColor.namedColor("Component.focusedBorderColor", 0x87AFDA, 0x466D94); } public static final class ActionButton { @NotNull public static Color pressedBackground() { return JBColor.namedColor("ActionButton.pressedBackground", Gray.xCF); } @NotNull public static Color pressedBorder() { return JBColor.namedColor("ActionButton.pressedBorderColor", Gray.xCF); } @NotNull public static Color focusedBorder() { return JBColor.namedColor("ActionButton.focusedBorderColor", new JBColor(0x62b8de, 0x5eacd0)); } @NotNull public static Color hoverBackground() { return JBColor.namedColor("ActionButton.hoverBackground", Gray.xDF); } @NotNull public static Color hoverBorder() { return JBColor.namedColor("ActionButton.hoverBorderColor", Gray.xDF); } @NotNull public static Color hoverSeparatorColor() { return JBColor.namedColor("ActionButton.hoverSeparatorColor", new JBColor(Gray.xB3, Gray.x6B)); } } public static final class ActionsList { public static final Color MNEMONIC_FOREGROUND = JBColor.namedColor("Label.infoForeground", new JBColor(Gray.x78, Gray.x8C)); @NotNull public static Insets numberMnemonicInsets() { return insets("ActionsList.mnemonicsBorderInsets", insets(0, 8, 1, 6)); } @NotNull public static Insets cellPadding() { return insets("ActionsList.cellBorderInsets", insets(1, 12, 1, 12)); } @NotNull public static int elementIconGap() { return new JBValue.UIInteger("ActionsList.icon.gap", scale(8)).get(); } @NotNull public static Font applyStylesForNumberMnemonic(Font font) { if (SystemInfo.isWindows) { Map<TextAttribute, Object> attributes = new HashMap<>(font.getAttributes()); attributes.put(TextAttribute.UNDERLINE, TextAttribute.UNDERLINE_ON); return font.deriveFont(attributes); } return font; } } public static final class Button { @NotNull public static Color buttonColorStart() { return JBColor.namedColor("Button.startBackground", JBColor.namedColor("Button.darcula.startColor", 0x555a5c)); } @NotNull public static Color buttonColorEnd() { return JBColor.namedColor("Button.endBackground", JBColor.namedColor("Button.darcula.endColor", 0x414648)); } @NotNull public static Color defaultButtonColorStart() { return JBColor.namedColor("Button.default.startBackground", JBColor.namedColor("Button.darcula.defaultStartColor", 0x384f6b)); } @NotNull public static Color defaultButtonColorEnd() { return JBColor.namedColor("Button.default.endBackground", JBColor.namedColor("Button.darcula.defaultEndColor", 0x233143)); } @NotNull public static Color focusBorderColor(boolean isDefaultButton) { return isDefaultButton ? 
JBColor.namedColor("Button.default.focusedBorderColor", JBColor.namedColor("Button.darcula.defaultFocusedOutlineColor", 0x87afda)) : JBColor.namedColor("Button.focusedBorderColor", JBColor.namedColor("Button.darcula.focusedOutlineColor", 0x87afda)); } @NotNull public static Color buttonOutlineColorStart(boolean isDefaultButton) { return isDefaultButton ? JBColor.namedColor("Button.default.startBorderColor", JBColor.namedColor("Button.darcula.outlineDefaultStartColor", Gray.xBF)) : JBColor.namedColor("Button.startBorderColor", JBColor.namedColor("Button.darcula.outlineStartColor", Gray.xBF)); } @NotNull public static Color buttonOutlineColorEnd(boolean isDefaultButton) { return isDefaultButton ? JBColor.namedColor("Button.default.endBorderColor", JBColor.namedColor("Button.darcula.outlineDefaultEndColor", Gray.xB8)) : JBColor.namedColor("Button.endBorderColor", JBColor.namedColor("Button.darcula.outlineEndColor", Gray.xB8)); } @NotNull public static Color disabledOutlineColor() { return JBColor.namedColor("Button.disabledBorderColor", JBColor.namedColor("Button.darcula.disabledOutlineColor", Gray.xCF)); } } public static final class CustomFrameDecorations { @NotNull public static Color separatorForeground() { return JBColor.namedColor("Separator.separatorColor", new JBColor(0xcdcdcd, 0x515151)); } @NotNull public static Color titlePaneButtonHoverBackground() { return JBColor.namedColor("TitlePane.Button.hoverBackground", new JBColor(ColorUtil.withAlpha(Color.BLACK, .1), ColorUtil.withAlpha(Color.WHITE, .1))); } @NotNull public static Color titlePaneButtonPressBackground() { return titlePaneButtonHoverBackground(); } @NotNull public static Color titlePaneInactiveBackground() { return JBColor.namedColor("TitlePane.inactiveBackground", titlePaneBackground()); } @NotNull public static Color titlePaneBackground(boolean active) { return active ? 
titlePaneBackground() : titlePaneInactiveBackground(); } @NotNull public static Color titlePaneBackground() { return JBColor.namedColor("TitlePane.background", paneBackground()); } @NotNull public static Color titlePaneInfoForeground() { return JBColor.namedColor("TitlePane.infoForeground", new JBColor(0x616161, 0x919191)); } @NotNull public static Color titlePaneInactiveInfoForeground() { return JBColor.namedColor("TitlePane.inactiveInfoForeground", new JBColor(0xA6A6A6, 0x737373)); } @NotNull public static Color paneBackground() { return JBColor.namedColor("Panel.background", Gray.xCD); } } public static final class DefaultTabs { @NotNull public static Color underlineColor() { return JBColor.namedColor("DefaultTabs.underlineColor", new JBColor(0x4083C9, 0x4A88C7)); } public static int underlineHeight() { return getInt("DefaultTabs.underlineHeight", JBUIScale.scale(3)); } @NotNull public static Color inactiveUnderlineColor() { return JBColor.namedColor("DefaultTabs.inactiveUnderlineColor", new JBColor(0x9ca7b8, 0x747a80)); } @NotNull public static Color borderColor() { return JBColor.namedColor("DefaultTabs.borderColor", UIUtil.CONTRAST_BORDER_COLOR); } @NotNull public static Color background() { return JBColor.namedColor("DefaultTabs.background", new JBColor(0xECECEC, 0x3C3F41)); } @NotNull public static Color hoverBackground() { return JBColor.namedColor("DefaultTabs.hoverBackground", new JBColor(ColorUtil.withAlpha(Color.BLACK, .10), ColorUtil.withAlpha(Color.BLACK, .35))); } public static Color underlinedTabBackground() { return UIManager.getColor("DefaultTabs.underlinedTabBackground"); } @NotNull public static Color underlinedTabForeground() { return JBColor.namedColor("DefaultTabs.underlinedTabForeground", UIUtil.getLabelForeground()); } @NotNull public static Color inactiveColoredTabBackground() { return JBColor.namedColor("DefaultTabs.inactiveColoredTabBackground", new JBColor(ColorUtil.withAlpha(Color.BLACK, .07), ColorUtil.withAlpha(new Color(0x3C3F41), .60))); } } public static final class DebuggerTabs { public static int underlineHeight() { return getInt("DebuggerTabs.underlineHeight", JBUIScale.scale(2)); } public static Color underlinedTabBackground() { return UIManager.getColor("DebuggerTabs.underlinedTabBackground"); } } public static final class EditorTabs { @NotNull public static Color underlineColor() { return JBColor.namedColor("EditorTabs.underlineColor", DefaultTabs.underlineColor()); } public static int underlineHeight() { return getInt("EditorTabs.underlineHeight", DefaultTabs.underlineHeight()); } @NotNull public static Color inactiveUnderlineColor() { return JBColor.namedColor("EditorTabs.inactiveUnderlineColor", DefaultTabs.inactiveUnderlineColor()); } public static Color underlinedTabBackground() { return UIManager.getColor("EditorTabs.underlinedTabBackground"); } public static Insets tabInsets() { return insets("EditorTabs.tabInsets", insets(0, 8)); } @NotNull public static Color borderColor() { return JBColor.namedColor("EditorTabs.borderColor", DefaultTabs.borderColor()); } @NotNull public static Color background() { return JBColor.namedColor("EditorTabs.background", DefaultTabs.background()); } @NotNull public static Color hoverBackground() { return JBColor.namedColor("EditorTabs.hoverBackground", DefaultTabs.hoverBackground()); } @NotNull public static Color inactiveColoredFileBackground() { return JBColor.namedColor("EditorTabs.inactiveColoredFileBackground", DefaultTabs.inactiveColoredTabBackground()); } @NotNull public static Color 
underlinedTabForeground() { return JBColor.namedColor("EditorTabs.underlinedTabForeground", DefaultTabs.underlinedTabForeground()); } } public static final class StatusBar { @NotNull public static Color hoverBackground() { return JBColor.namedColor("StatusBar.hoverBackground", ActionButton.hoverBackground()); } } public static final class ToolWindow { @NotNull public static Color borderColor() { return JBColor.namedColor("ToolWindow.HeaderTab.borderColor", DefaultTabs.borderColor()); } @NotNull public static Color underlinedTabForeground() { return JBColor.namedColor("ToolWindow.HeaderTab.underlinedTabForeground", DefaultTabs.underlinedTabForeground()); } @NotNull public static Color hoverBackground() { return JBColor.namedColor("ToolWindow.HeaderTab.hoverBackground", DefaultTabs.hoverBackground()); } @NotNull public static Color inactiveUnderlineColor() { return JBColor.namedColor("ToolWindow.HeaderTab.inactiveUnderlineColor", DefaultTabs.inactiveUnderlineColor()); } @NotNull public static Color underlineColor() { return JBColor.namedColor("ToolWindow.HeaderTab.underlineColor", DefaultTabs.underlineColor()); } public static Color underlinedTabBackground() { return UIManager.getColor("ToolWindow.HeaderTab.underlinedTabBackground"); } public static Color hoverInactiveBackground() { return JBColor.namedColor("ToolWindow.HeaderTab.hoverInactiveBackground", hoverBackground()); } public static Color underlinedTabInactiveBackground() { return UIManager.getColor("ToolWindow.HeaderTab.underlinedTabInactiveBackground"); } @NotNull public static Color underlinedTabInactiveForeground() { return JBColor.namedColor("ToolWindow.HeaderTab.underlinedTabInactiveForeground", underlinedTabForeground()); } /** * @deprecated obsolete UI */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") @NotNull public static Color tabSelectedBackground() { return Registry.is("toolwindow.active.tab.use.contrast.background") ? Registry.getColor("toolwindow.active.tab.contrast.background.color", JBColor.GRAY) : JBColor.namedColor("ToolWindow.HeaderTab.selectedInactiveBackground", JBColor.namedColor("ToolWindow.header.tab.selected.background", 0xDEDEDE)); } /** * @deprecated obsolete UI */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") @NotNull public static Color tabHoveredBackground() { return hoverInactiveBackground(); } /** * @deprecated obsolete UI */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") @NotNull public static Color tabHoveredBackground(boolean active) { return active ? hoverBackground() : hoverInactiveBackground(); } @NotNull public static Color headerBackground(boolean active) { return active ? 
headerActiveBackground() : headerBackground(); } @NotNull public static Color headerBackground() { return JBColor.namedColor("ToolWindow.Header.inactiveBackground", JBColor.namedColor("ToolWindow.header.background", 0xECECEC)); } @NotNull public static Color headerBorderBackground() { return JBColor.namedColor("ToolWindow.Header.borderColor", DefaultTabs.borderColor()); } @NotNull public static Color headerActiveBackground() { return JBColor.namedColor("ToolWindow.Header.background", JBColor.namedColor("ToolWindow.header.active.background", 0xE2E6EC)); } /** * @deprecated obsolete UI */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") public static int tabVerticalPadding() { return getInt("ToolWindow.HeaderTab.verticalPadding", JBUIScale.scale(6)); } public static int underlineHeight() { return getInt("ToolWindow.HeaderTab.underlineHeight", DefaultTabs.underlineHeight()); } @NotNull public static Font headerFont() { return JBFont.label(); } public static float overrideHeaderFontSizeOffset() { Object offset = UIManager.get("ToolWindow.Header.font.size.offset"); if (offset instanceof Integer) { return ((Integer)offset).floatValue(); } return 0; } @NotNull public static Color hoveredIconBackground() { return JBColor.namedColor("ToolWindow.HeaderCloseButton.background", JBColor.namedColor("ToolWindow.header.closeButton.background", 0xB9B9B9)); } @NotNull public static Icon closeTabIcon(boolean hovered) { return hovered ? getIcon("ToolWindow.header.closeButton.hovered.icon", AllIcons.Actions.CloseHovered) : getIcon("ToolWindow.header.closeButton.icon", AllIcons.Actions.Close); } @NotNull public static Icon comboTabIcon(boolean hovered) { return hovered ? getIcon("ToolWindow.header.comboButton.hovered.icon", AllIcons.General.ArrowDown) : getIcon("ToolWindow.header.comboButton.icon", AllIcons.General.ArrowDown); } } public static final class Label { @NotNull public static Color foreground(boolean selected) { return selected ? JBColor.namedColor("Label.selectedForeground", 0xFFFFFF) : JBColor.namedColor("Label.foreground", 0x000000); } @NotNull public static Color foreground() { return foreground(false); } @NotNull public static Color disabledForeground(boolean selected) { return selected ? JBColor.namedColor("Label.selectedDisabledForeground", 0x999999) : JBColor.namedColor("Label.disabledForeground", JBColor.namedColor("Label.disabledText", 0x999999)); } @NotNull public static Color disabledForeground() { return disabledForeground(false); } } public static final class Popup { public static Color headerBackground(boolean active) { return active ? JBColor.namedColor("Popup.Header.activeBackground", 0xe6e6e6) : JBColor.namedColor("Popup.Header.inactiveBackground", 0xededed); } public static int headerHeight(boolean hasControls) { return hasControls ? JBUIScale.scale(28) : JBUIScale.scale(24); } public static Color borderColor(boolean active) { return active ? 
JBColor.namedColor("Popup.borderColor", JBColor.namedColor("Popup.Border.color", 0x808080)) : JBColor.namedColor("Popup.inactiveBorderColor", JBColor.namedColor("Popup.inactiveBorderColor", 0xaaaaaa)); } public static Color toolbarPanelColor() { return JBColor.namedColor("Popup.Toolbar.background", 0xf7f7f7); } public static Color toolbarBorderColor() { return JBColor.namedColor("Popup.Toolbar.borderColor", JBColor.namedColor("Popup.Toolbar.Border.color", 0xf7f7f7)); } public static int toolbarHeight() { return JBUIScale.scale(28); } public static Color separatorColor() { return JBColor.namedColor("Popup.separatorColor", new JBColor(Color.gray.brighter(), Gray.x51)); } public static Color separatorTextColor() { return JBColor.namedColor("Popup.separatorForeground", Color.gray); } public static int minimumHintWidth() { return JBUIScale.scale(170); } } public static final class Focus { private static final Color GRAPHITE_COLOR = new JBColor(new Color(0x8099979d, true), new Color(0x676869)); @NotNull public static Color focusColor() { return UIUtil.isGraphite() ? GRAPHITE_COLOR : JBColor.namedColor("Component.focusColor", JBColor.namedColor("Focus.borderColor", 0x8ab2eb)); } @NotNull public static Color defaultButtonColor() { return StartupUiUtil.isUnderDarcula() ? JBColor.namedColor("Button.default.focusColor", JBColor.namedColor("Focus.defaultButtonBorderColor", 0x97c3f3)) : focusColor(); } @NotNull public static Color errorColor(boolean active) { return active ? JBColor.namedColor("Component.errorFocusColor", JBColor.namedColor("Focus.activeErrorBorderColor", 0xe53e4d)) : JBColor.namedColor("Component.inactiveErrorFocusColor", JBColor.namedColor("Focus.inactiveErrorBorderColor", 0xebbcbc)); } @NotNull public static Color warningColor(boolean active) { return active ? 
JBColor.namedColor("Component.warningFocusColor", JBColor.namedColor("Focus.activeWarningBorderColor", 0xe2a53a)) : JBColor.namedColor("Component.inactiveWarningFocusColor", JBColor.namedColor("Focus.inactiveWarningBorderColor", 0xffd385)); } } public static final class TabbedPane { public static final Color ENABLED_SELECTED_COLOR = JBColor.namedColor("TabbedPane.underlineColor", JBColor.namedColor("TabbedPane.selectedColor", 0x4083C9)); public static final Color DISABLED_SELECTED_COLOR = JBColor.namedColor("TabbedPane.disabledUnderlineColor", JBColor.namedColor("TabbedPane.selectedDisabledColor", Gray.xAB)); public static final Color DISABLED_TEXT_COLOR = JBColor.namedColor("TabbedPane.disabledForeground", JBColor.namedColor("TabbedPane.disabledText", Gray.x99)); public static final Color HOVER_COLOR = JBColor.namedColor("TabbedPane.hoverColor", Gray.xD9); public static final Color FOCUS_COLOR = JBColor.namedColor("TabbedPane.focusColor", 0xDAE4ED); public static final JBValue TAB_HEIGHT = new JBValue.UIInteger("TabbedPane.tabHeight", 32); public static final JBValue SELECTION_HEIGHT = new JBValue.UIInteger("TabbedPane.tabSelectionHeight", 3); } public static final class BigPopup { @NotNull public static Color headerBackground() { return JBColor.namedColor("SearchEverywhere.Header.background", 0xf2f2f2); } @NotNull public static Insets tabInsets() { return JBInsets.create(0, 12); } @NotNull public static Color selectedTabColor() { return JBColor.namedColor("SearchEverywhere.Tab.selectedBackground", 0xdedede); } @NotNull public static Color selectedTabTextColor() { return JBColor.namedColor("SearchEverywhere.Tab.selectedForeground", 0x000000); } @NotNull public static Color searchFieldBackground() { return JBColor.namedColor("SearchEverywhere.SearchField.background", 0xffffff); } @NotNull public static Color searchFieldBorderColor() { return JBColor.namedColor("SearchEverywhere.SearchField.borderColor", 0xbdbdbd); } @NotNull public static Insets searchFieldInsets() { return insets(0, 6, 0, 5); } public static int maxListHeight() { return JBUIScale.scale(600); } @NotNull public static Color listSeparatorColor() { return JBColor.namedColor("SearchEverywhere.List.separatorColor", Gray.xDC); } @NotNull public static Color listTitleLabelForeground() { return JBColor.namedColor("SearchEverywhere.List.separatorForeground", UIUtil.getLabelDisabledForeground()); } @NotNull public static Color searchFieldGrayForeground() { return JBColor.namedColor("SearchEverywhere.SearchField.infoForeground", JBColor.GRAY); } @NotNull public static Color advertiserForeground() { return JBColor.namedColor("SearchEverywhere.Advertiser.foreground", JBColor.GRAY); } @NotNull public static Border advertiserBorder() { return new JBEmptyBorder(insets("SearchEverywhere.Advertiser.foreground", insetsLeft(8))); } @NotNull public static Color advertiserBackground() { return JBColor.namedColor("SearchEverywhere.Advertiser.background", 0xf2f2f2); } } public static final class Advertiser { @NotNull public static Color foreground() { Color foreground = JBUI.CurrentTheme.BigPopup.advertiserForeground(); return JBColor.namedColor("Popup.Advertiser.foreground", foreground); } @NotNull public static Color background() { Color background = JBUI.CurrentTheme.BigPopup.advertiserBackground(); return JBColor.namedColor("Popup.Advertiser.background", background); } @NotNull public static Border border() { return new JBEmptyBorder(insets("Popup.Advertiser.borderInsets", insets(5, 10, 5, 15))); } @NotNull public static Color borderColor() { 
return JBColor.namedColor("Popup.Advertiser.borderColor", Gray._135); } } public static final class Validator { @NotNull public static Color errorBorderColor() { return JBColor.namedColor("ValidationTooltip.errorBorderColor", 0xE0A8A9); } @NotNull public static Color errorBackgroundColor() { return JBColor.namedColor("ValidationTooltip.errorBackground", JBColor.namedColor("ValidationTooltip.errorBackgroundColor", 0xF5E6E7)); } @NotNull public static Color warningBorderColor() { return JBColor.namedColor("ValidationTooltip.warningBorderColor", 0xE0CEA8); } @NotNull public static Color warningBackgroundColor() { return JBColor.namedColor("ValidationTooltip.warningBackground", JBColor.namedColor("ValidationTooltip.warningBackgroundColor", 0xF5F0E6)); } } public static final class Link { public static final Color FOCUSED_BORDER_COLOR = JBColor.namedColor("Link.focusedBorderColor", Component.FOCUSED_BORDER_COLOR); public interface Foreground { Color DISABLED = JBColor.namedColor("Link.disabledForeground", Label.disabledForeground()); Color ENABLED = JBColor.namedColor("Link.activeForeground", JBColor.namedColor("link.foreground", 0x589DF6)); Color HOVERED = JBColor.namedColor("Link.hoverForeground", JBColor.namedColor("link.hover.foreground", ENABLED)); Color PRESSED = JBColor.namedColor("Link.pressedForeground", JBColor.namedColor("link.pressed.foreground", 0xF00000, 0xBA6F25)); Color VISITED = JBColor.namedColor("Link.visitedForeground", JBColor.namedColor("link.visited.foreground", 0x800080, 0x9776A9)); Color SECONDARY = JBColor.namedColor("Link.secondaryForeground", 0x779DBD, 0x5676A0); } /** * @deprecated use {@link Foreground#ENABLED} instead */ @ApiStatus.ScheduledForRemoval(inVersion = "2022.1") @Deprecated @NotNull public static Color linkColor() { return Foreground.ENABLED; } /** * @deprecated use {@link Foreground#HOVERED} instead */ @ApiStatus.ScheduledForRemoval(inVersion = "2022.1") @Deprecated @NotNull public static Color linkHoverColor() { return Foreground.HOVERED; } /** * @deprecated use {@link Foreground#PRESSED} instead */ @ApiStatus.ScheduledForRemoval(inVersion = "2022.1") @Deprecated @NotNull public static Color linkPressedColor() { return Foreground.PRESSED; } /** * @deprecated use {@link Foreground#VISITED} instead */ @ApiStatus.ScheduledForRemoval(inVersion = "2022.1") @Deprecated @NotNull public static Color linkVisitedColor() { return Foreground.VISITED; } /** * @deprecated use {@link Foreground#SECONDARY} instead */ @ApiStatus.ScheduledForRemoval(inVersion = "2022.1") @Deprecated @NotNull public static Color linkSecondaryColor() { return Foreground.SECONDARY; } } public static final class Tooltip { @NotNull public static Color shortcutForeground () { return JBColor.namedColor("ToolTip.shortcutForeground", new JBColor(0x787878, 0x999999)); } @NotNull public static Color borderColor() { return JBColor.namedColor("ToolTip.borderColor", new JBColor(0xadadad, 0x636569)); } } public interface ContextHelp { @NotNull Color FOREGROUND = JBColor.namedColor("Label.infoForeground", new JBColor(Gray.x78, Gray.x8C)); } public static final class Arrow { @NotNull public static Color foregroundColor(boolean enabled) { return enabled ? 
JBColor.namedColor("ComboBox.ArrowButton.iconColor", JBColor.namedColor("ComboBox.darcula.arrowButtonForeground", Gray.x66)) : JBColor.namedColor("ComboBox.ArrowButton.disabledIconColor", JBColor.namedColor("ComboBox.darcula.arrowButtonDisabledForeground", Gray.xAB)); } @NotNull public static Color backgroundColor(boolean enabled, boolean editable) { return enabled ? editable ? JBColor.namedColor("ComboBox.ArrowButton.background", JBColor.namedColor("ComboBox.darcula.editable.arrowButtonBackground", Gray.xFC)) : JBColor.namedColor("ComboBox.ArrowButton.nonEditableBackground", JBColor.namedColor("ComboBox.darcula.arrowButtonBackground", Gray.xFC)) : UIUtil.getPanelBackground(); } } public static final class NewClassDialog { @NotNull public static Color searchFieldBackground() { return JBColor.namedColor("NewClass.SearchField.background", 0xffffff); } @NotNull public static Color panelBackground() { return JBColor.namedColor("NewClass.Panel.background", 0xf2f2f2); } @NotNull public static Color bordersColor() { return JBColor.namedColor( "TextField.borderColor", JBColor.namedColor("Component.borderColor", new JBColor(0xbdbdbd, 0x646464)) ); } public static int fieldsSeparatorWidth() { return getInt("NewClass.separatorWidth", JBUIScale.scale(10)); } } public static final class NotificationError { @NotNull public static Color backgroundColor() { return JBColor.namedColor("Notification.ToolWindow.errorBackground", new JBColor(0xffcccc, 0x704745)); } @NotNull public static Color foregroundColor() { return JBColor.namedColor("Notification.ToolWindow.errorForeground", UIUtil.getToolTipForeground()); } @NotNull public static Color borderColor() { return JBColor.namedColor("Notification.ToolWindow.errorBorderColor", new JBColor(0xd69696, 0x998a8a)); } } public static final class NotificationInfo { @NotNull public static Color backgroundColor() { return JBColor.namedColor("Notification.ToolWindow.informativeBackground", new JBColor(0xbaeeba, 0x33412E)); } @NotNull public static Color foregroundColor() { return JBColor.namedColor("Notification.ToolWindow.informativeForeground", UIUtil.getToolTipForeground()); } @NotNull public static Color borderColor() { return JBColor.namedColor("Notification.ToolWindow.informativeBorderColor", new JBColor(0xa0bf9d, 0x85997a)); } } public static final class NotificationWarning { @NotNull public static Color backgroundColor() { return JBColor.namedColor("Notification.ToolWindow.warningBackground", new JBColor(0xf9f78e, 0x5a5221)); } @NotNull public static Color foregroundColor() { return JBColor.namedColor("Notification.ToolWindow.warningForeground", UIUtil.getToolTipForeground()); } @NotNull public static Color borderColor() { return JBColor.namedColor("Notification.ToolWindow.warningBorderColor", new JBColor(0xbab824, 0xa69f63)); } } private static final Color DEFAULT_RENDERER_BACKGROUND = new JBColor(0xFFFFFF, 0x3C3F41); private static final Color DEFAULT_RENDERER_SELECTION_BACKGROUND = new JBColor(0x3875D6, 0x2F65CA); private static final Color DEFAULT_RENDERER_SELECTION_INACTIVE_BACKGROUND = new JBColor(0xD4D4D4, 0x0D293E); private static final Color DEFAULT_RENDERER_HOVER_BACKGROUND = new JBColor(0xEDF5FC, 0x464A4D); private static final Color DEFAULT_RENDERER_HOVER_INACTIVE_BACKGROUND = new JBColor(0xF5F5F5, 0x464A4D); public interface List { Color BACKGROUND = JBColor.namedColor("List.background", DEFAULT_RENDERER_BACKGROUND); Color FOREGROUND = JBColor.namedColor("List.foreground", Label.foreground(false)); static @NotNull Color background(boolean selected, 
boolean focused) { return selected ? Selection.background(focused) : BACKGROUND; } static @NotNull Color foreground(boolean selected, boolean focused) { return selected ? Selection.foreground(focused) : FOREGROUND; } final class Selection { private static final Color BACKGROUND = JBColor.namedColor("List.selectionBackground", DEFAULT_RENDERER_SELECTION_BACKGROUND); private static final Color FOREGROUND = JBColor.namedColor("List.selectionForeground", Label.foreground(true)); public static @NotNull Color background(boolean focused) { if (focused && UIUtil.isUnderDefaultMacTheme()) { double alpha = getInt("List.selectedItemAlpha", 75); if (0 <= alpha && alpha < 100) return ColorUtil.mix(Color.WHITE, BACKGROUND, alpha / 100.0); } return focused ? BACKGROUND : Inactive.BACKGROUND; } public static @NotNull Color foreground(boolean focused) { return focused ? FOREGROUND : Inactive.FOREGROUND; } private interface Inactive { Color BACKGROUND = JBColor.namedColor("List.selectionInactiveBackground", DEFAULT_RENDERER_SELECTION_INACTIVE_BACKGROUND); Color FOREGROUND = JBColor.namedColor("List.selectionInactiveForeground", List.FOREGROUND); } } final class Hover { private static final Color BACKGROUND = JBColor.namedColor("List.hoverBackground", DEFAULT_RENDERER_HOVER_BACKGROUND); public static @NotNull Color background(boolean focused) { return focused ? BACKGROUND : Inactive.BACKGROUND; } private interface Inactive { Color BACKGROUND = JBColor.namedColor("List.hoverInactiveBackground", DEFAULT_RENDERER_HOVER_INACTIVE_BACKGROUND); } } } public interface Table { Color BACKGROUND = JBColor.namedColor("Table.background", DEFAULT_RENDERER_BACKGROUND); Color FOREGROUND = JBColor.namedColor("Table.foreground", Label.foreground(false)); static @NotNull Color background(boolean selected, boolean focused) { return selected ? Selection.background(focused) : BACKGROUND; } static @NotNull Color foreground(boolean selected, boolean focused) { return selected ? Selection.foreground(focused) : FOREGROUND; } final class Selection { private static final Color BACKGROUND = JBColor.namedColor("Table.selectionBackground", DEFAULT_RENDERER_SELECTION_BACKGROUND); private static final Color FOREGROUND = JBColor.namedColor("Table.selectionForeground", Label.foreground(true)); public static @NotNull Color background(boolean focused) { return focused ? BACKGROUND : Inactive.BACKGROUND; } public static @NotNull Color foreground(boolean focused) { return focused ? FOREGROUND : Inactive.FOREGROUND; } private interface Inactive { Color BACKGROUND = JBColor.namedColor("Table.selectionInactiveBackground", DEFAULT_RENDERER_SELECTION_INACTIVE_BACKGROUND); Color FOREGROUND = JBColor.namedColor("Table.selectionInactiveForeground", Table.FOREGROUND); } } final class Hover { private static final Color BACKGROUND = JBColor.namedColor("Table.hoverBackground", DEFAULT_RENDERER_HOVER_BACKGROUND); public static @NotNull Color background(boolean focused) { return focused ? BACKGROUND : Inactive.BACKGROUND; } private interface Inactive { Color BACKGROUND = JBColor.namedColor("Table.hoverInactiveBackground", DEFAULT_RENDERER_HOVER_INACTIVE_BACKGROUND); } } } public interface Tree { Color BACKGROUND = JBColor.namedColor("Tree.background", DEFAULT_RENDERER_BACKGROUND); Color FOREGROUND = JBColor.namedColor("Tree.foreground", Label.foreground(false)); static @NotNull Color background(boolean selected, boolean focused) { return selected ? 
Selection.background(focused) : BACKGROUND; } static @NotNull Color foreground(boolean selected, boolean focused) { return selected ? Selection.foreground(focused) : FOREGROUND; } final class Selection { private static final Color BACKGROUND = JBColor.namedColor("Tree.selectionBackground", DEFAULT_RENDERER_SELECTION_BACKGROUND); private static final Color FOREGROUND = JBColor.namedColor("Tree.selectionForeground", Label.foreground(true)); public static @NotNull Color background(boolean focused) { return focused ? BACKGROUND : Inactive.BACKGROUND; } public static @NotNull Color foreground(boolean focused) { return focused ? FOREGROUND : Inactive.FOREGROUND; } private interface Inactive { Color BACKGROUND = JBColor.namedColor("Tree.selectionInactiveBackground", DEFAULT_RENDERER_SELECTION_INACTIVE_BACKGROUND); Color FOREGROUND = JBColor.namedColor("Tree.selectionInactiveForeground", Tree.FOREGROUND); } } final class Hover { private static final Color BACKGROUND = JBColor.namedColor("Tree.hoverBackground", DEFAULT_RENDERER_HOVER_BACKGROUND); public static @NotNull Color background(boolean focused) { return focused ? BACKGROUND : Inactive.BACKGROUND; } private interface Inactive { Color BACKGROUND = JBColor.namedColor("Tree.hoverInactiveBackground", DEFAULT_RENDERER_HOVER_INACTIVE_BACKGROUND); } } } } public static int getInt(@NonNls @NotNull String propertyName, int defaultValue) { Object value = UIManager.get(propertyName); return value instanceof Integer ? (Integer)value : defaultValue; } @NotNull private static Icon getIcon(@NonNls @NotNull String propertyName, @NotNull Icon defaultIcon) { Icon icon = UIManager.getIcon(propertyName); return icon == null ? defaultIcon : icon; } @NotNull private static Border getBorder(@NonNls @NotNull String propertyName, @NotNull Border defaultBorder) { Border border = UIManager.getBorder(propertyName); return border == null ? defaultBorder : border; } /* * The scaling classes/methods below are kept for binary compatibility with plugins built with IJ SDK 2018.3-2019.1 */ /** * @deprecated Use {@link UserScaleContext}. */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") public static class BaseScaleContext extends UserScaleContext { @SuppressWarnings("MethodOverloadsMethodOfSuperclass") public boolean update(@Nullable BaseScaleContext ctx) { return super.update(ctx); } public boolean update(@NotNull Scale scale) { return setScale(scale); } } /** * @deprecated Use {@link com.intellij.ui.scale.ScaleContext}. */ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") @SuppressWarnings({"ClassNameSameAsAncestorName", "MethodOverridesStaticMethodOfSuperclass"}) public static final class ScaleContext extends com.intellij.ui.scale.ScaleContext { private ScaleContext() { } @NotNull public static ScaleContext create() { return new ScaleContext(); } @NotNull public static ScaleContext create(@Nullable Component comp) { final ScaleContext ctx = new ScaleContext(com.intellij.ui.scale.ScaleType.SYS_SCALE.of(JBUIScale.sysScale(comp))); if (comp != null) ctx.compRef = new WeakReference<>(comp); return ctx; } @NotNull public static ScaleContext create(@NotNull Scale scale) { return new ScaleContext(scale); } private ScaleContext(@NotNull Scale scale) { setScale(scale); } @Override public boolean update(@Nullable BaseScaleContext context) { return super.update(context); } } /** * @deprecated Use {@link JBScalableIcon}. 
*/ @Deprecated @ApiStatus.ScheduledForRemoval(inVersion = "2021.3") @SuppressWarnings("DeprecatedIsStillUsed") public abstract static class JBIcon<T extends JBScalableIcon> extends JBScalableIcon { public JBIcon() { super(); } public JBIcon(T icon) { super(icon); } } }
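/* ------------------------------------------------------------------------
 * Illustrative usage sketch (not part of the original sources above).
 * It shows how client code would typically consume the named-colour helpers
 * defined in JBUI.CurrentTheme: each colour is resolved through a UIManager
 * key with a hard-coded fallback, so it should be queried at paint time rather
 * than cached, and the "focused" flag passed to the List helpers refers to the
 * owning component, not the individual cell.  The renderer class below is
 * hypothetical and only uses methods visible in the code above.
 * ------------------------------------------------------------------------ */
import java.awt.Color;
import java.awt.Component;
import javax.swing.DefaultListCellRenderer;
import javax.swing.JList;
import com.intellij.util.ui.JBUI;

class ThemeAwareListCellRenderer extends DefaultListCellRenderer {
  @Override
  public Component getListCellRendererComponent(JList<?> list, Object value,
                                                int index, boolean isSelected,
                                                boolean cellHasFocus) {
    super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
    // Resolve the colours on every call so a theme switch is picked up on the
    // next repaint.  JBUI.CurrentTheme.List falls back to the inactive
    // selection palette when the list itself does not own the focus.
    Color background = JBUI.CurrentTheme.List.background(isSelected, list.hasFocus());
    Color foreground = JBUI.CurrentTheme.List.foreground(isSelected, list.hasFocus());
    setBackground(background);
    setForeground(foreground);
    return this;
  }
}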
package org.apache.xerces.validators.schema; import org.apache.xerces.framework.XMLErrorReporter; import org.apache.xerces.validators.common.Grammar; import org.apache.xerces.validators.common.GrammarResolver; import org.apache.xerces.validators.common.GrammarResolverImpl; import org.apache.xerces.validators.common.XMLElementDecl; import org.apache.xerces.validators.common.XMLAttributeDecl; import org.apache.xerces.validators.schema.SchemaSymbols; import org.apache.xerces.validators.schema.XUtil; import org.apache.xerces.validators.schema.identity.Field; import org.apache.xerces.validators.schema.identity.IdentityConstraint; import org.apache.xerces.validators.schema.identity.Key; import org.apache.xerces.validators.schema.identity.KeyRef; import org.apache.xerces.validators.schema.identity.Selector; import org.apache.xerces.validators.schema.identity.Unique; import org.apache.xerces.validators.schema.identity.XPathException; import org.apache.xerces.validators.datatype.DatatypeValidator; import org.apache.xerces.validators.datatype.DatatypeValidatorFactoryImpl; import org.apache.xerces.validators.datatype.UnionDatatypeValidator; //CR implementation import org.apache.xerces.validators.datatype.InvalidDatatypeValueException; import org.apache.xerces.utils.StringPool; import org.w3c.dom.Element; import java.io.IOException; import java.util.*; import java.net.URL; import java.net.MalformedURLException; //REVISIT: for now, import everything in the DOM package import org.w3c.dom.*; //Unit Test import org.apache.xerces.parsers.DOMParser; import org.apache.xerces.validators.common.XMLValidator; import org.apache.xerces.validators.datatype.DatatypeValidator.*; import org.apache.xerces.validators.datatype.InvalidDatatypeValueException; import org.apache.xerces.framework.XMLContentSpec; import org.apache.xerces.utils.QName; import org.apache.xerces.utils.NamespacesScope; import org.apache.xerces.parsers.SAXParser; import org.apache.xerces.framework.XMLParser; import org.apache.xerces.framework.XMLDocumentScanner; import org.xml.sax.InputSource; import org.xml.sax.SAXParseException; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.w3c.dom.Document; import org.apache.xml.serialize.OutputFormat; import org.apache.xml.serialize.XMLSerializer; import org.apache.xerces.validators.schema.SchemaSymbols; /** * Instances of this class get delegated to Traverse the Schema and * to populate the Grammar internal representation by * instances of Grammar objects. * Traverse a Schema Grammar: * As of April 07, 2000 the following is the * XML Representation of Schemas and Schema components, * Chapter 4 of W3C Working Draft. * <schema * attributeFormDefault = qualified | unqualified * blockDefault = #all or (possibly empty) subset of {equivClass, extension, restriction} * elementFormDefault = qualified | unqualified * finalDefault = #all or (possibly empty) subset of {extension, restriction} * id = ID * targetNamespace = uriReference * version = string> * Content: ((include | import | annotation)* , ((simpleType | complexType | element | group | attribute | attributeGroup | notation) , annotation*)+) * </schema> * * * <attribute * form = qualified | unqualified * id = ID * name = NCName * ref = QName * type = QName * use = default | fixed | optional | prohibited | required * value = string> * Content: (annotation? , simpleType?) 
* </> * * <element * abstract = boolean * block = #all or (possibly empty) subset of {equivClass, extension, restriction} * default = string * equivClass = QName * final = #all or (possibly empty) subset of {extension, restriction} * fixed = string * form = qualified | unqualified * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * nullable = boolean * ref = QName * type = QName> * Content: (annotation? , (simpleType | complexType)? , (unique | key | keyref)*) * </> * * * <complexType * abstract = boolean * base = QName * block = #all or (possibly empty) subset of {extension, restriction} * content = elementOnly | empty | mixed | textOnly * derivedBy = extension | restriction * final = #all or (possibly empty) subset of {extension, restriction} * id = ID * name = NCName> * Content: (annotation? , (((minExclusive | minInclusive | maxExclusive | maxInclusive | precision | scale | length | minLength | maxLength | encoding | period | duration | enumeration | pattern)* | (element | group | all | choice | sequence | any)*) , ((attribute | attributeGroup)* , anyAttribute?))) * </> * * * <attributeGroup * id = ID * name = NCName * ref = QName> * Content: (annotation?, (attribute|attributeGroup), anyAttribute?) * </> * * <anyAttribute * id = ID * namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace}> * Content: (annotation?) * </anyAttribute> * * <group * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * ref = QName> * Content: (annotation? , (element | group | all | choice | sequence | any)*) * </> * * <all * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? , (element | group | choice | sequence | any)*) * </all> * * <choice * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? , (element | group | choice | sequence | any)*) * </choice> * * <sequence * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? , (element | group | choice | sequence | any)*) * </sequence> * * * <any * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace} * processContents = lax | skip | strict> * Content: (annotation?) * </any> * * <unique * id = ID * name = NCName> * Content: (annotation? , (selector , field+)) * </unique> * * <key * id = ID * name = NCName> * Content: (annotation? , (selector , field+)) * </key> * * <keyref * id = ID * name = NCName * refer = QName> * Content: (annotation? , (selector , field+)) * </keyref> * * <selector> * Content: XPathExprApprox : An XPath expression * </selector> * * <field> * Content: XPathExprApprox : An XPath expression * </field> * * * <notation * id = ID * name = NCName * public = A public identifier, per ISO 8879 * system = uriReference> * Content: (annotation?) * </notation> * * <annotation> * Content: (appinfo | documentation)* * </annotation> * * <include * id = ID * schemaLocation = uriReference> * Content: (annotation?) * </include> * * <import * id = ID * namespace = uriReference * schemaLocation = uriReference> * Content: (annotation?) * </import> * * <simpleType * id = ID * name = NCName * Content: (annotation? , ((list | restriction | union))) * </simpleType> * * <restriction * base = QName * id = ID * Content: (annotation? , (simpleType? 
, (duration | encoding | enumeration | length | maxExclusive | maxInclusive | maxLength | minExclusive | minInclusive | minLength | pattern | period | precision | scale | whiteSpace)*)) * </restriction> * * <list * id = ID * itemType = QName * Content: (annotation? , (simpleType?)) * </list> * * <union * id = ID * memberTypes = List of QName * Content: (annotation? , (simpleType*)) * </union> * * * <length * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </length> * * <minLength * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </minLength> * * <maxLength * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </maxLength> * * * <pattern * id = ID * value = string> * Content: ( annotation? ) * </pattern> * * * <enumeration * id = ID * value = string> * Content: ( annotation? ) * </enumeration> * * <maxInclusive * id = ID * value = string> * Content: ( annotation? ) * </maxInclusive> * * <maxExclusive * id = ID * value = string> * Content: ( annotation? ) * </maxExclusive> * * <minInclusive * id = ID * value = string> * Content: ( annotation? ) * </minInclusive> * * * <minExclusive * id = ID * value = string> * Content: ( annotation? ) * </minExclusive> * * <precision * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </precision> * * <scale * id = ID * value = nonNegativeInteger> * Content: ( annotation? ) * </scale> * * <encoding * id = ID * value = | hex | base64 > * Content: ( annotation? ) * </encoding> * * * <duration * id = ID * value = timeDuration> * Content: ( annotation? ) * </duration> * * <period * id = ID * value = timeDuration> * Content: ( annotation? ) * </period> * * * @author Eric Ye, Jeffrey Rodriguez, Andy Clark * * @see org.apache.xerces.validators.common.Grammar * * @version $Id$ */ public class TraverseSchema implements NamespacesScope.NamespacesHandler{ //CONSTANTS private static final int TOP_LEVEL_SCOPE = -1; /** Identity constraint keywords. */ private static final String[] IDENTITY_CONSTRAINTS = { SchemaSymbols.ELT_UNIQUE, SchemaSymbols.ELT_KEY, SchemaSymbols.ELT_KEYREF }; //debuggin private static boolean DEBUGGING = false; /** Compile to true to debug identity constraints. 
*/ private static boolean DEBUG_IDENTITY_CONSTRAINTS = false; //CR Implementation private static boolean DEBUG_UNION = false; private static boolean CR_IMPL = false; //private data members private XMLErrorReporter fErrorReporter = null; private StringPool fStringPool = null; private GrammarResolver fGrammarResolver = null; private SchemaGrammar fSchemaGrammar = null; private Element fSchemaRootElement; private DatatypeValidatorFactoryImpl fDatatypeRegistry = null; private Hashtable fComplexTypeRegistry = new Hashtable(); private Hashtable fAttributeDeclRegistry = new Hashtable(); private Vector fIncludeLocations = new Vector(); private Vector fImportLocations = new Vector(); private int fAnonTypeCount =0; private int fScopeCount=0; private int fCurrentScope=TOP_LEVEL_SCOPE; private int fSimpleTypeAnonCount = 0; private Stack fCurrentTypeNameStack = new Stack(); private Hashtable fElementRecurseComplex = new Hashtable(); private boolean fElementDefaultQualified = false; private boolean fAttributeDefaultQualified = false; private int fTargetNSURI; private String fTargetNSURIString = ""; private NamespacesScope fNamespacesScope = null; private String fCurrentSchemaURL = ""; private XMLAttributeDecl fTempAttributeDecl = new XMLAttributeDecl(); private XMLElementDecl fTempElementDecl = new XMLElementDecl(); private EntityResolver fEntityResolver = null; // REVISIT: maybe need to be moved into SchemaGrammar class public class ComplexTypeInfo { public String typeName; public DatatypeValidator baseDataTypeValidator; public ComplexTypeInfo baseComplexTypeInfo; public int derivedBy = 0; public int blockSet = 0; public int finalSet = 0; public boolean isAbstract = false; public int scopeDefined = -1; public int contentType; public int contentSpecHandle = -1; public int templateElementIndex = -1; public int attlistHead = -1; public DatatypeValidator datatypeValidator; } //REVISIT: verify the URI. public final static String SchemaForSchemaURI = "http: private TraverseSchema( ) { // new TraverseSchema() is forbidden; } public void setGrammarResolver(GrammarResolver grammarResolver){ fGrammarResolver = grammarResolver; } public void startNamespaceDeclScope(int prefix, int uri){ //TO DO } public void endNamespaceDeclScope(int prefix){ //TO DO, do we need to do anything here? } private String resolvePrefixToURI (String prefix) throws Exception { String uriStr = fStringPool.toString(fNamespacesScope.getNamespaceForPrefix(fStringPool.addSymbol(prefix))); if (uriStr == null) { // REVISIT: Localize reportGenericSchemaError("prefix : [" + prefix +"] can not be resolved to a URI"); return ""; } //REVISIT, !!!! a hack: needs to be updated later, cause now we only use localpart to key build-in datatype. 
if ( prefix.length()==0 && uriStr.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && fTargetNSURIString.length() == 0) { uriStr = ""; } return uriStr; } public TraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver, XMLErrorReporter errorReporter, String schemaURL, EntityResolver entityResolver ) throws Exception { fErrorReporter = errorReporter; fCurrentSchemaURL = schemaURL; fEntityResolver = entityResolver; doTraverseSchema(root, stringPool, schemaGrammar, grammarResolver); } public TraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver, XMLErrorReporter errorReporter, String schemaURL ) throws Exception { fErrorReporter = errorReporter; fCurrentSchemaURL = schemaURL; doTraverseSchema(root, stringPool, schemaGrammar, grammarResolver); } public TraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver ) throws Exception { doTraverseSchema(root, stringPool, schemaGrammar, grammarResolver); } public void doTraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver) throws Exception { fNamespacesScope = new NamespacesScope(this); fSchemaRootElement = root; fStringPool = stringPool; fSchemaGrammar = schemaGrammar; fGrammarResolver = grammarResolver; fDatatypeRegistry = (DatatypeValidatorFactoryImpl) fGrammarResolver.getDatatypeRegistry(); fDatatypeRegistry.expandRegistryToFullSchemaSet();//Expand to registry type to contain all primitive datatype if (root == null) { // REVISIT: Anything to do? return; } //Make sure namespace binding is defaulted String rootPrefix = root.getPrefix(); if( rootPrefix == null || rootPrefix.length() == 0 ){ String xmlns = root.getAttribute("xmlns"); if( xmlns.length() == 0 ) root.setAttribute("xmlns", SchemaSymbols.URI_SCHEMAFORSCHEMA ); } //Retrieve the targetnamespace URI information fTargetNSURIString = root.getAttribute(SchemaSymbols.ATT_TARGETNAMESPACE); if (fTargetNSURIString==null) { fTargetNSURIString=""; } fTargetNSURI = fStringPool.addSymbol(fTargetNSURIString); if (fGrammarResolver == null) { // REVISIT: Localize reportGenericSchemaError("Internal error: don't have a GrammarResolver for TraverseSchema"); } else{ // for complex type registry, attribute decl registry and // namespace mapping, needs to check whether the passed in // Grammar was a newly instantiated one. if (fSchemaGrammar.getComplexTypeRegistry() == null ) { fSchemaGrammar.setComplexTypeRegistry(fComplexTypeRegistry); } else { fComplexTypeRegistry = fSchemaGrammar.getComplexTypeRegistry(); } if (fSchemaGrammar.getAttirubteDeclRegistry() == null ) { fSchemaGrammar.setAttributeDeclRegistry(fAttributeDeclRegistry); } else { fAttributeDeclRegistry = fSchemaGrammar.getAttirubteDeclRegistry(); } if (fSchemaGrammar.getNamespacesScope() == null ) { fSchemaGrammar.setNamespacesScope(fNamespacesScope); } else { fNamespacesScope = fSchemaGrammar.getNamespacesScope(); } fSchemaGrammar.setDatatypeRegistry(fDatatypeRegistry); fSchemaGrammar.setTargetNamespaceURI(fTargetNSURIString); fGrammarResolver.putGrammar(fTargetNSURIString, fSchemaGrammar); } // Retrived the Namespace mapping from the schema element. 
NamedNodeMap schemaEltAttrs = root.getAttributes(); int i = 0; Attr sattr = null; boolean seenXMLNS = false; while ((sattr = (Attr)schemaEltAttrs.item(i++)) != null) { String attName = sattr.getName(); if (attName.startsWith("xmlns:")) { String attValue = sattr.getValue(); String prefix = attName.substring(attName.indexOf(":")+1); fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(prefix), fStringPool.addSymbol(attValue) ); } if (attName.equals("xmlns")) { String attValue = sattr.getValue(); fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""), fStringPool.addSymbol(attValue) ); seenXMLNS = true; } } if (!seenXMLNS && fTargetNSURIString.length() == 0 ) { fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""), fStringPool.addSymbol("") ); } fElementDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ELEMENTFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); fAttributeDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ATTRIBUTEFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); //REVISIT, really sticky when noTargetNamesapce, for now, we assume everyting is in the same name space); if (fTargetNSURI == StringPool.EMPTY_STRING) { fElementDefaultQualified = true; //fAttributeDefaultQualified = true; } //fScopeCount++; fCurrentScope = -1; checkTopLevelDuplicateNames(root); //extract all top-level attribute, attributeGroup, and group Decls and put them in the 3 hasn table in the SchemaGrammar. extractTopLevel3Components(root); for (Element child = XUtil.getFirstChildElement(root); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) { traverseAnnotationDecl(child); } else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE )) { traverseSimpleTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_COMPLEXTYPE )) { traverseComplexTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_ELEMENT )) { traverseElementDecl(child); } else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { //traverseAttributeGroupDecl(child); } else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) { traverseAttributeDecl( child, null ); } else if (name.equals(SchemaSymbols.ELT_GROUP) && child.getAttribute(SchemaSymbols.ATT_REF).equals("")) { //traverseGroupDecl(child); } else if (name.equals(SchemaSymbols.ELT_NOTATION)) { ; //TO DO } else if (name.equals(SchemaSymbols.ELT_INCLUDE)) { traverseInclude(child); } else if (name.equals(SchemaSymbols.ELT_IMPORT)) { traverseImport(child); } } // for each child node } // traverseSchema(Element) private void checkTopLevelDuplicateNames(Element root) { //TO DO : !!! } private void extractTopLevel3Components(Element root){ for (Element child = XUtil.getFirstChildElement(root); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); String compName = child.getAttribute(SchemaSymbols.ATT_NAME); if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { fSchemaGrammar.topLevelAttrGrpDecls.put(compName, child); } else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) { fSchemaGrammar.topLevelAttrDecls.put(compName, child); } else if ( name.equals(SchemaSymbols.ELT_GROUP) ) { fSchemaGrammar.topLevelGroupDecls.put(compName, child); } } // for each child node } /** * Expands a system id and returns the system id as a URL, if * it can be expanded. A return value of null means that the * identifier is already expanded. An exception thrown * indicates a failure to expand the id. * * @param systemId The systemId to be expanded. 
* * @return Returns the URL object representing the expanded system * identifier. A null value indicates that the given * system identifier is already expanded. * */ private String expandSystemId(String systemId, String currentSystemId) throws Exception{ String id = systemId; // check for bad parameters id if (id == null || id.length() == 0) { return systemId; } // if id already expanded, return try { URL url = new URL(id); if (url != null) { return systemId; } } catch (MalformedURLException e) { // continue on... } // normalize id id = fixURI(id); // normalize base URL base = null; URL url = null; try { if (currentSystemId == null) { String dir; try { dir = fixURI(System.getProperty("user.dir")); } catch (SecurityException se) { dir = ""; } if (!dir.endsWith("/")) { dir = dir + "/"; } base = new URL("file", "", dir); } else { base = new URL(currentSystemId); } // expand id url = new URL(base, id); } catch (Exception e) { // let it go through } if (url == null) { return systemId; } return url.toString(); } /** * Fixes a platform dependent filename to standard URI form. * * @param str The string to fix. * * @return Returns the fixed URI string. */ private static String fixURI(String str) { // handle platform dependent strings str = str.replace(java.io.File.separatorChar, '/'); // Windows fix if (str.length() >= 2) { char ch1 = str.charAt(1); if (ch1 == ':') { char ch0 = Character.toUpperCase(str.charAt(0)); if (ch0 >= 'A' && ch0 <= 'Z') { str = "/" + str; } } } // done return str; } private void traverseInclude(Element includeDecl) throws Exception { String location = includeDecl.getAttribute(SchemaSymbols.ATT_SCHEMALOCATION); // expand it before passing it to the parser InputSource source = null; if (fEntityResolver != null) { source = fEntityResolver.resolveEntity("", location); } if (source == null) { location = expandSystemId(location, fCurrentSchemaURL); source = new InputSource(location); } else { // create a string for uniqueness of this included schema in fIncludeLocations if (source.getPublicId () != null) location = source.getPublicId (); location += (',' + source.getSystemId ()); } if (fIncludeLocations.contains((Object)location)) { return; } fIncludeLocations.addElement((Object)location); DOMParser parser = new IgnoreWhitespaceParser(); parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() ); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://xml.org/sax/features/namespaces", true); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } try { parser.parse( source ); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { //e.printStackTrace(); } Document document = parser.getDocument(); //Our Grammar Element root = null; if (document != null) { root = document.getDocumentElement(); } if (root != null) { String targetNSURI = root.getAttribute(SchemaSymbols.ATT_TARGETNAMESPACE); if (targetNSURI.length() > 0 && !targetNSURI.equals(fTargetNSURIString) ) { // REVISIT: Localize reportGenericSchemaError("included schema '"+location+"' has a different targetNameSpace '" +targetNSURI+"'"); } else { // We not creating another TraverseSchema object to compile // the included schema file, because the scope count, anon-type count // should not be reset for a included schema, this can be fixed by saving // the counters 
in the Schema Grammar, boolean saveElementDefaultQualified = fElementDefaultQualified; boolean saveAttributeDefaultQualified = fAttributeDefaultQualified; int saveScope = fCurrentScope; String savedSchemaURL = fCurrentSchemaURL; Element saveRoot = fSchemaRootElement; fSchemaRootElement = root; fCurrentSchemaURL = location; traverseIncludedSchema(root); fCurrentSchemaURL = savedSchemaURL; fCurrentScope = saveScope; fElementDefaultQualified = saveElementDefaultQualified; fAttributeDefaultQualified = saveAttributeDefaultQualified; fSchemaRootElement = saveRoot; } } } private void traverseIncludedSchema(Element root) throws Exception { // Retrieve the Namespace mapping from the schema element. NamedNodeMap schemaEltAttrs = root.getAttributes(); int i = 0; Attr sattr = null; boolean seenXMLNS = false; while ((sattr = (Attr)schemaEltAttrs.item(i++)) != null) { String attName = sattr.getName(); if (attName.startsWith("xmlns:")) { String attValue = sattr.getValue(); String prefix = attName.substring(attName.indexOf(":")+1); fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(prefix), fStringPool.addSymbol(attValue) ); } if (attName.equals("xmlns")) { String attValue = sattr.getValue(); fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""), fStringPool.addSymbol(attValue) ); seenXMLNS = true; } } if (!seenXMLNS && fTargetNSURIString.length() == 0 ) { fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""), fStringPool.addSymbol("") ); } fElementDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ELEMENTFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); fAttributeDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ATTRIBUTEFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); //REVISIT: really sticky when there is no targetNamespace; for now, we assume everything is in the same namespace. if (fTargetNSURI == StringPool.EMPTY_STRING) { fElementDefaultQualified = true; //fAttributeDefaultQualified = true; } //fScopeCount++; fCurrentScope = -1; checkTopLevelDuplicateNames(root); //extract all top-level attribute, attributeGroup, and group Decls and put them in the 3 hash tables in the SchemaGrammar. 
extractTopLevel3Components(root); for (Element child = XUtil.getFirstChildElement(root); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) { traverseAnnotationDecl(child); } else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE )) { traverseSimpleTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_COMPLEXTYPE )) { traverseComplexTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_ELEMENT )) { traverseElementDecl(child); } else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { //traverseAttributeGroupDecl(child); } else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) { traverseAttributeDecl( child, null ); } else if (name.equals(SchemaSymbols.ELT_GROUP) && child.getAttribute(SchemaSymbols.ATT_REF).equals("")) { //traverseGroupDecl(child); } else if (name.equals(SchemaSymbols.ELT_NOTATION)) { ; //TO DO } else if (name.equals(SchemaSymbols.ELT_INCLUDE)) { traverseInclude(child); } else if (name.equals(SchemaSymbols.ELT_IMPORT)) { traverseImport(child); } } // for each child node } private void traverseImport(Element importDecl) throws Exception { String location = importDecl.getAttribute(SchemaSymbols.ATT_SCHEMALOCATION); // expand it before passing it to the parser InputSource source = null; if (fEntityResolver != null) { source = fEntityResolver.resolveEntity("", location); } if (source == null) { location = expandSystemId(location, fCurrentSchemaURL); source = new InputSource(location); } else { // create a string for uniqueness of this imported schema in fImportLocations if (source.getPublicId () != null) location = source.getPublicId (); location += (',' + source.getSystemId ()); } if (fImportLocations.contains((Object)location)) { return; } fImportLocations.addElement((Object)location); String namespaceString = importDecl.getAttribute(SchemaSymbols.ATT_NAMESPACE); SchemaGrammar importedGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(namespaceString); if (importedGrammar == null) { importedGrammar = new SchemaGrammar(); } DOMParser parser = new IgnoreWhitespaceParser(); parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() ); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://xml.org/sax/features/namespaces", true); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } try { parser.parse( source ); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { e.printStackTrace(); } Document document = parser.getDocument(); //Our Grammar Element root = null; if (document != null) { root = document.getDocumentElement(); } if (root != null) { String targetNSURI = root.getAttribute(SchemaSymbols.ATT_TARGETNAMESPACE); if (!targetNSURI.equals(namespaceString) ) { // REVISIT: Localize reportGenericSchemaError("imported schema '"+location+"' has a different targetNameSpace '" +targetNSURI+"' from what is declared '"+namespaceString+"'."); } else new TraverseSchema(root, fStringPool, importedGrammar, fGrammarResolver, fErrorReporter, location, fEntityResolver); } else { reportGenericSchemaError("Could not get the doc root for imported Schema file: "+location); } } /** * No-op - Traverse Annotation Declaration * * @param comment */ private void traverseAnnotationDecl(Element comment) { //TO DO return ; } /** * Traverse 
SimpleType declaration: * <simpleType * abstract = boolean * base = QName * derivedBy = | list | restriction : restriction * id = ID * name = NCName> * Content: ( annotation? , ( minExclusive | minInclusive | maxExclusive | maxInclusive | precision | scale | length | minLength | maxLength | encoding | period | duration | enumeration | pattern )* ) * </simpleType> * * @param simpleTypeDecl * @return */ private int traverseSimpleTypeDecl( Element simpleTypeDecl ) throws Exception { if ( CR_IMPL ) { return traverseSimpleType(simpleTypeDecl); } String varietyProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_DERIVEDBY ); if (varietyProperty.length() == 0) { varietyProperty = SchemaSymbols.ATTVAL_RESTRICTION; } String nameProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME ); String baseTypeQNameProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ); String abstractProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_ABSTRACT ); int newSimpleTypeName = -1; if ( nameProperty.equals("")) { // anonymous simpleType newSimpleTypeName = fStringPool.addSymbol( "#S#"+fSimpleTypeAnonCount++ ); } else newSimpleTypeName = fStringPool.addSymbol( nameProperty ); int basetype; DatatypeValidator baseValidator = null; if( baseTypeQNameProperty!= null ) { basetype = fStringPool.addSymbol( baseTypeQNameProperty ); String prefix = ""; String localpart = baseTypeQNameProperty; int colonptr = baseTypeQNameProperty.indexOf(":"); if ( colonptr > 0) { prefix = baseTypeQNameProperty.substring(0,colonptr); localpart = baseTypeQNameProperty.substring(colonptr+1); } String uri = resolvePrefixToURI(prefix); baseValidator = getDatatypeValidator(uri, localpart); if (baseValidator == null) { Element baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (baseTypeNode != null) { traverseSimpleTypeDecl( baseTypeNode ); baseValidator = getDatatypeValidator(uri, localpart); if (baseValidator == null) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ), simpleTypeDecl.getAttribute(SchemaSymbols.ATT_NAME) }); return -1; //reportGenericSchemaError("Base type could not be found : " + baseTypeQNameProperty); } } else { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ), simpleTypeDecl.getAttribute(SchemaSymbols.ATT_NAME) }); return -1; //reportGenericSchemaError("Base type could not be found : " + baseTypeQNameProperty); } } } // Any Children if so then check Content otherwise bail out Element content = XUtil.getFirstChildElement( simpleTypeDecl ); int numFacets = 0; Hashtable facetData = null; if( content != null ) { //Content follows: ( annotation? , facets* ) //annotation ? 
( 0 or 1 ) if( content.getLocalName().equals( SchemaSymbols.ELT_ANNOTATION ) ){ traverseAnnotationDecl( content ); content = XUtil.getNextSiblingElement(content); } //TODO: If content is annotation again should raise validation error // if( content.getLocalName().equal( SchemaSymbols.ELT_ANNOTATION ) { // throw ValidationException(); } //facets * ( 0 or more ) int numEnumerationLiterals = 0; facetData = new Hashtable(); Vector enumData = new Vector(); while (content != null) { if (content.getNodeType() == Node.ELEMENT_NODE) { Element facetElt = (Element) content; numFacets++; if (facetElt.getLocalName().equals(SchemaSymbols.ELT_ENUMERATION)) { numEnumerationLiterals++; String enumVal = facetElt.getAttribute(SchemaSymbols.ATT_VALUE); enumData.addElement(enumVal); //Enumerations can have annotations ? ( 0 | 1 ) Element enumContent = XUtil.getFirstChildElement( facetElt ); if( enumContent != null && enumContent != null && enumContent.getLocalName().equals( SchemaSymbols.ELT_ANNOTATION ) ){ traverseAnnotationDecl( content ); } //TODO: If enumContent is encounter again should raise validation error // enumContent.getNextSibling(); // if( enumContent.getLocalName().equal( SchemaSymbols.ELT_ANNOTATIO ) { // throw ValidationException(); } } else { facetData.put(facetElt.getLocalName(),facetElt.getAttribute( SchemaSymbols.ATT_VALUE )); } } //content = (Element) content.getNextSibling(); content = XUtil.getNextSiblingElement(content); } if (numEnumerationLiterals > 0) { facetData.put(SchemaSymbols.ELT_ENUMERATION, enumData); } } // create & register validator for "generated" type if it doesn't exist String nameOfType = fStringPool.toString( newSimpleTypeName); if (fTargetNSURIString.length () != 0) { nameOfType = fTargetNSURIString+","+nameOfType; } try { DatatypeValidator newValidator = fDatatypeRegistry.getDatatypeValidator( nameOfType ); if( newValidator == null ) { // not previously registered boolean derivedByList = varietyProperty.equals( SchemaSymbols.ATTVAL_LIST ) ? true:false; fDatatypeRegistry.createDatatypeValidator( nameOfType, baseValidator, facetData, derivedByList ); } } catch (Exception e) { reportSchemaError(SchemaMessageProvider.DatatypeError,new Object [] { e.getMessage() }); } return fStringPool.addSymbol(nameOfType); } //@param: elm - top element //@param: content - content must be annotation? or some other simple content //@param: isEmpty: -- true if (annotation?, smth_else), false if (annotation?) //check for Annotation if it is present //REVISIT: this function should be used in all traverse* methods! private Element checkContent( Element elm, Element content, boolean isEmpty ) throws Exception { //isEmpty = true-> means content can be null! 
if ( content == null) { if (!isEmpty) { reportSchemaError(SchemaMessageProvider.ContentError, new Object [] { elm.getAttribute( SchemaSymbols.ATT_NAME )}); } return null; } if (content.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) { traverseAnnotationDecl( content ); content = XUtil.getNextSiblingElement(content); if (content == null ) { //must be followed by <simpleType?> if (!isEmpty) { reportSchemaError(SchemaMessageProvider.ContentError, new Object [] { elm.getAttribute( SchemaSymbols.ATT_NAME )}); } return null; } if (content.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) { reportSchemaError(SchemaMessageProvider.AnnotationError, new Object [] { elm.getAttribute( SchemaSymbols.ATT_NAME )}); return null; } //return null if expected only annotation?, else returns updated content } return content; } //@param: elm - top element //@param: baseTypeStr - type (base/itemType/memberTypes) //return DatatypeValidator available for the baseTypeStr. //REVISIT: this function should be used in some|all traverse* methods! private DatatypeValidator findDTValidator (Element elm, String baseTypeStr ) throws Exception{ int baseType = fStringPool.addSymbol( baseTypeStr ); String prefix = ""; DatatypeValidator baseValidator = null; String localpart = baseTypeStr; int colonptr = baseTypeStr.indexOf(":"); if ( colonptr > 0) { prefix = baseTypeStr.substring(0,colonptr); localpart = baseTypeStr.substring(colonptr+1); } String uri = resolvePrefixToURI(prefix); baseValidator = getDatatypeValidator(uri, localpart); if (baseValidator == null) { Element baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (baseTypeNode != null) { traverseSimpleTypeDecl( baseTypeNode ); baseValidator = getDatatypeValidator(uri, localpart); } } if ( baseValidator == null ) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { elm.getAttribute( SchemaSymbols.ATT_BASE ), elm.getAttribute(SchemaSymbols.ATT_NAME)}); } return baseValidator; } /** * Traverse SimpleType declaration: * <simpleType * id = ID * name = NCName> * Content: (annotation? , ((list | restriction | union))) * </simpleType> * traverse <list>|<restriction>|<union> * * @param simpleTypeDecl * @return */ private int traverseSimpleType( Element simpleTypeDecl ) throws Exception { //REVISIT: remove all DEBUG_UNION. if (DEBUG_UNION) { System.out.println("[traverseSimpleType]"); } //REVISIT: are we checking for attributes and other definitions that should not be there? String nameProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME ); boolean list = false; boolean union = false; boolean restriction = false; int newSimpleTypeName = -1; if ( nameProperty.equals("")) { // anonymous simpleType newSimpleTypeName = fStringPool.addSymbol( "#S#"+fSimpleTypeAnonCount++ ); } else newSimpleTypeName = fStringPool.addSymbol( nameProperty ); //annotation?,(list|restriction|union) Element content = XUtil.getFirstChildElement(simpleTypeDecl); content = checkContent(simpleTypeDecl, content, false); if (content == null) { return (-1); } //use content.getLocalName for the cases where "xsd:" is a prefix, i.e. "xsd:list" String varietyProperty = content.getLocalName(); String baseTypeQNameProperty = null; Vector dTValidators = null; int size = 0; StringTokenizer unionMembers = null; int numOfTypes = 0; //list/restriction = 1, union = "+" if (DEBUG_UNION) { System.out.println("[varietyProperty]:"+ varietyProperty ); } //REVISIT: change symbols from ATTVAL_ to ELM_.
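// The variety children dispatched below correspond to schema fragments along these lines (illustrative examples only, not taken from any particular schema):
//   <simpleType name="size"> <restriction base="integer"> <enumeration value="1"/> </restriction> </simpleType>
//   <simpleType name="sizes"> <list itemType="size"/> </simpleType>
//   <simpleType name="sizeOrName"> <union memberTypes="size string"/> </simpleType>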
if (varietyProperty.equals(SchemaSymbols.ATTVAL_LIST)) { //traverse List baseTypeQNameProperty = content.getAttribute( SchemaSymbols.ATT_ITEMTYPE ); list = true; } else if (varietyProperty.equals(SchemaSymbols.ATTVAL_RESTRICTION)) { //traverse Restriction baseTypeQNameProperty = content.getAttribute( SchemaSymbols.ATT_BASE ); restriction= true; } else if (varietyProperty.equals(SchemaSymbols.ATTVAL_UNION)) { //traverse union union = true; baseTypeQNameProperty = content.getAttribute( SchemaSymbols.ATT_MEMBERTYPES); if (baseTypeQNameProperty != "" ) { unionMembers = new StringTokenizer( baseTypeQNameProperty ); size = unionMembers.countTokens(); } else { size = 1; //at least one must be seen as <simpleType> decl } dTValidators = new Vector (size, 2); } else { reportSchemaError(SchemaMessageProvider.FeatureUnsupported, new Object [] { varietyProperty }); return -1; } int typeNameIndex; DatatypeValidator baseValidator = null; if (DEBUG_UNION) { System.out.println("[nameProperty]= " +nameProperty); System.out.println("[base]= " +baseTypeQNameProperty+";"); System.out.println("[size]= " +size); if (unionMembers!=null) { System.out.println("[unionMembers]= " +unionMembers.toString()); } } if ( baseTypeQNameProperty == "" ) { //must 'see' <simpleType> //content = {annotation?,simpleType?...} content = XUtil.getFirstChildElement(content); //check content (annotation?, ...) content = checkContent(simpleTypeDecl, content, false); if (content == null) { return (-1); } if (content.getLocalName().equals( SchemaSymbols.ELT_SIMPLETYPE )) { //Test... typeNameIndex = traverseSimpleTypeDecl(content); if (DEBUG_UNION) { System.out.println("[After traverseSimpleTypeDecl]: " +fStringPool.toString(typeNameIndex)); System.out.println("[traverseSimpleTypeDecl]: " + nameProperty); } if (typeNameIndex!=-1) { baseValidator=fDatatypeRegistry.getDatatypeValidator(fStringPool.toString(typeNameIndex)); if (baseValidator !=null && union) { dTValidators.addElement((DatatypeValidator)baseValidator); } } if ( typeNameIndex == -1 || baseValidator == null) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { content.getAttribute( SchemaSymbols.ATT_BASE ), content.getAttribute(SchemaSymbols.ATT_NAME) }); return -1; } } } //end - must see simpleType? else { //base was provided - get proper validator. numOfTypes = 1; if (union) { numOfTypes= size; } for (int i=0; i<numOfTypes; i++) { //find all validators if (union) { baseTypeQNameProperty = unionMembers.nextToken(); } baseValidator = findDTValidator ( simpleTypeDecl, baseTypeQNameProperty); if ( baseValidator == null) { return (-1); } if (union) { dTValidators.addElement((DatatypeValidator)baseValidator); //add validator to structure } //REVISIT: Should we raise exception here? 
// if baseValidator.isInstanceOf(LIST) and UNION if ( list && (baseValidator instanceof UnionDatatypeValidator)) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ), simpleTypeDecl.getAttribute(SchemaSymbols.ATT_NAME)}); return -1; } } } //end - base is available // move to next child // restriction ->[simpleType]->[facets] OR // restriction ->[facets] if (baseTypeQNameProperty == "") { //we already got the first kid of union/list/restriction content = XUtil.getNextSiblingElement( content ); } else { //we need to look at first kid of union/list/restriction content = XUtil.getFirstChildElement(content); } //get more types for union if any if (union) { int index=size; while (content!=null) { if (DEBUG_UNION) { System.out.println("[start Union types traversal] + " + content.getNodeName()); System.out.println(index+"-Getting all other simpletypes"); System.out.println("content: " + content.getNodeName()); } typeNameIndex = traverseSimpleTypeDecl(content); if (typeNameIndex!=-1) { baseValidator=fDatatypeRegistry.getDatatypeValidator(fStringPool.toString(typeNameIndex)); if (baseValidator != null) { if (DEBUG_UNION) { System.out.println("validator to add: " + baseValidator.toString()); } dTValidators.addElement((DatatypeValidator)baseValidator); } } if ( baseValidator == null || typeNameIndex == -1) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ), simpleTypeDecl.getAttribute(SchemaSymbols.ATT_NAME)}); return (-1); } content = XUtil.getNextSiblingElement( content ); } } // end - traverse Union Hashtable facetData =null; int numFacets=0; if (restriction && content != null) { int numEnumerationLiterals = 0; facetData = new Hashtable(); Vector enumData = new Vector(); while (content != null) { if (content.getNodeType() == Node.ELEMENT_NODE) { numFacets++; if (content.getLocalName().equals(SchemaSymbols.ELT_ENUMERATION)) { numEnumerationLiterals++; String enumVal = content.getAttribute(SchemaSymbols.ATT_VALUE); enumData.addElement(enumVal); //Enumerations can have annotations ? ( 0 | 1 ) checkContent(simpleTypeDecl, XUtil.getFirstChildElement( content ), true); } else { facetData.put(content.getLocalName(),content.getAttribute( SchemaSymbols.ATT_VALUE )); } } content = XUtil.getNextSiblingElement(content); } if (numEnumerationLiterals > 0) { facetData.put(SchemaSymbols.ELT_ENUMERATION, enumData); } } else if (list && content!=null) { // report error - must not have any children! reportSchemaError(SchemaMessageProvider.ListUnionRestrictionError, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME )}); //REVISIT: should we return? } else if (union && content!=null) { //report error - must not have any children! reportSchemaError(SchemaMessageProvider.ListUnionRestrictionError, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME )}); //REVISIT: should we return? 
} // create & register validator for "generated" type if it doesn't exist String nameOfType = fStringPool.toString( newSimpleTypeName); if (fTargetNSURIString.length () != 0) { nameOfType = fTargetNSURIString+","+nameOfType; } try { DatatypeValidator newValidator = fDatatypeRegistry.getDatatypeValidator( nameOfType ); if( newValidator == null ) { // not previously registered if (list) { fDatatypeRegistry.createDatatypeValidator( nameOfType, baseValidator, facetData,true); } else if (restriction) { fDatatypeRegistry.createDatatypeValidator( nameOfType, baseValidator, facetData,false); } else { //union fDatatypeRegistry.createDatatypeValidator( nameOfType, dTValidators); } } } catch (Exception e) { reportSchemaError(SchemaMessageProvider.DatatypeError,new Object [] { e.getMessage() }); } return fStringPool.addSymbol(nameOfType); } /* * <any * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace} * processContents = lax | skip | strict> * Content: (annotation?) * </any> */ private int traverseAny(Element child) throws Exception { int anyIndex = -1; String namespace = child.getAttribute(SchemaSymbols.ATT_NAMESPACE).trim(); String processContents = child.getAttribute("processContents").trim(); int processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY; int processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER; int processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_LOCAL; if (processContents.length() > 0 && !processContents.equals("strict")) { if (processContents.equals("lax")) { processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY_LAX; processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER_LAX; processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_LOCAL_LAX; } else if (processContents.equals("skip")) { processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY_SKIP; processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER_SKIP; processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_LOCAL_SKIP; } } if (namespace.length() == 0 || namespace.equals("##any")) { anyIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, -1, false); } else if (namespace.equals("##other")) { String uri = child.getOwnerDocument().getDocumentElement().getAttribute("targetNamespace"); int uriIndex = fStringPool.addSymbol(uri); anyIndex = fSchemaGrammar.addContentSpecNode(processContentsAnyOther, -1, uriIndex, false); } else if (namespace.equals("##local")) { anyIndex = fSchemaGrammar.addContentSpecNode(processContentsAnyLocal, -1, -1, false); } else if (namespace.length() > 0) { StringTokenizer tokenizer = new StringTokenizer(namespace); Vector tokens = new Vector(); while (tokenizer.hasMoreElements()) { String token = tokenizer.nextToken(); if (token.equals("##targetNamespace")) { token = child.getOwnerDocument().getDocumentElement().getAttribute("targetNamespace"); } tokens.addElement(token); } String uri = (String)tokens.elementAt(0); int uriIndex = fStringPool.addSymbol(uri); int leafIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, uriIndex, false); int valueIndex = leafIndex; int count = tokens.size(); if (count > 1) { uri = (String)tokens.elementAt(1); uriIndex = fStringPool.addSymbol(uri); leafIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, uriIndex, false); int otherValueIndex = leafIndex; int choiceIndex = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, valueIndex, otherValueIndex, false); for (int i = 2; 
i < count; i++) { uri = (String)tokens.elementAt(i); uriIndex = fStringPool.addSymbol(uri); leafIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, uriIndex, false); otherValueIndex = leafIndex; choiceIndex = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, choiceIndex, otherValueIndex, false); } anyIndex = choiceIndex; } else { anyIndex = leafIndex; } } else { // REVISIT: Localize reportGenericSchemaError("Empty namespace attribute for any element"); } return anyIndex; } public DatatypeValidator getDatatypeValidator(String uri, String localpart) { DatatypeValidator dv = null; if (uri.length()==0 || uri.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)) { dv = fDatatypeRegistry.getDatatypeValidator( localpart ); } else { dv = fDatatypeRegistry.getDatatypeValidator( uri+","+localpart ); } return dv; } /* * <anyAttribute * id = ID * namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace}> * Content: (annotation?) * </anyAttribute> */ private XMLAttributeDecl traverseAnyAttribute(Element anyAttributeDecl) throws Exception { XMLAttributeDecl anyAttDecl = new XMLAttributeDecl(); String processContents = anyAttributeDecl.getAttribute(SchemaSymbols.ATT_PROCESSCONTENTS).trim(); String namespace = anyAttributeDecl.getAttribute(SchemaSymbols.ATT_NAMESPACE).trim(); String curTargetUri = anyAttributeDecl.getOwnerDocument().getDocumentElement().getAttribute("targetNamespace"); if ( namespace.length() == 0 || namespace.equals(SchemaSymbols.ATTVAL_TWOPOUNDANY) ) { anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_ANY; } else if (namespace.equals(SchemaSymbols.ATTVAL_TWOPOUNDOTHER)) { anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_OTHER; anyAttDecl.name.uri = fStringPool.addSymbol(curTargetUri); } else if (namespace.equals(SchemaSymbols.ATTVAL_TWOPOUNDLOCAL)) { anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_LOCAL; } else if (namespace.length() > 0){ anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_LIST; StringTokenizer tokenizer = new StringTokenizer(namespace); int aStringList = fStringPool.startStringList(); Vector tokens = new Vector(); while (tokenizer.hasMoreElements()) { String token = tokenizer.nextToken(); if (token.equals("##targetNamespace")) { token = curTargetUri; } if (!fStringPool.addStringToList(aStringList, fStringPool.addSymbol(token))){ reportGenericSchemaError("Internal StringPool error when reading the "+ "namespace attribute for anyattribute declaration"); } } fStringPool.finishStringList(aStringList); anyAttDecl.enumeration = aStringList; } else { // REVISIT: Localize reportGenericSchemaError("Empty namespace attribute for anyattribute declaration"); } // default processContents is "strict"; anyAttDecl.defaultType = XMLAttributeDecl.PROCESSCONTENTS_STRICT; if (processContents.equals(SchemaSymbols.ATTVAL_SKIP)){ anyAttDecl.defaultType = XMLAttributeDecl.PROCESSCONTENTS_SKIP; } else if (processContents.equals(SchemaSymbols.ATTVAL_LAX)) { anyAttDecl.defaultType = XMLAttributeDecl.PROCESSCONTENTS_LAX; } return anyAttDecl; } private XMLAttributeDecl mergeTwoAnyAttribute(XMLAttributeDecl oneAny, XMLAttributeDecl anotherAny) { if (oneAny.type == -1) { return oneAny; } if (anotherAny.type == -1) { return anotherAny; } if (oneAny.type == XMLAttributeDecl.TYPE_ANY_ANY) { return anotherAny; } if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_ANY) { return oneAny; } if (oneAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) { if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) { if ( anotherAny.name.uri == oneAny.name.uri ) { return oneAny; } else { oneAny.type = -1; 
return oneAny; } } else if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_LOCAL) { return anotherAny; } else if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { if (!fStringPool.stringInList(anotherAny.enumeration, oneAny.name.uri) ) { return anotherAny; } else { int[] anotherAnyURIs = fStringPool.stringListAsIntArray(anotherAny.enumeration); int newList = fStringPool.startStringList(); for (int i=0; i< anotherAnyURIs.length; i++) { if (anotherAnyURIs[i] != oneAny.name.uri ) { fStringPool.addStringToList(newList, anotherAnyURIs[i]); } } fStringPool.finishStringList(newList); anotherAny.enumeration = newList; return anotherAny; } } } if (oneAny.type == XMLAttributeDecl.TYPE_ANY_LOCAL) { if ( anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER || anotherAny.type == XMLAttributeDecl.TYPE_ANY_LOCAL) { return oneAny; } else if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { oneAny.type = -1; return oneAny; } } if (oneAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { if ( anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER){ if (!fStringPool.stringInList(oneAny.enumeration, anotherAny.name.uri) ) { return oneAny; } else { int[] oneAnyURIs = fStringPool.stringListAsIntArray(oneAny.enumeration); int newList = fStringPool.startStringList(); for (int i=0; i< oneAnyURIs.length; i++) { if (oneAnyURIs[i] != anotherAny.name.uri ) { fStringPool.addStringToList(newList, oneAnyURIs[i]); } } fStringPool.finishStringList(newList); oneAny.enumeration = newList; return oneAny; } } else if ( anotherAny.type == XMLAttributeDecl.TYPE_ANY_LOCAL) { oneAny.type = -1; return oneAny; } else if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { int[] result = intersect2sets( fStringPool.stringListAsIntArray(oneAny.enumeration), fStringPool.stringListAsIntArray(anotherAny.enumeration)); int newList = fStringPool.startStringList(); for (int i=0; i<result.length; i++) { fStringPool.addStringToList(newList, result[i]); } fStringPool.finishStringList(newList); oneAny.enumeration = newList; return oneAny; } } // should never go there; return oneAny; } int[] intersect2sets(int[] one, int[] theOther){ int[] result = new int[(one.length>theOther.length?one.length:theOther.length)]; // simple implemention, int count = 0; for (int i=0; i<one.length; i++) { for(int j=0; j<theOther.length; j++) { if (one[i]==theOther[j]) { result[count++] = one[i]; } } } int[] result2 = new int[count]; System.arraycopy(result, 0, result2, 0, count); return result2; } /** * Traverse ComplexType Declaration. * * <complexType * abstract = boolean * base = QName * block = #all or (possibly empty) subset of {extension, restriction} * content = elementOnly | empty | mixed | textOnly * derivedBy = extension | restriction * final = #all or (possibly empty) subset of {extension, restriction} * id = ID * name = NCName> * Content: (annotation? , (((minExclusive | minInclusive | maxExclusive * | maxInclusive | precision | scale | length | minLength * | maxLength | encoding | period | duration | enumeration * | pattern)* | (element | group | all | choice | sequence | any)*) , * ((attribute | attributeGroup)* , anyAttribute?))) * </complexType> * @param complexTypeDecl * @return */ //REVISIT: TO DO, base and derivation ??? 
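// For orientation, a hypothetical declaration in the April-working-draft style handled by this method:
//   <complexType name="priceType" base="decimal" derivedBy="extension" content="textOnly">
//     <attribute name="currency" type="string"/>
//   </complexType>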
private int traverseComplexTypeDecl( Element complexTypeDecl ) throws Exception { String isAbstract = complexTypeDecl.getAttribute( SchemaSymbols.ATT_ABSTRACT ); String base = complexTypeDecl.getAttribute(SchemaSymbols.ATT_BASE); String blockSet = complexTypeDecl.getAttribute( SchemaSymbols.ATT_BLOCK ); String content = complexTypeDecl.getAttribute(SchemaSymbols.ATT_CONTENT); String derivedBy = complexTypeDecl.getAttribute( SchemaSymbols.ATT_DERIVEDBY ); String finalSet = complexTypeDecl.getAttribute( SchemaSymbols.ATT_FINAL ); String typeId = complexTypeDecl.getAttribute( SchemaSymbols.ATTVAL_ID ); String typeName = complexTypeDecl.getAttribute(SchemaSymbols.ATT_NAME); boolean isNamedType = false; // traverseComplexTypeDecl supports the April version of the schema spec. // For candidate recommendation support, traverseComplexTypeDeclCR must be // invoked if (CR_IMPL) return traverseComplexTypeDeclCR(complexTypeDecl); if ( DEBUGGING ) System.out.println("traversing complex Type : " + typeName +","+base+","+content+"."); if (typeName.equals("")) { // gensym a unique name typeName = "#"+fAnonTypeCount++; } else { fCurrentTypeNameStack.push(typeName); isNamedType = true; } if (isTopLevel(complexTypeDecl)) { String fullName = fTargetNSURIString+","+typeName; ComplexTypeInfo temp = (ComplexTypeInfo) fComplexTypeRegistry.get(fullName); if (temp != null ) { return fStringPool.addSymbol(fullName); } } int scopeDefined = fScopeCount++; int previousScope = fCurrentScope; fCurrentScope = scopeDefined; Element child = null; int contentSpecType = -1; int csnType = 0; int left = -2; int right = -2; ComplexTypeInfo baseTypeInfo = null; //if base is a complexType; DatatypeValidator baseTypeValidator = null; //if base is a simple type or a complex type derived from a simpleType DatatypeValidator simpleTypeValidator = null; int baseTypeSymbol = -1; String fullBaseName = ""; boolean baseIsSimpleSimple = false; boolean baseIsComplexSimple = false; boolean baseFromAnotherSchema = false; String baseTypeSchemaURI = null; boolean derivedByRestriction = true; boolean derivedByExtension = false; int baseContentSpecHandle = -1; Element baseTypeNode = null; //int parsedderivedBy = parseComplexDerivedBy(derivedBy); //handle the inhreitance here. if (base.length()>0) { //first check if derivedBy is present if (derivedBy.length() == 0) { // REVISIT: Localize reportGenericSchemaError("derivedBy must be present when base is present in " +SchemaSymbols.ELT_COMPLEXTYPE +" "+ typeName); derivedBy = SchemaSymbols.ATTVAL_EXTENSION; } if (derivedBy.equals(SchemaSymbols.ATTVAL_EXTENSION)) { derivedByRestriction = false; } String prefix = ""; String localpart = base; int colonptr = base.indexOf(":"); if ( colonptr > 0) { prefix = base.substring(0,colonptr); localpart = base.substring(colonptr+1); } int localpartIndex = fStringPool.addSymbol(localpart); String typeURI = resolvePrefixToURI(prefix); // check if the base type is from the same Schema; if ( ! typeURI.equals(fTargetNSURIString) && ! typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && typeURI.length() != 0 ) /*REVISIT, !!!! a hack: for schema that has no target namespace, e.g. 
personal-schema.xml*/{ baseFromAnotherSchema = true; baseTypeSchemaURI = typeURI; baseTypeInfo = getTypeInfoFromNS(typeURI, localpart); if (baseTypeInfo == null) { baseTypeValidator = getTypeValidatorFromNS(typeURI, localpart); if (baseTypeValidator == null) { //TO DO: report error here; System.out.println("Could not find base type " +localpart + " in schema " + typeURI); } else{ baseIsSimpleSimple = true; } } } else { fullBaseName = typeURI+","+localpart; // assume the base is a complexType and try to locate the base type first baseTypeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fullBaseName); // if not found, 2 possibilities: 1: ComplexType in question has not been compiled yet; // 2: base is SimpleTYpe; if (baseTypeInfo == null) { baseTypeValidator = getDatatypeValidator(typeURI, localpart); if (baseTypeValidator == null) { baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_COMPLEXTYPE,localpart); if (baseTypeNode != null) { baseTypeSymbol = traverseComplexTypeDecl( baseTypeNode ); baseTypeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(baseTypeSymbol)); //REVISIT: should it be fullBaseName; } else { baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (baseTypeNode != null) { baseTypeSymbol = traverseSimpleTypeDecl( baseTypeNode ); simpleTypeValidator = baseTypeValidator = getDatatypeValidator(typeURI, localpart); if (simpleTypeValidator == null) { //TO DO: signal error here. } baseIsSimpleSimple = true; } else { // REVISIT: Localize reportGenericSchemaError("Base type could not be found : " + base); } } } else { simpleTypeValidator = baseTypeValidator; baseIsSimpleSimple = true; } } } //Schema Spec : 5.11: Complex Type Definition Properties Correct : 2 if (baseIsSimpleSimple && derivedByRestriction) { // REVISIT: Localize reportGenericSchemaError("base is a simpledType, can't derive by restriction in " + typeName); } //if the base is a complexType if (baseTypeInfo != null ) { //Schema Spec : 5.11: Derivation Valid ( Extension ) 1.1.1 // 5.11: Derivation Valid ( Restriction, Complex ) 1.2.1 if (derivedByRestriction) { //REVISIT: check base Type's finalset does not include "restriction" } else { //REVISIT: check base Type's finalset doest not include "extension" } if ( baseTypeInfo.contentSpecHandle > -1) { if (derivedByRestriction) { //REVISIT: !!! 
really hairy staff to check the particle derivation OK in 5.10 checkParticleDerivationOK(complexTypeDecl, baseTypeNode); } baseContentSpecHandle = baseTypeInfo.contentSpecHandle; } else if ( baseTypeInfo.datatypeValidator != null ) { baseTypeValidator = baseTypeInfo.datatypeValidator; baseIsComplexSimple = true; } } //Schema Spec : 5.11: Derivation Valid ( Extension ) 1.1.1 if (baseIsComplexSimple && !derivedByRestriction ) { // REVISIT: Localize reportGenericSchemaError("base is ComplexSimple, can't derive by extension in " + typeName); } } // END of if (base.length() > 0) {} // skip refinement and annotations child = null; if (baseIsComplexSimple) { contentSpecType = XMLElementDecl.TYPE_SIMPLE; int numEnumerationLiterals = 0; int numFacets = 0; Hashtable facetData = new Hashtable(); Vector enumData = new Vector(); //REVISIT: there is a better way to do this, for (child = XUtil.getFirstChildElement(complexTypeDecl); child != null && (child.getLocalName().equals(SchemaSymbols.ELT_MINEXCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MININCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MAXEXCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MAXINCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_PRECISION) || child.getLocalName().equals(SchemaSymbols.ELT_SCALE) || child.getLocalName().equals(SchemaSymbols.ELT_LENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_MINLENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_MAXLENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_ENCODING) || child.getLocalName().equals(SchemaSymbols.ELT_PERIOD) || child.getLocalName().equals(SchemaSymbols.ELT_DURATION) || child.getLocalName().equals(SchemaSymbols.ELT_ENUMERATION) || child.getLocalName().equals(SchemaSymbols.ELT_PATTERN) || child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)); child = XUtil.getNextSiblingElement(child)) { if ( child.getNodeType() == Node.ELEMENT_NODE ) { Element facetElt = (Element) child; numFacets++; if (facetElt.getLocalName().equals(SchemaSymbols.ELT_ENUMERATION)) { numEnumerationLiterals++; enumData.addElement(facetElt.getAttribute(SchemaSymbols.ATT_VALUE)); //Enumerations can have annotations ? 
( 0 | 1 ) Element enumContent = XUtil.getFirstChildElement( facetElt ); if( enumContent != null && enumContent.getLocalName().equals( SchemaSymbols.ELT_ANNOTATION ) ){ traverseAnnotationDecl( child ); } // TO DO: if Jeff check in new changes to TraverseSimpleType, copy them over } else { facetData.put(facetElt.getLocalName(),facetElt.getAttribute( SchemaSymbols.ATT_VALUE )); } } } if (numEnumerationLiterals > 0) { facetData.put(SchemaSymbols.ELT_ENUMERATION, enumData); } //if (numFacets > 0) // baseTypeValidator.setFacets(facetData, derivedBy ); if (numFacets > 0) { simpleTypeValidator = fDatatypeRegistry.createDatatypeValidator( typeName, baseTypeValidator, facetData, false ); } else simpleTypeValidator = baseTypeValidator; if (child != null) { // REVISIT: Localize reportGenericSchemaError("Invalid child '"+child.getLocalName()+"' in complexType : '" + typeName + "', because it restricts another complexSimpleType"); } } // if content = textonly, base is a datatype if (content.equals(SchemaSymbols.ATTVAL_TEXTONLY)) { //TO DO if (base.length() == 0) { simpleTypeValidator = baseTypeValidator = getDatatypeValidator("", SchemaSymbols.ATTVAL_STRING); } else if ( baseTypeValidator == null && baseTypeInfo != null && baseTypeInfo.datatypeValidator==null ) // must be datatype reportSchemaError(SchemaMessageProvider.NotADatatype, new Object [] { base }); //REVISIT check forward refs //handle datatypes contentSpecType = XMLElementDecl.TYPE_SIMPLE; /** * Traverse ComplexType Declaration - CR Implementation. * * <complexType * abstract = boolean * block = #all or (possibly empty) subset of {extension, restriction} * final = #all or (possibly empty) subset of {extension, restriction} * id = ID * mixed = boolean : false * name = NCName> * Content: (annotation? , (simpleContent | complexContent | * ( (group | all | choice | sequence)? 
, * ( (attribute | attributeGroup)* , anyAttribute?)))) * </complexType> * @param complexTypeDecl * @return */ private int traverseComplexTypeDeclCR( Element complexTypeDecl ) throws Exception { // Get the attributes of the type String isAbstract = complexTypeDecl.getAttribute( SchemaSymbols.ATT_ABSTRACT ); String blockSet = complexTypeDecl.getAttribute( SchemaSymbols.ATT_BLOCK ); String finalSet = complexTypeDecl.getAttribute( SchemaSymbols.ATT_FINAL ); String typeId = complexTypeDecl.getAttribute( SchemaSymbols.ATTVAL_ID ); String typeName = complexTypeDecl.getAttribute(SchemaSymbols.ATT_NAME); String mixed = complexTypeDecl.getAttribute(SchemaSymbols.ATT_MIXED); boolean isNamedType = false; if ( DEBUGGING ) System.out.println("traversing complex Type : " + typeName); // Generate a type name, if one wasn't specified if (typeName.equals("")) { // gensym a unique name typeName = "#"+fAnonTypeCount++; } else { fCurrentTypeNameStack.push(typeName); isNamedType = true; } int typeNameIndex = fStringPool.addSymbol(typeName); // Check if the type has already been registered if (isTopLevel(complexTypeDecl)) { String fullName = fTargetNSURIString+","+typeName; ComplexTypeInfo temp = (ComplexTypeInfo) fComplexTypeRegistry.get(fullName); if (temp != null ) { return fStringPool.addSymbol(fullName); } } int scopeDefined = fScopeCount++; int previousScope = fCurrentScope; fCurrentScope = scopeDefined; Element child = null; ComplexTypeInfo typeInfo = new ComplexTypeInfo(); // First, handle any ANNOTATION declaration and get next child child = checkContent(complexTypeDecl,XUtil.getFirstChildElement(complexTypeDecl), true); // Process the content of the complex type declaration if (child==null) { // EMPTY complexType with complexContent processComplexContent(typeNameIndex, child, typeInfo, null, false); } else { String childName = child.getLocalName(); int index = -2; if (childName.equals(SchemaSymbols.ELT_SIMPLECONTENT)) { // SIMPLE CONTENT element traverseSimpleContentDecl(typeNameIndex, child, typeInfo); if (XUtil.getNextSiblingElement(child) != null) reportGenericSchemaError("SimpleContent must be the only child in complexType " + typeName); } else if (childName.equals(SchemaSymbols.ELT_COMPLEXCONTENT)) { // COMPLEX CONTENT element traverseComplexContentDecl(typeNameIndex, child, typeInfo, mixed.equals(SchemaSymbols.ATTVAL_TRUE) ? true:false); if (XUtil.getNextSiblingElement(child) != null) reportGenericSchemaError("ComplexContent must be the only child in complexType " + typeName); } else { // We must have .... // GROUP, ALL, SEQUENCE or CHOICE, followed by optional attributes // Note that it's possible that only attributes are specified. processComplexContent(typeNameIndex, child, typeInfo, null, mixed.equals(SchemaSymbols.ATTVAL_TRUE) ? true:false); } } // Finish the setup of the typeInfo and register the type typeInfo.scopeDefined = scopeDefined; typeInfo.blockSet = parseBlockSet(blockSet); typeInfo.finalSet = parseFinalSet(finalSet); typeInfo.isAbstract = isAbstract.equals(SchemaSymbols.ATTVAL_TRUE) ? 
true:false ; if (!typeName.startsWith("#")) { typeName = fTargetNSURIString + "," + typeName; } typeInfo.typeName = new String(typeName); if ( DEBUGGING ) System.out.println(">>>add complex Type to Registry: " + typeName + " baseDTValidator=" + typeInfo.baseDataTypeValidator + " baseCTInfo=" + typeInfo.baseComplexTypeInfo + " derivedBy=" + typeInfo.derivedBy + " contentType=" + typeInfo.contentType + " contentSpecHandle=" + typeInfo.contentSpecHandle + " datatypeValidator=" + typeInfo.datatypeValidator); fComplexTypeRegistry.put(typeName,typeInfo); // Before exiting, restore the scope, mainly for nested anonymous types fCurrentScope = previousScope; if (isNamedType) { fCurrentTypeNameStack.pop(); checkRecursingComplexType(); } //set template element's typeInfo fSchemaGrammar.setElementComplexTypeInfo(typeInfo.templateElementIndex, typeInfo); typeNameIndex = fStringPool.addSymbol(typeName); return typeNameIndex; } // end traverseComplexTypeDeclCR /** * Traverse SimpleContent Declaration * * <simpleContent * id = ID * {any attributes with non-schema namespace...}> * * Content: (annotation? , (restriction | extension)) * </simpleContent> * * <restriction * base = QNAME * id = ID * {any attributes with non-schema namespace...}> * * Content: (annotation? , ((minExclusive | minInclusive | maxExclusive * | maxInclusive | precision | scale | length | minLength * | maxLength | encoding | period | duration | enumeration * | pattern | whiteSpace)*) ? , * ((attribute | attributeGroup)* , anyAttribute?)) * </restriction> * * <extension * base = QNAME * id = ID * {any attributes with non-schema namespace...}> * Content: (annotation? , ((attribute | attributeGroup)* , anyAttribute?)) * </extension> * * @param typeNameIndex * @param simpleContentTypeDecl * @param typeInfo * @return */ private void traverseSimpleContentDecl(int typeNameIndex, Element simpleContentDecl, ComplexTypeInfo typeInfo) throws Exception { String typeName = fStringPool.toString(typeNameIndex); // Get attributes. String simpleContentTypeId = simpleContentDecl.getAttribute(SchemaSymbols.ATTVAL_ID); // Set the content type to be simple, and initialize content spec handle typeInfo.contentType = XMLElementDecl.TYPE_SIMPLE; typeInfo.contentSpecHandle = -1; Element simpleContent = checkContent(simpleContentDecl, XUtil.getFirstChildElement(simpleContentDecl),false); // If there are no children, return // TODO - should we raise an exception? if (simpleContent==null) return; // The content should be either "restriction" or "extension" String simpleContentName = simpleContent.getLocalName(); if (simpleContentName.equals(SchemaSymbols.ELT_RESTRICTION)) typeInfo.derivedBy = SchemaSymbols.RESTRICTION; else if (simpleContentName.equals(SchemaSymbols.ELT_EXTENSION)) typeInfo.derivedBy = SchemaSymbols.EXTENSION; else reportGenericSchemaError("Invalid content for simpleContent"); // Get the attributes of the restriction/extension element String base = simpleContent.getAttribute(SchemaSymbols.ATT_BASE); String typeId = simpleContent.getAttribute(SchemaSymbols.ATTVAL_ID); // Skip over any annotations in the restriction or extension elements // todo - check whether the content can be empty...
Element content = checkContent(simpleContent, XUtil.getFirstChildElement(simpleContent),true); // Handle the base type name if (base.length() == 0) { reportGenericSchemaError("Must specify BASE attribute"); return; } QName baseQName = parseBase(base); processBaseTypeInfo(baseQName,typeInfo); // check that the base isn't a complex type with complex content if (typeInfo.baseComplexTypeInfo != null) { if (typeInfo.baseComplexTypeInfo.contentSpecHandle > -1) { reportGenericSchemaError("Base type cannot have complexContent"); return; } } // Process the content of the derivation Element attrNode = null; // RESTRICTION if (typeInfo.derivedBy==SchemaSymbols.RESTRICTION) { //Schema Spec : 5.11: Complex Type Definition Properties Correct : 2 if (typeInfo.baseDataTypeValidator != null) { reportGenericSchemaError("base is a simpleType, can't derive by restriction in " + typeName); return; } // Build up facet information int numEnumerationLiterals = 0; int numFacets = 0; Hashtable facetData = new Hashtable(); Vector enumData = new Vector(); Element child; //REVISIT: there is a better way to do this, for (child = content; child != null && (child.getLocalName().equals(SchemaSymbols.ELT_MINEXCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MININCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MAXEXCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MAXINCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_PRECISION) || child.getLocalName().equals(SchemaSymbols.ELT_SCALE) || child.getLocalName().equals(SchemaSymbols.ELT_LENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_MINLENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_MAXLENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_ENCODING) || child.getLocalName().equals(SchemaSymbols.ELT_PERIOD) || child.getLocalName().equals(SchemaSymbols.ELT_DURATION) || child.getLocalName().equals(SchemaSymbols.ELT_ENUMERATION) || child.getLocalName().equals(SchemaSymbols.ELT_PATTERN) || child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)); child = XUtil.getNextSiblingElement(child)) { if ( child.getNodeType() == Node.ELEMENT_NODE ) { Element facetElt = (Element) child; numFacets++; if (facetElt.getLocalName().equals(SchemaSymbols.ELT_ENUMERATION)) { numEnumerationLiterals++; enumData.addElement(facetElt.getAttribute(SchemaSymbols.ATT_VALUE)); //Enumerations can have annotations ? ( 0 | 1 ) Element enumContent = XUtil.getFirstChildElement( facetElt ); if( enumContent != null && enumContent.getLocalName().equals ( SchemaSymbols.ELT_ANNOTATION )){ traverseAnnotationDecl( child ); } // TO DO: if Jeff checks in new changes to TraverseSimpleType, copy them over } else { facetData.put(facetElt.getLocalName(), facetElt.getAttribute( SchemaSymbols.ATT_VALUE )); } } } // end of for loop thru facets if (numEnumerationLiterals > 0) { facetData.put(SchemaSymbols.ELT_ENUMERATION, enumData); } // If there were facets, create a new data type validator, otherwise // the data type validator is from the base if (numFacets > 0) { typeInfo.datatypeValidator = fDatatypeRegistry.createDatatypeValidator( typeName, typeInfo.baseDataTypeValidator, facetData, false); } else typeInfo.datatypeValidator = typeInfo.baseComplexTypeInfo.datatypeValidator; if (child != null) { // REVISIT: Shouldn't we allow attributes???
reportGenericSchemaError("Invalid child '"+child.getLocalName()+ "' in complexType : '" + typeName + "', because it restricts another complexSimpleType"); } } // end RESTRICTION // EXTENSION else { typeInfo.datatypeValidator = typeInfo.baseDataTypeValidator; // Look for attributes if (content != null) { // Check that we have attributes if (!isAttrOrAttrGroup(content)) { reportGenericSchemaError("Invalid child '"+content.getLocalName()+ "' in complexType : '" + typeName); } else { attrNode = content; } } } // add a template element to the grammar element decl pool for the type int templateElementNameIndex = fStringPool.addSymbol("$"+typeName); typeInfo.templateElementIndex = fSchemaGrammar.addElementDecl( new QName(-1, templateElementNameIndex,typeNameIndex,fTargetNSURI), (fTargetNSURI==-1) ? -1 : fCurrentScope, typeInfo.scopeDefined, typeInfo.contentType, typeInfo.contentSpecHandle, -1, typeInfo.datatypeValidator); typeInfo.attlistHead = fSchemaGrammar.getFirstAttributeDeclIndex( typeInfo.templateElementIndex); // Process attributes processAttributes(attrNode,baseQName,typeInfo); } // end traverseSimpleContentDecl /** * Traverse complexContent Declaration * * <complexContent * id = ID * mixed = boolean * {any attributes with non-schema namespace...}> * * Content: (annotation? , (restriction | extension)) * </complexContent> * * <restriction * base = QNAME * id = ID * {any attributes with non-schema namespace...}> * * Content: (annotation? , (group | all | choice | sequence)?, * ((attribute | attributeGroup)* , anyAttribute?)) * </restriction> * * <extension * base = QNAME * id = ID * {any attributes with non-schema namespace...}> * Content: (annotation? , (group | all | choice | sequence)?, * ((attribute | attributeGroup)* , anyAttribute?)) * </extension> * * @param typeNameIndex * @param simpleContentTypeDecl * @param typeInfo * @return */ private void traverseComplexContentDecl(int typeNameIndex, Element complexContentDecl, ComplexTypeInfo typeInfo, boolean mixedOnComplexTypeDecl) throws Exception { String typeName = fStringPool.toString(typeNameIndex); // Get the attributes String typeId = complexContentDecl.getAttribute(SchemaSymbols.ATTVAL_ID); String mixed = complexContentDecl.getAttribute(SchemaSymbols.ATT_MIXED); // Determine whether the content is mixed, or element-only // Setting here overrides any setting on the complex type decl boolean isMixed = mixedOnComplexTypeDecl; if (mixed.equals(SchemaSymbols.ATTVAL_TRUE)) isMixed = true; else if (mixed.equals(SchemaSymbols.ATTVAL_FALSE)) isMixed = false; // Since the type must have complex content, set the simple type validators // to null typeInfo.datatypeValidator = null; typeInfo.baseDataTypeValidator = null; Element complexContent = checkContent(complexContentDecl, XUtil.getFirstChildElement(complexContentDecl),false); // If there are no children, return // TODO - should we raise an exception? 
if (complexContent==null) return; // The content should be either "restriction" or "extension" String complexContentName = complexContent.getLocalName(); if (complexContentName.equals(SchemaSymbols.ELT_RESTRICTION)) typeInfo.derivedBy = SchemaSymbols.RESTRICTION; else if (complexContentName.equals(SchemaSymbols.ELT_EXTENSION)) typeInfo.derivedBy = SchemaSymbols.EXTENSION; else reportGenericSchemaError("Invalid content for complexContent"); // Get the attributes of the restriction/extension element String base = complexContent.getAttribute(SchemaSymbols.ATT_BASE); String complexContentTypeId=complexContent.getAttribute(SchemaSymbols.ATTVAL_ID); // Skip over any annotations in the restriction or extension elements // TODO - check whether the content can be empty... Element content = checkContent(complexContent, XUtil.getFirstChildElement(complexContent),true); // Handle the base type name if (base.length() == 0) { reportGenericSchemaError("Must specify BASE attribute"); return; } QName baseQName = parseBase(base); // check if the base is "anyType" String baseTypeURI = fStringPool.toString(baseQName.uri); String baseLocalName = fStringPool.toString(baseQName.localpart); if (!(baseTypeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && baseLocalName.equals("anyType"))) { processBaseTypeInfo(baseQName,typeInfo); //Check that the base is a complex type if (typeInfo.baseComplexTypeInfo == null) { reportGenericSchemaError("Base type must be complex"); return; } } // Process the elements that make up the content processComplexContent(typeNameIndex,content,typeInfo,baseQName,isMixed); } // end traverseComplexContentDecl /** * Parse base string * * @param base * @return QName */ private QName parseBase(String base) throws Exception { String prefix = ""; String localpart = base; int colonptr = base.indexOf(":"); if ( colonptr > 0) { prefix = base.substring(0,colonptr); localpart = base.substring(colonptr+1); } int nameIndex = fStringPool.addSymbol(base); int prefixIndex = fStringPool.addSymbol(prefix); int localpartIndex = fStringPool.addSymbol(localpart); int URIindex = fStringPool.addSymbol(resolvePrefixToURI(prefix)); return new QName(prefixIndex,localpartIndex,nameIndex,URIindex); } /** * Check if base is from another schema * * @param baseName * @return boolean */ private boolean baseFromAnotherSchema(QName baseName) throws Exception { String typeURI = fStringPool.toString(baseName.uri); if ( ! typeURI.equals(fTargetNSURIString) && ! typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && typeURI.length() != 0 ) //REVISIT, !!!! a hack: for schema that has no //target namespace, e.g. personal-schema.xml return true; else return false; } /** * Process "base" information * * @param baseTypeInfo * @param baseName * @param typeInfo * @return */ private void processBaseTypeInfo(QName baseName, ComplexTypeInfo typeInfo) throws Exception { ComplexTypeInfo baseComplexTypeInfo = null; DatatypeValidator baseDTValidator = null; String typeURI = fStringPool.toString(baseName.uri); String localpart = fStringPool.toString(baseName.localpart); String base = fStringPool.toString(baseName.rawname); String fullBaseName; Element baseTypeNode; int baseTypeSymbol; // check if the base is "anyType" if (typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && localpart.equals("anyType")) { return; } // check if the base type is from the same Schema if ( ! typeURI.equals(fTargetNSURIString) && ! typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && typeURI.length() != 0 ) { //REVISIT, !!!!
a hack: for schema that has no target namespace, // e.g. personal-schema.xml baseComplexTypeInfo = getTypeInfoFromNS(typeURI, localpart); if (baseComplexTypeInfo == null) { baseDTValidator = getTypeValidatorFromNS(typeURI, localpart); if (baseDTValidator == null) { reportGenericSchemaError("Could not find base type " +localpart + " in schema " + typeURI); return; } } } // type must be from same schema else { fullBaseName = typeURI+","+localpart; // assume the base is a complexType and try to locate the base type first baseComplexTypeInfo= (ComplexTypeInfo) fComplexTypeRegistry.get(fullBaseName); // if not found, 2 possibilities: // 1: ComplexType in question has not been compiled yet; // 2: base is SimpleTYpe; if (baseComplexTypeInfo == null) { baseDTValidator = getDatatypeValidator(typeURI, localpart); if (baseDTValidator == null) { baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_COMPLEXTYPE,localpart); if (baseTypeNode != null) { baseTypeSymbol = traverseComplexTypeDecl( baseTypeNode ); baseComplexTypeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(baseTypeSymbol)); //REVISIT: should it be fullBaseName; } else { baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (baseTypeNode != null) { baseTypeSymbol = traverseSimpleTypeDecl( baseTypeNode ); baseDTValidator = getDatatypeValidator(typeURI, localpart); if (baseDTValidator == null) { //TO DO: signal error here. } } else { // REVISIT: Localize reportGenericSchemaError("Base type could not be found : " + base); return; } } } } } // end else (type must be from same schema) if (baseComplexTypeInfo != null) { typeInfo.baseComplexTypeInfo = baseComplexTypeInfo; typeInfo.baseDataTypeValidator = baseComplexTypeInfo.datatypeValidator; } else typeInfo.baseDataTypeValidator = baseDTValidator; } // end processBaseTypeInfo /** * Process content which is complex * * (group | all | choice | sequence) ? , * ((attribute | attributeGroup)* , anyAttribute?)) * * @param typeNameIndex * @param complexContentChild * @param typeInfo * @return */ private void processComplexContent(int typeNameIndex, Element complexContentChild, ComplexTypeInfo typeInfo, QName baseName, boolean isMixed) throws Exception { Element attrNode = null; int index=-2; if (complexContentChild != null) { // GROUP, ALL, SEQUENCE or CHOICE, followed by attributes, if specified. // Note that it's possible that only attributes are specified. 
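// For instance (illustrative fragment only), given <complexType name="itemType"> <sequence> ... </sequence> <attribute name="partNum" type="string"/> </complexType>
// complexContentChild is the <sequence> element here and attrNode ends up pointing at the <attribute> that follows it.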
String childName = complexContentChild.getLocalName(); if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = expandContentModel(traverseGroupDecl(complexContentChild), complexContentChild); attrNode = XUtil.getNextSiblingElement(complexContentChild); } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = expandContentModel(traverseSequence(complexContentChild), complexContentChild); attrNode = XUtil.getNextSiblingElement(complexContentChild); } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = expandContentModel(traverseChoice(complexContentChild), complexContentChild); attrNode = XUtil.getNextSiblingElement(complexContentChild); } else if (childName.equals(SchemaSymbols.ELT_ALL)) { index = expandContentModel(traverseAll(complexContentChild), complexContentChild); attrNode = XUtil.getNextSiblingElement(complexContentChild); //TO DO: REVISIT //check that minOccurs = 1 and maxOccurs = 1 } else if (isAttrOrAttrGroup(complexContentChild)) { // reset the contentType typeInfo.contentType = XMLElementDecl.TYPE_ANY; attrNode = complexContentChild; } else { reportGenericSchemaError("Invalid child of a complex type "+ childName); } } if (isMixed) { // TODO - check to see if we MUST have an element. What if only attributes // were specified?? // add #PCDATA leaf int pcdataNode = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, -1, // -1 means "#PCDATA" is name -1, false); // If there was an element, the content spec becomes a choice of PCDATA and // the element if (index != -2) index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_CHOICE,pcdataNode,index,false); else index = pcdataNode; } typeInfo.contentSpecHandle = index; // Merge in information from base, if it exists if (typeInfo.baseComplexTypeInfo != null) { int baseContentSpecHandle = typeInfo.baseComplexTypeInfo.contentSpecHandle; if (typeInfo.derivedBy == SchemaSymbols.RESTRICTION) { //REVISIT: !!!really hairy stuff to check the particle derivation OK in 5.10 //checkParticleDerivationOK(); } else { // Compose the final content model by concatenating the base and the // current in sequence if (baseFromAnotherSchema(baseName)) { String baseSchemaURI = fStringPool.toString(baseName.uri); SchemaGrammar aGrammar= (SchemaGrammar) fGrammarResolver.getGrammar( baseSchemaURI); baseContentSpecHandle = importContentSpec(aGrammar, baseContentSpecHandle); } if (typeInfo.contentSpecHandle == -2) { typeInfo.contentSpecHandle = baseContentSpecHandle; } else { typeInfo.contentSpecHandle = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ, baseContentSpecHandle, typeInfo.contentSpecHandle, false); } } } else { typeInfo.derivedBy = 0; } // Set the content type if (isMixed) typeInfo.contentType = XMLElementDecl.TYPE_MIXED; else if (typeInfo.contentSpecHandle == -2) typeInfo.contentType = XMLElementDecl.TYPE_EMPTY; else typeInfo.contentType = XMLElementDecl.TYPE_CHILDREN; // add a template element to the grammar element decl pool. String typeName = fStringPool.toString(typeNameIndex); int templateElementNameIndex = fStringPool.addSymbol("$"+typeName); typeInfo.templateElementIndex = fSchemaGrammar.addElementDecl( new QName(-1, templateElementNameIndex,typeNameIndex,fTargetNSURI), (fTargetNSURI==-1) ? 
-1 : fCurrentScope, typeInfo.scopeDefined, typeInfo.contentType, typeInfo.contentSpecHandle, -1, typeInfo.datatypeValidator); typeInfo.attlistHead = fSchemaGrammar.getFirstAttributeDeclIndex( typeInfo.templateElementIndex); // Now, check attributes and handle if (attrNode !=null) { if (!isAttrOrAttrGroup(attrNode)) reportGenericSchemaError("Invalid child of a complex type"); else processAttributes(attrNode,baseName,typeInfo); } else if (typeInfo.baseComplexTypeInfo != null) processAttributes(null,baseName,typeInfo); } // end processComplexContent /** * Process attributes of a complex type * * @param attrNode * @param typeInfo * @return */ private void processAttributes(Element attrNode, QName baseName, ComplexTypeInfo typeInfo) throws Exception { XMLAttributeDecl attWildcard = null; Vector anyAttDecls = new Vector(); Element child; for (child = attrNode; child != null; child = XUtil.getNextSiblingElement(child)) { String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_ATTRIBUTE)) { traverseAttributeDecl(child, typeInfo); } else if ( childName.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) ) { traverseAttributeGroupDecl(child,typeInfo,anyAttDecls); } else if ( childName.equals(SchemaSymbols.ELT_ANYATTRIBUTE) ) { attWildcard = traverseAnyAttribute(child); } } if (attWildcard != null) { XMLAttributeDecl fromGroup = null; final int count = anyAttDecls.size(); if ( count > 0) { fromGroup = (XMLAttributeDecl) anyAttDecls.elementAt(0); for (int i=1; i<count; i++) { fromGroup = mergeTwoAnyAttribute( fromGroup,(XMLAttributeDecl)anyAttDecls.elementAt(i)); } } if (fromGroup != null) { int saveProcessContents = attWildcard.defaultType; attWildcard = mergeTwoAnyAttribute(attWildcard, fromGroup); attWildcard.defaultType = saveProcessContents; } } else { //REVISIT: unclear in the Scheme Structures 4.3.3 what to do in this case } // merge in base type's attribute decls XMLAttributeDecl baseAttWildcard = null; ComplexTypeInfo baseTypeInfo = typeInfo.baseComplexTypeInfo; if (baseTypeInfo != null && baseTypeInfo.attlistHead > -1 ) { int attDefIndex = baseTypeInfo.attlistHead; SchemaGrammar aGrammar = fSchemaGrammar; String baseTypeSchemaURI = baseFromAnotherSchema(baseName)? 
fStringPool.toString(baseName.uri):null; if (baseTypeSchemaURI != null) { aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(baseTypeSchemaURI); } if (aGrammar == null) { //reportGenericSchemaError("In complexType "+typeName+", can NOT find the grammar "+ // "with targetNamespace" + baseTypeSchemaURI+ // "for the base type"); } else while ( attDefIndex > -1 ) { fTempAttributeDecl.clear(); aGrammar.getAttributeDecl(attDefIndex, fTempAttributeDecl); if (fTempAttributeDecl.type == XMLAttributeDecl.TYPE_ANY_ANY ||fTempAttributeDecl.type == XMLAttributeDecl.TYPE_ANY_LIST ||fTempAttributeDecl.type == XMLAttributeDecl.TYPE_ANY_LOCAL ||fTempAttributeDecl.type == XMLAttributeDecl.TYPE_ANY_OTHER ) { if (attWildcard == null) { baseAttWildcard = fTempAttributeDecl; } attDefIndex = aGrammar.getNextAttributeDeclIndex(attDefIndex); continue; } // if found a duplicate, if it is derived by restriction, // then skip the one from the base type int temp = fSchemaGrammar.getAttributeDeclIndex(typeInfo.templateElementIndex, fTempAttributeDecl.name); if ( temp > -1) { if (typeInfo.derivedBy==SchemaSymbols.RESTRICTION) { attDefIndex = fSchemaGrammar.getNextAttributeDeclIndex(attDefIndex); continue; } } fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, fTempAttributeDecl.name, fTempAttributeDecl.type, fTempAttributeDecl.enumeration, fTempAttributeDecl.defaultType, fTempAttributeDecl.defaultValue, fTempAttributeDecl.datatypeValidator, fTempAttributeDecl.list); attDefIndex = aGrammar.getNextAttributeDeclIndex(attDefIndex); } } // att wildcard will inserted after all attributes were processed if (attWildcard != null) { if (attWildcard.type != -1) { fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, attWildcard.name, attWildcard.type, attWildcard.enumeration, attWildcard.defaultType, attWildcard.defaultValue, attWildcard.datatypeValidator, attWildcard.list); } else { //REVISIT: unclear in Schema spec if should report error here. } } else if (baseAttWildcard != null) { fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, baseAttWildcard.name, baseAttWildcard.type, baseAttWildcard.enumeration, baseAttWildcard.defaultType, baseAttWildcard.defaultValue, baseAttWildcard.datatypeValidator, baseAttWildcard.list); } typeInfo.attlistHead = fSchemaGrammar.getFirstAttributeDeclIndex (typeInfo.templateElementIndex); } // end processAttributes private boolean isAttrOrAttrGroup(Element e) { String elementName = e.getLocalName(); if (elementName.equals(SchemaSymbols.ELT_ATTRIBUTE) || elementName.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) || elementName.equals(SchemaSymbols.ELT_ANYATTRIBUTE)) return true; else return false; } private void checkRecursingComplexType() throws Exception { if ( fCurrentTypeNameStack.empty() ) { if (! fElementRecurseComplex.isEmpty() ) { Enumeration e = fElementRecurseComplex.keys(); while( e.hasMoreElements() ) { QName nameThenScope = (QName) e.nextElement(); String typeName = (String) fElementRecurseComplex.get(nameThenScope); int eltUriIndex = nameThenScope.uri; int eltNameIndex = nameThenScope.localpart; int enclosingScope = nameThenScope.prefix; ComplexTypeInfo typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fTargetNSURIString+","+typeName); if (typeInfo==null) { throw new Exception ( "Internal Error in void checkRecursingComplexType(). 
" ); } else { int elementIndex = fSchemaGrammar.addElementDecl(new QName(-1, eltNameIndex, eltNameIndex, eltUriIndex), enclosingScope, typeInfo.scopeDefined, typeInfo.contentType, typeInfo.contentSpecHandle, typeInfo.attlistHead, typeInfo.datatypeValidator); fSchemaGrammar.setElementComplexTypeInfo(elementIndex, typeInfo); } } fElementRecurseComplex.clear(); } } } private void checkParticleDerivationOK(Element derivedTypeNode, Element baseTypeNode) { //TO DO: !!! } private int importContentSpec(SchemaGrammar aGrammar, int contentSpecHead ) throws Exception { XMLContentSpec ctsp = new XMLContentSpec(); aGrammar.getContentSpec(contentSpecHead, ctsp); int left = -1; int right = -1; if ( ctsp.type == ctsp.CONTENTSPECNODE_LEAF || (ctsp.type & 0x0f) == ctsp.CONTENTSPECNODE_ANY || (ctsp.type & 0x0f) == ctsp.CONTENTSPECNODE_ANY_LOCAL || (ctsp.type & 0x0f) == ctsp.CONTENTSPECNODE_ANY_OTHER ) { return fSchemaGrammar.addContentSpecNode(ctsp.type, ctsp.value, ctsp.otherValue, false); } else { if ( ctsp.value == -1 ) { left = -1; } else { left = importContentSpec(aGrammar, ctsp.value); } if ( ctsp.otherValue == -1 ) { right = -1; } else { right = importContentSpec(aGrammar, ctsp.otherValue); } return fSchemaGrammar.addContentSpecNode(ctsp.type, left, right, false); } } private int expandContentModel ( int index, Element particle) throws Exception { String minOccurs = particle.getAttribute(SchemaSymbols.ATT_MINOCCURS).trim(); String maxOccurs = particle.getAttribute(SchemaSymbols.ATT_MAXOCCURS).trim(); int min=1, max=1; if(minOccurs.equals("0") && maxOccurs.equals("0")){ return -2; } if (minOccurs.equals("")) { minOccurs = "1"; } if (CR_IMPL) { //CR IMPLEMENTATION if (maxOccurs.equals("")) { maxOccurs = "1"; } } else { //WORKING DRAFT IMPLEMENTATION if (maxOccurs.equals("") ){ if ( minOccurs.equals("0")) { maxOccurs = "1"; } else { maxOccurs = minOccurs; } } } int leafIndex = index; //REVISIT: !!! minoccurs, maxoccurs. if (minOccurs.equals("1")&& maxOccurs.equals("1")) { } else if (minOccurs.equals("0")&& maxOccurs.equals("1")) { //zero or one index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_ZERO_OR_ONE, index, -1, false); } else if (minOccurs.equals("0")&& maxOccurs.equals("unbounded")) { //zero or more index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_ZERO_OR_MORE, index, -1, false); } else if (minOccurs.equals("1")&& maxOccurs.equals("unbounded")) { //one or more index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_ONE_OR_MORE, index, -1, false); } else if (maxOccurs.equals("unbounded") ) { // >=2 or more try { min = Integer.parseInt(minOccurs); } catch (Exception e) { reportSchemaError(SchemaMessageProvider.GenericError, new Object [] { "illegal value for minOccurs : '" +e.getMessage()+ "' " }); } if (min<2) { //REVISIT: report Error here } // => a,a,..,a+ index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_ONE_OR_MORE, index, -1, false); for (int i=0; i < (min-1); i++) { index = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ, leafIndex, index, false); } } else { // {n,m} => a,a,a,...(a),(a),... 
try { min = Integer.parseInt(minOccurs); max = Integer.parseInt(maxOccurs); } catch (Exception e){ reportSchemaError(SchemaMessageProvider.GenericError, new Object [] { "illegal value for minOccurs or maxOccurs : '" +e.getMessage()+ "' "}); } if (min==0) { int optional = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_ZERO_OR_ONE, leafIndex, -1, false); index = optional; for (int i=0; i < (max-min-1); i++) { index = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ, index, optional, false); } } else { for (int i=0; i<(min-1); i++) { index = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ, index, leafIndex, false); } int optional = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_ZERO_OR_ONE, leafIndex, -1, false); for (int i=0; i < (max-min); i++) { index = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ, index, optional, false); } } } return index; } /** * Traverses Schema attribute declaration. * * <attribute * form = qualified | unqualified * id = ID * name = NCName * ref = QName * type = QName * use = default | fixed | optional | prohibited | required * value = string> * Content: (annotation? , simpleType?) * <attribute/> * * @param attributeDecl * @return * @exception Exception */ private int traverseAttributeDecl( Element attrDecl, ComplexTypeInfo typeInfo ) throws Exception { String attNameStr = attrDecl.getAttribute(SchemaSymbols.ATT_NAME); int attName = fStringPool.addSymbol(attNameStr);// attribute name String isQName = attrDecl.getAttribute(SchemaSymbols.ATT_FORM);//form attribute DatatypeValidator dv = null; // attribute type int attType = -1; boolean attIsList = false; int dataTypeSymbol = -1; String ref = attrDecl.getAttribute(SchemaSymbols.ATT_REF); String datatype = attrDecl.getAttribute(SchemaSymbols.ATT_TYPE); // various tests if 'ref' is present: if(!ref.equals("")) { if(!attNameStr.equals("")) // REVISIT: localize reportGenericSchemaError ( "Attribute " + attNameStr + " cannot refer to another attribute, but it refers to " + ref); if(!datatype.equals("")) // REVISIT: localize reportGenericSchemaError ( "Attribute with reference " + ref + " cannot also contain a type"); } Element simpleTypeChild = findAttributeSimpleType(attrDecl); String localpart = null; if (!ref.equals("")) { String prefix = ""; localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } String uriStr = resolvePrefixToURI(prefix); if (!uriStr.equals(fTargetNSURIString)) { addAttributeDeclFromAnotherSchema(localpart, uriStr, typeInfo); return -1; } Element referredAttribute = getTopLevelComponentByName(SchemaSymbols.ELT_ATTRIBUTE,localpart); if (referredAttribute != null) { traverseAttributeDecl(referredAttribute, typeInfo); } else { if (fAttributeDeclRegistry.get(localpart) != null) { addAttributeDeclFromAnotherSchema(localpart, uriStr, typeInfo); } else // REVISIT: Localize reportGenericSchemaError ( "Couldn't find top level attribute " + ref); } return -1; } if (datatype.equals("")) { if (simpleTypeChild != null) { attType = XMLAttributeDecl.TYPE_SIMPLE; dataTypeSymbol = traverseSimpleTypeDecl(simpleTypeChild); localpart = fStringPool.toString(dataTypeSymbol); } else { attType = XMLAttributeDecl.TYPE_SIMPLE; localpart = "string"; dataTypeSymbol = fStringPool.addSymbol(localpart); } localpart = fStringPool.toString(dataTypeSymbol); dv = fDatatypeRegistry.getDatatypeValidator(localpart); } else { if(simpleTypeChild != 
null) reportGenericSchemaError("Attribute declarations may not contain both a type and a simpleType declaration"); String prefix = ""; localpart = datatype; dataTypeSymbol = fStringPool.addSymbol(localpart); int colonptr = datatype.indexOf(":"); if ( colonptr > 0) { prefix = datatype.substring(0,colonptr); localpart = datatype.substring(colonptr+1); } String typeURI = resolvePrefixToURI(prefix); if ( typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) || typeURI.length()==0) { dv = getDatatypeValidator("", localpart); if (localpart.equals("ID")) { attType = XMLAttributeDecl.TYPE_ID; } else if (localpart.equals("IDREF")) { attType = XMLAttributeDecl.TYPE_IDREF; } else if (localpart.equals("IDREFS")) { attType = XMLAttributeDecl.TYPE_IDREF; attIsList = true; } else if (localpart.equals("ENTITY")) { attType = XMLAttributeDecl.TYPE_ENTITY; } else if (localpart.equals("ENTITIES")) { attType = XMLAttributeDecl.TYPE_ENTITY; attIsList = true; } else if (localpart.equals("NMTOKEN")) { attType = XMLAttributeDecl.TYPE_NMTOKEN; } else if (localpart.equals("NMTOKENS")) { attType = XMLAttributeDecl.TYPE_NMTOKEN; attIsList = true; } else if (localpart.equals(SchemaSymbols.ELT_NOTATION)) { attType = XMLAttributeDecl.TYPE_NOTATION; } else { attType = XMLAttributeDecl.TYPE_SIMPLE; if (dv == null && typeURI.length() == 0) { Element topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (topleveltype != null) { traverseSimpleTypeDecl( topleveltype ); dv = getDatatypeValidator(typeURI, localpart); }else { // REVISIT: Localize reportGenericSchemaError("simpleType not found : " + "("+typeURI+":"+localpart+")"); } } } } else { //isn't of the schema for schemas namespace... // check if the type is from the same Schema dv = getDatatypeValidator(typeURI, localpart); if (dv == null && typeURI.equals(fTargetNSURIString) ) { Element topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (topleveltype != null) { traverseSimpleTypeDecl( topleveltype ); dv = getDatatypeValidator(typeURI, localpart); }else { // REVISIT: Localize reportGenericSchemaError("simpleType not found : " + "("+typeURI+":"+ localpart+")"); } } attType = XMLAttributeDecl.TYPE_SIMPLE; } } // attribute default type int attDefaultType = -1; int attDefaultValue = -1; String use = attrDecl.getAttribute(SchemaSymbols.ATT_USE); boolean prohibited = use.equals(SchemaSymbols.ATTVAL_PROHIBITED); boolean required = use.equals(SchemaSymbols.ATTVAL_REQUIRED); if (dv == null) { // REVISIT: Localize reportGenericSchemaError("could not resolve the type or get a null validator for datatype : " + fStringPool.toString(dataTypeSymbol)); } if (prohibited) attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_PROHIBITED; else if (required) { attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_REQUIRED; } else { // perhaps a controversial change: no "use" in lcoal scope means "fixed"! 
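            // Summary of how the "use" attribute is mapped (read off the branches above
            // and below, not quoted from the spec):
            //   use="prohibited"                          -> DEFAULT_TYPE_PROHIBITED (handled above)
            //   use="required"                            -> DEFAULT_TYPE_REQUIRED   (handled above)
            //   use="fixed", or a local declaration that
            //   is neither top-level nor use="default"    -> DEFAULT_TYPE_FIXED   + value
            //   use="default"                             -> DEFAULT_TYPE_DEFAULT + value
            //   anything else                             -> DEFAULT_TYPE_IMPLIED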
if (use.equals(SchemaSymbols.ATTVAL_FIXED) || (!isTopLevel(attrDecl) && !use.equals(SchemaSymbols.ATTVAL_DEFAULT))) { String fixed = attrDecl.getAttribute(SchemaSymbols.ATT_VALUE); if (!fixed.equals("")) { attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_FIXED; attDefaultValue = fStringPool.addString(fixed); } } else if (use.equals(SchemaSymbols.ATTVAL_DEFAULT)) { // attribute default value String defaultValue = attrDecl.getAttribute(SchemaSymbols.ATT_VALUE); if (!defaultValue.equals("")) { attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_DEFAULT; attDefaultValue = fStringPool.addString(defaultValue); } } else { attDefaultType = XMLAttributeDecl.DEFAULT_TYPE_IMPLIED; } // check default value is valid for the datatype. if (attType == XMLAttributeDecl.TYPE_SIMPLE && attDefaultValue != -1) { try { if (dv != null) //REVISIT dv.validate(fStringPool.toString(attDefaultValue), null); else reportSchemaError(SchemaMessageProvider.NoValidatorFor, new Object [] { datatype }); } catch (InvalidDatatypeValueException idve) { reportSchemaError(SchemaMessageProvider.IncorrectDefaultType, new Object [] { attrDecl.getAttribute(SchemaSymbols.ATT_NAME), idve.getMessage() }); } catch (Exception e) { e.printStackTrace(); System.out.println("Internal error in attribute datatype validation"); } } } int uriIndex = -1; if ( fTargetNSURIString.length() > 0 && ( isQName.equals(SchemaSymbols.ATTVAL_QUALIFIED)|| fAttributeDefaultQualified || isTopLevel(attrDecl) ) ) { uriIndex = fTargetNSURI; } QName attQName = new QName(-1,attName,attName,uriIndex); if ( DEBUGGING ) System.out.println(" the dataType Validator for " + fStringPool.toString(attName) + " is " + dv); //put the top-levels in the attribute decl registry. if (isTopLevel(attrDecl)) { fTempAttributeDecl.datatypeValidator = dv; fTempAttributeDecl.name.setValues(attQName); fTempAttributeDecl.type = attType; fTempAttributeDecl.defaultType = attDefaultType; fTempAttributeDecl.list = attIsList; if (attDefaultValue != -1 ) { fTempAttributeDecl.defaultValue = new String(fStringPool.toString(attDefaultValue)); } fAttributeDeclRegistry.put(attNameStr, new XMLAttributeDecl(fTempAttributeDecl)); } // add attribute to attr decl pool in fSchemaGrammar, if (typeInfo != null) { fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, attQName, attType, dataTypeSymbol, attDefaultType, fStringPool.toString( attDefaultValue), dv, attIsList); } return -1; } // end of method traverseAttribute private int addAttributeDeclFromAnotherSchema( String name, String uriStr, ComplexTypeInfo typeInfo) throws Exception { SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr); if (uriStr == null || ! 
(aGrammar instanceof SchemaGrammar) ) { // REVISIT: Localize reportGenericSchemaError("!!Schema not found in #addAttributeDeclFromAnotherSchema, schema uri : " + uriStr); return -1; } Hashtable attrRegistry = aGrammar.getAttirubteDeclRegistry(); if (attrRegistry == null) { // REVISIT: Localize reportGenericSchemaError("no attribute was defined in schema : " + uriStr); return -1; } XMLAttributeDecl tempAttrDecl = (XMLAttributeDecl) attrRegistry.get(name); if (tempAttrDecl == null) { // REVISIT: Localize reportGenericSchemaError( "no attribute named \"" + name + "\" was defined in schema : " + uriStr); return -1; } if (typeInfo!= null) { fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, tempAttrDecl.name, tempAttrDecl.type, -1, tempAttrDecl.defaultType, tempAttrDecl.defaultValue, tempAttrDecl.datatypeValidator, tempAttrDecl.list); } return 0; } /* * * <attributeGroup * id = ID * name = NCName * ref = QName> * Content: (annotation?, (attribute|attributeGroup), anyAttribute?) * </> * */ private int traverseAttributeGroupDecl( Element attrGrpDecl, ComplexTypeInfo typeInfo, Vector anyAttDecls ) throws Exception { // attribute name int attGrpName = fStringPool.addSymbol(attrGrpDecl.getAttribute(SchemaSymbols.ATT_NAME)); String ref = attrGrpDecl.getAttribute(SchemaSymbols.ATT_REF); // attribute type int attType = -1; int enumeration = -1; if (!ref.equals("")) { if (XUtil.getFirstChildElement(attrGrpDecl) != null) reportSchemaError(SchemaMessageProvider.NoContentForRef, null); String prefix = ""; String localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } String uriStr = resolvePrefixToURI(prefix); if (!uriStr.equals(fTargetNSURIString)) { traverseAttributeGroupDeclFromAnotherSchema(localpart, uriStr, typeInfo, anyAttDecls); return -1; // TO DO // REVISIST: different NS, not supported yet. // REVISIT: Localize //reportGenericSchemaError("Feature not supported: see an attribute from different NS"); } Element referredAttrGrp = getTopLevelComponentByName(SchemaSymbols.ELT_ATTRIBUTEGROUP,localpart); if (referredAttrGrp != null) { traverseAttributeGroupDecl(referredAttrGrp, typeInfo, anyAttDecls); } else { // REVISIT: Localize reportGenericSchemaError ( "Couldn't find top level attributegroup " + ref); } return -1; } for ( Element child = XUtil.getFirstChildElement(attrGrpDecl); child != null ; child = XUtil.getNextSiblingElement(child)) { if ( child.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTE) ){ traverseAttributeDecl(child, typeInfo); } else if ( child.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) ) { traverseAttributeGroupDecl(child, typeInfo,anyAttDecls); } else if ( child.getLocalName().equals(SchemaSymbols.ELT_ANYATTRIBUTE) ) { anyAttDecls.addElement(traverseAnyAttribute(child)); break; } else if (child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION) ) { // REVISIT: what about appInfo } } return -1; } // end of method traverseAttributeGroup private int traverseAttributeGroupDeclFromAnotherSchema( String attGrpName , String uriStr, ComplexTypeInfo typeInfo, Vector anyAttDecls ) throws Exception { SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr); if (uriStr == null || aGrammar == null || ! 
(aGrammar instanceof SchemaGrammar) ) { // REVISIT: Localize reportGenericSchemaError("!!Schema not found in #traverseAttributeGroupDeclFromAnotherSchema, schema uri : " + uriStr); return -1; } // attribute name Element attGrpDecl = (Element) aGrammar.topLevelAttrGrpDecls.get((Object)attGrpName); if (attGrpDecl == null) { // REVISIT: Localize reportGenericSchemaError( "no attribute group named \"" + attGrpName + "\" was defined in schema : " + uriStr); return -1; } NamespacesScope saveNSMapping = fNamespacesScope; int saveTargetNSUri = fTargetNSURI; fTargetNSURI = fStringPool.addSymbol(aGrammar.getTargetNamespaceURI()); fNamespacesScope = aGrammar.getNamespacesScope(); // attribute type int attType = -1; int enumeration = -1; for ( Element child = XUtil.getFirstChildElement(attGrpDecl); child != null ; child = XUtil.getNextSiblingElement(child)) { //child attribute couldn't be a top-level attribute DEFINITION, if ( child.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTE) ){ String childAttName = child.getAttribute(SchemaSymbols.ATT_NAME); if ( childAttName.length() > 0 ) { Hashtable attDeclRegistry = aGrammar.getAttirubteDeclRegistry(); if (attDeclRegistry != null) { if (attDeclRegistry.get((Object)childAttName) != null ){ addAttributeDeclFromAnotherSchema(childAttName, uriStr, typeInfo); return -1; } } } else traverseAttributeDecl(child, typeInfo); } else if ( child.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) ) { traverseAttributeGroupDecl(child, typeInfo, anyAttDecls); } else if ( child.getLocalName().equals(SchemaSymbols.ELT_ANYATTRIBUTE) ) { anyAttDecls.addElement(traverseAnyAttribute(child)); break; } else if (child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION) ) { // REVISIT: what about appInfo } } fNamespacesScope = saveNSMapping; fTargetNSURI = saveTargetNSUri; return -1; } // end of method traverseAttributeGroupFromAnotherSchema // This simple method takes an attribute declaration as a parameter and // returns null if there is no simpleType defined or the simpleType // declaration if one exists. It also throws an error if more than one // <annotation> or <simpleType> group is present. private Element findAttributeSimpleType(Element attrDecl) throws Exception { Element child = XUtil.getFirstChildElement(attrDecl); if (child == null) return null; if (child.getLocalName().equals(SchemaSymbols.ELT_SIMPLETYPE)) return child; if (child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); if (child == null) return null; if (child.getLocalName().equals(SchemaSymbols.ELT_SIMPLETYPE) && XUtil.getNextSiblingElement(child) == null) return child; //REVISIT: localize reportGenericSchemaError ( "An attribute declaration must contain at most one annotation preceding at most one simpleType"); return null; } // end findAttributeSimpleType /** * Traverse element declaration: * <element * abstract = boolean * block = #all or (possibly empty) subset of {equivClass, extension, restriction} * default = string * equivClass = QName * final = #all or (possibly empty) subset of {extension, restriction} * fixed = string * form = qualified | unqualified * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * nullable = boolean * ref = QName * type = QName> * Content: (annotation? , (simpleType | complexType)? , (unique | key | keyref)*) * </element> * * * The following are identity-constraint definitions * <unique * id = ID * name = NCName> * Content: (annotation? 
, (selector , field+)) * </unique> * * <key * id = ID * name = NCName> * Content: (annotation? , (selector , field+)) * </key> * * <keyref * id = ID * name = NCName * refer = QName> * Content: (annotation? , (selector , field+)) * </keyref> * * <selector> * Content: XPathExprApprox : An XPath expression * </selector> * * <field> * Content: XPathExprApprox : An XPath expression * </field> * * * @param elementDecl * @return * @exception Exception */ private QName traverseElementDecl(Element elementDecl) throws Exception { int contentSpecType = -1; int contentSpecNodeIndex = -1; int typeNameIndex = -1; int scopeDefined = -2; //signal a error if -2 gets gets through //cause scope can never be -2. DatatypeValidator dv = null; String name = elementDecl.getAttribute(SchemaSymbols.ATT_NAME); if ( DEBUGGING ) System.out.println("traversing element decl : " + name ); String ref = elementDecl.getAttribute(SchemaSymbols.ATT_REF); String type = elementDecl.getAttribute(SchemaSymbols.ATT_TYPE); String minOccurs = elementDecl.getAttribute(SchemaSymbols.ATT_MINOCCURS); String maxOccurs = elementDecl.getAttribute(SchemaSymbols.ATT_MAXOCCURS); String dflt = elementDecl.getAttribute(SchemaSymbols.ATT_DEFAULT); String fixed = elementDecl.getAttribute(SchemaSymbols.ATT_FIXED); String equivClass = elementDecl.getAttribute(SchemaSymbols.ATT_EQUIVCLASS); // form attribute String isQName = elementDecl.getAttribute(SchemaSymbols.ATT_FORM); String fromAnotherSchema = null; if (isTopLevel(elementDecl)) { int nameIndex = fStringPool.addSymbol(name); int eltKey = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, nameIndex,TOP_LEVEL_SCOPE); if (eltKey > -1 ) { return new QName(-1,nameIndex,nameIndex,fTargetNSURI); } } // parse out 'block', 'final', 'nullable', 'abstract' int blockSet = parseBlockSet(elementDecl.getAttribute(SchemaSymbols.ATT_BLOCK)); int finalSet = parseFinalSet(elementDecl.getAttribute(SchemaSymbols.ATT_FINAL)); boolean isNullable = elementDecl.getAttribute (SchemaSymbols.ATT_NULLABLE).equals(SchemaSymbols.ATTVAL_TRUE)? true:false; boolean isAbstract = elementDecl.getAttribute (SchemaSymbols.ATT_ABSTRACT).equals(SchemaSymbols.ATTVAL_TRUE)? true:false; int elementMiscFlags = 0; if (isNullable) { elementMiscFlags += SchemaSymbols.NULLABLE; } if (isAbstract) { elementMiscFlags += SchemaSymbols.ABSTRACT; } //if this is a reference to a global element int attrCount = 0; if (!ref.equals("")) attrCount++; if (!type.equals("")) attrCount++; //REVISIT top level check for ref & archref if (attrCount > 1) reportSchemaError(SchemaMessageProvider.OneOfTypeRefArchRef, null); if (!ref.equals("")) { if (XUtil.getFirstChildElement(elementDecl) != null) reportSchemaError(SchemaMessageProvider.NoContentForRef, null); String prefix = ""; String localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } int localpartIndex = fStringPool.addSymbol(localpart); String uriString = resolvePrefixToURI(prefix); QName eltName = new QName(prefix != null ? fStringPool.addSymbol(prefix) : -1, localpartIndex, fStringPool.addSymbol(ref), uriString != null ? fStringPool.addSymbol(uriString) : -1); //if from another schema, just return the element QName if (! 
uriString.equals(fTargetNSURIString) ) { return eltName; } int elementIndex = fSchemaGrammar.getElementDeclIndex(eltName, TOP_LEVEL_SCOPE); //if not found, traverse the top level element that if referenced if (elementIndex == -1 ) { Element targetElement = getTopLevelComponentByName(SchemaSymbols.ELT_ELEMENT,localpart); if (targetElement == null ) { // REVISIT: Localize reportGenericSchemaError("Element " + localpart + " not found in the Schema"); //REVISIT, for now, the QName anyway return eltName; //return new QName(-1,fStringPool.addSymbol(localpart), -1, fStringPool.addSymbol(uriString)); } else { // do nothing here, other wise would cause infinite loop for // <element name="recur"><complexType><element ref="recur"> ... //eltName= traverseElementDecl(targetElement); } } return eltName; } // Handle the equivClass Element equivClassElementDecl = null; int equivClassElementDeclIndex = -1; boolean noErrorSoFar = true; String equivClassUri = null; String equivClassLocalpart = null; String equivClassFullName = null; ComplexTypeInfo equivClassEltTypeInfo = null; DatatypeValidator equivClassEltDV = null; if ( equivClass.length() > 0 ) { equivClassUri = resolvePrefixToURI(getPrefix(equivClass)); equivClassLocalpart = getLocalPart(equivClass); equivClassFullName = equivClassUri+","+equivClassLocalpart; if ( !equivClassUri.equals(fTargetNSURIString) ) { equivClassEltTypeInfo = getElementDeclTypeInfoFromNS(equivClassUri, equivClassLocalpart); if (equivClassEltTypeInfo == null) { equivClassEltDV = getElementDeclTypeValidatorFromNS(equivClassUri, equivClassLocalpart); if (equivClassEltDV == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type for element '" +equivClassLocalpart + "' in schema '" + equivClassUri+"'"); } } } else { equivClassElementDecl = getTopLevelComponentByName(SchemaSymbols.ELT_ELEMENT, equivClassLocalpart); if (equivClassElementDecl == null) { equivClassElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, getLocalPartIndex(equivClass),TOP_LEVEL_SCOPE); if ( equivClassElementDeclIndex == -1) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("Equivclass affiliation element " +equivClass +" in element declaration " +name); } } else { equivClassElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, getLocalPartIndex(equivClass),TOP_LEVEL_SCOPE); if ( equivClassElementDeclIndex == -1) { traverseElementDecl(equivClassElementDecl); equivClassElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, getLocalPartIndex(equivClass),TOP_LEVEL_SCOPE); } } if (equivClassElementDeclIndex != -1) { equivClassEltTypeInfo = fSchemaGrammar.getElementComplexTypeInfo( equivClassElementDeclIndex ); if (equivClassEltTypeInfo == null) { fSchemaGrammar.getElementDecl(equivClassElementDeclIndex, fTempElementDecl); equivClassEltDV = fTempElementDecl.datatypeValidator; if (equivClassEltDV == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type for element '" +equivClassLocalpart + "' in schema '" + equivClassUri+"'"); } } } } } // resolving the type for this element right here ComplexTypeInfo typeInfo = null; // element has a single child element, either a datatype or a type, null if primitive Element child = XUtil.getFirstChildElement(elementDecl); while (child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); boolean haveAnonType = false; // Handle Anonymous type if there is one if 
(child != null) { String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_COMPLEXTYPE)) { if (child.getAttribute(SchemaSymbols.ATT_NAME).length() > 0) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("anonymous complexType in element '" + name +"' has a name attribute"); } else typeNameIndex = traverseComplexTypeDecl(child); if (typeNameIndex != -1 ) { typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(typeNameIndex)); } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("traverse complexType error in element '" + name +"'"); } haveAnonType = true; } else if (childName.equals(SchemaSymbols.ELT_SIMPLETYPE)) { // TO DO: the Default and fixed attribute handling should be here. if (child.getAttribute(SchemaSymbols.ATT_NAME).length() > 0) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("anonymous simpleType in element '" + name +"' has a name attribute"); } else typeNameIndex = traverseSimpleTypeDecl(child); if (typeNameIndex != -1) { dv = fDatatypeRegistry.getDatatypeValidator(fStringPool.toString(typeNameIndex)); } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("traverse simpleType error in element '" + name +"'"); } contentSpecType = XMLElementDecl.TYPE_SIMPLE; haveAnonType = true; } else if (type.equals("")) { // "ur-typed" leaf contentSpecType = XMLElementDecl.TYPE_ANY; //REVISIT: is this right? //contentSpecType = fStringPool.addSymbol("UR_TYPE"); // set occurrence count contentSpecNodeIndex = -1; } else { System.out.println("unhandled case in TraverseElementDecl"); } } // handle type="" here if (haveAnonType && (type.length()>0)) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError( "Element '"+ name + "' have both a type attribute and a annoymous type child" ); } // type specified as an attribute and no child is type decl. else if (!type.equals("")) { if (equivClassElementDecl != null) { checkEquivClassOK(elementDecl, equivClassElementDecl); } String prefix = ""; String localpart = type; int colonptr = type.indexOf(":"); if ( colonptr > 0) { prefix = type.substring(0,colonptr); localpart = type.substring(colonptr+1); } String typeURI = resolvePrefixToURI(prefix); // check if the type is from the same Schema if ( !typeURI.equals(fTargetNSURIString) && !typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && typeURI.length() != 0) { // REVISIT, only needed because of resolvePrifixToURI. fromAnotherSchema = typeURI; typeInfo = getTypeInfoFromNS(typeURI, localpart); if (typeInfo == null) { dv = getTypeValidatorFromNS(typeURI, localpart); if (dv == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type " +localpart + " in schema " + typeURI); } } } else { typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(typeURI+","+localpart); if (typeInfo == null) { dv = getDatatypeValidator(typeURI, localpart); if (dv == null ) if (typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && !fTargetNSURIString.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("type not found : " + typeURI+":"+localpart); } else { Element topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_COMPLEXTYPE,localpart); if (topleveltype != null) { if (fCurrentTypeNameStack.search((Object)localpart) > - 1) { //then we found a recursive element using complexType. 
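                        // Illustrative (hypothetical) schema that reaches this branch: a named
                        // complexType that refers to itself through a local element, e.g.
                        //   <complexType name="chapter">
                        //     <sequence>
                        //       <element name="sub" type="chapter" minOccurs="0"/>
                        //     </sequence>
                        //   </complexType>
                        // "chapter" is still on fCurrentTypeNameStack at this point, so the element
                        // is parked in fElementRecurseComplex and completed later by
                        // checkRecursingComplexType().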
// REVISIT: this will be broken when recursing happens between 2 schemas int uriInd = -1; if ( isQName.equals(SchemaSymbols.ATTVAL_QUALIFIED)|| fElementDefaultQualified) { uriInd = fTargetNSURI; } int nameIndex = fStringPool.addSymbol(name); QName tempQName = new QName(fCurrentScope, nameIndex, nameIndex, uriInd); fElementRecurseComplex.put(tempQName, localpart); return new QName(-1, nameIndex, nameIndex, uriInd); } else { typeNameIndex = traverseComplexTypeDecl( topleveltype ); typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(typeNameIndex)); } } else { topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (topleveltype != null) { typeNameIndex = traverseSimpleTypeDecl( topleveltype ); dv = getDatatypeValidator(typeURI, localpart); // TO DO: the Default and fixed attribute handling should be here. } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("type not found : " + typeURI+":"+localpart); } } } } } } else if (haveAnonType){ if (equivClassElementDecl != null ) { checkEquivClassOK(elementDecl, equivClassElementDecl); } } // this element is ur-type, check its equivClass afficliation. else { // if there is equivClass affiliation and not type defintion found for this element, // then grab equivClass affiliation's type and give it to this element if ( typeInfo == null && dv == null ) typeInfo = equivClassEltTypeInfo; if ( typeInfo == null && dv == null ) dv = equivClassEltDV; } if (typeInfo == null && dv==null) { if (noErrorSoFar) { // Actually this Element's type definition is ur-type; contentSpecType = XMLElementDecl.TYPE_ANY; // REVISIT, need to wait till we have wildcards implementation. // ADD attribute wildcards here } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError ("untyped element : " + name ); } } // if element belongs to a compelx type if (typeInfo!=null) { contentSpecNodeIndex = typeInfo.contentSpecHandle; contentSpecType = typeInfo.contentType; scopeDefined = typeInfo.scopeDefined; dv = typeInfo.datatypeValidator; } // if element belongs to a simple type if (dv!=null) { contentSpecType = XMLElementDecl.TYPE_SIMPLE; if (typeInfo == null) { fromAnotherSchema = null; // not to switch schema in this case } } // Create element decl int elementNameIndex = fStringPool.addSymbol(name); int localpartIndex = elementNameIndex; int uriIndex = -1; int enclosingScope = fCurrentScope; if ( isQName.equals(SchemaSymbols.ATTVAL_QUALIFIED)|| fElementDefaultQualified ) { uriIndex = fTargetNSURI; } if ( isTopLevel(elementDecl)) { uriIndex = fTargetNSURI; enclosingScope = TOP_LEVEL_SCOPE; } //There can never be two elements with the same name and different type in the same scope. 
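        // Illustrative (hypothetical) declaration that the check below is meant to reject:
        //   <sequence>
        //     <element name="item" type="xsd:string"/>
        //     <element name="item" type="xsd:date"/>   <!-- same name and scope, different type -->
        //   </sequence>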
        int existSuchElementIndex = fSchemaGrammar.getElementDeclIndex(uriIndex, localpartIndex, enclosingScope);
        if ( existSuchElementIndex > -1) {
            fSchemaGrammar.getElementDecl(existSuchElementIndex, fTempElementDecl);
            DatatypeValidator edv = fTempElementDecl.datatypeValidator;
            ComplexTypeInfo eTypeInfo = fSchemaGrammar.getElementComplexTypeInfo(existSuchElementIndex);
            if ( ((eTypeInfo != null)&&(eTypeInfo!=typeInfo))
                 || ((edv != null)&&(edv != dv)) ) {
                noErrorSoFar = false;
                // REVISIT: Localize
                reportGenericSchemaError("duplicate element decl in the same scope : " +
                                         fStringPool.toString(localpartIndex));
            }
        }
        QName eltQName = new QName(-1,localpartIndex,elementNameIndex,uriIndex);
        // add element decl to pool
        int attrListHead = -1;
        // copy up attribute decls from type object
        if (typeInfo != null) {
            attrListHead = typeInfo.attlistHead;
        }
        int elementIndex = fSchemaGrammar.addElementDecl(eltQName, enclosingScope, scopeDefined,
                                                         contentSpecType, contentSpecNodeIndex,
                                                         attrListHead, dv);
        if ( DEBUGGING ) {
            // NOTE: the leading text of this debug message is an assumption; only the
            // tail of the original string literal survived.
            System.out.println("added element decl: (" + fStringPool.toString(eltQName.localpart) + ")"+
                               " eltType:"+type+" contentSpecType:"+contentSpecType+
                               " SpecNodeIndex:"+ contentSpecNodeIndex +" enclosingScope: " +enclosingScope +
                               " scopeDefined: " +scopeDefined+"\n");
        }
        if (typeInfo != null) {
            fSchemaGrammar.setElementComplexTypeInfo(elementIndex, typeInfo);
        }
        else {
            fSchemaGrammar.setElementComplexTypeInfo(elementIndex, typeInfo);
            // REVISIT: should we report error from here?
        }
        // mark element if its type belongs to different Schema.
        fSchemaGrammar.setElementFromAnotherSchemaURI(elementIndex, fromAnotherSchema);
        // set BlockSet, FinalSet, Nullable and Abstract for this element decl
        fSchemaGrammar.setElementDeclBlockSet(elementIndex, blockSet);
        fSchemaGrammar.setElementDeclFinalSet(elementIndex, finalSet);
        fSchemaGrammar.setElementDeclMiscFlags(elementIndex, elementMiscFlags);
        // setEquivClassElementFullName
        fSchemaGrammar.setElementDeclEquivClassElementFullName(elementIndex, equivClassFullName);
        // key/keyref/unique processing
        Element ic = XUtil.getFirstChildElement(elementDecl, IDENTITY_CONSTRAINTS);
        Vector idConstraints = null;
        if (ic != null) {
            // REVISIT: Use cached copy.
-Ac XMLElementDecl edecl = new XMLElementDecl(); fSchemaGrammar.getElementDecl(elementIndex, edecl); while (ic != null){ String icName = ic.getLocalName(); if ( icName.equals(SchemaSymbols.ELT_KEY) ) { traverseKey(ic, edecl); } else if ( icName.equals(SchemaSymbols.ELT_KEYREF) ) { traverseKeyRef(ic, edecl); } else if ( icName.equals(SchemaSymbols.ELT_UNIQUE) ) { traverseUnique(ic, edecl); } else { // should never get here throw new RuntimeException("identity constraint must be one of "+ "\""+SchemaSymbols.ELT_UNIQUE+"\", "+ "\""+SchemaSymbols.ELT_KEY+"\", or "+ "\""+SchemaSymbols.ELT_KEYREF+'"'); } fSchemaGrammar.setElementDecl(elementIndex, edecl); ic = XUtil.getNextSiblingElement(ic, IDENTITY_CONSTRAINTS); } } return eltQName; }// end of method traverseElementDecl(Element) private void traverseUnique(Element uelem, XMLElementDecl edecl) throws Exception { // create identity constraint if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: traverseUnique(\""+uelem.getNodeName()+"\")"); } Unique unique = new Unique(); // get selector and fields traverseIdentityConstraint(unique, uelem); // add to element decl edecl.unique.addElement(unique); } // traverseUnique(Element,XMLElementDecl) private void traverseKey(Element kelem, XMLElementDecl edecl) throws Exception { // create identity constraint String kname = kelem.getAttribute(SchemaSymbols.ATT_NAME); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: traverseKey(\""+kelem.getNodeName()+"\") ["+kname+']'); } Key key = new Key(kname); // get selector and fields traverseIdentityConstraint(key, kelem); // add to element decl edecl.key.addElement(key); } // traverseKey(Element,XMLElementDecl) private void traverseKeyRef(Element krelem, XMLElementDecl edecl) throws Exception { // create identity constraint String krname = krelem.getAttribute(SchemaSymbols.ATT_NAME); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: traverseKeyRef(\""+krelem.getNodeName()+"\") ["+krname+']'); } KeyRef keyRef = new KeyRef(krname); // add to element decl traverseIdentityConstraint(keyRef, krelem); // add key reference to element decl edecl.keyRef.addElement(keyRef); } // traverseKeyRef(Element,XMLElementDecl) private void traverseIdentityConstraint(IdentityConstraint ic, Element icelem) throws Exception { // get selector Element selem = XUtil.getFirstChildElement(icelem, SchemaSymbols.ELT_SELECTOR); String stext = CR_IMPL ? selem.getAttribute(SchemaSymbols.ATT_XPATH) : XUtil.getChildText(selem); stext = stext.trim(); try { // REVISIT: Get namespace context! -Ac Selector.XPath sxpath = new Selector.XPath(stext, fStringPool, null); Selector selector = new Selector(sxpath, ic); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: selector: "+selector); } ic.setSelector(selector); } catch (XPathException e) { // REVISIT: Add error message. throw new SAXException(e.getMessage()); } // get fields Element felem = XUtil.getNextSiblingElement(selem, SchemaSymbols.ELT_FIELD); while (felem != null) { String ftext = CR_IMPL ? felem.getAttribute(SchemaSymbols.ATT_XPATH) : XUtil.getChildText(felem); ftext = ftext.trim(); try { // REVISIT: Get namespace context! -Ac Field.XPath fxpath = new Field.XPath(ftext, fStringPool, null); // REVISIT: Get datatype validator. -Ac Field field = new Field(fxpath, null, ic); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: field: "+field); } ic.addField(field); } catch (XPathException e) { // REVISIT: Add error message. 
throw new SAXException(e.getMessage()); } felem = XUtil.getNextSiblingElement(felem, SchemaSymbols.ELT_FIELD); } } // traverseIdentityConstraint(IdentityConstraint,Element) int getLocalPartIndex(String fullName){ int colonAt = fullName.indexOf(":"); String localpart = fullName; if ( colonAt > -1 ) { localpart = fullName.substring(colonAt+1); } return fStringPool.addSymbol(localpart); } String getLocalPart(String fullName){ int colonAt = fullName.indexOf(":"); String localpart = fullName; if ( colonAt > -1 ) { localpart = fullName.substring(colonAt+1); } return localpart; } int getPrefixIndex(String fullName){ int colonAt = fullName.indexOf(":"); String prefix = ""; if ( colonAt > -1 ) { prefix = fullName.substring(0,colonAt); } return fStringPool.addSymbol(prefix); } String getPrefix(String fullName){ int colonAt = fullName.indexOf(":"); String prefix = ""; if ( colonAt > -1 ) { prefix = fullName.substring(0,colonAt); } return prefix; } private void checkEquivClassOK(Element elementDecl, Element equivClassElementDecl){ //TO DO!! } private Element getTopLevelComponentByName(String componentCategory, String name) throws Exception { Element child = null; if ( componentCategory.equals(SchemaSymbols.ELT_GROUP) ) { child = (Element) fSchemaGrammar.topLevelGroupDecls.get(name); } else if ( componentCategory.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP ) ) { child = (Element) fSchemaGrammar.topLevelAttrGrpDecls.get(name); } else if ( componentCategory.equals(SchemaSymbols.ELT_ATTRIBUTE ) ) { child = (Element) fSchemaGrammar.topLevelAttrDecls.get(name); } if (child != null ) { return child; } child = XUtil.getFirstChildElement(fSchemaRootElement); if (child == null) { return null; } while (child != null ){ if ( child.getLocalName().equals(componentCategory)) { if (child.getAttribute(SchemaSymbols.ATT_NAME).equals(name)) { return child; } } child = XUtil.getNextSiblingElement(child); } return null; } private boolean isTopLevel(Element component) { //REVISIT, is this the right way to check ? return (component.getParentNode().getLocalName().endsWith(SchemaSymbols.ELT_SCHEMA) ); } DatatypeValidator getTypeValidatorFromNS(String newSchemaURI, String localpart) throws Exception { // The following impl is for the case where every Schema Grammar has its own instance of DatatypeRegistry. // Now that we have only one DataTypeRegistry used by all schemas. this is not needed. /** * Traverse attributeGroup Declaration * * <attributeGroup * id = ID * ref = QName> * Content: (annotation?) * </> * * @param elementDecl * @exception Exception */ /*private int traverseAttributeGroupDecl( Element attributeGroupDecl ) throws Exception { int attributeGroupID = fStringPool.addSymbol( attributeGroupDecl.getAttribute( SchemaSymbols.ATTVAL_ID )); int attributeGroupName = fStringPool.addSymbol( attributeGroupDecl.getAttribute( SchemaSymbols.ATT_NAME )); return -1; }*/ /** * Traverse Group Declaration. * * <group * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * ref = QName> * Content: (annotation? 
, (element | group | all | choice | sequence | any)*) * <group/> * * @param elementDecl * @return * @exception Exception */ private int traverseGroupDecl( Element groupDecl ) throws Exception { String groupName = groupDecl.getAttribute(SchemaSymbols.ATT_NAME); String ref = groupDecl.getAttribute(SchemaSymbols.ATT_REF); if (!ref.equals("")) { if (XUtil.getFirstChildElement(groupDecl) != null) reportSchemaError(SchemaMessageProvider.NoContentForRef, null); String prefix = ""; String localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } int localpartIndex = fStringPool.addSymbol(localpart); String uriStr = resolvePrefixToURI(prefix); if (!uriStr.equals(fTargetNSURIString)) { return traverseGroupDeclFromAnotherSchema(localpart, uriStr); } int contentSpecIndex = -1; Element referredGroup = getTopLevelComponentByName(SchemaSymbols.ELT_GROUP,localpart); if (referredGroup == null) { // REVISIT: Localize reportGenericSchemaError("Group " + localpart + " not found in the Schema"); //REVISIT, this should be some custom Exception //throw new RuntimeException("Group " + localpart + " not found in the Schema"); } else { contentSpecIndex = traverseGroupDecl(referredGroup); } return contentSpecIndex; } boolean traverseElt = true; if (fCurrentScope == TOP_LEVEL_SCOPE) { traverseElt = false; } Element child = XUtil.getFirstChildElement(groupDecl); while (child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); int contentSpecType = 0; int csnType = 0; int allChildren[] = null; int allChildCount = 0; csnType = XMLContentSpec.CONTENTSPECNODE_SEQ; contentSpecType = XMLElementDecl.TYPE_CHILDREN; int left = -2; int right = -2; boolean hadContent = false; boolean seeAll = false; boolean seeParticle = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; hadContent = true; boolean illegalChild = false; String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = traverseGroupDecl(child); if (index == -1) continue; seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ALL)) { index = traverseAll(child); //seeParticle = true; seeAll = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { illegalChild = true; reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if ( ! 
illegalChild ) { index = expandContentModel( index, child); } if (seeParticle && seeAll) { reportSchemaError( SchemaMessageProvider.GroupContentRestricted, new Object [] { "'all' needs to be 'the' only Child", childName}); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent && right != -2) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); return left; } private int traverseGroupDeclFromAnotherSchema( String groupName , String uriStr ) throws Exception { SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr); if (uriStr == null || aGrammar==null ||! (aGrammar instanceof SchemaGrammar) ) { // REVISIT: Localize reportGenericSchemaError("!!Schema not found in #traverseGroupDeclFromAnotherSchema, "+ "schema uri: " + uriStr +", groupName: " + groupName); return -1; } Element groupDecl = (Element) aGrammar.topLevelGroupDecls.get((Object)groupName); if (groupDecl == null) { // REVISIT: Localize reportGenericSchemaError( "no group named \"" + groupName + "\" was defined in schema : " + uriStr); return -1; } NamespacesScope saveNSMapping = fNamespacesScope; int saveTargetNSUri = fTargetNSURI; fTargetNSURI = fStringPool.addSymbol(aGrammar.getTargetNamespaceURI()); fNamespacesScope = aGrammar.getNamespacesScope(); boolean traverseElt = true; if (fCurrentScope == TOP_LEVEL_SCOPE) { traverseElt = false; } Element child = XUtil.getFirstChildElement(groupDecl); while (child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); int contentSpecType = 0; int csnType = 0; int allChildren[] = null; int allChildCount = 0; csnType = XMLContentSpec.CONTENTSPECNODE_SEQ; contentSpecType = XMLElementDecl.TYPE_CHILDREN; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; hadContent = true; boolean seeParticle = false; String childName = child.getLocalName(); int childNameIndex = fStringPool.addSymbol(childName); String formAttrVal = child.getAttribute(SchemaSymbols.ATT_FORM); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = traverseGroupDecl(child); if (index == -1) continue; seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ALL)) { index = traverseAll(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if (seeParticle) { index = expandContentModel( index, child); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent && right != -2) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); fNamespacesScope = saveNSMapping; fTargetNSURI = saveTargetNSUri; return left; } // end of 
method traverseGroupDeclFromAnotherSchema /** * * Traverse the Sequence declaration * * <sequence * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? , (element | group | choice | sequence | any)*) * </sequence> * **/ int traverseSequence (Element sequenceDecl) throws Exception { Element child = XUtil.getFirstChildElement(sequenceDecl); while (child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); int contentSpecType = 0; int csnType = 0; csnType = XMLContentSpec.CONTENTSPECNODE_SEQ; contentSpecType = XMLElementDecl.TYPE_CHILDREN; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; hadContent = true; boolean seeParticle = false; String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = traverseGroupDecl(child); if (index == -1) continue; seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if (seeParticle) { index = expandContentModel( index, child); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent && right != -2) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); return left; } /** * * Traverse the Sequence declaration * * <choice * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? 
, (element | group | choice | sequence | any)*) * </choice> * **/ int traverseChoice (Element choiceDecl) throws Exception { // REVISIT: traverseChoice, traverseSequence can be combined Element child = XUtil.getFirstChildElement(choiceDecl); while (child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) child = XUtil.getNextSiblingElement(child); int contentSpecType = 0; int csnType = 0; csnType = XMLContentSpec.CONTENTSPECNODE_CHOICE; contentSpecType = XMLElementDecl.TYPE_CHILDREN; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; hadContent = true; boolean seeParticle = false; String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { index = traverseGroupDecl(child); if (index == -1) continue; seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if (seeParticle) { index = expandContentModel( index, child); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent && right != -2) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); return left; } /** * * Traverse the "All" declaration * * <all * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? 
, (element | group | choice | sequence | any)*)
     * </all>
     *
     **/
    int traverseAll( Element allDecl) throws Exception {
        Element child = XUtil.getFirstChildElement(allDecl);
        while (child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION))
            child = XUtil.getNextSiblingElement(child);
        int allChildren[] = null;
        int allChildCount = 0;
        int left = -2;
        for (; child != null; child = XUtil.getNextSiblingElement(child)) {
            int index = -2;
            boolean seeParticle = false;
            String childName = child.getLocalName();
            if (childName.equals(SchemaSymbols.ELT_ELEMENT)) {
                QName eltQName = traverseElementDecl(child);
                index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF,
                                                           eltQName.localpart,
                                                           eltQName.uri,
                                                           false);
                seeParticle = true;
            }
            else if (childName.equals(SchemaSymbols.ELT_GROUP)) {
                index = traverseGroupDecl(child);
                if (index == -1)
                    continue;
                seeParticle = true;
            }
            else if (childName.equals(SchemaSymbols.ELT_CHOICE)) {
                index = traverseChoice(child);
                seeParticle = true;
            }
            else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) {
                index = traverseSequence(child);
                seeParticle = true;
            }
            else if (childName.equals(SchemaSymbols.ELT_ANY)) {
                index = traverseAny(child);
                seeParticle = true;
            }
            else {
                reportSchemaError(SchemaMessageProvider.GroupContentRestricted,
                                  new Object [] { "group", childName });
            }
            if (seeParticle) {
                index = expandContentModel( index, child);
            }
            try {
                allChildren[allChildCount] = index;
            }
            catch (NullPointerException ne) {
                // lazily create the child array on first use
                allChildren = new int[32];
                allChildren[allChildCount] = index;
            }
            catch (ArrayIndexOutOfBoundsException ae) {
                // the array needs to grow; keep using the larger copy before storing
                int[] newArray = new int[allChildren.length*2];
                System.arraycopy(allChildren, 0, newArray, 0, allChildren.length);
                allChildren = newArray;
                allChildren[allChildCount] = index;
            }
            allChildCount++;
        }
        try {
            left = allCalcWrapper(allChildren, allChildCount);
        } catch (java.lang.OutOfMemoryError e) {
            reportGenericSchemaError("The size of the <all>"
                + " declaration in your schema is too large for this parser"
                + " and elements using it will not validate correctly.");
        }
        return left;
    }

    // allCalcWrapper initiates the recursive calculation of the permutations
    // of targetArray.
    // @param initialArray:  the array we're passed, whose size may
    //      not reflect the real number of elements to be permuted.
    // @param size:  the true size of this array.
    private int allCalcWrapper (int[] initialArray, int size)
            throws Exception {
        int permSize = size/2;
        int[] targetArray = new int[size];
        System.arraycopy(initialArray, 0, targetArray, 0, size);

        if(targetArray.length == 1) {
            return targetArray[0];
        }
        else if (targetArray.length < 1) {
            return -2;
        }
        else if (permSize > targetArray.length) {
            reportGenericSchemaError("The size of the permutations " + permSize
                + " cannot be greater than the length of the array to be permuted; error in processing of <all>!");
            return -2;
        }
        else if (targetArray.length <= 3) {
            return allCombo(targetArray);
        }
        else {
            return allCalc (targetArray, 0, permSize, 0,
                            new int[targetArray.length-permSize], -2);
        }
    } // allCalcWrapper

    // allCombo generates all combinations of the given array.  It
    // assumes the array has either 2 or 3 elements, and is hardcoded
    // for speed.
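    // Sketch of the expansion performed by allCombo/allCalc below (read off the
    // code, not normative):
    //   <all> with children {a, b}    ->  ( (a , b) | (b , a) )
    //   <all> with children {a, b, c} ->  a choice over all 3! = 6 orderings
    // Larger <all> groups go through allCalc(), whose output grows combinatorially;
    // hence the OutOfMemoryError guard in traverseAll() above.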
private int allCombo(int[] targetArray) throws Exception { if(targetArray.length == 2) { int left, right; int[] lA = {targetArray[0], targetArray[1]}; int[] rA = {targetArray[1], targetArray[0]}; left = createSeq(lA); right = createSeq(rA); return fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, left, right, false); } else if (targetArray.length == 3) { int tempChoice; int[] a1 = {targetArray[0], targetArray[1], targetArray[2]}; int[] a2 = {targetArray[0], targetArray[2], targetArray[1]}; int[] a3 = {targetArray[1], targetArray[0], targetArray[2]}; int[] a4 = {targetArray[1], targetArray[2], targetArray[0]}; int[] a5 = {targetArray[2], targetArray[1], targetArray[0]}; int[] a6 = {targetArray[2], targetArray[0], targetArray[1]}; int s1 = createSeq(a1); int s2 = createSeq(a2); int s3 = createSeq(a3); int s4 = createSeq(a4); int s5 = createSeq(a5); int s6 = createSeq(a6); tempChoice = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, s1, s2, false); tempChoice = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, tempChoice, s3, false); tempChoice = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, tempChoice, s4, false); tempChoice = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, tempChoice, s5, false); return fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, tempChoice, s6, false); } else { return -2; } } // end allCombo // The purpose of allCalc is to produce all permutations of // permSize elements that can be derived from targetArray. // @param targetArray: the array from which permutations // must be extracted; // @param targetPosition: position in the target array of the // last element in targetArray that was completely processed; // @param permSize: the size of the permutation set // @param progressIndicator: indication of the number of meaningful // elements in complementArray; // @param complementArray: contains the set of elements that were // contained in the global targetArray array and are not // present in this invocation's targetArray. // @param choiceHead: index of the head of curretn <choice> // linked list. private int allCalc(int[] targetArray, int targetPosition, int permSize, int progressIndicator, int[] complementArray, int choiceHead) throws Exception { if (targetArray.length-permSize-targetPosition == 1) { //base case int[] newTargetArray = new int[permSize+targetPosition]; int allSeq; // pointer to sequence of <all>'s for (int i=targetPosition; i<targetArray.length; i++){ arrayProducer(targetArray, i, newTargetArray, complementArray, progressIndicator); // newTargetArray and complementArray must be recursed // upon... 
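                // In this base case each candidate element targetArray[i] is moved into
                // complementArray by arrayProducer(), the remaining elements stay in
                // newTargetArray, and the two halves are expanded recursively, sequenced
                // together and OR'ed into choiceHead (description inferred from the code).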
                int c1 = allCalcWrapper(newTargetArray, newTargetArray.length);
                int c2 = allCalcWrapper(complementArray, complementArray.length);
                allSeq = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ,
                                                           c1, c2, false);
                if (choiceHead != -2) {
                    choiceHead = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE,
                                                                   choiceHead, allSeq, false);
                }
                else {
                    choiceHead = allSeq;
                }
            }
            return choiceHead;
        }
        else { // recursive case
            for (int i=targetPosition; i<targetArray.length; i++){
                int[] newTargetArray = new int[targetArray.length-1];
                arrayProducer(targetArray, i, newTargetArray, complementArray, progressIndicator);
                choiceHead = allCalc(newTargetArray, targetPosition, permSize,
                                     progressIndicator+1, complementArray, choiceHead);
                targetPosition++;
                permSize--;   // assumed intent: shrink the permutation size each iteration
                              // (only a bare, incomplete "permSize" statement appeared here)
            }
            return choiceHead;
        } // end else...if
    }// allCalc

    // The purpose of arrayProducer is to create two arrays out of
    // targetArray:  the first, newTargetArray, will contain all the
    // elements of targetArray except the tPos-th; complementArray
    // will have its cPos-th element set to targetArray[tPos].
    // It is assumed that tPos, cPos and targetArray have meaningful
    // values; complementArray should already have been allocated and
    // newTargetArray should also have been allocated previously.
    private void arrayProducer(int [] targetArray, int tPos,
                int[] newTargetArray, int[] complementArray, int cPos) {
        complementArray[cPos] = targetArray[tPos];
        if (tPos > 0)
            System.arraycopy(targetArray, 0, newTargetArray, 0, tPos);
        if (tPos < targetArray.length-1)
            System.arraycopy(targetArray, tPos+1, newTargetArray, tPos, targetArray.length-tPos-1);
    } // end arrayProducer

    /** Creates a sequence. */
    private int createSeq(int src[]) throws Exception {
        int left = src[0];
        int right = src[1];
        for (int i = 2; i < src.length; i++) {
            left = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ,
                                                     left, right, false);
            right = src[i];
        }
        return fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ,
                                                 left, right, false);
    } // createSeq(int[]):int

    // utilities from Tom Watson's SchemaParser class
    // TO DO: Need to make this more conformant with Schema int type parsing

    private int parseInt (String intString) throws Exception {
        if ( intString.equals("*") ) {
            return SchemaSymbols.INFINITY;
        }
        else {
            return Integer.parseInt (intString);
        }
    }

    private int parseSimpleDerivedBy (String derivedByString) throws Exception {
        if ( derivedByString.equals (SchemaSymbols.ATTVAL_LIST) ) {
            return SchemaSymbols.LIST;
        }
        else if ( derivedByString.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) {
            return SchemaSymbols.RESTRICTION;
        }
        else {
            // REVISIT: Localize
            reportGenericSchemaError ("SimpleType: Invalid value for 'derivedBy'");
            return -1;
        }
    }

    private int parseComplexDerivedBy (String derivedByString)  throws Exception {
        if ( derivedByString.equals (SchemaSymbols.ATTVAL_EXTENSION) ) {
            return SchemaSymbols.EXTENSION;
        }
        else if ( derivedByString.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) {
            return SchemaSymbols.RESTRICTION;
        }
        else {
            // REVISIT: Localize
            reportGenericSchemaError ( "ComplexType: Invalid value for 'derivedBy'" );
            return -1;
        }
    }

    private int parseSimpleFinal (String finalString) throws Exception {
        if ( finalString.equals (SchemaSymbols.ATTVAL_POUNDALL) ) {
            return SchemaSymbols.ENUMERATION+SchemaSymbols.RESTRICTION+SchemaSymbols.LIST+SchemaSymbols.REPRODUCTION;
        }
        else {
            int enumerate = 0;
            int restrict = 0;
            int list = 0;
            int reproduce = 0;
            StringTokenizer t = new StringTokenizer (finalString, " ");
            while (t.hasMoreTokens()) {
                String token =
t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ("restriction in set twice"); } } else if ( token.equals (SchemaSymbols.ATTVAL_LIST) ) { if ( list == 0 ) { list = SchemaSymbols.LIST; } else { // REVISIT: Localize reportGenericSchemaError ("list in set twice"); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid value (" + finalString + ")" ); } } return enumerate+restrict+list+reproduce; } } private int parseComplexContent (String contentString) throws Exception { if ( contentString.equals (SchemaSymbols.ATTVAL_EMPTY) ) { return XMLElementDecl.TYPE_EMPTY; } else if ( contentString.equals (SchemaSymbols.ATTVAL_ELEMENTONLY) ) { return XMLElementDecl.TYPE_CHILDREN; } else if ( contentString.equals (SchemaSymbols.ATTVAL_TEXTONLY) ) { return XMLElementDecl.TYPE_SIMPLE; } else if ( contentString.equals (SchemaSymbols.ATTVAL_MIXED) ) { return XMLElementDecl.TYPE_MIXED; } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid value for content" ); return -1; } } private int parseDerivationSet (String finalString) throws Exception { if ( finalString.equals ("#all") ) { return SchemaSymbols.EXTENSION+SchemaSymbols.RESTRICTION+SchemaSymbols.REPRODUCTION; } else { int extend = 0; int restrict = 0; int reproduce = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "extension already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "restriction already in set" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid final value (" + finalString + ")" ); } } return extend+restrict+reproduce; } } private int parseBlockSet (String finalString) throws Exception { if ( finalString.equals ("#all") ) { return SchemaSymbols.EQUIVCLASS+SchemaSymbols.EXTENSION+SchemaSymbols.LIST+SchemaSymbols.RESTRICTION+SchemaSymbols.REPRODUCTION; } else { int extend = 0; int restrict = 0; int reproduce = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_EQUIVCLASS) ) { if ( extend == 0 ) { extend = SchemaSymbols.EQUIVCLASS; } else { // REVISIT: Localize reportGenericSchemaError ( "'equivClass' already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "extension already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_LIST) ) { if ( extend == 0 ) { extend = SchemaSymbols.LIST; } else { // REVISIT: Localize reportGenericSchemaError ( "'list' already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "restriction already in set" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid final value (" + finalString + ")" ); } } return extend+restrict+reproduce; } } private int parseFinalSet (String finalString) throws Exception { if ( finalString.equals ("#all") ) { 
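// "#all" selects every value at once; otherwise the space-separated
// tokens are accumulated one by one below.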
return SchemaSymbols.EQUIVCLASS+SchemaSymbols.EXTENSION+SchemaSymbols.LIST+SchemaSymbols.RESTRICTION+SchemaSymbols.REPRODUCTION; } else { int extend = 0; int restrict = 0; int reproduce = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_EQUIVCLASS) ) { if ( extend == 0 ) { extend = SchemaSymbols.EQUIVCLASS; } else { // REVISIT: Localize reportGenericSchemaError ( "'equivClass' already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "extension already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_LIST) ) { if ( extend == 0 ) { extend = SchemaSymbols.LIST; } else { // REVISIT: Localize reportGenericSchemaError ( "'list' already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "restriction already in set" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid final value (" + finalString + ")" ); } } return extend+restrict+reproduce; } } private void reportGenericSchemaError (String error) throws Exception { if (fErrorReporter == null) { System.err.println("__TraverseSchemaError__ : " + error); } else { reportSchemaError (SchemaMessageProvider.GenericError, new Object[] { error }); } } private void reportSchemaError(int major, Object args[]) throws Exception { if (fErrorReporter == null) { System.out.println("__TraverseSchemaError__ : " + SchemaMessageProvider.fgMessageKeys[major]); for (int i=0; i< args.length ; i++) { System.out.println((String)args[i]); } } else { fErrorReporter.reportError(fErrorReporter.getLocator(), SchemaMessageProvider.SCHEMA_DOMAIN, major, SchemaMessageProvider.MSG_NONE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } //Unit Test here public static void main(String args[] ) { if( args.length != 1 ) { System.out.println( "Error: Usage java TraverseSchema yourFile.xsd" ); System.exit(0); } DOMParser parser = new IgnoreWhitespaceParser(); parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() ); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } try { parser.parse( args[0]); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { e.printStackTrace(); } Document document = parser.getDocument(); //Our Grammar OutputFormat format = new OutputFormat( document ); java.io.StringWriter outWriter = new java.io.StringWriter(); XMLSerializer serial = new XMLSerializer( outWriter,format); TraverseSchema tst = null; try { Element root = document.getDocumentElement();// This is what we pass to TraverserSchema //serial.serialize( root ); //System.out.println(outWriter.toString()); tst = new TraverseSchema( root, new StringPool(), new SchemaGrammar(), (GrammarResolver) new GrammarResolverImpl() ); } catch (Exception e) { e.printStackTrace(System.err); } parser.getDocument(); } static class Resolver implements EntityResolver { private static final String SYSTEM[] = { "http: "http: "http: }; private static final String PATH[] = { "structures.dtd", 
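/* Each SYSTEM[i] entity is resolved by Resolver.resolveEntity below to the
   bundled classpath resource PATH[i]. */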
"datatypes.dtd", "versionInfo.ent", }; public InputSource resolveEntity(String publicId, String systemId) throws IOException { // looking for the schema DTDs? for (int i = 0; i < SYSTEM.length; i++) { if (systemId.equals(SYSTEM[i])) { InputSource source = new InputSource(getClass().getResourceAsStream(PATH[i])); source.setPublicId(publicId); source.setSystemId(systemId); return source; } } // use default resolution return null; } // resolveEntity(String,String):InputSource } // class Resolver static class ErrorHandler implements org.xml.sax.ErrorHandler { /** Warning. */ public void warning(SAXParseException ex) { System.err.println("[Warning] "+ getLocationString(ex)+": "+ ex.getMessage()); } /** Error. */ public void error(SAXParseException ex) { System.err.println("[Error] "+ getLocationString(ex)+": "+ ex.getMessage()); } /** Fatal error. */ public void fatalError(SAXParseException ex) throws SAXException { System.err.println("[Fatal Error] "+ getLocationString(ex)+": "+ ex.getMessage()); throw ex; } // Private methods /** Returns a string of the location. */ private String getLocationString(SAXParseException ex) { StringBuffer str = new StringBuffer(); String systemId_ = ex.getSystemId(); if (systemId_ != null) { int index = systemId_.lastIndexOf('/'); if (index != -1) systemId_ = systemId_.substring(index + 1); str.append(systemId_); } str.append(':'); str.append(ex.getLineNumber()); str.append(':'); str.append(ex.getColumnNumber()); return str.toString(); } // getLocationString(SAXParseException):String } static class IgnoreWhitespaceParser extends DOMParser { public void ignorableWhitespace(char ch[], int start, int length) {} public void ignorableWhitespace(int dataIdx) {} } // class IgnoreWhitespaceParser }
package se.sics.cooja.plugins; import java.awt.BorderLayout; import java.awt.Insets; import java.awt.Window; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.StringReader; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.swing.*; import org.apache.log4j.Logger; import org.jdom.Document; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.input.SAXBuilder; import org.jdom.output.Format; import org.jdom.output.XMLOutputter; import se.sics.cooja.*; import se.sics.cooja.dialogs.MessageList; @ClassDescription("(GUI) Test Script Editor") @PluginType(PluginType.COOJA_PLUGIN) public class ScriptRunner extends VisPlugin { private static final long serialVersionUID = 1L; private static Logger logger = Logger.getLogger(ScriptRunner.class); private JTextArea scriptTextArea; private JTextArea logTextArea; private GUI gui; private LogScriptEngine scriptTester = null; private JButton toggleButton; private String oldTestName = null; private String oldInfo = null; private static String exampleScript = "/* Script is called once for every node log output. */\n" + "/* Input variables: Mote mote, int id, String msg. */\n" + "\n" + "log.log('MOTE=' + mote + '\\n');\n" + "log.log('ID=' + id + '\\n');\n" + "log.log('TIME=' + mote.getSimulation().getSimulationTime() + '\\n');\n" + "log.log('MSG=' + msg + '\\n');\n" + "\n" + "/* Hashtable global may be used to store state across script invokes */\n" + "log.log('STORED VAR=' + global.get('storedVar') + '\\n');\n" + "global.put('storedVar', msg);\n" + "\n" + "/* Contiki test script example */\n" + "if (msg.startsWith('Hello, world')) {\n" + " log.log('TEST OK\\n'); /* Report test success */\n" + " \n" + " /* To increase test run speed, close the simulator when done */\n" + " //mote.getSimulation().getGUI().doQuit(false); /* Quit simulator (to end test run)*/\n" + "}\n" + "\n" + "//mote.getSimulation().getGUI().reloadCurrentSimulation(true); /* Reload simulation */\n"; public ScriptRunner(GUI gui) { super("(GUI) Test Script Editor", gui); this.gui = gui; scriptTextArea = new JTextArea(8,50); scriptTextArea.setMargin(new Insets(5,5,5,5)); scriptTextArea.setEditable(true); scriptTextArea.setCursor(null); scriptTextArea.setText(exampleScript); logTextArea = new JTextArea(8,50); logTextArea.setMargin(new Insets(5,5,5,5)); logTextArea.setEditable(true); logTextArea.setCursor(null); toggleButton = new JButton("Activate"); toggleButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ev) { if (toggleButton.getText().equals("Activate")) { scriptTester = new LogScriptEngine(ScriptRunner.this.gui, scriptTextArea.getText()); scriptTester.setScriptLogObserver(new Observer() { public void update(Observable obs, Object obj) { logTextArea.append((String) obj); logTextArea.setCaretPosition(logTextArea.getText().length()); } }); scriptTester.activateScript(); toggleButton.setText("Deactivate"); scriptTextArea.setEnabled(false); } else { if (scriptTester != null) { scriptTester.deactiveScript(); scriptTester = null; } toggleButton.setText("Activate"); scriptTextArea.setEnabled(true); } } }); JButton importButton = new JButton("Import Contiki 
test"); importButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ev) { Runnable doImport = new Runnable() { public void run() { importContikiTest(); } }; new Thread(doImport).start(); } }); JButton exportButton = new JButton("Export as Contiki test"); exportButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ev) { exportAsContikiTest(); } }); JSplitPane centerPanel = new JSplitPane( JSplitPane.VERTICAL_SPLIT, new JScrollPane(scriptTextArea), new JScrollPane(logTextArea) ); JPanel buttonPanel = new JPanel(new BorderLayout()); buttonPanel.add(BorderLayout.WEST, importButton); buttonPanel.add(BorderLayout.CENTER, toggleButton); buttonPanel.add(BorderLayout.EAST, exportButton); JPanel southPanel = new JPanel(new BorderLayout()); southPanel.add(BorderLayout.EAST, buttonPanel); getContentPane().add(BorderLayout.CENTER, centerPanel); getContentPane().add(BorderLayout.SOUTH, southPanel); pack(); try { setSelected(true); } catch (java.beans.PropertyVetoException e) { // Could not select } } private void importContikiTest() { new Thread(new Runnable() { public void run() { Simulation simulation = ScriptRunner.this.gui.getSimulation(); /* Load config from test directory */ final File proposedDir = new File(GUI.getExternalToolsSetting("PATH_CONTIKI") + "/tools/cooja/contiki_tests"); if (!proposedDir.exists()) { logger.fatal("Test directory does not exist: " + proposedDir.getPath()); return; } scriptTextArea.setText(""); logTextArea.setText(""); gui.doLoadConfig(false, true, proposedDir); Vector<File> history = gui.getFileHistory(); File cscFile = history.firstElement(); String testName = cscFile.getName().substring(0, cscFile.getName().length()-4); File testDir = cscFile.getParentFile(); File jsFile = new File(testDir, testName + ".js"); File infoFile = new File(testDir, testName + ".info"); oldTestName = testName; if (!cscFile.exists()) { logger.fatal("Can't locate config file: " + cscFile.getAbsolutePath()); return; } if (!jsFile.exists()) { logger.fatal("Can't locate .js file: " + jsFile.getAbsolutePath()); return; } /* Import .js */ try { BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(jsFile))); String line; while ((line = reader.readLine()) != null) { scriptTextArea.append(line + "\n"); } reader.close(); } catch (Exception ex) { ex.printStackTrace(); return; } /* Import .info */ if (infoFile.exists()) { try { oldInfo = ""; BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(infoFile))); String line; while ((line = reader.readLine()) != null) { oldInfo += line + "\n"; } reader.close(); } catch (Exception ex) { ex.printStackTrace(); return; } } } }).start(); } private void exportAsContikiTest() { Simulation simulation = ScriptRunner.this.gui.getSimulation(); if (simulation == null) { JOptionPane.showMessageDialog(GUI.getTopParentContainer(), "No simulation loaded. 
Aborting.", "Error", JOptionPane.ERROR_MESSAGE); return; } /* Confirm test directory */ File testDir = new File(GUI.getExternalToolsSetting("PATH_CONTIKI") + "/tools/cooja/contiki_tests"); String s1 = "Ok"; String s2 = "Cancel"; Object[] options = { s1, s2 }; int n = JOptionPane.showOptionDialog(GUI.getTopParentContainer(), "Export current simulation config (.csc) and test script (.js)\n" + "to directory '" + testDir.getPath() + "'?", "Export Contiki test", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, s1); if (n != JOptionPane.YES_OPTION) { return; } if (!testDir.exists()) { logger.fatal("Test directory does not exist: " + testDir.getPath()); return; } /* Name test to export */ if (oldTestName == null) { oldTestName = "mytest"; } String testName = (String) JOptionPane.showInputDialog(GUI.getTopParentContainer(), "Enter test name. No spaces or strange chars allowed.", "Test name", JOptionPane.PLAIN_MESSAGE, null, null, oldTestName); if (testName == null) { return; } oldTestName = testName; if (testName.equals("") || testName.contains(" ")) { JOptionPane.showMessageDialog(GUI.getTopParentContainer(), "Bad test name: '" + testName + "'", "Bad test name", JOptionPane.ERROR_MESSAGE); return; } File cscFile = new File(testDir, testName + ".csc"); File jsFile = new File(testDir, testName + ".js"); File infoFile = new File(testDir, testName + ".info"); final File logFile = new File(testDir, testName + ".log"); /* Overwrite existing test */ if (cscFile.exists() || jsFile.exists() || infoFile.exists()) { s1 = "Overwrite"; s2 = "Cancel"; n = JOptionPane.showOptionDialog(GUI.getTopParentContainer(), "Some output files already exist. Overwrite?", "Test already exist", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, s1); if (n != JOptionPane.YES_OPTION) { return; } } if (cscFile.exists()) { cscFile.delete(); } if (jsFile.exists()) { jsFile.delete(); } if (infoFile.exists()) { infoFile.delete(); } /* Get current simulation configuration */ Element root = new Element("simconf"); Element simulationElement = new Element("simulation"); simulationElement.addContent(simulation.getConfigXML()); root.addContent(simulationElement); /* Strip plugins */ Collection<Element> pluginsConfig = ScriptRunner.this.gui.getPluginsConfigXML(); if (pluginsConfig != null) { root.addContent(pluginsConfig); } // if (pluginsConfig != null) { // JOptionPane.showMessageDialog(GUI.getTopParentContainer(), // "Stripping plugin configuration.\n" + // "(Exporting non-GUI plugins not implemented.)", // "Plugins detected", JOptionPane.WARNING_MESSAGE); /* Fix simulation delay */ root.detach(); String configString = new XMLOutputter().outputString(new Document(root)); String identifierExtraction = "<delaytime>([^<]*)</delaytime>"; Matcher matcher = Pattern.compile(identifierExtraction).matcher(configString); while (matcher.find()) { int delay = Integer.parseInt(matcher.group(1)); if (delay != 0) { JOptionPane.showMessageDialog(GUI.getTopParentContainer(), "Simulation delay currently set to " + delay + ".\n" + "Changing delay time to 0 in exported test.", "Non-zero delay time detected", JOptionPane.WARNING_MESSAGE); } configString = configString.replace( "<delaytime>" + matcher.group(1) + "</delaytime>", "<delaytime>0</delaytime>"); } /* Export .csc */ try { Element newRoot = new SAXBuilder().build(new StringReader(configString)).getRootElement(); newRoot.detach(); Document doc = new Document(newRoot); FileOutputStream out = new FileOutputStream(cscFile); XMLOutputter outputter = new 
XMLOutputter(); outputter.setFormat(Format.getPrettyFormat()); outputter.output(doc, out); out.close(); } catch (JDOMException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } /* Export .js */ try { BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(jsFile))); writer.write(scriptTextArea.getText()); writer.close(); } catch (Exception ex) { ex.printStackTrace(); return; } /* Export .info (optional) */ try { if (oldInfo == null) { oldInfo = ""; } String info = (String) JOptionPane.showInputDialog(GUI.getTopParentContainer(), "Optional test info", "(OPTIONAL) Enter test description", JOptionPane.PLAIN_MESSAGE, null, null, oldInfo); if (info != null && !info.equals("")) { oldInfo = info; BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(infoFile))); writer.write(info); writer.write("\n"); writer.close(); } else { oldInfo = null; } } catch (Exception ex) { ex.printStackTrace(); return; } /* Run exported test (optional) */ s1 = "Run test"; s2 = "No"; n = JOptionPane.showOptionDialog(GUI.getTopParentContainer(), "Run exported test in forked Cooja now?", "Run test?", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, s1); if (n != JOptionPane.YES_OPTION) { return; } try { final Process externalCoojaProcess; MessageList testOutput = new MessageList(); final PrintStream normal = testOutput.getInputStream(MessageList.NORMAL); final PrintStream error = testOutput.getInputStream(MessageList.ERROR); JPanel progressPanel = new JPanel(new BorderLayout()); final JDialog progressDialog = new JDialog((Window)GUI.getTopParentContainer(), (String) null); progressDialog.setTitle("Running test..."); String command[] = { "java", "-jar", "../dist/cooja.jar", "-nogui", "-test=" + testName }; externalCoojaProcess = Runtime.getRuntime().exec(command, null, testDir); final BufferedReader input = new BufferedReader(new InputStreamReader(externalCoojaProcess.getInputStream())); final BufferedReader err = new BufferedReader(new InputStreamReader(externalCoojaProcess.getErrorStream())); final JButton button = new JButton("Abort test"); button.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { externalCoojaProcess.destroy(); if (progressDialog.isDisplayable()) { progressDialog.dispose(); } } }); progressPanel.add(BorderLayout.CENTER, new JScrollPane(testOutput)); progressPanel.add(BorderLayout.SOUTH, button); progressPanel.setBorder(BorderFactory.createEmptyBorder(20, 20, 20, 20)); progressPanel.setVisible(true); progressDialog.getContentPane().add(progressPanel); progressDialog.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE); progressDialog.getRootPane().setDefaultButton(button); progressDialog.setSize(500, 300); progressDialog.setLocationRelativeTo(ScriptRunner.this); progressDialog.setVisible(true); Thread readInput = new Thread(new Runnable() { public void run() { String readLine; try { while ((readLine = input.readLine()) != null) { if (normal != null) { normal.println(readLine); } } } catch (IOException e) { logger.warn("Error while reading from process"); } normal.println(""); normal.println(""); normal.println(""); /* Parse log file for success info */ try { BufferedReader in = new BufferedReader(new InputStreamReader( new FileInputStream(logFile))); boolean testSucceeded = false; while (in.ready()) { String line = in.readLine(); if (line == null) { line = ""; } normal.println(line); if (line.contains("TEST OK")) { testSucceeded = true; break; } } if 
(testSucceeded) { progressDialog.setTitle("Test run completed. Test succeeded!"); button.setText("Test OK"); } else { progressDialog.setTitle("Test run completed. Test failed!"); button.setText("Test failed"); } } catch (FileNotFoundException e) { logger.fatal("File not found: " + e); progressDialog.setTitle("Test run completed. Test failed! (no logfile)"); button.setText("Test failed"); } catch (IOException e) { logger.fatal("IO error: " + e); progressDialog.setTitle("Test run completed. Test failed! (IO exception)"); button.setText("Test failed"); } } }, "read input stream thread"); Thread readError = new Thread(new Runnable() { public void run() { String readLine; try { while ((readLine = err.readLine()) != null) { if (error != null) { error.println(readLine); } } } catch (IOException e) { logger.warn("Error while reading from process"); } } }, "read input stream thread"); readInput.start(); readError.start(); } catch (IOException e) { e.printStackTrace(); } } public void closePlugin() { if (scriptTester != null) { scriptTester.deactiveScript(); scriptTester.setScriptLogObserver(null); } } public Collection<Element> getConfigXML() { return null; } public boolean setConfigXML(Collection<Element> configXML, boolean visAvailable) { return true; } }
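/*
 * Illustrative sketch, not part of ScriptRunner: exportAsContikiTest above
 * forces every <delaytime> element in the exported simulation config to 0
 * with a regular expression before writing the .csc file.  The standalone
 * helper below reproduces just that rewrite so it can be exercised without
 * a running Cooja GUI; the class and method names are assumptions made for
 * this example.
 */
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class DelayTimeRewriter {

    /** Replaces the value of every <delaytime> element with 0. */
    static String zeroDelayTimes(String configXml) {
        Matcher matcher =
            Pattern.compile("<delaytime>([^<]*)</delaytime>").matcher(configXml);
        String result = configXml;
        while (matcher.find()) {
            result = result.replace(
                "<delaytime>" + matcher.group(1) + "</delaytime>",
                "<delaytime>0</delaytime>");
        }
        return result;
    }

    public static void main(String[] args) {
        String xml = "<simconf><delaytime>100</delaytime></simconf>";
        // Prints: <simconf><delaytime>0</delaytime></simconf>
        System.out.println(zeroDelayTimes(xml));
    }
}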
package hulop.hokoukukan.bean; import java.awt.geom.Line2D; import java.awt.geom.Point2D; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.wink.json4j.JSONArray; import org.apache.wink.json4j.JSONException; import org.apache.wink.json4j.JSONObject; import org.jgrapht.WeightedGraph; import org.jgrapht.alg.DijkstraShortestPath; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.SimpleDirectedWeightedGraph; import hulop.hokoukukan.servlet.RouteSearchServlet; import hulop.hokoukukan.utils.DBAdapter; public class RouteSearchBean { public static final DBAdapter adapter = DatabaseBean.adapter; private static final double WEIGHT_IGNORE = Double.MAX_VALUE; private static final double ESCALATOR_WEIGHT = RouteSearchServlet.getEnvInt("ESCALATOR_WEIGHT", 100); private static final double STAIR_WEIGHT = RouteSearchServlet.getEnvInt("STAIR_WEIGHT", 300); private static final double ELEVATOR_WEIGHT = RouteSearchServlet.getEnvInt("ELEVATOR_WEIGHT", 300); private long mLastInit = System.currentTimeMillis(); private JSONObject mNodeMap, mTempNode, mTempLink1, mTempLink2; private JSONArray mFeatures, mLandmarks, mDoors; private Set<String> mElevatorNodes; public RouteSearchBean() { } public void init(double[] point, double distance, String lang, boolean cache) throws JSONException { System.out.println("RouteSearchBean init lang=" + lang); RouteData rd = cache ? RouteData.getCache(point, distance) : new RouteData(point, distance); mTempNode = mTempLink1 = mTempLink2 = null; mNodeMap = rd.getNodeMap(); mFeatures = rd.getFeatures(); mLandmarks = rd.getLandmarks(lang); mDoors = rd.getDoors(); mElevatorNodes = rd.getElevatorNodes(); mLastInit = System.currentTimeMillis(); } public long getLastInit() { return mLastInit; } public JSONObject getNodeMap() { return mNodeMap; } public JSONArray getFeatures() { return mFeatures; } public JSONArray getLandmarks() { return mLandmarks; } public Object getDirection(String from, String to, Map<String, String> conditions) throws JSONException { mLastInit = System.currentTimeMillis(); mTempNode = mTempLink1 = mTempLink2 = null; JSONObject fromPoint = getPoint(from), toPoint = getPoint(to); if (fromPoint != null) { from = findNearestLink(fromPoint); if (from == null) { return null; } if (!mNodeMap.has(from)) { for (Object feature : mFeatures) { JSONObject json = (JSONObject) feature; if (from.equals(json.get("_id"))) { try { from = createTempNode(fromPoint, json); } catch (Exception e) { e.printStackTrace(); } break; } } } } else { from = extractNode(from); } if (toPoint != null) { to = adapter.findNearestNode(new double[] { toPoint.getDouble("lng"), toPoint.getDouble("lat") }, toPoint.has("floors") ? 
toPoint.getJSONArray("floors") : null); } if (from != null && from.equals(extractNode(to))) { return new JSONObject().put("error", "zero-distance"); } DirectionHandler dh = new DirectionHandler(from, to, conditions); for (Object feature : mFeatures) { dh.add(feature); } if (mTempNode != null) { dh.add(mTempLink1); dh.add(mTempLink2); } return dh.getResult(); } private class DirectionHandler { private WeightedGraph<String, DefaultWeightedEdge> g = new SimpleDirectedWeightedGraph<String, DefaultWeightedEdge>( DefaultWeightedEdge.class); private Map<Object, JSONObject> linkMap = new HashMap<Object, JSONObject>(); private JSONArray result = new JSONArray(); private String from, to; private Map<String, String> conditions; public DirectionHandler(String from, String to, Map<String, String> conditions) { this.from = from; this.to = to; this.conditions = conditions; } public void add(Object feature) throws JSONException { JSONObject json = (JSONObject) feature; JSONObject properties = json.getJSONObject("properties"); switch (properties.getString("category")) { case "": double weight = 10.0f; try { weight = Double.parseDouble(properties.getString("")); if (properties.has("road_low_priority") && "1".equals(properties.getString("road_low_priority"))) { weight *= 1.25; } } catch (Exception e) { } weight = adjustAccWeight(properties, conditions, weight); if (weight == WEIGHT_IGNORE) { break; } String start, end; try { start = properties.getString("ID"); end = properties.getString("ID"); } catch (Exception e) { break; } if (from == null) { try { properties.put("sourceHeight", getHeight(start)); properties.put("targetHeight", getHeight(end)); } catch (Exception e) { } result.add(json); break; } g.addVertex(start); g.addVertex(end); DefaultWeightedEdge startEnd = null, endStart = null; switch (properties.getString("")) { case "1": startEnd = g.addEdge(start, end); break; case "2": endStart = g.addEdge(end, start); break; default: startEnd = g.addEdge(start, end); endStart = g.addEdge(end, start); break; } if (startEnd != null) { double add = !mElevatorNodes.contains(start) && mElevatorNodes.contains(end) ? ELEVATOR_WEIGHT : 0; g.setEdgeWeight(startEnd, weight + add); linkMap.put(startEnd, json); } if (endStart != null) { double add = mElevatorNodes.contains(start) && !mElevatorNodes.contains(end) ? 
ELEVATOR_WEIGHT : 0; g.setEdgeWeight(endStart, weight + add); linkMap.put(endStart, json); } break; } } public JSONArray getResult() { if (from != null) { try { // System.out.println(from + " - " + to + " - " + g.toString()); double lastWeight = Double.MAX_VALUE; List<DefaultWeightedEdge> path = null; for (String t : to.split("\\|")) { t = t.trim(); if (t.length() > 0) { try { List<DefaultWeightedEdge> p = DijkstraShortestPath.findPathBetween(g, from, extractNode(t)); if (p != null && p.size() > 0) { double totalWeight = 0; for (DefaultWeightedEdge edge : p) { totalWeight += g.getEdgeWeight(edge); } if (lastWeight > totalWeight) { lastWeight = totalWeight; path = p; to = t; } } } catch (Exception e) { System.err.println("No route to " + t); } } } if (path != null && path.size() > 0) { JSONObject fromNode = (JSONObject) getNode(from).clone(); result.add(fromNode); for (DefaultWeightedEdge edge : path) { JSONObject link = linkMap.get(edge); try { JSONObject properties = link.getJSONObject("properties"); String edgeSource = g.getEdgeSource(edge); String edgeTarget = g.getEdgeTarget(edge); String sourceDoor = getDoor(edgeSource); String targetDoor = getDoor(edgeTarget); properties.put("sourceNode", edgeSource); properties.put("targetNode", edgeTarget); properties.put("sourceHeight", getHeight(edgeSource)); properties.put("targetHeight", getHeight(edgeTarget)); if (sourceDoor != null) { properties.put("sourceDoor", sourceDoor); } else { properties.remove("sourceDoor"); } if (targetDoor != null) { properties.put("targetDoor", targetDoor); } else { properties.remove("targetDoor"); } } catch (Exception e) { e.printStackTrace(); } result.add(link); } JSONObject toNode = (JSONObject) getNode(extractNode(to)).clone(); toNode.put("_id", to); result.add(toNode); } // System.out.println(new KShortestPaths(g, from, // 3).getPaths(to)); } catch (Exception e) { e.printStackTrace(); } } return result; } private double adjustAccWeight(JSONObject properties, Map<String, String> conditions, double weight) throws JSONException { String linkType = properties.has("") ? properties.getString("") : ""; switch (linkType) { case "10": // Elevator weight = 0.0f; break; case "7": // Moving walkway weight *= 0.75f; break; case "11": // Escalator weight = ESCALATOR_WEIGHT; break; case "12": // Stairs weight = STAIR_WEIGHT; break; } double penarty = Math.max(weight, 10.0f) * 9; String width = properties.has("") ? properties.getString("") : ""; // 0: less than 1m, // 1: >1m & <1.5m, // 2: >1.5m & <2m, // 3: >2m // 9: unknown try { switch (conditions.get("min_width")) { case "1": if (width.equals("0") || width.equals("1") || width.equals("2") || width.equals("9")) { return WEIGHT_IGNORE; } break; case "2": // >1.5m if (width.equals("0") || width.equals("1") || width.equals("9")) { return WEIGHT_IGNORE; } break; case "3": // >1.0m if (width.equals("0") || width.equals("9")) { return WEIGHT_IGNORE; } break; case "8": // Avoid if (width.equals("0") || width.equals("1") || width.equals("2") || width.equals("9")) { weight += penarty; } break; } } catch (NullPointerException npe) { } float slope = 0; try { slope = Float.parseFloat(properties.getString("1")); } catch (Exception e) { } // Maximum slope value (%) along the link try { switch (conditions.get("slope")) { case "1": if (slope >= 8.0) { return WEIGHT_IGNORE; } break; case "2": if (slope >= 10.0) { return WEIGHT_IGNORE; } break; case "8": // Avoid if (slope >= 8.0) { weight += penarty; } break; } } catch (NullPointerException npe) { } String road = properties.has("") ? 
properties.getString("") : ""; // 0: no problem, 1: dirt, 2: gravel, 3: other, 9: unknown try { switch (conditions.get("road_condition")) { case "1": // No problem if (road.equals("1") || road.equals("2") || road.equals("3") || road.equals("9")) { return WEIGHT_IGNORE; } break; case "8": // Avoid if (road.equals("1") || road.equals("2") || road.equals("3") || road.equals("9")) { weight += penarty; } break; } } catch (NullPointerException npe) { } String bump = properties.has("") ? properties.getString("") : ""; // 0: less than 2cm, 1: 2~5cm, 2: 5~10cm, 3: more than 10cm, 9: unknown // (assign max bump height for whole link) try { switch (conditions.get("deff_LV")) { case "1": // <2cm if (bump.equals("1") || bump.equals("2") || bump.equals("3") || bump.equals("9")) { return WEIGHT_IGNORE; } break; case "2": // <5cm if (bump.equals("2") || bump.equals("3") || bump.equals("9")) { return WEIGHT_IGNORE; } break; case "3": // <10cm if (bump.equals("3") || bump.equals("9")) { return WEIGHT_IGNORE; } break; case "8": // Avoid if (bump.equals("1") || bump.equals("2") || bump.equals("3") || bump.equals("9")) { weight += penarty; } break; } } catch (NullPointerException npe) { } int steps = 0; try { steps = Integer.parseInt(properties.getString("")); } catch (Exception e) { } // number of steps along a stairway // if (linkType.equals("12") && steps == 0) { // System.out.println("Error: steps should > 0"); String rail = properties.has("") ? properties.getString("") : ""; // 0: no, 1: on the right, 2: on the left, 3: both sides, 9: unknown // (link direction - start node to end node) try { switch (conditions.get("stairs")) { case "1": // Do not use if (steps > 0) { return WEIGHT_IGNORE; } break; case "2": // Use with hand rail if (steps > 0 && !(rail.equals("1") || rail.equals("2") || rail.equals("3"))) { return WEIGHT_IGNORE; } break; case "8": // Avoid if (steps > 0) { weight += penarty; } break; } } catch (NullPointerException npe) { } String elevator = properties.has("") ? properties.getString("") : ""; // 0: not included, 1: braille and audio, 2: wheelchair, 3: 1&2, 9: // unknown try { switch (conditions.get("elv")) { case "1": // Do not use if (elevator.equals("0") || elevator.equals("1") || elevator.equals("2") || elevator.equals("3") || elevator.equals("9")) { return WEIGHT_IGNORE; } case "2": // Wheel chair supported if (elevator.equals("0") || elevator.equals("1") || elevator.equals("9")) { return WEIGHT_IGNORE; } case "8": // Avoid if (elevator.equals("0") || elevator.equals("1") || elevator.equals("9")) { weight += penarty; } break; } } catch (NullPointerException npe) { } try { switch (conditions.get("esc")) { case "1": // Do not use if (linkType.equals("11")) { return WEIGHT_IGNORE; } case "8": // Avoid if (linkType.equals("11")) { weight += penarty; } break; } } catch (NullPointerException npe) { } try { switch (conditions.get("mvw")) { case "1": // Do not use if (linkType.equals("7")) { return WEIGHT_IGNORE; } case "8": // Avoid if (linkType.equals("7")) { weight += penarty; } break; } } catch (NullPointerException npe) { } if (properties.has("") && "1".equals(properties.getString(""))) { // 0: no, 1: yes, 9: unknown (tactile paving along the path/link) if ("1".equals(conditions.get("tactile_paving"))) { weight = weight / 3; } } return weight; } } private static String extractNode(String id) { return id != null ? 
id.split(":")[0] : null; } private static JSONObject getPoint(String node) { if (node != null) { String[] params = node.split(":"); if (params.length >= 3 && params[0].equals("latlng")) { JSONObject point = new JSONObject(); try { if (params.length > 3) { List<String> floors = new ArrayList<String>(); floors.add(params[3]); if (params[3].equals("1")) { floors.add("0"); } point.put("floors", floors); } return point.put("lat", Double.parseDouble(params[1])).put("lng", Double.parseDouble(params[2])); } catch (JSONException e) { e.printStackTrace(); } } } return null; } private boolean isNode(String id) { return tempNodeID.equals(id) ? mTempNode != null : mNodeMap.has(id); } private JSONObject getNode(String id) throws JSONException { return tempNodeID.equals(id) ? mTempNode : mNodeMap.getJSONObject(id); } private float getHeight(String node) throws NumberFormatException, JSONException { return Float.parseFloat(getNode(node).getJSONObject("properties").getString("").replace("B", "-")); } private String getDoor(String node) throws JSONException { if (countLinks(node) <= 2) { for (Object p : mDoors) { JSONObject properties = (JSONObject) p; if (node.equals(properties.getString("ID"))) { return properties.getString(""); } } } return null; } private int countLinks(String node) { try { int count = 0; JSONObject properties = getNode(node).getJSONObject("properties"); for (int i = 1; i <= 10; i++) { if (properties.has("ID" + i)) { count++; } } return count; } catch (Exception e) { e.printStackTrace(); } return 0; } private static final double METERS_PER_DEGREE = 60.0 * 1.1515 * 1.609344 * 1000; private static double deg2rad(double deg) { return (deg * Math.PI / 180.0); } public static double calcDistance(double[] point1, double[] point2) { double theta = deg2rad(point1[0] - point2[0]); double lat1 = deg2rad(point1[1]), lat2 = deg2rad(point2[1]); double dist = Math.sin(lat1) * Math.sin(lat2) + Math.cos(lat1) * Math.cos(lat2) * Math.cos(theta); return METERS_PER_DEGREE * Math.acos(dist) * 180.0 / Math.PI; } static final String INVALID_LINKS = "|7|10|11|"; private String findNearestLink(JSONObject fromPoint) { try { List<String> floors = fromPoint.has("floors") ? 
fromPoint.getJSONArray("floors") : null; List<JSONObject> links = new ArrayList<JSONObject>(); for (Object feature : mFeatures) { JSONObject json = (JSONObject) feature; JSONObject properties = json.getJSONObject("properties"); String startID, endID; try { startID = properties.getString("ID"); endID = properties.getString("ID"); } catch (Exception e) { continue; } if (INVALID_LINKS.indexOf("|" + properties.getString("") + "|") == -1) { if (isNode(startID) && isNode(endID)) { if (floors == null) { links.add(json); } else { String startHeight = getNode(startID).getJSONObject("properties").getString(""); String endHeight = getNode(endID).getJSONObject("properties").getString(""); if (floors.indexOf(startHeight) != -1 && floors.indexOf(endHeight) != -1) { links.add(json); } } } } } if (links.size() > 0) { final Point2D.Double pt = new Point2D.Double(fromPoint.getDouble("lng"), fromPoint.getDouble("lat")); double minDist = 100; JSONObject nearestLink = null; for (JSONObject json : links) { double dist = calc2Ddistance(json.getJSONObject("geometry").getJSONArray("coordinates"), pt, null); if (dist < minDist) { minDist = dist; nearestLink = json; } } if (nearestLink != null) { return nearestLink.getString("_id"); } } } catch (Exception e) { e.printStackTrace(); } return null; } private static double calc2Ddistance(JSONArray coordinates, Point2D.Double pt, int[] seg) throws Exception { double result = -1; Point2D.Double from = get2DPoint(coordinates, 0); for (int i = 1; i < coordinates.size() && result != 0.0; i++) { Point2D.Double to = get2DPoint(coordinates, i); double dist = Line2D.ptSegDist(from.x, from.y, to.x, to.y, pt.x, pt.y); from = to; if (result < 0 || dist < result) { result = dist; if (seg != null) { seg[0] = i - 1; } } } return result; } private static Point2D.Double get2DPoint(JSONArray coordinates, int index) throws Exception { JSONArray coord = coordinates.getJSONArray(index); // return new Point2D.Double(coord.getDouble(0), coord.getDouble(1)); return new Point2D.Double(((Double) coord.get(0)).doubleValue(), ((Double) coord.get(1)).doubleValue()); } static final String tempNodeID = "_TEMP_NODE_", tempLink1ID = "_TEMP_LINK1_", tempLink2ID = "_TEMP_LINK2_"; private String createTempNode(JSONObject point, JSONObject link) throws Exception { JSONArray linkCoords = link.getJSONObject("geometry").getJSONArray("coordinates"); JSONArray nodeCoord = new JSONArray().put(point.getDouble("lng")).put(point.getDouble("lat")); Object pos = getOrthoCenter(linkCoords, nodeCoord, link.getJSONObject("properties")); int lineSeg = 0; if (pos instanceof String) { return (String) pos; } else if (pos instanceof JSONObject) { JSONObject o = (JSONObject) pos; nodeCoord = new JSONArray().put(o.getDouble("x")).put(o.getDouble("y")); lineSeg = o.getInt("seg"); } // Create temp node String tempFloor = point.has("floors") ? 
point.getJSONArray("floors").getString(0) : "0"; mTempNode = new JSONObject(); final JSONObject geometry = new JSONObject(); final JSONObject nodeProp = new JSONObject(); geometry.put("type", "Point"); geometry.put("coordinates", nodeCoord); nodeProp.put("category", ""); nodeProp.put("ID", tempNodeID); nodeProp.put("", tempFloor); nodeProp.put("ID1", tempLink1ID); nodeProp.put("ID2", tempLink2ID); mTempNode.put("_id", tempNodeID); mTempNode.put("type", "Feature"); mTempNode.put("geometry", geometry); mTempNode.put("properties", nodeProp); // System.out.println(tempNode.toString(4)); // Create temp links mTempLink1 = new JSONObject(link.toString()); mTempLink2 = new JSONObject(link.toString()); mTempLink1.put("_id", tempLink1ID); mTempLink2.put("_id", tempLink2ID); final JSONObject link1Geo = mTempLink1.getJSONObject("geometry"), link2Geo = mTempLink2.getJSONObject("geometry"); JSONArray link1Coords = link1Geo.getJSONArray("coordinates"); JSONArray link2Coords = link2Geo.getJSONArray("coordinates"); for (int i = 0; i < linkCoords.length(); i++) { if (i > lineSeg) { link1Coords.remove(link1Coords.length() - 1); } else { link2Coords.remove(0); } } JSONArray link1Last = link1Coords.getJSONArray(link1Coords.length() - 1); if (!link1Last.equals(nodeCoord)) { link1Coords.add(nodeCoord); } JSONArray link2first = link2Coords.getJSONArray(0); if (!link2first.equals(nodeCoord)) { link2Coords.add(0, nodeCoord); } final JSONObject link1Prop = mTempLink1.getJSONObject("properties"), link2Prop = mTempLink2.getJSONObject("properties"); link1Prop.put("ID", tempLink1ID); link1Prop.put("ID", tempNodeID); link2Prop.put("ID", tempLink2ID); link2Prop.put("ID", tempNodeID); setLength(mTempLink1); setLength(mTempLink2); return tempNodeID; } private static void setLength(JSONObject link) throws Exception { JSONArray linkCoords = link.getJSONObject("geometry").getJSONArray("coordinates"); double length = 0; for (int i = 0; i < linkCoords.length() - 1; i++) { JSONArray from = linkCoords.getJSONArray(i); JSONArray to = linkCoords.getJSONArray(i + 1); length += calcDistance(new double[] { from.getDouble(0), from.getDouble(1) }, new double[] { to.getDouble(0), to.getDouble(1) }); } link.getJSONObject("properties").put("", Double.toString(length)); } private static Object getOrthoCenter(JSONArray line, JSONArray point, JSONObject linkProp) throws Exception { Point2D.Double c = new Point2D.Double(point.getDouble(0), point.getDouble(1)); int[] seg = new int[] { 0 }; calc2Ddistance(line, c, seg); JSONArray start = line.getJSONArray(seg[0]); JSONArray end = line.getJSONArray(seg[0] + 1); Point2D.Double a = new Point2D.Double(start.getDouble(0), start.getDouble(1)); Point2D.Double b = new Point2D.Double(end.getDouble(0), end.getDouble(1)); double distCA = Point2D.distance(a.x, a.y, c.x, c.y); double distCB = Point2D.distance(b.x, b.y, c.x, c.y); double distCX = Line2D.ptSegDist(a.x, a.y, b.x, b.y, c.x, c.y); if (distCA <= distCX && seg[0] == 0) { return linkProp.getString("ID"); } else if (distCB <= distCX && seg[0] == line.length() - 2) { return linkProp.getString("ID"); } else { double distAB = Point2D.distance(a.x, a.y, b.x, b.y); double distAX = Math.sqrt(distCA * distCA - distCX * distCX); double timeAX = Math.max(0, Math.min(distAX / distAB, 1)); double x = (b.x - a.x) * timeAX + a.x; double y = (b.y - a.y) * timeAX + a.y; return new JSONObject().put("x", x).put("y", y).put("seg", seg[0]); } } static double[] CURRENT_POINT = { 139.77392703294754, 35.68662700502585 }; static double MAX_DISTANCE = 500; public static void 
main(String[] args) { try { RouteSearchBean search = new RouteSearchBean(); search.init(CURRENT_POINT, MAX_DISTANCE, "en", false); String from = "latlng:" + CURRENT_POINT[0] + ":" + CURRENT_POINT[1]; JSONArray landmarks = search.getLandmarks(); JSONObject toNode = landmarks.getJSONObject((int) ((Math.random() / 2 + 0.25) * landmarks.length())); String to = toNode.getString("node"); Object direction = search.getDirection(from, to, new HashMap<String, String>()); System.out.println(direction.toString()); System.out.println(from + " to " + toNode.getString("name")); } catch (JSONException e) { e.printStackTrace(); } } }
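/*
 * Illustrative sketch, not part of RouteSearchBean: calcDistance above uses
 * the spherical law of cosines, scaled by METERS_PER_DEGREE, to measure link
 * lengths in metres from [lng, lat] coordinates.  The standalone version
 * below mirrors that formula so it can be sanity-checked in isolation; the
 * class name and the second sample point are assumptions of this example.
 */
class GreatCircle {

    private static final double METERS_PER_DEGREE = 60.0 * 1.1515 * 1.609344 * 1000;

    /** Distance in metres between two [lng, lat] points given in degrees. */
    static double distance(double[] p1, double[] p2) {
        double theta = Math.toRadians(p1[0] - p2[0]);
        double lat1 = Math.toRadians(p1[1]);
        double lat2 = Math.toRadians(p2[1]);
        double cosAngle = Math.sin(lat1) * Math.sin(lat2)
                + Math.cos(lat1) * Math.cos(lat2) * Math.cos(theta);
        return METERS_PER_DEGREE * Math.toDegrees(Math.acos(cosAngle));
    }

    public static void main(String[] args) {
        double[] current = { 139.77392703294754, 35.68662700502585 }; // CURRENT_POINT above
        double[] nearby  = { 139.77492703294754, 35.68762700502585 }; // ~0.001 degree offset
        // Prints the separation in metres (on the order of 100 m for this pair).
        System.out.printf("%.1f m%n", distance(current, nearby));
    }
}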
package org.jenetics.colorizer; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.HashSet; import java.util.Set; public final class Colorize { public static void main(final String[] args) { final File dir = new File(args[0]); if (!dir.isDirectory()) { System.err.println(args[0] + " is not a directory."); System.exit(1); } try { final Colorizer colorizer = new Colorizer(); Files.walkFileTree(dir.toPath(), colorizer); System.out.println(String.format( "Colorizer processed %d files and modified %d.", colorizer.getProcessed(), colorizer.getModified() )); } catch (IOException e) { System.err.println("Error while processing files: " + e); System.exit(1); } } private static final class Colorizer extends SimpleFileVisitor<Path> { private static final String ENCODING = "UTF-8"; private int _processed = 0; private int _modified = 0; public int getProcessed() { return _processed; } public int getModified() { return _modified; } @Override public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) { if (file.toString().endsWith(".html")) { try { colorize(file); } catch (IOException e) { System.out.println("Error while processing file: " + file); return FileVisitResult.TERMINATE; } } return FileVisitResult.CONTINUE; } private void colorize(final Path file) throws IOException { _processed++; try (BufferedReader in = new BufferedReader(new InputStreamReader( new FileInputStream(file.toFile()), ENCODING))) { final StringBuilder doc = new StringBuilder(10000); State state = State.DATA; boolean modified = false; for (int read = in.read(); read != -1; read = in.read()) { if (state != State.DATA) { if (read == '<') { doc.append("&lt;"); } else if (read == '>') { doc.append("&gt;"); } else if (read == '&') { doc.append("&amp;"); } else { doc.append((char)read); } } else { doc.append((char)read); } if (state == State.CODE) { modified = true; } state = state.apply(read, doc); } if (modified) { _modified++; try (final OutputStreamWriter out = new OutputStreamWriter( new FileOutputStream(file.toFile()), ENCODING) ) { out.write(doc.toString()); } } } } } private static enum State { DATA { @Override public State apply(final int read, final StringBuilder doc) { State state = this; if ((read == ']') && (doc.length() > 5) && doc.substring(doc.length() - 6).equalsIgnoreCase("[code]")) { doc.setLength(doc.length() - 6); doc.append("<div class=\"code\"><code lang=\"java\">"); state = SKIP_NL; } return state; } }, SKIP_NL { @Override public State apply(final int read, final StringBuilder doc) { State state = this; if (read == '\n') { doc.setLength(doc.length() - 1); state = CODE; } return state; } }, CODE { @Override public State apply(final int read, final StringBuilder doc) { State state = this; if (Character.isJavaIdentifierPart((char)read)) { state = IDENTIFIER; state._start = doc.length() - 1; } else if (read == '"') { state = STRING_LITERAL; doc.insert(doc.length() - 1, "<font color=\"" + STRING_COLOR + "\">"); } else if ((read == '/') && (doc.charAt(doc.length() - 2) == '/')) { state = COMMENT; doc.insert(doc.length() - 2, "<font color=\"" + COMMENT_COLOR + "\">"); } return state; } }, IDENTIFIER { @Override public State apply(final 
int read, final StringBuilder doc) { State state = this; if ((read == ']') && // code identifier. doc.substring(doc.length() - 7).equalsIgnoreCase("[/code]")) { int index = doc.lastIndexOf("\n"); doc.setLength(index); doc.append("</code></div>"); state = DATA; } else if (!Character.isJavaIdentifierPart((char)read)) { // End of identifier. String name = doc.substring(_start, doc.length() - 1); if (IDENTIFIERS.contains(name)) { // Identifier found. doc.insert(_start + name.length(), "</b></font>"); doc.insert(_start, "<font color=\"" + KEYWORD_COLOR + "\"><b>"); } state = CODE; } return state; } }, STRING_LITERAL { @Override public State apply(final int read, final StringBuilder doc) { State state = this; if ((read == '"') && (doc.charAt(doc.length() - 2) != '\\')) { doc.append("</font>"); state = CODE; } return state; } }, COMMENT { @Override public State apply(final int read, final StringBuilder doc) { State state = this; if ((read == '\n') || (read == '\r')) { doc.insert(doc.length() - 1, "</font>"); state = CODE; } return state; } } ; int _start = -1; public abstract State apply(final int read, final StringBuilder doc); private static final String KEYWORD_COLOR = "#7F0055"; private static final String COMMENT_COLOR = "#3F7F5F"; private static final String STRING_COLOR = "#0000FF"; private static final String[] KEYWORDS = { "abstract", "assert", "boolean", "break", "byte", "case", "catch", "char", "class", "const", "continue", "default", "do", "double", "else", "enum", "extends", "final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof", "int", "interface", "long", "native", "new", "package", "private", "protected", "public", "return", "short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw", "throws", "transient", "try", "void", "volatile", "while" }; private static final Set<String> IDENTIFIERS = new HashSet<>(); static { for (int i = 0; i < KEYWORDS.length; i++) { IDENTIFIERS.add(KEYWORDS[i]); } } } }
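/*
 * Illustrative sketch, not part of the Jenetics colorizer: Colorize.State
 * above is an enum-based state machine in which each constant overrides
 * apply() and returns the next state for the character just read.  The
 * miniature machine below uses the same idiom for a simpler task, counting
 * complete double-quoted string literals while honouring backslash escapes;
 * the task and all names are assumptions made for this example.
 */
class LiteralCounter {

    private enum State {
        CODE {
            @Override State apply(char c) { return c == '"' ? IN_STRING : CODE; }
        },
        IN_STRING {
            @Override State apply(char c) {
                if (c == '\\') { return ESCAPED; }
                return c == '"' ? CODE : IN_STRING;
            }
        },
        ESCAPED {
            @Override State apply(char c) { return IN_STRING; }
        };

        abstract State apply(char c);
    }

    /** Number of complete string literals in the given text. */
    static int count(String text) {
        State state = State.CODE;
        int literals = 0;
        for (int i = 0; i < text.length(); i++) {
            State next = state.apply(text.charAt(i));
            if (state == State.IN_STRING && next == State.CODE) {
                literals++; // a closing, unescaped quote was just consumed
            }
            state = next;
        }
        return literals;
    }

    public static void main(String[] args) {
        // Two literals: "a" and "b\"c"
        System.out.println(count("print(\"a\"); print(\"b\\\"c\");"));
    }
}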
package placebooks.client.ui; import placebooks.client.AbstractCallback; import placebooks.client.PlaceBookService; import placebooks.client.Resources; import placebooks.client.model.PlaceBook; import placebooks.client.model.PlaceBookItem; import placebooks.client.model.Shelf; import placebooks.client.ui.dialogs.PlaceBookPublishDialog; import placebooks.client.ui.elements.DropMenu; import placebooks.client.ui.elements.PlaceBookCanvas; import placebooks.client.ui.elements.PlaceBookInteractionHandler; import placebooks.client.ui.elements.PlaceBookPanel; import placebooks.client.ui.elements.PlaceBookToolbarItem; import placebooks.client.ui.items.MapItem; import placebooks.client.ui.items.frames.PlaceBookItemFrame; import placebooks.client.ui.items.frames.PlaceBookItemPopupFrame; import placebooks.client.ui.palette.Palette; import com.google.gwt.core.client.GWT; import com.google.gwt.core.client.JsArray; import com.google.gwt.dom.client.NativeEvent; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.event.dom.client.KeyUpEvent; import com.google.gwt.event.dom.client.MouseOutEvent; import com.google.gwt.event.dom.client.MouseOverEvent; import com.google.gwt.event.shared.EventBus; import com.google.gwt.http.client.Request; import com.google.gwt.http.client.Response; import com.google.gwt.place.shared.PlaceTokenizer; import com.google.gwt.place.shared.Prefix; import com.google.gwt.uibinder.client.UiBinder; import com.google.gwt.uibinder.client.UiField; import com.google.gwt.uibinder.client.UiHandler; import com.google.gwt.user.client.Event; import com.google.gwt.user.client.Event.NativePreviewEvent; import com.google.gwt.user.client.Timer; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.AcceptsOneWidget; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.Label; import com.google.gwt.user.client.ui.Panel; import com.google.gwt.user.client.ui.PopupPanel; import com.google.gwt.user.client.ui.TextBox; import com.google.gwt.user.client.ui.Widget; public class PlaceBookEditor extends PlaceBookPlace { public class SaveContext extends Timer { private SaveState state = SaveState.saved; private static final int saveDelay = 2000; public SaveState getState() { return state; } public void markChanged() { cancel(); schedule(saveDelay); setState(SaveState.not_saved); // changed = true; } public void refreshMap() { for (final PlaceBookItemFrame itemFrame : canvas.getItems()) { if (itemFrame.getItemWidget() instanceof MapItem) { ((MapItem) itemFrame.getItemWidget()).refreshMarkers(); } } } @Override public void run() { setState(SaveState.saving); PlaceBookService.savePlaceBook(placebook, new AbstractCallback() { @Override public void failure(final Request request, final Response response) { markChanged(); setState(SaveState.save_error); } @Override public void success(final Request request, final Response response) { try { updatePlaceBook(PlaceBook.parse(response.getText())); setState(SaveState.saved); } catch (final Exception e) { failure(request, response); } } }); } private void setState(final SaveState state) { this.state = state; switch (state) { case saved: saveStatusPanel.setText("Saved"); saveStatusPanel.hideImage(); saveStatusPanel.setEnabled(false); break; case not_saved: saveStatusPanel.setText("Save"); saveStatusPanel.hideImage(); saveStatusPanel.setEnabled(true); break; case 
saving: saveStatusPanel.setText("Saving"); saveStatusPanel.setImage(Resources.IMAGES.progress2()); saveStatusPanel.setEnabled(false); break; case save_error: saveStatusPanel.setText("Error Saving"); saveStatusPanel.setImage(Resources.IMAGES.error()); saveStatusPanel.setEnabled(true); break; default: break; } } } public enum SaveState { not_saved, save_error, saved, saving } @Prefix("edit") public static class Tokenizer implements PlaceTokenizer<PlaceBookEditor> { @Override public PlaceBookEditor getPlace(final String token) { return new PlaceBookEditor(token, null); } @Override public String getToken(final PlaceBookEditor place) { return place.getKey(); } } interface PlaceBookEditorUiBinder extends UiBinder<Widget, PlaceBookEditor> { } private final static String newPlaceBook = "{\"items\":[], \"metadata\":{} }"; private static final PlaceBookEditorUiBinder uiBinder = GWT.create(PlaceBookEditorUiBinder.class); @UiField Panel backPanel; @UiField Panel canvasPanel; @UiField Panel loadingPanel; @UiField Palette palette; @UiField PlaceBookToolbarItem saveStatusPanel; @UiField Label zoomLabel; @UiField TextBox title; @UiField PlaceBookToolbarItem actionMenu; @UiField DropMenu dropMenu; private final PlaceBookCanvas canvas = new PlaceBookCanvas(); private final PlaceBookItemPopupFrame.Factory factory = new PlaceBookItemPopupFrame.Factory(); private PlaceBookInteractionHandler interactionHandler; private PlaceBook placebook; private SaveContext saveContext = new SaveContext(); private int zoom = 100; private final String placebookKey; public PlaceBookEditor(final PlaceBook placebook, final Shelf shelf) { super(shelf); this.placebook = placebook; this.placebookKey = placebook.getKey(); } public PlaceBookEditor(final String placebookKey, final Shelf shelf) { super(shelf); this.placebookKey = placebookKey; this.placebook = null; } public PlaceBookCanvas getCanvas() { return canvas; } public PlaceBookInteractionHandler getDragHandler() { return interactionHandler; } public SaveContext getSaveContext() { return saveContext; } public void markChanged() { saveContext.markChanged(); } @Override public String mayStop() { if (saveContext.getState() != SaveState.saved) { return "The current PlaceBook has unsaved changes. 
Are you sure you want to leave?"; } return super.mayStop(); } public void setPlaceBook(final PlaceBook newPlacebook) { placebook = newPlacebook; canvas.setPlaceBook(newPlacebook, factory, true); if (newPlacebook.hasMetadata("title")) { Window.setTitle(newPlacebook.getMetadata("title") + " - PlaceBooks Editor"); title.setText(newPlacebook.getMetadata("title")); } else { Window.setTitle("PlaceBooks Editor"); title.setText("No Title"); } loadingPanel.setVisible(false); canvas.reflow(); } @Override public void start(final AcceptsOneWidget panel, final EventBus eventBus) { final Widget editor = uiBinder.createAndBindUi(this); canvasPanel.add(canvas); Event.addNativePreviewHandler(new Event.NativePreviewHandler() { @Override public void onPreviewNativeEvent(final NativePreviewEvent event) { if ((event.getTypeInt() == Event.ONMOUSEDOWN || event.getTypeInt() == Event.ONMOUSEMOVE) && event.getNativeEvent().getButton() == NativeEvent.BUTTON_LEFT && event.getNativeEvent().getEventTarget().toString().startsWith("<img")) { event.getNativeEvent().preventDefault(); } } }); interactionHandler = new PlaceBookInteractionHandler(canvas, factory, saveContext); interactionHandler.setupUIElements(backPanel); factory.setInteractionHandler(interactionHandler); saveContext.setState(SaveState.saved); Window.setTitle("PlaceBooks Editor"); toolbar.setPlace(this); title.setMaxLength(64); updatePalette(); final Timer timer = new Timer() { @Override public void run() { updatePalette(); } }; timer.scheduleRepeating(120000); panel.setWidget(editor); if (placebook != null) { setPlaceBook(placebook); } else if (placebookKey.equals("new")) { setPlaceBook(PlaceBook.parse(newPlaceBook)); } else { PlaceBookService.getPlaceBook(placebookKey, new AbstractCallback() { @Override public void success(final Request request, final Response response) { final PlaceBook placebook = PlaceBook.parse(response.getText()); setPlaceBook(placebook); } }); } } public void updatePalette() { PlaceBookService.getPaletteItems(new AbstractCallback() { @Override public void failure(final Request request, final Response response) { if (response.getStatusCode() == 401) { getPlaceController().goTo(new PlaceBookHome()); } } @Override public void success(final Request request, final Response response) { final JsArray<PlaceBookItem> items = PlaceBookItem.parseArray(response.getText()); palette.setPalette(items, interactionHandler); } }); } @UiHandler("publish") void publish(final ClickEvent event) { final PlaceBookPublishDialog publish = new PlaceBookPublishDialog(PlaceBookEditor.this, canvas); publish.addClickHandler(new ClickHandler() { @Override public void onClick(final ClickEvent event) { loadingPanel.setVisible(true); publish.hide(); } }); publish.show(); publish.center(); publish.getElement().getStyle().setTop(50, Unit.PX); } @UiHandler("delete") void delete(final ClickEvent event) { final Panel panel = new FlowPanel(); final PopupPanel dialogBox = new PopupPanel(true, true); dialogBox.getElement().getStyle().setZIndex(2000); final Label warning = new Label( "You will not be able to get your placebook back after deleting it. 
Are you sure?"); final Button okbutton = new Button("Delete", new ClickHandler() { @Override public void onClick(final ClickEvent event) { PlaceBookService.deletePlaceBook(placebook.getKey(), new AbstractCallback() { @Override public void failure(final Request request, final Response response) { dialogBox.hide(); } @Override public void success(final Request request, final Response response) { dialogBox.hide(); getPlaceController().goTo(new PlaceBookHome()); } }); } }); final Button cancelButton = new Button("Cancel", new ClickHandler() { @Override public void onClick(final ClickEvent event) { dialogBox.hide(); } }); panel.add(warning); panel.add(okbutton); panel.add(cancelButton); dialogBox.setGlassStyleName(Resources.STYLES.style().glassPanel()); dialogBox.setStyleName(Resources.STYLES.style().popupPanel()); dialogBox.setGlassEnabled(true); dialogBox.setAnimationEnabled(true); dialogBox.setWidget(panel); dialogBox.center(); dialogBox.show(); } @UiHandler("title") void handleTitleEdit(final KeyUpEvent event) { canvas.getPlaceBook().setMetadata("title", title.getText()); saveContext.markChanged(); } @UiHandler("zoomIn") void handleZoomIn(final ClickEvent event) { setZoom(zoom + 20); } @UiHandler("zoomOut") void handleZoomOut(final ClickEvent event) { setZoom(zoom - 20); } private String getKey() { return placebookKey; } @UiHandler(value = { "dropMenu", "actionMenu" }) void hideMenu(final MouseOutEvent event) { dropMenu.startHideMenu(); } @UiHandler("dropMenu") void showMenu(final MouseOverEvent event) { dropMenu.showMenu(dropMenu.getAbsoluteLeft(), dropMenu.getAbsoluteTop()); } @UiHandler("actionMenu") void showMenuButton(final MouseOverEvent event) { dropMenu.showMenu(actionMenu.getAbsoluteLeft(), actionMenu.getAbsoluteTop() + actionMenu.getOffsetHeight()); } @UiHandler("preview") void preview(final ClickEvent event) { getPlaceController().goTo(new PlaceBookPreview(getShelf(), getCanvas().getPlaceBook())); } private void setZoom(final int zoom) { this.zoom = zoom; canvas.getElement().getStyle().setWidth(zoom, Unit.PCT); canvas.getElement().getStyle().setFontSize(zoom, Unit.PCT); zoomLabel.setText(zoom + "%"); for (final PlaceBookPanel panel : canvas.getPanels()) { panel.reflow(); } } private void updatePlaceBook(final PlaceBook newPlacebook) { if (placebook != null && (placebook.getKey() == null || !placebook.getKey().equals(newPlacebook.getKey()))) { canvas.updatePlaceBook(newPlacebook); final PlaceBook placebook = canvas.getPlaceBook(); placebook.setKey(newPlacebook.getKey()); saveContext.setState(SaveState.saved); getPlaceController().goTo(new PlaceBookEditor(placebook, getShelf())); } else { placebook = newPlacebook; canvas.updatePlaceBook(newPlacebook); canvas.reflow(); } } }
package org.apache.xerces.validators.schema; import org.apache.xerces.framework.XMLErrorReporter; import org.apache.xerces.validators.common.Grammar; import org.apache.xerces.validators.common.GrammarResolver; import org.apache.xerces.validators.common.GrammarResolverImpl; import org.apache.xerces.validators.common.XMLElementDecl; import org.apache.xerces.validators.common.XMLAttributeDecl; import org.apache.xerces.validators.schema.SchemaSymbols; import org.apache.xerces.validators.schema.XUtil; import org.apache.xerces.validators.schema.identity.Field; import org.apache.xerces.validators.schema.identity.IdentityConstraint; import org.apache.xerces.validators.schema.identity.Key; import org.apache.xerces.validators.schema.identity.KeyRef; import org.apache.xerces.validators.schema.identity.Selector; import org.apache.xerces.validators.schema.identity.Unique; import org.apache.xerces.validators.schema.identity.XPath; import org.apache.xerces.validators.schema.identity.XPathException; import org.apache.xerces.validators.datatype.DatatypeValidator; import org.apache.xerces.validators.datatype.DatatypeValidatorFactoryImpl; import org.apache.xerces.validators.datatype.IDDatatypeValidator; import org.apache.xerces.validators.datatype.NOTATIONDatatypeValidator; import org.apache.xerces.validators.datatype.StringDatatypeValidator; import org.apache.xerces.validators.datatype.ListDatatypeValidator; import org.apache.xerces.validators.datatype.UnionDatatypeValidator; import org.apache.xerces.validators.datatype.InvalidDatatypeValueException; import org.apache.xerces.utils.StringPool; import org.w3c.dom.Element; import java.io.IOException; import java.util.*; import java.net.URL; import java.net.MalformedURLException; //REVISIT: for now, import everything in the DOM package import org.w3c.dom.*; //Unit Test import org.apache.xerces.parsers.DOMParser; import org.apache.xerces.validators.common.XMLValidator; import org.apache.xerces.validators.datatype.DatatypeValidator.*; import org.apache.xerces.validators.datatype.InvalidDatatypeValueException; import org.apache.xerces.framework.XMLContentSpec; import org.apache.xerces.utils.QName; import org.apache.xerces.utils.NamespacesScope; import org.apache.xerces.parsers.SAXParser; import org.apache.xerces.framework.XMLParser; import org.apache.xerces.framework.XMLDocumentScanner; import org.xml.sax.InputSource; import org.xml.sax.SAXParseException; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.w3c.dom.Document; /** Don't check the following code in because it creates a dependency on the serializer, preventing to package the parser without the serializer. import org.apache.xml.serialize.OutputFormat; import org.apache.xml.serialize.XMLSerializer; **/ import org.apache.xerces.validators.schema.SchemaSymbols; /** * Instances of this class get delegated to Traverse the Schema and * to populate the Grammar internal representation by * instances of Grammar objects. * Traverse a Schema Grammar: * * @author Eric Ye, IBM * @author Jeffrey Rodriguez, IBM * @author Andy Clark, IBM * * @see org.apache.xerces.validators.common.Grammar * * @version $Id$ */ public class TraverseSchema implements NamespacesScope.NamespacesHandler{ //CONSTANTS private static final int TOP_LEVEL_SCOPE = -1; /** Identity constraint keywords. 
*/ private static final String[][] IDENTITY_CONSTRAINTS = { { SchemaSymbols.URI_SCHEMAFORSCHEMA, SchemaSymbols.ELT_UNIQUE }, { SchemaSymbols.URI_SCHEMAFORSCHEMA, SchemaSymbols.ELT_KEY }, { SchemaSymbols.URI_SCHEMAFORSCHEMA, SchemaSymbols.ELT_KEYREF }, }; // hopefully a string people will not use in their names very often... private static final String redefIdentifier = "_fn3dktizrknc9pi"; // Flags for handleOccurrences to indicate any special // restrictions on minOccurs and maxOccurs relating to "all". // NOT_ALL_CONTEXT - not processing an <all> // PROCESSING_ALL_EL - processing an <element> in an <all> // GROUP_REF_WITH_ALL - processing <group> reference that contained <all> // CHILD_OF_GROUP - processing a child of a model group definition // PROCESSING_ALL_GP - processing an <all> group itself private static final int NOT_ALL_CONTEXT = 0; private static final int PROCESSING_ALL_EL = 1; private static final int GROUP_REF_WITH_ALL = 2; private static final int CHILD_OF_GROUP = 4; private static final int PROCESSING_ALL_GP = 8; //debugging private static final boolean DEBUGGING = false; /** Compile to true to debug identity constraints. */ private static final boolean DEBUG_IDENTITY_CONSTRAINTS = false; private static final boolean DEBUG_NEW_GROUP = true; /** * Compile to true to debug datatype validator lookup for * identity constraint support. */ private static final boolean DEBUG_IC_DATATYPES = false; //private data members private boolean fFullConstraintChecking = false; private XMLErrorReporter fErrorReporter = null; private StringPool fStringPool = null; private GrammarResolver fGrammarResolver = null; private SchemaGrammar fSchemaGrammar = null; private Element fSchemaRootElement; // this is always set to refer to the root of the linked list containing the root info of schemas under redefinition. private SchemaInfo fSchemaInfoListRoot = null; private SchemaInfo fCurrentSchemaInfo = null; private boolean fRedefineSucceeded; private DatatypeValidatorFactoryImpl fDatatypeRegistry = null; private Hashtable fComplexTypeRegistry = new Hashtable(); private Hashtable fAttributeDeclRegistry = new Hashtable(); // stores the names of groups that we've traversed so we can avoid multiple traversals // qualified group names are keys and their contentSpecIndexes are values. private Hashtable fGroupNameRegistry = new Hashtable(); // this Hashtable keeps track of whether a given redefined group does so by restriction. private Hashtable fRestrictedRedefinedGroupRegistry = new Hashtable(); // stores "final" values of simpleTypes--no clean way to integrate this into the existing datatype validation structure... private Hashtable fSimpleTypeFinalRegistry = new Hashtable(); // stores <notation> decl private Hashtable fNotationRegistry = new Hashtable(); private Vector fIncludeLocations = new Vector(); private Vector fImportLocations = new Vector(); private Hashtable fRedefineLocations = new Hashtable(); private Vector fTraversedRedefineElements = new Vector(); // Hashtable associating attributeGroups within a <redefine> which // restrict attributeGroups in the original schema with the // new name for those groups in the modified redefined schema. 
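    // Editor's note, summarising how this map is used further down in this file:
    // keys are the names of attributeGroups being redefined; while the redefined
    // schema is being renamed the stored value is simply the replacement name (a
    // String), and once traverseIncludedSchema has traversed the renamed group
    // the entry is replaced by an Object[]{ComplexTypeInfo, SchemaGrammar,
    // Vector of anyAttribute decls} that traverseRedefine later casts back out
    // in order to run the attribute restriction check.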
private Hashtable fRedefineAttributeGroupMap = null; // simpleType data private Hashtable fFacetData = new Hashtable(10); private Stack fSimpleTypeNameStack = new Stack(); private String fListName = ""; private int fAnonTypeCount =0; private int fScopeCount=0; private int fCurrentScope=TOP_LEVEL_SCOPE; private int fSimpleTypeAnonCount = 0; private Stack fCurrentTypeNameStack = new Stack(); private Stack fBaseTypeNameStack = new Stack(); private Stack fCurrentGroupNameStack = new Stack(); private Vector fElementRecurseComplex = new Vector(); private Vector fTopLevelElementsRefdFromGroup = new Vector(); private Vector fSubstitutionGroupRecursionRegistry = new Vector(); private boolean fElementDefaultQualified = false; private boolean fAttributeDefaultQualified = false; private int fBlockDefault = 0; private int fFinalDefault = 0; private int fTargetNSURI; private String fTargetNSURIString = ""; private NamespacesScope fNamespacesScope = null; private String fCurrentSchemaURL = ""; private XMLAttributeDecl fTempAttributeDecl = new XMLAttributeDecl(); private XMLAttributeDecl fTemp2AttributeDecl = new XMLAttributeDecl(); private XMLElementDecl fTempElementDecl = new XMLElementDecl(); private XMLElementDecl fTempElementDecl2 = new XMLElementDecl(); private XMLContentSpec tempContentSpec1 = new XMLContentSpec(); private XMLContentSpec tempContentSpec2 = new XMLContentSpec(); private EntityResolver fEntityResolver = null; private SubstitutionGroupComparator fSComp = null; private Hashtable fIdentityConstraints = new Hashtable(); // Yet one more data structure; this one associates // <unique> and <key> QNames with their corresponding objects, // so that <keyRef>s can find them. private Hashtable fIdentityConstraintNames = new Hashtable(); // General Attribute Checking private GeneralAttrCheck fGeneralAttrCheck = null; private int fXsiURI; // REVISIT: maybe need to be moved into SchemaGrammar class public class ComplexTypeInfo { public String typeName; public DatatypeValidator baseDataTypeValidator; public ComplexTypeInfo baseComplexTypeInfo; public int derivedBy = 0; public int blockSet = 0; public int finalSet = 0; public int miscFlags=0; public int scopeDefined = -1; public int contentType; public int contentSpecHandle = -1; public int templateElementIndex = -1; public int attlistHead = -1; public DatatypeValidator datatypeValidator; public boolean isAbstractType() { return ((miscFlags & CT_IS_ABSTRACT)!=0); } public boolean containsAttrTypeID () { return ((miscFlags & CT_CONTAINS_ATTR_TYPE_ID)!=0); } public boolean declSeen () { return ((miscFlags & CT_DECL_SEEN)!=0); } public void setIsAbstractType() { miscFlags |= CT_IS_ABSTRACT; } public void setContainsAttrTypeID() { miscFlags |= CT_CONTAINS_ATTR_TYPE_ID; } public void setDeclSeen() { miscFlags |= CT_DECL_SEEN; } } private static final int CT_IS_ABSTRACT=1; private static final int CT_CONTAINS_ATTR_TYPE_ID=2; private static final int CT_DECL_SEEN=4; // indicates that the declaration was // traversed as opposed to processed due // to a forward reference private class ComplexTypeRecoverableError extends Exception { ComplexTypeRecoverableError() {super();} ComplexTypeRecoverableError(String s) {super(s);} } private class ParticleRecoverableError extends Exception { ParticleRecoverableError(String s) {super(s);} } private class GroupInfo { int contentSpecIndex = -1; int scope = -1; } private class ElementInfo { int elementIndex; String typeName; private ElementInfo(int i, String name) { elementIndex = i; typeName = name; } } //REVISIT: verify 
the URI. public final static String SchemaForSchemaURI = "http: private TraverseSchema( ) { // new TraverseSchema() is forbidden; } public void setFullConstraintCheckingEnabled() { fFullConstraintChecking = true; } public void setGrammarResolver(GrammarResolver grammarResolver){ fGrammarResolver = grammarResolver; } public void startNamespaceDeclScope(int prefix, int uri){ //TO DO } public void endNamespaceDeclScope(int prefix){ //TO DO, do we need to do anything here? } public boolean particleEmptiable(int contentSpecIndex) { if (!fFullConstraintChecking) { return true; } if (minEffectiveTotalRange(contentSpecIndex)==0) return true; else return false; } public int minEffectiveTotalRange(int contentSpecIndex) { fSchemaGrammar.getContentSpec(contentSpecIndex, tempContentSpec1); int type = tempContentSpec1.type; if (type == XMLContentSpec.CONTENTSPECNODE_SEQ || type == XMLContentSpec.CONTENTSPECNODE_ALL) { return minEffectiveTotalRangeSeq(contentSpecIndex); } else if (type == XMLContentSpec.CONTENTSPECNODE_CHOICE) { return minEffectiveTotalRangeChoice(contentSpecIndex); } else { return(fSchemaGrammar.getContentSpecMinOccurs(contentSpecIndex)); } } private int minEffectiveTotalRangeSeq(int csIndex) { fSchemaGrammar.getContentSpec(csIndex, tempContentSpec1); int type = tempContentSpec1.type; int left = tempContentSpec1.value; int right = tempContentSpec1.otherValue; int min = fSchemaGrammar.getContentSpecMinOccurs(csIndex); int result; if (right == -2) result = min * minEffectiveTotalRange(left); else result = min * (minEffectiveTotalRange(left) + minEffectiveTotalRange(right)); return result; } private int minEffectiveTotalRangeChoice(int csIndex) { fSchemaGrammar.getContentSpec(csIndex, tempContentSpec1); int type = tempContentSpec1.type; int left = tempContentSpec1.value; int right = tempContentSpec1.otherValue; int min = fSchemaGrammar.getContentSpecMinOccurs(csIndex); int result; if (right == -2) result = min * minEffectiveTotalRange(left); else { int minLeft = minEffectiveTotalRange(left); int minRight = minEffectiveTotalRange(right); result = min * ((minLeft < minRight)?minLeft:minRight); } return result; } public int maxEffectiveTotalRange(int contentSpecIndex) { fSchemaGrammar.getContentSpec(contentSpecIndex, tempContentSpec1); int type = tempContentSpec1.type; if (type == XMLContentSpec.CONTENTSPECNODE_SEQ || type == XMLContentSpec.CONTENTSPECNODE_ALL) { return maxEffectiveTotalRangeSeq(contentSpecIndex); } else if (type == XMLContentSpec.CONTENTSPECNODE_CHOICE) { return maxEffectiveTotalRangeChoice(contentSpecIndex); } else { return(fSchemaGrammar.getContentSpecMaxOccurs(contentSpecIndex)); } } private int maxEffectiveTotalRangeSeq(int csIndex) { fSchemaGrammar.getContentSpec(csIndex, tempContentSpec1); int type = tempContentSpec1.type; int left = tempContentSpec1.value; int right = tempContentSpec1.otherValue; int max = fSchemaGrammar.getContentSpecMaxOccurs(csIndex); if (max == SchemaSymbols.OCCURRENCE_UNBOUNDED) return SchemaSymbols.OCCURRENCE_UNBOUNDED; int maxLeft = maxEffectiveTotalRange(left); if (right == -2) { if (maxLeft == SchemaSymbols.OCCURRENCE_UNBOUNDED) return SchemaSymbols.OCCURRENCE_UNBOUNDED; else return max * maxLeft; } else { int maxRight = maxEffectiveTotalRange(right); if (maxLeft == SchemaSymbols.OCCURRENCE_UNBOUNDED || maxRight == SchemaSymbols.OCCURRENCE_UNBOUNDED) return SchemaSymbols.OCCURRENCE_UNBOUNDED; else return max * (maxLeft + maxRight); } } private int maxEffectiveTotalRangeChoice(int csIndex) { fSchemaGrammar.getContentSpec(csIndex, 
tempContentSpec1); int type = tempContentSpec1.type; int left = tempContentSpec1.value; int right = tempContentSpec1.otherValue; int max = fSchemaGrammar.getContentSpecMaxOccurs(csIndex); if (max == SchemaSymbols.OCCURRENCE_UNBOUNDED) return SchemaSymbols.OCCURRENCE_UNBOUNDED; int maxLeft = maxEffectiveTotalRange(left); if (right == -2) { if (maxLeft == SchemaSymbols.OCCURRENCE_UNBOUNDED) return SchemaSymbols.OCCURRENCE_UNBOUNDED; else return max * maxLeft; } else { int maxRight = maxEffectiveTotalRange(right); if (maxLeft == SchemaSymbols.OCCURRENCE_UNBOUNDED || maxRight == SchemaSymbols.OCCURRENCE_UNBOUNDED) return SchemaSymbols.OCCURRENCE_UNBOUNDED; else return max * ((maxLeft > maxRight)?maxLeft:maxRight); } } private String resolvePrefixToURI (String prefix) throws Exception { String uriStr = fStringPool.toString(fNamespacesScope.getNamespaceForPrefix(fStringPool.addSymbol(prefix))); if (uriStr.length() == 0 && prefix.length() > 0) { // REVISIT: Localize reportGenericSchemaError("prefix : [" + prefix +"] cannot be resolved to a URI"); return ""; } return uriStr; } public TraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver, XMLErrorReporter errorReporter, String schemaURL, EntityResolver entityResolver, boolean fullChecking, GeneralAttrCheck generalAttrCheck ) throws Exception { fErrorReporter = errorReporter; fCurrentSchemaURL = schemaURL; fFullConstraintChecking = fullChecking; fEntityResolver = entityResolver; fGeneralAttrCheck = generalAttrCheck; doTraverseSchema(root, stringPool, schemaGrammar, grammarResolver); } public TraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver, XMLErrorReporter errorReporter, String schemaURL, boolean fullChecking, GeneralAttrCheck generalAttrCheck ) throws Exception { fErrorReporter = errorReporter; fCurrentSchemaURL = schemaURL; fFullConstraintChecking = fullChecking; fGeneralAttrCheck = generalAttrCheck; doTraverseSchema(root, stringPool, schemaGrammar, grammarResolver); } public TraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver, boolean fullChecking, GeneralAttrCheck generalAttrCheck ) throws Exception { fFullConstraintChecking = fullChecking; fGeneralAttrCheck = generalAttrCheck; doTraverseSchema(root, stringPool, schemaGrammar, grammarResolver); } public void doTraverseSchema(Element root, StringPool stringPool, SchemaGrammar schemaGrammar, GrammarResolver grammarResolver) throws Exception { fNamespacesScope = new NamespacesScope(this); fSchemaRootElement = root; fStringPool = stringPool; fSchemaGrammar = schemaGrammar; if (fFullConstraintChecking) { fSchemaGrammar.setDeferContentSpecExpansion(); fSchemaGrammar.setCheckUniqueParticleAttribution(); } fGrammarResolver = grammarResolver; fDatatypeRegistry = (DatatypeValidatorFactoryImpl) fGrammarResolver.getDatatypeRegistry(); //Expand to registry type to contain all primitive datatype fDatatypeRegistry.expandRegistryToFullSchemaSet(); fXsiURI = fStringPool.addSymbol(SchemaSymbols.URI_XSI); if (root == null) { // REVISIT: Anything to do? 
return; } // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_GLOBAL; Hashtable attrValues = generalCheck(root, scope); //Make sure namespace binding is defaulted String rootPrefix = root.getPrefix(); if( rootPrefix == null || rootPrefix.length() == 0 ){ String xmlns = root.getAttribute("xmlns"); if( xmlns.length() == 0 ) root.setAttribute("xmlns", SchemaSymbols.URI_SCHEMAFORSCHEMA ); } //Retrieve the targetNamespace URI information fTargetNSURIString = getTargetNamespaceString(root); fTargetNSURI = fStringPool.addSymbol(fTargetNSURIString); if (fGrammarResolver == null) { // REVISIT: Localize reportGenericSchemaError("Internal error: don't have a GrammarResolver for TraverseSchema"); } else{ // for complex type registry, attribute decl registry and // namespace mapping, needs to check whether the passed in // Grammar was a newly instantiated one. if (fSchemaGrammar.getComplexTypeRegistry() == null ) { fSchemaGrammar.setComplexTypeRegistry(fComplexTypeRegistry); } else { fComplexTypeRegistry = fSchemaGrammar.getComplexTypeRegistry(); } if (fSchemaGrammar.getAttributeDeclRegistry() == null ) { fSchemaGrammar.setAttributeDeclRegistry(fAttributeDeclRegistry); } else { fAttributeDeclRegistry = fSchemaGrammar.getAttributeDeclRegistry(); } if (fSchemaGrammar.getNamespacesScope() == null ) { fSchemaGrammar.setNamespacesScope(fNamespacesScope); } else { fNamespacesScope = fSchemaGrammar.getNamespacesScope(); } fSchemaGrammar.setDatatypeRegistry(fDatatypeRegistry); fSchemaGrammar.setTargetNamespaceURI(fTargetNSURIString); fGrammarResolver.putGrammar(fTargetNSURIString, fSchemaGrammar); } // Retrived the Namespace mapping from the schema element. NamedNodeMap schemaEltAttrs = root.getAttributes(); int i = 0; Attr sattr = null; boolean seenXMLNS = false; while ((sattr = (Attr)schemaEltAttrs.item(i++)) != null) { String attName = sattr.getName(); if (attName.startsWith("xmlns:")) { String attValue = sattr.getValue(); String prefix = attName.substring(attName.indexOf(":")+1); fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(prefix), fStringPool.addSymbol(attValue) ); } if (attName.equals("xmlns")) { String attValue = sattr.getValue(); fNamespacesScope.setNamespaceForPrefix( StringPool.EMPTY_STRING, fStringPool.addSymbol(attValue) ); seenXMLNS = true; } } if (!seenXMLNS && fTargetNSURIString.length() == 0 ) { fNamespacesScope.setNamespaceForPrefix( StringPool.EMPTY_STRING, StringPool.EMPTY_STRING); } fElementDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ELEMENTFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); fAttributeDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ATTRIBUTEFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); Attr blockAttr = root.getAttributeNode(SchemaSymbols.ATT_BLOCKDEFAULT); if (blockAttr == null) fBlockDefault = 0; else fBlockDefault = parseBlockSet(blockAttr.getValue()); Attr finalAttr = root.getAttributeNode(SchemaSymbols.ATT_FINALDEFAULT); if (finalAttr == null) fFinalDefault = 0; else fFinalDefault = parseFinalSet(finalAttr.getValue()); //REVISIT, really sticky when noTargetNamesapce, for now, we assume everyting is in the same name space); if (fTargetNSURI == StringPool.EMPTY_STRING) { //fElementDefaultQualified = true; //fAttributeDefaultQualified = true; } //fScopeCount++; // fCurrentScope = -1; //extract all top-level attribute, attributeGroup, and group Decls and put them in the 3 hasn table in the SchemaGrammar. extractTopLevel3Components(root); // process <redefine>, <include> and <import> info items. 
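        // Editor's note (illustrative, not from the original source): the two
        // loops below rely on the fact that <include>, <import> and <redefine>
        // may only appear before any other top-level component of a schema
        // document.  For a made-up document such as
        //
        //   <schema xmlns="http://www.w3.org/2001/XMLSchema">
        //     <include schemaLocation="base.xsd"/>
        //     <element name="root" type="string"/>
        //   </schema>
        //
        // the first loop consumes the <include> and then breaks at <element>,
        // which the second loop picks up as an ordinary top-level declaration.
        // The names "base.xsd" and "root" are invented for this example.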
Element child = XUtil.getFirstChildElement(root); for (; child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) { traverseAnnotationDecl(child); } else if (name.equals(SchemaSymbols.ELT_INCLUDE)) { fNamespacesScope.increaseDepth(); traverseInclude(child); fNamespacesScope.decreaseDepth(); } else if (name.equals(SchemaSymbols.ELT_IMPORT)) { traverseImport(child); } else if (name.equals(SchemaSymbols.ELT_REDEFINE)) { fRedefineSucceeded = true; // presume worked until proven failed. traverseRedefine(child); } else break; } // child refers to the first info item which is not <annotation> or // one of the schema inclusion/importation declarations. for (; child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) { traverseAnnotationDecl(child); } else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE )) { traverseSimpleTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_COMPLEXTYPE )) { traverseComplexTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_ELEMENT )) { traverseElementDecl(child); } else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { traverseAttributeGroupDecl(child, null, null); } else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) { traverseAttributeDecl( child, null, false ); } else if (name.equals(SchemaSymbols.ELT_GROUP)) { traverseGroupDecl(child); } else if (name.equals(SchemaSymbols.ELT_NOTATION)) { traverseNotationDecl(child); //TO DO } else { // REVISIT: Localize reportGenericSchemaError("error in content of <schema> element information item"); } } // for each child node // handle identity constraints // we must traverse <key>s and <unique>s before we tackel<keyref>s, // since all have global scope and may be declared anywhere in the schema. Enumeration elementIndexes = fIdentityConstraints.keys(); while (elementIndexes.hasMoreElements()) { Integer elementIndexObj = (Integer)elementIndexes.nextElement(); if (DEBUG_IC_DATATYPES) { System.out.println("<ICD>: traversing identity constraints for element: "+elementIndexObj); } Vector identityConstraints = (Vector)fIdentityConstraints.get(elementIndexObj); if (identityConstraints != null) { int elementIndex = elementIndexObj.intValue(); traverseIdentityNameConstraintsFor(elementIndex, identityConstraints); } } elementIndexes = fIdentityConstraints.keys(); while (elementIndexes.hasMoreElements()) { Integer elementIndexObj = (Integer)elementIndexes.nextElement(); if (DEBUG_IC_DATATYPES) { System.out.println("<ICD>: traversing identity constraints for element: "+elementIndexObj); } Vector identityConstraints = (Vector)fIdentityConstraints.get(elementIndexObj); if (identityConstraints != null) { int elementIndex = elementIndexObj.intValue(); traverseIdentityRefConstraintsFor(elementIndex, identityConstraints); } } // At this point, we can do any remaining checking for cos-element-consistent // that involves substitution group elements if (fFullConstraintChecking) { // Loop thru all of the top-level elements that were ref'd from groups or // complexTypes, and ensure that: // 1. they have consistent type as any local element potentially declared in // the group/type // 2. any substitutable elements are consistent wrt type given any // similiarly named elements from the group scope // Note: for a complexType, we don't check against base scope. Not clear if // we need to. 
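            // Editor's note (illustrative, not from the original source): the two
            // passes below implement the "Element Declarations Consistent" check
            // (no model group may contain two elements with the same name but
            // different types, even when one of them only enters the group via a
            // top-level reference or a substitution group) and the
            // "Particle Valid (Restriction)" check for complexTypes derived by
            // restriction.  As a made-up example of the latter: a base type with
            // content model (a, b?) may be restricted to (a), since the optional
            // b particle is emptiable and can be dropped, but not to (b, a),
            // because the particles of the restriction must map onto the base
            // particles in order.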
for (int j = 0; j < fTopLevelElementsRefdFromGroup.size(); j+=2) { QName eltName = (QName)fTopLevelElementsRefdFromGroup.elementAt(j); int groupScope = ((Integer)fTopLevelElementsRefdFromGroup.elementAt(j+1)).intValue(); checkConsistentElements(eltName, groupScope); } // Loop thru all of the complexTypes, and for any derived by restriction, // do particle derivation checking int count = fComplexTypeRegistry.size(); Enumeration enum = fComplexTypeRegistry.elements(); ComplexTypeInfo typeInfo,baseTypeInfo; while (enum.hasMoreElements ()) { typeInfo = (TraverseSchema.ComplexTypeInfo)enum.nextElement(); baseTypeInfo = typeInfo.baseComplexTypeInfo; if (typeInfo.derivedBy == SchemaSymbols.RESTRICTION && baseTypeInfo!=null && typeInfo.contentSpecHandle>-1) { try { checkParticleDerivationOK(typeInfo.contentSpecHandle, typeInfo.scopeDefined, baseTypeInfo.contentSpecHandle, baseTypeInfo.scopeDefined,baseTypeInfo); } catch (ParticleRecoverableError e) { String message = e.getMessage(); reportGenericSchemaError("ComplexType '" + typeInfo.typeName + "': " + message); } } } } } // traverseSchema(Element) private void extractTopLevel3Components(Element root) throws Exception { for (Element child = XUtil.getFirstChildElement(root); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); String compName = child.getAttribute(SchemaSymbols.ATT_NAME); if (name.equals(SchemaSymbols.ELT_ELEMENT)) { // Check if the element has already been declared if (fSchemaGrammar.topLevelElemDecls.get(compName) != null) { reportGenericSchemaError("sch-props-correct: Duplicate declaration for an element " + compName); } else { fSchemaGrammar.topLevelElemDecls.put(compName, child); } } else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE) || name.equals(SchemaSymbols.ELT_COMPLEXTYPE)) { // Check for dublicate declaration if (fSchemaGrammar.topLevelTypeDecls.get(compName) != null) { reportGenericSchemaError("sch-props-correct: Duplicate declaration for a type " + compName); } else { fSchemaGrammar.topLevelTypeDecls.put(compName, child); } } else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { // Check for dublicate declaration if (fSchemaGrammar.topLevelAttrGrpDecls.get(compName) != null) { reportGenericSchemaError("sch-props-correct: Duplicate declaration for an attribute group " + compName); } else { fSchemaGrammar.topLevelAttrGrpDecls.put(compName, child); } } else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) { // Check for dublicate declaration if (fSchemaGrammar.topLevelAttrGrpDecls.get(compName) != null) { reportGenericSchemaError("sch-props-correct: Duplicate declaration for an attribute " + compName); } else { fSchemaGrammar.topLevelAttrGrpDecls.put(compName, child); } } else if ( name.equals(SchemaSymbols.ELT_GROUP) ) { // Check if the group has already been declared if (fSchemaGrammar.topLevelGroupDecls.get(compName) != null){ reportGenericSchemaError("sch-props-correct: Duplicate declaration for a group " + compName); } else { fSchemaGrammar.topLevelGroupDecls.put(compName, child); } } else if ( name.equals(SchemaSymbols.ELT_NOTATION) ) { // Check for dublicate declaration if (fSchemaGrammar.topLevelNotationDecls.get(compName) != null) { reportGenericSchemaError("sch-props-correct: Duplicate declaration for a notation " + compName); } else { fSchemaGrammar.topLevelNotationDecls.put(compName, child); } } } // for each child node } private void checkConsistentElements(QName eltName, int scope) throws Exception { // See if there is a declaration of an element with the same 
name at the // given scope. // This is required because any model group cannot have more than 1 // element with the same name, but different types (even if some are // local, and others top-level) fTempElementDecl.clear(); int topLevelElementNdx = fSchemaGrammar.getElementDeclIndex(eltName, TOP_LEVEL_SCOPE); if (topLevelElementNdx < 0) return; fSchemaGrammar.getElementDecl(topLevelElementNdx, fTempElementDecl); DatatypeValidator edv = fTempElementDecl.datatypeValidator; ComplexTypeInfo eTypeInfo = fSchemaGrammar.getElementComplexTypeInfo(topLevelElementNdx); int existingEltNdx = fSchemaGrammar.getElementDeclIndex(eltName.uri, eltName.localpart,scope); if (existingEltNdx > -1) { if (!checkDuplicateElementTypes(existingEltNdx,eTypeInfo,edv)) reportGenericSchemaError("duplicate element decl in the same scope with different types : " + fStringPool.toString(eltName.localpart)); } Vector substitutableNames = fSchemaGrammar.getElementDeclAllSubstitutionGroupQNames(topLevelElementNdx, fGrammarResolver, fStringPool); for (int i = 0; i < substitutableNames.size(); i++) { SchemaGrammar.OneSubGroup subGroup = (SchemaGrammar.OneSubGroup)substitutableNames.elementAt(i); QName substName = subGroup.name; int substEltNdx = subGroup.eleIndex; int localEltNdx = fSchemaGrammar.getElementDeclIndex(substName, scope); if (localEltNdx > -1) { fSchemaGrammar.getElementDecl(localEltNdx, fTempElementDecl); edv = fTempElementDecl.datatypeValidator; eTypeInfo = fSchemaGrammar.getElementComplexTypeInfo(localEltNdx); if (!checkDuplicateElementTypes(substEltNdx,eTypeInfo,edv)) reportGenericSchemaError("duplicate element decl in the same scope with different types : " + fStringPool.toString(substName.localpart)); } } } /** * Expands a system id and returns the system id as a URL, if * it can be expanded. A return value of null means that the * identifier is already expanded. An exception thrown * indicates a failure to expand the id. * * @param systemId The systemId to be expanded. * * @return Returns the URL object representing the expanded system * identifier. A null value indicates that the given * system identifier is already expanded. * */ private String expandSystemId(String systemId, String currentSystemId) throws Exception{ String id = systemId; // check for bad parameters id if (id == null || id.length() == 0) { return systemId; } // if id already expanded, return try { URL url = new URL(id); if (url != null) { return systemId; } } catch (MalformedURLException e) { // continue on... } // normalize id id = fixURI(id); // normalize base URL base = null; URL url = null; try { if (currentSystemId == null) { String dir; try { dir = fixURI(System.getProperty("user.dir")); } catch (SecurityException se) { dir = ""; } if (!dir.endsWith("/")) { dir = dir + "/"; } base = new URL("file", "", dir); } else { base = new URL(currentSystemId); } // expand id url = new URL(base, id); } catch (Exception e) { // let it go through } if (url == null) { return systemId; } return url.toString(); } /** * Fixes a platform dependent filename to standard URI form. * * @param str The string to fix. * * @return Returns the fixed URI string. 
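     * <p>
     * (Editor's note, illustrative only: on Windows a platform path such as
     * <code>C:\schemas\po.xsd</code> first has its separators replaced, giving
     * <code>C:/schemas/po.xsd</code>, and is then prefixed with a slash to
     * yield <code>/C:/schemas/po.xsd</code>, which expandSystemId can safely
     * combine with a <code>file:</code> base URL.  The path is invented for
     * this example.)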
*/ private static String fixURI(String str) { // handle platform dependent strings str = str.replace(java.io.File.separatorChar, '/'); // Windows fix if (str.length() >= 2) { char ch1 = str.charAt(1); if (ch1 == ':') { char ch0 = Character.toUpperCase(str.charAt(0)); if (ch0 >= 'A' && ch0 <= 'Z') { str = "/" + str; } } } // done return str; } private void traverseInclude(Element includeDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_GLOBAL; Hashtable attrValues = generalCheck(includeDecl, scope); checkContent(includeDecl, XUtil.getFirstChildElement(includeDecl), true); Attr locationAttr = includeDecl.getAttributeNode(SchemaSymbols.ATT_SCHEMALOCATION); if (locationAttr == null) { // REVISIT: Localize reportGenericSchemaError("a schemaLocation attribute must be specified on an <include> element"); return; } String location = locationAttr.getValue(); // expand it before passing it to the parser InputSource source = null; if (fEntityResolver != null) { source = fEntityResolver.resolveEntity("", location); } if (source == null) { location = expandSystemId(location, fCurrentSchemaURL); source = new InputSource(location); } else { // create a string for uniqueness of this included schema in fIncludeLocations if (source.getPublicId () != null) location = source.getPublicId (); location += (',' + source.getSystemId ()); } if (fIncludeLocations.contains((Object)location)) { return; } fIncludeLocations.addElement((Object)location); DOMParser parser = new IgnoreWhitespaceParser(); parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() { public void fatalError(SAXParseException ex) throws SAXException { StringBuffer str = new StringBuffer(); String systemId_ = ex.getSystemId(); if (systemId_ != null) { int index = systemId_.lastIndexOf('/'); if (index != -1) systemId_ = systemId_.substring(index + 1); str.append(systemId_); } str.append(':').append(ex.getLineNumber()).append(':').append(ex.getColumnNumber()); String message = ex.getMessage(); if(message.toLowerCase().trim().endsWith("not found.")) { System.err.println("[Warning] "+ str.toString()+": "+ message); } else { // do standard thing System.err.println("[Fatal Error] "+ str.toString()+":"+message); throw ex; } } }); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://xml.org/sax/features/namespaces", true); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } try { parser.parse( source ); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { //e.printStackTrace(); } Document document = parser.getDocument(); //Our Grammar Element root = null; if (document != null) { root = document.getDocumentElement(); } if (root != null) { String targetNSURI = getTargetNamespaceString(root); if (targetNSURI.length() > 0 && !targetNSURI.equals(fTargetNSURIString) ) { // REVISIT: Localize reportGenericSchemaError("included schema '"+location+"' has a different targetNameSpace '" +targetNSURI+"'"); } else { // We not creating another TraverseSchema object to compile // the included schema file, because the scope count, anon-type count // should not be reset for a included schema, this can be fixed by saving // the counters in the Schema Grammar, if (fSchemaInfoListRoot == null) { fSchemaInfoListRoot = new SchemaInfo(fElementDefaultQualified, 
fAttributeDefaultQualified, fBlockDefault, fFinalDefault, fCurrentSchemaURL, fSchemaRootElement, fNamespacesScope, null, null); fCurrentSchemaInfo = fSchemaInfoListRoot; } fSchemaRootElement = root; fCurrentSchemaURL = location; traverseIncludedSchemaHeader(root); // and now we'd better save this stuff! fCurrentSchemaInfo = new SchemaInfo(fElementDefaultQualified, fAttributeDefaultQualified, fBlockDefault, fFinalDefault, fCurrentSchemaURL, fSchemaRootElement, fNamespacesScope, fCurrentSchemaInfo.getNext(), fCurrentSchemaInfo); (fCurrentSchemaInfo.getPrev()).setNext(fCurrentSchemaInfo); traverseIncludedSchema(root); // there must always be a previous element! fCurrentSchemaInfo = fCurrentSchemaInfo.getPrev(); fCurrentSchemaInfo.restore(); } } } private void traverseIncludedSchemaHeader(Element root) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_GLOBAL; Hashtable attrValues = generalCheck(root, scope); // Retrieved the Namespace mapping from the schema element. NamedNodeMap schemaEltAttrs = root.getAttributes(); int i = 0; Attr sattr = null; boolean seenXMLNS = false; while ((sattr = (Attr)schemaEltAttrs.item(i++)) != null) { String attName = sattr.getName(); if (attName.startsWith("xmlns:")) { String attValue = sattr.getValue(); String prefix = attName.substring(attName.indexOf(":")+1); fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(prefix), fStringPool.addSymbol(attValue) ); } if (attName.equals("xmlns")) { String attValue = sattr.getValue(); fNamespacesScope.setNamespaceForPrefix( StringPool.EMPTY_STRING, fStringPool.addSymbol(attValue) ); seenXMLNS = true; } } if (!seenXMLNS && fTargetNSURIString.length() == 0 ) { fNamespacesScope.setNamespaceForPrefix( StringPool.EMPTY_STRING, StringPool.EMPTY_STRING); } fElementDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ELEMENTFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); fAttributeDefaultQualified = root.getAttribute(SchemaSymbols.ATT_ATTRIBUTEFORMDEFAULT).equals(SchemaSymbols.ATTVAL_QUALIFIED); Attr blockAttr = root.getAttributeNode(SchemaSymbols.ATT_BLOCKDEFAULT); if (blockAttr == null) fBlockDefault = 0; else fBlockDefault = parseBlockSet(blockAttr.getValue()); Attr finalAttr = root.getAttributeNode(SchemaSymbols.ATT_FINALDEFAULT); if (finalAttr == null) fFinalDefault = 0; else fFinalDefault = parseFinalSet(finalAttr.getValue()); //REVISIT, really sticky when noTargetNamesapce, for now, we assume everyting is in the same name space); if (fTargetNSURI == StringPool.EMPTY_STRING) { fElementDefaultQualified = true; //fAttributeDefaultQualified = true; } //fScopeCount++; //fCurrentScope = -1; } // traverseIncludedSchemaHeader private void traverseIncludedSchema(Element root) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_GLOBAL; Hashtable attrValues = generalCheck(root, scope); //extract all top-level attribute, attributeGroup, and group Decls and put them in the 3 hasn table in the SchemaGrammar. extractTopLevel3Components(root); // handle <redefine>, <include> and <import> elements. 
Element child = XUtil.getFirstChildElement(root); for (; child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) { traverseAnnotationDecl(child); } else if (name.equals(SchemaSymbols.ELT_INCLUDE)) { fNamespacesScope.increaseDepth(); traverseInclude(child); fNamespacesScope.decreaseDepth(); } else if (name.equals(SchemaSymbols.ELT_IMPORT)) { traverseImport(child); } else if (name.equals(SchemaSymbols.ELT_REDEFINE)) { fRedefineSucceeded = true; // presume worked until proven failed. traverseRedefine(child); } else break; } // handle the rest of the schema elements. // BEWARE! this method gets called both from traverseRedefine and // traverseInclude; the preconditions (especially with respect to // groups and attributeGroups) are different! for (; child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) { traverseAnnotationDecl(child); } else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE )) { traverseSimpleTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_COMPLEXTYPE )) { traverseComplexTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_ELEMENT )) { traverseElementDecl(child); } else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { if(fRedefineAttributeGroupMap != null) { String dName = child.getAttribute(SchemaSymbols.ATT_NAME); String bName = (String)fRedefineAttributeGroupMap.get(dName); if(bName != null) { child.setAttribute(SchemaSymbols.ATT_NAME, bName); // and make sure we wipe this out of the grammar! fSchemaGrammar.topLevelAttrGrpDecls.remove(dName); // Now we reuse this location in the array to store info we'll need for validation... ComplexTypeInfo typeInfo = new ComplexTypeInfo(); int templateElementNameIndex = fStringPool.addSymbol("$"+bName); int typeNameIndex = fStringPool.addSymbol("%"+bName); typeInfo.scopeDefined = -2; typeInfo.contentSpecHandle = -1; typeInfo.contentType = XMLElementDecl.TYPE_SIMPLE; typeInfo.datatypeValidator = null; typeInfo.templateElementIndex = fSchemaGrammar.addElementDecl( new QName(-1, templateElementNameIndex,typeNameIndex,fTargetNSURI), (fTargetNSURI==StringPool.EMPTY_STRING) ? StringPool.EMPTY_STRING : -2, typeInfo.scopeDefined, typeInfo.contentType, typeInfo.contentSpecHandle, -1, typeInfo.datatypeValidator); Vector anyAttDecls = new Vector(); // need to determine how to initialize these babies; then // on the <redefine> traversing end, try // and cast the hash value into the right form; // failure indicates nothing to redefine; success // means we can feed checkAttribute... what it needs... traverseAttributeGroupDecl(child, typeInfo, anyAttDecls); typeInfo.attlistHead = fSchemaGrammar.getFirstAttributeDeclIndex( typeInfo.templateElementIndex); fRedefineAttributeGroupMap.put(dName, new Object []{typeInfo, fSchemaGrammar, anyAttDecls}); continue; } } traverseAttributeGroupDecl(child, null, null); } else if (name.equals( SchemaSymbols.ELT_ATTRIBUTE ) ) { traverseAttributeDecl( child, null , false); } else if (name.equals(SchemaSymbols.ELT_GROUP)) { String dName = child.getAttribute(SchemaSymbols.ATT_NAME); if(fGroupNameRegistry.get(fTargetNSURIString + ","+dName) == null) { // we've been renamed already traverseGroupDecl(child); continue; } // if we're here: must have been a restriction. // we have yet to be renamed. 
try { GroupInfo gi = (GroupInfo)fGroupNameRegistry.get(fTargetNSURIString + ","+dName); // if that succeeded then we're done; were ref'd here in // an include most likely. continue; } catch (ClassCastException c) { String s = (String)fGroupNameRegistry.get(fTargetNSURIString + ","+dName); if (s == null) continue; // must have seen this already--somehow... }; String bName = (String)fGroupNameRegistry.get(fTargetNSURIString +"," + dName); if(bName != null) { child.setAttribute(SchemaSymbols.ATT_NAME, bName); // Now we reuse this location in the array to store info we'll need for validation... // note that traverseGroupDecl will happily do that for us! } traverseGroupDecl(child); } else if (name.equals(SchemaSymbols.ELT_NOTATION)) { traverseNotationDecl(child); } else { // REVISIT: Localize reportGenericSchemaError("error in content of included <schema> element information item"); } } // for each child node } // This method's job is to open a redefined schema and store away its root element, defaultElementQualified and other // such info, in order that it can be available when redefinition actually takes place. // It assumes that it will be called from the schema doing the redefining, and it assumes // that the other schema's info has already been saved, putting the info it finds into the // SchemaInfoList element that is passed in. private void openRedefinedSchema(Element redefineDecl, SchemaInfo store) throws Exception { Attr locationAttr = redefineDecl.getAttributeNode(SchemaSymbols.ATT_SCHEMALOCATION); if (locationAttr == null) { // REVISIT: Localize fRedefineSucceeded = false; reportGenericSchemaError("a schemaLocation attribute must be specified on a <redefine> element"); return; } String location = locationAttr.getValue(); // expand it before passing it to the parser InputSource source = null; if (fEntityResolver != null) { source = fEntityResolver.resolveEntity("", location); } if (source == null) { location = expandSystemId(location, fCurrentSchemaURL); source = new InputSource(location); } else { // Make sure we don't redefine the same schema twice; it's allowed // but the specs encourage us to avoid it. if (source.getPublicId () != null) location = source.getPublicId (); // make sure we're not redefining ourselves! if(source.getSystemId().equals(fCurrentSchemaURL)) { // REVISIT: localize reportGenericSchemaError("src-redefine.2: a schema cannot redefine itself"); fRedefineSucceeded = false; return; } location += (',' + source.getSystemId ()); } if (fRedefineLocations.get((Object)location) != null) { // then we'd better make sure we're directed at that schema... 
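            // Editor's note: fRedefineLocations acts as a cache keyed on the
            // expanded schemaLocation -- if the schema being redefined has
            // already been opened (openRedefinedSchema stores its SchemaInfo
            // later in this method), its saved state is simply restored here
            // instead of parsing the document a second time.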
fCurrentSchemaInfo = (SchemaInfo)(fRedefineLocations.get((Object)location)); fCurrentSchemaInfo.restore(); return; } DOMParser parser = new IgnoreWhitespaceParser(); parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() ); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://xml.org/sax/features/namespaces", true); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } try { parser.parse( source ); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { //e.printStackTrace(); } Document document = parser.getDocument(); //Our Grammar to be redefined Element root = null; if (document != null) { root = document.getDocumentElement(); } if (root == null) { // nothing to be redefined, so just continue; specs disallow an error here. fRedefineSucceeded = false; return; } // now if root isn't null, it'll contain the root of the schema we need to redefine. // We do this in two phases: first, we look through the children of // redefineDecl. Each one will correspond to an element of the // redefined schema that we need to redefine. To do this, we rename the // element of the redefined schema, and rework the base or ref tag of // the kid we're working on to refer to the renamed group or derive the // renamed type. Once we've done this, we actually go through the // schema being redefined and convert it to a grammar. Only then do we // run through redefineDecl's kids and put them in the grammar. // This approach is kosher with the specs. It does raise interesting // questions about error reporting, and perhaps also about grammar // access, but it is comparatively efficient (we need make at most // only 2 traversals of any given information item) and moreover // we can use existing code to build the grammar structures once the // first pass is out of the way, so this should be quite robust. // check to see if the targetNameSpace is right String redefinedTargetNSURIString = getTargetNamespaceString(root); if (redefinedTargetNSURIString.length() > 0 && !redefinedTargetNSURIString.equals(fTargetNSURIString) ) { // REVISIT: Localize fRedefineSucceeded = false; reportGenericSchemaError("redefined schema '"+location+"' has a different targetNameSpace '" +redefinedTargetNSURIString+"' from the original schema"); } else { // targetNamespace is right, so let's do the renaming... // and let's keep in mind that the targetNamespace of the redefined // elements is that of the redefined schema! fSchemaRootElement = root; fCurrentSchemaURL = location; fNamespacesScope = new NamespacesScope(this); if((redefinedTargetNSURIString.length() == 0) && (root.getAttributeNode("xmlns") == null)) { fNamespacesScope.setNamespaceForPrefix(StringPool.EMPTY_STRING, fTargetNSURI); } else { } // get default form xmlns bindings et al. traverseIncludedSchemaHeader(root); // and then save them... store.setNext(new SchemaInfo(fElementDefaultQualified, fAttributeDefaultQualified, fBlockDefault, fFinalDefault, fCurrentSchemaURL, fSchemaRootElement, fNamespacesScope, null, store)); (store.getNext()).setPrev(store); fCurrentSchemaInfo = store.getNext(); fRedefineLocations.put((Object)location, store.getNext()); } // end if } // end openRedefinedSchema /**** * <redefine * schemaLocation = uriReference * {any attributes with non-schema namespace . . 
.}> * Content: (annotation | ( * attributeGroup | complexType | group | simpleType))* * </redefine> */ private void traverseRedefine(Element redefineDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_GLOBAL; Hashtable attrValues = generalCheck(redefineDecl, scope); // initialize storage areas... fRedefineAttributeGroupMap = new Hashtable(); NamespacesScope saveNSScope = (NamespacesScope)fNamespacesScope.clone(); // only case in which need to save contents is when fSchemaInfoListRoot is null; otherwise we'll have // done this already one way or another. if (fSchemaInfoListRoot == null) { fSchemaInfoListRoot = new SchemaInfo(fElementDefaultQualified, fAttributeDefaultQualified, fBlockDefault, fFinalDefault, fCurrentSchemaURL, fSchemaRootElement, fNamespacesScope, null, null); openRedefinedSchema(redefineDecl, fSchemaInfoListRoot); if(!fRedefineSucceeded) return; fCurrentSchemaInfo = fSchemaInfoListRoot.getNext(); fNamespacesScope = (NamespacesScope)saveNSScope.clone(); renameRedefinedComponents(redefineDecl,fSchemaInfoListRoot.getNext().getRoot(), fSchemaInfoListRoot.getNext()); } else { // may have a chain here; need to be wary! SchemaInfo curr = fSchemaInfoListRoot; for(; curr.getNext() != null; curr = curr.getNext()); fCurrentSchemaInfo = curr; fCurrentSchemaInfo.restore(); openRedefinedSchema(redefineDecl, fCurrentSchemaInfo); if(!fRedefineSucceeded) return; fNamespacesScope = (NamespacesScope)saveNSScope.clone(); renameRedefinedComponents(redefineDecl,fCurrentSchemaInfo.getRoot(), fCurrentSchemaInfo); } // Now we have to march through our nicely-renamed schemas from the // bottom up. When we do these traversals other <redefine>'s may // perhaps be encountered; we leave recursion to sort this out. fCurrentSchemaInfo.restore(); traverseIncludedSchema(fSchemaRootElement); fNamespacesScope = (NamespacesScope)saveNSScope.clone(); // and last but not least: traverse our own <redefine>--the one all // this labour has been expended upon. for (Element child = XUtil.getFirstChildElement(redefineDecl); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); // annotations can occur anywhere in <redefine>s! if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) { traverseAnnotationDecl(child); } else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE )) { traverseSimpleTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_COMPLEXTYPE )) { traverseComplexTypeDecl(child); } else if (name.equals(SchemaSymbols.ELT_GROUP)) { String dName = child.getAttribute(SchemaSymbols.ATT_NAME); if(fGroupNameRegistry.get(fTargetNSURIString +","+ dName) == null || ((fRestrictedRedefinedGroupRegistry.get(fTargetNSURIString+","+dName) != null) && !((Boolean)fRestrictedRedefinedGroupRegistry.get(fTargetNSURIString+","+dName)).booleanValue())) { // extension! traverseGroupDecl(child); continue; } traverseGroupDecl(child); GroupInfo bGIObj = null; try { bGIObj = (GroupInfo)fGroupNameRegistry.get(fTargetNSURIString +","+ dName+redefIdentifier); } catch(ClassCastException c) { // if it's still a String, then we mustn't have found a corresponding attributeGroup in the redefined schema. // REVISIT: localize reportGenericSchemaError("src-redefine.6.2: a <group> within a <redefine> must either have a ref to a <group> with the same name or must restrict such an <group>"); continue; } if(bGIObj != null) { // we have something! 
int bCSIndex = bGIObj.contentSpecIndex; GroupInfo dGIObj; try { dGIObj = (GroupInfo)fGroupNameRegistry.get(fTargetNSURIString+","+dName); } catch (ClassCastException c) { continue; } if(dGIObj == null) // something went wrong... continue; int dCSIndex = dGIObj.contentSpecIndex; try { checkParticleDerivationOK(dCSIndex, -1, bCSIndex, -1, null); } catch (ParticleRecoverableError e) { reportGenericSchemaError(e.getMessage()); } } else // REVISIT: localize reportGenericSchemaError("src-redefine.6.2: a <group> within a <redefine> must either have a ref to a <group> with the same name or must restrict such an <group>"); } else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { if(fRedefineAttributeGroupMap != null) { String dName = child.getAttribute(SchemaSymbols.ATT_NAME); Object [] bAttGrpStore = null; try { bAttGrpStore = (Object [])fRedefineAttributeGroupMap.get(dName); } catch(ClassCastException c) { // if it's still a String, then we mustn't have found a corresponding attributeGroup in the redefined schema. // REVISIT: localize reportGenericSchemaError("src-redefine.7.2: an <attributeGroup> within a <redefine> must either have a ref to an <attributeGroup> with the same name or must restrict such an <attributeGroup>"); continue; } if(bAttGrpStore != null) { // we have something! ComplexTypeInfo bTypeInfo = (ComplexTypeInfo)bAttGrpStore[0]; SchemaGrammar bSchemaGrammar = (SchemaGrammar)bAttGrpStore[1]; Vector bAnyAttDecls = (Vector)bAttGrpStore[2]; XMLAttributeDecl bAnyAttDecl = (bAnyAttDecls.size()>0 )? (XMLAttributeDecl)bAnyAttDecls.elementAt(0):null; ComplexTypeInfo dTypeInfo = new ComplexTypeInfo(); int templateElementNameIndex = fStringPool.addSymbol("$"+dName); int dTypeNameIndex = fStringPool.addSymbol("%"+dName); dTypeInfo.scopeDefined = -2; dTypeInfo.contentSpecHandle = -1; dTypeInfo.contentType = XMLElementDecl.TYPE_SIMPLE; dTypeInfo.datatypeValidator = null; dTypeInfo.templateElementIndex = fSchemaGrammar.addElementDecl( new QName(-1, templateElementNameIndex,dTypeNameIndex,fTargetNSURI), (fTargetNSURI==StringPool.EMPTY_STRING) ? StringPool.EMPTY_STRING : -2, dTypeInfo.scopeDefined, dTypeInfo.contentType, dTypeInfo.contentSpecHandle, -1, dTypeInfo.datatypeValidator); Vector dAnyAttDecls = new Vector(); XMLAttributeDecl dAnyAttDecl = (dAnyAttDecls.size()>0 )? (XMLAttributeDecl)dAnyAttDecls.elementAt(0):null; traverseAttributeGroupDecl(child, dTypeInfo, dAnyAttDecls); dTypeInfo.attlistHead = fSchemaGrammar.getFirstAttributeDeclIndex( dTypeInfo.templateElementIndex); try { checkAttributesDerivationOKRestriction(dTypeInfo.attlistHead,fSchemaGrammar, dAnyAttDecl,bTypeInfo.attlistHead,bSchemaGrammar,bAnyAttDecl); } catch (ComplexTypeRecoverableError e) { String message = e.getMessage(); reportGenericSchemaError("src-redefine.7.2: redefinition failed because of " + message); } continue; } } traverseAttributeGroupDecl(child, null, null); } // no else; error reported in the previous traversal } //for // and restore the original globals fCurrentSchemaInfo = fCurrentSchemaInfo.getPrev(); fCurrentSchemaInfo.restore(); } // traverseRedefine // the purpose of this method is twofold: 1. To find and appropriately modify all information items // in redefinedSchema with names that are redefined by children of // redefineDecl. 2. To make sure the redefine element represented by // redefineDecl is valid as far as content goes and with regard to // properly referencing components to be redefined. No traversing is done here! 
// This method also takes actions to find and, if necessary, modify the names // of elements in <redefine>'s in the schema that's being redefined. private void renameRedefinedComponents(Element redefineDecl, Element schemaToRedefine, SchemaInfo currSchemaInfo) throws Exception { for (Element child = XUtil.getFirstChildElement(redefineDecl); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if (name.equals(SchemaSymbols.ELT_ANNOTATION) ) continue; else if (name.equals(SchemaSymbols.ELT_SIMPLETYPE)) { String typeName = child.getAttribute( SchemaSymbols.ATT_NAME ); if(fTraversedRedefineElements.contains(typeName)) continue; if(validateRedefineNameChange(SchemaSymbols.ELT_SIMPLETYPE, typeName, typeName+redefIdentifier, child)) { fixRedefinedSchema(SchemaSymbols.ELT_SIMPLETYPE, typeName, typeName+redefIdentifier, schemaToRedefine, currSchemaInfo); } } else if (name.equals(SchemaSymbols.ELT_COMPLEXTYPE)) { String typeName = child.getAttribute( SchemaSymbols.ATT_NAME ); if(fTraversedRedefineElements.contains(typeName)) continue; if(validateRedefineNameChange(SchemaSymbols.ELT_COMPLEXTYPE, typeName, typeName+redefIdentifier, child)) { fixRedefinedSchema(SchemaSymbols.ELT_COMPLEXTYPE, typeName, typeName+redefIdentifier, schemaToRedefine, currSchemaInfo); } } else if (name.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { String baseName = child.getAttribute( SchemaSymbols.ATT_NAME ); if(fTraversedRedefineElements.contains(baseName)) continue; if(validateRedefineNameChange(SchemaSymbols.ELT_ATTRIBUTEGROUP, baseName, baseName+redefIdentifier, child)) { fixRedefinedSchema(SchemaSymbols.ELT_ATTRIBUTEGROUP, baseName, baseName+redefIdentifier, schemaToRedefine, currSchemaInfo); } } else if (name.equals(SchemaSymbols.ELT_GROUP)) { String baseName = child.getAttribute( SchemaSymbols.ATT_NAME ); if(fTraversedRedefineElements.contains(baseName)) continue; if(validateRedefineNameChange(SchemaSymbols.ELT_GROUP, baseName, baseName+redefIdentifier, child)) { fixRedefinedSchema(SchemaSymbols.ELT_GROUP, baseName, baseName+redefIdentifier, schemaToRedefine, currSchemaInfo); } } else { fRedefineSucceeded = false; // REVISIT: Localize reportGenericSchemaError("invalid top-level content for <redefine>"); return; } } // for } // renameRedefinedComponents // This function looks among the children of curr for an element of type elementSought. // If it finds one, it evaluates whether its ref attribute contains a reference // to originalName. If it does, it returns 1 + the value returned by // calls to itself on all other children. In all other cases it returns 0 plus // the sum of the values returned by calls to itself on curr's children. // It also resets the value of ref so that it will refer to the renamed type from the schema // being redefined. 
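    // Editor's note (illustrative, not from the original source): given a
    // redefinition along the lines of
    //
    //   <redefine schemaLocation="base.xsd">
    //     <group name="content">
    //       <sequence>
    //         <group ref="content"/>   <!-- reference to the original definition -->
    //         <element ref="extra"/>
    //       </sequence>
    //     </group>
    //   </redefine>
    //
    // (namespace prefixes and declarations omitted; "base.xsd", "content" and
    // "extra" are invented names), the method below rewrites the self-reference
    // so that it points at the renamed copy of the original group (typically
    // the original name with the redefIdentifier suffix appended), checks that
    // such a reference carries minOccurs = maxOccurs = 1 as required by
    // src-redefine.6.1.2, and returns a count of the matching references it
    // found and rewrote.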
private int changeRedefineGroup(QName originalName, String elementSought, String newName, Element curr) throws Exception { int result = 0; for (Element child = XUtil.getFirstChildElement(curr); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if (!name.equals(elementSought)) result += changeRedefineGroup(originalName, elementSought, newName, child); else { String ref = child.getAttribute( SchemaSymbols.ATT_REF ); if (ref.length() != 0) { String prefix = ""; String localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } String uriStr = resolvePrefixToURI(prefix); if(originalName.equals(new QName(-1, fStringPool.addSymbol(localpart), fStringPool.addSymbol(localpart), fStringPool.addSymbol(uriStr)))) { if(prefix.length() == 0) child.setAttribute(SchemaSymbols.ATT_REF, newName); else child.setAttribute(SchemaSymbols.ATT_REF, prefix + ":" + newName); result++; if(elementSought.equals(SchemaSymbols.ELT_GROUP)) { String minOccurs = child.getAttribute( SchemaSymbols.ATT_MINOCCURS ); String maxOccurs = child.getAttribute( SchemaSymbols.ATT_MAXOCCURS ); if(!((maxOccurs.length() == 0 || maxOccurs.equals("1")) && (minOccurs.length() == 0 || minOccurs.equals("1")))) { //REVISIT: localize reportGenericSchemaError("src-redefine.6.1.2: the group " + ref + " which contains a reference to a group being redefined must have minOccurs = maxOccurs = 1"); } } } } // if ref was null some other stage of processing will flag the error } } return result; } // changeRedefineGroup // This simple function looks for the first occurrence of an eltLocalname // schema information item and appropriately changes the value of // its name or type attribute from oldName to newName. // Root contains the root of the schema being operated upon. // If it turns out that what we're looking for is in a <redefine> though, then we // just rename it--and it's reference--to be the same and wait until // renameRedefineDecls can get its hands on it and do it properly. private void fixRedefinedSchema(String eltLocalname, String oldName, String newName, Element schemaToRedefine, SchemaInfo currSchema) throws Exception { boolean foundIt = false; for (Element child = XUtil.getFirstChildElement(schemaToRedefine); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if(name.equals(SchemaSymbols.ELT_REDEFINE)) { // need to search the redefine decl... for (Element redefChild = XUtil.getFirstChildElement(child); redefChild != null; redefChild = XUtil.getNextSiblingElement(redefChild)) { String redefName = redefChild.getLocalName(); if (redefName.equals(eltLocalname) ) { String infoItemName = redefChild.getAttribute( SchemaSymbols.ATT_NAME ); if(!infoItemName.equals(oldName)) continue; else { // found it! foundIt = true; openRedefinedSchema(child, currSchema); if(!fRedefineSucceeded) return; NamespacesScope saveNSS = (NamespacesScope)fNamespacesScope.clone(); currSchema.restore(); if (validateRedefineNameChange(eltLocalname, oldName, newName+redefIdentifier, redefChild) && (currSchema.getNext() != null)) { currSchema.getNext().restore(); fixRedefinedSchema(eltLocalname, oldName, newName+redefIdentifier, fSchemaRootElement, currSchema.getNext()); } fNamespacesScope = saveNSS; redefChild.setAttribute( SchemaSymbols.ATT_NAME, newName ); // and we now know we will traverse this, so set fTraversedRedefineElements appropriately... 
                            fTraversedRedefineElements.addElement(newName);
                            currSchema.restore();
                            fCurrentSchemaInfo = currSchema;
                            break;
                        }
                    }
                } //for
                if (foundIt) break;
            }
            else if (name.equals(eltLocalname) ) {
                String infoItemName = child.getAttribute( SchemaSymbols.ATT_NAME );
                if(!infoItemName.equals(oldName))
                    continue;
                else { // found it!
                    foundIt = true;
                    child.setAttribute( SchemaSymbols.ATT_NAME, newName );
                    break;
                }
            }
        } //for
        if(!foundIt) {
            fRedefineSucceeded = false;
            // REVISIT: localize
            reportGenericSchemaError("could not find a declaration in the schema to be redefined corresponding to " + oldName);
        }
    } // end fixRedefinedSchema

    // This method returns true if the redefine component is valid and it was possible to
    // revise it correctly.  What "correctly" means depends on whether
    // renameRedefinedComponents or fixRedefinedSchema is the caller.
    // This method also prepends a prefix onto newName if necessary; newName will never contain one.
    private boolean validateRedefineNameChange(String eltLocalname, String oldName, String newName, Element child) throws Exception {
        if (eltLocalname.equals(SchemaSymbols.ELT_SIMPLETYPE)) {
            QName processedTypeName = new QName(-1, fStringPool.addSymbol(oldName), fStringPool.addSymbol(oldName), fTargetNSURI);
            Element grandKid = XUtil.getFirstChildElement(child);
            if (grandKid == null) {
                fRedefineSucceeded = false;
                // REVISIT: Localize
                reportGenericSchemaError("a simpleType child of a <redefine> must have a restriction element as a child");
            } else {
                String grandKidName = grandKid.getLocalName();
                if(grandKidName.equals(SchemaSymbols.ELT_ANNOTATION)) {
                    // skip the annotation, but guard against a missing sibling before
                    // asking for its name (the null case is reported just below)
                    grandKid = XUtil.getNextSiblingElement(grandKid);
                    if (grandKid != null)
                        grandKidName = grandKid.getLocalName();
                }
                if (grandKid == null) {
                    fRedefineSucceeded = false;
                    // REVISIT: Localize
                    reportGenericSchemaError("a simpleType child of a <redefine> must have a restriction element as a child");
                } else if(!grandKidName.equals(SchemaSymbols.ELT_RESTRICTION)) {
                    fRedefineSucceeded = false;
                    // REVISIT: Localize
                    reportGenericSchemaError("a simpleType child of a <redefine> must have a restriction element as a child");
                } else {
                    String derivedBase = grandKid.getAttribute( SchemaSymbols.ATT_BASE );
                    QName processedDerivedBase = parseBase(derivedBase);
                    if(!processedTypeName.equals(processedDerivedBase)) {
                        fRedefineSucceeded = false;
                        // REVISIT: Localize
                        reportGenericSchemaError("the base attribute of the restriction child of a simpleType child of a redefine must have the same value as the simpleType's name attribute");
                    } else {
                        // now we have to do the renaming...
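                        // e.g. (hypothetical names): a <restriction base="tns:Size"> child of the
                        // redefining simpleType is rewritten to base="tns:Size" + redefIdentifier,
                        // so that the new definition restricts the renamed original rather than itself.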
                        String prefix = "";
                        int colonptr = derivedBase.indexOf(":");
                        if ( colonptr > 0)
                            prefix = derivedBase.substring(0,colonptr) + ":";
                        grandKid.setAttribute( SchemaSymbols.ATT_BASE, prefix + newName );
                        return true;
                    }
                }
            }
        }
        else if (eltLocalname.equals(SchemaSymbols.ELT_COMPLEXTYPE)) {
            QName processedTypeName = new QName(-1, fStringPool.addSymbol(oldName), fStringPool.addSymbol(oldName), fTargetNSURI);
            Element grandKid = XUtil.getFirstChildElement(child);
            if (grandKid == null) {
                fRedefineSucceeded = false;
                // REVISIT: Localize
                reportGenericSchemaError("a complexType child of a <redefine> must have a restriction or extension element as a grandchild");
            } else {
                if(grandKid.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) {
                    grandKid = XUtil.getNextSiblingElement(grandKid);
                }
                if (grandKid == null) {
                    fRedefineSucceeded = false;
                    // REVISIT: Localize
                    reportGenericSchemaError("a complexType child of a <redefine> must have a restriction or extension element as a grandchild");
                } else {
                    // have to go one more level down; let another pass worry whether complexType is valid.
                    Element greatGrandKid = XUtil.getFirstChildElement(grandKid);
                    if (greatGrandKid == null) {
                        fRedefineSucceeded = false;
                        // REVISIT: Localize
                        reportGenericSchemaError("a complexType child of a <redefine> must have a restriction or extension element as a grandchild");
                    } else {
                        String greatGrandKidName = greatGrandKid.getLocalName();
                        if(greatGrandKidName.equals(SchemaSymbols.ELT_ANNOTATION)) {
                            // skip the annotation, but guard against a missing sibling before
                            // asking for its name (the null case is reported just below)
                            greatGrandKid = XUtil.getNextSiblingElement(greatGrandKid);
                            if (greatGrandKid != null)
                                greatGrandKidName = greatGrandKid.getLocalName();
                        }
                        if (greatGrandKid == null) {
                            fRedefineSucceeded = false;
                            // REVISIT: Localize
                            reportGenericSchemaError("a complexType child of a <redefine> must have a restriction or extension element as a grandchild");
                        } else if(!greatGrandKidName.equals(SchemaSymbols.ELT_RESTRICTION) && !greatGrandKidName.equals(SchemaSymbols.ELT_EXTENSION)) {
                            fRedefineSucceeded = false;
                            // REVISIT: Localize
                            reportGenericSchemaError("a complexType child of a <redefine> must have a restriction or extension element as a grandchild");
                        } else {
                            String derivedBase = greatGrandKid.getAttribute( SchemaSymbols.ATT_BASE );
                            QName processedDerivedBase = parseBase(derivedBase);
                            if(!processedTypeName.equals(processedDerivedBase)) {
                                fRedefineSucceeded = false;
                                // REVISIT: Localize
                                reportGenericSchemaError("the base attribute of the restriction or extension grandchild of a complexType child of a redefine must have the same value as the complexType's name attribute");
                            } else {
                                // now we have to do the renaming...
String prefix = ""; int colonptr = derivedBase.indexOf(":"); if ( colonptr > 0) prefix = derivedBase.substring(0,colonptr) + ":"; greatGrandKid.setAttribute( SchemaSymbols.ATT_BASE, prefix + newName ); return true; } } } } } } else if (eltLocalname.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP)) { QName processedBaseName = new QName(-1, fStringPool.addSymbol(oldName), fStringPool.addSymbol(oldName), fTargetNSURI); int attGroupRefsCount = changeRedefineGroup(processedBaseName, eltLocalname, newName, child); if(attGroupRefsCount > 1) { fRedefineSucceeded = false; // REVISIT: localize reportGenericSchemaError("if an attributeGroup child of a <redefine> element contains an attributeGroup ref'ing itself, it must have exactly 1; this one has " + attGroupRefsCount); } else if (attGroupRefsCount == 1) { return true; } else fRedefineAttributeGroupMap.put(oldName, newName); } else if (eltLocalname.equals(SchemaSymbols.ELT_GROUP)) { QName processedBaseName = new QName(-1, fStringPool.addSymbol(oldName), fStringPool.addSymbol(oldName), fTargetNSURI); int groupRefsCount = changeRedefineGroup(processedBaseName, eltLocalname, newName, child); String restrictedName = newName.substring(0, newName.length()-redefIdentifier.length()); if(!fRedefineSucceeded) { fRestrictedRedefinedGroupRegistry.put(fTargetNSURIString+","+restrictedName, new Boolean(false)); } if(groupRefsCount > 1) { fRedefineSucceeded = false; fRestrictedRedefinedGroupRegistry.put(fTargetNSURIString+","+restrictedName, new Boolean(false)); // REVISIT: localize reportGenericSchemaError("if a group child of a <redefine> element contains a group ref'ing itself, it must have exactly 1; this one has " + groupRefsCount); } else if (groupRefsCount == 1) { fRestrictedRedefinedGroupRegistry.put(fTargetNSURIString+","+restrictedName, new Boolean(false)); return true; } else { fGroupNameRegistry.put(fTargetNSURIString + "," + oldName, newName); fRestrictedRedefinedGroupRegistry.put(fTargetNSURIString+","+restrictedName, new Boolean(true)); } } else { fRedefineSucceeded = false; // REVISIT: Localize reportGenericSchemaError("internal Xerces error; please submit a bug with schema as testcase"); } // if we get here then we must have reported an error and failed somewhere... return false; } // validateRedefineNameChange private void traverseImport(Element importDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_GLOBAL; Hashtable attrValues = generalCheck(importDecl, scope); checkContent(importDecl, XUtil.getFirstChildElement(importDecl), true); String location = importDecl.getAttribute(SchemaSymbols.ATT_SCHEMALOCATION); // expand it before passing it to the parser InputSource source = null; if (fEntityResolver != null) { source = fEntityResolver.resolveEntity("", location); } if (source == null) { location = expandSystemId(location, fCurrentSchemaURL); source = new InputSource(location); } else { // create a string for uniqueness of this imported schema in fImportLocations if (source.getPublicId () != null) location = source.getPublicId (); location += (',' + source.getSystemId ()); } if (fImportLocations.contains((Object)location)) { return; } fImportLocations.addElement((Object)location); // check to make sure we're not importing ourselves... 
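        // (the next few checks enforce the import constraints; for example, hypothetically,
        //  a schema whose targetNamespace is "urn:example:a" may <import namespace="urn:example:b"/>
        //  but must not import "urn:example:a" itself (src-import.1.1), and an <import> with no
        //  namespace attribute is only legal when the importing schema declares a targetNamespace
        //  (src-import.1.2))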
if(source.getSystemId().equals(fCurrentSchemaURL)) { // REVISIT: localize return; } String namespaceString = importDecl.getAttribute(SchemaSymbols.ATT_NAMESPACE); if(namespaceString.length() == 0) { if(fTargetNSURI == StringPool.EMPTY_STRING) { // REVISIT: localize reportGenericSchemaError("src-import.1.2: if the namespace attribute on an <import> element is not present, the <import>ing schema must have a targetNamespace"); } } else { if(fTargetNSURIString.equals(namespaceString.trim())) { // REVISIT: localize reportGenericSchemaError("src-import.1.1: the namespace attribute of an <import> element must not be the same as the targetNamespace of the <import>ing schema"); } } SchemaGrammar importedGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(namespaceString); if (importedGrammar == null) { importedGrammar = new SchemaGrammar(); } else { return; } DOMParser parser = new IgnoreWhitespaceParser(); parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() { public void fatalError(SAXParseException ex) throws SAXException { StringBuffer str = new StringBuffer(); String systemId_ = ex.getSystemId(); if (systemId_ != null) { int index = systemId_.lastIndexOf('/'); if (index != -1) systemId_ = systemId_.substring(index + 1); str.append(systemId_); } str.append(':').append(ex.getLineNumber()).append(':').append(ex.getColumnNumber()); String message = ex.getMessage(); if(message.toLowerCase().trim().endsWith("not found.")) { System.err.println("[Warning] "+ str.toString()+": "+ message); } else { // do standard thing System.err.println("[Fatal Error] "+ str.toString()+":"+message); throw ex; } } }); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://xml.org/sax/features/namespaces", true); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } try { parser.parse( source ); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { e.printStackTrace(); } Document document = parser.getDocument(); //Our Grammar Element root = null; if (document != null) { root = document.getDocumentElement(); } if (root != null) { String targetNSURI = getTargetNamespaceString(root); if (!targetNSURI.equals(namespaceString) ) { // REVISIT: Localize reportGenericSchemaError("imported schema '"+location+"' has a different targetNameSpace '" +targetNSURI+"' from what is declared '"+namespaceString+"'."); } else { TraverseSchema impSchema = new TraverseSchema(root, fStringPool, importedGrammar, fGrammarResolver, fErrorReporter, location, fEntityResolver, fFullConstraintChecking, fGeneralAttrCheck); Enumeration ics = impSchema.fIdentityConstraints.keys(); while(ics.hasMoreElements()) { Object icsKey = ics.nextElement(); fIdentityConstraints.put(icsKey, impSchema.fIdentityConstraints.get(icsKey)); } Enumeration icNames = impSchema.fIdentityConstraintNames.keys(); while(icNames.hasMoreElements()) { String icsNameKey = (String)icNames.nextElement(); fIdentityConstraintNames.put(icsNameKey, impSchema.fIdentityConstraintNames.get(icsNameKey)); } } } else { reportGenericSchemaError("Could not get the doc root for imported Schema file: "+location); } } // utility method for finding the targetNamespace (and flagging errors if they occur) private String getTargetNamespaceString( Element root) throws Exception { String targetNSURI = ""; Attr targetNSAttr = 
root.getAttributeNode(SchemaSymbols.ATT_TARGETNAMESPACE); if(targetNSAttr != null) { targetNSURI=targetNSAttr.getValue(); if(targetNSURI.length() == 0) { // REVISIT: localize reportGenericSchemaError("sch-prop-correct.1: \"\" is not a legal value for the targetNamespace attribute; the attribute must either be absent or contain a nonempty value"); } } return targetNSURI; } // end getTargetNamespaceString(Element) /** * <annotation>(<appinfo> | <documentation>)*</annotation> * * @param annotationDecl: the DOM node corresponding to the <annotation> info item */ private void traverseAnnotationDecl(Element annotationDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(annotationDecl, scope); for(Element child = XUtil.getFirstChildElement(annotationDecl); child != null; child = XUtil.getNextSiblingElement(child)) { String name = child.getLocalName(); if(!((name.equals(SchemaSymbols.ELT_APPINFO)) || (name.equals(SchemaSymbols.ELT_DOCUMENTATION)))) { // REVISIT: Localize reportGenericSchemaError("an <annotation> can only contain <appinfo> and <documentation> elements"); } // General Attribute Checking attrValues = generalCheck(child, scope); } } // Evaluates content of Annotation if present. // @param: elm - top element // @param: content - content must be annotation? or some other simple content // @param: isEmpty: -- true if the content allowed is (annotation?) only // false if must have some element (with possible preceding <annotation?>) //REVISIT: this function should be used in all traverse* methods! private Element checkContent( Element elm, Element content, boolean isEmpty ) throws Exception { //isEmpty = true-> means content can be null! if ( content == null) { if (!isEmpty) { reportSchemaError(SchemaMessageProvider.ContentError, new Object [] { elm.getAttribute( SchemaSymbols.ATT_NAME )}); } return null; } if (content.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) { traverseAnnotationDecl( content ); content = XUtil.getNextSiblingElement(content); if (content == null ) { //must be followed by <simpleType?> if (!isEmpty) { reportSchemaError(SchemaMessageProvider.ContentError, new Object [] { elm.getAttribute( SchemaSymbols.ATT_NAME )}); } return null; } if (content.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) { reportSchemaError(SchemaMessageProvider.AnnotationError, new Object [] { elm.getAttribute( SchemaSymbols.ATT_NAME )}); return null; } //return null if expected only annotation?, else returns updated content } return content; } //@param: elm - top element //@param: baseTypeStr - type (base/itemType/memberTypes) //@param: baseRefContext: whether the caller is using this type as a base for restriction, union or list //return DatatypeValidator available for the baseTypeStr, null if not found or disallowed. // also throws an error if the base type won't allow itself to be used in this context. //REVISIT: this function should be used in some|all traverse* methods! 
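    // For example (hypothetical call): findDTValidator(elm, "xsd:token", SchemaSymbols.RESTRICTION)
    // resolves the "xsd" prefix, fetches the built-in token validator (traversing a user-defined
    // simpleType first if the base has not been registered yet), and reports an error if the base's
    // recorded "final" set forbids its use in this baseRefContext.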
private DatatypeValidator findDTValidator (Element elm, String baseTypeStr, int baseRefContext ) throws Exception{ int baseType = fStringPool.addSymbol( baseTypeStr ); String prefix = ""; DatatypeValidator baseValidator = null; String localpart = baseTypeStr; int colonptr = baseTypeStr.indexOf(":"); if ( colonptr > 0) { prefix = baseTypeStr.substring(0,colonptr); localpart = baseTypeStr.substring(colonptr+1); } String uri = resolvePrefixToURI(prefix); baseValidator = getDatatypeValidator(uri, localpart); if (baseValidator == null) { Element baseTypeNode = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (baseTypeNode != null) { traverseSimpleTypeDecl( baseTypeNode ); baseValidator = getDatatypeValidator(uri, localpart); } } Integer finalValue; if ( baseValidator == null ) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { elm.getAttribute( SchemaSymbols.ATT_BASE ), elm.getAttribute(SchemaSymbols.ATT_NAME)}); } else { finalValue = ((Integer)fSimpleTypeFinalRegistry.get(uri + "," +localpart)); if((finalValue != null) && ((finalValue.intValue() & baseRefContext) != 0)) { //REVISIT: localize reportGenericSchemaError("the base type " + baseTypeStr + " does not allow itself to be used as the base for a restriction and/or as a type in a list and/or union"); return baseValidator; } } return baseValidator; } private void checkEnumerationRequiredNotation(String name, String type) throws Exception{ String localpart = type; int colonptr = type.indexOf(":"); if ( colonptr > 0) { localpart = type.substring(colonptr+1); } if (localpart.equals("NOTATION")) { reportGenericSchemaError("[enumeration-required-notation] It is an error for NOTATION to be used "+ "directly in a schema in element/attribute '"+name+"'"); } } // @used in traverseSimpleType // on return we need to pop the last simpleType name from // the name stack private int resetSimpleTypeNameStack(int returnValue){ if (!fSimpleTypeNameStack.empty()) { fSimpleTypeNameStack.pop(); } return returnValue; } // @used in traverseSimpleType // report an error cos-list-of-atomic and reset the last name of the list datatype we traversing private void reportCosListOfAtomic () throws Exception{ reportGenericSchemaError("cos-list-of-atomic: The itemType must have a {variety} of atomic or union (in which case all the {member type definitions} must be atomic)"); fListName=""; } // @used in traverseSimpleType // find if union datatype validator has list datatype member. private boolean isListDatatype (DatatypeValidator validator){ if (validator instanceof UnionDatatypeValidator) { Vector temp = ((UnionDatatypeValidator)validator).getBaseValidators(); for (int i=0;i<temp.size();i++) { if (temp.elementAt(i) instanceof ListDatatypeValidator) { return true; } if (temp.elementAt(i) instanceof UnionDatatypeValidator) { if (isListDatatype((DatatypeValidator)temp.elementAt(i))) { return true; } } } } return false; } /** * Traverse SimpleType declaration: * <simpleType * final = #all | list of (restriction, union or list) * id = ID * name = NCName> * Content: (annotation? , ((list | restriction | union))) * </simpleType> * traverse <list>|<restriction>|<union> * * @param simpleTypeDecl * @return */ private int traverseSimpleTypeDecl( Element simpleTypeDecl ) throws Exception { // General Attribute Checking int scope = isTopLevel(simpleTypeDecl)? 
GeneralAttrCheck.ELE_CONTEXT_GLOBAL: GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(simpleTypeDecl, scope); String nameProperty = simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME ); String qualifiedName = nameProperty; // set qualified name if ( nameProperty.length() == 0) { // anonymous simpleType qualifiedName = fTargetNSURIString+","+"#S#"+(fSimpleTypeAnonCount++); fStringPool.addSymbol(qualifiedName); } else { // this behaviour has been changed so that we neither // process unqualified names as if they came from the schemaforschema namespace nor // fail to pick up unqualified names from schemas with no // targetnamespace. - NG //if (fTargetNSURIString.length () != 0) { qualifiedName = fTargetNSURIString+","+qualifiedName; fStringPool.addSymbol( nameProperty ); } //check if we have already traversed the same simpleType decl if (fDatatypeRegistry.getDatatypeValidator(qualifiedName)!=null) { return resetSimpleTypeNameStack(fStringPool.addSymbol(qualifiedName)); } else { if (fSimpleTypeNameStack.search(qualifiedName) != -1 ){ // cos-no-circular-unions && no circular definitions reportGenericSchemaError("cos-no-circular-unions: no circular definitions are allowed for an element '"+ nameProperty+"'"); return resetSimpleTypeNameStack(-1); } } // update _final_ registry Attr finalAttr = simpleTypeDecl.getAttributeNode(SchemaSymbols.ATT_FINAL); int finalProperty = 0; if(finalAttr != null) finalProperty = parseFinalSet(finalAttr.getValue()); else finalProperty = parseFinalSet(null); // if we have a nonzero final , store it in the hash... if(finalProperty != 0) fSimpleTypeFinalRegistry.put(qualifiedName, new Integer(finalProperty)); // remember name being traversed to // avoid circular definitions in union fSimpleTypeNameStack.push(qualifiedName); //annotation?,(list|restriction|union) Element content = XUtil.getFirstChildElement(simpleTypeDecl); content = checkContent(simpleTypeDecl, content, false); if (content == null) { return resetSimpleTypeNameStack(-1); } // General Attribute Checking scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable contentAttrs = generalCheck(content, scope); //use content.getLocalName for the cases there "xsd:" is a prefix, ei. 
"xsd:list" String varietyProperty = content.getLocalName(); String baseTypeQNameProperty = null; Vector dTValidators = null; int size = 0; StringTokenizer unionMembers = null; boolean list = false; boolean union = false; boolean restriction = false; int numOfTypes = 0; //list/restriction = 1, union = "+" if (varietyProperty.equals(SchemaSymbols.ELT_LIST)) { //traverse List baseTypeQNameProperty = content.getAttribute( SchemaSymbols.ATT_ITEMTYPE ); list = true; if (fListName.length() != 0) { // parent is <list> datatype reportCosListOfAtomic(); return resetSimpleTypeNameStack(-1); } else { fListName = qualifiedName; } } else if (varietyProperty.equals(SchemaSymbols.ELT_RESTRICTION)) { //traverse Restriction baseTypeQNameProperty = content.getAttribute( SchemaSymbols.ATT_BASE ); restriction= true; } else if (varietyProperty.equals(SchemaSymbols.ELT_UNION)) { //traverse union union = true; baseTypeQNameProperty = content.getAttribute( SchemaSymbols.ATT_MEMBERTYPES); if (baseTypeQNameProperty.length() != 0) { unionMembers = new StringTokenizer( baseTypeQNameProperty ); size = unionMembers.countTokens(); } else { size = 1; //at least one must be seen as <simpleType> decl } dTValidators = new Vector (size, 2); } else { reportSchemaError(SchemaMessageProvider.FeatureUnsupported, new Object [] { varietyProperty }); return -1; } if(XUtil.getNextSiblingElement(content) != null) { // REVISIT: Localize reportGenericSchemaError("error in content of simpleType"); } int typeNameIndex; DatatypeValidator baseValidator = null; if ( baseTypeQNameProperty.length() == 0 ) { //must 'see' <simpleType> //content = {annotation?,simpleType?...} content = XUtil.getFirstChildElement(content); //check content (annotation?, ...) content = checkContent(simpleTypeDecl, content, false); if (content == null) { return resetSimpleTypeNameStack(-1); } if (content.getLocalName().equals( SchemaSymbols.ELT_SIMPLETYPE )) { typeNameIndex = traverseSimpleTypeDecl(content); if (typeNameIndex!=-1) { baseValidator=fDatatypeRegistry.getDatatypeValidator(fStringPool.toString(typeNameIndex)); if (baseValidator !=null && union) { dTValidators.addElement((DatatypeValidator)baseValidator); } } if ( typeNameIndex == -1 || baseValidator == null) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { content.getAttribute( SchemaSymbols.ATT_BASE ), content.getAttribute(SchemaSymbols.ATT_NAME) }); return resetSimpleTypeNameStack(-1); } } else { reportSchemaError(SchemaMessageProvider.ListUnionRestrictionError, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME )}); return resetSimpleTypeNameStack(-1); } } //end - must see simpleType? else { //base was provided - get proper validator. numOfTypes = 1; if (union) { numOfTypes= size; } // this loop is also where we need to find out whether the type being used as // a base (or itemType or whatever) allows such things. int baseRefContext = (restriction? SchemaSymbols.RESTRICTION:0); baseRefContext = baseRefContext | (union? SchemaSymbols.UNION:0); baseRefContext = baseRefContext | (list ? 
SchemaSymbols.LIST:0); for (int i=0; i<numOfTypes; i++) { //find all validators if (union) { baseTypeQNameProperty = unionMembers.nextToken(); } baseValidator = findDTValidator ( simpleTypeDecl, baseTypeQNameProperty, baseRefContext); if ( baseValidator == null) { return resetSimpleTypeNameStack(-1); } // (variety is list)cos-list-of-atomic if (fListName.length() != 0 ) { if (baseValidator instanceof ListDatatypeValidator) { reportCosListOfAtomic(); return resetSimpleTypeNameStack(-1); } // if baseValidator is of type (union) need to look // at Union validators to make sure that List is not one of them if (isListDatatype(baseValidator)) { reportCosListOfAtomic(); return resetSimpleTypeNameStack(-1); } } if (union) { dTValidators.addElement((DatatypeValidator)baseValidator); //add validator to structure } //REVISIT: Should we raise exception here? // if baseValidator.isInstanceOf(LIST) and UNION if ( list && (baseValidator instanceof UnionDatatypeValidator)) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ), simpleTypeDecl.getAttribute(SchemaSymbols.ATT_NAME)}); return -1; } } } //end - base is available // move to next child // <base==empty)->[simpleType]->[facets] OR // <base!=empty)->[facets] if (baseTypeQNameProperty.length() == 0) { content = XUtil.getNextSiblingElement( content ); } else { content = XUtil.getFirstChildElement(content); } //get more types for union if any if (union) { int index=size; if (baseTypeQNameProperty.length() != 0 ) { content = checkContent(simpleTypeDecl, content, true); } while (content!=null) { typeNameIndex = traverseSimpleTypeDecl(content); if (typeNameIndex!=-1) { baseValidator=fDatatypeRegistry.getDatatypeValidator(fStringPool.toString(typeNameIndex)); if (baseValidator != null) { if (fListName.length() != 0 && baseValidator instanceof ListDatatypeValidator) { reportCosListOfAtomic(); return resetSimpleTypeNameStack(-1); } dTValidators.addElement((DatatypeValidator)baseValidator); } } if ( baseValidator == null || typeNameIndex == -1) { reportSchemaError(SchemaMessageProvider.UnknownBaseDatatype, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_BASE ), simpleTypeDecl.getAttribute(SchemaSymbols.ATT_NAME)}); return (-1); } content = XUtil.getNextSiblingElement( content ); } } // end - traverse Union if (fListName.length() != 0) { // reset fListName, meaning that we are done with // traversing <list> and its itemType resolves to atomic value if (fListName.equals(qualifiedName)) { fListName = ""; } } int numFacets=0; fFacetData.clear(); if (restriction && content != null) { short flags = 0; // flag facets that have fixed="true" int numEnumerationLiterals = 0; Vector enumData = new Vector(); content = checkContent(simpleTypeDecl, content , true); StringBuffer pattern = null; String facet; while (content != null) { if (content.getNodeType() == Node.ELEMENT_NODE) { // General Attribute Checking contentAttrs = generalCheck(content, scope); numFacets++; facet =content.getLocalName(); if (facet.equals(SchemaSymbols.ELT_ENUMERATION)) { numEnumerationLiterals++; String enumVal = content.getAttribute(SchemaSymbols.ATT_VALUE); String localName; if (baseValidator instanceof NOTATIONDatatypeValidator) { String prefix = ""; String localpart = enumVal; int colonptr = enumVal.indexOf(":"); if ( colonptr > 0) { prefix = enumVal.substring(0,colonptr); localpart = enumVal.substring(colonptr+1); } String uriStr = (prefix.length() != 
0)?resolvePrefixToURI(prefix):fTargetNSURIString; nameProperty=uriStr + ":" + localpart; localName = (String)fNotationRegistry.get(nameProperty); if(localName == null){ localName = traverseNotationFromAnotherSchema( localpart, uriStr); if (localName == null) { reportGenericSchemaError("Notation '" + localpart + "' not found in the grammar "+ uriStr); } } enumVal=nameProperty; } enumData.addElement(enumVal); checkContent(simpleTypeDecl, XUtil.getFirstChildElement( content ), true); } else if (facet.equals(SchemaSymbols.ELT_ANNOTATION) || facet.equals(SchemaSymbols.ELT_SIMPLETYPE)) { reportSchemaError(SchemaMessageProvider.ListUnionRestrictionError, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME )}); } else if (facet.equals(SchemaSymbols.ELT_PATTERN)) { if (pattern == null) { pattern = new StringBuffer (content.getAttribute( SchemaSymbols.ATT_VALUE )); } else { //datatypes: 5.2.4 pattern: src-multiple-pattern pattern.append("|"); pattern.append(content.getAttribute( SchemaSymbols.ATT_VALUE )); checkContent(simpleTypeDecl, XUtil.getFirstChildElement( content ), true); } } else { if ( fFacetData.containsKey(facet) ) reportSchemaError(SchemaMessageProvider.DatatypeError, new Object [] {"The facet '" + facet + "' is defined more than once."} ); fFacetData.put(facet,content.getAttribute( SchemaSymbols.ATT_VALUE )); if (content.getAttribute( SchemaSymbols.ATT_FIXED).equals("true") || content.getAttribute( SchemaSymbols.ATT_FIXED).equals("1")){ // set fixed facet flags // length - must remain const through derivation // thus we don't care if it fixed if ( facet.equals(SchemaSymbols.ELT_MINLENGTH) ) { flags |= DatatypeValidator.FACET_MINLENGTH; } else if (facet.equals(SchemaSymbols.ELT_MAXLENGTH)) { flags |= DatatypeValidator.FACET_MAXLENGTH; } else if (facet.equals(SchemaSymbols.ELT_MAXEXCLUSIVE)) { flags |= DatatypeValidator.FACET_MAXEXCLUSIVE; } else if (facet.equals(SchemaSymbols.ELT_MAXINCLUSIVE)) { flags |= DatatypeValidator.FACET_MAXINCLUSIVE; } else if (facet.equals(SchemaSymbols.ELT_MINEXCLUSIVE)) { flags |= DatatypeValidator.FACET_MINEXCLUSIVE; } else if (facet.equals(SchemaSymbols.ELT_MININCLUSIVE)) { flags |= DatatypeValidator.FACET_MININCLUSIVE; } else if (facet.equals(SchemaSymbols.ELT_TOTALDIGITS)) { flags |= DatatypeValidator.FACET_TOTALDIGITS; } else if (facet.equals(SchemaSymbols.ELT_FRACTIONDIGITS)) { flags |= DatatypeValidator.FACET_FRACTIONDIGITS; } else if (facet.equals(SchemaSymbols.ELT_WHITESPACE) && baseValidator instanceof StringDatatypeValidator) { flags |= DatatypeValidator.FACET_WHITESPACE; } } checkContent(simpleTypeDecl, XUtil.getFirstChildElement( content ), true); } } content = XUtil.getNextSiblingElement(content); } if (numEnumerationLiterals > 0) { fFacetData.put(SchemaSymbols.ELT_ENUMERATION, enumData); } if (pattern !=null) { fFacetData.put(SchemaSymbols.ELT_PATTERN, pattern.toString()); } if (flags != 0) { fFacetData.put(DatatypeValidator.FACET_FIXED, new Short(flags)); } } else if (list && content!=null) { // report error - must not have any children! if (baseTypeQNameProperty.length() != 0) { content = checkContent(simpleTypeDecl, content, true); if (content!=null) { reportSchemaError(SchemaMessageProvider.ListUnionRestrictionError, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME )}); } } else { reportSchemaError(SchemaMessageProvider.ListUnionRestrictionError, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME )}); //REVISIT: should we return? 
} } else if (union && content!=null) { //report error - must not have any children! if (baseTypeQNameProperty.length() != 0) { content = checkContent(simpleTypeDecl, content, true); if (content!=null) { reportSchemaError(SchemaMessageProvider.ListUnionRestrictionError, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME )}); } } else { reportSchemaError(SchemaMessageProvider.ListUnionRestrictionError, new Object [] { simpleTypeDecl.getAttribute( SchemaSymbols.ATT_NAME )}); //REVISIT: should we return? } } // create & register validator for "generated" type if it doesn't exist try { DatatypeValidator newValidator = fDatatypeRegistry.getDatatypeValidator( qualifiedName ); if( newValidator == null ) { // not previously registered if (list) { fDatatypeRegistry.createDatatypeValidator( qualifiedName, baseValidator, fFacetData,true); } else if (restriction) { fDatatypeRegistry.createDatatypeValidator( qualifiedName, baseValidator, fFacetData,false); } else { //union fDatatypeRegistry.createDatatypeValidator( qualifiedName, dTValidators); } } } catch (Exception e) { reportSchemaError(SchemaMessageProvider.DatatypeError,new Object [] { e.getMessage() }); } return resetSimpleTypeNameStack(fStringPool.addSymbol(qualifiedName)); } /* * <any * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * namespace = (##any | ##other) | List of (anyURI | (##targetNamespace | ##local)) * processContents = lax | skip | strict> * Content: (annotation?) * </any> */ private int traverseAny(Element child) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(child, scope); Element annotation = checkContent( child, XUtil.getFirstChildElement(child), true ); if(annotation != null ) { // REVISIT: Localize reportGenericSchemaError("<any> elements can contain at most one <annotation> element in their children"); } int anyIndex = -1; String namespace = child.getAttribute(SchemaSymbols.ATT_NAMESPACE).trim(); String processContents = child.getAttribute("processContents").trim(); int processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY; int processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER; int processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_NS; if (processContents.length() > 0 && !processContents.equals("strict")) { if (processContents.equals("lax")) { processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY_LAX; processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER_LAX; processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_NS_LAX; } else if (processContents.equals("skip")) { processContentsAny = XMLContentSpec.CONTENTSPECNODE_ANY_SKIP; processContentsAnyOther = XMLContentSpec.CONTENTSPECNODE_ANY_OTHER_SKIP; processContentsAnyLocal = XMLContentSpec.CONTENTSPECNODE_ANY_NS_SKIP; } } if (namespace.length() == 0 || namespace.equals("##any")) { anyIndex = fSchemaGrammar.addContentSpecNode(processContentsAny, -1, StringPool.EMPTY_STRING, false); } else if (namespace.equals("##other")) { String uri = fTargetNSURIString; int uriIndex = fStringPool.addSymbol(uri); anyIndex = fSchemaGrammar.addContentSpecNode(processContentsAnyOther, -1, uriIndex, false); } else if (namespace.length() > 0) { int uriIndex, leafIndex, choiceIndex; StringTokenizer tokenizer = new StringTokenizer(namespace); String token = tokenizer.nextToken(); if (token.equals(SchemaSymbols.ATTVAL_TWOPOUNDLOCAL)) { uriIndex = StringPool.EMPTY_STRING; } else { if (token.equals("##targetNamespace")) token = 
fTargetNSURIString;
                    uriIndex = fStringPool.addSymbol(token);
                }
                choiceIndex = fSchemaGrammar.addContentSpecNode(processContentsAnyLocal, -1, uriIndex, false);
                // store a list of seen uri, so that if there are duplicate
                // namespaces, we only add one of them to the content model
                int[] uriList = new int[8];
                uriList[0] = uriIndex;
                int uriCount = 1;
                while (tokenizer.hasMoreElements()) {
                    token = tokenizer.nextToken();
                    if (token.equals(SchemaSymbols.ATTVAL_TWOPOUNDLOCAL)) {
                        uriIndex = StringPool.EMPTY_STRING;
                    } else {
                        if (token.equals("##targetNamespace"))
                            token = fTargetNSURIString;
                        uriIndex = fStringPool.addSymbol(token);
                    }
                    // check whether we have seen this namespace; if so, skip it
                    // (use a flag and continue the enclosing while loop, since a continue
                    // inside the scanning for loop would not actually skip the duplicate)
                    boolean uriSeen = false;
                    for (int i = 0; i < uriCount; i++) {
                        if (uriList[i] == uriIndex) {
                            uriSeen = true;
                            break;
                        }
                    }
                    if (uriSeen)
                        continue;
                    // add this namespace to the list
                    if (uriList.length == uriCount) {
                        int[] newList = new int[uriCount*2];
                        System.arraycopy(uriList,0,newList,0,uriCount);
                        uriList = newList;
                    }
                    uriList[uriCount++] = uriIndex;
                    leafIndex = fSchemaGrammar.addContentSpecNode(processContentsAnyLocal, -1, uriIndex, false);
                    choiceIndex = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_CHOICE, choiceIndex, leafIndex, false);
                }
                anyIndex = choiceIndex;
            } else {
                // REVISIT: Localize
                reportGenericSchemaError("Empty namespace attribute for any element");
            }
            return anyIndex;
        }

        public DatatypeValidator getDatatypeValidator(String uri, String localpart) {
            DatatypeValidator dv = null;
            if (uri.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)) {
                dv = fDatatypeRegistry.getDatatypeValidator( localpart );
            } else {
                dv = fDatatypeRegistry.getDatatypeValidator( uri+","+localpart );
            }
            return dv;
        }

        /*
         * <anyAttribute
         *   id = ID
         *   namespace = ##any | ##other | ##local | list of {uri, ##targetNamespace}>
         *   Content: (annotation?)
         * </anyAttribute>
         */
        private XMLAttributeDecl traverseAnyAttribute(Element anyAttributeDecl) throws Exception {
            // General Attribute Checking
            int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL;
            Hashtable attrValues = generalCheck(anyAttributeDecl, scope);
            Element annotation = checkContent( anyAttributeDecl, XUtil.getFirstChildElement(anyAttributeDecl), true );
            if(annotation != null ) {
                // REVISIT: Localize
                reportGenericSchemaError("<anyAttribute> elements can contain at most one <annotation> element in their children");
            }
            XMLAttributeDecl anyAttDecl = new XMLAttributeDecl();
            String processContents = anyAttributeDecl.getAttribute(SchemaSymbols.ATT_PROCESSCONTENTS).trim();
            String namespace = anyAttributeDecl.getAttribute(SchemaSymbols.ATT_NAMESPACE).trim();
            // simplify!
NG //String curTargetUri = anyAttributeDecl.getOwnerDocument().getDocumentElement().getAttribute("targetNamespace"); String curTargetUri = fTargetNSURIString; if ( namespace.length() == 0 || namespace.equals(SchemaSymbols.ATTVAL_TWOPOUNDANY) ) { anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_ANY; } else if (namespace.equals(SchemaSymbols.ATTVAL_TWOPOUNDOTHER)) { anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_OTHER; anyAttDecl.name.uri = fStringPool.addSymbol(curTargetUri); } else if (namespace.length() > 0){ anyAttDecl.type = XMLAttributeDecl.TYPE_ANY_LIST; StringTokenizer tokenizer = new StringTokenizer(namespace); int aStringList = fStringPool.startStringList(); Vector tokens = new Vector(); int tokenStr; while (tokenizer.hasMoreElements()) { String token = tokenizer.nextToken(); if (token.equals(SchemaSymbols.ATTVAL_TWOPOUNDLOCAL)) { tokenStr = StringPool.EMPTY_STRING; } else { if (token.equals(SchemaSymbols.ATTVAL_TWOPOUNDTARGETNS)) token = curTargetUri; tokenStr = fStringPool.addSymbol(token); } if (!fStringPool.addStringToList(aStringList, tokenStr)){ reportGenericSchemaError("Internal StringPool error when reading the "+ "namespace attribute for anyattribute declaration"); } } fStringPool.finishStringList(aStringList); anyAttDecl.enumeration = aStringList; } else { // REVISIT: Localize reportGenericSchemaError("Empty namespace attribute for anyattribute declaration"); } // default processContents is "strict"; if (processContents.equals(SchemaSymbols.ATTVAL_SKIP)){ anyAttDecl.defaultType |= XMLAttributeDecl.PROCESSCONTENTS_SKIP; } else if (processContents.equals(SchemaSymbols.ATTVAL_LAX)) { anyAttDecl.defaultType |= XMLAttributeDecl.PROCESSCONTENTS_LAX; } else { anyAttDecl.defaultType |= XMLAttributeDecl.PROCESSCONTENTS_STRICT; } return anyAttDecl; } // Schema Component Constraint: Attribute Wildcard Intersection // For a wildcard's {namespace constraint} value to be the intensional intersection of two other such values (call them O1 and O2): the appropriate case among the following must be true: // 1 If O1 and O2 are the same value, then that value must be the value. // 2 If either O1 or O2 is any, then the other must be the value. // 3 If either O1 or O2 is a pair of not and a namespace name and the other is a set of (namespace names or absent), then that set, minus the negated namespace name if it was in the set, must be the value. // 4 If both O1 and O2 are sets of (namespace names or absent), then the intersection of those sets must be the value. // 5 If the two are negations of different namespace names, then the intersection is not expressible. // In the case where there are more than two values, the intensional intersection is determined by identifying the intensional intersection of two of the values as above, then the intensional intersection of that value with the third (providing the first intersection was expressible), and so on as required. private XMLAttributeDecl AWildCardIntersection(XMLAttributeDecl oneAny, XMLAttributeDecl anotherAny) { // if either one is not expressible, the result is still not expressible if (oneAny.type == -1) { return oneAny; } if (anotherAny.type == -1) { return anotherAny; } // 1 If O1 and O2 are the same value, then that value must be the value. // this one is dealt with in different branches // 2 If either O1 or O2 is any, then the other must be the value. 
if (oneAny.type == XMLAttributeDecl.TYPE_ANY_ANY) { return anotherAny; } if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_ANY) { return oneAny; } // 3 If either O1 or O2 is a pair of not and a namespace name and the other is a set of (namespace names or absent), then that set, minus the negated namespace name if it was in the set, must be the value. if (oneAny.type == XMLAttributeDecl.TYPE_ANY_OTHER && anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST || oneAny.type == XMLAttributeDecl.TYPE_ANY_LIST && anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) { XMLAttributeDecl anyList, anyOther; if (oneAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { anyList = oneAny; anyOther = anotherAny; } else { anyList = anotherAny; anyOther = oneAny; } int[] uriList = fStringPool.stringListAsIntArray(anyList.enumeration); if (elementInSet(anyOther.name.uri, uriList)) { int newList = fStringPool.startStringList(); for (int i=0; i< uriList.length; i++) { if (uriList[i] != anyOther.name.uri ) { fStringPool.addStringToList(newList, uriList[i]); } } fStringPool.finishStringList(newList); anyList.enumeration = newList; } return anyList; } // 4 If both O1 and O2 are sets of (namespace names or absent), then the intersection of those sets must be the value. if (oneAny.type == XMLAttributeDecl.TYPE_ANY_LIST && anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { int[] result = intersect2sets(fStringPool.stringListAsIntArray(oneAny.enumeration), fStringPool.stringListAsIntArray(anotherAny.enumeration)); int newList = fStringPool.startStringList(); for (int i=0; i<result.length; i++) { fStringPool.addStringToList(newList, result[i]); } fStringPool.finishStringList(newList); oneAny.enumeration = newList; return oneAny; } // 5 If the two are negations of different namespace names, then the intersection is not expressible. if (oneAny.type == XMLAttributeDecl.TYPE_ANY_OTHER && anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) { if (oneAny.name.uri == anotherAny.name.uri) { return oneAny; } else { oneAny.type = -1; return oneAny; } } // should never go there; return oneAny; } // Schema Component Constraint: Attribute Wildcard Union // For a wildcard's {namespace constraint} value to be the intensional union of two other such values (call them O1 and O2): the appropriate case among the following must be true: // 1 If O1 and O2 are the same value, then that value must be the value. // 2 If either O1 or O2 is any, then any must be the value. // 3 If both O1 and O2 are sets of (namespace names or absent), then the union of those sets must be the value. // 4 If the two are negations of different namespace names, then any must be the value. // 5 If either O1 or O2 is a pair of not and a namespace name and the other is a set of (namespace names or absent), then The appropriate case among the following must be true: // 5.1 If the set includes the negated namespace name, then any must be the value. // 5.2 If the set does not include the negated namespace name, then whichever of O1 or O2 is a pair of not and a namespace name must be the value. // In the case where there are more than two values, the intensional union is determined by identifying the intensional union of two of the values as above, then the intensional union of that value with the third, and so on as required. 
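    // Worked example (hypothetical URIs): the union of "not urn:a" (##other) with the set
    // {urn:a, urn:b} is ##any, because the set contains the negated name (clause 5.1); the
    // union of "not urn:a" with "not urn:b" is also ##any (clause 4). By contrast, the
    // intersection of those last two values (see AWildCardIntersection above) is not
    // expressible and is represented here by setting type to -1.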
private XMLAttributeDecl AWildCardUnion(XMLAttributeDecl oneAny, XMLAttributeDecl anotherAny) { // if either one is not expressible, the result is still not expressible if (oneAny.type == -1) { return oneAny; } if (anotherAny.type == -1) { return anotherAny; } // 1 If O1 and O2 are the same value, then that value must be the value. // this one is dealt with in different branches // 2 If either O1 or O2 is any, then any must be the value. if (oneAny.type == XMLAttributeDecl.TYPE_ANY_ANY) { return oneAny; } if (anotherAny.type == XMLAttributeDecl.TYPE_ANY_ANY) { return anotherAny; } // 3 If both O1 and O2 are sets of (namespace names or absent), then the union of those sets must be the value. if (oneAny.type == XMLAttributeDecl.TYPE_ANY_LIST && anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { int[] result = union2sets(fStringPool.stringListAsIntArray(oneAny.enumeration), fStringPool.stringListAsIntArray(anotherAny.enumeration)); int newList = fStringPool.startStringList(); for (int i=0; i<result.length; i++) { fStringPool.addStringToList(newList, result[i]); } fStringPool.finishStringList(newList); oneAny.enumeration = newList; return oneAny; } // 4 If the two are negations of different namespace names, then any must be the value. if (oneAny.type == XMLAttributeDecl.TYPE_ANY_OTHER && anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) { if (oneAny.name.uri == anotherAny.name.uri) { return oneAny; } else { oneAny.type = XMLAttributeDecl.TYPE_ANY_ANY; return oneAny; } } // 5 If either O1 or O2 is a pair of not and a namespace name and the other is a set of (namespace names or absent), then The appropriate case among the following must be true: if (oneAny.type == XMLAttributeDecl.TYPE_ANY_OTHER && anotherAny.type == XMLAttributeDecl.TYPE_ANY_LIST || oneAny.type == XMLAttributeDecl.TYPE_ANY_LIST && anotherAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) { XMLAttributeDecl anyList, anyOther; if (oneAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { anyList = oneAny; anyOther = anotherAny; } else { anyList = anotherAny; anyOther = oneAny; } // 5.1 If the set includes the negated namespace name, then any must be the value. if (elementInSet(anyOther.name.uri, fStringPool.stringListAsIntArray(anyList.enumeration))) { anyOther.type = XMLAttributeDecl.TYPE_ANY_ANY; } // 5.2 If the set does not include the negated namespace name, then whichever of O1 or O2 is a pair of not and a namespace name must be the value. return anyOther; } // should never go there; return oneAny; } // Schema Component Constraint: Wildcard Subset // For a namespace constraint (call it sub) to be an intensional subset of another namespace constraint (call it super) one of the following must be true: // 1 super must be any. // 2 All of the following must be true: // 2.1 sub must be a pair of not and a namespace name or absent. // 2.2 super must be a pair of not and the same value. // 3 All of the following must be true: // 3.1 sub must be a set whose members are either namespace names or absent. // 3.2 One of the following must be true: // 3.2.1 super must be the same set or a superset thereof. // 3.2.2 super must be a pair of not and a namespace name or absent and that value must not be in sub's set. private boolean AWildCardSubset(XMLAttributeDecl subAny, XMLAttributeDecl superAny) { // if either one is not expressible, it can't be a subset if (subAny.type == -1 || superAny.type == -1) return false; // 1 super must be any. 
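        //   (hypothetical illustration: the set {urn:a} is an intensional subset of ##any,
        //    of {urn:a, urn:b}, and of "not urn:b"; it is not a subset of "not urn:a",
        //    since urn:a is in the set, per clause 3.2.2)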
if (superAny.type == XMLAttributeDecl.TYPE_ANY_ANY) return true; // 2 All of the following must be true: // 2.1 sub must be a pair of not and a namespace name or absent. if (subAny.type == XMLAttributeDecl.TYPE_ANY_OTHER) { // 2.2 super must be a pair of not and the same value. if (superAny.type == XMLAttributeDecl.TYPE_ANY_OTHER && subAny.name.uri == superAny.name.uri) { return true; } } // 3 All of the following must be true: // 3.1 sub must be a set whose members are either namespace names or absent. if (subAny.type == XMLAttributeDecl.TYPE_ANY_LIST) { // 3.2 One of the following must be true: // 3.2.1 super must be the same set or a superset thereof. if (superAny.type == XMLAttributeDecl.TYPE_ANY_LIST && subset2sets(fStringPool.stringListAsIntArray(subAny.enumeration), fStringPool.stringListAsIntArray(superAny.enumeration))) { return true; } // 3.2.2 super must be a pair of not and a namespace name or absent and that value must not be in sub's set. if (superAny.type == XMLAttributeDecl.TYPE_ANY_OTHER && !elementInSet(superAny.name.uri, fStringPool.stringListAsIntArray(superAny.enumeration))) { return true; } } return false; } // Validation Rule: Wildcard allows Namespace Name // For a value which is either a namespace name or absent to be valid with respect to a wildcard constraint (the value of a {namespace constraint}) one of the following must be true: // 1 The constraint must be any. // 2 All of the following must be true: // 2.1 The constraint is a pair of not and a namespace name or absent ([Definition:] call this the namespace test). // 2.2 The value must not be identical to the namespace test. // 2.3 The value must not be absent. // 3 The constraint is a set, and the value is identical to one of the members of the set. private boolean AWildCardAllowsNameSpace(XMLAttributeDecl wildcard, String uri) { // if the constrain is not expressible, then nothing is allowed if (wildcard.type == -1) return false; // 1 The constraint must be any. if (wildcard.type == XMLAttributeDecl.TYPE_ANY_ANY) return true; int uriStr = fStringPool.addString(uri); // 2 All of the following must be true: // 2.1 The constraint is a pair of not and a namespace name or absent ([Definition:] call this the namespace test). if (wildcard.type == XMLAttributeDecl.TYPE_ANY_OTHER) { // 2.2 The value must not be identical to the namespace test. // 2.3 The value must not be absent. if (uriStr != wildcard.name.uri && uriStr != StringPool.EMPTY_STRING) return true; } // 3 The constraint is a set, and the value is identical to one of the members of the set. 
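        //   (hypothetical illustration: a wildcard whose set is {urn:a, urn:b} allows an attribute
        //    qualified by urn:b, but not an unqualified attribute, because "absent" is not in the set)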
if (wildcard.type == XMLAttributeDecl.TYPE_ANY_LIST) { if (elementInSet(uriStr, fStringPool.stringListAsIntArray(wildcard.enumeration))) return true; } return false; } private boolean isAWildCard(XMLAttributeDecl a) { if (a.type == XMLAttributeDecl.TYPE_ANY_ANY ||a.type == XMLAttributeDecl.TYPE_ANY_LIST ||a.type == XMLAttributeDecl.TYPE_ANY_OTHER ) return true; else return false; } int[] intersect2sets(int[] one, int[] theOther){ int[] result = new int[(one.length>theOther.length?one.length:theOther.length)]; // simple implemention, int count = 0; for (int i=0; i<one.length; i++) { if (elementInSet(one[i], theOther)) result[count++] = one[i]; } int[] result2 = new int[count]; System.arraycopy(result, 0, result2, 0, count); return result2; } int[] union2sets(int[] one, int[] theOther){ int[] result1 = new int[one.length]; // simple implemention, int count = 0; for (int i=0; i<one.length; i++) { if (!elementInSet(one[i], theOther)) result1[count++] = one[i]; } int[] result2 = new int[count+theOther.length]; System.arraycopy(result1, 0, result2, 0, count); System.arraycopy(theOther, 0, result2, count, theOther.length); return result2; } boolean subset2sets(int[] subSet, int[] superSet){ for (int i=0; i<subSet.length; i++) { if (!elementInSet(subSet[i], superSet)) return false; } return true; } boolean elementInSet(int ele, int[] set){ boolean found = false; for (int i=0; i<set.length && !found; i++) { if (ele==set[i]) found = true; } return found; } // wrapper traverseComplexTypeDecl method private int traverseComplexTypeDecl( Element complexTypeDecl ) throws Exception { return traverseComplexTypeDecl (complexTypeDecl, false); } /** * Traverse ComplexType Declaration - Rec Implementation. * * <complexType * abstract = boolean * block = #all or (possibly empty) subset of {extension, restriction} * final = #all or (possibly empty) subset of {extension, restriction} * id = ID * mixed = boolean : false * name = NCName> * Content: (annotation? , (simpleContent | complexContent | * ( (group | all | choice | sequence)? , * ( (attribute | attributeGroup)* , anyAttribute?)))) * </complexType> * @param complexTypeDecl * @param forwardRef * @return */ private int traverseComplexTypeDecl( Element complexTypeDecl, boolean forwardRef) throws Exception { // General Attribute Checking int scope = isTopLevel(complexTypeDecl)? 
GeneralAttrCheck.ELE_CONTEXT_GLOBAL: GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(complexTypeDecl, scope); // Get the attributes of the type String isAbstract = complexTypeDecl.getAttribute( SchemaSymbols.ATT_ABSTRACT ); String blockSet = null; Attr blockAttr = complexTypeDecl.getAttributeNode( SchemaSymbols.ATT_BLOCK ); if (blockAttr != null) blockSet = blockAttr.getValue(); String finalSet = null; Attr finalAttr = complexTypeDecl.getAttributeNode( SchemaSymbols.ATT_FINAL ); if (finalAttr != null) finalSet = finalAttr.getValue(); String typeId = complexTypeDecl.getAttribute( SchemaSymbols.ATTVAL_ID ); String typeName = complexTypeDecl.getAttribute(SchemaSymbols.ATT_NAME); String mixed = complexTypeDecl.getAttribute(SchemaSymbols.ATT_MIXED); boolean isNamedType = false; Stack savedGroupNameStack = null; // Generate a type name, if one wasn't specified if (typeName.length() == 0) { // gensym a unique name typeName = genAnonTypeName(complexTypeDecl); } if ( DEBUGGING ) System.out.println("traversing complex Type : " + typeName); fCurrentTypeNameStack.push(typeName); int typeNameIndex = fStringPool.addSymbol(typeName); // Check if the type has already been registered if (isTopLevel(complexTypeDecl)) { String fullName = fTargetNSURIString+","+typeName; ComplexTypeInfo temp = (ComplexTypeInfo) fComplexTypeRegistry.get(fullName); if (temp != null ) { // check for duplicate declarations if (!forwardRef) { if (temp.declSeen()) reportGenericSchemaError("sch-props-correct: Duplicate declaration for complexType " + typeName); else temp.setDeclSeen(); } return fStringPool.addSymbol(fullName); } else { // check if the type is the name of a simple type if (getDatatypeValidator(fTargetNSURIString,typeName)!=null) reportGenericSchemaError("sch-props-correct: Duplicate type declaration - type is " + typeName); } } int scopeDefined = fScopeCount++; int previousScope = fCurrentScope; fCurrentScope = scopeDefined; // Squirrel away the groupNameStack. // If we are in the middle of processing a group, and we hit the group again // because of a complexType for an element, it's not an error. if (!fCurrentGroupNameStack.isEmpty()) { savedGroupNameStack = fCurrentGroupNameStack; fCurrentGroupNameStack = new Stack(); } Element child = null; ComplexTypeInfo typeInfo = new ComplexTypeInfo(); try { // First, handle any ANNOTATION declaration and get next child child = checkContent(complexTypeDecl,XUtil.getFirstChildElement(complexTypeDecl), true); // Process the content of the complex type declaration if (child==null) { // EMPTY complexType with complexContent processComplexContent(typeNameIndex, child, typeInfo, null, false); } else { String childName = child.getLocalName(); int index = -2; if (childName.equals(SchemaSymbols.ELT_SIMPLECONTENT)) { // SIMPLE CONTENT element traverseSimpleContentDecl(typeNameIndex, child, typeInfo); if (XUtil.getNextSiblingElement(child) != null) throw new ComplexTypeRecoverableError( "Invalid child following the simpleContent child in the complexType"); } else if (childName.equals(SchemaSymbols.ELT_COMPLEXCONTENT)) { // COMPLEX CONTENT element traverseComplexContentDecl(typeNameIndex, child, typeInfo, mixed.equals(SchemaSymbols.ATTVAL_TRUE) ? true:false); if (XUtil.getNextSiblingElement(child) != null) throw new ComplexTypeRecoverableError( "Invalid child following the complexContent child in the complexType"); } else { // We must have .... 
// GROUP, ALL, SEQUENCE or CHOICE, followed by optional attributes // Note that it's possible that only attributes are specified. processComplexContent(typeNameIndex, child, typeInfo, null, mixed.equals(SchemaSymbols.ATTVAL_TRUE) ? true:false); } } typeInfo.blockSet = parseBlockSet(blockSet); // make sure block's value was absent, #all or in {extension, restriction} if( (blockSet != null ) && blockSet.length() != 0 && (!blockSet.equals(SchemaSymbols.ATTVAL_POUNDALL) && (((typeInfo.blockSet & SchemaSymbols.RESTRICTION) == 0) && ((typeInfo.blockSet & SchemaSymbols.EXTENSION) == 0)))) throw new ComplexTypeRecoverableError("The values of the 'block' attribute of a complexType must be either #all or a list of 'restriction' and 'extension'; " + blockSet + " was found"); typeInfo.finalSet = parseFinalSet(finalSet); // make sure final's value was absent, #all or in {extension, restriction} if( (finalSet != null ) && finalSet.length() != 0 && (!finalSet.equals(SchemaSymbols.ATTVAL_POUNDALL) && (((typeInfo.finalSet & SchemaSymbols.RESTRICTION) == 0) && ((typeInfo.finalSet & SchemaSymbols.EXTENSION) == 0)))) throw new ComplexTypeRecoverableError("The values of the 'final' attribute of a complexType must be either #all or a list of 'restriction' and 'extension'; " + finalSet + " was found"); } catch (ComplexTypeRecoverableError e) { String message = e.getMessage(); handleComplexTypeError(message,typeNameIndex,typeInfo); } // Finish the setup of the typeInfo and register the type typeInfo.scopeDefined = scopeDefined; if (isAbstract.equals(SchemaSymbols.ATTVAL_TRUE)) typeInfo.setIsAbstractType(); if (!forwardRef) typeInfo.setDeclSeen(); typeName = fTargetNSURIString + "," + typeName; typeInfo.typeName = new String(typeName); if ( DEBUGGING ) System.out.println(">>>add complex Type to Registry: " + typeName + " baseDTValidator=" + typeInfo.baseDataTypeValidator + " baseCTInfo=" + typeInfo.baseComplexTypeInfo + " derivedBy=" + typeInfo.derivedBy + " contentType=" + typeInfo.contentType + " contentSpecHandle=" + typeInfo.contentSpecHandle + " datatypeValidator=" + typeInfo.datatypeValidator + " scopeDefined=" + typeInfo.scopeDefined); fComplexTypeRegistry.put(typeName,typeInfo); // Before exiting, restore the scope, mainly for nested anonymous types fCurrentScope = previousScope; if (savedGroupNameStack != null) fCurrentGroupNameStack = savedGroupNameStack; fCurrentTypeNameStack.pop(); checkRecursingComplexType(); //set template element's typeInfo fSchemaGrammar.setElementComplexTypeInfo(typeInfo.templateElementIndex, typeInfo); typeNameIndex = fStringPool.addSymbol(typeName); return typeNameIndex; } // end traverseComplexTypeDecl /** * Traverse SimpleContent Declaration * * <simpleContent * id = ID * {any attributes with non-schema namespace...}> * * Content: (annotation? , (restriction | extension)) * </simpleContent> * * <restriction * base = QNAME * id = ID * {any attributes with non-schema namespace...}> * * Content: (annotation?,(simpleType?, (minExclusive|minInclusive|maxExclusive * | maxInclusive | totalDigits | fractionDigits | length | minLength * | maxLength | encoding | period | duration | enumeration * | pattern | whiteSpace)*) ? , * ((attribute | attributeGroup)* , anyAttribute?)) * </restriction> * * <extension * base = QNAME * id = ID * {any attributes with non-schema namespace...}> * Content: (annotation? 
, ((attribute | attributeGroup)* , anyAttribute?)) * </extension> * * @param typeNameIndex * @param simpleContentTypeDecl * @param typeInfo * @return */ private void traverseSimpleContentDecl(int typeNameIndex, Element simpleContentDecl, ComplexTypeInfo typeInfo) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(simpleContentDecl, scope); String typeName = fStringPool.toString(typeNameIndex); // Get attributes. String simpleContentTypeId = simpleContentDecl.getAttribute(SchemaSymbols.ATTVAL_ID); // Set the content type to be simple, and initialize content spec handle typeInfo.contentType = XMLElementDecl.TYPE_SIMPLE; typeInfo.contentSpecHandle = -1; Element simpleContent = checkContent(simpleContentDecl, XUtil.getFirstChildElement(simpleContentDecl),false); // If there are no children, return if (simpleContent==null) { throw new ComplexTypeRecoverableError(); } // General Attribute Checking attrValues = generalCheck(simpleContent, scope); // The content should be either "restriction" or "extension" String simpleContentName = simpleContent.getLocalName(); if (simpleContentName.equals(SchemaSymbols.ELT_RESTRICTION)) typeInfo.derivedBy = SchemaSymbols.RESTRICTION; else if (simpleContentName.equals(SchemaSymbols.ELT_EXTENSION)) typeInfo.derivedBy = SchemaSymbols.EXTENSION; else { throw new ComplexTypeRecoverableError( "The content of the simpleContent element is invalid. The " + "content must be RESTRICTION or EXTENSION"); } // Get the attributes of the restriction/extension element String base = simpleContent.getAttribute(SchemaSymbols.ATT_BASE); String typeId = simpleContent.getAttribute(SchemaSymbols.ATTVAL_ID); // Skip over any annotations in the restriction or extension elements Element content = checkContent(simpleContent, XUtil.getFirstChildElement(simpleContent),true); // Handle the base type name if (base.length() == 0) { throw new ComplexTypeRecoverableError( "The BASE attribute must be specified for the " + "RESTRICTION or EXTENSION element"); } QName baseQName = parseBase(base); // check if we're extending a simpleType which has a "final" setting which precludes this Integer finalValue = ((Integer)fSimpleTypeFinalRegistry.get(fStringPool.toString(baseQName.uri) + "," +fStringPool.toString(baseQName.localpart))); if(finalValue != null && (finalValue.intValue() == typeInfo.derivedBy)) throw new ComplexTypeRecoverableError( "The simpleType " + base + " that " + typeName + " uses has a value of \"final\" which does not permit extension"); processBaseTypeInfo(baseQName,typeInfo); // check that the base isn't a complex type with complex content if (typeInfo.baseComplexTypeInfo != null) { if (typeInfo.baseComplexTypeInfo.contentType != XMLElementDecl.TYPE_SIMPLE) { throw new ComplexTypeRecoverableError( "The type '"+ base +"' specified as the " + "base in the simpleContent element must not have complexContent"); } } // Process the content of the derivation Element attrNode = null; // RESTRICTION if (typeInfo.derivedBy==SchemaSymbols.RESTRICTION) { //Schema Spec : Complex Type Definition Properties Correct : 2 if (typeInfo.baseDataTypeValidator != null) { throw new ComplexTypeRecoverableError( "ct-props-correct.2: The type '" + base +"' is a simple type. 
It cannot be used in a "+ "derivation by RESTRICTION for a complexType"); } else { typeInfo.baseDataTypeValidator = typeInfo.baseComplexTypeInfo.datatypeValidator; } // Check that the base's final set does not include RESTRICTION if((typeInfo.baseComplexTypeInfo.finalSet & SchemaSymbols.RESTRICTION) != 0) { throw new ComplexTypeRecoverableError("Derivation by restriction is forbidden by either the base type " + base + " or the schema"); } // There may be a simple type definition in the restriction element // The data type validator will be based on it, if specified if (content.getLocalName().equals(SchemaSymbols.ELT_SIMPLETYPE )) { int simpleTypeNameIndex = traverseSimpleTypeDecl(content); if (simpleTypeNameIndex!=-1) { DatatypeValidator dv=fDatatypeRegistry.getDatatypeValidator( fStringPool.toString(simpleTypeNameIndex)); //check that this datatype validator is validly derived from the base //according to derivation-ok-restriction 5.1.1 if (!checkSimpleTypeDerivationOK(dv,typeInfo.baseDataTypeValidator)) { throw new ComplexTypeRecoverableError("derivation-ok-restriction.5.1.1: The content type is not a valid restriction of the content type of the base"); } typeInfo.baseDataTypeValidator = dv; content = XUtil.getNextSiblingElement(content); } else { throw new ComplexTypeRecoverableError(); } } // Build up facet information int numEnumerationLiterals = 0; int numFacets = 0; Hashtable facetData = new Hashtable(); Vector enumData = new Vector(); Element child; // General Attribute Checking scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable contentAttrs; //REVISIT: there is a better way to do this, for (child = content; child != null && (child.getLocalName().equals(SchemaSymbols.ELT_MINEXCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MININCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MAXEXCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_MAXINCLUSIVE) || child.getLocalName().equals(SchemaSymbols.ELT_TOTALDIGITS) || child.getLocalName().equals(SchemaSymbols.ELT_FRACTIONDIGITS) || child.getLocalName().equals(SchemaSymbols.ELT_LENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_MINLENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_MAXLENGTH) || child.getLocalName().equals(SchemaSymbols.ELT_PERIOD) || child.getLocalName().equals(SchemaSymbols.ELT_DURATION) || child.getLocalName().equals(SchemaSymbols.ELT_ENUMERATION) || child.getLocalName().equals(SchemaSymbols.ELT_PATTERN) || child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)); child = XUtil.getNextSiblingElement(child)) { if ( child.getNodeType() == Node.ELEMENT_NODE ) { Element facetElt = (Element) child; // General Attribute Checking contentAttrs = generalCheck(facetElt, scope); numFacets++; if (facetElt.getLocalName().equals(SchemaSymbols.ELT_ENUMERATION)) { numEnumerationLiterals++; enumData.addElement(facetElt.getAttribute(SchemaSymbols.ATT_VALUE)); //Enumerations can have annotations ? 
( 0 | 1 ) Element enumContent = XUtil.getFirstChildElement( facetElt ); if( enumContent != null && enumContent.getLocalName().equals ( SchemaSymbols.ELT_ANNOTATION )){ traverseAnnotationDecl( child ); } // TO DO: if Jeff check in new changes to TraverseSimpleType, copy them over } else { facetData.put(facetElt.getLocalName(), facetElt.getAttribute( SchemaSymbols.ATT_VALUE )); } } } // end of for loop thru facets if (numEnumerationLiterals > 0) { facetData.put(SchemaSymbols.ELT_ENUMERATION, enumData); } // If there were facets, create a new data type validator, otherwise // the data type validator is from the base if (numFacets > 0) { try{ typeInfo.datatypeValidator = fDatatypeRegistry.createDatatypeValidator( typeName, typeInfo.baseDataTypeValidator, facetData, false); } catch (Exception e) { throw new ComplexTypeRecoverableError(e.getMessage()); } } else typeInfo.datatypeValidator = typeInfo.baseDataTypeValidator; if (child != null) { // Check that we have attributes if (!isAttrOrAttrGroup(child)) { throw new ComplexTypeRecoverableError( "Invalid child in the RESTRICTION element of simpleContent"); } else attrNode = child; } } // end RESTRICTION // EXTENSION else { if (typeInfo.baseComplexTypeInfo != null) { typeInfo.baseDataTypeValidator = typeInfo.baseComplexTypeInfo.datatypeValidator; // Check that the base's final set does not include EXTENSION if((typeInfo.baseComplexTypeInfo.finalSet & SchemaSymbols.EXTENSION) != 0) { throw new ComplexTypeRecoverableError("Derivation by extension is forbidden by either the base type " + base + " or the schema"); } } typeInfo.datatypeValidator = typeInfo.baseDataTypeValidator; // Look for attributes if (content != null) { // Check that we have attributes if (!isAttrOrAttrGroup(content)) { throw new ComplexTypeRecoverableError( "Only annotations and attributes are allowed in the " + "content of an EXTENSION element for a complexType with simpleContent"); } else { attrNode = content; } } } // add a template element to the grammar element decl pool for the type int templateElementNameIndex = fStringPool.addSymbol("$"+typeName); typeInfo.templateElementIndex = fSchemaGrammar.addElementDecl( new QName(-1, templateElementNameIndex,typeNameIndex,fTargetNSURI), (fTargetNSURI==StringPool.EMPTY_STRING) ? StringPool.EMPTY_STRING : fCurrentScope, typeInfo.scopeDefined, typeInfo.contentType, typeInfo.contentSpecHandle, -1, typeInfo.datatypeValidator); typeInfo.attlistHead = fSchemaGrammar.getFirstAttributeDeclIndex( typeInfo.templateElementIndex); // Process attributes processAttributes(attrNode,baseQName,typeInfo); if (XUtil.getNextSiblingElement(simpleContent) != null) throw new ComplexTypeRecoverableError( "Invalid child following the RESTRICTION or EXTENSION element in the " + "complex type definition"); } // end traverseSimpleContentDecl /** * Traverse complexContent Declaration * * <complexContent * id = ID * mixed = boolean * {any attributes with non-schema namespace...}> * * Content: (annotation? , (restriction | extension)) * </complexContent> * * <restriction * base = QNAME * id = ID * {any attributes with non-schema namespace...}> * * Content: (annotation? , (group | all | choice | sequence)?, * ((attribute | attributeGroup)* , anyAttribute?)) * </restriction> * * <extension * base = QNAME * id = ID * {any attributes with non-schema namespace...}> * Content: (annotation? 
, (group | all | choice | sequence)?, * ((attribute | attributeGroup)* , anyAttribute?)) * </extension> * * @param typeNameIndex * @param simpleContentTypeDecl * @param typeInfo * @param mixedOnComplexTypeDecl * @return */ private void traverseComplexContentDecl(int typeNameIndex, Element complexContentDecl, ComplexTypeInfo typeInfo, boolean mixedOnComplexTypeDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(complexContentDecl, scope); String typeName = fStringPool.toString(typeNameIndex); // Get the attributes String typeId = complexContentDecl.getAttribute(SchemaSymbols.ATTVAL_ID); String mixed = complexContentDecl.getAttribute(SchemaSymbols.ATT_MIXED); // Determine whether the content is mixed, or element-only // Setting here overrides any setting on the complex type decl boolean isMixed = mixedOnComplexTypeDecl; if (mixed.equals(SchemaSymbols.ATTVAL_TRUE)) isMixed = true; else if (mixed.equals(SchemaSymbols.ATTVAL_FALSE)) isMixed = false; // Since the type must have complex content, set the simple type validators // to null typeInfo.datatypeValidator = null; typeInfo.baseDataTypeValidator = null; Element complexContent = checkContent(complexContentDecl, XUtil.getFirstChildElement(complexContentDecl),false); // If there are no children, return if (complexContent==null) { throw new ComplexTypeRecoverableError(); } // The content should be either "restriction" or "extension" String complexContentName = complexContent.getLocalName(); if (complexContentName.equals(SchemaSymbols.ELT_RESTRICTION)) typeInfo.derivedBy = SchemaSymbols.RESTRICTION; else if (complexContentName.equals(SchemaSymbols.ELT_EXTENSION)) typeInfo.derivedBy = SchemaSymbols.EXTENSION; else { throw new ComplexTypeRecoverableError( "The content of the complexContent element is invalid. 
" + "The content must be RESTRICTION or EXTENSION"); } // Get the attributes of the restriction/extension element String base = complexContent.getAttribute(SchemaSymbols.ATT_BASE); String complexContentTypeId=complexContent.getAttribute(SchemaSymbols.ATTVAL_ID); // Skip over any annotations in the restriction or extension elements Element content = checkContent(complexContent, XUtil.getFirstChildElement(complexContent),true); // Handle the base type name if (base.length() == 0) { throw new ComplexTypeRecoverableError( "The BASE attribute must be specified for the " + "RESTRICTION or EXTENSION element"); } QName baseQName = parseBase(base); // check if the base is "anyType" String baseTypeURI = fStringPool.toString(baseQName.uri); String baseLocalName = fStringPool.toString(baseQName.localpart); if (!(baseTypeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && baseLocalName.equals("anyType"))) { processBaseTypeInfo(baseQName,typeInfo); //Check that the base is a complex type if (typeInfo.baseComplexTypeInfo == null) { throw new ComplexTypeRecoverableError( "The base type specified in the complexContent element must be a complexType"); } } // Process the elements that make up the content processComplexContent(typeNameIndex,content,typeInfo,baseQName,isMixed); if (XUtil.getNextSiblingElement(complexContent) != null) throw new ComplexTypeRecoverableError( "Invalid child following the RESTRICTION or EXTENSION element in the " + "complex type definition"); } // end traverseComplexContentDecl /** * Handle complexType error * * @param message * @param typeNameIndex * @param typeInfo * @return */ private void handleComplexTypeError(String message, int typeNameIndex, ComplexTypeInfo typeInfo) throws Exception { String typeName = fStringPool.toString(typeNameIndex); if (message != null) { if (typeName.startsWith(" reportGenericSchemaError("Anonymous complexType: " + message); else reportGenericSchemaError("ComplexType '" + typeName + "': " + message); } // Mock up the typeInfo structure so that there won't be problems during // validation typeInfo.contentType = XMLElementDecl.TYPE_ANY; // this should match anything typeInfo.contentSpecHandle = -1; typeInfo.derivedBy = 0; typeInfo.datatypeValidator = null; typeInfo.attlistHead = -1; int templateElementNameIndex = fStringPool.addSymbol("$"+typeName); typeInfo.templateElementIndex = fSchemaGrammar.addElementDecl( new QName(-1, templateElementNameIndex,typeNameIndex,fTargetNSURI), (fTargetNSURI==StringPool.EMPTY_STRING) ? 
StringPool.EMPTY_STRING : fCurrentScope, typeInfo.scopeDefined, typeInfo.contentType, typeInfo.contentSpecHandle, -1, typeInfo.datatypeValidator); return; } /** * Generate a name for an anonymous type * * @param Element * @return String */ private String genAnonTypeName(Element complexTypeDecl) throws Exception { // Generate a unique name for the anonymous type by concatenating together the // names of parent nodes String typeName; Element node=complexTypeDecl; typeName="#AnonType_"; while (!isTopLevel(node)) { node = (Element)node.getParentNode(); typeName = typeName+node.getAttribute(SchemaSymbols.ATT_NAME); } return typeName; } /** * Parse base string * * @param base * @return QName */ private QName parseBase(String base) throws Exception { String prefix = ""; String localpart = base; int colonptr = base.indexOf(":"); if ( colonptr > 0) { prefix = base.substring(0,colonptr); localpart = base.substring(colonptr+1); } int nameIndex = fStringPool.addSymbol(base); int prefixIndex = fStringPool.addSymbol(prefix); int localpartIndex = fStringPool.addSymbol(localpart); int URIindex = fStringPool.addSymbol(resolvePrefixToURI(prefix)); return new QName(prefixIndex,localpartIndex,nameIndex,URIindex); } /** * Check if base is from another schema * * @param baseName * @return boolean */ private boolean baseFromAnotherSchema(QName baseName) throws Exception { String typeURI = fStringPool.toString(baseName.uri); if ( ! typeURI.equals(fTargetNSURIString) && ! typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && typeURI.length() != 0 ) //REVISIT, !!!! a hack: for schema that has no //target namespace, e.g. personal-schema.xml return true; else return false; } /** * Process "base" information for a complexType * * @param baseTypeInfo * @param baseName * @param typeInfo * @return */ private void processBaseTypeInfo(QName baseName, ComplexTypeInfo typeInfo) throws Exception { ComplexTypeInfo baseComplexTypeInfo = null; DatatypeValidator baseDTValidator = null; String typeURI = fStringPool.toString(baseName.uri); String localpart = fStringPool.toString(baseName.localpart); String base = fStringPool.toString(baseName.rawname); // check if the base type is from another schema if (baseFromAnotherSchema(baseName)) { baseComplexTypeInfo = getTypeInfoFromNS(typeURI, localpart); if (baseComplexTypeInfo == null) { baseDTValidator = getTypeValidatorFromNS(typeURI, localpart); if (baseDTValidator == null) { throw new ComplexTypeRecoverableError( "Could not find base type " +localpart + " in schema " + typeURI); } } } // type must be from same schema else { String fullBaseName = typeURI+","+localpart; // assume the base is a complexType and try to locate the base type first baseComplexTypeInfo= (ComplexTypeInfo) fComplexTypeRegistry.get(fullBaseName); // if not found, 2 possibilities: // 1: ComplexType in question has not been compiled yet; // 2: base is SimpleTYpe; if (baseComplexTypeInfo == null) { baseDTValidator = getDatatypeValidator(typeURI, localpart); if (baseDTValidator == null) { int baseTypeSymbol; Element baseTypeNode = getTopLevelComponentByName( SchemaSymbols.ELT_COMPLEXTYPE,localpart); if (baseTypeNode != null) { // Before traversing the base, make sure we're not already // doing so.. 
// ct-props-correct 3 if (fBaseTypeNameStack.search((Object)fullBaseName) > - 1) { throw new ComplexTypeRecoverableError( "ct-props-correct.3: Recursive type definition"); } fBaseTypeNameStack.push(fullBaseName); baseTypeSymbol = traverseComplexTypeDecl( baseTypeNode, true ); fBaseTypeNameStack.pop(); baseComplexTypeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(baseTypeSymbol)); //REVISIT: should it be fullBaseName; } else { baseTypeNode = getTopLevelComponentByName( SchemaSymbols.ELT_SIMPLETYPE, localpart); if (baseTypeNode != null) { baseTypeSymbol = traverseSimpleTypeDecl( baseTypeNode ); baseDTValidator = getDatatypeValidator(typeURI, localpart); if (baseDTValidator == null) { //TO DO: signal error here. } } else { throw new ComplexTypeRecoverableError( "Base type could not be found : " + base); } } } } } // end else (type must be from same schema) typeInfo.baseComplexTypeInfo = baseComplexTypeInfo; typeInfo.baseDataTypeValidator = baseDTValidator; } // end processBaseTypeInfo /** * Process content which is complex * * (group | all | choice | sequence) ? , * ((attribute | attributeGroup)* , anyAttribute?)) * * @param typeNameIndex * @param complexContentChild * @param typeInfo * @return */ private void processComplexContent(int typeNameIndex, Element complexContentChild, ComplexTypeInfo typeInfo, QName baseName, boolean isMixed) throws Exception { Element attrNode = null; int index=-2; String typeName = fStringPool.toString(typeNameIndex); if (complexContentChild != null) { // GROUP, ALL, SEQUENCE or CHOICE, followed by attributes, if specified. // Note that it's possible that only attributes are specified. String childName = complexContentChild.getLocalName(); if (childName.equals(SchemaSymbols.ELT_GROUP)) { GroupInfo grpInfo = traverseGroupDecl(complexContentChild); int groupIndex = (grpInfo != null) ? grpInfo.contentSpecIndex:-2; index = handleOccurrences(groupIndex, complexContentChild, hasAllContent(groupIndex) ? 
GROUP_REF_WITH_ALL : NOT_ALL_CONTEXT); attrNode = XUtil.getNextSiblingElement(complexContentChild); } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = handleOccurrences(traverseSequence(complexContentChild), complexContentChild); attrNode = XUtil.getNextSiblingElement(complexContentChild); } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = handleOccurrences(traverseChoice(complexContentChild), complexContentChild); attrNode = XUtil.getNextSiblingElement(complexContentChild); } else if (childName.equals(SchemaSymbols.ELT_ALL)) { index = handleOccurrences(traverseAll(complexContentChild), complexContentChild, PROCESSING_ALL_GP); attrNode = XUtil.getNextSiblingElement(complexContentChild); } else if (isAttrOrAttrGroup(complexContentChild)) { // reset the contentType typeInfo.contentType = XMLElementDecl.TYPE_ANY; attrNode = complexContentChild; } else { throw new ComplexTypeRecoverableError( "Invalid child '"+ childName +"' in the complex type"); } } typeInfo.contentSpecHandle = index; // Merge in information from base, if it exists if (typeInfo.baseComplexTypeInfo != null) { int baseContentSpecHandle = typeInfo.baseComplexTypeInfo.contentSpecHandle; // RESTRICTION if (typeInfo.derivedBy == SchemaSymbols.RESTRICTION) { // check to see if the baseType permits derivation by restriction if((typeInfo.baseComplexTypeInfo.finalSet & SchemaSymbols.RESTRICTION) != 0) throw new ComplexTypeRecoverableError("Derivation by restriction is forbidden by either the base type " + fStringPool.toString(baseName.localpart) + " or the schema"); // if the content is EMPTY, check that the base is correct // according to derivation-ok-restriction 5.2 if (typeInfo.contentSpecHandle==-2) { if (!(typeInfo.baseComplexTypeInfo.contentType==XMLElementDecl.TYPE_EMPTY || particleEmptiable(baseContentSpecHandle))) { throw new ComplexTypeRecoverableError("derivation-ok-restrictoin.5.2 Content type of complexType is EMPTY but base is not EMPTY or does not have a particle which is emptiable"); } } // Delay derivation by restriction particle constraint checking until // the whole schema has been processed. We need to do this because: // - top-level element declarations are not processed until traversed // thru the main traverseSchema walk. 
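            //
            // Illustrative sketch only (hypothetical schema fragment, not taken from this
            // file or the spec): a restriction whose particle check has to wait for the
            // full schema walk because it references a top-level element that may not
            // have been traversed yet:
            //
            //   <xsd:element name="item" type="xsd:string"/>
            //   <xsd:complexType name="baseType">
            //     <xsd:sequence>
            //       <xsd:element ref="item" maxOccurs="unbounded"/>
            //     </xsd:sequence>
            //   </xsd:complexType>
            //   <xsd:complexType name="derivedType">
            //     <xsd:complexContent>
            //       <xsd:restriction base="baseType">
            //         <xsd:sequence>
            //           <xsd:element ref="item" maxOccurs="2"/>
            //         </xsd:sequence>
            //       </xsd:restriction>
            //     </xsd:complexContent>
            //   </xsd:complexType>
            //
            // The rcase-* particle checks (see checkParticleDerivationOK below) are
            // therefore deferred until the whole schema has been processed.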
} // EXTENSION else { // check to see if the baseType permits derivation by extension if((typeInfo.baseComplexTypeInfo.finalSet & SchemaSymbols.EXTENSION) != 0) throw new ComplexTypeRecoverableError("cos-ct-extends.1.1: Derivation by extension is forbidden by either the base type " + fStringPool.toString(baseName.localpart) + " or the schema"); // Check if the contentType of the base is consistent with the new type // cos-ct-extends.1.4.2.2 if (typeInfo.baseComplexTypeInfo.contentType != XMLElementDecl.TYPE_EMPTY) { if (((typeInfo.baseComplexTypeInfo.contentType == XMLElementDecl.TYPE_CHILDREN) && isMixed) || ((typeInfo.baseComplexTypeInfo.contentType == XMLElementDecl.TYPE_MIXED_COMPLEX) && !isMixed)) { throw new ComplexTypeRecoverableError("cos-ct-extends.1.4.2.2.2.1: The content type of the base type " + fStringPool.toString(baseName.localpart) + " and derived type " + typeName + " must both be mixed or element-only"); } } // Compose the final content model by concatenating the base and the // current in sequence if (baseFromAnotherSchema(baseName)) { String baseSchemaURI = fStringPool.toString(baseName.uri); SchemaGrammar aGrammar= (SchemaGrammar) fGrammarResolver.getGrammar( baseSchemaURI); baseContentSpecHandle = importContentSpec(aGrammar, baseContentSpecHandle); } if (typeInfo.contentSpecHandle == -2) { typeInfo.contentSpecHandle = baseContentSpecHandle; } else if (baseContentSpecHandle > -1) { if (typeInfo.contentSpecHandle > -1 && (hasAllContent(typeInfo.contentSpecHandle) || hasAllContent(baseContentSpecHandle))) { throw new ComplexTypeRecoverableError("cos-all-limited.1.2: An \"all\" model group that is part of a complex type definition must constitute the entire {content type} of the definition."); } typeInfo.contentSpecHandle = fSchemaGrammar.addContentSpecNode(XMLContentSpec.CONTENTSPECNODE_SEQ, baseContentSpecHandle, typeInfo.contentSpecHandle, false); } // Check that there is a particle in the final content // cos-ct-extends.1.4.2.1 // LM - commented out until I get a clarification from HT //if (typeInfo.contentSpecHandle <0) { // throw new ComplexTypeRecoverableError("cos-ct-extends.1.4.2.1: The content of a type derived by EXTENSION must contain a particle"); } } else { typeInfo.derivedBy = 0; } // Set the content type if (isMixed) { // if there are no children, detect an error // See the definition of content type in Structures 3.4.1 // This is commented out for now, until I get a clarification from schema WG if (typeInfo.contentSpecHandle == -2) { //throw new ComplexTypeRecoverableError("Type '" + typeName + "': The content of a mixed complexType must not be empty"); typeInfo.contentType = XMLElementDecl.TYPE_MIXED_SIMPLE; } else typeInfo.contentType = XMLElementDecl.TYPE_MIXED_COMPLEX; } else if (typeInfo.contentSpecHandle == -2) typeInfo.contentType = XMLElementDecl.TYPE_EMPTY; else typeInfo.contentType = XMLElementDecl.TYPE_CHILDREN; // add a template element to the grammar element decl pool. int templateElementNameIndex = fStringPool.addSymbol("$"+typeName); typeInfo.templateElementIndex = fSchemaGrammar.addElementDecl( new QName(-1, templateElementNameIndex,typeNameIndex,fTargetNSURI), (fTargetNSURI==StringPool.EMPTY_STRING) ? 
StringPool.EMPTY_STRING : fCurrentScope, typeInfo.scopeDefined, typeInfo.contentType, typeInfo.contentSpecHandle, -1, typeInfo.datatypeValidator); typeInfo.attlistHead = fSchemaGrammar.getFirstAttributeDeclIndex( typeInfo.templateElementIndex); // Now, check attributes and handle if (attrNode !=null) { if (!isAttrOrAttrGroup(attrNode)) { throw new ComplexTypeRecoverableError( "Invalid child "+ attrNode.getLocalName() + " in the complexType or complexContent"); } else processAttributes(attrNode,baseName,typeInfo); } else if (typeInfo.baseComplexTypeInfo != null) processAttributes(null,baseName,typeInfo); } // end processComplexContent /** * Process attributes of a complex type * * @param attrNode * @param typeInfo * @return */ private void processAttributes(Element attrNode, QName baseName, ComplexTypeInfo typeInfo) throws Exception { XMLAttributeDecl attWildcard = null; Vector anyAttDecls = new Vector(); Element child; for (child = attrNode; child != null; child = XUtil.getNextSiblingElement(child)) { String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_ATTRIBUTE)) { traverseAttributeDecl(child, typeInfo, false); } else if ( childName.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) ) { traverseAttributeGroupDecl(child,typeInfo,anyAttDecls); } else if ( childName.equals(SchemaSymbols.ELT_ANYATTRIBUTE) ) { attWildcard = traverseAnyAttribute(child); } else { throw new ComplexTypeRecoverableError( "Invalid child among the children of the complexType definition"); } } if (attWildcard != null) { XMLAttributeDecl fromGroup = null; final int count = anyAttDecls.size(); if ( count > 0) { fromGroup = (XMLAttributeDecl) anyAttDecls.elementAt(0); for (int i=1; i<count; i++) { fromGroup = AWildCardIntersection( fromGroup,(XMLAttributeDecl)anyAttDecls.elementAt(i)); } } if (fromGroup != null) { int saveProcessContents = attWildcard.defaultType; attWildcard = AWildCardIntersection(attWildcard, fromGroup); attWildcard.defaultType = saveProcessContents; } } else { //REVISIT: unclear in the Scheme Structures 4.3.3 what to do in this case if (anyAttDecls.size()>0) { attWildcard = (XMLAttributeDecl)anyAttDecls.elementAt(0); } } // merge in base type's attribute decls XMLAttributeDecl baseAttWildcard = null; ComplexTypeInfo baseTypeInfo = typeInfo.baseComplexTypeInfo; SchemaGrammar aGrammar=null; if (baseTypeInfo != null && baseTypeInfo.attlistHead > -1 ) { int attDefIndex = baseTypeInfo.attlistHead; aGrammar = fSchemaGrammar; String baseTypeSchemaURI = baseFromAnotherSchema(baseName)? 
fStringPool.toString(baseName.uri):null; if (baseTypeSchemaURI != null) { aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(baseTypeSchemaURI); } if (aGrammar == null) { //reportGenericSchemaError("In complexType "+typeName+", can NOT find the grammar "+ // "with targetNamespace" + baseTypeSchemaURI+ // "for the base type"); } else while ( attDefIndex > -1 ) { fTempAttributeDecl.clear(); aGrammar.getAttributeDecl(attDefIndex, fTempAttributeDecl); if (fTempAttributeDecl.type == XMLAttributeDecl.TYPE_ANY_ANY ||fTempAttributeDecl.type == XMLAttributeDecl.TYPE_ANY_LIST ||fTempAttributeDecl.type == XMLAttributeDecl.TYPE_ANY_OTHER ) { if (attWildcard == null) { baseAttWildcard = fTempAttributeDecl; } attDefIndex = aGrammar.getNextAttributeDeclIndex(attDefIndex); continue; } // if found a duplicate, if it is derived by restriction, // then skip the one from the base type int temp = fSchemaGrammar.getAttributeDeclIndex(typeInfo.templateElementIndex, fTempAttributeDecl.name); if ( temp > -1) { if (typeInfo.derivedBy==SchemaSymbols.EXTENSION) { throw new ComplexTypeRecoverableError("Attribute " + fStringPool.toString(fTempAttributeDecl.name.localpart) + " that appeared in the base should not appear in a derivation by extension"); } else { attDefIndex = fSchemaGrammar.getNextAttributeDeclIndex(attDefIndex); continue; } } fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, fTempAttributeDecl.name, fTempAttributeDecl.type, fTempAttributeDecl.enumeration, fTempAttributeDecl.defaultType, fTempAttributeDecl.defaultValue, fTempAttributeDecl.datatypeValidator, fTempAttributeDecl.list); attDefIndex = aGrammar.getNextAttributeDeclIndex(attDefIndex); } } // att wildcard will inserted after all attributes were processed if (attWildcard != null) { if (attWildcard.type != -1) { fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, attWildcard.name, attWildcard.type, attWildcard.enumeration, attWildcard.defaultType, attWildcard.defaultValue, attWildcard.datatypeValidator, attWildcard.list); } else { //REVISIT: unclear in Schema spec if should report error here. 
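                // (Illustrative note, an interpretation rather than spec text: the classic
                //  inexpressible case is the intersection of two negated-namespace wildcards,
                //  e.g. <xsd:anyAttribute namespace="##other"/> contributed by schemas with
                //  two different target namespaces; the result, "neither A nor B", cannot be
                //  written as a single XML Schema 1.0 wildcard, which AWildCardIntersection
                //  appears to signal here with a wildcard type of -1.)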
reportGenericSchemaError("The intensional intersection for {attribute wildcard}s must be expressible"); } } else if (baseAttWildcard != null) { fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, baseAttWildcard.name, baseAttWildcard.type, baseAttWildcard.enumeration, baseAttWildcard.defaultType, baseAttWildcard.defaultValue, baseAttWildcard.datatypeValidator, baseAttWildcard.list); } typeInfo.attlistHead = fSchemaGrammar.getFirstAttributeDeclIndex (typeInfo.templateElementIndex); // For derivation by restriction, ensure that the resulting attribute list // satisfies the constraints in derivation-ok-restriction 2,3,4 if ((typeInfo.derivedBy==SchemaSymbols.RESTRICTION) && (typeInfo.attlistHead>-1 && baseTypeInfo != null)) { checkAttributesDerivationOKRestriction(typeInfo.attlistHead,fSchemaGrammar, attWildcard,baseTypeInfo.attlistHead,aGrammar,baseAttWildcard); } } // end processAttributes // Check that the attributes of a type derived by restriction satisfy the // constraints of derivation-ok-restriction private void checkAttributesDerivationOKRestriction(int dAttListHead, SchemaGrammar dGrammar, XMLAttributeDecl dAttWildCard, int bAttListHead, SchemaGrammar bGrammar, XMLAttributeDecl bAttWildCard) throws ComplexTypeRecoverableError { int attDefIndex = dAttListHead; if (bAttListHead < 0) { throw new ComplexTypeRecoverableError("derivation-ok-restriction.2: Base type definition does not have any attributes"); } // Loop thru the attributes while ( attDefIndex > -1 ) { fTempAttributeDecl.clear(); dGrammar.getAttributeDecl(attDefIndex, fTempAttributeDecl); if (isAWildCard(fTempAttributeDecl)) { attDefIndex = dGrammar.getNextAttributeDeclIndex(attDefIndex); continue; } int bAttDefIndex = bGrammar.findAttributeDecl(bAttListHead, fTempAttributeDecl.name); if (bAttDefIndex > -1) { fTemp2AttributeDecl.clear(); bGrammar.getAttributeDecl(bAttDefIndex, fTemp2AttributeDecl); // derivation-ok-restriction. Constraint 2.1.1 if ((fTemp2AttributeDecl.defaultType & XMLAttributeDecl.DEFAULT_TYPE_REQUIRED) > 0 && (fTempAttributeDecl.defaultType & XMLAttributeDecl.DEFAULT_TYPE_REQUIRED) <= 0) { throw new ComplexTypeRecoverableError("derivation-ok-restriction.2.1.1: Attribute '" + fStringPool.toString(fTempAttributeDecl.name.localpart) + "' in derivation has an inconsistent REQUIRED setting to that of attribute in base"); } // derivation-ok-restriction. Constraint 2.1.2 if (!(checkSimpleTypeDerivationOK( fTempAttributeDecl.datatypeValidator, fTemp2AttributeDecl.datatypeValidator))) { throw new ComplexTypeRecoverableError("derivation-ok-restriction.2.1.2: Type of attribute '" + fStringPool.toString(fTempAttributeDecl.name.localpart) + "' in derivation must be a restriction of type of attribute in base"); } // derivation-ok-restriction. Constraint 2.1.3 if ((fTemp2AttributeDecl.defaultType & XMLAttributeDecl.DEFAULT_TYPE_FIXED) > 0) { if (!((fTempAttributeDecl.defaultType & XMLAttributeDecl.DEFAULT_TYPE_FIXED) > 0) || !fTempAttributeDecl.defaultValue.equals(fTemp2AttributeDecl.defaultValue)) { throw new ComplexTypeRecoverableError("derivation-ok-restriction.2.1.3: Attribute '" + fStringPool.toString(fTempAttributeDecl.name.localpart) + "' is either not fixed, or is not fixed with the same value as the attribute in the base"); } } } else { // derivation-ok-restriction. 
Constraint 2.2 if ((bAttWildCard==null) || !AWildCardAllowsNameSpace(bAttWildCard, dGrammar.getTargetNamespaceURI())) { throw new ComplexTypeRecoverableError("derivation-ok-restriction.2.2: Attribute '" + fStringPool.toString(fTempAttributeDecl.name.localpart) + "' has a target namespace which is not valid with respect to a base type definition's wildcard or, the base does not contain a wildcard"); } } attDefIndex = dGrammar.getNextAttributeDeclIndex(attDefIndex); } // derivation-ok-restriction. Constraint 4 if (dAttWildCard!=null) { if (bAttWildCard==null) { throw new ComplexTypeRecoverableError("derivation-ok-restriction.4.1: An attribute wildcard is present in the derived type, but not the base"); } if (!AWildCardSubset(dAttWildCard,bAttWildCard)) { throw new ComplexTypeRecoverableError("derivation-ok-restriction.4.2: The attribute wildcard in the derived type is not a valid subset of that in the base"); } } } private boolean isAttrOrAttrGroup(Element e) { String elementName = e.getLocalName(); if (elementName.equals(SchemaSymbols.ELT_ATTRIBUTE) || elementName.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) || elementName.equals(SchemaSymbols.ELT_ANYATTRIBUTE)) return true; else return false; } private void checkRecursingComplexType() throws Exception { if ( fCurrentTypeNameStack.empty() ) { if (! fElementRecurseComplex.isEmpty() ) { int count= fElementRecurseComplex.size(); for (int i = 0; i<count; i++) { ElementInfo eobj = (ElementInfo)fElementRecurseComplex.elementAt(i); int elementIndex = eobj.elementIndex; String typeName = eobj.typeName; ComplexTypeInfo typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fTargetNSURIString+","+typeName); if (typeInfo==null) { throw new Exception ( "Internal Error in void checkRecursingComplexType(). " ); } else { // update the element decl with info from the type fSchemaGrammar.getElementDecl(elementIndex, fTempElementDecl); fTempElementDecl.type = typeInfo.contentType; fTempElementDecl.contentSpecIndex = typeInfo.contentSpecHandle; fTempElementDecl.datatypeValidator = typeInfo.datatypeValidator; fSchemaGrammar.setElementDecl(elementIndex, fTempElementDecl); fSchemaGrammar.setFirstAttributeDeclIndex(elementIndex, typeInfo.attlistHead); fSchemaGrammar.setElementComplexTypeInfo(elementIndex,typeInfo); } } fElementRecurseComplex.removeAllElements(); } } } // Check that the particle defined by the derived ct tree is a valid restriction of // that specified by baseContentSpecIndex. derivedScope and baseScope are the // scopes of the particles, respectively. bInfo is supplied when the base particle // is from a base type definition, and may be null - it helps determine other scopes // that elements should be looked up in. private void checkParticleDerivationOK(int derivedContentSpecIndex, int derivedScope, int baseContentSpecIndex, int baseScope, ComplexTypeInfo bInfo) throws Exception { // Only do this if full checking is enabled if (!fFullConstraintChecking) return; // Check for pointless occurrences of all, choice, sequence. The result is the // contentspec which is not pointless. 
If the result is a non-pointless // group, Vector is filled in with the children of interest int csIndex1 = derivedContentSpecIndex; fSchemaGrammar.getContentSpec(csIndex1, tempContentSpec1); int csIndex2 = baseContentSpecIndex; fSchemaGrammar.getContentSpec(csIndex2, tempContentSpec2); Vector tempVector1 = new Vector(); Vector tempVector2 = new Vector(); if (tempContentSpec1.type == XMLContentSpec.CONTENTSPECNODE_SEQ || tempContentSpec1.type == XMLContentSpec.CONTENTSPECNODE_CHOICE || tempContentSpec1.type == XMLContentSpec.CONTENTSPECNODE_ALL) { csIndex1 = checkForPointlessOccurrences(csIndex1,tempVector1); } if (tempContentSpec2.type == XMLContentSpec.CONTENTSPECNODE_SEQ || tempContentSpec2.type == XMLContentSpec.CONTENTSPECNODE_CHOICE || tempContentSpec2.type == XMLContentSpec.CONTENTSPECNODE_ALL) { csIndex2 = checkForPointlessOccurrences(csIndex2,tempVector2); } fSchemaGrammar.getContentSpec(csIndex1, tempContentSpec1); fSchemaGrammar.getContentSpec(csIndex2, tempContentSpec2); switch (tempContentSpec1.type & 0x0f) { case XMLContentSpec.CONTENTSPECNODE_LEAF: { switch (tempContentSpec2.type & 0x0f) { // Elt:Elt NameAndTypeOK case XMLContentSpec.CONTENTSPECNODE_LEAF: { checkNameAndTypeOK(csIndex1, derivedScope, csIndex2, baseScope, bInfo); return; } // Elt:Any NSCompat case XMLContentSpec.CONTENTSPECNODE_ANY: case XMLContentSpec.CONTENTSPECNODE_ANY_OTHER: case XMLContentSpec.CONTENTSPECNODE_ANY_NS: { checkNSCompat(csIndex1, derivedScope, csIndex2); return; } // Elt:All RecurseAsIfGroup case XMLContentSpec.CONTENTSPECNODE_CHOICE: case XMLContentSpec.CONTENTSPECNODE_SEQ: case XMLContentSpec.CONTENTSPECNODE_ALL: { checkRecurseAsIfGroup(csIndex1, derivedScope, csIndex2, tempVector2, baseScope, bInfo); return; } default: { throw new ParticleRecoverableError("internal Xerces error"); } } } case XMLContentSpec.CONTENTSPECNODE_ANY: case XMLContentSpec.CONTENTSPECNODE_ANY_OTHER: case XMLContentSpec.CONTENTSPECNODE_ANY_NS: { switch (tempContentSpec2.type & 0x0f) { // Any:Any NSSubset case XMLContentSpec.CONTENTSPECNODE_ANY: case XMLContentSpec.CONTENTSPECNODE_ANY_OTHER: case XMLContentSpec.CONTENTSPECNODE_ANY_NS: { checkNSSubset(csIndex1, csIndex2); return; } case XMLContentSpec.CONTENTSPECNODE_CHOICE: case XMLContentSpec.CONTENTSPECNODE_SEQ: case XMLContentSpec.CONTENTSPECNODE_ALL: case XMLContentSpec.CONTENTSPECNODE_LEAF: { throw new ParticleRecoverableError("cos-particle-restrict: Forbidden restriction: Any: Choice,Seq,All,Elt"); } default: { throw new ParticleRecoverableError("internal Xerces error"); } } } case XMLContentSpec.CONTENTSPECNODE_ALL: { switch (tempContentSpec2.type & 0x0f) { // All:Any NSRecurseCheckCardinality case XMLContentSpec.CONTENTSPECNODE_ANY: case XMLContentSpec.CONTENTSPECNODE_ANY_OTHER: case XMLContentSpec.CONTENTSPECNODE_ANY_NS: { checkNSRecurseCheckCardinality(csIndex1, tempVector1, derivedScope, csIndex2); return; } case XMLContentSpec.CONTENTSPECNODE_ALL: { checkRecurse(csIndex1, tempVector1, derivedScope, csIndex2, tempVector2, baseScope, bInfo); return; } case XMLContentSpec.CONTENTSPECNODE_CHOICE: case XMLContentSpec.CONTENTSPECNODE_SEQ: case XMLContentSpec.CONTENTSPECNODE_LEAF: { throw new ParticleRecoverableError("cos-particle-restrict: Forbidden restriction: All:Choice,Seq,Elt"); } default: { throw new ParticleRecoverableError("internal Xerces error"); } } } case XMLContentSpec.CONTENTSPECNODE_CHOICE: { switch (tempContentSpec2.type & 0x0f) { // Choice:Any NSRecurseCheckCardinality case XMLContentSpec.CONTENTSPECNODE_ANY: case 
XMLContentSpec.CONTENTSPECNODE_ANY_OTHER: case XMLContentSpec.CONTENTSPECNODE_ANY_NS: { checkNSRecurseCheckCardinality(csIndex1, tempVector1, derivedScope, csIndex2); return; } case XMLContentSpec.CONTENTSPECNODE_CHOICE: { checkRecurseLax(csIndex1, tempVector1, derivedScope, csIndex2, tempVector2, baseScope, bInfo); return; } case XMLContentSpec.CONTENTSPECNODE_ALL: case XMLContentSpec.CONTENTSPECNODE_SEQ: case XMLContentSpec.CONTENTSPECNODE_LEAF: { throw new ParticleRecoverableError("cos-particle-restrict: Forbidden restriction: Choice:All,Seq,Leaf"); } default: { throw new ParticleRecoverableError("internal Xerces error"); } } } case XMLContentSpec.CONTENTSPECNODE_SEQ: { switch (tempContentSpec2.type & 0x0f) { // Choice:Any NSRecurseCheckCardinality case XMLContentSpec.CONTENTSPECNODE_ANY: case XMLContentSpec.CONTENTSPECNODE_ANY_OTHER: case XMLContentSpec.CONTENTSPECNODE_ANY_NS: { checkNSRecurseCheckCardinality(csIndex1, tempVector1, derivedScope, csIndex2); return; } case XMLContentSpec.CONTENTSPECNODE_ALL: { checkRecurseUnordered(csIndex1, tempVector1, derivedScope, csIndex2, tempVector2, baseScope, bInfo); return; } case XMLContentSpec.CONTENTSPECNODE_SEQ: { checkRecurse(csIndex1, tempVector1, derivedScope, csIndex2, tempVector2, baseScope, bInfo); return; } case XMLContentSpec.CONTENTSPECNODE_CHOICE: { checkMapAndSum(csIndex1, tempVector1, derivedScope, csIndex2, tempVector2, baseScope, bInfo); return; } case XMLContentSpec.CONTENTSPECNODE_LEAF: { throw new ParticleRecoverableError("cos-particle-restrict: Forbidden restriction: Seq:Elt"); } default: { throw new ParticleRecoverableError("internal Xerces error"); } } } } } private int checkForPointlessOccurrences(int csIndex, Vector tempVector) { // Note: instead of using a Vector, we should use a growable array of int. // To be cleaned up in release 1.4.1. (LM) fSchemaGrammar.getContentSpec(csIndex, tempContentSpec1); if (tempContentSpec1.otherValue == -2) { gatherChildren(tempContentSpec1.type,tempContentSpec1.value,tempVector); if (tempVector.size() == 1) { Integer returnVal = (Integer)(tempVector.elementAt(0)); return returnVal.intValue(); } } int type = tempContentSpec1.type; int value = tempContentSpec1.value; int otherValue = tempContentSpec1.otherValue; gatherChildren(type,value, tempVector); gatherChildren(type,otherValue, tempVector); return csIndex; } private void gatherChildren(int parentType, int csIndex, Vector tempVector) { fSchemaGrammar.getContentSpec(csIndex, tempContentSpec1); int min = fSchemaGrammar.getContentSpecMinOccurs(csIndex); int max = fSchemaGrammar.getContentSpecMaxOccurs(csIndex); int left = tempContentSpec1.value; int right = tempContentSpec1.otherValue; int type = tempContentSpec1.type; if (type == XMLContentSpec.CONTENTSPECNODE_LEAF || (type & 0x0f) == XMLContentSpec.CONTENTSPECNODE_ANY || (type & 0x0f) == XMLContentSpec.CONTENTSPECNODE_ANY_NS || (type & 0x0f) == XMLContentSpec.CONTENTSPECNODE_ANY_OTHER ) { tempVector.addElement(new Integer(csIndex)); } else if (! 
(min==1 && max==1)) { tempVector.addElement(new Integer(csIndex)); } else if (right == -2) { gatherChildren(type,left,tempVector); } else if (parentType == type) { gatherChildren(type,left,tempVector); gatherChildren(type,right,tempVector); } else { tempVector.addElement(new Integer(csIndex)); } } private void checkNameAndTypeOK(int csIndex1, int derivedScope, int csIndex2, int baseScope, ComplexTypeInfo bInfo) throws Exception { fSchemaGrammar.getContentSpec(csIndex1, tempContentSpec1); fSchemaGrammar.getContentSpec(csIndex2, tempContentSpec2); int localpart1 = tempContentSpec1.value; int uri1 = tempContentSpec1.otherValue; int localpart2 = tempContentSpec2.value; int uri2 = tempContentSpec2.otherValue; int min1 = fSchemaGrammar.getContentSpecMinOccurs(csIndex1); int max1 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex1); int min2 = fSchemaGrammar.getContentSpecMinOccurs(csIndex2); int max2 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex2); //start the checking... if (!(localpart1==localpart2 && uri1==uri2)) { // we have non-matching names. Check substitution groups. if (fSComp == null) fSComp = new SubstitutionGroupComparator(fGrammarResolver,fStringPool,fErrorReporter); if (!checkSubstitutionGroups(localpart1,uri1,localpart2,uri2)) throw new ParticleRecoverableError("rcase-nameAndTypeOK.1: Element name/uri in restriction does not match that of corresponding base element"); } if (!checkOccurrenceRange(min1,max1,min2,max2)) { throw new ParticleRecoverableError("rcase-nameAndTypeOK.3: Element occurrence range not a restriction of base element's range: element is " + fStringPool.toString(localpart1)); } SchemaGrammar aGrammar = fSchemaGrammar; // get the element decl indices for the remainder... String schemaURI = fStringPool.toString(uri1); if ( !schemaURI.equals(fTargetNSURIString) && schemaURI.length() != 0 ) aGrammar= (SchemaGrammar) fGrammarResolver.getGrammar(schemaURI); int eltndx1 = findElement(derivedScope, localpart1, aGrammar, null); if (eltndx1 < 0) return; int eltndx2 = findElement(baseScope, localpart2, aGrammar, bInfo); if (eltndx2 < 0) return; int miscFlags1 = ((SchemaGrammar) aGrammar).getElementDeclMiscFlags(eltndx1); int miscFlags2 = ((SchemaGrammar) aGrammar).getElementDeclMiscFlags(eltndx2); boolean element1IsNillable = (miscFlags1 & SchemaSymbols.NILLABLE) !=0; boolean element2IsNillable = (miscFlags2 & SchemaSymbols.NILLABLE) !=0; boolean element2IsFixed = (miscFlags2 & SchemaSymbols.FIXED) !=0; boolean element1IsFixed = (miscFlags1 & SchemaSymbols.FIXED) !=0; String element1Value = aGrammar.getElementDefaultValue(eltndx1); String element2Value = aGrammar.getElementDefaultValue(eltndx2); if (! (element2IsNillable || !element1IsNillable)) { throw new ParticleRecoverableError("rcase-nameAndTypeOK.2: Element " +fStringPool.toString(localpart1) + " is nillable in the restriction but not the base"); } if (! 
(element2Value == null || !element2IsFixed ||
              (element1IsFixed && element1Value.equals(element2Value)))) {
            throw new ParticleRecoverableError("rcase-nameAndTypeOK.4: Element " + fStringPool.toString(localpart1) + " is either not fixed, or is not fixed with the same value as in the base");
        }

        // check disallowed substitutions
        int blockSet1 = ((SchemaGrammar) aGrammar).getElementDeclBlockSet(eltndx1);
        int blockSet2 = ((SchemaGrammar) aGrammar).getElementDeclBlockSet(eltndx2);
        if (((blockSet1 & blockSet2) != blockSet2) || (blockSet1 == 0 && blockSet2 != 0))
            throw new ParticleRecoverableError("rcase-nameAndTypeOK.6: Element " + fStringPool.toString(localpart1) + "'s disallowed substitutions are not a superset of those of the base element's");

        // Need element decls for the remainder of the checks
        aGrammar.getElementDecl(eltndx1, fTempElementDecl);
        aGrammar.getElementDecl(eltndx2, fTempElementDecl2);

        // check identity constraints
        checkIDConstraintRestriction(fTempElementDecl, fTempElementDecl2, aGrammar, localpart1, localpart2);

        // check that the derived element's type is derived from the base's. - TO BE DONE
        checkTypesOK(fTempElementDecl, fTempElementDecl2, eltndx1, eltndx2, aGrammar, fStringPool.toString(localpart1));
    }

    private void checkTypesOK(XMLElementDecl derived, XMLElementDecl base, int dndx, int bndx,
                              SchemaGrammar aGrammar, String elementName) throws Exception {

        ComplexTypeInfo tempType = ((SchemaGrammar) aGrammar).getElementComplexTypeInfo(dndx);
        if (derived.type == XMLElementDecl.TYPE_SIMPLE) {
            if (base.type != XMLElementDecl.TYPE_SIMPLE)
                throw new ParticleRecoverableError("rcase-nameAndTypeOK.6: Derived element " + elementName + " has a type that does not derive from that of the base");
            if (tempType == null) {
                if (!(checkSimpleTypeDerivationOK(derived.datatypeValidator, base.datatypeValidator)))
                    throw new ParticleRecoverableError("rcase-nameAndTypeOK.6: Derived element " + elementName + " has a type that does not derive from that of the base");
                return;
            }
        }

        ComplexTypeInfo bType = ((SchemaGrammar) aGrammar).getElementComplexTypeInfo(bndx);
        for (; tempType != null; tempType = tempType.baseComplexTypeInfo) {
            if (tempType.derivedBy != SchemaSymbols.RESTRICTION) {
                throw new ParticleRecoverableError("rcase-nameAndTypeOK.6: Derived element " + elementName + " has a type that does not derive from that of the base");
            }
            if (tempType.typeName.equals(bType.typeName))
                break;
        }
        if (tempType == null) {
            throw new ParticleRecoverableError("rcase-nameAndTypeOK.6: Derived element " + elementName + " has a type that does not derive from that of the base");
        }
    }

    private void checkIDConstraintRestriction(XMLElementDecl derivedElemDecl, XMLElementDecl baseElemDecl,
                                              SchemaGrammar grammar, int derivedElemName, int baseElemName)
        throws Exception {
        // this method throws no errors if the ID constraints on
        // the derived element are a logical subset of those on the
        // base element--that is, those that are present are
        // identical to ones in the base element.
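        //
        // Illustrative sketch only (hypothetical constraint, not from this file or the
        // spec): given a base element declared as
        //
        //   <xsd:element name="order" type="orderType">
        //     <xsd:unique name="lineNumber">
        //       <xsd:selector xpath="line"/>
        //       <xsd:field xpath="@num"/>
        //     </xsd:unique>
        //   </xsd:element>
        //
        // a restricting element may repeat the same <unique> (or omit it), but the check
        // below rejects one that adds a <unique>, <key> or <keyref> the base does not
        // declare, or that declares more constraints of a given kind than the base does.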
Vector derivedUnique = derivedElemDecl.unique; Vector baseUnique = baseElemDecl.unique; if(derivedUnique.size() > baseUnique.size()) { throw new ParticleRecoverableError("rcase-nameAndTypeOK.5: derived element " + fStringPool.toString(derivedElemName) + " has fewer <unique> Identity Constraints than the base element"+ fStringPool.toString(baseElemName)); } else { boolean found = true; for(int i=0; i<derivedUnique.size() && found; i++) { Unique id = (Unique)derivedUnique.elementAt(i); found = false; for(int j=0; j<baseUnique.size(); j++) { if(id.equals((Unique)baseUnique.elementAt(j))) { found = true; break; } } } if(!found) { throw new ParticleRecoverableError("rcase-nameAndTypeOK.5: derived element " + fStringPool.toString(derivedElemName) + " has a <unique> Identity Constraint that does not appear on the base element"+ fStringPool.toString(baseElemName)); } } Vector derivedKey = derivedElemDecl.key; Vector baseKey = baseElemDecl.key; if(derivedKey.size() > baseKey.size()) { throw new ParticleRecoverableError("rcase-nameAndTypeOK.5: derived element " + fStringPool.toString(derivedElemName) + " has fewer <key> Identity Constraints than the base element"+ fStringPool.toString(baseElemName)); } else { boolean found = true; for(int i=0; i<derivedKey.size() && found; i++) { Key id = (Key)derivedKey.elementAt(i); found = false; for(int j=0; j<baseKey.size(); j++) { if(id.equals((Key)baseKey.elementAt(j))) { found = true; break; } } } if(!found) { throw new ParticleRecoverableError("rcase-nameAndTypeOK.5: derived element " + fStringPool.toString(derivedElemName) + " has a <key> Identity Constraint that does not appear on the base element"+ fStringPool.toString(baseElemName)); } } Vector derivedKeyRef = derivedElemDecl.keyRef; Vector baseKeyRef = baseElemDecl.keyRef; if(derivedKeyRef.size() > baseKeyRef.size()) { throw new ParticleRecoverableError("rcase-nameAndTypeOK.5: derived element " + fStringPool.toString(derivedElemName) + " has fewer <keyref> Identity Constraints than the base element"+ fStringPool.toString(baseElemName)); } else { boolean found = true; for(int i=0; i<derivedKeyRef.size() && found; i++) { KeyRef id = (KeyRef)derivedKeyRef.elementAt(i); found = false; for(int j=0; j<baseKeyRef.size(); j++) { if(id.equals((KeyRef)baseKeyRef.elementAt(j))) { found = true; break; } } } if(!found) { throw new ParticleRecoverableError("rcase-nameAndTypeOK.5: derived element " + fStringPool.toString(derivedElemName) + " has a <keyref> Identity Constraint that does not appear on the base element"+ fStringPool.toString(baseElemName)); } } } // checkIDConstraintRestriction private boolean checkSubstitutionGroups(int local1, int uri1, int local2, int uri2) throws Exception { // check if either name is in the other's substitution group QName name1 = new QName(-1,local1,local1,uri1); QName name2 = new QName(-1,local2,local2,uri2); if (fSComp.isEquivalentTo(name1,name2) || fSComp.isEquivalentTo(name2,name1)) return true; else return false; } private boolean checkOccurrenceRange(int min1, int max1, int min2, int max2) { if ((min1 >= min2) && ((max2==SchemaSymbols.OCCURRENCE_UNBOUNDED) || (max1!=SchemaSymbols.OCCURRENCE_UNBOUNDED && max1<=max2))) return true; else return false; } private int findElement(int scope, int nameIndex, SchemaGrammar gr, ComplexTypeInfo bInfo) { // check for element at given scope first int elementDeclIndex = gr.getElementDeclIndex(nameIndex,scope); // if not found, check at global scope if (elementDeclIndex == -1) { elementDeclIndex = gr.getElementDeclIndex(nameIndex, -1); // 
if still not found, and base is specified, look it up there if (elementDeclIndex == -1 && bInfo != null) { ComplexTypeInfo baseInfo = bInfo; while (baseInfo != null) { elementDeclIndex = gr.getElementDeclIndex(nameIndex,baseInfo.scopeDefined); if (elementDeclIndex > -1) break; baseInfo = baseInfo.baseComplexTypeInfo; } } } return elementDeclIndex; } private void checkNSCompat(int csIndex1, int derivedScope, int csIndex2) throws Exception { int min1 = fSchemaGrammar.getContentSpecMinOccurs(csIndex1); int max1 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex1); int min2 = fSchemaGrammar.getContentSpecMinOccurs(csIndex2); int max2 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex2); // check Occurrence ranges if (!checkOccurrenceRange(min1,max1,min2,max2)) { throw new ParticleRecoverableError("rcase-NSCompat.2: Element occurrence range not a restriction of base any element's range"); } fSchemaGrammar.getContentSpec(csIndex1, tempContentSpec1); int uri = tempContentSpec1.otherValue; // check wildcard subset if (!wildcardEltAllowsNamespace(csIndex2, uri)) throw new ParticleRecoverableError("rcase-NSCompat.1: Element's namespace not allowed by wildcard in base"); } private boolean wildcardEltAllowsNamespace(int wildcardNode, int uriIndex) { fSchemaGrammar.getContentSpec(wildcardNode, tempContentSpec1); if ((tempContentSpec1.type & 0x0f) == XMLContentSpec.CONTENTSPECNODE_ANY) return true; if ((tempContentSpec1.type & 0x0f)==XMLContentSpec.CONTENTSPECNODE_ANY_NS) { if (uriIndex == tempContentSpec1.otherValue) return true; } else { // must be ANY_OTHER if (uriIndex != tempContentSpec1.otherValue && uriIndex != StringPool.EMPTY_STRING) return true; } return false; } private void checkNSSubset(int csIndex1, int csIndex2) throws Exception { int min1 = fSchemaGrammar.getContentSpecMinOccurs(csIndex1); int max1 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex1); int min2 = fSchemaGrammar.getContentSpecMinOccurs(csIndex2); int max2 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex2); // check Occurrence ranges if (!checkOccurrenceRange(min1,max1,min2,max2)) { throw new ParticleRecoverableError("rcase-NSSubset.2: Wildcard's occurrence range not a restriction of base wildcard's range"); } if (!wildcardEltSubset(csIndex1, csIndex2)) throw new ParticleRecoverableError("rcase-NSSubset.1: Wildcard is not a subset of corresponding wildcard in base"); } private boolean wildcardEltSubset(int wildcardNode, int wildcardBaseNode) { fSchemaGrammar.getContentSpec(wildcardNode, tempContentSpec1); fSchemaGrammar.getContentSpec(wildcardBaseNode, tempContentSpec2); if ((tempContentSpec2.type & 0x0f) == XMLContentSpec.CONTENTSPECNODE_ANY) return true; if ((tempContentSpec1.type & 0x0f)==XMLContentSpec.CONTENTSPECNODE_ANY_OTHER) { if ((tempContentSpec2.type & 0x0f)==XMLContentSpec.CONTENTSPECNODE_ANY_OTHER && tempContentSpec1.otherValue == tempContentSpec2.otherValue) return true; } if ((tempContentSpec1.type & 0x0f)==XMLContentSpec.CONTENTSPECNODE_ANY_NS) { if ((tempContentSpec2.type & 0x0f)==XMLContentSpec.CONTENTSPECNODE_ANY_NS && tempContentSpec1.otherValue == tempContentSpec2.otherValue) return true; if ((tempContentSpec2.type & 0x0f)==XMLContentSpec.CONTENTSPECNODE_ANY_OTHER && tempContentSpec1.otherValue != tempContentSpec2.otherValue) return true; } return false; } private void checkRecurseAsIfGroup(int csIndex1, int derivedScope, int csIndex2, Vector tempVector2, int baseScope, ComplexTypeInfo bInfo) throws Exception { fSchemaGrammar.getContentSpec(csIndex2, tempContentSpec2); // Treat the element as if it were in a 
        // group of the same type as csindex2
        int indexOfGrp=fSchemaGrammar.addContentSpecNode(tempContentSpec2.type, csIndex1,-2, false);
        Vector tmpVector = new Vector();
        tmpVector.addElement(new Integer(csIndex1));

        if (tempContentSpec2.type == XMLContentSpec.CONTENTSPECNODE_ALL ||
            tempContentSpec2.type == XMLContentSpec.CONTENTSPECNODE_SEQ)
            checkRecurse(indexOfGrp, tmpVector, derivedScope, csIndex2, tempVector2, baseScope, bInfo);
        else
            checkRecurseLax(indexOfGrp, tmpVector, derivedScope, csIndex2, tempVector2, baseScope, bInfo);

        tmpVector = null;
    }

    private void checkNSRecurseCheckCardinality(int csIndex1, Vector tempVector1, int derivedScope, int csIndex2) throws Exception {

        // Implement total range check
        int min1 = minEffectiveTotalRange(csIndex1);
        int max1 = maxEffectiveTotalRange(csIndex1);

        int min2 = fSchemaGrammar.getContentSpecMinOccurs(csIndex2);
        int max2 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex2);

        // check Occurrence ranges
        if (!checkOccurrenceRange(min1,max1,min2,max2)) {
            throw new ParticleRecoverableError("rcase-NSSubset.2: Wildcard's occurrence range not a restriction of base wildcard's range");
        }

        if (!wildcardEltSubset(csIndex1, csIndex2))
            throw new ParticleRecoverableError("rcase-NSSubset.1: Wildcard is not a subset of corresponding wildcard in base");

        // Check that each member of the group is a valid restriction of the wildcard
        int count = tempVector1.size();
        for (int i = 0; i < count; i++) {
            Integer particle1 = (Integer)tempVector1.elementAt(i);
            checkParticleDerivationOK(particle1.intValue(),derivedScope,csIndex2,-1,null);
        }
    }

    private void checkRecurse(int csIndex1, Vector tempVector1, int derivedScope, int csIndex2, Vector tempVector2, int baseScope, ComplexTypeInfo bInfo) throws Exception {

        int min1 = fSchemaGrammar.getContentSpecMinOccurs(csIndex1);
        int max1 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex1);
        int min2 = fSchemaGrammar.getContentSpecMinOccurs(csIndex2);
        int max2 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex2);

        // check Occurrence ranges
        if (!checkOccurrenceRange(min1,max1,min2,max2)) {
            throw new ParticleRecoverableError("rcase-Recurse.1: Occurrence range of group is not a valid restriction of occurrence range of base group");
        }

        int count1= tempVector1.size();
        int count2= tempVector2.size();

        int current = 0;
        label: for (int i = 0; i<count1; i++) {

            Integer particle1 = (Integer)tempVector1.elementAt(i);
            for (int j = current; j<count2; j++) {
                Integer particle2 = (Integer)tempVector2.elementAt(j);
                current +=1;
                try {
                    checkParticleDerivationOK(particle1.intValue(),derivedScope, particle2.intValue(), baseScope, bInfo);
                    continue label;
                }
                catch (ParticleRecoverableError e) {
                    if (!particleEmptiable(particle2.intValue()))
                        throw new ParticleRecoverableError("rcase-Recurse.2: There is not a complete functional mapping between the particles");
                }
            }
            throw new ParticleRecoverableError("rcase-Recurse.2: There is not a complete functional mapping between the particles");
        }

        // Now, see if there are some elements in the base we didn't match up
        for (int j=current; j < count2; j++) {
            Integer particle2 = (Integer)tempVector2.elementAt(j);
            if (!particleEmptiable(particle2.intValue())) {
                throw new ParticleRecoverableError("rcase-Recurse.2: There is not a complete functional mapping between the particles");
            }
        }
    }

    private void checkRecurseUnordered(int csIndex1, Vector tempVector1, int derivedScope, int csIndex2, Vector tempVector2, int baseScope, ComplexTypeInfo bInfo) throws Exception {

        int min1 = fSchemaGrammar.getContentSpecMinOccurs(csIndex1);
        int max1 =
fSchemaGrammar.getContentSpecMaxOccurs(csIndex1); int min2 = fSchemaGrammar.getContentSpecMinOccurs(csIndex2); int max2 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex2); // check Occurrence ranges if (!checkOccurrenceRange(min1,max1,min2,max2)) { throw new ParticleRecoverableError("rcase-RecurseUnordered.1: Occurrence range of group is not a valid restriction of occurence range of base group"); } int count1= tempVector1.size(); int count2 = tempVector2.size(); boolean foundIt[] = new boolean[count2]; label: for (int i = 0; i<count1; i++) { Integer particle1 = (Integer)tempVector1.elementAt(i); for (int j = 0; j<count2; j++) { Integer particle2 = (Integer)tempVector2.elementAt(j); try { checkParticleDerivationOK(particle1.intValue(),derivedScope, particle2.intValue(), baseScope, bInfo); if (foundIt[j]) throw new ParticleRecoverableError("rcase-RecurseUnordered.2: There is not a complete functional mapping between the particles"); else foundIt[j]=true; continue label; } catch (ParticleRecoverableError e) { } } // didn't find a match. Detect an error throw new ParticleRecoverableError("rcase-RecurseUnordered.2: There is not a complete functional mapping between the particles"); } } private void checkRecurseLax(int csIndex1, Vector tempVector1, int derivedScope, int csIndex2, Vector tempVector2, int baseScope, ComplexTypeInfo bInfo) throws Exception { int min1 = fSchemaGrammar.getContentSpecMinOccurs(csIndex1); int max1 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex1); int min2 = fSchemaGrammar.getContentSpecMinOccurs(csIndex2); int max2 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex2); // check Occurrence ranges if (!checkOccurrenceRange(min1,max1,min2,max2)) { throw new ParticleRecoverableError("rcase-RecurseLax.1: Occurrence range of group is not a valid restriction of occurence range of base group"); } int count1= tempVector1.size(); int count2 = tempVector2.size(); int current = 0; label: for (int i = 0; i<count1; i++) { Integer particle1 = (Integer)tempVector1.elementAt(i); for (int j = current; j<count2; j++) { Integer particle2 = (Integer)tempVector2.elementAt(j); current +=1; try { checkParticleDerivationOK(particle1.intValue(),derivedScope, particle2.intValue(), baseScope, bInfo); continue label; } catch (ParticleRecoverableError e) { } } // didn't find a match. 
            // Detect an error.
            throw new ParticleRecoverableError("rcase-Recurse.2: There is not a complete functional mapping between the particles");
        }
    }

    private void checkMapAndSum(int csIndex1, Vector tempVector1, int derivedScope, int csIndex2, Vector tempVector2, int baseScope, ComplexTypeInfo bInfo) throws Exception {

        // See if the sequence group is a valid restriction of the choice
        //
        // Here is an example of a valid restriction:
        //   <choice minOccurs="2">
        //   </choice>
        //
        //   <sequence>
        //   </sequence>
        //
        // check the occurrence ranges
        // Occurrence range for the sequence:
        //   min1 = (length of particles) * min of sequence
        //   max1 = (length of particles) * max of sequence (or unbounded)
        //   min2 = minOccurs of choice
        //   max2 = maxOccurs of choice
        int count1 = tempVector1.size();
        int count2 = tempVector2.size();

        int min1 = fSchemaGrammar.getContentSpecMinOccurs(csIndex1) * count1;
        int max1 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex1);
        if (max1!=SchemaSymbols.OCCURRENCE_UNBOUNDED)
            max1 = max1 * count1;
        int min2 = fSchemaGrammar.getContentSpecMinOccurs(csIndex2);
        int max2 = fSchemaGrammar.getContentSpecMaxOccurs(csIndex2);

        // check Occurrence ranges
        if (!checkOccurrenceRange(min1,max1,min2,max2)) {
            throw new ParticleRecoverableError("rcase-MapAndSum.2: Occurrence range of group is not a valid restriction of occurrence range of base group");
        }

        label: for (int i = 0; i<count1; i++) {

            Integer particle1 = (Integer)tempVector1.elementAt(i);
            for (int j = 0; j<count2; j++) {
                Integer particle2 = (Integer)tempVector2.elementAt(j);
                try {
                    checkParticleDerivationOK(particle1.intValue(),derivedScope, particle2.intValue(), baseScope, bInfo);
                    continue label;
                }
                catch (ParticleRecoverableError e) {
                }
            }
            // didn't find a match. Detect an error.
            throw new ParticleRecoverableError("rcase-MapAndSum.1: There is not a complete functional mapping between the particles");
        }
    }

    private int importContentSpec(SchemaGrammar aGrammar, int contentSpecHead ) throws Exception {

        XMLContentSpec ctsp = new XMLContentSpec();
        aGrammar.getContentSpec(contentSpecHead, ctsp);
        int left = -1;
        int right = -1;

        if ( ctsp.type == ctsp.CONTENTSPECNODE_LEAF
             || (ctsp.type & 0x0f) == ctsp.CONTENTSPECNODE_ANY
             || (ctsp.type & 0x0f) == ctsp.CONTENTSPECNODE_ANY_NS
             || (ctsp.type & 0x0f) == ctsp.CONTENTSPECNODE_ANY_OTHER ) {
            return fSchemaGrammar.addContentSpecNode(ctsp.type, ctsp.value, ctsp.otherValue, false);
        }
        else if (ctsp.type == -1) // case where type being extended has no content
            return -2;
        else {
            if ( ctsp.value == -1 ) {
                left = -1;
            }
            else {
                left = importContentSpec(aGrammar, ctsp.value);
            }

            if ( ctsp.otherValue == -1 ) {
                right = -1;
            }
            else {
                right = importContentSpec(aGrammar, ctsp.otherValue);
            }

            return fSchemaGrammar.addContentSpecNode(ctsp.type, left, right, false);
        }
    }

    private int handleOccurrences(int index, Element particle) throws Exception {
        // Pass through, indicating we're not processing an <all>
        return handleOccurrences(index, particle, NOT_ALL_CONTEXT);
    }

    // Checks constraints for minOccurs, maxOccurs and expands content model
    // accordingly
    private int handleOccurrences(int index, Element particle, int allContextFlags) throws Exception {

        // if index is invalid, return
        if (index < 0)
            return index;

        String minOccurs = particle.getAttribute(SchemaSymbols.ATT_MINOCCURS).trim();
        String maxOccurs = particle.getAttribute(SchemaSymbols.ATT_MAXOCCURS).trim();
        boolean processingAllEl = ((allContextFlags & PROCESSING_ALL_EL) != 0);
        boolean processingAllGP = ((allContextFlags & PROCESSING_ALL_GP) != 0);
        boolean groupRefWithAll = ((allContextFlags &
GROUP_REF_WITH_ALL) != 0); boolean isGroupChild = ((allContextFlags & CHILD_OF_GROUP) != 0); // Neither minOccurs nor maxOccurs may be specified // for the child of a model group definition. if (isGroupChild && (minOccurs.length() != 0 || maxOccurs.length() != 0)) { reportSchemaError(SchemaMessageProvider.MinMaxOnGroupChild, null); minOccurs = (maxOccurs = "1"); } // If minOccurs=maxOccurs=0, no component is specified if(minOccurs.equals("0") && maxOccurs.equals("0")){ return -2; } int min=1, max=1; if (minOccurs.length() == 0) { minOccurs = "1"; } if (maxOccurs.length() == 0) { maxOccurs = "1"; } // For the elements referenced in an <all>, minOccurs attribute // must be zero or one, and maxOccurs attribute must be one. // For a complex type definition that contains an <all> or a // reference a <group> whose model group is an all model group, // minOccurs and maxOccurs must be one. if (processingAllEl || groupRefWithAll || processingAllGP) { if ((processingAllGP||groupRefWithAll||!minOccurs.equals("0")) && !minOccurs.equals("1")) { int minMsg; if (processingAllEl) { minMsg = SchemaMessageProvider.BadMinMaxForAllElem; } else if (processingAllGP) { minMsg = SchemaMessageProvider.BadMinMaxForAllGp; } else { minMsg = SchemaMessageProvider.BadMinMaxForGroupWithAll; } reportSchemaError(minMsg, new Object [] { "minOccurs", minOccurs }); minOccurs = "1"; } if (!maxOccurs.equals("1")) { int maxMsg; if (processingAllEl) { maxMsg = SchemaMessageProvider.BadMinMaxForAllElem; } else if (processingAllGP) { maxMsg = SchemaMessageProvider.BadMinMaxForAllGp; } else { maxMsg = SchemaMessageProvider.BadMinMaxForGroupWithAll; } reportSchemaError(maxMsg, new Object [] { "maxOccurs", maxOccurs }); maxOccurs = "1"; } } try { min = Integer.parseInt(minOccurs); } catch (Exception e){ reportSchemaError(SchemaMessageProvider.GenericError, new Object [] { "illegal value for minOccurs or maxOccurs : '" +e.getMessage()+ "' "}); } if (maxOccurs.equals("unbounded")) { max = SchemaSymbols.OCCURRENCE_UNBOUNDED; } else { try { max = Integer.parseInt(maxOccurs); } catch (Exception e){ reportSchemaError(SchemaMessageProvider.GenericError, new Object [] { "illegal value for minOccurs or maxOccurs : '" +e.getMessage()+ "' "}); } // Check that minOccurs isn't greater than maxOccurs. // p-props-correct 2.1 if (min > max) { reportGenericSchemaError("p-props-correct:2.1 Value of minOccurs '" + minOccurs + "' must not be greater than value of maxOccurs '" + maxOccurs +"'"); } if (max < 1) { reportGenericSchemaError("p-props-correct:2.2 Value of maxOccurs " + maxOccurs + " is invalid. It must be greater than or equal to 1"); } } if (fSchemaGrammar.getDeferContentSpecExpansion()) { fSchemaGrammar.setContentSpecMinOccurs(index,min); fSchemaGrammar.setContentSpecMaxOccurs(index,max); return index; } else { return fSchemaGrammar.expandContentModel(index,min,max); } } /** * Traverses Schema attribute declaration. * * <attribute * default = string * fixed = string * form = (qualified | unqualified) * id = ID * name = NCName * ref = QName * type = QName * use = (optional | prohibited | required) : optional * {any attributes with non-schema namespace ...}> * Content: (annotation? , simpleType?) * <attribute/> * * @param attributeDecl: the declaration of the attribute under * consideration * @param typeInfo: Contains the index of the element to which * the attribute declaration is attached. * @param referredTo: true iff traverseAttributeDecl was called because * of encountering a ``ref''property (used * to suppress error-reporting). 
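     *
     * Illustrative example (hypothetical declaration, not taken from this file): a global
     * declaration such as
     *   <attribute name="lang" type="xsd:language" use="optional" default="en"/>
     * is traversed by this method with typeInfo == null, while the same declaration nested
     * inside a complexType arrives here with the enclosing type's ComplexTypeInfo.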
     * @return 0 if the attribute schema is validated successfully, otherwise -1
     * @exception Exception
     */
    private int traverseAttributeDecl( Element attrDecl, ComplexTypeInfo typeInfo, boolean referredTo ) throws Exception {

        // General Attribute Checking
        int scope = isTopLevel(attrDecl)? GeneralAttrCheck.ELE_CONTEXT_GLOBAL: GeneralAttrCheck.ELE_CONTEXT_LOCAL;
        Hashtable attrValues = generalCheck(attrDecl, scope);

        ////// Get declared fields of the attribute
        String defaultStr = attrDecl.getAttribute(SchemaSymbols.ATT_DEFAULT);
        String fixedStr = attrDecl.getAttribute(SchemaSymbols.ATT_FIXED);
        String formStr = attrDecl.getAttribute(SchemaSymbols.ATT_FORM); //form attribute
        String attNameStr = attrDecl.getAttribute(SchemaSymbols.ATT_NAME);
        String refStr = attrDecl.getAttribute(SchemaSymbols.ATT_REF);
        String datatypeStr = attrDecl.getAttribute(SchemaSymbols.ATT_TYPE);
        String useStr = attrDecl.getAttribute(SchemaSymbols.ATT_USE);
        Element simpleTypeChild = findAttributeSimpleType(attrDecl);

        Attr defaultAtt = attrDecl.getAttributeNode(SchemaSymbols.ATT_DEFAULT);
        Attr fixedAtt = attrDecl.getAttributeNode(SchemaSymbols.ATT_FIXED);
        Attr formAtt = attrDecl.getAttributeNode(SchemaSymbols.ATT_FORM);
        Attr attNameAtt = attrDecl.getAttributeNode(SchemaSymbols.ATT_NAME);
        Attr refAtt = attrDecl.getAttributeNode(SchemaSymbols.ATT_REF);
        Attr datatypeAtt = attrDecl.getAttributeNode(SchemaSymbols.ATT_TYPE);
        Attr useAtt = attrDecl.getAttributeNode(SchemaSymbols.ATT_USE);

        checkEnumerationRequiredNotation(attNameStr, datatypeStr);

        ////// define attribute declaration Schema components
        int attName;        // attribute name indexed in the string pool
        int uriIndex;       // indexed for target namespace uri
        QName attQName;     // QName combining attName and uriIndex

        // attribute type
        int attType;
        boolean attIsList = false;
        int dataTypeSymbol = -1;
        String localpart = null;

        // validator
        DatatypeValidator dv;
        boolean dvIsDerivedFromID = false;

        // value constraints and use type
        int attValueAndUseType = 0;
        int attValueConstraint = -1;   // indexed value in a string pool

        ////// Check W3C's PR-Structure 3.2.3
        boolean isAttrTopLevel = isTopLevel(attrDecl);
        boolean isOptional = false;
        boolean isProhibited = false;
        boolean isRequired = false;

        StringBuffer errorContext = new StringBuffer(30);
        errorContext.append(" -- ");
        if(typeInfo == null) {
            errorContext.append("(global attribute) ");
        }
        else if(typeInfo.typeName == null) {
            errorContext.append("(local attribute) ");
        }
        else {
            errorContext.append("(attribute) ").append(typeInfo.typeName).append("/");
        }
        errorContext.append(attNameStr).append(' ').append(refStr);

        if(useStr.length() == 0 || useStr.equals(SchemaSymbols.ATTVAL_OPTIONAL)) {
            attValueAndUseType |= XMLAttributeDecl.USE_TYPE_OPTIONAL;
            isOptional = true;
        }
        else if(useStr.equals(SchemaSymbols.ATTVAL_PROHIBITED)) {
            attValueAndUseType |= XMLAttributeDecl.USE_TYPE_PROHIBITED;
            isProhibited = true;
        }
        else if(useStr.equals(SchemaSymbols.ATTVAL_REQUIRED)) {
            attValueAndUseType |= XMLAttributeDecl.USE_TYPE_REQUIRED;
            isRequired = true;
        }
        else {
            reportGenericSchemaError("An attribute cannot declare \"" + SchemaSymbols.ATT_USE + "\" as \"" + useStr + "\"" + errorContext);
        }

        if(defaultAtt != null && fixedAtt != null) {
            reportGenericSchemaError("src-attribute.1: \"" + SchemaSymbols.ATT_DEFAULT + "\" and \"" + SchemaSymbols.ATT_FIXED + "\" cannot be both present" + errorContext);
        }
        else if(defaultAtt != null && !isOptional) {
            reportGenericSchemaError("src-attribute.2: If both \"" + SchemaSymbols.ATT_DEFAULT + "\" and \"" + SchemaSymbols.ATT_USE + "\" " + "are present for an
attribute declaration, \"" + SchemaSymbols.ATT_USE + "\" can only be \"" + SchemaSymbols.ATTVAL_OPTIONAL + "\", not \"" + useStr + "\"." + errorContext); } if(!isAttrTopLevel) { if((refAtt == null) == (attNameAtt == null)) { reportGenericSchemaError("src-attribute.3.1: When the attribute's parent is not <schema> , one of \"" + SchemaSymbols.ATT_REF + "\" and \"" + SchemaSymbols.ATT_NAME + "\" should be declared, but not both."+ errorContext); return -1; } else if((refAtt != null) && (simpleTypeChild != null || formAtt != null || datatypeAtt != null)) { reportGenericSchemaError("src-attribute.3.2: When the attribute's parent is not <schema> and \"" + SchemaSymbols.ATT_REF + "\" is present, " + "all of <" + SchemaSymbols.ELT_SIMPLETYPE + ">, " + SchemaSymbols.ATT_FORM + " and " + SchemaSymbols.ATT_TYPE + " must be absent."+ errorContext); } } if(datatypeAtt != null && simpleTypeChild != null) { reportGenericSchemaError("src-attribute.4: \"" + SchemaSymbols.ATT_TYPE + "\" and <" + SchemaSymbols.ELT_SIMPLETYPE + "> cannot both be present"+ errorContext); } ////// Check W3C's PR-Structure 3.2.2 // check case-dependent attribute declaration schema components if (isAttrTopLevel) { //// global attributes // set name component attName = fStringPool.addSymbol(attNameStr); if(fTargetNSURIString.length() == 0) { uriIndex = StringPool.EMPTY_STRING; } else { uriIndex = fTargetNSURI; } // attQName = new QName(-1,attName,attName,uriIndex); // Above line replaced by following 2 to work around a JIT problem. attQName = new QName(); attQName.setValues(-1,attName,attName,uriIndex); } else if(refAtt == null) { //// local attributes // set name component attName = fStringPool.addSymbol(attNameStr); if((formStr.length() > 0 && formStr.equals(SchemaSymbols.ATTVAL_QUALIFIED)) || (formStr.length() == 0 && fAttributeDefaultQualified)) { uriIndex = fTargetNSURI; } else { uriIndex = StringPool.EMPTY_STRING; } // attQName = new QName(-1,attName,attName,uriIndex); // Above line replaced by following 2 to work around a JIT problem. attQName = new QName(); attQName.setValues(-1,attName,attName,uriIndex); } else { //// locally referenced global attributes String prefix; int colonptr = refStr.indexOf(":"); if ( colonptr > 0) { prefix = refStr.substring(0,colonptr); localpart = refStr.substring(colonptr+1); } else { prefix = ""; localpart = refStr; } String uriStr = resolvePrefixToURI(prefix); if (!uriStr.equals(fTargetNSURIString)) { addAttributeDeclFromAnotherSchema(localpart, uriStr, typeInfo); return 0; } Element referredAttribute = getTopLevelComponentByName(SchemaSymbols.ELT_ATTRIBUTE,localpart); if (referredAttribute != null) { // don't need to traverse ref'd attribute if we're global; just make sure it's there... traverseAttributeDecl(referredAttribute, typeInfo, true); Attr referFixedAttr = referredAttribute.getAttributeNode(SchemaSymbols.ATT_FIXED); String referFixed = referFixedAttr == null ? 
null : referFixedAttr.getValue(); if (referFixed != null && (defaultAtt != null || fixedAtt != null && !referFixed.equals(fixedStr))) { reportGenericSchemaError("au-props-correct.2: If the {attribute declaration} has a fixed {value constraint}, then if the attribute use itself has a {value constraint}, it must also be fixed and its value must match that of the {attribute declaration}'s {value constraint}" + errorContext); } // this nasty hack needed to ``override'' the // global attribute with "use" and "fixed" on the ref'ing attribute if((typeInfo != null) && (!isOptional || fixedStr.length() > 0)) { int referredAttName = fStringPool.addSymbol(referredAttribute.getAttribute(SchemaSymbols.ATT_NAME)); uriIndex = StringPool.EMPTY_STRING; if ( fTargetNSURIString.length() > 0) { uriIndex = fTargetNSURI; } QName referredAttQName = new QName(-1,referredAttName,referredAttName,uriIndex); int tempIndex = fSchemaGrammar.getAttributeDeclIndex(typeInfo.templateElementIndex, referredAttQName); XMLAttributeDecl referredAttrDecl = new XMLAttributeDecl(); fSchemaGrammar.getAttributeDecl(tempIndex, referredAttrDecl); boolean updated = false; int useDigits = XMLAttributeDecl.USE_TYPE_OPTIONAL | XMLAttributeDecl.USE_TYPE_PROHIBITED | XMLAttributeDecl.USE_TYPE_REQUIRED; int valueDigits = XMLAttributeDecl.VALUE_CONSTRAINT_DEFAULT | XMLAttributeDecl.VALUE_CONSTRAINT_FIXED; if(!isOptional && (referredAttrDecl.defaultType & useDigits) != (attValueAndUseType & useDigits)) { if(referredAttrDecl.defaultType != XMLAttributeDecl.USE_TYPE_PROHIBITED) { referredAttrDecl.defaultType |= useDigits; referredAttrDecl.defaultType ^= useDigits; // clear the use referredAttrDecl.defaultType |= (attValueAndUseType & useDigits); updated = true; } } if(fixedStr.length() > 0) { if((referredAttrDecl.defaultType & XMLAttributeDecl.VALUE_CONSTRAINT_FIXED) == 0) { referredAttrDecl.defaultType |= valueDigits; referredAttrDecl.defaultType ^= valueDigits; // clear the value referredAttrDecl.defaultType |= XMLAttributeDecl.VALUE_CONSTRAINT_FIXED; referredAttrDecl.defaultValue = fixedStr; updated = true; } } if(updated) { fSchemaGrammar.setAttributeDecl(typeInfo.templateElementIndex, tempIndex, referredAttrDecl); } } } else if (fAttributeDeclRegistry.get(localpart) != null) { addAttributeDeclFromAnotherSchema(localpart, uriStr, typeInfo); } else { // REVISIT: Localize reportGenericSchemaError ( "Couldn't find top level attribute " + refStr + errorContext); } return 0; } if (uriIndex == fXsiURI) { reportGenericSchemaError("no-xsi: The {target namespace} of an attribute declaration must not match " + SchemaSymbols.URI_XSI + errorContext); } // validation of attribute type is same for each case of declaration if (simpleTypeChild != null) { attType = XMLAttributeDecl.TYPE_SIMPLE; dataTypeSymbol = traverseSimpleTypeDecl(simpleTypeChild); localpart = fStringPool.toString(dataTypeSymbol); dv = fDatatypeRegistry.getDatatypeValidator(localpart); } else if (datatypeStr.length() != 0) { dataTypeSymbol = fStringPool.addSymbol(datatypeStr); String prefix; int colonptr = datatypeStr.indexOf(":"); if ( colonptr > 0) { prefix = datatypeStr.substring(0,colonptr); localpart = datatypeStr.substring(colonptr+1); } else { prefix = ""; localpart = datatypeStr; } String typeURI = resolvePrefixToURI(prefix); if ( typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)) { dv = getDatatypeValidator(SchemaSymbols.URI_SCHEMAFORSCHEMA, localpart); if (localpart.equals("ID")) { attType = XMLAttributeDecl.TYPE_ID; } else if (localpart.equals("IDREF")) { attType = 
XMLAttributeDecl.TYPE_IDREF; } else if (localpart.equals("IDREFS")) { attType = XMLAttributeDecl.TYPE_IDREF; attIsList = true; } else if (localpart.equals("ENTITY")) { attType = XMLAttributeDecl.TYPE_ENTITY; } else if (localpart.equals("ENTITIES")) { attType = XMLAttributeDecl.TYPE_ENTITY; attIsList = true; } else if (localpart.equals("NMTOKEN")) { attType = XMLAttributeDecl.TYPE_NMTOKEN; } else if (localpart.equals("NMTOKENS")) { attType = XMLAttributeDecl.TYPE_NMTOKEN; attIsList = true; } else if (localpart.equals(SchemaSymbols.ELT_NOTATION)) { attType = XMLAttributeDecl.TYPE_NOTATION; } else { attType = XMLAttributeDecl.TYPE_SIMPLE; if(dv == null && !referredTo) { // REVISIT: localize reportGenericSchemaError("attribute " + attNameStr + " has a type (" + datatypeStr + ") which is not recognized as one of the predefined schema datatypes"); } } } else { //isn't of the schema for schemas namespace... attType = XMLAttributeDecl.TYPE_SIMPLE; // check if the type is from the same Schema dv = getDatatypeValidator(typeURI, localpart); if (dv == null && typeURI.equals(fTargetNSURIString) ) { Element topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (topleveltype != null) { traverseSimpleTypeDecl( topleveltype ); dv = getDatatypeValidator(typeURI, localpart); }else if (!referredTo) { // REVISIT: Localize reportGenericSchemaError("simpleType not found : " + "("+typeURI+":"+ localpart+")"+ errorContext); } } if(dv == null && !referredTo) { // REVISIT: localize reportGenericSchemaError("attribute " + attNameStr + " has an unrecognized type " + datatypeStr); } } } else { attType = XMLAttributeDecl.TYPE_SIMPLE; localpart = "string"; dataTypeSymbol = fStringPool.addSymbol(localpart); dv = fDatatypeRegistry.getDatatypeValidator(localpart); } // if(...Type) // validation of data constraint is same for each case of declaration if(defaultStr.length() > 0) { attValueAndUseType |= XMLAttributeDecl.VALUE_CONSTRAINT_DEFAULT; attValueConstraint = fStringPool.addString(defaultStr); } else if(fixedStr.length() > 0) { attValueAndUseType |= XMLAttributeDecl.VALUE_CONSTRAINT_FIXED; attValueConstraint = fStringPool.addString(fixedStr); } ////// Check W3C's PR-Structure 3.2.6 // check default value is valid for the datatype. if (attType == XMLAttributeDecl.TYPE_SIMPLE && attValueConstraint != -1) { try { if (dv != null) { if(defaultStr.length() > 0) { //REVISIT dv.validate(defaultStr, null); } else { dv.validate(fixedStr, null); } } else if (!referredTo) reportSchemaError(SchemaMessageProvider.NoValidatorFor, new Object [] { datatypeStr }); } catch (InvalidDatatypeValueException idve) { if (!referredTo) reportSchemaError(SchemaMessageProvider.IncorrectDefaultType, new Object [] { attrDecl.getAttribute(SchemaSymbols.ATT_NAME), idve.getMessage() }); //a-props-correct.2 } } // check the coexistence of ID and value constraint dvIsDerivedFromID = ((dv != null) && dv instanceof IDDatatypeValidator); if (dvIsDerivedFromID && attValueConstraint != -1) { reportGenericSchemaError("a-props-correct.3: If type definition is or is derived from ID ," + "there must not be a value constraint" + errorContext); } if (attNameStr.equals("xmlns")) { reportGenericSchemaError("no-xmlns: The {name} of an attribute declaration must not match 'xmlns'" + errorContext); } ////// every contraints were matched. Now register the attribute declaration //put the top-levels in the attribute decl registry. 
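        // Illustrative example (hypothetical values, not from the original source): for a
        // declaration like <attribute name="lang" type="xsd:string" default="en"/> the checks
        // above leave attType == XMLAttributeDecl.TYPE_SIMPLE, attValueAndUseType ==
        // (XMLAttributeDecl.USE_TYPE_OPTIONAL | XMLAttributeDecl.VALUE_CONSTRAINT_DEFAULT), and
        // attValueConstraint holding the pooled index of "en"; those are the values stored below.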
if (isAttrTopLevel) { fTempAttributeDecl.datatypeValidator = dv; fTempAttributeDecl.name.setValues(attQName); fTempAttributeDecl.type = attType; fTempAttributeDecl.defaultType = attValueAndUseType; fTempAttributeDecl.list = attIsList; if (attValueConstraint != -1 ) { fTempAttributeDecl.defaultValue = fStringPool.toString(attValueConstraint); } fAttributeDeclRegistry.put(attNameStr, new XMLAttributeDecl(fTempAttributeDecl)); } // add attribute to attr decl pool in fSchemaGrammar, if (typeInfo != null) { // check that there aren't duplicate attributes int temp = fSchemaGrammar.getAttributeDeclIndex(typeInfo.templateElementIndex, attQName); if (temp > -1) { reportGenericSchemaError("ct-props-correct.4: Duplicate attribute " + fStringPool.toString(attQName.rawname) + " in type definition"); } // check that there aren't multiple attributes with type derived from ID if (dvIsDerivedFromID) { if (typeInfo.containsAttrTypeID()) { reportGenericSchemaError("ct-props-correct.5: More than one attribute derived from type ID cannot appear in the same complex type definition."); } typeInfo.setContainsAttrTypeID(); } fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, attQName, attType, dataTypeSymbol, attValueAndUseType, fStringPool.toString( attValueConstraint), dv, attIsList); } return 0; } // end of method traverseAttribute private int addAttributeDeclFromAnotherSchema( String name, String uriStr, ComplexTypeInfo typeInfo) throws Exception { SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr); if (uriStr == null || ! (aGrammar instanceof SchemaGrammar) ) { // REVISIT: Localize reportGenericSchemaError( "no attribute named \"" + name + "\" was defined in schema : " + uriStr); return -1; } Hashtable attrRegistry = aGrammar.getAttributeDeclRegistry(); if (attrRegistry == null) { // REVISIT: Localize reportGenericSchemaError( "no attribute named \"" + name + "\" was defined in schema : " + uriStr); return -1; } XMLAttributeDecl tempAttrDecl = (XMLAttributeDecl) attrRegistry.get(name); if (tempAttrDecl == null) { // REVISIT: Localize reportGenericSchemaError( "no attribute named \"" + name + "\" was defined in schema : " + uriStr); return -1; } if (typeInfo!= null) { // check that there aren't duplicate attributes int temp = fSchemaGrammar.getAttributeDeclIndex(typeInfo.templateElementIndex, tempAttrDecl.name); if (temp > -1) { reportGenericSchemaError("ct-props-correct.4: Duplicate attribute " + fStringPool.toString(tempAttrDecl.name.rawname) + " in type definition"); } // check that there aren't multiple attributes with type derived from ID if (tempAttrDecl.datatypeValidator != null && tempAttrDecl.datatypeValidator instanceof IDDatatypeValidator) { if (typeInfo.containsAttrTypeID()) { reportGenericSchemaError("ct-props-correct.5: More than one attribute derived from type ID cannot appear in the same complex type definition"); } typeInfo.setContainsAttrTypeID(); } fSchemaGrammar.addAttDef( typeInfo.templateElementIndex, tempAttrDecl.name, tempAttrDecl.type, -1, tempAttrDecl.defaultType, tempAttrDecl.defaultValue, tempAttrDecl.datatypeValidator, tempAttrDecl.list); } return 0; } /* * * <attributeGroup * id = ID * name = NCName * ref = QName> * Content: (annotation?, (attribute|attributeGroup)*, anyAttribute?) * </> * */ private int traverseAttributeGroupDecl( Element attrGrpDecl, ComplexTypeInfo typeInfo, Vector anyAttDecls ) throws Exception { // General Attribute Checking int scope = isTopLevel(attrGrpDecl)? 
GeneralAttrCheck.ELE_CONTEXT_GLOBAL: GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(attrGrpDecl, scope); // attributeGroup name String attGrpNameStr = attrGrpDecl.getAttribute(SchemaSymbols.ATT_NAME); int attGrpName = fStringPool.addSymbol(attGrpNameStr); String ref = attrGrpDecl.getAttribute(SchemaSymbols.ATT_REF); Element child = checkContent( attrGrpDecl, XUtil.getFirstChildElement(attrGrpDecl), true ); if (ref.length() != 0) { if(isTopLevel(attrGrpDecl)) // REVISIT: localize reportGenericSchemaError ( "An attributeGroup with \"ref\" present must not have <schema> or <redefine> as its parent"); if(attGrpNameStr.length() != 0) // REVISIT: localize reportGenericSchemaError ( "attributeGroup " + attGrpNameStr + " cannot refer to another attributeGroup, but it refers to " + ref); if (XUtil.getFirstChildElement(attrGrpDecl) != null || attrGrpDecl.getNodeValue() != null) // REVISIT: localize reportGenericSchemaError ( "An attributeGroup with \"ref\" present must be empty"); String prefix = ""; String localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } String uriStr = resolvePrefixToURI(prefix); if (!uriStr.equals(fTargetNSURIString)) { traverseAttributeGroupDeclFromAnotherSchema(localpart, uriStr, typeInfo, anyAttDecls); return -1; // TO DO // REVISIT: different NS, not supported yet. // REVISIT: Localize //reportGenericSchemaError("Feature not supported: see an attribute from different NS"); } else { Element parent = (Element)attrGrpDecl.getParentNode(); if (parent.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) && parent.getAttribute(SchemaSymbols.ATT_NAME).equals(localpart)) { if (!((Element)parent.getParentNode()).getLocalName().equals(SchemaSymbols.ELT_REDEFINE)) { reportGenericSchemaError("src-attribute_group.3: Circular group reference is disallowed outside <redefine> -- "+ref); } return -1; } } if(typeInfo != null) { // only do this if we're traversing because we were ref'd here; when we come // upon this decl by itself we're just validating. Element referredAttrGrp = getTopLevelComponentByName(SchemaSymbols.ELT_ATTRIBUTEGROUP,localpart); if (referredAttrGrp != null) { traverseAttributeGroupDecl(referredAttrGrp, typeInfo, anyAttDecls); } else { // REVISIT: Localize reportGenericSchemaError ( "Couldn't find top level attributeGroup " + ref); } return -1; } } else if (attGrpNameStr.length() == 0) // REVISIT: localize reportGenericSchemaError ( "an attributeGroup must have a name or a ref attribute present"); for (; child != null ; child = XUtil.getNextSiblingElement(child)) { if ( child.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTE) ){ traverseAttributeDecl(child, typeInfo, false); } else if ( child.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) ) { NamespacesScope currScope = (NamespacesScope)fNamespacesScope.clone(); // if(typeInfo != null) // only do this if we're traversing because we were ref'd here; when we come // upon this decl by itself we're just validating. 
traverseAttributeGroupDecl(child, typeInfo,anyAttDecls); fNamespacesScope = currScope; } else break; } if (child != null) { if ( child.getLocalName().equals(SchemaSymbols.ELT_ANYATTRIBUTE) ) { if (anyAttDecls != null) { anyAttDecls.addElement(traverseAnyAttribute(child)); } if (XUtil.getNextSiblingElement(child) != null) // REVISIT: localize reportGenericSchemaError ( "src-attribute_group.0: The content of an attributeGroup declaration must match (annotation?, ((attribute | attributeGroup)*, anyAttribute?))"); return -1; } else // REVISIT: localize reportGenericSchemaError ( "src-attribute_group.0: The content of an attributeGroup declaration must match (annotation?, ((attribute | attributeGroup)*, anyAttribute?))"); } return -1; } // end of method traverseAttributeGroup private int traverseAttributeGroupDeclFromAnotherSchema( String attGrpName , String uriStr, ComplexTypeInfo typeInfo, Vector anyAttDecls ) throws Exception { SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr); if (uriStr == null || aGrammar == null || ! (aGrammar instanceof SchemaGrammar) ) { // REVISIT: Localize reportGenericSchemaError("!!Schema not found in #traverseAttributeGroupDeclFromAnotherSchema, schema uri : " + uriStr); return -1; } // attribute name Element attGrpDecl = (Element) aGrammar.topLevelAttrGrpDecls.get((Object)attGrpName); if (attGrpDecl == null) { // REVISIT: Localize reportGenericSchemaError( "no attribute group named \"" + attGrpName + "\" was defined in schema : " + uriStr); return -1; } NamespacesScope saveNSMapping = fNamespacesScope; int saveTargetNSUri = fTargetNSURI; fTargetNSURI = fStringPool.addSymbol(aGrammar.getTargetNamespaceURI()); fNamespacesScope = aGrammar.getNamespacesScope(); // attribute type int attType = -1; int enumeration = -1; Element child = checkContent(attGrpDecl, XUtil.getFirstChildElement(attGrpDecl), true); for (; child != null ; child = XUtil.getNextSiblingElement(child)) { //child attribute couldn't be a top-level attribute DEFINITION, if ( child.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTE) ){ String childAttName = child.getAttribute(SchemaSymbols.ATT_NAME); if ( childAttName.length() > 0 ) { Hashtable attDeclRegistry = aGrammar.getAttributeDeclRegistry(); if ((attDeclRegistry != null) && (attDeclRegistry.get((Object)childAttName) != null) ){ addAttributeDeclFromAnotherSchema(childAttName, uriStr, typeInfo); fNamespacesScope = saveNSMapping; fTargetNSURI = saveTargetNSUri; return -1; } else { traverseAttributeDecl(child, typeInfo, false); } } else traverseAttributeDecl(child, typeInfo, false); } else if ( child.getLocalName().equals(SchemaSymbols.ELT_ATTRIBUTEGROUP) ) { traverseAttributeGroupDecl(child, typeInfo, anyAttDecls); } else if ( child.getLocalName().equals(SchemaSymbols.ELT_ANYATTRIBUTE) ) { anyAttDecls.addElement(traverseAnyAttribute(child)); break; } else { // REVISIT: Localize reportGenericSchemaError("Invalid content for attributeGroup"); } } fNamespacesScope = saveNSMapping; fTargetNSURI = saveTargetNSUri; if(child != null) { // REVISIT: Localize reportGenericSchemaError("Invalid content for attributeGroup"); } return -1; } // end of method traverseAttributeGroupFromAnotherSchema // This simple method takes an attribute declaration as a parameter and // returns null if there is no simpleType defined or the simpleType // declaration if one exists. It also throws an error if more than one // <annotation> or <simpleType> group is present. 
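    // Illustrative example (hypothetical declarations, not from the original source):
    //   <attribute name="size"><simpleType> ... </simpleType></attribute>  returns the <simpleType> child,
    //   <attribute name="id" type="xsd:ID"/>                               returns null.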
    private Element findAttributeSimpleType(Element attrDecl) throws Exception {
        Element child = checkContent(attrDecl, XUtil.getFirstChildElement(attrDecl), true);
        // if there is only an annotation, then no simpleType
        if (child == null)
            return null;
        // if the current one is not simpleType, or there are more elements,
        // report an error
        if (!child.getLocalName().equals(SchemaSymbols.ELT_SIMPLETYPE) ||
            XUtil.getNextSiblingElement(child) != null)
            //REVISIT: localize
            reportGenericSchemaError("src-attribute.0: the content must match (annotation?, (simpleType?)) -- attribute declaration '"+
                                     attrDecl.getAttribute(SchemaSymbols.ATT_NAME)+"'");
        if (child.getLocalName().equals(SchemaSymbols.ELT_SIMPLETYPE))
            return child;
        return null;
    } // end findAttributeSimpleType

    /**
     * Traverse element declaration:
     *  <element
     *         abstract = boolean
     *         block = #all or (possibly empty) subset of {substitutionGroup, extension, restriction}
     *         default = string
     *         substitutionGroup = QName
     *         final = #all or (possibly empty) subset of {extension, restriction}
     *         fixed = string
     *         form = qualified | unqualified
     *         id = ID
     *         maxOccurs = string
     *         minOccurs = nonNegativeInteger
     *         name = NCName
     *         nillable = boolean
     *         ref = QName
     *         type = QName>
     *   Content: (annotation? , (simpleType | complexType)? , (unique | key | keyref)*)
     *  </element>
     *
     *
     *  The following are identity-constraint definitions
     *  <unique
     *         id = ID
     *         name = NCName>
     *   Content: (annotation? , (selector , field+))
     *  </unique>
     *
     *  <key
     *         id = ID
     *         name = NCName>
     *   Content: (annotation? , (selector , field+))
     *  </key>
     *
     *  <keyref
     *         id = ID
     *         name = NCName
     *         refer = QName>
     *   Content: (annotation? , (selector , field+))
     *  </keyref>
     *
     *  <selector>
     *   Content: XPathExprApprox : An XPath expression
     *  </selector>
     *
     *  <field>
     *   Content: XPathExprApprox : An XPath expression
     *  </field>
     *
     *
     * @param elementDecl
     * @return
     * @exception Exception
     */
    private QName traverseElementDecl(Element elementDecl) throws Exception {

        // General Attribute Checking
        int scope = isTopLevel(elementDecl)? GeneralAttrCheck.ELE_CONTEXT_GLOBAL: GeneralAttrCheck.ELE_CONTEXT_LOCAL;
        Hashtable attrValues = generalCheck(elementDecl, scope);

        int contentSpecType      = -1;
        int contentSpecNodeIndex = -1;
        int typeNameIndex = -1;
        int scopeDefined = -2; // signal an error if -2 gets through,
                               // since scope can never be -2.
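        // Illustrative example (hypothetical, not from the original source): a local declaration
        // such as <element name="quantity" type="xsd:positiveInteger" minOccurs="0"/> is handled
        // by the branch further below that resolves the "type" attribute (typeAtt != null), rather
        // than by the anonymous <simpleType>/<complexType> child branch.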
DatatypeValidator dv = null; String abstractStr = elementDecl.getAttribute(SchemaSymbols.ATT_ABSTRACT); String blockStr = elementDecl.getAttribute(SchemaSymbols.ATT_BLOCK); String defaultStr = elementDecl.getAttribute(SchemaSymbols.ATT_DEFAULT); String finalStr = elementDecl.getAttribute(SchemaSymbols.ATT_FINAL); String fixedStr = elementDecl.getAttribute(SchemaSymbols.ATT_FIXED); String formStr = elementDecl.getAttribute(SchemaSymbols.ATT_FORM); String maxOccursStr = elementDecl.getAttribute(SchemaSymbols.ATT_MAXOCCURS); String minOccursStr = elementDecl.getAttribute(SchemaSymbols.ATT_MINOCCURS); String nameStr = elementDecl.getAttribute(SchemaSymbols.ATT_NAME); String nillableStr = elementDecl.getAttribute(SchemaSymbols.ATT_NILLABLE); String refStr = elementDecl.getAttribute(SchemaSymbols.ATT_REF); String substitutionGroupStr = elementDecl.getAttribute(SchemaSymbols.ATT_SUBSTITUTIONGROUP); String typeStr = elementDecl.getAttribute(SchemaSymbols.ATT_TYPE); checkEnumerationRequiredNotation(nameStr, typeStr); if ( DEBUGGING ) System.out.println("traversing element decl : " + nameStr ); Attr abstractAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_ABSTRACT); Attr blockAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_BLOCK); Attr defaultAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_DEFAULT); Attr finalAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_FINAL); Attr fixedAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_FIXED); Attr formAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_FORM); Attr maxOccursAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_MAXOCCURS); Attr minOccursAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_MINOCCURS); Attr nameAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_NAME); Attr nillableAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_NILLABLE); Attr refAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_REF); Attr substitutionGroupAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_SUBSTITUTIONGROUP); Attr typeAtt = elementDecl.getAttributeNode(SchemaSymbols.ATT_TYPE); if(defaultAtt != null && fixedAtt != null) // REVISIT: localize reportGenericSchemaError("src-element.1: an element cannot have both \"fixed\" and \"default\" present at the same time"); String fromAnotherSchema = null; if (isTopLevel(elementDecl)) { if(nameAtt == null) // REVISIT: localize reportGenericSchemaError("globally-declared element must have a name"); else if (refAtt != null) // REVISIT: localize reportGenericSchemaError("globally-declared element " + nameStr + " cannot have a ref attribute"); int nameIndex = fStringPool.addSymbol(nameStr); int eltKey = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, nameIndex,TOP_LEVEL_SCOPE); if (eltKey > -1 ) { return new QName(-1,nameIndex,nameIndex,fTargetNSURI); } } // parse out 'block', 'final', 'nillable', 'abstract' if (blockAtt == null) blockStr = null; int blockSet = parseBlockSet(blockStr); if( (blockStr != null) && blockStr.length() != 0 && (!blockStr.equals(SchemaSymbols.ATTVAL_POUNDALL) && (((blockSet & SchemaSymbols.RESTRICTION) == 0) && (((blockSet & SchemaSymbols.EXTENSION) == 0) && ((blockSet & SchemaSymbols.SUBSTITUTION) == 0))))) reportGenericSchemaError("The values of the 'block' attribute of an element must be either #all or a list of 'substitution', 'restriction' and 'extension'; " + blockStr + " was found"); if (finalAtt == null) finalStr = null; int finalSet = parseFinalSet(finalStr); if( (finalStr != null) && finalStr.length() != 0 && (!finalStr.equals(SchemaSymbols.ATTVAL_POUNDALL) && 
(((finalSet & SchemaSymbols.RESTRICTION) == 0) && ((finalSet & SchemaSymbols.EXTENSION) == 0)))) reportGenericSchemaError("The values of the 'final' attribute of an element must be either #all or a list of 'restriction' and 'extension'; " + finalStr + " was found"); boolean isNillable = nillableStr.equals(SchemaSymbols.ATTVAL_TRUE)? true:false; boolean isAbstract = abstractStr.equals(SchemaSymbols.ATTVAL_TRUE)? true:false; int elementMiscFlags = 0; if (isNillable) { elementMiscFlags += SchemaSymbols.NILLABLE; } if (isAbstract) { elementMiscFlags += SchemaSymbols.ABSTRACT; } // make the property of the element's value being fixed also appear in elementMiscFlags if(fixedAtt != null) elementMiscFlags += SchemaSymbols.FIXED; //if this is a reference to a global element if (refAtt != null) { //REVISIT top level check for ref if (abstractAtt != null || blockAtt != null || defaultAtt != null || finalAtt != null || fixedAtt != null || formAtt != null || nillableAtt != null || substitutionGroupAtt != null || typeAtt != null) reportSchemaError(SchemaMessageProvider.BadAttWithRef, null); //src-element.2.2 if (nameAtt != null) // REVISIT: Localize reportGenericSchemaError("src-element.2.1: element " + nameStr + " cannot also have a ref attribute"); Element child = XUtil.getFirstChildElement(elementDecl); if(child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) { if (XUtil.getNextSiblingElement(child) != null) reportSchemaError(SchemaMessageProvider.NoContentForRef, null); else traverseAnnotationDecl(child); } else if (child != null) reportSchemaError(SchemaMessageProvider.NoContentForRef, null); String prefix = ""; String localpart = refStr; int colonptr = refStr.indexOf(":"); if ( colonptr > 0) { prefix = refStr.substring(0,colonptr); localpart = refStr.substring(colonptr+1); } int localpartIndex = fStringPool.addSymbol(localpart); String uriString = resolvePrefixToURI(prefix); QName eltName = new QName(prefix != null ? fStringPool.addSymbol(prefix) : -1, localpartIndex, fStringPool.addSymbol(refStr), uriString != null ? fStringPool.addSymbol(uriString) : StringPool.EMPTY_STRING); //if from another schema, just return the element QName if (! uriString.equals(fTargetNSURIString) ) { return eltName; } int elementIndex = fSchemaGrammar.getElementDeclIndex(eltName, TOP_LEVEL_SCOPE); //if not found, traverse the top level element that if referenced if (elementIndex == -1 ) { Element targetElement = getTopLevelComponentByName(SchemaSymbols.ELT_ELEMENT,localpart); if (targetElement == null ) { // REVISIT: Localize reportGenericSchemaError("Element " + localpart + " not found in the Schema"); //REVISIT, for now, the QName anyway return eltName; //return new QName(-1,fStringPool.addSymbol(localpart), -1, fStringPool.addSymbol(uriString)); } else { // Problem with recursive decls if we attempt the traversal now. 
//eltName= traverseElementDecl(targetElement); } } if (fCurrentScope != TOP_LEVEL_SCOPE) { if (fFullConstraintChecking) { // Add the name to a list of top-level elements we'll need to check later // for consistency wrt type fTopLevelElementsRefdFromGroup.addElement(eltName); fTopLevelElementsRefdFromGroup.addElement(new Integer(fCurrentScope)); } } return eltName; } else if (nameAtt == null) // REVISIT: Localize reportGenericSchemaError("src-element.2.1: a local element must have a name or a ref attribute present"); // Handle the substitutionGroup Element substitutionGroupElementDecl = null; int substitutionGroupElementDeclIndex = -1; boolean noErrorSoFar = true; // resolving the type for this element right here ComplexTypeInfo typeInfo = null; // element has a single child element, either a datatype or a type, null if primitive Element child = XUtil.getFirstChildElement(elementDecl); if(child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) { traverseAnnotationDecl(child); child = XUtil.getNextSiblingElement(child); } if(child != null && child.getLocalName().equals(SchemaSymbols.ELT_ANNOTATION)) // REVISIT: Localize reportGenericSchemaError("element declarations can contain at most one annotation Element Information Item"); boolean haveAnonType = false; // Handle Anonymous type if there is one if (child != null) { String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_COMPLEXTYPE)) { if (child.getAttribute(SchemaSymbols.ATT_NAME).length() > 0) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("anonymous complexType in element '" + nameStr +"' has a name attribute"); } else { // Determine what the type name will be String anonTypeName = genAnonTypeName(child); if (fCurrentTypeNameStack.search((Object)anonTypeName) > - 1) { // A recursing element using an anonymous type int uriInd = StringPool.EMPTY_STRING; if ( formStr.equals(SchemaSymbols.ATTVAL_QUALIFIED)|| fElementDefaultQualified) { uriInd = fTargetNSURI; } int nameIndex = fStringPool.addSymbol(nameStr); QName tempQName = new QName(-1, nameIndex, nameIndex, uriInd); int eltIndex = fSchemaGrammar.addElementDecl(tempQName, fCurrentScope, fCurrentScope, -1, -1, -1, null); fElementRecurseComplex.addElement(new ElementInfo(eltIndex,anonTypeName)); return tempQName; } else { typeNameIndex = traverseComplexTypeDecl(child); if (typeNameIndex != -1 ) { typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(typeNameIndex)); } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("traverse complexType error in element '" + nameStr +"'"); } } } haveAnonType = true; child = XUtil.getNextSiblingElement(child); } else if (childName.equals(SchemaSymbols.ELT_SIMPLETYPE)) { if (child.getAttribute(SchemaSymbols.ATT_NAME).length() > 0) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("anonymous simpleType in element '" + nameStr +"' has a name attribute"); } else typeNameIndex = traverseSimpleTypeDecl(child); if (typeNameIndex != -1) { dv = fDatatypeRegistry.getDatatypeValidator(fStringPool.toString(typeNameIndex)); } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("traverse simpleType error in element '" + nameStr +"'"); } contentSpecType = XMLElementDecl.TYPE_SIMPLE; haveAnonType = true; child = XUtil.getNextSiblingElement(child); } else if (typeAtt == null) { // "ur-typed" leaf contentSpecType = XMLElementDecl.TYPE_ANY; //REVISIT: is this right? 
//contentSpecType = fStringPool.addSymbol("UR_TYPE"); // set occurrence count contentSpecNodeIndex = -1; } // see if there's something here; it had better be key, keyref or unique. if (child != null) childName = child.getLocalName(); while ((child != null) && ((childName.equals(SchemaSymbols.ELT_KEY)) || (childName.equals(SchemaSymbols.ELT_KEYREF)) || (childName.equals(SchemaSymbols.ELT_UNIQUE)))) { child = XUtil.getNextSiblingElement(child); if (child != null) { childName = child.getLocalName(); } } if (child != null) { // REVISIT: Localize noErrorSoFar = false; reportGenericSchemaError("src-element.0: the content of an element information item must match (annotation?, (simpleType | complexType)?, (unique | key | keyref)*)"); } } // handle type="" here if (haveAnonType && (typeAtt != null)) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError( "src-element.3: Element '"+ nameStr + "' have both a type attribute and a annoymous type child" ); } // type specified as an attribute and no child is type decl. else if (typeAtt != null) { String prefix = ""; String localpart = typeStr; int colonptr = typeStr.indexOf(":"); if ( colonptr > 0) { prefix = typeStr.substring(0,colonptr); localpart = typeStr.substring(colonptr+1); } String typeURI = resolvePrefixToURI(prefix); // check if the type is from the same Schema if ( !typeURI.equals(fTargetNSURIString) && !typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && typeURI.length() != 0) { // REVISIT, only needed because of resolvePrifixToURI. fromAnotherSchema = typeURI; typeInfo = getTypeInfoFromNS(typeURI, localpart); if (typeInfo == null) { dv = getTypeValidatorFromNS(typeURI, localpart); if (dv == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type " +localpart + " in schema " + typeURI); } } } else { typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(typeURI+","+localpart); if (typeInfo == null) { dv = getDatatypeValidator(typeURI, localpart); if (dv == null ) if (typeURI.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA) && !fTargetNSURIString.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("type not found : " + typeURI+":"+localpart); } else { Element topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_COMPLEXTYPE,localpart); if (topleveltype != null) { if (fCurrentTypeNameStack.search((Object)localpart) > - 1) { //then we found a recursive element using complexType. // REVISIT: this will be broken when recursing happens between 2 schemas int uriInd = StringPool.EMPTY_STRING; if ( formStr.equals(SchemaSymbols.ATTVAL_QUALIFIED)|| fElementDefaultQualified) { uriInd = fTargetNSURI; } int nameIndex = fStringPool.addSymbol(nameStr); QName tempQName = new QName(-1, nameIndex, nameIndex, uriInd); int eltIndex = fSchemaGrammar.addElementDecl(tempQName, fCurrentScope, fCurrentScope, -1, -1, -1, null); fElementRecurseComplex.addElement(new ElementInfo(eltIndex,localpart)); return tempQName; } else { // Squirrel away the baseTypeNameStack. 
Stack savedbaseNameStack = null; if (!fBaseTypeNameStack.isEmpty()) { savedbaseNameStack = fBaseTypeNameStack; fBaseTypeNameStack = new Stack(); } typeNameIndex = traverseComplexTypeDecl( topleveltype, true ); if (savedbaseNameStack != null) fBaseTypeNameStack = savedbaseNameStack; typeInfo = (ComplexTypeInfo) fComplexTypeRegistry.get(fStringPool.toString(typeNameIndex)); } } else { topleveltype = getTopLevelComponentByName(SchemaSymbols.ELT_SIMPLETYPE, localpart); if (topleveltype != null) { typeNameIndex = traverseSimpleTypeDecl( topleveltype ); dv = getDatatypeValidator(typeURI, localpart); } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("type not found : " + typeURI+":"+localpart); } } } } } } // now we need to make sure that our substitution (if any) // is valid, now that we have all the requisite type-related info. String substitutionGroupUri = null; String substitutionGroupLocalpart = null; String substitutionGroupFullName = null; ComplexTypeInfo substitutionGroupEltTypeInfo = null; DatatypeValidator substitutionGroupEltDV = null; SchemaGrammar subGrammar = fSchemaGrammar; boolean ignoreSub = false; if ( substitutionGroupStr.length() > 0 ) { if(refAtt != null) // REVISIT: Localize reportGenericSchemaError("a local element cannot have a substitutionGroup"); substitutionGroupUri = resolvePrefixToURI(getPrefix(substitutionGroupStr)); substitutionGroupLocalpart = getLocalPart(substitutionGroupStr); substitutionGroupFullName = substitutionGroupUri+","+substitutionGroupLocalpart; if ( !substitutionGroupUri.equals(fTargetNSURIString) ) { Grammar grammar = fGrammarResolver.getGrammar(substitutionGroupUri); if (grammar != null && grammar instanceof SchemaGrammar) { subGrammar = (SchemaGrammar) grammar; substitutionGroupElementDeclIndex = subGrammar.getElementDeclIndex(fStringPool.addSymbol(substitutionGroupUri), fStringPool.addSymbol(substitutionGroupLocalpart), TOP_LEVEL_SCOPE); if (substitutionGroupElementDeclIndex<=-1) { // REVISIT: localize noErrorSoFar = false; reportGenericSchemaError("couldn't find substitutionGroup " + substitutionGroupLocalpart + " referenced by element " + nameStr + " in the SchemaGrammar "+substitutionGroupUri); } else { substitutionGroupEltTypeInfo = getElementDeclTypeInfoFromNS(substitutionGroupUri, substitutionGroupLocalpart); if (substitutionGroupEltTypeInfo == null) { substitutionGroupEltDV = getElementDeclTypeValidatorFromNS(substitutionGroupUri, substitutionGroupLocalpart); if (substitutionGroupEltDV == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type for element '" +substitutionGroupLocalpart + "' in schema '" + substitutionGroupUri+"'"); } } } } else { // REVISIT: locallize noErrorSoFar = false; reportGenericSchemaError("couldn't find a schema grammar with target namespace " + substitutionGroupUri); } } else { substitutionGroupElementDecl = getTopLevelComponentByName(SchemaSymbols.ELT_ELEMENT, substitutionGroupLocalpart); if (substitutionGroupElementDecl == null) { substitutionGroupElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, getLocalPartIndex(substitutionGroupStr),TOP_LEVEL_SCOPE); if ( substitutionGroupElementDeclIndex == -1) { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError("unable to locate substitutionGroup affiliation element " +substitutionGroupStr +" in element declaration " +nameStr); } } else { substitutionGroupElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, 
getLocalPartIndex(substitutionGroupStr),TOP_LEVEL_SCOPE); if ( substitutionGroupElementDeclIndex == -1) { // check for mutual recursion! if(fSubstitutionGroupRecursionRegistry.contains(fTargetNSURIString+","+substitutionGroupElementDecl.getAttribute(SchemaSymbols.ATT_NAME))) { ignoreSub = true; } else { fSubstitutionGroupRecursionRegistry.addElement(fTargetNSURIString+","+substitutionGroupElementDecl.getAttribute(SchemaSymbols.ATT_NAME)); traverseElementDecl(substitutionGroupElementDecl); substitutionGroupElementDeclIndex = fSchemaGrammar.getElementDeclIndex(fTargetNSURI, getLocalPartIndex(substitutionGroupStr),TOP_LEVEL_SCOPE); fSubstitutionGroupRecursionRegistry.removeElement((Object)fTargetNSURIString+","+substitutionGroupElementDecl.getAttribute(SchemaSymbols.ATT_NAME)); } } } if (!ignoreSub && substitutionGroupElementDeclIndex != -1) { substitutionGroupEltTypeInfo = fSchemaGrammar.getElementComplexTypeInfo( substitutionGroupElementDeclIndex ); if (substitutionGroupEltTypeInfo == null) { fSchemaGrammar.getElementDecl(substitutionGroupElementDeclIndex, fTempElementDecl); substitutionGroupEltDV = fTempElementDecl.datatypeValidator; if (substitutionGroupEltDV == null) { //TO DO: report error here; noErrorSoFar = false; reportGenericSchemaError("Could not find type for element '" +substitutionGroupLocalpart + "' in schema '" + substitutionGroupUri+"'"); } } } } if(!ignoreSub) checkSubstitutionGroupOK(elementDecl, substitutionGroupElementDecl, noErrorSoFar, substitutionGroupElementDeclIndex, subGrammar, typeInfo, substitutionGroupEltTypeInfo, dv, substitutionGroupEltDV); } // this element is ur-type, check its substitutionGroup affiliation. // if there is substitutionGroup affiliation and not type definition found for this element, // then grab substitutionGroup affiliation's type and give it to this element if ( noErrorSoFar && typeInfo == null && dv == null ) { typeInfo = substitutionGroupEltTypeInfo; dv = substitutionGroupEltDV; } if (typeInfo == null && dv==null) { if (noErrorSoFar) { // Actually this Element's type definition is ur-type; contentSpecType = XMLElementDecl.TYPE_ANY; // REVISIT, need to wait till we have wildcards implementation. // ADD attribute wildcards here } else { noErrorSoFar = false; // REVISIT: Localize reportGenericSchemaError ("untyped element : " + nameStr ); } } // if element belongs to a compelx type if (typeInfo!=null) { contentSpecNodeIndex = typeInfo.contentSpecHandle; contentSpecType = typeInfo.contentType; scopeDefined = typeInfo.scopeDefined; dv = typeInfo.datatypeValidator; } // if element belongs to a simple type if (dv!=null) { contentSpecType = XMLElementDecl.TYPE_SIMPLE; if (typeInfo == null) { fromAnotherSchema = null; // not to switch schema in this case } } // Now we can handle validation etc. of default and fixed attributes, // since we finally have all the type information. 
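        // Illustrative example (hypothetical, not from the original source): for
        // <element name="currency" type="xsd:string" fixed="USD"/> the fixed value "USD" is copied
        // into defaultStr just below and checked against the string validator; a declaration whose
        // type has element-only content would instead trigger the e-props-correct.2.1 error.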
if(fixedAtt != null) defaultStr = fixedStr; if(defaultStr.length() != 0) { if(typeInfo != null && (typeInfo.contentType != XMLElementDecl.TYPE_MIXED_SIMPLE && typeInfo.contentType != XMLElementDecl.TYPE_MIXED_COMPLEX && typeInfo.contentType != XMLElementDecl.TYPE_SIMPLE)) { // REVISIT: Localize reportGenericSchemaError ("e-props-correct.2.1: element " + nameStr + " has a fixed or default value and must have a mixed or simple content model"); } if(typeInfo != null && (typeInfo.contentType == XMLElementDecl.TYPE_MIXED_SIMPLE || typeInfo.contentType == XMLElementDecl.TYPE_MIXED_COMPLEX)) { if (!particleEmptiable(typeInfo.contentSpecHandle)) reportGenericSchemaError ("e-props-correct.2.2.2: for element " + nameStr + ", the {content type} is mixed, then the {content type}'s particle must be emptiable"); } try { if(dv != null) { dv.validate(defaultStr, null); } } catch (InvalidDatatypeValueException ide) { reportGenericSchemaError ("e-props-correct.2: invalid fixed or default value '" + defaultStr + "' in element " + nameStr); } } if (defaultStr.length() != 0 && dv != null && dv instanceof IDDatatypeValidator) { reportGenericSchemaError ("e-props-correct.4: If the {type definition} or {type definition}'s {content type} is or is derived from ID then there must not be a {value constraint} -- element " + nameStr); } // Create element decl int elementNameIndex = fStringPool.addSymbol(nameStr); int localpartIndex = elementNameIndex; int uriIndex = StringPool.EMPTY_STRING; int enclosingScope = fCurrentScope; //refer to 4.3.2 in "XML Schema Part 1: Structures" if ( isTopLevel(elementDecl)) { uriIndex = fTargetNSURI; enclosingScope = TOP_LEVEL_SCOPE; } else if ( !formStr.equals(SchemaSymbols.ATTVAL_UNQUALIFIED) && (( formStr.equals(SchemaSymbols.ATTVAL_QUALIFIED)|| fElementDefaultQualified ))) { uriIndex = fTargetNSURI; } QName eltQName = new QName(-1,localpartIndex,elementNameIndex,uriIndex); // Check if an element exists at this scope. // If it does, check it against the type of the new element int existingEltNdx = fSchemaGrammar.getElementDeclIndex(eltQName.uri, eltQName.localpart,enclosingScope); if (existingEltNdx > -1) { if (!checkDuplicateElementTypes(existingEltNdx,typeInfo,dv)) { noErrorSoFar = false; reportGenericSchemaError("duplicate element decl in the same scope with different types : " + fStringPool.toString(localpartIndex)); } } // add element decl to pool int attrListHead = -1 ; // copy up attribute decls from type object if (typeInfo != null) { attrListHead = typeInfo.attlistHead; } int elementIndex = fSchemaGrammar.addElementDecl(eltQName, enclosingScope, scopeDefined, contentSpecType, contentSpecNodeIndex, attrListHead, dv); if ( DEBUGGING ) { System.out.println(" + fStringPool.toString(eltQName.localpart) + ")"+ " eltType:"+typeStr+" contentSpecType:"+contentSpecType+ " SpecNodeIndex:"+ contentSpecNodeIndex +" enclosingScope: " +enclosingScope + " scopeDefined: " +scopeDefined+"\n"); } fSchemaGrammar.setElementComplexTypeInfo(elementIndex, typeInfo); // REVISIT: should we report error if typeInfo was null? // mark element if its type belongs to different Schema. 
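        // Descriptive comment: once the element declaration has been added to the grammar, the
        // code below records its remaining properties - the schema URI it came from, the block
        // and final sets, the misc flags (nillable/abstract), the default value, and the
        // substitutionGroup affiliation (registered in both directions on the head element's
        // grammar). Child key/keyref/unique constraints are only collected into
        // fIdentityConstraints here; they are traversed later, once all element declarations
        // are known.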
fSchemaGrammar.setElementFromAnotherSchemaURI(elementIndex, fromAnotherSchema); // set BlockSet, FinalSet, Nillable and Abstract for this element decl fSchemaGrammar.setElementDeclBlockSet(elementIndex, blockSet); fSchemaGrammar.setElementDeclFinalSet(elementIndex, finalSet); fSchemaGrammar.setElementDeclMiscFlags(elementIndex, elementMiscFlags); fSchemaGrammar.setElementDefault(elementIndex, defaultStr); // setSubstitutionGroupElementFullName fSchemaGrammar.setElementDeclSubstitutionGroupAffFullName(elementIndex, substitutionGroupFullName); // substitutionGroup: double-direction if ( substitutionGroupStr.length() > 0 && !ignoreSub) { subGrammar.addElementDeclOneSubstitutionGroupQName(substitutionGroupElementDeclIndex, eltQName, fSchemaGrammar, elementIndex); } // key/keyref/unique processing Element ic = XUtil.getFirstChildElementNS(elementDecl, IDENTITY_CONSTRAINTS); if (ic != null) { Integer elementIndexObj = new Integer(elementIndex); Vector identityConstraints = (Vector)fIdentityConstraints.get(elementIndexObj); if (identityConstraints == null) { identityConstraints = new Vector(); fIdentityConstraints.put(elementIndexObj, identityConstraints); } while (ic != null) { if (DEBUG_IC_DATATYPES) { System.out.println("<ICD>: adding ic for later traversal: "+ic); } identityConstraints.addElement(ic); ic = XUtil.getNextSiblingElementNS(ic, IDENTITY_CONSTRAINTS); } } return eltQName; }// end of method traverseElementDecl(Element) private boolean checkDuplicateElementTypes(int eltNdx, ComplexTypeInfo typeInfo, DatatypeValidator dv) { fSchemaGrammar.getElementDecl(eltNdx, fTempElementDecl); DatatypeValidator edv = fTempElementDecl.datatypeValidator; ComplexTypeInfo eTypeInfo = fSchemaGrammar.getElementComplexTypeInfo(eltNdx); if ( ((eTypeInfo != null)&&(eTypeInfo!=typeInfo)) || ((edv != null)&&(edv != dv)) ) return false; else return true; } private void traverseIdentityNameConstraintsFor(int elementIndex, Vector identityConstraints) throws Exception { // iterate over identity constraints for this element int size = identityConstraints != null ? identityConstraints.size() : 0; if (size > 0) { // REVISIT: Use cached copy. -Ac XMLElementDecl edecl = new XMLElementDecl(); fSchemaGrammar.getElementDecl(elementIndex, edecl); for (int i = 0; i < size; i++) { Element ic = (Element)identityConstraints.elementAt(i); String icName = ic.getLocalName(); if ( icName.equals(SchemaSymbols.ELT_KEY) ) { traverseKey(ic, edecl); } else if ( icName.equals(SchemaSymbols.ELT_UNIQUE) ) { traverseUnique(ic, edecl); } fSchemaGrammar.setElementDecl(elementIndex, edecl); } // loop over vector elements } // if size > 0 } // traverseIdentityNameConstraints(Vector) private void traverseIdentityRefConstraintsFor(int elementIndex, Vector identityConstraints) throws Exception { // iterate over identity constraints for this element int size = identityConstraints != null ? identityConstraints.size() : 0; if (size > 0) { // REVISIT: Use cached copy. 
-Ac XMLElementDecl edecl = new XMLElementDecl(); fSchemaGrammar.getElementDecl(elementIndex, edecl); for (int i = 0; i < size; i++) { Element ic = (Element)identityConstraints.elementAt(i); String icName = ic.getLocalName(); if ( icName.equals(SchemaSymbols.ELT_KEYREF) ) { traverseKeyRef(ic, edecl); } fSchemaGrammar.setElementDecl(elementIndex, edecl); } // loop over vector elements } // if size > 0 } // traverseIdentityRefConstraints(Vector) private void traverseUnique(Element uElem, XMLElementDecl eDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(uElem, scope); // create identity constraint String uName = uElem.getAttribute(SchemaSymbols.ATT_NAME); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: traverseUnique(\""+uElem.getNodeName()+"\") ["+uName+']'); } String eName = getElementNameFor(uElem); Unique unique = new Unique(uName, eName); if(fIdentityConstraintNames.get(fTargetNSURIString+","+uName) != null) { reportGenericSchemaError("More than one identity constraint named " + uName); } fIdentityConstraintNames.put(fTargetNSURIString+","+uName, unique); // get selector and fields traverseIdentityConstraint(unique, uElem); // add to element decl eDecl.unique.addElement(unique); } // traverseUnique(Element,XMLElementDecl) private void traverseKey(Element kElem, XMLElementDecl eDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(kElem, scope); // create identity constraint String kName = kElem.getAttribute(SchemaSymbols.ATT_NAME); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: traverseKey(\""+kElem.getNodeName()+"\") ["+kName+']'); } String eName = getElementNameFor(kElem); Key key = new Key(kName, eName); if(fIdentityConstraintNames.get(fTargetNSURIString+","+kName) != null) { reportGenericSchemaError("More than one identity constraint named " + kName); } fIdentityConstraintNames.put(fTargetNSURIString+","+kName, key); // get selector and fields traverseIdentityConstraint(key, kElem); // add to element decl eDecl.key.addElement(key); } // traverseKey(Element,XMLElementDecl) private void traverseKeyRef(Element krElem, XMLElementDecl eDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(krElem, scope); // create identity constraint String krName = krElem.getAttribute(SchemaSymbols.ATT_NAME); String kName = krElem.getAttribute(SchemaSymbols.ATT_REFER); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: traverseKeyRef(\""+krElem.getNodeName()+"\") ["+krName+','+kName+']'); } if(fIdentityConstraintNames.get(fTargetNSURIString+","+krName) != null) { reportGenericSchemaError("More than one identity constraint named " + krName); } // verify that key reference "refer" attribute is valid String prefix = ""; String localpart = kName; int colonptr = kName.indexOf(":"); if ( colonptr > 0) { prefix = kName.substring(0,colonptr); localpart = kName.substring(colonptr+1); } String uriStr = resolvePrefixToURI(prefix); IdentityConstraint kId = (IdentityConstraint)fIdentityConstraintNames.get(uriStr+","+localpart); if (kId== null) { reportSchemaError(SchemaMessageProvider.KeyRefReferNotFound, new Object[]{krName,kName}); return; } String eName = getElementNameFor(krElem); KeyRef keyRef = new KeyRef(krName, kId, eName); // add to element decl traverseIdentityConstraint(keyRef, krElem); // add key reference to element 
decl eDecl.keyRef.addElement(keyRef); // store in fIdentityConstraintNames so can flag schemas in which multiple // keyrefs with the same name are present. fIdentityConstraintNames.put(fTargetNSURIString+","+krName, keyRef); } // traverseKeyRef(Element,XMLElementDecl) private void traverseIdentityConstraint(IdentityConstraint ic, Element icElem) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(icElem, scope); // check for <annotation> and get selector Element sElem = XUtil.getFirstChildElement(icElem); if(sElem == null) { // REVISIT: localize reportGenericSchemaError("The content of an identity constraint must match (annotation?, selector, field+)"); return; } sElem = checkContent( icElem, sElem, false); // General Attribute Checking attrValues = generalCheck(sElem, scope); if(!sElem.getLocalName().equals(SchemaSymbols.ELT_SELECTOR)) { // REVISIT: localize reportGenericSchemaError("The content of an identity constraint must match (annotation?, selector, field+)"); } // and make sure <selector>'s content is fine: checkContent(icElem, XUtil.getFirstChildElement(sElem), true); String sText = sElem.getAttribute(SchemaSymbols.ATT_XPATH); sText = sText.trim(); Selector.XPath sXpath = null; try { // REVISIT: Must get ruling from XML Schema working group // regarding whether steps in the XPath must be // fully qualified if the grammar has a target // namespace. -Ac // RESOLUTION: Yes. sXpath = new Selector.XPath(sText, fStringPool, fNamespacesScope); Selector selector = new Selector(sXpath, ic); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: selector: "+selector); } ic.setSelector(selector); } catch (XPathException e) { // REVISIT: Add error message. reportGenericSchemaError(e.getMessage()); return; } // get fields Element fElem = XUtil.getNextSiblingElement(sElem); if(fElem == null) { // REVISIT: localize reportGenericSchemaError("The content of an identity constraint must match (annotation?, selector, field+)"); } while (fElem != null) { // General Attribute Checking attrValues = generalCheck(fElem, scope); if(!fElem.getLocalName().equals(SchemaSymbols.ELT_FIELD)) // REVISIT: localize reportGenericSchemaError("The content of an identity constraint must match (annotation?, selector, field+)"); // and make sure <field>'s content is fine: checkContent(icElem, XUtil.getFirstChildElement(fElem), true); String fText = fElem.getAttribute(SchemaSymbols.ATT_XPATH); fText = fText.trim(); try { // REVISIT: Must get ruling from XML Schema working group // regarding whether steps in the XPath must be // fully qualified if the grammar has a target // namespace. -Ac // RESOLUTION: Yes. Field.XPath fXpath = new Field.XPath(fText, fStringPool, fNamespacesScope); // REVISIT: Get datatype validator. -Ac // cannot statically determine type of field; not just because of descendant/union // but because of <any> and <anyAttribute>. - NG // DatatypeValidator validator = getDatatypeValidatorFor(parent, sXpath, fXpath); // if (DEBUG_IC_DATATYPES) { // System.out.println("<ICD>: datatype validator: "+validator); // must find DatatypeValidator in the Validator... Field field = new Field(fXpath, ic); if (DEBUG_IDENTITY_CONSTRAINTS) { System.out.println("<IC>: field: "+field); } ic.addField(field); } catch (XPathException e) { // REVISIT: Add error message. 
reportGenericSchemaError(e.getMessage()); return; } fElem = XUtil.getNextSiblingElement(fElem); } } // traverseIdentityConstraint(IdentityConstraint,Element) /* This code is no longer used because datatypes can't be found statically for ID constraints. private DatatypeValidator getDatatypeValidatorFor(Element element, Selector.XPath sxpath, Field.XPath fxpath) throws Exception { // variables String ename = element.getAttribute("name"); if (DEBUG_IC_DATATYPES) { System.out.println("<ICD>: XMLValidator#getDatatypeValidatorFor("+ ename+','+sxpath+','+fxpath+')'); } int localpart = fStringPool.addSymbol(ename); String targetNamespace = fSchemaRootElement.getAttribute("targetNamespace"); int uri = fStringPool.addSymbol(targetNamespace); int edeclIndex = fSchemaGrammar.getElementDeclIndex(uri, localpart, Grammar.TOP_LEVEL_SCOPE); // walk selector XPath.LocationPath spath = sxpath.getLocationPath(); XPath.Step[] ssteps = spath.steps; for (int i = 0; i < ssteps.length; i++) { XPath.Step step = ssteps[i]; XPath.Axis axis = step.axis; XPath.NodeTest nodeTest = step.nodeTest; switch (axis.type) { case XPath.Axis.ATTRIBUTE: { // REVISIT: Add message. -Ac reportGenericSchemaError("not allowed to select attribute"); return null; } case XPath.Axis.CHILD: { int index = fSchemaGrammar.getElementDeclIndex(nodeTest.name, edeclIndex); if (index == -1) { index = fSchemaGrammar.getElementDeclIndex(nodeTest.name, Grammar.TOP_LEVEL_SCOPE); } if (index == -1) { // REVISIT: Add message. -Ac reportGenericSchemaError("no such element \""+fStringPool.toString(nodeTest.name.rawname)+'"'); return null; } edeclIndex = index; break; } case XPath.Axis.SELF: { // no-op break; } default: { // REVISIT: Add message. -Ac reportGenericSchemaError("invalid selector axis"); return null; } } } // walk field XPath.LocationPath fpath = fxpath.getLocationPath(); XPath.Step[] fsteps = fpath.steps; for (int i = 0; i < fsteps.length; i++) { XPath.Step step = fsteps[i]; XPath.Axis axis = step.axis; XPath.NodeTest nodeTest = step.nodeTest; switch (axis.type) { case XPath.Axis.ATTRIBUTE: { if (i != fsteps.length - 1) { // REVISIT: Add message. -Ac reportGenericSchemaError("attribute must be last step"); return null; } // look up validator int adeclIndex = fSchemaGrammar.getAttributeDeclIndex(edeclIndex, nodeTest.name); if (adeclIndex == -1) { // REVISIT: Add message. -Ac reportGenericSchemaError("no such attribute \""+fStringPool.toString(nodeTest.name.rawname)+'"'); } XMLAttributeDecl adecl = new XMLAttributeDecl(); fSchemaGrammar.getAttributeDecl(adeclIndex, adecl); DatatypeValidator validator = adecl.datatypeValidator; return validator; } case XPath.Axis.CHILD: { int index = fSchemaGrammar.getElementDeclIndex(nodeTest.name, edeclIndex); if (index == -1) { index = fSchemaGrammar.getElementDeclIndex(nodeTest.name, Grammar.TOP_LEVEL_SCOPE); } if (index == -1) { // REVISIT: Add message. -Ac reportGenericSchemaError("no such element \""+fStringPool.toString(nodeTest.name.rawname)+'"'); return null; } edeclIndex = index; if (i < fsteps.length - 1) { break; } // NOTE: Let fall through to self case so that we // avoid duplicating code. -Ac } case XPath.Axis.SELF: { // look up validator, if needed if (i == fsteps.length - 1) { XMLElementDecl edecl = new XMLElementDecl(); fSchemaGrammar.getElementDecl(edeclIndex, edecl); if (edecl.type != XMLElementDecl.TYPE_SIMPLE) { // REVISIT: Add message. 
-Ac reportGenericSchemaError("selected element is not of simple type"); return null; } DatatypeValidator validator = edecl.datatypeValidator; if (validator == null) validator = new StringDatatypeValidator(); return validator; } break; } default: { // REVISIT: Add message. -Ac reportGenericSchemaError("invalid selector axis"); return null; } } } // no validator! // REVISIT: Add message. -Ac reportGenericSchemaError("No datatype validator for field "+fxpath+ " of element "+ename); return null; } // getDatatypeValidatorFor(XPath):DatatypeValidator */ // back in to live code... private String getElementNameFor(Element icnode) { Element enode = (Element)icnode.getParentNode(); String ename = enode.getAttribute("name"); if (ename.length() == 0) { ename = enode.getAttribute("ref"); } return ename; } // getElementNameFor(Element):String int getLocalPartIndex(String fullName){ int colonAt = fullName.indexOf(":"); String localpart = fullName; if ( colonAt > -1 ) { localpart = fullName.substring(colonAt+1); } return fStringPool.addSymbol(localpart); } String getLocalPart(String fullName){ int colonAt = fullName.indexOf(":"); String localpart = fullName; if ( colonAt > -1 ) { localpart = fullName.substring(colonAt+1); } return localpart; } int getPrefixIndex(String fullName){ int colonAt = fullName.indexOf(":"); String prefix = ""; if ( colonAt > -1 ) { prefix = fullName.substring(0,colonAt); } return fStringPool.addSymbol(prefix); } String getPrefix(String fullName){ int colonAt = fullName.indexOf(":"); String prefix = ""; if ( colonAt > -1 ) { prefix = fullName.substring(0,colonAt); } return prefix; } private void checkSubstitutionGroupOK(Element elementDecl, Element substitutionGroupElementDecl, boolean noErrorSoFar, int substitutionGroupElementDeclIndex, SchemaGrammar substitutionGroupGrammar, ComplexTypeInfo typeInfo, ComplexTypeInfo substitutionGroupEltTypeInfo, DatatypeValidator dv, DatatypeValidator substitutionGroupEltDV) throws Exception { // if final="#all" or final="extension restriction" // then it can't be substituted at all (according to 3.3.1) // ??? REVISIT: but it's not mentioned in 3.3.6, where it should be. int finalSet = substitutionGroupGrammar.getElementDeclFinalSet(substitutionGroupElementDeclIndex); if ((finalSet&SchemaSymbols.RESTRICTION) != 0 && (finalSet&SchemaSymbols.EXTENSION) != 0) { reportGenericSchemaError("element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " cannot be part of the substitution group headed by " + substitutionGroupElementDecl.getAttribute(SchemaSymbols.ATT_NAME)); } // here we must do two things: // 1. Make sure there actually *is* a relation between the types of // the element being nominated and the element doing the nominating; // (see PR 3.3.6 point #3 in the first tableau, for instance; this // and the corresponding tableaux from 3.4.6 and 3.14.6 rule out the nominated // element having an anonymous type declaration. // 2. Make sure the nominated element allows itself to be nominated by // an element with the given type-relation. // Note: we assume that (complex|simple)Type processing checks // whether the type in question allows itself to // be modified as this element desires. // Check for type relationship; // that is, make sure that the type we're deriving has some relationship // to substitutionGroupElt's type. 
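        // Descriptive comment: the derivation check below has two main branches.
        //  - typeInfo != null (complex type): if the base is another complex type, the
        //    baseComplexTypeInfo chain is walked by type name looking for the head element's
        //    type; if the base is a simple type, the DatatypeValidator base chain is walked
        //    instead, trying each member of a union head type individually. Validator
        //    comparisons are by object reference, which the original WARNING comments already
        //    flag as suspect. The head element's final set must also permit the derivation method.
        //  - dv != null (simple type): delegate to checkSimpleTypeDerivationOK, then make sure
        //    the head element's final set does not forbid restriction.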
if (typeInfo != null) { // if the two types are the same, just return if (substitutionGroupEltTypeInfo == typeInfo) return; int derivationMethod = typeInfo.derivedBy; if(typeInfo.baseComplexTypeInfo == null) { if (typeInfo.baseDataTypeValidator != null) { // take care of complexType based on simpleType case... DatatypeValidator dTemp = typeInfo.baseDataTypeValidator; for(; dTemp != null; dTemp = dTemp.getBaseValidator()) { // WARNING!!! This uses comparison by reference andTemp is thus inherently suspect! if(dTemp == substitutionGroupEltDV) break; } if (dTemp == null) { if(substitutionGroupEltDV instanceof UnionDatatypeValidator) { // dv must derive from one of its members... Vector subUnionMemberDV = ((UnionDatatypeValidator)substitutionGroupEltDV).getBaseValidators(); int subUnionSize = subUnionMemberDV.size(); boolean found = false; for (int i=0; i<subUnionSize && !found; i++) { DatatypeValidator dTempSub = (DatatypeValidator)subUnionMemberDV.elementAt(i); DatatypeValidator dTempOrig = typeInfo.baseDataTypeValidator; for(; dTempOrig != null; dTempOrig = dTempOrig.getBaseValidator()) { // WARNING!!! This uses comparison by reference andTemp is thus inherently suspect! if(dTempSub == dTempOrig) { found = true; break; } } } if(!found) { // REVISIT: localize reportGenericSchemaError("Element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " has a type which does not derive from the type of the element at the head of the substitution group"); noErrorSoFar = false; } } else { // REVISIT: localize reportGenericSchemaError("Element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " has a type which does not derive from the type of the element at the head of the substitution group"); noErrorSoFar = false; } } else { // now let's see if substitutionGroup element allows this: if((derivationMethod & finalSet) != 0) { noErrorSoFar = false; // REVISIT: localize reportGenericSchemaError("element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " cannot be part of the substitution group headed by " + substitutionGroupElementDecl.getAttribute(SchemaSymbols.ATT_NAME)); } } } else { // REVISIT: localize reportGenericSchemaError("Element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " which is part of a substitution must have a type which derives from the type of the element at the head of the substitution group"); noErrorSoFar = false; } } else { String eltBaseName = typeInfo.baseComplexTypeInfo.typeName; ComplexTypeInfo subTypeInfo = substitutionGroupEltTypeInfo; for (; subTypeInfo != null && !subTypeInfo.typeName.equals(eltBaseName); subTypeInfo = subTypeInfo.baseComplexTypeInfo); if (subTypeInfo == null) { // then this type isn't in the chain... // REVISIT: localize reportGenericSchemaError("Element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " has a type whose base is " + eltBaseName + "; this basetype does not derive from the type of the element at the head of the substitution group"); noErrorSoFar = false; } else { // type is fine; does substitutionElement allow this? if((derivationMethod & finalSet) != 0) { noErrorSoFar = false; // REVISIT: localize reportGenericSchemaError("element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " cannot be part of the substitution group headed by " + substitutionGroupElementDecl.getAttribute(SchemaSymbols.ATT_NAME)); } } } } else if (dv != null) { // do simpleType case... // if the two types are the same, just return if (dv == substitutionGroupEltDV) return; // first, check for type relation. 
if (!(checkSimpleTypeDerivationOK(dv,substitutionGroupEltDV))) { // REVISIT: localize reportGenericSchemaError("Element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " has a type which does not derive from the type of the element at the head of the substitution group"); noErrorSoFar = false; } else { // now let's see if substitutionGroup element allows this: if((SchemaSymbols.RESTRICTION & finalSet) != 0) { noErrorSoFar = false; // REVISIT: localize reportGenericSchemaError("element " + elementDecl.getAttribute(SchemaSymbols.ATT_NAME) + " cannot be part of the substitution group headed by " + substitutionGroupElementDecl.getAttribute(SchemaSymbols.ATT_NAME)); } } } } // A utility method to check whether a particular datatypevalidator d, was validly // derived from another datatypevalidator, b private boolean checkSimpleTypeDerivationOK(DatatypeValidator d, DatatypeValidator b) { DatatypeValidator dTemp = d; for(; dTemp != null; dTemp = dTemp.getBaseValidator()) { // WARNING!!! This uses comparison by reference andTemp is thus inherently suspect! if(dTemp == b) break; } if (dTemp == null) { // now if b is a union, then we can // derive from it if we derive from any of its members' types. if(b instanceof UnionDatatypeValidator) { // d must derive from one of its members... Vector subUnionMemberDV = ((UnionDatatypeValidator)b).getBaseValidators(); int subUnionSize = subUnionMemberDV.size(); boolean found = false; for (int i=0; i<subUnionSize && !found; i++) { DatatypeValidator dTempSub = (DatatypeValidator)subUnionMemberDV.elementAt(i); DatatypeValidator dTempOrig = d; for(; dTempOrig != null; dTempOrig = dTempOrig.getBaseValidator()) { // WARNING!!! This uses comparison by reference andTemp is thus inherently suspect! if(dTempSub == dTempOrig) { found = true; break; } } } if(!found) { return false; } } else { return false; } } return true; } // this originally-simple method is much -complicated by the fact that, when we're // redefining something, we've not only got to look at the space of the thing // we're redefining but at the original schema too. // The idea is to start from the top, then go down through // our list of schemas until we find what we aant. // This should not often be necessary, because we've processed // all redefined schemas, but three are conditions in which // not all elements so redefined may have been promoted to // the topmost level. 
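    // Descriptive comment: getTopLevelComponentByName first consults the per-category top-level
    // registries on the grammar (groups, attribute groups, attributes), then falls back to
    // scanning the children of the current <schema> element, including the contents of any
    // <redefine>. The surrounding SchemaInfo loop repeats the same lookup against every schema
    // document currently in scope; each iteration calls restore() so the traversal state matches
    // the document being searched, and restore() is called once more before returning so the
    // schema-info state is left consistent.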
private Element getTopLevelComponentByName(String componentCategory, String name) throws Exception { Element child = null; SchemaInfo curr = fSchemaInfoListRoot; for (; curr != null || curr == fSchemaInfoListRoot; curr = curr.getNext()) { if (curr != null) curr.restore(); if ( componentCategory.equals(SchemaSymbols.ELT_GROUP) ) { child = (Element) fSchemaGrammar.topLevelGroupDecls.get(name); } else if ( componentCategory.equals(SchemaSymbols.ELT_ATTRIBUTEGROUP ) && fSchemaInfoListRoot == null ) { child = (Element) fSchemaGrammar.topLevelAttrGrpDecls.get(name); } else if ( componentCategory.equals(SchemaSymbols.ELT_ATTRIBUTE ) ) { child = (Element) fSchemaGrammar.topLevelAttrDecls.get(name); } if (child != null ) { break; } child = XUtil.getFirstChildElement(fSchemaRootElement); if (child == null) { continue; } while (child != null ){ if ( child.getLocalName().equals(componentCategory)) { if (child.getAttribute(SchemaSymbols.ATT_NAME).equals(name)) { break; } } else if (fRedefineSucceeded && child.getLocalName().equals(SchemaSymbols.ELT_REDEFINE)) { Element gChild = XUtil.getFirstChildElement(child); while (gChild != null ){ if (gChild.getLocalName().equals(componentCategory)) { if (gChild.getAttribute(SchemaSymbols.ATT_NAME).equals(name)) { break; } } gChild = XUtil.getNextSiblingElement(gChild); } if (gChild != null) { child = gChild; break; } } child = XUtil.getNextSiblingElement(child); } if (child != null || fSchemaInfoListRoot == null) break; } // have to reset fSchemaInfoList if(curr != null) curr.restore(); else if (fSchemaInfoListRoot != null) fSchemaInfoListRoot.restore(); return child; } private boolean isTopLevel(Element component) { String parentName = component.getParentNode().getLocalName(); return (parentName.endsWith(SchemaSymbols.ELT_SCHEMA)) || (parentName.endsWith(SchemaSymbols.ELT_REDEFINE)) ; } DatatypeValidator getTypeValidatorFromNS(String newSchemaURI, String localpart) throws Exception { // The following impl is for the case where every Schema Grammar has its own instance of DatatypeRegistry. // Now that we have only one DataTypeRegistry used by all schemas. this is not needed. /** * Traverses notation declaration * and saves it in a registry. * Notations are stored in registry with the following * key: "uri:localname" * * @param notation child <notation> * @return local name of notation * @exception Exception */ private String traverseNotationDecl( Element notation ) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(notation, scope); String name = notation.getAttribute(SchemaSymbols.ATT_NAME); String qualifiedName =name; if (fTargetNSURIString.length () != 0) { qualifiedName = fTargetNSURIString+":"+name; } if (fNotationRegistry.get(qualifiedName)!=null) { return name; } String publicId = notation.getAttribute(SchemaSymbols.ATT_PUBLIC); String systemId = notation.getAttribute(SchemaSymbols.ATT_SYSTEM); if (publicId.length() == 0 && systemId.length() == 0) { //REVISIT: update error messages reportGenericSchemaError("<notation> declaration is invalid"); } if (name.length() == 0) { //REVISIT: update error messages reportGenericSchemaError("<notation> declaration does not have a name"); } fNotationRegistry.put(qualifiedName, name); //we don't really care if something inside <notation> is wrong.. checkContent( notation, XUtil.getFirstChildElement(notation), true ); //REVISIT: wait for DOM L3 APIs to pass info to application //REVISIT: SAX2 does not support notations. 
API should be changed. return name; } /** * This methods will traverse notation from current schema, * as well as from included or imported schemas * * @param notationName * localName of notation * @param uriStr uriStr for schema grammar * @return return local name for Notation (if found), otherwise * return empty string; * @exception Exception */ private String traverseNotationFromAnotherSchema( String notationName , String uriStr ) throws Exception { SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr); if (uriStr == null || aGrammar==null ||! (aGrammar instanceof SchemaGrammar) ) { // REVISIT: Localize reportGenericSchemaError("!!Schema not found in #traverseNotationDeclFromAnotherSchema, "+ "schema uri: " + uriStr +", groupName: " + notationName); return ""; } String savedNSURIString = fTargetNSURIString; fTargetNSURIString = fStringPool.toString(fStringPool.addSymbol(aGrammar.getTargetNamespaceURI())); if (DEBUGGING) { System.out.println("[traverseFromAnotherSchema]: " + fTargetNSURIString); } String qualifiedName = fTargetNSURIString + ":" + notationName; String localName = (String)fNotationRegistry.get(qualifiedName); if(localName != null ) // we've already traversed this notation return localName; //notation decl has not been traversed yet Element notationDecl = (Element) aGrammar.topLevelNotationDecls.get((Object)notationName); if (notationDecl == null) { // REVISIT: Localize reportGenericSchemaError( "no notation named \"" + notationName + "\" was defined in schema : " + uriStr); return ""; } localName = traverseNotationDecl(notationDecl); fTargetNSURIString = savedNSURIString; return localName; } // end of method traverseNotationFromAnotherSchema /** * Traverse Group Declaration. * * <group * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger * name = NCName * ref = QName> * Content: (annotation? , (all | choice | sequence)?) * <group/> * * @param elementDecl * @return * @exception Exception */ private GroupInfo traverseGroupDecl( Element groupDecl ) throws Exception { // General Attribute Checking int scope = isTopLevel(groupDecl)? 
GeneralAttrCheck.ELE_CONTEXT_GLOBAL: GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(groupDecl, scope); String groupName = groupDecl.getAttribute(SchemaSymbols.ATT_NAME); String ref = groupDecl.getAttribute(SchemaSymbols.ATT_REF); GroupInfo gInfo = null; Element child = checkContent( groupDecl, XUtil.getFirstChildElement(groupDecl), true ); if (ref.length() != 0) { if (isTopLevel(groupDecl)) // REVISIT: localize reportGenericSchemaError ( "A group with \"ref\" present must not have <schema> or <redefine> as its parent"); if (groupName.length() != 0) // REVISIT: localize reportGenericSchemaError ( "group " + groupName + " cannot refer to another group, but it refers to " + ref); // there should be no children for <group ref="..."> if (XUtil.getFirstChildElement(groupDecl)!=null) reportGenericSchemaError ( "A group with \"ref\" present must not have children"); String prefix = ""; String localpart = ref; int colonptr = ref.indexOf(":"); if ( colonptr > 0) { prefix = ref.substring(0,colonptr); localpart = ref.substring(colonptr+1); } int localpartIndex = fStringPool.addSymbol(localpart); String uriStr = resolvePrefixToURI(prefix); if (!uriStr.equals(fTargetNSURIString)) { gInfo = traverseGroupDeclFromAnotherSchema(localpart, uriStr); if (gInfo != null) { if (DEBUG_NEW_GROUP) findAndCreateElements(gInfo.contentSpecIndex,gInfo.scope); } return gInfo; } try { gInfo = (GroupInfo) fGroupNameRegistry.get(uriStr + "," + localpart); if (gInfo != null) { // Ensure any LEAF elements are created at the // scope of the referencing type if (DEBUG_NEW_GROUP) findAndCreateElements(gInfo.contentSpecIndex,gInfo.scope); return gInfo; } } catch (ClassCastException c) { } // Check if we are in the middle of traversing this group (i.e. circular references) if (fCurrentGroupNameStack.search((Object)localpart) > - 1) { reportGenericSchemaError("mg-props-correct: Circular definition for group " + localpart); return null; } int contentSpecIndex = -1; Element referredGroup = getTopLevelComponentByName(SchemaSymbols.ELT_GROUP,localpart); if (referredGroup == null) { // REVISIT: Localize reportGenericSchemaError("Group " + localpart + " not found in the Schema"); //REVISIT, this should be some custom Exception //throw new RuntimeException("Group " + localpart + " not found in the Schema"); } else { gInfo = traverseGroupDecl(referredGroup); } // Now that we have a tree, ensure any LEAF elements are created at the // scope of the referencing type if (gInfo != null) { if (DEBUG_NEW_GROUP) findAndCreateElements(gInfo.contentSpecIndex,gInfo.scope); } return gInfo; } else if (groupName.length() == 0) // REVISIT: Localize reportGenericSchemaError("a <group> must have a name or a ref present"); String qualifiedGroupName = fTargetNSURIString + "," + groupName; try { gInfo = (GroupInfo) fGroupNameRegistry.get(qualifiedGroupName); if (gInfo != null) { // Ensure any LEAF elements are created at the // scope of the referencing type if (DEBUG_NEW_GROUP) findAndCreateElements(gInfo.contentSpecIndex,gInfo.scope); return gInfo; } } catch (ClassCastException c) { } // if we're here then we're traversing a top-level group that we've never seen before. // Push the group name onto a stack, so that we can check for circular groups fCurrentGroupNameStack.push(groupName); // Save the scope and set the current scope to -1 int savedScope = fCurrentScope; if (DEBUG_NEW_GROUP) fCurrentScope = fScopeCount++; else fCurrentScope = -1; int index = -2; boolean illegalChild = false; String childName = (child != null) ? 
child.getLocalName() : ""; if (childName.equals(SchemaSymbols.ELT_ALL)) { index = traverseAll(child); } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); } else if (childName.length() != 0 || (child != null && XUtil.getNextSiblingElement(child) != null)) { illegalChild = true; reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } //Must have all or choice or sequence child. if (child == null) { reportGenericSchemaError("Named group must contain an 'all', 'choice' or 'sequence' child"); } else if (XUtil.getNextSiblingElement(child) != null) { illegalChild = true; reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if ( ! illegalChild && child != null) { index = handleOccurrences(index, child, CHILD_OF_GROUP); } gInfo = new GroupInfo(); gInfo.contentSpecIndex = index; gInfo.scope = fCurrentScope; fCurrentScope = savedScope; fCurrentGroupNameStack.pop(); fGroupNameRegistry.put(qualifiedGroupName, gInfo); return gInfo; } private void findAndCreateElements(int csIndex, int scope) { if (csIndex<0 || fCurrentScope==TOP_LEVEL_SCOPE) { return; } fSchemaGrammar.getContentSpec( csIndex, tempContentSpec1); int type = tempContentSpec1.type; int left = tempContentSpec1.value; int right = tempContentSpec1.otherValue; if (type == XMLContentSpec.CONTENTSPECNODE_LEAF) { int eltNdx = fSchemaGrammar.getElementDeclIndex(right, left, scope); if (eltNdx <0) return; fSchemaGrammar.cloneElementDecl(eltNdx, fCurrentScope); } else if (type == XMLContentSpec.CONTENTSPECNODE_CHOICE || type == XMLContentSpec.CONTENTSPECNODE_ALL || type == XMLContentSpec.CONTENTSPECNODE_SEQ) { findAndCreateElements(left,scope); if (right != -2) findAndCreateElements(right,scope); } return; } private GroupInfo traverseGroupDeclFromAnotherSchema( String groupName , String uriStr ) throws Exception { GroupInfo gInfo = null; SchemaGrammar aGrammar = (SchemaGrammar) fGrammarResolver.getGrammar(uriStr); if (uriStr == null || aGrammar==null ||! (aGrammar instanceof SchemaGrammar) ) { // REVISIT: Localize reportGenericSchemaError("!!Schema not found in #traverseGroupDeclFromAnotherSchema, "+ "schema uri: " + uriStr +", groupName: " + groupName); return null; } Element groupDecl = (Element) aGrammar.topLevelGroupDecls.get((Object)groupName); if (groupDecl == null) { // REVISIT: Localize reportGenericSchemaError( "no group named \"" + groupName + "\" was defined in schema : " + uriStr); return null; } NamespacesScope saveNSMapping = fNamespacesScope; int saveTargetNSUri = fTargetNSURI; fTargetNSURI = fStringPool.addSymbol(aGrammar.getTargetNamespaceURI()); fNamespacesScope = aGrammar.getNamespacesScope(); Element child = checkContent( groupDecl, XUtil.getFirstChildElement(groupDecl), true ); String qualifiedGroupName = fTargetNSURIString + "," + groupName; try { gInfo = (GroupInfo) fGroupNameRegistry.get(qualifiedGroupName); if (gInfo != null) return gInfo; } catch (ClassCastException c) { } // if we're here then we're traversing a top-level group that we've never seen before. int index = -2; int savedScope = fCurrentScope; if (DEBUG_NEW_GROUP) fCurrentScope = fScopeCount++; boolean illegalChild = false; String childName = (child != null) ? 
child.getLocalName() : ""; if (childName.equals(SchemaSymbols.ELT_ALL)) { index = traverseAll(child); } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); } else if (childName.length() != 0 || (child != null && XUtil.getNextSiblingElement(child) != null)) { illegalChild = true; reportSchemaError(SchemaMessageProvider.GroupContentRestricted, new Object [] { "group", childName }); } if ( ! illegalChild && child != null) { index = handleOccurrences( index, child); } gInfo = new GroupInfo(); gInfo.contentSpecIndex = index; gInfo.scope = fCurrentScope; fCurrentScope = savedScope; fGroupNameRegistry.put(qualifiedGroupName, gInfo); fNamespacesScope = saveNSMapping; fTargetNSURI = saveTargetNSUri; return gInfo; } // end of method traverseGroupDeclFromAnotherSchema /** * * Traverse the Sequence declaration * * <sequence * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? , (element | group | choice | sequence | any)*) * </sequence> * **/ int traverseSequence (Element sequenceDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(sequenceDecl, scope); Element child = checkContent(sequenceDecl, XUtil.getFirstChildElement(sequenceDecl), true); int csnType = XMLContentSpec.CONTENTSPECNODE_SEQ; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; boolean seeParticle = false; String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { GroupInfo grpInfo = traverseGroupDecl(child); index = (grpInfo != null) ? grpInfo.contentSpecIndex:-2; // A content type of all can only appear // as the content type of a complex type definition. if (hasAllContent(index)) { reportSchemaError(SchemaMessageProvider.AllContentLimited, new Object [] { "sequence" }); continue; } seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { reportSchemaError( SchemaMessageProvider.SeqChoiceContentRestricted, new Object [] { "sequence", childName }); continue; } if (index != -2) hadContent = true; if (seeParticle) { index = handleOccurrences( index, child); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent) { if (right != -2 || fSchemaGrammar.getDeferContentSpecExpansion()) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); } return left; } /** * * Traverse the Choice declaration * * <choice * id = ID * maxOccurs = string * minOccurs = nonNegativeInteger> * Content: (annotation? 
, (element | group | choice | sequence | any)*) * </choice> * **/ int traverseChoice (Element choiceDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(choiceDecl, scope); // REVISIT: traverseChoice, traverseSequence can be combined Element child = checkContent(choiceDecl, XUtil.getFirstChildElement(choiceDecl), true); int csnType = XMLContentSpec.CONTENTSPECNODE_CHOICE; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; boolean seeParticle = false; String childName = child.getLocalName(); if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_GROUP)) { GroupInfo grpInfo = traverseGroupDecl(child); index = (grpInfo != null) ? grpInfo.contentSpecIndex:-2; // A model group whose {compositor} is "all" can only appear // as the {content type} of a complex type definition. if (hasAllContent(index)) { reportSchemaError(SchemaMessageProvider.AllContentLimited, new Object [] { "choice" }); continue; } seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_CHOICE)) { index = traverseChoice(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_SEQUENCE)) { index = traverseSequence(child); seeParticle = true; } else if (childName.equals(SchemaSymbols.ELT_ANY)) { index = traverseAny(child); seeParticle = true; } else { reportSchemaError( SchemaMessageProvider.SeqChoiceContentRestricted, new Object [] { "choice", childName }); continue; } if (index != -2) hadContent = true; if (seeParticle) { index = handleOccurrences( index, child); } if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent) { if (right != -2 || fSchemaGrammar.getDeferContentSpecExpansion()) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); } return left; } /** * * Traverse the "All" declaration * * <all * id = ID * maxOccurs = 1 : 1 * minOccurs = (0 | 1) : 1> * Content: (annotation? 
, element*) * </all> **/ int traverseAll(Element allDecl) throws Exception { // General Attribute Checking int scope = GeneralAttrCheck.ELE_CONTEXT_LOCAL; Hashtable attrValues = generalCheck(allDecl, scope); Element child = checkContent(allDecl, XUtil.getFirstChildElement(allDecl), true); int csnType = XMLContentSpec.CONTENTSPECNODE_ALL; int left = -2; int right = -2; boolean hadContent = false; for (; child != null; child = XUtil.getNextSiblingElement(child)) { int index = -2; String childName = child.getLocalName(); // Only elements are allowed in <all> if (childName.equals(SchemaSymbols.ELT_ELEMENT)) { QName eltQName = traverseElementDecl(child); index = fSchemaGrammar.addContentSpecNode( XMLContentSpec.CONTENTSPECNODE_LEAF, eltQName.localpart, eltQName.uri, false); index = handleOccurrences(index, child, PROCESSING_ALL_EL); } else { reportSchemaError(SchemaMessageProvider.AllContentRestricted, new Object [] { childName }); continue; } hadContent = true; if (left == -2) { left = index; } else if (right == -2) { right = index; } else { left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); right = index; } } if (hadContent) { if (right != -2 || fSchemaGrammar.getDeferContentSpecExpansion()) left = fSchemaGrammar.addContentSpecNode(csnType, left, right, false); } return left; } // Determines whether a content spec tree represents an "all" content model private boolean hasAllContent(int contentSpecIndex) { // If the content is not empty, is the top node ALL? if (contentSpecIndex > -1) { XMLContentSpec content = new XMLContentSpec(); fSchemaGrammar.getContentSpec(contentSpecIndex, content); // An ALL node could be optional, so we have to be prepared // to look one level below a ZERO_OR_ONE node for an ALL. if (content.type == XMLContentSpec.CONTENTSPECNODE_ZERO_OR_ONE) { fSchemaGrammar.getContentSpec(content.value, content); } return (content.type == XMLContentSpec.CONTENTSPECNODE_ALL); } return false; } // check the prefix of each element: must be SchemaForSchema // general constrain checking on attriubtes private Hashtable generalCheck(Element element, int scope) throws Exception{ String uri = element.getNamespaceURI(); if (uri == null || !uri.equals(SchemaSymbols.URI_SCHEMAFORSCHEMA)) { reportGenericSchemaError("The namespce name for '"+element.getLocalName()+"' must be "+SchemaSymbols.URI_SCHEMAFORSCHEMA); } return fGeneralAttrCheck.checkAttributes (element, scope); } // utilities from Tom Watson's SchemaParser class // TO DO: Need to make this more conformant with Schema int type parsing private int parseInt (String intString) throws Exception { if ( intString.equals("*") ) { return SchemaSymbols.INFINITY; } else { return Integer.parseInt (intString); } } private int parseSimpleFinal (String finalString) throws Exception { if ( finalString.equals (SchemaSymbols.ATTVAL_POUNDALL) ) { return SchemaSymbols.ENUMERATION+SchemaSymbols.RESTRICTION+SchemaSymbols.LIST; } else { int enumerate = 0; int restrict = 0; int list = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ("restriction in set twice"); } } else if ( token.equals (SchemaSymbols.ELT_LIST) ) { if ( list == 0 ) { list = SchemaSymbols.LIST; } else { // REVISIT: Localize reportGenericSchemaError ("list in set twice"); } } else { // REVISIT: Localize reportGenericSchemaError ( 
"Invalid value (" + finalString + ")" ); } } return enumerate+list; } } private int parseDerivationSet (String finalString) throws Exception { if ( finalString.equals (SchemaSymbols.ATTVAL_POUNDALL) ) { return SchemaSymbols.EXTENSION+SchemaSymbols.RESTRICTION; } else { int extend = 0; int restrict = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "extension already in set" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "restriction already in set" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid final value (" + finalString + ")" ); } } return extend+restrict; } } private int parseBlockSet (String blockString) throws Exception { if( blockString == null) return fBlockDefault; else if ( blockString.equals (SchemaSymbols.ATTVAL_POUNDALL) ) { return SchemaSymbols.SUBSTITUTION+SchemaSymbols.EXTENSION+SchemaSymbols.RESTRICTION; } else { int extend = 0; int restrict = 0; int substitute = 0; StringTokenizer t = new StringTokenizer (blockString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ATTVAL_SUBSTITUTION) ) { if ( substitute == 0 ) { substitute = SchemaSymbols.SUBSTITUTION; } else { // REVISIT: Localize reportGenericSchemaError ( "The value 'substitution' already in the list" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "The value 'extension' is already in the list" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "The value 'restriction' is already in the list" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid block value (" + blockString + ")" ); } } int defaultVal = extend+restrict+substitute; return (defaultVal == 0 ? 
fBlockDefault : defaultVal); } } private int parseFinalSet (String finalString) throws Exception { if( finalString == null) { return fFinalDefault; } else if ( finalString.equals (SchemaSymbols.ATTVAL_POUNDALL) ) { return SchemaSymbols.EXTENSION+SchemaSymbols.LIST+SchemaSymbols.RESTRICTION+SchemaSymbols.UNION; } else { int extend = 0; int restrict = 0; int list = 0; int union = 0; StringTokenizer t = new StringTokenizer (finalString, " "); while (t.hasMoreTokens()) { String token = t.nextToken (); if ( token.equals (SchemaSymbols.ELT_UNION) ) { if ( union == 0 ) { union = SchemaSymbols.UNION; } else { // REVISIT: Localize reportGenericSchemaError ( "The value 'union' is already in the list" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_EXTENSION) ) { if ( extend == 0 ) { extend = SchemaSymbols.EXTENSION; } else { // REVISIT: Localize reportGenericSchemaError ( "The value 'extension' is already in the list" ); } } else if ( token.equals (SchemaSymbols.ELT_LIST) ) { if ( list == 0 ) { list = SchemaSymbols.LIST; } else { // REVISIT: Localize reportGenericSchemaError ( "The value 'list' is already in the list" ); } } else if ( token.equals (SchemaSymbols.ATTVAL_RESTRICTION) ) { if ( restrict == 0 ) { restrict = SchemaSymbols.RESTRICTION; } else { // REVISIT: Localize reportGenericSchemaError ( "The value 'restriction' is already in the list" ); } } else { // REVISIT: Localize reportGenericSchemaError ( "Invalid final value (" + finalString + ")" ); } } int defaultVal = extend+restrict+list+union; return (defaultVal == 0 ? fFinalDefault : defaultVal); } } private void reportGenericSchemaError (String error) throws Exception { if (fErrorReporter == null) { System.err.println("__TraverseSchemaError__ : " + error); } else { reportSchemaError (SchemaMessageProvider.GenericError, new Object[] { error }); } } private void reportSchemaError(int major, Object args[]) throws Exception { if (fErrorReporter == null) { System.out.println("__TraverseSchemaError__ : " + SchemaMessageProvider.fgMessageKeys[major]); for (int i=0; i< args.length ; i++) { System.out.println((String)args[i]); } } else { fErrorReporter.reportError(fErrorReporter.getLocator(), SchemaMessageProvider.SCHEMA_DOMAIN, major, SchemaMessageProvider.MSG_NONE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } static class Resolver implements EntityResolver { private static final String SYSTEM[] = { "http: "http: "http: }; private static final String PATH[] = { "structures.dtd", "datatypes.dtd", "versionInfo.ent", }; public InputSource resolveEntity(String publicId, String systemId) throws IOException { // looking for the schema DTDs? for (int i = 0; i < SYSTEM.length; i++) { if (systemId.equals(SYSTEM[i])) { InputSource source = new InputSource(getClass().getResourceAsStream(PATH[i])); source.setPublicId(publicId); source.setSystemId(systemId); return source; } } // use default resolution return null; } // resolveEntity(String,String):InputSource } // class Resolver static class ErrorHandler implements org.xml.sax.ErrorHandler { /** Warning. */ public void warning(SAXParseException ex) { System.err.println("[Warning] "+ getLocationString(ex)+": "+ ex.getMessage()); } /** Error. */ public void error(SAXParseException ex) { System.err.println("[Error] "+ getLocationString(ex)+": "+ ex.getMessage()); } /** Fatal error. 
*/ public void fatalError(SAXParseException ex) throws SAXException { System.err.println("[Fatal Error] "+ getLocationString(ex)+": "+ ex.getMessage()); throw ex; } // Private methods /** Returns a string of the location. */ private String getLocationString(SAXParseException ex) { StringBuffer str = new StringBuffer(); String systemId_ = ex.getSystemId(); if (systemId_ != null) { int index = systemId_.lastIndexOf('/'); if (index != -1) systemId_ = systemId_.substring(index + 1); str.append(systemId_); } str.append(':'); str.append(ex.getLineNumber()); str.append(':'); str.append(ex.getColumnNumber()); return str.toString(); } // getLocationString(SAXParseException):String } static class IgnoreWhitespaceParser extends DOMParser { public void ignorableWhitespace(char ch[], int start, int length) {} public void ignorableWhitespace(int dataIdx) {} } // class IgnoreWhitespaceParser // When in a <redefine>, type definitions being used (and indeed // refs to <group>'s and <attributeGroup>'s) may refer to info // items either in the schema being redefined, in the <redefine>, // or else in the schema doing the redefining. Because of this // latter we have to be prepared sometimes to look for our type // definitions outside the schema stored in fSchemaRootElement. // This simple class does this; it's just a linked list that // lets us look at the <schema>'s on the queue; note also that this // should provide us with a mechanism to handle nested <redefine>'s. // It's also a handy way of saving schema info when importing/including; saves some code. public class SchemaInfo { private Element saveRoot; private SchemaInfo nextRoot; private SchemaInfo prevRoot; private String savedSchemaURL = fCurrentSchemaURL; private boolean saveElementDefaultQualified = fElementDefaultQualified; private boolean saveAttributeDefaultQualified = fAttributeDefaultQualified; private int saveBlockDefault = fBlockDefault; private int saveFinalDefault = fFinalDefault; private NamespacesScope saveNamespacesScope = fNamespacesScope; public SchemaInfo ( boolean saveElementDefaultQualified, boolean saveAttributeDefaultQualified, int saveBlockDefault, int saveFinalDefault, String savedSchemaURL, Element saveRoot, NamespacesScope saveNamespacesScope, SchemaInfo nextRoot, SchemaInfo prevRoot) { this.saveElementDefaultQualified = saveElementDefaultQualified; this.saveAttributeDefaultQualified = saveAttributeDefaultQualified; this.saveBlockDefault = saveBlockDefault; this.saveFinalDefault = saveFinalDefault; this.savedSchemaURL = savedSchemaURL; this.saveRoot = saveRoot ; if(saveNamespacesScope != null) this.saveNamespacesScope = (NamespacesScope)saveNamespacesScope.clone(); this.nextRoot = nextRoot; this.prevRoot = prevRoot; } public void setNext (SchemaInfo next) { nextRoot = next; } public SchemaInfo getNext () { return nextRoot; } public void setPrev (SchemaInfo prev) { prevRoot = prev; } public String getCurrentSchemaURL() { return savedSchemaURL; } public SchemaInfo getPrev () { return prevRoot; } public Element getRoot() { return saveRoot; } // NOTE: this has side-effects!!! public void restore() { fCurrentSchemaURL = savedSchemaURL; fElementDefaultQualified = saveElementDefaultQualified; fAttributeDefaultQualified = saveAttributeDefaultQualified; fBlockDefault = saveBlockDefault; fFinalDefault = saveFinalDefault; fNamespacesScope = (NamespacesScope)saveNamespacesScope.clone(); fSchemaRootElement = saveRoot; } } // class SchemaInfo } // class TraverseSchema
package de.matchbox.client.forms; import de.matchbox.client.Zahl; import de.matchbox.client.forms.models.RoomFormModel; import de.matchbox.client.forms.usercontrols.PlayerControl; import de.matchbox.client.utility.MatchUtility; import de.matchbox.communication.MessageObject; import de.matchbox.communication.classmodels.PlayerModel; import de.matchbox.communication.contentobjects.RoomCommandContentObject; import de.matchbox.communication.contentobjects.roomcommands.EquasionContentObject; import de.matchbox.communication.contentobjects.roomcommands.IRoomCommandContentObject; import de.matchbox.communication.contentobjects.roomcommands.server.CheckEquasionResultContentObject; import de.matchbox.communication.contentobjects.roomcommands.server.ListPlayerContentObject; import de.matchbox.communication.enumeration.MessageType; import de.matchbox.communication.enumeration.RoomCommand; import de.matchbox.communication.shared.abiturklassen.List; import java.awt.Dimension; import java.awt.GridLayout; import java.awt.Toolkit; import javax.swing.Box; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.Timer; public class FrmRoom extends javax.swing.JFrame { private static final long serialVersionUID = 1L; private JLabel jMatchArr[][]; private JLabel jSpaceArr[][]; private final RoomFormModel roomFormModel; private int hasMatch; private String gleichung; private Timer timer; // private int secondsPassed; public FrmRoom(RoomFormModel pRoomFormModel) { this.roomFormModel = pRoomFormModel; this.initComponents(); this.setIconImage(Toolkit.getDefaultToolkit().getImage(getClass().getResource("icon.png"))); this.jMatchArr = new JLabel[8][10]; this.jSpaceArr = new JLabel[8][10]; this.createArr(); this.resetView(); this.setResizable(false); this.setLocationRelativeTo(null); this.initEvents(); this.initEvents2(); this.Rekt.setVisible(false); this.jLabelScope.setVisible(false); this.hasMatch = 0; this.gleichung = ""; this.newEquasion(); } public void newEquasion() { this.hasMatch = 0; } // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jMenu1 = new javax.swing.JMenu(); jPanel1 = new javax.swing.JPanel(); jSpace1_1 = new javax.swing.JLabel(); jSpace2_1 = new javax.swing.JLabel(); jSpace3_1 = new javax.swing.JLabel(); jSpace4_1 = new javax.swing.JLabel(); jSpace5_1 = new javax.swing.JLabel(); jSpace6_1 = new javax.swing.JLabel(); jSpace7_1 = new javax.swing.JLabel(); jMatch1_1 = new javax.swing.JLabel(); jMatch2_1 = new javax.swing.JLabel(); jMatch3_1 = new javax.swing.JLabel(); jMatch4_1 = new javax.swing.JLabel(); jMatch5_1 = new javax.swing.JLabel(); jMatch6_1 = new javax.swing.JLabel(); jMatch7_1 = new javax.swing.JLabel(); jSpace1_2 = new javax.swing.JLabel(); jSpace2_2 = new javax.swing.JLabel(); jSpace3_2 = new javax.swing.JLabel(); jSpace4_2 = new javax.swing.JLabel(); jSpace5_2 = new javax.swing.JLabel(); jSpace6_2 = new javax.swing.JLabel(); jSpace7_2 = new javax.swing.JLabel(); jMatch1_2 = new javax.swing.JLabel(); jMatch2_2 = new javax.swing.JLabel(); jMatch3_2 = new javax.swing.JLabel(); jMatch4_2 = new javax.swing.JLabel(); jMatch5_2 = new javax.swing.JLabel(); jMatch6_2 = new javax.swing.JLabel(); jMatch7_2 = new javax.swing.JLabel(); jSpace1_3 = new javax.swing.JLabel(); jSpace2_3 = new javax.swing.JLabel(); jSpace3_3 = new javax.swing.JLabel(); jSpace4_3 = new javax.swing.JLabel(); jSpace5_3 = new javax.swing.JLabel(); jSpace6_3 = new javax.swing.JLabel(); jSpace7_3 = new javax.swing.JLabel(); 
jMatch1_3 = new javax.swing.JLabel(); jMatch2_3 = new javax.swing.JLabel(); jMatch3_3 = new javax.swing.JLabel(); jMatch4_3 = new javax.swing.JLabel(); jMatch5_3 = new javax.swing.JLabel(); jMatch6_3 = new javax.swing.JLabel(); jMatch7_3 = new javax.swing.JLabel(); jSpace1_4 = new javax.swing.JLabel(); jSpace2_4 = new javax.swing.JLabel(); jSpace3_4 = new javax.swing.JLabel(); jSpace4_4 = new javax.swing.JLabel(); jSpace5_4 = new javax.swing.JLabel(); jSpace6_4 = new javax.swing.JLabel(); jSpace7_4 = new javax.swing.JLabel(); jMatch1_4 = new javax.swing.JLabel(); jMatch2_4 = new javax.swing.JLabel(); jMatch3_4 = new javax.swing.JLabel(); jMatch4_4 = new javax.swing.JLabel(); jMatch5_4 = new javax.swing.JLabel(); jMatch6_4 = new javax.swing.JLabel(); jMatch7_4 = new javax.swing.JLabel(); jSpace1_5 = new javax.swing.JLabel(); jSpace2_5 = new javax.swing.JLabel(); jSpace3_5 = new javax.swing.JLabel(); jSpace4_5 = new javax.swing.JLabel(); jSpace5_5 = new javax.swing.JLabel(); jSpace6_5 = new javax.swing.JLabel(); jSpace7_5 = new javax.swing.JLabel(); jMatch1_5 = new javax.swing.JLabel(); jMatch2_5 = new javax.swing.JLabel(); jMatch3_5 = new javax.swing.JLabel(); jMatch4_5 = new javax.swing.JLabel(); jMatch5_5 = new javax.swing.JLabel(); jMatch6_5 = new javax.swing.JLabel(); jMatch7_5 = new javax.swing.JLabel(); jSpace1_6 = new javax.swing.JLabel(); jSpace2_6 = new javax.swing.JLabel(); jSpace3_6 = new javax.swing.JLabel(); jSpace4_6 = new javax.swing.JLabel(); jSpace5_6 = new javax.swing.JLabel(); jSpace6_6 = new javax.swing.JLabel(); jSpace7_6 = new javax.swing.JLabel(); jMatch1_6 = new javax.swing.JLabel(); jMatch2_6 = new javax.swing.JLabel(); jMatch3_6 = new javax.swing.JLabel(); jMatch4_6 = new javax.swing.JLabel(); jMatch5_6 = new javax.swing.JLabel(); jMatch6_6 = new javax.swing.JLabel(); jMatch7_6 = new javax.swing.JLabel(); jSpace1_7 = new javax.swing.JLabel(); jSpace2_7 = new javax.swing.JLabel(); jSpace3_7 = new javax.swing.JLabel(); jSpace4_7 = new javax.swing.JLabel(); jSpace5_7 = new javax.swing.JLabel(); jSpace6_7 = new javax.swing.JLabel(); jSpace7_7 = new javax.swing.JLabel(); jMatch1_7 = new javax.swing.JLabel(); jMatch2_7 = new javax.swing.JLabel(); jMatch3_7 = new javax.swing.JLabel(); jMatch4_7 = new javax.swing.JLabel(); jMatch5_7 = new javax.swing.JLabel(); jMatch6_7 = new javax.swing.JLabel(); jMatch7_7 = new javax.swing.JLabel(); jSpace1_8 = new javax.swing.JLabel(); jSpace2_8 = new javax.swing.JLabel(); jSpace3_8 = new javax.swing.JLabel(); jSpace4_8 = new javax.swing.JLabel(); jSpace5_8 = new javax.swing.JLabel(); jSpace6_8 = new javax.swing.JLabel(); jSpace7_8 = new javax.swing.JLabel(); jMatch1_8 = new javax.swing.JLabel(); jMatch2_8 = new javax.swing.JLabel(); jMatch3_8 = new javax.swing.JLabel(); jMatch4_8 = new javax.swing.JLabel(); jMatch5_8 = new javax.swing.JLabel(); jMatch6_8 = new javax.swing.JLabel(); jMatch7_8 = new javax.swing.JLabel(); jSpace1_9 = new javax.swing.JLabel(); jSpace2_9 = new javax.swing.JLabel(); jSpace3_9 = new javax.swing.JLabel(); jSpace4_9 = new javax.swing.JLabel(); jSpace5_9 = new javax.swing.JLabel(); jSpace6_9 = new javax.swing.JLabel(); jSpace7_9 = new javax.swing.JLabel(); jMatch1_9 = new javax.swing.JLabel(); jMatch2_9 = new javax.swing.JLabel(); jMatch3_9 = new javax.swing.JLabel(); jMatch4_9 = new javax.swing.JLabel(); jMatch5_9 = new javax.swing.JLabel(); jMatch6_9 = new javax.swing.JLabel(); jMatch7_9 = new javax.swing.JLabel(); jEaquals = new javax.swing.JLabel(); jPlus = new javax.swing.JLabel(); jMinus = new 
javax.swing.JLabel(); jLabelScope = new javax.swing.JLabel(); jButtonTest = new javax.swing.JButton(); jLabelInfo = new javax.swing.JLabel(); jButtonCheck = new javax.swing.JButton(); jButtonReset = new javax.swing.JButton(); jPanelPlayer = new javax.swing.JPanel(); Rekt = new javax.swing.JLabel(); jLabelBackground = new javax.swing.JLabel(); mnuMain = new javax.swing.JMenuBar(); mnuFile = new javax.swing.JMenu(); mnuQuit = new javax.swing.JMenuItem(); mnuCon = new javax.swing.JMenu(); jMenuItem1 = new javax.swing.JMenuItem(); jMenu1.setText("jMenu1"); setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE); setTitle("MatchBox"); setBackground(new java.awt.Color(255, 255, 255)); setMinimumSize(new java.awt.Dimension(1500, 650)); getContentPane().setLayout(null); jPanel1.setBackground(new java.awt.Color(102, 102, 102)); jPanel1.setLayout(null); jSpace1_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_1); jSpace1_1.setBounds(20, 50, 100, 12); jSpace2_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_1); jSpace2_1.setBounds(10, 60, 12, 100); jSpace3_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_1); jSpace3_1.setBounds(120, 60, 12, 100); jSpace4_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_1); jSpace4_1.setBounds(20, 160, 100, 12); jSpace5_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_1); jSpace5_1.setBounds(10, 170, 12, 100); jSpace6_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_1); jSpace6_1.setBounds(120, 170, 12, 100); jSpace7_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_1); jSpace7_1.setBounds(20, 270, 100, 12); jMatch1_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_1); jMatch1_1.setBounds(20, 50, 100, 12); jMatch2_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch2_1); jMatch2_1.setBounds(10, 60, 12, 100); jMatch3_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_1); jMatch3_1.setBounds(120, 60, 12, 100); jMatch4_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_1); jMatch4_1.setBounds(20, 160, 100, 12); jMatch5_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_1); jMatch5_1.setBounds(10, 170, 12, 100); jMatch6_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_1); jMatch6_1.setBounds(120, 170, 12, 100); jMatch7_1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_1); jMatch7_1.setBounds(20, 270, 100, 12); jSpace1_2.setIcon(new 
javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_2); jSpace1_2.setBounds(150, 50, 100, 12); jSpace2_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_2); jSpace2_2.setBounds(140, 60, 12, 100); jSpace3_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_2); jSpace3_2.setBounds(250, 60, 12, 100); jSpace4_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_2); jSpace4_2.setBounds(150, 160, 100, 12); jSpace5_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_2); jSpace5_2.setBounds(140, 170, 12, 100); jSpace6_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_2); jSpace6_2.setBounds(250, 170, 12, 100); jSpace7_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_2); jSpace7_2.setBounds(150, 270, 100, 12); jMatch1_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_2); jMatch1_2.setBounds(150, 50, 100, 12); jMatch2_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch2_2); jMatch2_2.setBounds(140, 60, 12, 100); jMatch3_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_2); jMatch3_2.setBounds(250, 60, 12, 100); jMatch4_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_2); jMatch4_2.setBounds(150, 160, 100, 12); jMatch5_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_2); jMatch5_2.setBounds(140, 170, 12, 100); jMatch6_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_2); jMatch6_2.setBounds(250, 170, 12, 100); jMatch7_2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_2); jMatch7_2.setBounds(150, 270, 100, 12); jSpace1_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_3); jSpace1_3.setBounds(280, 50, 100, 12); jSpace2_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_3); jSpace2_3.setBounds(270, 60, 12, 100); jSpace3_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_3); jSpace3_3.setBounds(380, 60, 12, 100); jSpace4_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_3); jSpace4_3.setBounds(280, 160, 100, 12); jSpace5_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_3); 
jSpace5_3.setBounds(270, 170, 12, 100); jSpace6_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_3); jSpace6_3.setBounds(380, 170, 12, 100); jSpace7_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_3); jSpace7_3.setBounds(280, 270, 100, 12); jMatch1_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_3); jMatch1_3.setBounds(280, 50, 100, 12); jMatch2_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch2_3); jMatch2_3.setBounds(270, 60, 12, 100); jMatch3_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_3); jMatch3_3.setBounds(380, 60, 12, 100); jMatch4_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_3); jMatch4_3.setBounds(280, 160, 100, 12); jMatch5_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_3); jMatch5_3.setBounds(270, 170, 12, 100); jMatch6_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_3); jMatch6_3.setBounds(380, 170, 12, 100); jMatch7_3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_3); jMatch7_3.setBounds(280, 270, 100, 12); jSpace1_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_4); jSpace1_4.setBounds(520, 50, 100, 12); jSpace2_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_4); jSpace2_4.setBounds(510, 60, 12, 100); jSpace3_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_4); jSpace3_4.setBounds(620, 60, 12, 100); jSpace4_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_4); jSpace4_4.setBounds(520, 160, 100, 12); jSpace5_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_4); jSpace5_4.setBounds(510, 170, 12, 100); jSpace6_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_4); jSpace6_4.setBounds(620, 170, 12, 100); jSpace7_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_4); jSpace7_4.setBounds(520, 270, 100, 12); jMatch1_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_4); jMatch1_4.setBounds(520, 50, 100, 12); jMatch2_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch2_4); jMatch2_4.setBounds(510, 60, 12, 100); jMatch3_4.setIcon(new 
javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_4); jMatch3_4.setBounds(620, 60, 12, 100); jMatch4_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_4); jMatch4_4.setBounds(520, 160, 100, 12); jMatch5_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_4); jMatch5_4.setBounds(510, 170, 12, 100); jMatch6_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_4); jMatch6_4.setBounds(620, 170, 12, 100); jMatch7_4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_4); jMatch7_4.setBounds(520, 270, 100, 12); jSpace1_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_5); jSpace1_5.setBounds(650, 50, 100, 12); jSpace2_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_5); jSpace2_5.setBounds(640, 60, 12, 100); jSpace3_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_5); jSpace3_5.setBounds(750, 60, 12, 100); jSpace4_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_5); jSpace4_5.setBounds(650, 160, 100, 12); jSpace5_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_5); jSpace5_5.setBounds(640, 170, 12, 100); jSpace6_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_5); jSpace6_5.setBounds(750, 170, 12, 100); jSpace7_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_5); jSpace7_5.setBounds(650, 270, 100, 12); jMatch1_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_5); jMatch1_5.setBounds(650, 50, 100, 12); jMatch2_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch2_5); jMatch2_5.setBounds(640, 60, 12, 100); jMatch3_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_5); jMatch3_5.setBounds(750, 60, 12, 100); jMatch4_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_5); jMatch4_5.setBounds(650, 160, 100, 12); jMatch5_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_5); jMatch5_5.setBounds(640, 170, 12, 100); jMatch6_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_5); jMatch6_5.setBounds(750, 170, 12, 100); jMatch7_5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_5); 
jMatch7_5.setBounds(650, 270, 100, 12); jSpace1_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_6); jSpace1_6.setBounds(780, 50, 100, 12); jSpace2_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_6); jSpace2_6.setBounds(770, 60, 12, 100); jSpace3_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_6); jSpace3_6.setBounds(880, 60, 12, 100); jSpace4_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_6); jSpace4_6.setBounds(780, 160, 100, 12); jSpace5_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_6); jSpace5_6.setBounds(770, 170, 12, 100); jSpace6_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_6); jSpace6_6.setBounds(880, 170, 12, 100); jSpace7_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_6); jSpace7_6.setBounds(780, 270, 100, 12); jMatch1_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_6); jMatch1_6.setBounds(780, 50, 100, 12); jMatch2_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch2_6); jMatch2_6.setBounds(770, 60, 12, 100); jMatch3_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_6); jMatch3_6.setBounds(880, 60, 12, 100); jMatch4_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_6); jMatch4_6.setBounds(780, 160, 100, 12); jMatch5_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_6); jMatch5_6.setBounds(770, 170, 12, 100); jMatch6_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_6); jMatch6_6.setBounds(880, 170, 12, 100); jMatch7_6.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_6); jMatch7_6.setBounds(780, 270, 100, 12); jSpace1_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_7); jSpace1_7.setBounds(1020, 50, 100, 12); jSpace2_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_7); jSpace2_7.setBounds(1010, 60, 12, 100); jSpace3_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_7); jSpace3_7.setBounds(1120, 60, 12, 100); jSpace4_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_7); jSpace4_7.setBounds(1020, 160, 100, 12); jSpace5_7.setIcon(new 
javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_7); jSpace5_7.setBounds(1010, 170, 12, 100); jSpace6_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_7); jSpace6_7.setBounds(1120, 170, 12, 100); jSpace7_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_7); jSpace7_7.setBounds(1020, 270, 100, 12); jMatch1_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_7); jMatch1_7.setBounds(1020, 50, 100, 12); jMatch2_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch2_7); jMatch2_7.setBounds(1010, 60, 12, 100); jMatch3_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_7); jMatch3_7.setBounds(1120, 60, 12, 100); jMatch4_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_7); jMatch4_7.setBounds(1020, 160, 100, 12); jMatch5_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_7); jMatch5_7.setBounds(1010, 170, 12, 100); jMatch6_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_7); jMatch6_7.setBounds(1120, 170, 12, 100); jMatch7_7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_7); jMatch7_7.setBounds(1020, 270, 100, 12); jSpace1_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_8); jSpace1_8.setBounds(1150, 50, 100, 12); jSpace2_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_8); jSpace2_8.setBounds(1140, 60, 12, 100); jSpace3_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_8); jSpace3_8.setBounds(1250, 60, 12, 100); jSpace4_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_8); jSpace4_8.setBounds(1150, 160, 100, 12); jSpace5_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_8); jSpace5_8.setBounds(1140, 170, 12, 100); jSpace6_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_8); jSpace6_8.setBounds(1250, 170, 12, 100); jSpace7_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_8); jSpace7_8.setBounds(1150, 270, 100, 12); jMatch1_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_8); jMatch1_8.setBounds(1150, 50, 100, 12); jMatch2_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N 
jPanel1.add(jMatch2_8); jMatch2_8.setBounds(1140, 60, 12, 100); jMatch3_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_8); jMatch3_8.setBounds(1250, 60, 12, 100); jMatch4_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_8); jMatch4_8.setBounds(1150, 160, 100, 12); jMatch5_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_8); jMatch5_8.setBounds(1140, 170, 12, 100); jMatch6_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_8); jMatch6_8.setBounds(1250, 170, 12, 100); jMatch7_8.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_8); jMatch7_8.setBounds(1150, 270, 100, 12); jSpace1_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace1_9); jSpace1_9.setBounds(1280, 50, 100, 12); jSpace2_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace2_9); jSpace2_9.setBounds(1270, 60, 12, 100); jSpace3_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace3_9); jSpace3_9.setBounds(1380, 60, 12, 100); jSpace4_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace4_9); jSpace4_9.setBounds(1280, 160, 100, 12); jSpace5_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace5_9); jSpace5_9.setBounds(1270, 170, 12, 100); jSpace6_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_V.png"))); // NOI18N jPanel1.add(jSpace6_9); jSpace6_9.setBounds(1380, 170, 12, 100); jSpace7_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Empty_H.png"))); // NOI18N jPanel1.add(jSpace7_9); jSpace7_9.setBounds(1280, 270, 100, 12); jMatch1_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch1_9); jMatch1_9.setBounds(1280, 50, 100, 12); jMatch2_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch2_9); jMatch2_9.setBounds(1270, 60, 12, 100); jMatch3_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch3_9); jMatch3_9.setBounds(1380, 60, 12, 100); jMatch4_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch4_9); jMatch4_9.setBounds(1280, 160, 100, 12); jMatch5_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch5_9); jMatch5_9.setBounds(1270, 170, 12, 100); jMatch6_9.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Vertical.png"))); // NOI18N jPanel1.add(jMatch6_9); jMatch6_9.setBounds(1380, 170, 12, 100); jMatch7_9.setIcon(new 
javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMatch7_9); jMatch7_9.setBounds(1280, 270, 100, 12); jEaquals.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Equals.png"))); // NOI18N jPanel1.add(jEaquals); jEaquals.setBounds(900, 120, 100, 90); jPlus.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Plus.png"))); // NOI18N jPanel1.add(jPlus); jPlus.setBounds(400, 110, 100, 110); jMinus.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/Horizontal.png"))); // NOI18N jPanel1.add(jMinus); jMinus.setBounds(400, 150, 100, 30); jLabelScope.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/photo.jpg.gif"))); // NOI18N jPanel1.add(jLabelScope); jLabelScope.setBounds(400, 320, 310, 280); jButtonTest.setText("Test"); jButtonTest.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jButtonTestActionPerformed(evt); } }); jPanel1.add(jButtonTest); jButtonTest.setBounds(30, 480, 53, 23); jLabelInfo.setText("HasMatch"); jPanel1.add(jLabelInfo); jLabelInfo.setBounds(60, 330, 290, 70); jButtonCheck.setText("Check"); jButtonCheck.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jButtonCheckActionPerformed(evt); } }); jPanel1.add(jButtonCheck); jButtonCheck.setBounds(450, 450, 61, 23); jButtonReset.setText("Reset"); jButtonReset.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jButtonResetActionPerformed(evt); } }); jPanel1.add(jButtonReset); jButtonReset.setBounds(370, 450, 61, 23); jPanel1.add(jPanelPlayer); jPanelPlayer.setBounds(1230, 330, 270, 280); Rekt.setIcon(new javax.swing.ImageIcon(getClass().getResource("/de/matchbox/client/Resources/R3KT.gif"))); // NOI18N jPanel1.add(Rekt); Rekt.setBounds(730, 420, 500, 170); jLabelBackground.setBackground(new java.awt.Color(204, 204, 204)); jPanel1.add(jLabelBackground); jLabelBackground.setBounds(0, -6, 1500, 620); getContentPane().add(jPanel1); jPanel1.setBounds(0, 0, 1500, 620); mnuFile.setText("File"); mnuQuit.setText("Exit"); mnuQuit.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { mnuQuitActionPerformed(evt); } }); mnuFile.add(mnuQuit); mnuMain.add(mnuFile); mnuCon.setText("Connection"); jMenuItem1.setText("Leave the Room"); jMenuItem1.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jMenuItem1ActionPerformed(evt); } }); mnuCon.add(jMenuItem1); mnuMain.add(mnuCon); setJMenuBar(mnuMain); pack(); }// </editor-fold>//GEN-END:initComponents private void mnuQuitActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_mnuQuitActionPerformed {//GEN-HEADEREND:event_mnuQuitActionPerformed System.exit(0); }//GEN-LAST:event_mnuQuitActionPerformed private void jButtonTestActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonTestActionPerformed this.hasMatch = 0; }//GEN-LAST:event_jButtonTestActionPerformed private void jButtonCheckActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_jButtonCheckActionPerformed {//GEN-HEADEREND:event_jButtonCheckActionPerformed if (hasMatch == 1) { jLabelInfo.setText("Place your match before checking"); } else if (this.areNummbers()) {
roomFormModel.send(new MessageObject(MessageType.ROOM_CMD, new RoomCommandContentObject(RoomCommand.CHECK_EQUASION, new EquasionContentObject(MatchUtility.matchToEquation(this.convertToList()))))); } else { jLabelInfo.setText("Not all characters are numbers"); } }//GEN-LAST:event_jButtonCheckActionPerformed private void jButtonResetActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonResetActionPerformed this.setMatches(gleichung); hasMatch = 0; jLabelInfo.setText("Has Match false"); }//GEN-LAST:event_jButtonResetActionPerformed private void jMenuItem1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItem1ActionPerformed roomFormModel.send(new MessageObject(MessageType.ROOM_CMD, new RoomCommandContentObject(RoomCommand.LEAVE_ROOM))); }//GEN-LAST:event_jMenuItem1ActionPerformed public void callTheSolver(String pPlayerName) { JOptionPane.showMessageDialog(null, "Equation solved by " + pPlayerName + ". Try the next one", "Too Slow", JOptionPane.INFORMATION_MESSAGE); } public void callTheWinner(String pWinner) { JOptionPane.showMessageDialog(null, "Player " + pWinner + " Won the Game", "Too Slow", JOptionPane.INFORMATION_MESSAGE); } private List convertToList() { List ausgabe = new List(); boolean[] zahlCode; Zahl pZahl = new Zahl(); for (int i = 1; i < 10; i++) { zahlCode = new boolean[]{ !jSpaceArr[1][i].isVisible(), !jSpaceArr[2][i].isVisible(), !jSpaceArr[3][i].isVisible(), !jSpaceArr[4][i].isVisible(), !jSpaceArr[5][i].isVisible(), !jSpaceArr[6][i].isVisible(), !jSpaceArr[7][i].isVisible() }; pZahl = new Zahl(); pZahl.setZahlCode(zahlCode); if (this.isBlank(i)) { ausgabe.append('*'); } else { ausgabe.append(pZahl); } if (i == 3) { if (jPlus.isVisible()) { ausgabe.append('+'); } else { ausgabe.append('-'); } } else if (i == 6) { ausgabe.append('='); } } return ausgabe; } private boolean isBlank(int index) { for (int i = 1; i < 8; i++) { if (!jSpaceArr[i][index].isVisible()) { return false; } } return true; } private boolean areNummbers() { boolean[] zahlCode; Zahl pZahl = new Zahl(); for (int i = 1; i < 10; i++) { zahlCode = new boolean[]{ !jSpaceArr[1][i].isVisible(), !jSpaceArr[2][i].isVisible(), !jSpaceArr[3][i].isVisible(), !jSpaceArr[4][i].isVisible(), !jSpaceArr[5][i].isVisible(), !jSpaceArr[6][i].isVisible(), !jSpaceArr[7][i].isVisible() }; pZahl = new Zahl(); pZahl.setZahlCode(zahlCode); if (!pZahl.isNumber()) { if (!isBlank(i)) { return false; } } } return true; } public void setPlayerList(IRoomCommandContentObject pRoomCommandContentObject) { if (!(pRoomCommandContentObject instanceof ListPlayerContentObject)) { return; } List lList = ((ListPlayerContentObject) pRoomCommandContentObject).getPlayer(); Box lPlayerBox = Box.createVerticalBox(); lList.toFirst(); while (lList.hasAccess()) { PlayerControl lPlayerControl = new PlayerControl((PlayerModel) lList.getObject()); lPlayerControl.setAlignmentX(0.5F); lPlayerControl.setMaximumSize(new Dimension(210, 35)); lPlayerBox.add(lPlayerControl); lList.next(); } this.jPanelPlayer.removeAll(); this.jPanelPlayer.setLayout(new GridLayout(1, 1)); this.jPanelPlayer.add(lPlayerBox); this.jPanelPlayer.updateUI(); } public void setEquasion(RoomCommandContentObject pCommandObject) { this.gleichung = ((EquasionContentObject) (pCommandObject.getContentObject())).getEquasion(); this.setMatches(gleichung); } public void onCheckedEquasion(RoomCommandContentObject pCommandObject) { if (((CheckEquasionResultContentObject) pCommandObject.getContentObject()).isEquasionCorrect()) { jLabelInfo.setText("Well Done! 
Correct"); this.Rekt.setVisible(true); this.jLabelScope.setVisible(true); timer.start(); } else { jLabelInfo.setText("Sorry, try again"); } } private void createArr() { //Spaces int x = 1; jSpaceArr[1][x] = jSpace1_1; jSpaceArr[2][x] = jSpace2_1; jSpaceArr[3][x] = jSpace3_1; jSpaceArr[4][x] = jSpace4_1; jSpaceArr[5][x] = jSpace5_1; jSpaceArr[6][x] = jSpace6_1; jSpaceArr[7][x] = jSpace7_1;
package elki.math.geometry; import java.util.ArrayList; import java.util.List; import elki.data.spatial.Polygon; import elki.math.geometry.SweepHullDelaunay2D.Triangle; import elki.utilities.datastructures.BitsUtil; import elki.utilities.datastructures.arraylike.IntegerArray; import elki.utilities.documentation.Reference; /** * Compute the alpha-shape of a point set, using Delaunay triangulation. * <p> * Reference: * <p> * H. Edelsbrunner, D. G. Kirkpatrick, R. Seidel<br> * On the shape of a set of points in the plane<br> * IEEE Trans. Inf. Theory 29(4) * * @author Erich Schubert * @since 0.5.0 * * @assoc - - - SweepHullDelaunay2D * @has - - - Polygon */ @Reference(authors = "H. Edelsbrunner, D. G. Kirkpatrick, R. Seidel", title = "On the shape of a set of points in the plane", booktitle = "IEEE Trans. Inf. Theory 29(4)", url = "https://doi.org/10.1109/TIT.1983.1056714", // bibkey = "DBLP:journals/tit/EdelsbrunnerKS83") public class AlphaShape { /** * Alpha shape */ private double alpha2; /** * Points */ private List<double[]> points; /** * Delaunay triangulation */ private ArrayList<SweepHullDelaunay2D.Triangle> delaunay = null; /** * Constructor. * * @param points point set * @param alpha alpha parameter */ public AlphaShape(List<double[]> points, double alpha) { this.alpha2 = alpha * alpha; this.points = points; } /** * Compute the alpha shape. * * @return polygons */ public List<Polygon> compute() { // Compute delaunay triangulation: delaunay = (new SweepHullDelaunay2D(points)).getDelaunay(); List<Polygon> polys = new ArrayList<>(); List<IntegerArray> open = new ArrayList<>(); // Working data long[] visited = BitsUtil.zero(delaunay.size()); IntegerArray stack = new IntegerArray(); // Find an unprocessed triangle to start with: for(int i = 0; i < delaunay.size() && i >= 0; i = BitsUtil.nextClearBit(visited, i + 1)) { assert !BitsUtil.get(visited, i); BitsUtil.setI(visited, i); SweepHullDelaunay2D.Triangle tri = delaunay.get(i); if(tri.r2 <= alpha2) { // Check neighbors assert stack.size == 0; // Initial stack fill: stack.add(i); stack.add(tri.ca); stack.add(tri.c); stack.add(tri.a); stack.add(i); stack.add(tri.bc); stack.add(tri.b); stack.add(tri.c); stack.add(i); stack.add(tri.ab); stack.add(tri.a); stack.add(tri.b); checkNeighbors(open, visited, stack); } for(IntegerArray po : open) { List<double[]> cur = new ArrayList<>(po.size); for(int j = 0; j < po.size; j++) { cur.add(points.get(po.data[j])); } polys.add(new Polygon(cur)); } open.clear(); } return polys; } private void checkNeighbors(List<IntegerArray> open, long[] visited, IntegerArray stack) { assert stack.size == 12; while(!stack.isEmpty()) { assert stack.size >= 4; // pop 4 values from the int stack int cur = stack.data[stack.size - 4]; int ab = stack.data[stack.size - 3]; int a = stack.data[stack.size - 2]; int b = stack.data[stack.size - 1]; stack.size -= 4; if(ab < 0) { // Nonexistant neighbor addEdge(open, a, b); continue; } final Triangle next = delaunay.get(ab); if(BitsUtil.get(visited, ab)) { // We already discarded the neighbor polygon, but we still get an edge. 
if(next.r2 > alpha2) { addEdge(open, a, b); } continue; } BitsUtil.setI(visited, ab); if(next.r2 <= alpha2) { // Walk 'around' the next triangle if(next.ab == cur) { assert next.b == a && next.a == b; stack.add(ab); stack.add(next.ca); stack.add(next.c); stack.add(b); // other stack.add(ab); stack.add(next.bc); stack.add(a); stack.add(next.c); } else if(next.bc == cur) { assert next.c == a && next.b == b; stack.add(ab); stack.add(next.ab); stack.add(next.a); stack.add(b); // other stack.add(ab); stack.add(next.ca); stack.add(a); stack.add(next.a); } else /* if(next.ca == cur) */ { assert next.ca == cur; assert next.a == a && next.c == b; stack.add(ab); stack.add(next.bc); stack.add(next.b); stack.add(b); // other stack.add(ab); stack.add(next.ab); stack.add(a); stack.add(next.b); } continue; } addEdge(open, a, b); } } /** * Add an edge to the corresponding polygon. This handles holes. * * @param open List of open polygons * @param a previous point * @param b next point */ private void addEdge(List<IntegerArray> open, int a, int b) { for(IntegerArray cur : open) { if(cur.data[cur.size - 1] == a) { cur.add(b); return; } } IntegerArray cur = new IntegerArray(); cur.add(b); open.add(cur); } }
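// ---------------------------------------------------------------------------
// Usage sketch for AlphaShape (an illustrative addition, not part of the ELKI
// sources above). It relies only on the constructor and compute() shown in the
// class: points are (x, y) pairs, and triangles whose circumradius is at most
// the given alpha are kept when the outline polygons are traced. The package
// name, point coordinates and alpha value are arbitrary example choices.
// ---------------------------------------------------------------------------
package elki.math.geometry.examples;

import java.util.ArrayList;
import java.util.List;

import elki.data.spatial.Polygon;
import elki.math.geometry.AlphaShape;

public class AlphaShapeUsageSketch {
  public static void main(String[] args) {
    // A small point cloud; each entry is an (x, y) coordinate pair.
    List<double[]> points = new ArrayList<>();
    points.add(new double[] { 0.0, 0.0 });
    points.add(new double[] { 1.0, 0.0 });
    points.add(new double[] { 1.0, 1.0 });
    points.add(new double[] { 0.0, 1.0 });
    points.add(new double[] { 0.5, 0.5 });
    points.add(new double[] { 2.0, 0.5 });

    // alpha = 1.5: only triangles with circumradius <= 1.5 count as "inside".
    List<Polygon> shape = new AlphaShape(points, 1.5).compute();
    System.out.println("Alpha shape consists of " + shape.size() + " polygon(s)");
  }
}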
package sk.henrichg.phoneprofilesplus; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.provider.Settings; public class TimeChangedReceiver extends BroadcastReceiver { public TimeChangedReceiver() { } @Override public void onReceive(Context context, Intent intent) { if ((intent != null) && (intent.getAction() != null)) { String action = intent.getAction(); if (action.equals(Intent.ACTION_TIMEZONE_CHANGED) || action.equals(Intent.ACTION_TIME_CHANGED)) { CallsCounter.logCounter(context, "TimeChangedReceiver.onReceive", "TimeChangedReceiver_onReceive"); Context appContext = context.getApplicationContext(); if (!PPApplication.getApplicationStarted(appContext, true)) return; boolean timeChanged = true; if (action.equals(Intent.ACTION_TIME_CHANGED)) { timeChanged = false; String isAutoTime = Settings.Global.getString(appContext.getContentResolver(), Settings.Global.AUTO_TIME); if ("0".equals(isAutoTime)) { timeChanged = true; } } if (timeChanged) { if ((android.os.Build.VERSION.SDK_INT >= 21) && ApplicationPreferences.applicationUseAlarmClock(context)) { ProfileDurationAlarmBroadcastReceiver.removeAlarm(context); Profile.setActivatedProfileForDuration(context, 0); } SearchCalendarEventsJob.scheduleJob(/*appContext, */true, null, true); DataWrapper dataWrapper = new DataWrapper(appContext, false, 0); dataWrapper.clearSensorsStartTime(); dataWrapper.restartEvents(false, true/*, false*/, false, true); } } } } }
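// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original PhoneProfilesPlus sources: the
// app presumably registers TimeChangedReceiver in its AndroidManifest.xml, but
// the same receiver can also be attached at runtime with a standard
// IntentFilter, as shown below. The helper class and method name are made up
// for this example.
// ---------------------------------------------------------------------------
package sk.henrichg.phoneprofilesplus;

import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;

class TimeChangedReceiverRegistrationSketch {
    static void register(Context context) {
        IntentFilter filter = new IntentFilter();
        filter.addAction(Intent.ACTION_TIME_CHANGED);     // wall clock changed
        filter.addAction(Intent.ACTION_TIMEZONE_CHANGED); // time zone changed
        context.registerReceiver(new TimeChangedReceiver(), filter);
    }
}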
package twitter4j; import twitter4j.internal.http.HttpParameter; import java.util.ArrayList; import java.util.List; /** * @author Yusuke Yamamoto - yusuke at mac.com * @since Twitter4J 2.1.1 */ public final class StatusUpdate implements java.io.Serializable { private String status; private long inReplyToStatusId = -1l; private GeoLocation location = null; private String placeId = null; private boolean displayCoordinates = true; private static final long serialVersionUID = -3595502688477609916L; public StatusUpdate(String status){ this.status = status; } public String getStatus() { return status; } public long getInReplyToStatusId() { return inReplyToStatusId; } public void setInReplyToStatusId(long inReplyToStatusId) { this.inReplyToStatusId = inReplyToStatusId; } public StatusUpdate inReplyToStatusId(long inReplyToStatusId) { setInReplyToStatusId(inReplyToStatusId); return this; } public GeoLocation getLocation() { return location; } public void setLocation(GeoLocation location) { this.location = location; } public StatusUpdate location(GeoLocation location) { setLocation(location); return this; } public String getPlaceId() { return placeId; } public void setPlaceId(String placeId) { this.placeId = placeId; } public StatusUpdate placeId(String placeId) { setPlaceId(placeId); return this; } public boolean isDisplayCoordinates() { return displayCoordinates; } public void setDisplayCoordinates(boolean displayCoordinates) { this.displayCoordinates = displayCoordinates; } public StatusUpdate displayCoordinates(boolean displayCoordinates) { setDisplayCoordinates(displayCoordinates); return this; } /*package*/ HttpParameter[] asHttpParameterArray(){ ArrayList<HttpParameter> params = new ArrayList<HttpParameter>(); appendParameter("status", status, params); if(-1 != inReplyToStatusId){ appendParameter("in_reply_to_status_id", inReplyToStatusId, params); } if(null != location){ appendParameter("lat", location.getLatitude(), params); appendParameter("long", location.getLongitude(), params); } appendParameter("place_id", placeId, params); if (!displayCoordinates) { appendParameter("display_coordinates", "false", params); } HttpParameter[] paramArray = new HttpParameter[params.size()]; return params.toArray(paramArray); } private void appendParameter(String name, String value, List<HttpParameter> params) { if (null != value) { params.add(new HttpParameter(name, value)); } } private void appendParameter(String name, double value, List<HttpParameter> params) { params.add(new HttpParameter(name, String.valueOf(value))); } private void appendParameter(String name, long value, List<HttpParameter> params) { params.add(new HttpParameter(name, String.valueOf(value))); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; StatusUpdate that = (StatusUpdate) o; if (displayCoordinates != that.displayCoordinates) return false; if (inReplyToStatusId != that.inReplyToStatusId) return false; if (location != null ? !location.equals(that.location) : that.location != null) return false; if (placeId != null ? !placeId.equals(that.placeId) : that.placeId != null) return false; if (!status.equals(that.status)) return false; return true; } @Override public int hashCode() { int result = status.hashCode(); result = 31 * result + (int) (inReplyToStatusId ^ (inReplyToStatusId >>> 32)); result = 31 * result + (location != null ? location.hashCode() : 0); result = 31 * result + (placeId != null ? 
placeId.hashCode() : 0); result = 31 * result + (displayCoordinates ? 1 : 0); return result; } @Override public String toString() { return "StatusUpdate{" + "status='" + status + '\'' + ", inReplyToStatusId=" + inReplyToStatusId + ", location=" + location + ", placeId='" + placeId + '\'' + ", displayCoordinates=" + displayCoordinates + '}'; } }
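// ---------------------------------------------------------------------------
// Usage sketch for StatusUpdate (an illustrative addition, not part of the
// Twitter4J sources above). It only exercises the fluent setters defined in
// the class; the status id, coordinates and place id are made-up values, and
// actually posting the update would require an authenticated Twitter instance
// that is outside the scope of this file.
// ---------------------------------------------------------------------------
package twitter4j.examples;

import twitter4j.GeoLocation;
import twitter4j.StatusUpdate;

public class StatusUpdateUsageSketch {
    public static void main(String[] args) {
        StatusUpdate update = new StatusUpdate("Hello from Twitter4J")
                .inReplyToStatusId(123456789L)                // made-up status id
                .location(new GeoLocation(35.6586, 139.7454)) // latitude, longitude
                .placeId("example-place-id")                  // made-up place id
                .displayCoordinates(false);                   // sends display_coordinates=false

        System.out.println(update);
        // twitter.updateStatus(update); // hypothetical call on an authenticated client
    }
}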
package placebooks.model; import static javax.persistence.CascadeType.ALL; import static javax.persistence.TemporalType.TIMESTAMP; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.persistence.ElementCollection; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.OneToOne; import javax.persistence.Temporal; import org.apache.log4j.Logger; import org.codehaus.jackson.annotate.JsonAutoDetect; import org.codehaus.jackson.annotate.JsonIgnore; import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility; import org.codehaus.jackson.map.annotate.JsonDeserialize; import org.codehaus.jackson.map.annotate.JsonSerialize; import org.w3c.dom.Document; import org.w3c.dom.Element; import placebooks.controller.PropertiesSingleton; import placebooks.controller.SearchHelper; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.io.WKTReader; @Entity @JsonAutoDetect(fieldVisibility = Visibility.ANY, getterVisibility = Visibility.NONE) public class PlaceBook { public enum Permission { R("r"), W("w"), R_W("r+w"); private String perms; private Permission(final String perms) { this.perms = perms; } public final String toString() { return perms; } } public enum State { UNPUBLISHED(0), PUBLISHED(1); private int value; private static final Map<Integer, State> lu = new HashMap<Integer, State>(); static { for (State s : EnumSet.allOf(State.class)) lu.put(s.getValue(), s); } private State(int value) { this.value = value; } public int getValue() { return value; } public static State get(int value) { return lu.get(value); } } protected static final Logger log = Logger.getLogger(PlaceBook.class.getName()); @JsonSerialize(using = placebooks.model.json.GeometryJSONSerializer.class) @JsonDeserialize(using = placebooks.model.json.GeometryJSONDeserializer.class) private Geometry geom; // Pertaining to the PlaceBook @Id @GeneratedValue(strategy = GenerationType.AUTO) private String id; @JsonIgnore @OneToOne(cascade = ALL, mappedBy = "placebook") private PlaceBookSearchIndex index = new PlaceBookSearchIndex(); // TODO: Cascading deletes: not sure about this @OneToMany(mappedBy = "placebook", cascade = ALL) private List<PlaceBookItem> items = new ArrayList<PlaceBookItem>(); // Searchable metadata attributes, e.g., title, description, etc. 
@ElementCollection private Map<String, String> metadata = new HashMap<String, String>(); @ManyToOne private User owner; @Temporal(TIMESTAMP) private Date timestamp; private State state = State.UNPUBLISHED; @JsonIgnore private Map<User, Permission> perms = new HashMap<User, Permission>(); public PlaceBook() { index.setPlaceBook(this); } // Copy constructor public PlaceBook(final PlaceBook p) { this.owner = p.getOwner(); if (this.owner != null) this.owner.add(this); if(p.getGeometry() != null) { this.geom = (Geometry)p.getGeometry().clone(); } else { this.geom = null; } this.timestamp = (Date)p.getTimestamp().clone(); this.metadata = new HashMap<String, String>(p.getMetadata()); index.setPlaceBook(this); this.index.addAll(p.getSearchIndex().getIndex()); for (PlaceBookItem item : p.getItems()) { this.addItem(item.deepCopy()); } log.info("Created copy of PlaceBook; old key = " + p.getKey()); } // Make a new PlaceBook public PlaceBook(final User owner, final Geometry geom) { this(); this.state = State.UNPUBLISHED; this.owner = owner; if (owner != null) { this.owner.add(this); } this.geom = geom; this.timestamp = new Date(); log.info("Created new PlaceBook: timestamp=" + this.timestamp.toString()); } public PlaceBook(final User owner, final Geometry geom, final List<PlaceBookItem> items) { this(owner, geom); setItems(items); } public void addItem(final PlaceBookItem item) { items.add(item); item.setPlaceBook(this); } public void addMetadataEntry(final String key, final String value) { if (value == null) { metadata.remove(key); } else { metadata.put(key, value); } } public void addMetadataEntryIndexed(final String key, final String value) { addMetadataEntry(key, value); index.addAll(SearchHelper.getIndex(value)); } public void calcBoundary() { Geometry bounds = null; float minLat = Float.POSITIVE_INFINITY; float maxLat = Float.NEGATIVE_INFINITY; float minLon = Float.POSITIVE_INFINITY; float maxLon = Float.NEGATIVE_INFINITY; boolean emptySet = false; for (PlaceBookItem item : getItems()) { final Geometry g = item.getGeometry(); if (g != null) { // A Geometry with no dimensions has to be handled log.info("Including item " + item.getClass().getSimpleName() + ":" + item.getKey() + " = " + g.toText()); if (g.getBoundary().isEmpty()) { Coordinate[] cs = g.getCoordinates(); for (Coordinate c : cs) { minLat = Math.min(minLat, (float)c.x); maxLat = Math.max(maxLat, (float)c.x); minLon = Math.min(minLon, (float)c.y); maxLon = Math.max(maxLon, (float)c.y); emptySet = true; } } else { if (bounds != null) bounds = g.union(bounds); else bounds = g; } } } if (emptySet) { try { Geometry empty = new WKTReader().read( "POLYGON ((" + minLat + " " + minLon + ", " + minLat + " " + maxLon + ", " + maxLat + " " + maxLon + ", " + maxLat + " " + minLon + ", " + minLat + " " + minLon + "))"); log.info("empty=" + empty); if (bounds != null) bounds = empty.union(bounds); else bounds = empty; } catch (final Throwable e) { log.error(e.toString()); } } if (bounds != null) { geom = bounds.getBoundary(); } else { geom = null; } log.info("calcBoundary()= " + geom); } public Element createConfigurationRoot(final Document config) { log.info("PlaceBook.appendConfiguration(), key=" + this.getKey()); final Element root = config.createElement(PlaceBook.class.getName()); root.setAttribute("key", this.getKey()); root.setAttribute("owner", this.getOwner().getKey()); if (getTimestamp() != null) { log.info("Setting timestamp=" + this.getTimestamp().toString()); final Element timestamp = config.createElement("timestamp"); timestamp.appendChild(
config.createTextNode(this.getTimestamp().toString()) ); root.appendChild(timestamp); } if (getGeometry() != null) { log.info("Setting geometry=" + this.getGeometry().toText()); final Element geometry = config.createElement("geometry"); geometry.appendChild( config.createTextNode(this.getGeometry().toText()) ); root.appendChild(geometry); } if (!metadata.isEmpty()) { log.info("Writing metadata to config"); final Element sElem = config.createElement("metadata"); log.info("metadata set size = " + metadata.size()); for (final Map.Entry<String, String> e : metadata.entrySet()) { log.info("Metadata element key, value=" + e.getKey().toString() + ", " + e.getValue().toString()); final Element elem = config.createElement(e.getKey().toString()); elem.appendChild(config.createTextNode( e.getValue().toString()) ); sElem.appendChild(elem); } root.appendChild(sElem); } return root; } public final Permission getPermission(final User user) { return perms.get(user); } public Geometry getGeometry() { return geom; } public List<PlaceBookItem> getItems() { return Collections.unmodifiableList(items); } public String getKey() { return id; } public Map<String, String> getMetadata() { return Collections.unmodifiableMap(metadata); } public String getMetadataValue(final String key) { return metadata.get(key); } public User getOwner() { return owner; } public String getPackagePath() { return PropertiesSingleton .get(this.getClass().getClassLoader()) .getProperty(PropertiesSingleton.IDEN_PKG, "") + "/" + getKey(); } public State getState() { return state; } public Date getTimestamp() { return timestamp; } public boolean hasMetadata() { return (!metadata.isEmpty()); } // Bit of a dirty hack public boolean hasPlaceBookItemClass(final Class<?> clazz) { for (PlaceBookItem pbi : getItems()) { if (pbi.getClass().getName().equals(clazz.getName())) return true; } return false; } public boolean removeItem(final PlaceBookItem item) { item.setPlaceBook(null); return items.remove(item); } public void setGeometry(final Geometry geom) { this.geom = geom; } public void setItems(final List<PlaceBookItem> items) { this.items.clear(); this.items.addAll(items); } public void setOwner(final User owner) { this.owner = owner; } public void setState(State state) { this.state = state; } public void setTimestamp(final Date timestamp) { this.timestamp = timestamp; } public final PlaceBookSearchIndex getSearchIndex() { return index; } }
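// ---------------------------------------------------------------------------
// Usage sketch for PlaceBook (an illustrative addition, not part of the
// placebooks sources above). It uses only the public constructors and
// accessors shown in the class. The WKT point is arbitrary, and the owner is
// deliberately left null so the sketch stays self-contained; the real
// application would supply a persisted User.
// ---------------------------------------------------------------------------
package placebooks.model.examples;

import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.WKTReader;

import placebooks.model.PlaceBook;

public class PlaceBookUsageSketch {
    public static void main(String[] args) throws Exception {
        // Parse a simple point with the same JTS reader used in calcBoundary().
        Geometry geom = new WKTReader().read("POINT (52.95 -1.18)");

        PlaceBook placeBook = new PlaceBook(null, geom);
        placeBook.addMetadataEntry("title", "A walk around the park");
        placeBook.addMetadataEntry("description", "Example placebook");

        System.out.println("state    = " + placeBook.getState());
        System.out.println("title    = " + placeBook.getMetadataValue("title"));
        System.out.println("geometry = " + placeBook.getGeometry().toText());
    }
}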
package org.guppy4j.reflect; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.util.function.Supplier; /** * Invocation handler that remembers the last invoked method */ public final class MethodCapturer implements InvocationHandler, Supplier<Method> { private Method method; @Override public Object invoke(Object proxy, Method method, Object[] args) { this.method = method; final Class<?> returnType = method.getReturnType(); if (void.class == returnType) { return null; } else { return DefaultValue.forType(returnType); } } @Override public Method get() { return method; } }
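// ---------------------------------------------------------------------------
// Usage sketch for MethodCapturer (an illustrative addition, not part of the
// guppy4j sources above). Because MethodCapturer is an InvocationHandler, it
// is naturally paired with a JDK dynamic proxy: invoking any method on the
// proxy records that Method, which can then be read back via get(). The
// Greeter interface below is purely illustrative.
// ---------------------------------------------------------------------------
package org.guppy4j.reflect.examples;

import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

import org.guppy4j.reflect.MethodCapturer;

public class MethodCapturerUsageSketch {

    // Illustrative interface, not part of guppy4j.
    interface Greeter {
        String greet(String name);
    }

    public static void main(String[] args) {
        MethodCapturer capturer = new MethodCapturer();
        Greeter proxy = (Greeter) Proxy.newProxyInstance(
                Greeter.class.getClassLoader(),
                new Class<?>[] { Greeter.class },
                capturer);

        // The call is intercepted; a default value for the return type is returned.
        proxy.greet("world");

        Method captured = capturer.get();
        System.out.println("captured method: " + captured.getName()); // prints "greet"
    }
}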
package org.xwiki.extension.repository.xwiki.internal; import java.util.Arrays; import java.util.List; import javax.inject.Inject; import javax.inject.Named; import javax.inject.Singleton; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.xwiki.bridge.event.DocumentCreatedEvent; import org.xwiki.bridge.event.DocumentUpdatedEvent; import org.xwiki.component.annotation.Component; import org.xwiki.extension.internal.VersionManager; import org.xwiki.model.reference.AttachmentReference; import org.xwiki.model.reference.AttachmentReferenceResolver; import org.xwiki.model.reference.DocumentReference; import org.xwiki.model.reference.DocumentReferenceResolver; import org.xwiki.model.reference.EntityReference; import org.xwiki.observation.EventListener; import org.xwiki.observation.event.Event; import org.xwiki.rendering.listener.reference.ResourceReference; import org.xwiki.rendering.listener.reference.ResourceType; import org.xwiki.rendering.parser.ResourceReferenceParser; import com.xpn.xwiki.XWikiContext; import com.xpn.xwiki.XWikiException; import com.xpn.xwiki.doc.XWikiDocument; import com.xpn.xwiki.objects.BaseObject; @Component("ExtensionUpdaterListener") @Singleton public class ExtensionUpdaterListener implements EventListener { /** * Listened events. */ private static final List<Event> EVENTS = Arrays.<Event> asList(new DocumentCreatedEvent(), new DocumentUpdatedEvent()); /** * Used to find last version. */ @Inject private VersionManager versionManager; /** * Get the reference of the class in the current wiki. */ @Inject @Named("default/reference") private DocumentReferenceResolver<EntityReference> referenceResolver; /** * The logger to log. */ @Inject private Logger logger; /** * Used to validate download reference. */ @Inject private ResourceReferenceParser resourceReferenceParser; /** * Used to validate download reference. 
*/ @Inject private AttachmentReferenceResolver<String> attachmentResolver; @Override public List<Event> getEvents() { return EVENTS; } @Override public String getName() { return "ExtensionUpdaterListener"; } @Override public void onEvent(Event event, Object source, Object data) { XWikiDocument document = (XWikiDocument) source; XWikiContext context = (XWikiContext) data; // TODO: improve this to do only one save BaseObject extensionObject = document.getXObject(XWikiRepositoryModel.EXTENSION_CLASSREFERENCE); if (extensionObject != null) { XWikiDocument modifiedDocument = updateLastVersion(document, extensionObject, context); validateExtension(modifiedDocument, extensionObject, context); } } private void validateExtension(XWikiDocument document, BaseObject extensionObject, XWikiContext context) { String extensionId = extensionObject.getStringValue(XWikiRepositoryModel.PROP_EXTENSION_ID); boolean valid = !StringUtils.isBlank(extensionId); if (valid) { int nbVersions = 0; List<BaseObject> extensionVersions = document.getXObjects(XWikiRepositoryModel.EXTENSIONVERSION_CLASSREFERENCE); if (extensionVersions != null) { for (BaseObject extensionVersionObject : extensionVersions) { if (extensionVersionObject != null) { // Has a version String extensionVersion = extensionVersionObject.getStringValue(XWikiRepositoryModel.PROP_VERSION_VERSION); if (StringUtils.isBlank(extensionVersion)) { valid = false; break; } // The download reference seems ok String download = extensionVersionObject.getStringValue(XWikiRepositoryModel.PROP_VERSION_DOWNLOAD); if (StringUtils.isNotEmpty(download)) { ResourceReference resourceReference = this.resourceReferenceParser.parse(download); if (ResourceType.ATTACHMENT.equals(resourceReference.getType())) { AttachmentReference attachmentReference = this.attachmentResolver.resolve(resourceReference.getReference(), document.getDocumentReference()); XWikiDocument attachmentDocument; try { attachmentDocument = context.getWiki().getDocument(attachmentReference.getDocumentReference(), context); valid = attachmentDocument.getAttachment(attachmentReference.getName()) != null; } catch (XWikiException e) { valid = false; } } else if (ResourceType.URL.equals(resourceReference.getType())) { valid = true; } else { valid = false; } } else { valid = document.getAttachment(extensionId + "-" + extensionVersion + "." + extensionObject.getStringValue(XWikiRepositoryModel.PROP_EXTENSION_TYPE)) != null; } ++nbVersions; } if (!valid) { break; } } } valid &= nbVersions > 0; } int currentValue = extensionObject.getIntValue(XWikiRepositoryModel.PROP_EXTENSION_VALIDEXTENSION, 0); if ((currentValue == 1) != valid) { try { // FIXME: We can't save directly the provided document coming from the event document = context.getWiki().getDocument(document, context); extensionObject = document.getXObject(XWikiRepositoryModel.EXTENSION_CLASSREFERENCE); extensionObject.setIntValue(XWikiRepositoryModel.PROP_EXTENSION_VALIDEXTENSION, valid ? 
1 : 0); context.getWiki().saveDocument(document, "Validated extension", true, context); } catch (XWikiException e) { this.logger.error("Failed to validate extension [{}]", document, e); } } } private XWikiDocument updateLastVersion(XWikiDocument document, BaseObject extensionObject, XWikiContext context) { String lastVersion = findLastVersion(document); if (lastVersion != null && !StringUtils.equals(lastVersion, extensionObject.getStringValue(XWikiRepositoryModel.PROP_EXTENSION_LASTVERSION))) { try { // FIXME: We can't save directly the provided document coming from the event document = context.getWiki().getDocument(document, context); extensionObject = document.getXObject(extensionObject.getReference()); extensionObject.setStringValue(XWikiRepositoryModel.PROP_EXTENSION_LASTVERSION, lastVersion); context.getWiki().saveDocument(document, "Update extension last version", context); } catch (XWikiException e) { this.logger.error("Failed to update extension [{}] last version", document, e); } } return document; } private DocumentReference getClassReference(XWikiDocument document, EntityReference localReference) { return this.referenceResolver.resolve(localReference, document.getDocumentReference().getWikiReference()); } /** * Compare all version located in a document to find the last one. * * @param document the extension document * @return the last version */ private String findLastVersion(XWikiDocument document) { DocumentReference versionClassReference = getClassReference(document, XWikiRepositoryModel.EXTENSIONVERSION_CLASSREFERENCE); List<BaseObject> versionObjects = document.getXObjects(versionClassReference); String lastVersion = null; if (versionObjects != null) { for (BaseObject versionObject : versionObjects) { String version = versionObject.getStringValue(XWikiRepositoryModel.PROP_VERSION_VERSION); if (version != null) { if (lastVersion == null || this.versionManager.compareVersions(version, lastVersion) > 0) { lastVersion = version; } } } } return lastVersion; } }
package org.biojava.bio.gui.sequence; import java.util.*; import java.awt.*; import java.awt.event.*; import java.awt.geom.*; import org.biojava.utils.*; import org.biojava.bio.*; import org.biojava.bio.symbol.*; import org.biojava.bio.seq.*; import org.biojava.bio.gui.*; import org.biojava.bio.seq.impl.*; import org.biojava.bio.seq.genomic.*; import java.util.List; /** * A feature renderer that computes the data necessary to render * multi-exon transcripts without CDS data. * <P> * The actual drawing is done by a child renderer. In this case, * SixFrameRenderer is used, which can use data from this renderer * to display transcripts in the correct translation frames. * * @author David Huen */ public class SixFrameZiggyRenderer extends AbstractChangeable implements FeatureRenderer, java.io.Serializable { private SixFrameRenderer pane; public SixFrameZiggyRenderer(SixFrameRenderer pane) { this.pane = pane; } public void setFill(Paint p) throws ChangeVetoException { pane.setFill(p); } public Paint getFill() { return pane.getFill(); } public void setOutline(Paint p) throws ChangeVetoException { pane.setOutline(p); } public Paint getOutline() { return pane.getOutline(); } public void setBlockDepth(double depth) throws ChangeVetoException { pane.setBlockWidth(depth); } public double getBlockDepth() { return pane.getBlockWidth(); } public double getDepth(SequenceRenderContext src) { return pane.getDepth(src); } private boolean isStop( Sequence seq, int base, StrandedFeature.Strand strand) { // tests whether there is a stop at given location. // the triplet is either base, +1, +2 or -1, -2 // depending on the strand searched if (strand == StrandedFeature.POSITIVE) { // search top strand // first base must be t if (seq.symbolAt(base) != DNATools.t()) return false; // second base cannot be c or t if (seq.symbolAt(base+1) == DNATools.c()) return false; if (seq.symbolAt(base+1) == DNATools.t()) return false; // if second base is g, the third must be a if (seq.symbolAt(base+1) == DNATools.g()) { if (seq.symbolAt(base+2) != DNATools.a()) return false; } else { // second base is a: third must be a or g. if (seq.symbolAt(base+2) == DNATools.c()) return false; if (seq.symbolAt(base+2) == DNATools.t()) return false; } // oh well, must be a stop, innit? return true; } else { // search bottom strand // first base must be t if (seq.symbolAt(base) != DNATools.a()) return false; // second base cannot be c or t on reverse strand if (seq.symbolAt(base-1) == DNATools.a()) return false; if (seq.symbolAt(base-1) == DNATools.g()) return false; // if second base is g, the third must be a if (seq.symbolAt(base-1) == DNATools.c()) { if (seq.symbolAt(base-2) != DNATools.t()) return false; } else { // second base is a: third must be a or g. if (seq.symbolAt(base-2) == DNATools.a()) return false; if (seq.symbolAt(base-2) == DNATools.g()) return false; } // ach! a stop! return true; } } private int findORF( Sequence seq, StrandedFeature.Strand strand) { // finds in a SymbolList the specified phase with // longest ORF and returns the phase. 
int[] lastStop = {0, 0, 0}; // int[] longestORF = {0, 0, 0}; int bestPhase = 0; int highestORFSize = 0; // scan thru' the sequence looking for stops int length = seq.length(); if (length < 4) return 0; // set limits of search int startSearch, endSearch; if (strand == StrandedFeature.POSITIVE) { startSearch = 1; endSearch = length - 2; } else { startSearch = 3; endSearch = length; } for (int i=startSearch; i <= endSearch; i++) { if (isStop(seq, i, strand)) { // stop found int phase = i%3; int currORFSize = i - lastStop[phase]; // is this a candidate for best phase? if (currORFSize > highestORFSize) { bestPhase = phase; highestORFSize= currORFSize; // longestORF[phase] = currORFSize; } lastStop[phase] = i; // System.out.println("findORF i phase, largest: " + i + " " // + phase + " " + currORFSize); } } // there is always the possibility that there are a few stops // near the beginning then no more. // The best phase will then be misdetected. // Assume closure at end of frame. for (int i=0; i < 3; i++) { int currORFSize = endSearch - lastStop[i]; if (currORFSize > highestORFSize) { bestPhase = i; highestORFSize= currORFSize; // longestORF[phase] = currORFSize; } } return bestPhase; } private Sequence assembleFusedSequence(Feature [] block, Sequence seq) { // assembles a fused sequence from component features // only assembles in the forward direction but will // sort exons as necessary. SimpleAssembly sa = new SimpleAssembly("temp", "temp"); ComponentFeature.Template cft = new ComponentFeature.Template(); cft.annotation = Annotation.EMPTY_ANNOTATION; cft.strand = StrandedFeature.POSITIVE; cft.componentSequence = seq; int last = 0; for (int j= 0; j < block.length; j++) { // fuse all "exons" irrespective of orientation. Feature thisExon = block[j]; cft.componentLocation = thisExon.getLocation(); int length = cft.componentLocation.getMax() - cft.componentLocation.getMin() + 1; cft.location = new RangeLocation(last+1, last+length); last += length; // System.out.println("assemble: " + cft.componentLocation.getMin() + " " + cft.componentLocation.getMax()); try { sa.createFeature(cft); } catch (BioException be) { throw new BioError( be, "Couldn't merge exons." ); } catch (ChangeVetoException cve) { throw new BioError( cve, "Couldn't merge exons." ); } } return sa; } public void renderFeature( Graphics2D g, Feature f, SequenceRenderContext context) { // System.out.println("SixFrameZiggyRenderer called"); if (!(f instanceof StrandedFeature)) return; // create a fused version of the transcript // this solution is ugly as hell, a botched abortion of a fix // the algorithm is hideously simple. Irrespective of the // strandedness of the transcript, a fused sequence will be // generated in the forward direction. // this "transcript" will then be searched for the longest // ORF in the correct strand and the phase of the largest ORF // returned. It really doesn't matter whether the min sequence // end is the 5' or 3' of the transcript as phase is consistent // thru' an ORF. // By just passing the best phase over to SixFrameRenderer, the // the phase of successive exons can be computed from just the // previous exon phase and the preceding intron size. //filter for only the exons FeatureFilter filt = new FeatureFilter.ByType("exon"); FeatureHolder exons = f.filter(filt, false); // sort the returned exons in ascending order // disappointment... 
int featureCount = exons.countFeatures(); Feature[] orderedExons = new Feature[featureCount]; int i=0; for (Iterator fi=exons.features(); fi.hasNext();) { orderedExons[i++] = (Feature) fi.next(); } Arrays.sort(orderedExons, new Feature.ByLocationComparator()); Sequence fused = assembleFusedSequence(orderedExons, f.getSequence()); StrandedFeature.Strand strand = ((StrandedFeature) f).getStrand(); // findORF will find the best phase within the "ORF" but that // needs to be corrected for the phase in which the ORF is // embedded into the sequence int phase = findORF(fused, strand); // System.out.println("fused length, phase, strand: " + fused.length() + " " // + phase + " " + strand); // System.out.println("sequence is :- " + fused.seqString()); // Iterate over exon child features: these are already ordered. Location loc = null; for (i = 0; i < orderedExons.length; i++) { loc = ((Feature) orderedExons[i]).getLocation(); if (i == 0) { // first exon pane.startZiggy(strand, (2 + loc.getMin() + phase)%3); pane.renderLocation(g, context, loc); // System.out.println("block value is " + loc); } else { pane.renderLocation(g, context, loc); // System.out.println("block value is " + loc); } } } public FeatureHolder processMouseEvent( FeatureHolder hits, SequenceRenderContext src, MouseEvent me ) { return hits; } }
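/*
 * A minimal standalone sketch (not from the original BioJava source) of the phase-picking idea
 * used by findORF above: scan for stop codons, remember the last stop seen in each of the three
 * reading frames, and report the frame holding the longest stop-free stretch, closing every frame
 * out at the end of the sequence. It works on a plain DNA string and only considers the forward
 * strand, so it illustrates the algorithm rather than replacing the strand-aware renderer code.
 */
class LongestOrfPhaseSketch {

    static int bestPhase(String dna) {
        String s = dna.toLowerCase();
        int[] lastStop = {0, 0, 0};
        int bestPhase = 0;
        int bestSize = 0;
        for (int i = 0; i + 2 < s.length(); i++) {
            String codon = s.substring(i, i + 3);
            if (codon.equals("taa") || codon.equals("tag") || codon.equals("tga")) {
                int phase = i % 3;
                int size = i - lastStop[phase];
                if (size > bestSize) {
                    bestSize = size;
                    bestPhase = phase;
                }
                lastStop[phase] = i;
            }
        }
        // close out each frame at the end of the sequence, as findORF does after its scan
        for (int phase = 0; phase < 3; phase++) {
            int size = s.length() - lastStop[phase];
            if (size > bestSize) {
                bestSize = size;
                bestPhase = phase;
            }
        }
        return bestPhase;
    }

    public static void main(String[] args) {
        // prints the 0-based frame with the longest stop-free run in this toy sequence
        System.out.println(bestPhase("atggctaaacgtacgtacgtacgtacgttaa"));
    }
}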
package org.mini2Dx.ui.element; import org.mini2Dx.core.controller.button.ControllerButton; import org.mini2Dx.ui.navigation.UiNavigation; import org.mini2Dx.ui.render.ActionableRenderNode; import com.badlogic.gdx.Input.Keys; /** * Common interface for {@link UiElement}s that can be navigated by keyboard or * controller */ public interface Navigatable { /** * Returns the unique id of the {@link Navigatable} * @return A non-null {@link String} that is the id */ public String getId(); /** * Triggers a navigation and returns the newly highlighted {@link ActionableRenderNode} * @param keycode The navigation {@link Keys} value * @return Null if no {@link UiNavigation} is available */ public ActionableRenderNode navigate(int keycode); /** * Returns the corresponding {@link ActionableRenderNode} mapped to a keyboard hotkey * @param keycode The {@link Keys} keycode that is the hotkey * @return Null if there is no mapping */ public ActionableRenderNode hotkey(int keycode); /** * Returns the corresponding {@link ActionableRenderNode} mapped to a {@link ControllerButton} hotkey * @param button The {@link ControllerButton} that is the hotkey * @return Null if there is no mapping */ public ActionableRenderNode hotkey(ControllerButton button); /** * Maps a {@link ControllerButton} to a {@link Actionable} * @param button The {@link ControllerButton} that is the hotkey * @param actionable The {@link Actionable} to trigger when the hotkey is pressed */ public void setHotkey(ControllerButton button, Actionable actionable); /** * Maps a keyboard button to a {@link Actionable} * @param keycode The {@link Keys} keycode that is the hotkey * @param actionable The {@link Actionable} to trigger when the key is pressed */ public void setHotkey(int keycode, Actionable actionable); /** * Unmaps a {@link ControllerButton} hotkey * @param button The {@link ControllerButton} that is the hotkey */ public void unsetHotkey(ControllerButton button); /** * Unmaps a keyboard hotkey * @param keycode The {@link Keys} keycode that is the hotkey */ public void unsetHotkey(int keycode); /** * Returns the {@link UiNavigation} currently being navigated * @return Null if no navigation is occurring */ public UiNavigation getNavigation(); }
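/*
 * A small, hypothetical sketch (not part of the original mini2Dx source) of how a key event could
 * be routed through the Navigatable contract above: try the hotkey mapping first and fall back to
 * directional navigation when no hotkey is bound. The helper class name and the hotkey-first
 * policy are assumptions for illustration; how the returned ActionableRenderNode is then
 * activated depends on APIs not shown in this interface.
 */
final class NavigatableKeyDispatchSketch {

    /** Returns the node that should be highlighted or actioned for the keycode, or null. */
    static ActionableRenderNode dispatch(Navigatable target, int keycode) {
        ActionableRenderNode hotkeyNode = target.hotkey(keycode);
        if (hotkeyNode != null) {
            return hotkeyNode;           // an explicit hotkey mapping wins when one exists
        }
        return target.navigate(keycode); // otherwise move the highlight via the UiNavigation
    }
}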
import com.fscz.util.TextJustifyUtils; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Bitmap.Config; import android.graphics.Paint.Align; import android.graphics.Canvas; import android.graphics.Paint; import android.widget.TextView; import android.os.Build; import android.util.AttributeSet; public class TextViewEx extends TextView { private Paint paint = new Paint(); private String [] blocks; private float spaceOffset = 0; private float horizontalOffset = 0; private float verticalOffset = 0; private float horizontalFontOffset = 0; private float dirtyRegionWidth = 0; private boolean wrapEnabled = false; int left,top,right,bottom=0; private Align _align=Align.LEFT; private float strecthOffset; private float wrappedEdgeSpace; private String block; private String wrappedLine; private String [] lineAsWords; private Object[] wrappedObj; private Bitmap cache = null; private boolean cacheEnabled = false; public TextViewEx(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); //set a minimum of left and right padding so that the texts are not too close to the side screen this.setPadding(10, 0, 10, 0); } public TextViewEx(Context context, AttributeSet attrs) { super(context, attrs); this.setPadding(10, 0, 10, 0); } public TextViewEx(Context context) { super(context); this.setPadding(10, 0, 10, 0); } @Override public void setPadding(int left, int top, int right, int bottom) { // TODO Auto-generated method stub super.setPadding(left+10, top, right+10, bottom); } @Override public void setDrawingCacheEnabled(boolean cacheEnabled) { this.cacheEnabled = cacheEnabled; } public void setText(String st, boolean wrap) { wrapEnabled = wrap; super.setText(st); } public void setTextAlign(Align align) { _align=align; } @SuppressLint("NewApi") @Override protected void onDraw(Canvas canvas) { // If wrap is disabled then, // request original onDraw if(!wrapEnabled) { super.onDraw(canvas); return; } // Active canas needs to be set // based on cacheEnabled Canvas activeCanvas = null; // Set the active canvas based on // whether cache is enabled if (cacheEnabled) { if (cache != null) { // Draw to the OS provided canvas // if the cache is not empty canvas.drawBitmap(cache, 0, 0, paint); return; } else { // Create a bitmap and set the activeCanvas // to the one derived from the bitmap cache = Bitmap.createBitmap(getWidth(), getHeight(), Config.ARGB_4444); activeCanvas = new Canvas(cache); } } else { // Active canvas is the OS // provided canvas activeCanvas = canvas; } // Pull widget properties paint.setColor(getCurrentTextColor()); paint.setTypeface(getTypeface()); paint.setTextSize(getTextSize()); paint.setTextAlign(_align); paint.setFlags(Paint.ANTI_ALIAS_FLAG); //minus out the paddings pixel dirtyRegionWidth = getWidth()-getPaddingLeft()-getPaddingRight(); int maxLines = Integer.MAX_VALUE; int currentapiVersion = android.os.Build.VERSION.SDK_INT; if (currentapiVersion >= android.os.Build.VERSION_CODES.JELLY_BEAN){ maxLines = getMaxLines(); } int lines = 1; blocks = getText().toString().split("((?<=\n)|(?=\n))"); verticalOffset = horizontalFontOffset = getLineHeight() - 0.5f; // Temp fix spaceOffset = paint.measureText(" "); for(int i = 0; i < blocks.length && lines <= maxLines; i++) { block = blocks[i]; horizontalOffset = 0; if(block.length() == 0) { continue; } else if(block.equals("\n")) { verticalOffset += horizontalFontOffset; continue; } block = block.trim(); 
            if(block.length() == 0) {
                continue;
            }

            wrappedObj = TextJustifyUtils.createWrappedLine(block, paint, spaceOffset, dirtyRegionWidth);
            wrappedLine = ((String) wrappedObj[0]);
            wrappedEdgeSpace = (Float) wrappedObj[1];
            lineAsWords = wrappedLine.split(" ");
            strecthOffset = wrappedEdgeSpace != Float.MIN_VALUE ? wrappedEdgeSpace/(lineAsWords.length - 1) : 0;

            for(int j = 0; j < lineAsWords.length; j++) {
                String word = lineAsWords[j];
                if (lines == maxLines && j == lineAsWords.length - 1) {
                    activeCanvas.drawText("...", horizontalOffset, verticalOffset, paint);
                } else if(j == 0) {
                    // if it is the first word of the line, text will be drawn starting from right edge of textview
                    if (_align == Align.RIGHT) {
                        activeCanvas.drawText(word, getWidth()-(getPaddingRight()), verticalOffset, paint);
                        // add in the paddings to the horizontalOffset
                        horizontalOffset += getWidth()-(getPaddingRight());
                    } else {
                        activeCanvas.drawText(word, getPaddingLeft(), verticalOffset, paint);
                        horizontalOffset += getPaddingLeft();
                    }
                } else {
                    activeCanvas.drawText(word, horizontalOffset, verticalOffset, paint);
                }

                if (_align == Align.RIGHT)
                    horizontalOffset -= paint.measureText(word) + spaceOffset + strecthOffset;
                else
                    horizontalOffset += paint.measureText(word) + spaceOffset + strecthOffset;
            }

            lines++;

            if(blocks[i].length() > 0) {
                blocks[i] = blocks[i].substring(wrappedLine.length());
                verticalOffset += blocks[i].length() > 0 ? horizontalFontOffset : 0;
                i--; // reprocess the remainder of this block on the next pass
            }
        }

        if (cacheEnabled) {
            // Draw the cache onto the OS provided canvas.
            canvas.drawBitmap(cache, 0, 0, paint);
        }
    }
}
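/*
 * A minimal usage sketch (assumed, not from the original source): the two-argument
 * setText(String, boolean) defined above is what switches the view onto the justified drawing
 * path, and setTextAlign picks the edge the words are laid out from. "someContext" stands in for
 * whatever Context the caller already has.
 */
class TextViewExUsageSketch {

    static TextViewEx buildJustifiedView(android.content.Context someContext) {
        TextViewEx view = new TextViewEx(someContext);
        view.setTextAlign(android.graphics.Paint.Align.LEFT); // Align.RIGHT lays lines out from the right edge
        view.setText("A long paragraph of text that should be fully justified across the view width.", true);
        return view;
    }
}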
package org.encog.persist.persistors; import java.util.HashMap; import java.util.Map; import org.encog.EncogError; import org.encog.neural.networks.BasicNetwork; import org.encog.neural.networks.layers.Layer; import org.encog.neural.networks.logic.ART1Logic; import org.encog.neural.networks.logic.BAMLogic; import org.encog.neural.networks.logic.BoltzmannLogic; import org.encog.neural.networks.logic.FeedforwardLogic; import org.encog.neural.networks.logic.HopfieldLogic; import org.encog.neural.networks.logic.NeuralLogic; import org.encog.neural.networks.logic.SimpleRecurrentLogic; import org.encog.neural.networks.synapse.Synapse; import org.encog.parse.tags.read.ReadXML; import org.encog.parse.tags.write.WriteXML; import org.encog.persist.EncogPersistedCollection; import org.encog.persist.EncogPersistedObject; import org.encog.persist.Persistor; /** * The Encog persistor used to persist the BasicNetwork class. * * @author jheaton */ public class BasicNetworkPersistor implements Persistor { /** * The layers tag. */ public static final String TAG_LAYERS = "layers"; /** * The synapses tag. */ public static final String TAG_SYNAPSES = "synapses"; /** * The synapse tag. */ public static final String TAG_SYNAPSE = "synapse"; /** * The properties tag. */ public static final String TAG_PROPERTIES = "properties"; /** * The tags tag. */ public static final String TAG_TAGS = "tags"; /** * The tag tag. */ public static final String TAG_TAG = "tag"; /** * The logic tag. */ public static final String TAG_LOGIC = "logic"; /** * The layer synapse. */ public static final String TAG_LAYER = "layer"; public static final String TAG_PROPERTY = "Property"; /** * The id attribute. */ public static final String ATTRIBUTE_ID = "id"; public static final String ATTRIBUTE_NAME = "name"; public static final String ATTRIBUTE_VALUE = "value"; /** * The type attribute. */ public static final String ATTRIBUTE_TYPE = "type"; /** * The input layer type. */ public static final String ATTRIBUTE_TYPE_INPUT = "input"; /** * The output layer type. */ public static final String ATTRIBUTE_TYPE_OUTPUT = "output"; /** * The hidden layer type. */ public static final String ATTRIBUTE_TYPE_HIDDEN = "hidden"; /** * The both layer type. */ public static final String ATTRIBUTE_TYPE_BOTH = "both"; /** * The unknown layer type. */ public static final String ATTRIBUTE_TYPE_UNKNOWN = "unknown"; /** * The from attribute. */ public static final String ATTRIBUTE_FROM = "from"; /** * The to attribute. */ public static final String ATTRIBUTE_TO = "to"; /** * The to attribute. */ public static final String ATTRIBUTE_LAYER = "layer"; /** * The network that is being loaded. */ private BasicNetwork currentNetwork; /** * A mapping from layers to index numbers. */ private final Map<Layer, Integer> layer2index = new HashMap<Layer, Integer>(); /** * A mapping from index numbers to layers. */ private final Map<Integer, Layer> index2layer = new HashMap<Integer, Layer>(); /** * Handle any layers that should be loaded. * * @param in * The XML reader. 
*/ private void handleLayers(final ReadXML in) { final String end = in.getTag().getName(); while (in.readToTag()) { if (in.is(BasicNetworkPersistor.TAG_LAYER, true)) { final int num = in.getTag().getAttributeInt( BasicNetworkPersistor.ATTRIBUTE_ID); final String type = in.getTag().getAttributeValue( BasicNetworkPersistor.ATTRIBUTE_TYPE); in.readToTag(); final Persistor persistor = PersistorUtil.createPersistor(in .getTag().getName()); final Layer layer = (Layer) persistor.load(in); this.index2layer.put(num, layer); // the type attribute is actually "legacy", but if its there // then use it! if (type != null) { if (type.equals(BasicNetworkPersistor.ATTRIBUTE_TYPE_INPUT)) { this.currentNetwork.tagLayer(BasicNetwork.TAG_INPUT, layer); } else if (type .equals(BasicNetworkPersistor.ATTRIBUTE_TYPE_OUTPUT)) { this.currentNetwork.tagLayer(BasicNetwork.TAG_OUTPUT, layer); } else if (type .equals(BasicNetworkPersistor.ATTRIBUTE_TYPE_BOTH)) { this.currentNetwork.tagLayer(BasicNetwork.TAG_INPUT, layer); this.currentNetwork.tagLayer(BasicNetwork.TAG_OUTPUT, layer); } } // end of legacy processing } if (in.is(end, false)) { break; } } } /** * Process any synapses that should be loaded. * * @param in * The XML reader. */ private void handleSynapses(final ReadXML in) { final String end = in.getTag().getName(); while (in.readToTag()) { if (in.is(BasicNetworkPersistor.TAG_SYNAPSE, true)) { final int from = in.getTag().getAttributeInt( BasicNetworkPersistor.ATTRIBUTE_FROM); final int to = in.getTag().getAttributeInt( BasicNetworkPersistor.ATTRIBUTE_TO); in.readToTag(); final Persistor persistor = PersistorUtil.createPersistor(in .getTag().getName()); final Synapse synapse = (Synapse) persistor.load(in); synapse.setFromLayer(this.index2layer.get(from)); synapse.setToLayer(this.index2layer.get(to)); synapse.getFromLayer().addSynapse(synapse); } if (in.is(end, false)) { break; } } } /** * Load the specified Encog object from an XML reader. * * @param in * The XML reader to use. * @return The loaded object. 
*/ public EncogPersistedObject load(final ReadXML in) { final String name = in.getTag().getAttributes().get( EncogPersistedCollection.ATTRIBUTE_NAME); final String description = in.getTag().getAttributes().get( EncogPersistedCollection.ATTRIBUTE_DESCRIPTION); this.currentNetwork = new BasicNetwork(); this.currentNetwork.setName(name); this.currentNetwork.setDescription(description); while (in.readToTag()) { if (in.is(BasicNetworkPersistor.TAG_LAYERS, true)) { handleLayers(in); } else if (in.is(BasicNetworkPersistor.TAG_SYNAPSES, true)) { handleSynapses(in); } else if (in.is(BasicNetworkPersistor.TAG_PROPERTIES, true)) { handleProperties(in); } else if (in.is(BasicNetworkPersistor.TAG_LOGIC, true)) { handleLogic(in); } else if (in.is(BasicNetworkPersistor.TAG_TAGS, true)) { handleTags(in); } } this.currentNetwork.getStructure().finalizeStructure(); return this.currentNetwork; } private void handleLogic(ReadXML in) { String value = in.readTextToTag(); if (value.equalsIgnoreCase("ART1Logic")) { this.currentNetwork.setLogic(new ART1Logic()); } else if (value.equalsIgnoreCase("BAMLogic")) { this.currentNetwork.setLogic(new BAMLogic()); } else if (value.equalsIgnoreCase("BoltzmannLogic")) { this.currentNetwork.setLogic(new BoltzmannLogic()); } else if (value.equalsIgnoreCase("FeedforwardLogic")) { this.currentNetwork.setLogic(new FeedforwardLogic()); } else if (value.equalsIgnoreCase("HopfieldLogic")) { this.currentNetwork.setLogic(new HopfieldLogic()); } else if (value.equalsIgnoreCase("SimpleRecurrentLogic")) { this.currentNetwork.setLogic(new SimpleRecurrentLogic()); } else { try { NeuralLogic logic = (NeuralLogic) Class.forName(value) .newInstance(); this.currentNetwork.setLogic(logic); } catch (ClassNotFoundException e) { throw new EncogError(e); } catch (InstantiationException e) { throw new EncogError(e); } catch (IllegalAccessException e) { throw new EncogError(e); } } } private void handleProperties(ReadXML in) { final String end = in.getTag().getName(); while (in.readToTag()) { if (in.is(BasicNetworkPersistor.TAG_PROPERTY, true)) { final String name = in.getTag().getAttributeValue( BasicNetworkPersistor.ATTRIBUTE_NAME); String value = in.readTextToTag(); this.currentNetwork.setProperty(name, value); } if (in.is(end, false)) { break; } } } private void handleTags(ReadXML in) { final String end = in.getTag().getName(); while (in.readToTag()) { if (in.is(BasicNetworkPersistor.TAG_TAG, true)) { final String name = in.getTag().getAttributeValue( BasicNetworkPersistor.ATTRIBUTE_NAME); final String layerStr = in.getTag().getAttributeValue( BasicNetworkPersistor.ATTRIBUTE_LAYER); final int layerInt = Integer.parseInt(layerStr); Layer layer = this.index2layer.get(layerInt); this.currentNetwork.tagLayer(name, layer); in.readToTag(); } if (in.is(end, false)) { break; } } } /** * Save the specified Encog object to an XML writer. * * @param obj * The object to save. * @param out * The XML writer to save to. 
*/ public void save(final EncogPersistedObject obj, final WriteXML out) { PersistorUtil.beginEncogObject(EncogPersistedCollection.TYPE_BASIC_NET, out, obj, true); this.currentNetwork = (BasicNetwork) obj; this.currentNetwork.getStructure().finalizeStructure(); // save the layers out.beginTag(BasicNetworkPersistor.TAG_LAYERS); saveLayers(out); out.endTag(); // save the structure of these layers out.beginTag(BasicNetworkPersistor.TAG_SYNAPSES); saveSynapses(out); out.endTag(); saveProperties(out); saveTags(out); saveLogic(out); out.endTag(); } private void saveLogic(WriteXML out) { out.beginTag(BasicNetworkPersistor.TAG_LOGIC); NeuralLogic logic = this.currentNetwork.getLogic(); if (logic instanceof FeedforwardLogic || logic instanceof SimpleRecurrentLogic || logic instanceof BoltzmannLogic || logic instanceof ART1Logic || logic instanceof BAMLogic || logic instanceof HopfieldLogic) { out.addText(logic.getClass().getSimpleName()); } else out.addText(logic.getClass().getName()); out.endTag(); } private void saveProperties(WriteXML out) { // save any properties out.beginTag(BasicNetworkPersistor.TAG_PROPERTIES); for (String key : this.currentNetwork.getProperties().keySet()) { String value = this.currentNetwork.getProperties().get(key); out.addAttribute(BasicNetworkPersistor.ATTRIBUTE_NAME, key); out.beginTag(BasicNetworkPersistor.TAG_PROPERTY); out.addText(value.toString()); out.endTag(); } out.endTag(); } private void saveTags(WriteXML out) { // save any properties out.beginTag(BasicNetworkPersistor.TAG_TAGS); for (String key : this.currentNetwork.getLayerTags().keySet()) { Layer value = this.currentNetwork.getLayerTags().get(key); out.addAttribute(BasicNetworkPersistor.ATTRIBUTE_NAME, key); out.addAttribute(BasicNetworkPersistor.ATTRIBUTE_LAYER, "" + layer2index.get(value)); out.beginTag(BasicNetworkPersistor.TAG_TAG); out.endTag(); } out.endTag(); } /** * Save the layers to the specified XML writer. * * @param out * The XML writer. */ private void saveLayers(final WriteXML out) { int current = 1; for (final Layer layer : this.currentNetwork.getStructure().getLayers()) { out.addAttribute(BasicNetworkPersistor.ATTRIBUTE_ID, "" + current); out.beginTag(BasicNetworkPersistor.TAG_LAYER); final Persistor persistor = layer.createPersistor(); persistor.save(layer, out); out.endTag(); this.layer2index.put(layer, current); current++; } } /** * Save the synapses to the specified XML writer. * * @param out * The XML writer. */ private void saveSynapses(final WriteXML out) { for (final Synapse synapse : this.currentNetwork.getStructure() .getSynapses()) { out.addAttribute(BasicNetworkPersistor.ATTRIBUTE_FROM, "" + this.layer2index.get(synapse.getFromLayer())); out.addAttribute(BasicNetworkPersistor.ATTRIBUTE_TO, "" + this.layer2index.get(synapse.getToLayer())); out.beginTag(BasicNetworkPersistor.TAG_SYNAPSE); final Persistor persistor = synapse.createPersistor(); persistor.save(synapse, out); out.endTag(); } } }
package org.epics.pvmanager; import java.lang.ref.WeakReference; import java.util.concurrent.Executor; /** * Object responsible to notify the PV of changes on the appropriate thread. * * @author carcassi */ class Notifier<T> { private final WeakReference<PV<T>> pvRef; private final Function<T> function; private final Executor notificationExecutor; private volatile PVRecipe pvRecipe; private final ExceptionHandler exceptionHandler; /** * Creates a new notifier. The new notifier will notifier the given pv * with new values calculated by the function, and will use onThread to * perform the notifications. * <p> * After construction, one MUST set the pvRecipe, so that the * dataSource is appropriately closed. * * @param pv the pv on which to notify * @param function the function used to calculate new values * @param notificationExecutor the thread switching mechanism */ Notifier(PV<T> pv, Function<T> function, Executor notificationExecutor, ExceptionHandler exceptionHandler) { this.pvRef = new WeakReference<PV<T>>(pv); this.function = function; this.notificationExecutor = notificationExecutor; this.exceptionHandler = exceptionHandler; } /** * Determines whether the notifier is active or not. * <p> * The notifier becomes inactive if the PV is closed or is garbage collected. * The first time this function determines that the notifier is inactive, * it will ask the data source to close all channels relative to the * pv. * * @return true if new notification should be performed */ boolean isActive() { // Making sure to get the reference once for thread safety final PV<T> pv = pvRef.get(); if (pv != null && !pv.isClosed()) { return true; } else { if (pvRecipe != null) { pvRecipe.getDataSource().disconnect(pvRecipe.getDataSourceRecipe()); pvRecipe = null; } return false; } } /** * Notifies the PV of a new value. */ void notifyPv() { try { // The data will be shipped as part of the task, // which is properly synchronized by the executor final T newValue = function.getValue(); notificationExecutor.execute(new Runnable() { @Override public void run() { PV<T> pv = pvRef.get(); if (pv != null && newValue != null) { Notification<T> notification = NotificationSupport.notification(pv.getValue(), newValue); if (notification.isNotificationNeeded()) { pv.setValue(notification.getNewValue()); } } } }); } catch(RuntimeException ex) { exceptionHandler.handleException(ex); } } void setPvRecipe(PVRecipe pvRecipe) { this.pvRecipe = pvRecipe; } PVRecipe getPvRecipe() { return pvRecipe; } }
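/*
 * A standalone sketch (assumed names, not the pvmanager API) of the pattern Notifier uses above:
 * the notification target is held through a WeakReference so an abandoned PV can be garbage
 * collected, the new value is computed on the calling thread, and only the final assignment is
 * handed to the Executor that represents the notification thread. Target and ValueSource are
 * invented stand-ins for PV and Function.
 */
class WeakNotifierSketch<T> {

    interface Target<V> {
        void setValue(V value);
        boolean isClosed();
    }

    interface ValueSource<V> {
        V getValue();
    }

    private final java.lang.ref.WeakReference<Target<T>> targetRef;
    private final ValueSource<T> valueSource;
    private final java.util.concurrent.Executor notificationExecutor;

    WeakNotifierSketch(Target<T> target, ValueSource<T> valueSource,
            java.util.concurrent.Executor notificationExecutor) {
        this.targetRef = new java.lang.ref.WeakReference<Target<T>>(target);
        this.valueSource = valueSource;
        this.notificationExecutor = notificationExecutor;
    }

    /** Mirrors isActive(): stop notifying once the target is closed or collected. */
    boolean isActive() {
        Target<T> target = targetRef.get();
        return target != null && !target.isClosed();
    }

    /** Mirrors notifyPv(): compute here, assign on the notification executor. */
    void notifyTarget() {
        final T newValue = valueSource.getValue();
        notificationExecutor.execute(new Runnable() {
            @Override
            public void run() {
                Target<T> target = targetRef.get();
                if (target != null && newValue != null) {
                    target.setValue(newValue);
                }
            }
        });
    }
}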
/* * @author <a href="oliver.wehrens@aei.mpg.de">Oliver Wehrens</a> * @version $Id$ */ package org.gridlab.gridsphere.portlets.core.beans; import org.jdom.JDOMException; import org.jdom.Element; import org.jdom.Document; import org.jdom.input.SAXBuilder; import java.util.ArrayList; import java.util.Iterator; import java.util.Vector; import java.util.List; import java.net.MalformedURLException; import java.net.URL; public class RSSNews { private static RSSNews instance = new RSSNews(); private int fetchinterval = 60; // in seconds Vector feeds = new Vector(); private RSSNews() { super(); this.add("http: this.add("http: this.add("http://diveintomark.org/xml/rss.xml","DiveIntoMark"); this.add("http://weblog.infoworld.com/udell/rss.xml","weblog"); this.add("http://rss.com.com/2547-12-0-5.xml","CNet"); } public Document getRSSFeed(RSSFeed feed) { Document doc = new Document(new Element("rss")); if ((System.currentTimeMillis()-feed.getLastfetched()>1000*getFetchinterval()) || feed.getLastfetched()==0) { try { SAXBuilder builder = new SAXBuilder(false); URL feedurl = new URL(feed.getUrl()); doc = builder.build(feedurl); feed.setFeed(doc); System.out.println("CACHED TIME :"+feed.getLastfetched()); System.out.println("CURRENT TIME: "+System.currentTimeMillis()); long diff = System.currentTimeMillis()-feed.getLastfetched(); System.out.println("DIFF :"+diff); feed.setLastfetched(System.currentTimeMillis()); System.out.println("FETCHINTERVALL: "+getFetchinterval()); System.out.println(" } catch (MalformedURLException e) { } catch (JDOMException e) { } } else { doc = feed.getFeed(); } return doc; } public Document getRSSFeed(String url) { return getRSSFeed(getFeed(url)); } public String getHTML(String url) { return getHTML(getFeed(url)); } public String getHTML(RSSFeed feed) { Document doc = getRSSFeed(feed); String version = "unknown"; Element root = doc.getRootElement(); String result = new String(); version = root.getAttributeValue("version"); result = result + ("RSS feed from URL: "+feed.getUrl()); try { if (version.equals("2.0") || version.equals("3.14159265359")) { List items = root.getChild("channel").getChildren("item"); Iterator it = items.iterator(); result = result +"<ul>"; while (it.hasNext()) { Element item = (Element)it.next(); String title = item.getChild("title").getText(); String link = item.getChild("link").getText(); String desc = item.getChild("description").getText(); result = result +"<li><a target=\"_new\" href=\""+link+"\">"+title+"</a><br/>"+desc+"</li>"; } result = result +"</ul>"; } else { result = result +"<br/>Unsupported RSS feed version ("+version+")"; } } catch (Exception e) { result = result +"<br/>This is a not a supported RSS feed."; System.out.println("============> "+e); } return result; } public static RSSNews getInstance() { return instance; } public void add(String url, String label) { RSSFeed news = new RSSFeed(url, label); feeds.add(news); } public void delete(String url) { Iterator it = feeds.iterator(); while (it.hasNext()) { RSSFeed feed = (RSSFeed)it.next(); if (feed.getUrl().equals(url)) { feeds.remove(feed); return; } } } public Iterator iterator() { return feeds.iterator(); } public RSSFeed getFeed(String url) { Iterator it = feeds.iterator(); while (it.hasNext()) { RSSFeed feed = (RSSFeed)it.next(); if (feed.getUrl().equals(url)) { return feed; } } return null; } public int getFetchinterval() { return fetchinterval; } public void setFetchinterval(int fetchinterval) { this.fetchinterval = fetchinterval; } }
package com.redhat.ceylon.eclipse.code.html; import static com.redhat.ceylon.eclipse.code.html.HTMLPrinter.convertToHTMLContent; import static com.redhat.ceylon.eclipse.code.html.HTMLPrinter.toHex; import static com.redhat.ceylon.eclipse.util.Highlights.CHARS; import static com.redhat.ceylon.eclipse.util.Highlights.COMMENTS; import static com.redhat.ceylon.eclipse.util.Highlights.IDENTIFIERS; import static com.redhat.ceylon.eclipse.util.Highlights.KEYWORDS; import static com.redhat.ceylon.eclipse.util.Highlights.NUMBERS; import static com.redhat.ceylon.eclipse.util.Highlights.PACKAGES; import static com.redhat.ceylon.eclipse.util.Highlights.STRINGS; import static com.redhat.ceylon.eclipse.util.Highlights.TYPES; import static com.redhat.ceylon.eclipse.util.Highlights.getCurrentThemeColor; import static org.antlr.runtime.Token.HIDDEN_CHANNEL; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.URL; import org.antlr.runtime.Token; import org.eclipse.core.runtime.FileLocator; import org.eclipse.core.runtime.Path; import org.eclipse.core.runtime.Platform; import org.eclipse.jdt.internal.ui.JavaPlugin; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.graphics.GC; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Shell; import org.osgi.framework.Bundle; import com.redhat.ceylon.compiler.typechecker.model.Declaration; import com.redhat.ceylon.compiler.typechecker.model.Module; import com.redhat.ceylon.compiler.typechecker.model.Package; import com.redhat.ceylon.compiler.typechecker.model.Referenceable; import com.redhat.ceylon.compiler.typechecker.model.Scope; import com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer; import com.redhat.ceylon.compiler.typechecker.util.NewlineFixingStringStream; import com.redhat.ceylon.eclipse.code.editor.CeylonEditor; import com.redhat.ceylon.eclipse.ui.CeylonPlugin; import com.redhat.ceylon.eclipse.util.Escaping; public class HTML { /** * The style sheet (css). */ private static String fgStyleSheet; public static URL fileUrl(String icon) { try { Bundle bundle = CeylonPlugin.getInstance().getBundle(); return FileLocator.toFileURL(FileLocator.find(bundle, new Path("icons/").append(icon), null)); } catch (IOException e) { e.printStackTrace(); return null; } } /** * Returns the Javadoc hover style sheet with the current Javadoc font from the preferences. 
     * @return the updated style sheet
     * @since 3.4
     */
    public static String getStyleSheet() {
        if (fgStyleSheet == null) {
            fgStyleSheet = loadStyleSheet();
        }
        final StringBuffer monospaceSize = new StringBuffer();
        final Font editorFont = CeylonEditor.getEditorFont();
        final Font hoverFont = CeylonEditor.getHoverFont();
        final FontData monospaceFontData = editorFont.getFontData()[0];
        final FontData textFontData = hoverFont.getFontData()[0];
        final Display display = Display.getDefault();
        display.syncExec(new Runnable() {
            @Override
            public void run() {
                Shell activeShell = display.getActiveShell();
                //TODO: how can we make sure this is never called
                //      without a Shell at startup time
                if (activeShell != null) {
                    GC gc = new GC(activeShell);
                    Font font = gc.getFont();
                    gc.setFont(hoverFont);
                    int hoverFontHeight = gc.getFontMetrics().getAscent();
                    gc.setFont(editorFont);
                    int monospaceFontHeight = gc.getFontMetrics().getAscent();
                    gc.setFont(font);
                    int ratio = 100 * monospaceFontData.getHeight() * hoverFontHeight
                            / monospaceFontHeight / textFontData.getHeight();
                    monospaceSize.append(ratio).append("%");
                }
            }
        });
        return HTMLPrinter.convertTopLevelFont(fgStyleSheet, textFontData)
                .replaceFirst("pre", "pre, tt, code")
                .replaceFirst("font-family: monospace;",
                        "font-family: '" + monospaceFontData.getName() + "', monospace;"
                        + "font-size: " + monospaceSize + ";")
                + "body { padding: 15px; }\n";
    }

    /**
     * Loads and returns the Javadoc hover style sheet.
     * @return the style sheet, or <code>null</code> if unable to load
     * @since 3.4
     */
    public static String loadStyleSheet() {
        Bundle bundle = Platform.getBundle(JavaPlugin.getPluginId());
        URL styleSheetURL = bundle.getEntry("/JavadocHoverStyleSheet.css");
        if (styleSheetURL != null) {
            BufferedReader reader = null;
            try {
                reader = new BufferedReader(new InputStreamReader(styleSheetURL.openStream()));
                StringBuilder buffer = new StringBuilder(1500);
                String line = reader.readLine();
                while (line != null) {
                    buffer.append(line);
                    buffer.append('\n');
                    line = reader.readLine();
                }
                return buffer.toString();
            }
            catch (IOException ex) {
                JavaPlugin.log(ex);
                return "";
            }
            finally {
                try {
                    if (reader != null) reader.close();
                }
                catch (IOException e) {
                }
            }
        }
        return null;
    }

    public static void addImageAndLabel(StringBuilder buf, Referenceable model,
            String imageSrcPath, int imageWidth, int imageHeight,
            String label, int labelLeft, int labelTop) {
        buf.append("<div style='word-wrap: break-word; position: relative; ");
        if (imageSrcPath != null) {
            buf.append("margin-left: ").append(labelLeft).append("px; ");
            buf.append("padding-top: ").append(labelTop).append("px; ");
        }
        buf.append("'>");
        if (imageSrcPath != null) {
            if (model != null) {
                buf.append("<a ").append(HTML.link(model)).append(">");
            }
            addImage(buf, imageSrcPath, imageWidth, imageHeight, labelLeft);
            if (model != null) {
                buf.append("</a>");
            }
        }
        buf.append(label);
        buf.append("</div>");
    }

    public static void addImage(StringBuilder buf, String imageSrcPath,
            int imageWidth, int imageHeight, int labelLeft) {
        StringBuilder imageStyle = new StringBuilder("border:none; position: absolute; ");
        imageStyle.append("width: ").append(imageWidth).append("px; ");
        imageStyle.append("height: ").append(imageHeight).append("px; ");
        imageStyle.append("left: ").append(- labelLeft - 1).append("px; ");
        buf.append("<!--[if lte IE 6]><![if gte IE 5.5]>\n");
        //String tooltip= element == null ? "" : "alt='" + "Open Declaration" + "' ";
        buf.append("<span ").append("style=\"").append(imageStyle)
            .append("filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='")
            .append(imageSrcPath).append("')\"></span>\n");
        buf.append("<![endif]><![endif]-->\n");
        buf.append("<!--[if !IE]>-->\n");
        buf.append("<img ").append("style='").append(imageStyle).append("' src='")
            .append(imageSrcPath).append("'/>\n");
        buf.append("<!--<![endif]-->\n");
        buf.append("<!--[if gte IE 7]>\n");
        buf.append("<img ").append("style='").append(imageStyle).append("' src='")
            .append(imageSrcPath).append("'/>\n");
        buf.append("<![endif]-->\n");
    }

    public static String getAddress(Referenceable model) {
        if (model == null) return null;
        return "dec:" + declink(model);
    }

    public static String link(Referenceable model) {
        return "href='doc:" + declink(model) + "'";
    }

    public static String declink(Referenceable model) {
        if (model instanceof Package) {
            Package p = (Package) model;
            return declink(p.getModule()) + ":" + p.getNameAsString();
        }
        if (model instanceof Module) {
            return ((Module) model).getNameAsString();
        }
        else if (model instanceof Declaration) {
            String result = ":" + ((Declaration) model).getName();
            Scope container = ((Declaration) model).getContainer();
            if (container instanceof Referenceable) {
                return declink((Referenceable) container) + result;
            }
            else {
                return result;
            }
        }
        else {
            return "";
        }
    }

    public static String keyword(String kw) {
        String kwc = toHex(getCurrentThemeColor(KEYWORDS));
        return "<span style='color:"+kwc+"'>"+ kw + "</span>";
    }

    public static String highlightLine(String line) {
        String kwc = toHex(getCurrentThemeColor(KEYWORDS));
        String tc = toHex(getCurrentThemeColor(TYPES));
        String ic = toHex(getCurrentThemeColor(IDENTIFIERS));
        String sc = toHex(getCurrentThemeColor(STRINGS));
        String nc = toHex(getCurrentThemeColor(NUMBERS));
        String cc = toHex(getCurrentThemeColor(CHARS));
        String pc = toHex(getCurrentThemeColor(PACKAGES));
        String lcc = toHex(getCurrentThemeColor(COMMENTS));
        CeylonLexer lexer = new CeylonLexer(new NewlineFixingStringStream(line));
        Token token;
        boolean inPackageName = false;
        StringBuilder result = new StringBuilder();
        while ((token = lexer.nextToken()).getType() != CeylonLexer.EOF) {
            String s = convertToHTMLContent(token.getText());
            int type = token.getType();
            if (type != CeylonLexer.LIDENTIFIER &&
                type != CeylonLexer.MEMBER_OP &&
                token.getChannel() != HIDDEN_CHANNEL) {
                inPackageName = false;
            }
            else if (inPackageName) {
                result.append("<span style='color:"+pc+"'>").append(s).append("</span>");
                continue;
            }
            switch (type) {
            case CeylonLexer.FLOAT_LITERAL:
            case CeylonLexer.NATURAL_LITERAL:
                result.append("<span style='color:"+nc+"'>").append(s).append("</span>");
                break;
            case CeylonLexer.CHAR_LITERAL:
                result.append("<span style='color:"+cc+"'>").append(s).append("</span>");
                break;
            case CeylonLexer.STRING_LITERAL:
            case CeylonLexer.STRING_START:
            case CeylonLexer.STRING_MID:
            case CeylonLexer.VERBATIM_STRING:
                result.append("<span style='color:"+sc+"'>").append(s).append("</span>");
                break;
            case CeylonLexer.UIDENTIFIER:
                result.append("<span style='color:"+tc+"'>").append(s).append("</span>");
                break;
            case CeylonLexer.LIDENTIFIER:
                result.append("<span style='color:"+ic+"'>").append(s).append("</span>");
                break;
            case CeylonLexer.MULTI_COMMENT:
            case CeylonLexer.LINE_COMMENT:
                result.append("<span style='color:"+lcc+"'>").append(s).append("</span>");
                break;
            case CeylonLexer.IMPORT:
            case CeylonLexer.PACKAGE:
            case CeylonLexer.MODULE:
                inPackageName = true;
                //then fall through!
default: if (Escaping.KEYWORDS.contains(s)) { result.append("<span style='color:"+kwc+"'>").append(s).append("</span>"); } else { result.append(s); } } } return result.toString(); } }
package edu.uci.ics.pregelix.dataflow.context; import java.lang.reflect.Method; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Logger; import edu.uci.ics.hyracks.api.context.IHyracksTaskContext; import edu.uci.ics.hyracks.api.dataflow.state.IStateObject; import edu.uci.ics.hyracks.api.exceptions.HyracksDataException; import edu.uci.ics.hyracks.api.io.FileReference; import edu.uci.ics.pregelix.api.graph.Vertex; public class PJobContext { private static final Logger LOGGER = Logger.getLogger(RuntimeContext.class.getName()); private final Map<Long, List<FileReference>> iterationToFiles = new ConcurrentHashMap<Long, List<FileReference>>(); private final Map<TaskIterationID, IStateObject> appStateMap = new ConcurrentHashMap<TaskIterationID, IStateObject>(); private Long jobIdToSuperStep; private Boolean jobIdToMove; public void close() throws HyracksDataException { for (Entry<Long, List<FileReference>> entry : iterationToFiles.entrySet()) for (FileReference fileRef : entry.getValue()) fileRef.delete(); iterationToFiles.clear(); appStateMap.clear(); } public void clearState() throws HyracksDataException { for (Entry<Long, List<FileReference>> entry : iterationToFiles.entrySet()) for (FileReference fileRef : entry.getValue()) fileRef.delete(); iterationToFiles.clear(); appStateMap.clear(); } public Map<TaskIterationID, IStateObject> getAppStateStore() { return appStateMap; } public static RuntimeContext get(IHyracksTaskContext ctx) { return (RuntimeContext) ctx.getJobletContext().getApplicationContext().getApplicationObject(); } public void setVertexProperties(long numVertices, long numEdges, long currentIteration, ClassLoader cl) { if (jobIdToMove == null || jobIdToMove == true) { if (jobIdToSuperStep == null) { if (currentIteration <= 0) { jobIdToSuperStep = 0L; } else { jobIdToSuperStep = currentIteration; } } long superStep = jobIdToSuperStep; List<FileReference> files = iterationToFiles.remove(superStep - 1); if (files != null) { for (FileReference fileRef : files) { if (fileRef != null) { fileRef.delete(); } } } setProperties(numVertices, numEdges, currentIteration, superStep, false, cl); } System.gc(); } public void recoverVertexProperties(long numVertices, long numEdges, long currentIteration, ClassLoader cl) { if (jobIdToSuperStep == null) { if (currentIteration <= 0) { jobIdToSuperStep = 0L; } else { jobIdToSuperStep = currentIteration; } } long superStep = jobIdToSuperStep; List<FileReference> files = iterationToFiles.remove(superStep - 1); if (files != null) { for (FileReference fileRef : files) { if (fileRef != null) { fileRef.delete(); } } } setProperties(numVertices, numEdges, currentIteration, superStep, true, cl); } public void endSuperStep() { jobIdToMove = true; LOGGER.info("end iteration " + Vertex.getSuperstep()); } public Map<Long, List<FileReference>> getIterationToFiles() { return iterationToFiles; } private void setProperties(long numVertices, long numEdges, long currentIteration, long superStep, boolean toMove, ClassLoader cl) { try { Class<?> vClass = (Class<?>) cl.loadClass("edu.uci.ics.pregelix.api.graph.Vertex"); Method superStepMethod = vClass.getMethod("setSuperstep", Long.TYPE); Method numVerticesMethod = vClass.getMethod("setNumVertices", Long.TYPE); Method numEdgesMethod = vClass.getMethod("setNumEdges", Long.TYPE); if (currentIteration > 0) { //Vertex.setSuperstep(currentIteration); superStepMethod.invoke(null, currentIteration); } else { 
//Vertex.setSuperstep(++superStep); superStepMethod.invoke(null, ++superStep); } //Vertex.setNumVertices(numVertices); numVerticesMethod.invoke(null, numVertices); //Vertex.setNumEdges(numEdges); numEdgesMethod.invoke(null, numEdges); jobIdToSuperStep = superStep; jobIdToMove = toMove; LOGGER.info("start iteration " + Vertex.getSuperstep()); } catch (Exception e) { throw new IllegalStateException(e); } } }
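/*
 * A small standalone sketch (assumption, not part of the Pregelix API) of the reflection trick
 * setProperties uses above: the Vertex class has to be resolved through the job's own ClassLoader,
 * so its static setters are looked up and invoked reflectively instead of being referenced
 * directly. The class name, setter name and helper below are placeholders.
 */
class ClassLoaderStaticSetterSketch {

    static void setStaticLong(ClassLoader jobClassLoader, String className,
            String setterName, long value) {
        try {
            Class<?> clazz = jobClassLoader.loadClass(className);
            java.lang.reflect.Method setter = clazz.getMethod(setterName, Long.TYPE);
            // null receiver because the target method is static
            setter.invoke(null, value);
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }
}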
package org.nick.wwwjdic.test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

import org.junit.Test;
import org.nick.wwwjdic.KanjiEntry;

public class KanjiEntryTest {

    @Test
    public void testParse() {
        String kanjidicStr = " 3063 U9055 B162 G8 S13 S12 F344 J2 N4720 V6099 H3151 DK2014 L1644 K496 DO883 MN39067P MP11.0144 E1006 IN814 DF965 DC274 DJ385 DG713 DM1659 P3-3-10 I2q10.5 Q3430.4 DR1655 ZSP3-2-10 Ywei2 Wwi C . . . -. . . {difference} {differ} ";
        KanjiEntry entry = KanjiEntry.parseKanjidic(kanjidicStr);
        assertNotNull(entry);
        assertEquals("", entry.getKanji());
        assertNull(entry.getClassicalRadicalNumber());
        assertEquals(344, entry.getFrequncyeRank().intValue());
        assertEquals(8, entry.getGrade().intValue());
        assertEquals("3063", entry.getJisCode());
        assertEquals(2, entry.getJlptLevel().intValue());
        assertEquals("wi", entry.getKoreanReading());
        assertEquals("wei2", entry.getPinyin());
        assertEquals(162, entry.getRadicalNumber());
        assertEquals("3-3-10", entry.getSkipCode());
        assertEquals(13, entry.getStrokeCount());
        assertEquals("9055", entry.getUnicodeNumber());
        assertEquals("C . . . -. . .", entry.getReading());
        assertEquals("C", entry.getOnyomi());
        assertEquals(". . . -. . .", entry.getKunyomi());
        assertEquals(2, entry.getMeanings().size());
        assertEquals("difference", entry.getMeanings().get(0));
        assertEquals("differ", entry.getMeanings().get(1));
    }

    @Test
    public void testKanjidic() throws Exception {
        FileInputStream fis = new FileInputStream(
                "C:/home/nick/android/wwwjdic/wwwjdic-test/dict/kanjidic");
        List<String> lines = new ArrayList<String>();
        BufferedReader r = new BufferedReader(new InputStreamReader(fis, "EUC-JP"));
        String line = null;
        while ((line = r.readLine()) != null) {
            lines.add(line);
        }
        r.close();

        for (String l : lines) {
            // skip kanjidic comment lines (they start with '#')
            if (l.charAt(0) == '#') {
                continue;
            }
            System.out.println("parsing : " + l);
            KanjiEntry entry = KanjiEntry.parseKanjidic(l);
            assertNotNull(entry);
            assertNotNull(entry.getKanji());
            assertNotNull(entry.getReading());
            // assertFalse(entry.getMeanings().isEmpty());
        }
    }
}
package ucar.nc2.iosp.grid; import ucar.nc2.*; import ucar.nc2.constants._Coordinate; import ucar.nc2.constants.AxisType; import ucar.grid.GridTableLookup; import ucar.grid.GridRecord; import ucar.ma2.DataType; import ucar.ma2.Array; import ucar.grib.GribGridRecord; import ucar.grib.GribNumbers; import ucar.grib.grib2.Grib2Tables; import java.util.*; /** * Handles the Ensemble coordinate dimension */ public class GridEnsembleCoord { /** * logger */ static private org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(GridEnsembleCoord.class); /** * name */ private String name; /** * lookup table */ private GridTableLookup lookup; /** * number of ensembles */ private int ensembles; /** types for the ensembles diemensions */ private int[] ensTypes; /** * product definition # */ private int pdn = -1; /** * sequence # */ private int seq = 0; /** * Create a new GridEnsembleCoord from a record * * @param record record to use * @param lookup lookup table */ GridEnsembleCoord(GridRecord record, GridTableLookup lookup) { this.lookup = lookup; //ensembles = calEnsembles(records); if (record instanceof GribGridRecord) { // check for ensemble GribGridRecord ggr = (GribGridRecord) record; if (ggr.getEnsembleNumber() == GribNumbers.UNDEFINED) { ensembles = -1; return; } ensembles = ggr.getNumberForecasts(); } ensembles = -1; } /** * Create a new GridEnsembleCoord with the list of records * * @param records records to use * @param lookup lookup table */ GridEnsembleCoord(List<GridRecord> records, GridTableLookup lookup) { this.lookup = lookup; calEnsembles(records); } /** * add Ensemble dimension */ void calEnsembles(List<GridRecord> records) { int[] enstypes = new int[100]; ensembles = -1; for( GridRecord gr : records ) { GribGridRecord ggr = (GribGridRecord) gr; pdn = ggr.productType; int ensNumber = ggr.getEnsembleNumber(); if (ensNumber == GribNumbers.UNDEFINED) { ensembles = -1; return; } if ( ensembles < ensNumber ) ensembles = ensNumber; enstypes[ ensNumber ] = ggr.getEnsembleType(); } ensembles++; ensTypes = new int[ ensembles ]; System.arraycopy( enstypes, 0, ensTypes, 0, ensembles); /* GridRecord first = records.get(0); if (first instanceof GribGridRecord) { // check for ensemble GribGridRecord ggr = (GribGridRecord) first; if (ggr.getEnsembleNumber() == GribNumbers.UNDEFINED) return -1; return ggr.getNumberForecasts(); } return -1; */ /* int key = ggr.getRecordKey(); // levelType1, levelValue1, levelType2, levelValue2, //double key = ggr.getRecordKey() + // ggr.levelType1 + ggr.levelValue1 + ggr.levelType2 +ggr.levelValue2; ensembles = 1; for( int i = 1; i < records.size(); i++) { ggr = (GribGridRecord) records.get( i ); //double key1 = ggr.getRecordKey() + // ggr.levelType1 + ggr.levelValue1 + ggr.levelType2 +ggr.levelValue2; if (key == ggr.getRecordKey() ) { ensembles++; } } */ // get the Ensemble keys //System.out.println( "Ensembles ="+ ensembles ); /* enskey = new int[ ensembles ]; ArrayList<Integer> ek = new ArrayList<Integer>(); int ikey; for( int i = 0; i < records.size(); i++) { ggr = (GribGridRecord) records.get( i ); Integer ii = new Integer( ggr.getRecordKey() ); if( ! ek.contains( ii )) ek.add( ii ); //enskey[ ggr.forecastTime ] = ggr.getRecordKey(); if ( ggr.forecastTime == 3) System.out.println( ggr.getRecordKey()+" "+ ggr.productType +" "+ ggr. 
discipline +" "+ ggr.category +" "+ ggr.paramNumber +" "+ ggr.typeGenProcess+" "+ ggr.levelType1 +" "+ ggr.levelValue1 +" "+ ggr.levelType2 +" "+ ggr.levelValue2 +" "+ ggr.refTime.hashCode() +" "+ ggr.forecastTime); } System.out.println( ek ); System.out.println( ek.size() ); */ /* int ensemble = 0; GridRecord first = recordList.get( 0 ); if ( first instanceof GribGridRecord ) { // check for ensemble GribGridRecord ggr = (GribGridRecord) first; int key = ggr.getRecordKey(); for( int i = 1; i < recordList.size(); i++) { ggr = (GribGridRecord) recordList.get( i ); if (key == ggr.getRecordKey() ) { ensemble++; } } ensembleDimension.add( new Integer( ensemble )); } } return ensembles; */ } /** * Set the sequence number * * @param seq the sequence number */ void setSequence(int seq) { this.seq = seq; } /** * Get the name * * @return the name */ String getName() { if (name != null) { return name; } return (seq == 0) ? "ens" : "ens" + seq; } /** * Add this as a dimension to a netCDF file * * @param ncfile the netCDF file * @param g the group in the file */ void addDimensionsToNetcdfFile(NetcdfFile ncfile, Group g) { ncfile.addDimension(g, new Dimension(getName(), getNEnsembles(), true)); } /** * Add this as a variable to the netCDF file * * @param ncfile the netCDF file * @param g the group in the file */ void addToNetcdfFile(NetcdfFile ncfile, Group g) { Variable v = new Variable(ncfile, g, null, getName()); // v.setDataType(DataType.INT); v.setDataType(DataType.STRING); v.addAttribute(new Attribute("long_name", "ensemble")); /* int[] data = new int[ensembles]; for (int i = 0; i < ensembles; i++) { data[i] = i; } Array dataArray = Array.factory(DataType.INT, new int[]{ensembles}, data); */ String[] data = new String[ensembles]; for (int i = 0; i < ensembles; i++) { data[i] = Grib2Tables.getEnsembleType( pdn, ensTypes[ i ]); } Array dataArray = Array.factory(DataType.STRING, new int[]{ensembles}, data); v.setDimensions(v.getShortName()); v.setCachedData(dataArray, false); /* if ( lookup instanceof Grib2GridTableLookup) { Grib2GridTableLookup g2lookup = (Grib2GridTableLookup) lookup; //v.addAttribute( new Attribute("GRIB_orgReferenceTime", formatter.toDateTimeStringISO( d ))); //v.addAttribute( new Attribute("GRIB2_significanceOfRTName", // g2lookup.getFirstSignificanceOfRTName())); } else if ( lookup instanceof Grib1GridTableLookup) { Grib1GridTableLookup g1lookup = (Grib1GridTableLookup) lookup; //v.addAttribute( new Attribute("GRIB_orgReferenceTime", formatter.toDateTimeStringISO( d ))); //v.addAttribute( new Attribute("GRIB2_significanceOfRTName", // g1lookup.getFirstSignificanceOfRTName())); } */ v.addAttribute(new Attribute(_Coordinate.AxisType, AxisType.Ensemble.toString())); ncfile.addVariable(g, v); } /** * Get the index of a GridRecord * * @param record the record * @return the index or -1 if not found */ int getIndex(GridRecord record) { if (record instanceof GribGridRecord) { GribGridRecord ggr = (GribGridRecord) record; int en = ggr.getEnsembleNumber(); if (en == GribNumbers.UNDEFINED) return 0; return en; } return -1; } /** * Get the number of Ensembles * * @return the number of Ensembles */ int getNEnsembles() { return ensembles; } }
package com.scwang.smartrefresh.layout.footer; import android.content.Context; import android.content.res.TypedArray; import android.graphics.drawable.Drawable; import android.support.annotation.NonNull; import android.support.v7.widget.AppCompatTextView; import android.util.AttributeSet; import android.view.Gravity; import android.view.View; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.scwang.smartrefresh.layout.R; import com.scwang.smartrefresh.layout.api.RefreshFooter; import com.scwang.smartrefresh.layout.api.RefreshKernel; import com.scwang.smartrefresh.layout.api.RefreshLayout; import com.scwang.smartrefresh.layout.constant.RefreshState; import com.scwang.smartrefresh.layout.constant.SpinnerStyle; import com.scwang.smartrefresh.layout.internal.ProgressDrawable; import com.scwang.smartrefresh.layout.util.DensityUtil; import static android.view.ViewGroup.LayoutParams.WRAP_CONTENT; public class ClassicsFooter extends LinearLayout implements RefreshFooter { private static final String REFRESH_BOTTOM_PULLUP = ""; private static final String REFRESH_BOTTOM_RELEASE = ""; private static final String REFRESH_BOTTOM_LOADING = "..."; private TextView mBottomText; private ImageView mProgressView; private ProgressDrawable mProgressDrawable; private SpinnerStyle mSpinnerStyle = SpinnerStyle.Translate; //<editor-fold desc="LinearLayout"> public ClassicsFooter(Context context) { super(context); this.initView(context, null, 0); } public ClassicsFooter(Context context, AttributeSet attrs) { super(context, attrs); this.initView(context, attrs, 0); } public ClassicsFooter(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); this.initView(context, attrs, defStyleAttr); } private void initView(Context context, AttributeSet attrs, int defStyleAttr) { DensityUtil density = new DensityUtil(); setGravity(Gravity.CENTER); setMinimumHeight(density.dip2px(60)); mProgressDrawable = new ProgressDrawable(); mProgressDrawable.setColor(0xff666666); mProgressView = new ImageView(context); mProgressView.setImageDrawable(mProgressDrawable); LayoutParams lpPathView = new LayoutParams(density.dip2px(16), density.dip2px(16)); lpPathView.rightMargin = density.dip2px(10); addView(mProgressView, lpPathView); mBottomText = new AppCompatTextView(context, attrs, defStyleAttr); mBottomText.setTextColor(0xff666666); mBottomText.setTextSize(16); mBottomText.setText(REFRESH_BOTTOM_PULLUP); addView(mBottomText, WRAP_CONTENT, WRAP_CONTENT); if (!isInEditMode()) { mProgressView.setVisibility(GONE); } TypedArray ta = context.obtainStyledAttributes(attrs, R.styleable.ClassicsFooter); mSpinnerStyle = SpinnerStyle.values()[ta.getInt(R.styleable.ClassicsFooter_srlClassicsSpinnerStyle, mSpinnerStyle.ordinal())]; if (ta.hasValue(R.styleable.ClassicsFooter_srlAccentColor)) { int accentColor = ta.getColor(R.styleable.ClassicsFooter_srlAccentColor, 0); setAccentColor(accentColor); } ta.recycle(); } //</editor-fold> //<editor-fold desc="RefreshFooter"> @Override public void onSizeDefined(RefreshKernel layout, int height, int extendHeight) { } @Override public void onPullingUp(float percent, int offset, int bottomHeight, int extendHeight) { } @Override public void onPullReleasing(float percent, int offset, int headHeight, int extendHeight) { } @Override public void onStartAnimator(RefreshLayout layout, int headHeight, int extendHeight) { mProgressView.setVisibility(VISIBLE); mProgressDrawable.start(); } @Override public void 
onFinish(RefreshLayout layout) { mProgressDrawable.stop(); mProgressView.setVisibility(GONE); } /** * ClassicsFooter * ClassicsFooter has no primary colors */ @Override public void setPrimaryColors(int... colors) { if (mSpinnerStyle == SpinnerStyle.FixedBehind) { if (colors.length > 1) { setBackgroundColor(colors[0]); mBottomText.setTextColor(colors[1]); mProgressDrawable.setColor(colors[1]); } else if (colors.length > 0) { setBackgroundColor(colors[0]); if (colors[0] == 0xffffffff) { mBottomText.setTextColor(0xff666666); mProgressDrawable.setColor(0xff666666); } else { mBottomText.setTextColor(0xffffffff); mProgressDrawable.setColor(0xffffffff); } } } } @NonNull public View getView() { return this; } @Override public SpinnerStyle getSpinnerStyle() { return mSpinnerStyle; } @Override public void onStateChanged(RefreshLayout refreshLayout, RefreshState oldState, RefreshState newState) { switch (newState) { case None: restoreRefreshLayoutBackground(); case PullToUpLoad: mBottomText.setText(REFRESH_BOTTOM_PULLUP); break; case Loading: mBottomText.setText(REFRESH_BOTTOM_LOADING); break; case ReleaseToLoad: mBottomText.setText(REFRESH_BOTTOM_RELEASE); replaceRefreshLayoutBackground(refreshLayout); break; } } //</editor-fold> //<editor-fold desc="private"> private Runnable restoreRunable; private void restoreRefreshLayoutBackground() { if (restoreRunable != null) { restoreRunable.run(); restoreRunable = null; } } private void replaceRefreshLayoutBackground(RefreshLayout refreshLayout) { if (restoreRunable == null && mSpinnerStyle == SpinnerStyle.FixedBehind) { restoreRunable = new Runnable() { Drawable drawable = refreshLayout.getLayout().getBackground(); @Override public void run() { refreshLayout.getLayout().setBackgroundDrawable(drawable); } }; refreshLayout.getLayout().setBackgroundDrawable(getBackground()); } } //</editor-fold> //<editor-fold desc="API"> public ClassicsFooter setSpinnerStyle(SpinnerStyle style) { this.mSpinnerStyle = style; return this; } public ClassicsFooter setAccentColor(int accentColor) { mBottomText.setTextColor(accentColor); mProgressDrawable.setColor(accentColor); return this; } //</editor-fold> }
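/*
 * Usage sketch (illustrative, not part of the library sources above): one plausible way to attach
 * and style the ClassicsFooter on a SmartRefreshLayout through the fluent API defined above.
 * The view id R.id.refreshLayout is a hypothetical example, and setRefreshFooter(...) is assumed
 * to be the SmartRefreshLayout method for installing a RefreshFooter.
 */
class ClassicsFooterUsageExample {
    void attachFooter(android.app.Activity activity) {
        com.scwang.smartrefresh.layout.SmartRefreshLayout refreshLayout =
                (com.scwang.smartrefresh.layout.SmartRefreshLayout) activity.findViewById(R.id.refreshLayout);
        // build the footer, pick the translating spinner style and a grey accent color
        ClassicsFooter footer = new ClassicsFooter(activity)
                .setSpinnerStyle(SpinnerStyle.Translate)
                .setAccentColor(0xff666666);
        refreshLayout.setRefreshFooter(footer);
    }
}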
package nz.gen.geek_central.Compass3D; /* Graphical display of sensor data. */ import java.util.ArrayList; import javax.microedition.khronos.opengles.GL10; class GraphicsUseful { public static android.graphics.Paint FillWithColor ( int TheColor ) /* returns a Paint that will fill with a solid colour. */ { final android.graphics.Paint ThePaint = new android.graphics.Paint(); ThePaint.setStyle(android.graphics.Paint.Style.FILL); ThePaint.setColor(TheColor); return ThePaint; } /*FillWithColor*/ } /*GraphicsUseful*/ public class VectorView extends android.opengl.GLSurfaceView { float Azi, Elev, Roll; class VectorViewRenderer implements Renderer { private final java.nio.IntBuffer VertexBuffer; private final java.nio.IntBuffer NormalBuffer; private final java.nio.ShortBuffer IndexBuffer; final int NrIndexes; private void AddQuad ( ArrayList<Integer> Faces, int Ind1, int Ind2, int Ind3, int Ind4 ) /* adds two triangles to represent a quadrilateral face. */ { Faces.add(Ind1); Faces.add(Ind2); Faces.add(Ind3); Faces.add(Ind4); Faces.add(Ind1); Faces.add(Ind3); } /*AddQuad*/ public VectorViewRenderer() { super(); final int Vertices[], Normals[]; final short Indices[]; { final int one = 0x10000; final float BodyThickness = 0.15f; final float HeadThickness = 0.3f; final float HeadLengthOuter = 0.7f; final float HeadLengthInner = 0.4f; final int NrSegments = 12; final ArrayList<Vec3f> Points = new ArrayList<Vec3f>(); final ArrayList<Vec3f> PointNormals = new ArrayList<Vec3f>(); final ArrayList<Integer> Faces = new ArrayList<Integer>(); final Vec3f BaseNormal = new Vec3f(0.0f, -1.0f, 0.0f); final int Base = Points.size(); Points.add(new Vec3f(0.0f, -1.0f, 0.0f)); PointNormals.add(BaseNormal); /* note point positions may be duplicates, but their normals are different to ensure proper lighting */ int PrevTip = -1, PrevHead1 = -1, PrevHead2 = -1, PrevBodyTop1 = -1, PrevBodyTop2 = -1, PrevBodyBottom1 = -1, PrevBodyBottom2 = -1; int FirstTip = -1, FirstHead1 = -1, FirstHead2 = -1, FirstBodyTop1 = -1, FirstBodyTop2 = -1, FirstBodyBottom1 = -1, FirstBodyBottom2 = -1; final float OuterTiltCos = HeadThickness / (float)Math.hypot(HeadThickness, HeadLengthOuter); final float OuterTiltSin = HeadLengthOuter / (float)Math.hypot(HeadThickness, HeadLengthOuter); final float InnerTiltCos = HeadThickness / (float)Math.hypot(HeadThickness, HeadLengthInner); final float InnerTiltSin = HeadLengthInner / (float)Math.hypot(HeadThickness, HeadLengthInner); for (int i = 0;;) { final int ThisTip, ThisHead1, ThisHead2, ThisBodyTop1, ThisBodyTop2, ThisBodyBottom1, ThisBodyBottom2; if (i < NrSegments) { final float Angle = (float)(2.0 * Math.PI * i / NrSegments); final float Cos = android.util.FloatMath.cos(Angle); final float Sin = android.util.FloatMath.sin(Angle); final float FaceAngle = (float)(2.0 * Math.PI * (2 * i - 1) / (2 * NrSegments)); final float FaceCos = android.util.FloatMath.cos(FaceAngle); final float FaceSin = android.util.FloatMath.sin(FaceAngle); final Vec3f TipNormal = new Vec3f ( FaceCos * OuterTiltSin, OuterTiltCos, FaceSin * OuterTiltSin ); final Vec3f HeadNormal = new Vec3f ( - FaceCos * InnerTiltSin, - InnerTiltCos, - FaceSin * InnerTiltSin ); final Vec3f BodyNormal = new Vec3f(FaceCos, 0.0f, FaceSin); ThisTip = Points.size(); Points.add ( new Vec3f(0.0f, 1.0f, 0.0f) ); PointNormals.add(TipNormal); final Vec3f HeadPoint = new Vec3f(HeadThickness * Cos, 1.0f - HeadLengthOuter, HeadThickness * Sin); ThisHead1 = Points.size(); Points.add(HeadPoint); PointNormals.add(TipNormal); ThisHead2 = Points.size(); 
Points.add(HeadPoint); PointNormals.add(HeadNormal); final Vec3f BodyTopPoint = new Vec3f(BodyThickness * Cos, 1.0f - HeadLengthInner, BodyThickness * Sin); ThisBodyTop1 = Points.size(); Points.add(BodyTopPoint); PointNormals.add(HeadNormal); ThisBodyTop2 = Points.size(); Points.add(BodyTopPoint); PointNormals.add(BodyNormal); final Vec3f BodyBottomPoint = new Vec3f(BodyThickness * Cos, -1.0f, BodyThickness * Sin); ThisBodyBottom1 = Points.size(); Points.add(BodyBottomPoint); PointNormals.add(BodyNormal); ThisBodyBottom2 = Points.size(); Points.add(BodyBottomPoint); PointNormals.add(BaseNormal); } else { ThisTip = FirstTip; ThisHead1 = FirstHead1; ThisHead2 = FirstHead2; ThisBodyTop1 = FirstBodyTop1; ThisBodyTop2 = FirstBodyTop2; ThisBodyBottom1 = FirstBodyBottom1; ThisBodyBottom2 = FirstBodyBottom2; } if (i != 0) { Faces.add(PrevHead1); Faces.add(ThisTip); Faces.add(ThisHead1); AddQuad(Faces, PrevBodyTop1, PrevHead2, ThisHead2, ThisBodyTop1); AddQuad(Faces, PrevBodyBottom1, PrevBodyTop2, ThisBodyTop2, ThisBodyBottom1); Faces.add(PrevBodyBottom2); Faces.add(ThisBodyBottom2); Faces.add(Base); } else { FirstTip = ThisTip; FirstHead1 = ThisHead1; FirstHead2 = ThisHead2; FirstBodyTop1 = ThisBodyTop1; FirstBodyTop2 = ThisBodyTop2; FirstBodyBottom1 = ThisBodyBottom1; FirstBodyBottom2 = ThisBodyBottom2; } PrevTip = ThisTip; PrevHead1 = ThisHead1; PrevHead2 = ThisHead2; PrevBodyTop1 = ThisBodyTop1; PrevBodyTop2 = ThisBodyTop2; PrevBodyBottom1 = ThisBodyBottom1; PrevBodyBottom2 = ThisBodyBottom2; if (i == NrSegments) break; ++i; } /*for*/ final ArrayList<Integer> VertsFixed = new ArrayList<Integer>(); final ArrayList<Integer> NormalsFixed = new ArrayList<Integer>(); for (int i = 0; i < Points.size(); ++i) { final Vec3f Point = Points.get(i); VertsFixed.add(new Integer((int)(Point.x * one))); VertsFixed.add(new Integer((int)(Point.y * one))); VertsFixed.add(new Integer((int)(Point.z * one))); final Vec3f PointNormal = PointNormals.get(i); NormalsFixed.add(new Integer((int)(PointNormal.x * one))); NormalsFixed.add(new Integer((int)(PointNormal.y * one))); NormalsFixed.add(new Integer((int)(PointNormal.z * one))); } /*for*/ Vertices = new int[VertsFixed.size()]; Normals = new int[VertsFixed.size()]; for (int i = 0; i < Vertices.length; ++i) { Vertices[i] = VertsFixed.get(i); Normals[i] = NormalsFixed.get(i); } /*for*/ Indices = new short[Faces.size()]; NrIndexes = Indices.length; for (int i = 0; i < NrIndexes; ++i) { Indices[i] = (short)(int)Faces.get(i); } /*for*/ } /* Need to use allocateDirect to allocate buffers so garbage collector won't move them. Also make sure byte order is always native. But direct-allocation and order-setting methods are only available for ByteBuffer. Which is why buffers are allocated as ByteBuffers and then converted to more appropriate types. 
*/ VertexBuffer = java.nio.ByteBuffer.allocateDirect(Vertices.length * 4) .order(java.nio.ByteOrder.nativeOrder()) .asIntBuffer(); VertexBuffer.put(Vertices); VertexBuffer.position(0); NormalBuffer = java.nio.ByteBuffer.allocateDirect(Normals.length * 4) .order(java.nio.ByteOrder.nativeOrder()) .asIntBuffer(); NormalBuffer.put(Normals); NormalBuffer.position(0); IndexBuffer = java.nio.ByteBuffer.allocateDirect(Indices.length * 2) .order(java.nio.ByteOrder.nativeOrder()) .asShortBuffer(); IndexBuffer.put(Indices); IndexBuffer.position(0); } /*VectorViewRenderer*/ public void onDrawFrame ( GL10 gl ) { gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT); gl.glMatrixMode(GL10.GL_MODELVIEW); gl.glLoadIdentity(); gl.glLightfv ( /*light =*/ GL10.GL_LIGHT0, /*pname =*/ GL10.GL_POSITION, /*params =*/ new float[] {0.0f, 2.0f, -2.0f, 1.0f}, /*offset =*/ 0 ); gl.glLightfv ( /*light =*/ GL10.GL_LIGHT0, /*pname =*/ GL10.GL_AMBIENT, /*params =*/ new float[] {0.4f, 0.4f, 0.4f, 1.0f}, /*offset =*/ 0 ); gl.glLightfv ( /*light =*/ GL10.GL_LIGHT0, /*pname =*/ GL10.GL_SPECULAR, /*params =*/ new float[] {0.7f, 0.7f, 0.7f, 1.0f}, /*offset =*/ 0 ); gl.glTranslatef(0, 0, -3.0f); gl.glRotatef(Roll, 0, 1, 0); gl.glRotatef(Elev, 1, 0, 0); gl.glRotatef(Azi, 0, 0, 1); gl.glScalef(2.0f, 2.0f, 2.0f); gl.glFrontFace(GL10.GL_CCW); gl.glVertexPointer(3, GL10.GL_FIXED, 0, VertexBuffer); gl.glNormalPointer(GL10.GL_FIXED, 0, NormalBuffer); /* gl.glColor4f(1.0f, 1.0f, 1.0f, 1.0f); */ gl.glMaterialfv ( /*face =*/ GL10.GL_FRONT_AND_BACK, /*pname =*/ GL10.GL_AMBIENT, /*params =*/ new float[] {0.4f, 0.4f, 0.4f, 1.0f}, /*offset =*/ 0 ); gl.glMaterialfv ( /*face =*/ GL10.GL_FRONT_AND_BACK, /*pname =*/ GL10.GL_SPECULAR, /*params =*/ new float[] {0.6f, 0.6f, 0.6f, 1.0f}, /*offset =*/ 0 ); gl.glDrawElements(GL10.GL_TRIANGLES, NrIndexes, GL10.GL_UNSIGNED_SHORT, IndexBuffer); } /*onDrawFrame*/ public void onSurfaceChanged ( GL10 gl, int ViewWidth, int ViewHeight ) { gl.glViewport(0, 0, ViewWidth, ViewHeight); gl.glMatrixMode(GL10.GL_PROJECTION); gl.glLoadIdentity(); gl.glFrustumf ( - (float)ViewWidth / ViewHeight, (float)ViewWidth / ViewHeight, -1.0f, 1.0f, 1.0f, 10.0f ); } /*onSurfaceChanged*/ public void onSurfaceCreated ( GL10 gl, javax.microedition.khronos.egl.EGLConfig Config ) { gl.glClearColor(1.0f, 1.0f, 0.635f, 1.0f); gl.glEnable(GL10.GL_CULL_FACE); gl.glShadeModel(GL10.GL_SMOOTH); gl.glEnable(GL10.GL_LIGHTING); gl.glEnable(GL10.GL_LIGHT0); gl.glEnable(GL10.GL_DEPTH_TEST); gl.glEnableClientState(GL10.GL_VERTEX_ARRAY); gl.glEnableClientState(GL10.GL_NORMAL_ARRAY); } /*onSurfaceCreated*/ } /*VectorViewRenderer*/ public VectorView ( android.content.Context TheContext, android.util.AttributeSet TheAttributes ) { super(TheContext, TheAttributes); setRenderer(new VectorViewRenderer()); setRenderMode(RENDERMODE_WHEN_DIRTY); } /*VectorView*/ public void SetData ( float[] NewData ) { Azi = NewData[0]; Elev = NewData[1]; Roll = NewData[2]; requestRender(); } /*SetData*/ } /*VectorView*/
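/*
 * Feeding sketch (an assumption, not taken from the surrounding sources): VectorView.SetData
 * expects azimuth/elevation/roll in degrees, matching the glRotatef calls above. One simple way
 * to supply such values is the (deprecated) orientation sensor, whose values[] array already
 * holds azimuth, pitch and roll in degrees.
 */
class VectorViewFeedExample implements android.hardware.SensorEventListener {
    private final VectorView TheView;

    VectorViewFeedExample(VectorView TheView) {
        this.TheView = TheView;
    }

    public void onSensorChanged(android.hardware.SensorEvent event) {
        if (event.sensor.getType() == android.hardware.Sensor.TYPE_ORIENTATION) {
            /* values[0..2] = azimuth, pitch, roll in degrees */
            TheView.SetData(event.values);
        }
    }

    public void onAccuracyChanged(android.hardware.Sensor sensor, int accuracy) {
        /* nothing to do for this sketch */
    }
}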
// $Id: GridHorizCoordSys.java 70 2006-07-13 15:16:05Z caron $ package ucar.nc2.iosp.grid; import ucar.ma2.Array; import ucar.ma2.DataType; import ucar.nc2.*; import ucar.nc2.iosp.AbstractIOServiceProvider; import ucar.nc2.constants.AxisType; import ucar.nc2.constants._Coordinate; import ucar.unidata.geoloc.*; import ucar.unidata.geoloc.projection.*; import ucar.unidata.util.GaussianLatitudes; import ucar.unidata.util.StringUtil; import ucar.grid.GridTableLookup; import ucar.grid.GridDefRecord; import ucar.grib.grib2.Grib2GridTableLookup; import ucar.grib.grib1.Grib1GridTableLookup; import java.util.*; public class GridHorizCoordSys { /** * lookup table */ private GridTableLookup lookup; /** * the grid definition object */ private GridDefRecord gds; /** * group for this system */ private Group g; /** * grid name, shape name and id */ private String grid_name, shape_name, id; /** * flags */ private boolean isLatLon = true, isGaussian = false; /** * GridVariables that have this GridHorizCoordSys */ Map<String,GridVariable> varHash = new HashMap<String,GridVariable>(200); /** * List of GridVariable, sorted by product desc */ Map<String, List<GridVariable>> productHash = new HashMap<String, List<GridVariable>>(100); /** * GridVertCoordSys */ //HashMap vcsHash = new HashMap(30); // GridVertCoordSys /** * startx, starty */ private double startx, starty; private double dx, dy; /** * projection */ private ProjectionImpl proj; /** * list of attributes */ //TODO: generics private ArrayList attributes = new ArrayList(); /** * Create a new GridHorizCoordSys with the grid definition and lookup * * @param gds grid definition * @param lookup lookup table for names * @param g Group for this coord system */ GridHorizCoordSys(GridDefRecord gds, GridTableLookup lookup, Group g) { this.gds = gds; this.dx = gds.getDouble(GridDefRecord.DX) * .001; this.dy = gds.getDouble(GridDefRecord.DY) * .001; this.lookup = lookup; this.g = g; this.grid_name = AbstractIOServiceProvider.createValidNetcdfObjectName( lookup.getGridName(gds)); this.shape_name = lookup.getShapeName(gds); this.g = g; isLatLon = lookup.isLatLon(gds); grid_name = StringUtil.replace(grid_name, ' ', "_"); id = (g == null) ? grid_name : g.getName(); if (isLatLon && (lookup.getProjectionType(gds) == GridTableLookup.GaussianLatLon)) { isGaussian = true; double np = gds.getDouble( GridDefRecord.NP ); // # lats between pole and equator (octet 26/27) np = (Double.isNaN(np)) ? 
90 : np; //System.out.println( "np ="+np ); gds.addParam(GridDefRecord.DY, String.valueOf(np)); // fake - need to get actual gaussian calculation here } } /** * Get the ID * * @return the ID */ String getID() { return id; } // unique within the file /** * Get the grid name * * @return the grid name */ String getGridName() { return grid_name; } // used in CF-1 attributes /** * Get the group * * @return the group */ Group getGroup() { return g; } /** * Is this a lat/lon grid * * @return true if is lat/lon */ boolean isLatLon() { return isLatLon; } /** * Get the number of x points * * @return the number of x points */ int getNx() { return gds.getInt(GridDefRecord.NX); } /** * Get the number of Y points * * @return the number of Y points */ int getNy() { return gds.getInt(GridDefRecord.NY); } /** * Get the X spacing in kilometers * * @return the X spacing in kilometers */ private double getDxInKm() { //return gds.getDouble(GridDefRecord.DX) * .001; return dx; } /** * Get the Y spacing in kilometers * * @return the Y spacing in kilometers */ private double getDyInKm() { //return gds.getDouble(GridDefRecord.DY) * .001; return dy; } /** * Add the dimensions associated with this coord sys to the netCDF file * * @param ncfile netCDF file to add to */ void addDimensionsToNetcdfFile(NetcdfFile ncfile) { if (isLatLon) { ncfile.addDimension(g, new Dimension("lat", gds.getInt(GridDefRecord.NY), true)); ncfile.addDimension(g, new Dimension("lon", gds.getInt(GridDefRecord.NX), true)); } else { ncfile.addDimension(g, new Dimension("y", gds.getInt(GridDefRecord.NY), true)); ncfile.addDimension(g, new Dimension("x", gds.getInt(GridDefRecord.NX), true)); } } /** * Add the variables to the netCDF file * * @param ncfile the netCDF file */ void addToNetcdfFile(NetcdfFile ncfile) { if (isLatLon) { double dy = (gds.getDouble(GridDefRecord.LA2) < gds.getDouble(GridDefRecord.LA1)) ? -gds.getDouble(GridDefRecord.DY) : gds.getDouble(GridDefRecord.DY); if (isGaussian) { addGaussianLatAxis(ncfile, "lat", "degrees_north", "latitude coordinate", "latitude", AxisType.Lat); } else { addCoordAxis(ncfile, "lat", gds.getInt(GridDefRecord.NY), gds.getDouble(GridDefRecord.LA1), dy, "degrees_north", "latitude coordinate", "latitude", AxisType.Lat); } addCoordAxis(ncfile, "lon", gds.getInt(GridDefRecord.NX), gds.getDouble(GridDefRecord.LO1), gds.getDouble(GridDefRecord.DX), "degrees_east", "longitude coordinate", "longitude", AxisType.Lon); addCoordSystemVariable(ncfile, "latLonCoordSys", "time lat lon"); } else { makeProjection(ncfile); double[] yData, xData; //TODO: can these be removed if (lookup.getProjectionType(gds) == GridTableLookup.RotatedLatLon) { double dy = (gds.getDouble("La2") < gds.getDouble(GridDefRecord.LA1) ? 
-gds.getDouble(GridDefRecord.DY) : gds.getDouble(GridDefRecord.DY)); yData = addCoordAxis(ncfile, "y", gds.getInt(GridDefRecord.NY), gds.getDouble(GridDefRecord.LA1), dy, "degrees", "y coordinate of projection", "projection_y_coordinate", AxisType.GeoY); xData = addCoordAxis(ncfile, "x", gds.getInt(GridDefRecord.NX), gds.getDouble(GridDefRecord.LO1), gds.getDouble(GridDefRecord.DX), "degrees", "x coordinate of projection", "projection_x_coordinate", AxisType.GeoX); } else { yData = addCoordAxis(ncfile, "y", gds.getInt(GridDefRecord.NY), starty, getDyInKm(), "km", "y coordinate of projection", "projection_y_coordinate", AxisType.GeoY); xData = addCoordAxis(ncfile, "x", gds.getInt(GridDefRecord.NX), startx, getDxInKm(), "km", "x coordinate of projection", "projection_x_coordinate", AxisType.GeoX); } } } /** * Add a coordinate axis * * @param ncfile the netCDF file to add to * @param name name of the axis * @param n number of points * @param start starting value * @param incr increment between points * @param units units * @param desc description * @param standard_name standard name * @param axis axis type * @return the coordinate values */ private double[] addCoordAxis(NetcdfFile ncfile, String name, int n, double start, double incr, String units, String desc, String standard_name, AxisType axis) { // ncfile.addDimension(g, new Dimension(name, n, true)); Variable v = new Variable(ncfile, g, null, name); v.setDataType(DataType.DOUBLE); v.setDimensions(name); // create the data double[] data = new double[n]; for (int i = 0; i < n; i++) { data[i] = start + incr * i; } Array dataArray = Array.factory(DataType.DOUBLE, new int[]{n}, data); v.setCachedData(dataArray, false); v.addAttribute(new Attribute("units", units)); v.addAttribute(new Attribute("long_name", desc)); v.addAttribute(new Attribute("standard_name", standard_name)); v.addAttribute(new Attribute("grid_spacing", incr + " " + units)); v.addAttribute(new Attribute(_Coordinate.AxisType, axis.toString())); ncfile.addVariable(g, v); return data; } /** * Add a gaussian lat axis * * @param ncfile netCDF file to add to * @param name name of the axis * @param units units * @param desc description * @param standard_name standard name * @param axis axis type * @return the values */ private double[] addGaussianLatAxis(NetcdfFile ncfile, String name, String units, String desc, String standard_name, AxisType axis) { double np = gds.getDouble( GridDefRecord.NUMBERPARALLELS ); if (Double.isNaN(np)) { throw new IllegalArgumentException( "Gaussian LAt/Lon grid must have NumberParallels parameter"); } double startLat = gds.getDouble(GridDefRecord.LA1); double endLat = gds.getDouble(GridDefRecord.LA2); int nlats = (int) (2 * np); GaussianLatitudes gaussLats = new GaussianLatitudes(nlats); int bestStartIndex = 0, bestEndIndex = 0; double bestStartDiff = Double.MAX_VALUE; double bestEndDiff = Double.MAX_VALUE; for (int i = 0; i < nlats; i++) { double diff = Math.abs(gaussLats.latd[i] - startLat); if (diff < bestStartDiff) { bestStartDiff = diff; bestStartIndex = i; } diff = Math.abs(gaussLats.latd[i] - endLat); if (diff < bestEndDiff) { bestEndDiff = diff; bestEndIndex = i; } } assert Math.abs(bestEndIndex - bestStartIndex + 1) == gds.getInt(GridDefRecord.NY); boolean goesUp = bestEndIndex > bestStartIndex; Variable v = new Variable(ncfile, g, null, name); v.setDataType(DataType.DOUBLE); v.setDimensions(name); // create the data int n = (int) gds.getInt(GridDefRecord.NY); int useIndex = bestStartIndex; double[] data = new double[n]; double[] gaussw = new 
double[n]; for (int i = 0; i < n; i++) { data[i] = gaussLats.latd[useIndex]; gaussw[i] = gaussLats.gaussw[useIndex]; if (goesUp) { useIndex++; } else { useIndex--; } } Array dataArray = Array.factory(DataType.DOUBLE, new int[]{n}, data); v.setCachedData(dataArray, false); v.addAttribute(new Attribute("units", units)); v.addAttribute(new Attribute("long_name", desc)); v.addAttribute(new Attribute("standard_name", standard_name)); v.addAttribute(new Attribute("weights", "gaussw")); v.addAttribute(new Attribute(_Coordinate.AxisType, axis.toString())); ncfile.addVariable(g, v); v = new Variable(ncfile, g, null, "gaussw"); v.setDataType(DataType.DOUBLE); v.setDimensions(name); v.addAttribute(new Attribute("long_name", "gaussian weights (unnormalized)")); dataArray = Array.factory(DataType.DOUBLE, new int[]{n}, gaussw); v.setCachedData(dataArray, false); ncfile.addVariable(g, v); return data; } /** * Make a projection and add it to the netCDF file * * @param ncfile netCDF file */ private void makeProjection(NetcdfFile ncfile) { switch (lookup.getProjectionType(gds)) { case GridTableLookup.RotatedLatLon: makeRotatedLatLon(ncfile); break; case GridTableLookup.PolarStereographic: makePS(); break; case GridTableLookup.LambertConformal: makeLC(); break; case GridTableLookup.Mercator: makeMercator(); break; case GridTableLookup.Orthographic: makeSpaceViewOrOthographic(); break; default: throw new UnsupportedOperationException("unknown projection = " + gds.getInt(GridDefRecord.GRID_TYPE)); } Variable v = new Variable(ncfile, g, null, grid_name); v.setDataType(DataType.CHAR); v.setDimensions(new ArrayList()); // scalar char[] data = new char[]{'d'}; Array dataArray = Array.factory(DataType.CHAR, new int[0], data); v.setCachedData(dataArray, false); for (int i = 0; i < attributes.size(); i++) { Attribute att = (Attribute) attributes.get(i); v.addAttribute(att); } v.addAttribute(new Attribute("earth_shape", shape_name)); if (gds.getInt(GridDefRecord.GRID_SHAPE_CODE) == 1) { v.addAttribute( new Attribute("spherical_earth_radius_meters", new Double(gds.getDouble(GridDefRecord.RADIUS_SPHERICAL_EARTH)))); } addGDSparams(v); ncfile.addVariable(g, v); } /** * Add the GDS params to the variable as attributes * * @param v the GDS params. */ private void addGDSparams(Variable v) { // add all the gds parameters java.util.Set keys = gds.getKeys(); ArrayList<String> keyList = new ArrayList<String>(keys); Collections.sort(keyList); String pre = (( lookup instanceof Grib2GridTableLookup) || ( lookup instanceof Grib1GridTableLookup) ) ?
"GRIB" : "GDS"; for (String key : keyList) { String name = AbstractIOServiceProvider.createValidNetcdfObjectName(pre +"_param_" + key); //AbstractIOServiceProvider.createValidNetcdfObjectName("GDS_param_" + key); String vals = gds.getParam(key); try { int vali = Integer.parseInt(vals); v.addAttribute(new Attribute(name, new Integer(vali))); } catch (Exception e) { try { double vald = Double.parseDouble(vals); v.addAttribute(new Attribute(name, new Double(vald))); } catch (Exception e2) { v.addAttribute(new Attribute(name, vals)); } } } } /** * Add coordinate system variable * * @param ncfile netCDF file * @param name name of the variable * @param dims dimensions */ private void addCoordSystemVariable(NetcdfFile ncfile, String name, String dims) { Variable v = new Variable(ncfile, g, null, name); v.setDataType(DataType.CHAR); v.setDimensions(new ArrayList()); // scalar Array dataArray = Array.factory(DataType.CHAR, new int[0], new char[]{'0'}); v.setCachedData(dataArray, false); v.addAttribute(new Attribute(_Coordinate.Axes, dims)); if (!isLatLon()) { v.addAttribute(new Attribute(_Coordinate.Transforms, getGridName())); } addGDSparams(v); ncfile.addVariable(g, v); } /** * Make a LambertConformalConic projection */ private void makeLC() { // we have to project in order to find the origin proj = new LambertConformal( gds.getDouble(GridDefRecord.LATIN1), gds.getDouble(GridDefRecord.LOV), gds.getDouble(GridDefRecord.LATIN1), gds.getDouble(GridDefRecord.LATIN2)); LatLonPointImpl startLL = new LatLonPointImpl(gds.getDouble(GridDefRecord.LA1), gds.getDouble(GridDefRecord.LO1)); ProjectionPointImpl start = (ProjectionPointImpl) proj.latLonToProj(startLL); startx = start.getX(); starty = start.getY(); if (Double.isNaN(getDxInKm())) { setDxDy(startx, starty, proj); } if (GridServiceProvider.debugProj) { System.out.println("GridHorizCoordSys.makeLC start at latlon "+ startLL); double Lo2 = gds.getDouble(GridDefRecord.LO2); double La2 = gds.getDouble(GridDefRecord.LA2); LatLonPointImpl endLL = new LatLonPointImpl(La2, Lo2); System.out.println("GridHorizCoordSys.makeLC end at latlon "+ endLL); ProjectionPointImpl endPP = (ProjectionPointImpl) proj.latLonToProj(endLL); System.out.println(" end at proj coord " + endPP); double endx = startx + getNx() * getDxInKm(); double endy = starty + getNy() * getDyInKm(); System.out.println(" should be x=" + endx + " y=" + endy); } attributes.add(new Attribute("grid_mapping_name", "lambert_conformal_conic")); if (gds.getDouble(GridDefRecord.LATIN1) == gds.getDouble(GridDefRecord.LATIN2)) { attributes.add(new Attribute("standard_parallel", new Double(gds.getDouble(GridDefRecord.LATIN1)))); } else { double[] data = new double[]{gds.getDouble(GridDefRecord.LATIN1), gds.getDouble(GridDefRecord.LATIN2)}; attributes.add(new Attribute("standard_parallel", Array.factory(DataType.DOUBLE, new int[]{2}, data))); } attributes.add(new Attribute("longitude_of_central_meridian", new Double(gds.getDouble(GridDefRecord.LOV)))); attributes.add(new Attribute("latitude_of_projection_origin", new Double(gds.getDouble(GridDefRecord.LATIN1)))); } /** * Make a PolarStereographic projection */ private void makePS() { double scale = .933; String nproj = gds.getParam(GridDefRecord.NPPROJ); double latOrigin = (nproj == null || nproj.equalsIgnoreCase("true")) ? 90.0 : -90.0; // Why the scale factor?. 
accordining to GRIB docs: // "Grid lengths are in units of meters, at the 60 degree latitude circle nearest to the pole" // since the scale factor at 60 degrees = k = 2*k0/(1+sin(60)) [Snyder,Working Manual p157] // then to make scale = 1 at 60 degrees, k0 = (1+sin(60))/2 = .933 proj = new Stereographic(latOrigin, gds.getDouble(GridDefRecord.LOV), scale); // we have to project in order to find the origin ProjectionPointImpl start = (ProjectionPointImpl) proj.latLonToProj( new LatLonPointImpl( gds.getDouble(GridDefRecord.LA1), gds.getDouble(GridDefRecord.LO1))); startx = start.getX(); starty = start.getY(); if (Double.isNaN(getDxInKm())) { setDxDy(startx, starty, proj); } if (GridServiceProvider.debugProj) { System.out.println("start at proj coord " + start); LatLonPoint llpt = proj.projToLatLon(start); System.out.println(" end at lat/lon coord " + llpt); System.out.println(" should be lat=" + gds.getDouble(GridDefRecord.LA1) +" lon=" + gds.getDouble(GridDefRecord.LO1)); } attributes.add(new Attribute("grid_mapping_name", "polar_stereographic")); attributes.add(new Attribute("longitude_of_projection_origin", new Double(gds.getDouble(GridDefRecord.LOV)))); attributes.add(new Attribute("straight_vertical_longitude_from_pole", new Double(gds.getDouble(GridDefRecord.LOV)))); attributes.add(new Attribute("scale_factor_at_projection_origin", new Double(scale))); attributes.add(new Attribute("latitude_of_projection_origin", new Double(latOrigin))); } /** * Make a Mercator projection */ private void makeMercator() { /** * Construct a Mercator Projection. * @param lon0 longitude of origin (degrees) * @param par standard parallel (degrees). cylinder cuts earth at this latitude. */ double Latin = gds.getDouble(GridDefRecord.LAD); // name depends on Grib version 1 or 2 if (Double.isNaN(Latin) ) Latin = gds.getDouble(GridDefRecord.LATIN); double Lo1 = gds.getDouble(GridDefRecord.LO1); //gds.Lo1; double La1 = gds.getDouble(GridDefRecord.LA1); //gds.La1; // put longitude origin at first point - doesnt actually matter proj = new Mercator(Lo1, Latin); // find out where ProjectionPoint startP = proj.latLonToProj( new LatLonPointImpl(La1, Lo1)); startx = startP.getX(); starty = startP.getY(); attributes.add(new Attribute("grid_mapping_name", "mercator")); attributes.add(new Attribute("standard_parallel", Latin)); attributes.add(new Attribute("longitude_of_projection_origin", Lo1)); if (GridServiceProvider.debugProj) { double Lo2 = gds.getDouble(GridDefRecord.LO2); if (Lo2 < Lo1) Lo2 += 360; double La2 = gds.getDouble(GridDefRecord.LA2); LatLonPointImpl endLL = new LatLonPointImpl(La2, Lo2); System.out.println("GridHorizCoordSys.makeMercator: end at latlon= "+ endLL); ProjectionPointImpl endPP = (ProjectionPointImpl) proj.latLonToProj(endLL); System.out.println(" start at proj coord "+ new ProjectionPointImpl(startx, starty)); System.out.println(" end at proj coord " + endPP); double endx = startx + (getNx() - 1) * getDxInKm(); double endy = starty + (getNy() - 1) * getDyInKm(); System.out.println(" should be x=" + endx + " y=" + endy); } } // RotatedLatLon private void makeRotatedLatLon(NetcdfFile ncfile) { double splat = gds.getDouble(GridDefRecord.SPLAT); double splon = gds.getDouble(GridDefRecord.SPLON); double spangle = gds.getDouble(GridDefRecord.ROTATIONANGLE); // Given projection coordinates, need LatLon coordinates proj = new RotatedLatLon(splat, splon, spangle); LatLonPoint startLL = proj.projToLatLon( new ProjectionPointImpl( gds.getDouble(GridDefRecord.LO1), gds.getDouble(GridDefRecord.LA1))); 
startx = startLL.getLongitude(); starty = startLL.getLatitude(); addCoordSystemVariable(ncfile, "latLonCoordSys", "time y x"); // splat, splon, spangle attributes.add(new Attribute("grid_mapping_name", "rotated_lat_lon")); attributes.add(new Attribute("grid_south_pole_latitude", new Double(splat))); attributes.add(new Attribute("grid_south_pole_longitude", new Double(splon))); attributes.add(new Attribute("grid_south_pole_angle", new Double(spangle))); if (GridServiceProvider.debugProj) { System.out.println("Location of pole of rotated grid:"); System.out.println("Lon=" + splon + ", Lat=" + splat); System.out.println("Axial rotation about pole of rotated grid:" + spangle); System.out.println("Location of LL in rotated grid:"); System.out.println("Lon=" + gds.getDouble(GridDefRecord.LO1) + ", " + "Lat=" + gds.getDouble(GridDefRecord.LA1)); System.out.println("Location of LL in non-rotated grid:"); System.out.println("Lon=" + startx + ", Lat=" + starty); double Lo2 = gds.getDouble(GridDefRecord.LO2); double La2 = gds.getDouble(GridDefRecord.LA2); System.out.println("Location of UR in rotated grid:"); System.out.println("Lon=" + Lo2 + ", Lat=" + La2); System.out.println("Location of UR in non-rotated grid:"); LatLonPoint endUR = proj.projToLatLon(new ProjectionPointImpl(Lo2, La2)); System.out.println("Lon=" + endUR.getLongitude() + ", Lat=" + endUR.getLatitude()); double dy = (La2 < gds.getDouble(GridDefRecord.LA1)) ? -gds.getDouble(GridDefRecord.DY) : gds.getDouble(GridDefRecord.DY); double endx = gds.getDouble(GridDefRecord.LO1) + (getNx() - 1) * gds.getDouble(GridDefRecord.DX); double endy = gds.getDouble(GridDefRecord.LA1) + (getNy() - 1) * dy; System.out.println("End point rotated grid should be x="+ endx + " y=" + endy); } } /** * Make a Space View Orthographic projection */ private void makeSpaceViewOrOthographic() { double Lat0 = gds.getDouble(GridDefRecord.LAP); // sub-satellite point lat double Lon0 = gds.getDouble(GridDefRecord.LOP); // sub-satellite point lon double xp = gds.getDouble(GridDefRecord.XP); // sub-satellite point in grid lengths double yp = gds.getDouble(GridDefRecord.YP); double dx = gds.getDouble(GridDefRecord.DX); // apparent diameter in units of grid lengths double dy = gds.getDouble(GridDefRecord.DY); double major_axis = gds.getDouble(GridDefRecord.MAJOR_AXIS_EARTH); double minor_axis = gds.getDouble(GridDefRecord.MINOR_AXIS_EARTH); // Nr = altitude of camera from center, in units of radius double nr = gds.getDouble(GridDefRecord.NR) * 1e-6; double apparentDiameter = 2 * Math.sqrt((nr - 1) / (nr + 1)); // apparent diameter, units of radius (see Snyder p 173) // app diameter kmeters / app diameter grid lengths = m per grid length double gridLengthX = major_axis * apparentDiameter / dx; double gridLengthY = minor_axis * apparentDiameter / dy; // have to add to both for consistency gds.addParam(GridDefRecord.DX, String.valueOf(1000 * gridLengthX)); // meters gds.addParam(GridDefRecord.DX, new Double(1000 * gridLengthX)); gds.addParam(GridDefRecord.DY, String.valueOf(1000 * gridLengthY)); // meters gds.addParam(GridDefRecord.DY, new Double(1000 * gridLengthY)); startx = -gridLengthX * xp; starty = -gridLengthY * yp; double radius = Earth.getRadius() / 1000.0; if (nr == 1111111111.0) { // LOOK: not sure how all ones will appear as a double, need example proj = new Orthographic(Lat0, Lon0, radius); attributes.add(new Attribute("grid_mapping_name", "orthographic")); attributes.add(new Attribute("longitude_of_projection_origin", new Double(Lon0))); attributes.add(new 
Attribute("latitude_of_projection_origin", new Double(Lat0))); } else { // "space view perspective" double height = (nr - 1.0) * radius; // height = the height of the observing camera in km proj = new VerticalPerspectiveView(Lat0, Lon0, radius, height); attributes.add(new Attribute("grid_mapping_name", "vertical_perspective")); attributes.add(new Attribute("longitude_of_projection_origin", new Double(Lon0))); attributes.add(new Attribute("latitude_of_projection_origin", new Double(Lat0))); attributes.add(new Attribute("height_above_earth", new Double(height))); } if (GridServiceProvider.debugProj) { double Lo2 = gds.getDouble(GridDefRecord.LO2) + 360.0; double La2 = gds.getDouble(GridDefRecord.LA2); LatLonPointImpl endLL = new LatLonPointImpl(La2, Lo2); System.out.println( "GridHorizCoordSys.makeOrthographic end at latlon "+ endLL); ProjectionPointImpl endPP = (ProjectionPointImpl) proj.latLonToProj(endLL); System.out.println(" end at proj coord " + endPP); double endx = startx + getNx() * getDxInKm(); double endy = starty + getNy() * getDyInKm(); System.out.println(" should be x=" + endx + " y=" + endy); } } /** * Calculate the dx and dy from startx, starty and projection. * * @param startx starting x projection point * @param starty starting y projection point * @param proj projection for transform */ private void setDxDy(double startx, double starty, ProjectionImpl proj) { double Lo2 = gds.getDouble(GridDefRecord.LO2); double La2 = gds.getDouble(GridDefRecord.LA2); if (Double.isNaN(Lo2) || Double.isNaN(La2)) { return; } LatLonPointImpl endLL = new LatLonPointImpl(La2, Lo2); ProjectionPointImpl end = (ProjectionPointImpl) proj.latLonToProj(endLL); double dx = 1000 * Math.abs(end.getX() - startx) / (gds.getInt(GridDefRecord.NX) - 1); double dy = 1000 * Math.abs(end.getY() - starty) / (gds.getInt(GridDefRecord.NY) - 1); // have to change both String/Double values for consistency gds.addParam(GridDefRecord.DX, String.valueOf(dx)); gds.addParam(GridDefRecord.DX, new Double (dx)); gds.addParam(GridDefRecord.DY, String.valueOf(dy)); gds.addParam(GridDefRecord.DY, new Double(dy)); } }
package org.requirementsascode; import java.io.Serializable; import java.util.List; import java.util.Set; import java.util.function.Predicate; import java.util.stream.Stream; import org.requirementsascode.flowposition.After; import org.requirementsascode.flowposition.FlowPosition; /** * @author b_muth */ public class InterruptableFlowStep extends FlowStep implements Serializable { private static final long serialVersionUID = -2926490717985964131L; /** * Creates step with the specified name as the last step of the specified flow. * * @param stepName * the name of the step to be created * @param flow * the flow that will contain the new step */ InterruptableFlowStep(String stepName, UseCase useCase, Flow flow) { super(stepName, useCase, flow); appendToLastStepOfFlow(); } private void appendToLastStepOfFlow() { List<FlowStep> flowSteps = getFlow().getSteps(); FlowStep lastFlowStep = flowSteps.size() > 0 ? flowSteps.get(flowSteps.size() - 1) : null; setPreviousStepInFlow(lastFlowStep); setFlowPosition(new After(lastFlowStep)); } @Override public Predicate<ModelRunner> getCondition() { Predicate<ModelRunner> condition; Predicate<ModelRunner> reactWhile = getReactWhile(); if (reactWhile != null) { condition = reactWhile; } else { FlowPosition flowPosition = getFlowPosition(); condition = flowPosition.and(noStepInterrupts()); } return condition; } private Predicate<ModelRunner> noStepInterrupts() { return modelRunner -> { Class<?> theStepsEventClass = getEventClass(); Model model = getModel(); Stream<Step> interruptingStepsStream = model.getModifiableSteps().stream().filter(isInterruptingStep()); Set<Step> interruptingStepsThatCanReact = modelRunner.getStepsInStreamThatCanReactTo(theStepsEventClass, interruptingStepsStream); return interruptingStepsThatCanReact.size() == 0; }; } private Predicate<Step> isInterruptingStep() { return step -> InterruptingFlowStep.class.equals(step.getClass()); } }
package org.supercsv.cellprocessor.constraint; import java.util.HashSet; import java.util.Set; import org.supercsv.cellprocessor.CellProcessorAdaptor; import org.supercsv.cellprocessor.ift.BoolCellProcessor; import org.supercsv.cellprocessor.ift.CellProcessor; import org.supercsv.cellprocessor.ift.DateCellProcessor; import org.supercsv.cellprocessor.ift.DoubleCellProcessor; import org.supercsv.cellprocessor.ift.LongCellProcessor; import org.supercsv.cellprocessor.ift.StringCellProcessor; import org.supercsv.exception.NullInputException; import org.supercsv.exception.SuperCSVException; import org.supercsv.util.CSVContext; /** * This processor enforces the input value to belong to a specific set of given values. * <p> * * @since 1.50 * @author Dominique De Vito */ public class IsIncludedIn extends CellProcessorAdaptor implements BoolCellProcessor, DateCellProcessor, DoubleCellProcessor, LongCellProcessor, StringCellProcessor { protected Set<Object> possibleValues; public IsIncludedIn(final Set<Object> possibleValues) { super(); this.possibleValues = possibleValues; } public IsIncludedIn(final Set<Object> possibleValues, final CellProcessor next) { super(next); this.possibleValues = possibleValues; } public IsIncludedIn(final Object[] possibleValues) { super(); this.possibleValues = createSet(possibleValues); } public IsIncludedIn(final Object[] possibleValues, final CellProcessor next) { super(next); this.possibleValues = createSet(possibleValues); } private static Set<Object> createSet(Object[] arr) { int nb = (arr == null) ? 0 : arr.length; if (nb == 0) { return new HashSet<Object>(); } else { HashSet<Object> set = new HashSet<Object>((4 * nb / 3) + 1); for( int i = 0; i < arr.length; i++ ) { set.add(arr[i]); } return set; } } /** * {@inheritDoc} * * @throws SuperCSVException * upon receiving a string of an un-accepted length * @throws ClassCastException * is the parameter value cannot be cast to a String * @return the argument value if the value is unique */ @Override public Object execute(final Object value, final CSVContext context) throws SuperCSVException, ClassCastException { if( value == null ) { throw new NullInputException("Input cannot be null on line " + context.lineNumber + " at column " + context.columnNumber, context, this); } // check for required hash if( !possibleValues.contains(value) ) { throw new SuperCSVException("Entry \"" + value + "\" on line " + context.lineNumber + " column " + context.columnNumber + " is not accepted as a possible value", context, this); } return next.execute(value, context); } }
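/*
 * Usage sketch (illustrative, not part of the library): IsIncludedIn is normally wired into the
 * CellProcessor[] handed to a Super CSV reader so that a column is restricted to a fixed set of
 * values. The column layout below (a status column followed by an unprocessed column) is a
 * hypothetical example.
 */
class IsIncludedInUsageExample {
    void readWithConstraint(java.io.Reader csvSource) throws java.io.IOException {
        org.supercsv.io.CsvListReader reader = new org.supercsv.io.CsvListReader(
            csvSource, org.supercsv.prefs.CsvPreference.STANDARD_PREFERENCE);
        CellProcessor[] processors = new CellProcessor[] {
            new IsIncludedIn(new Object[] {"OPEN", "CLOSED", "PENDING"}), // column 1 must be one of these
            null                                                          // column 2 is passed through unchecked
        };
        java.util.List<?> row;
        while( (row = reader.read(processors)) != null ) {
            // each returned row has already satisfied the IsIncludedIn constraint
        }
        reader.close();
    }
}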
package org.yamcs.tctm; import java.io.IOException; import java.net.ConnectException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.SocketChannel; import java.util.Arrays; import java.util.Collection; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.yamcs.ConfigurationException; import org.yamcs.YConfiguration; import org.yamcs.YamcsServer; import org.yamcs.cmdhistory.CommandHistoryPublisher; import org.yamcs.commanding.PreparedCommand; import org.yamcs.parameter.ParameterValue; import org.yamcs.parameter.SystemParametersCollector; import org.yamcs.parameter.SystemParametersProducer; import org.yamcs.protobuf.Commanding.CommandId; import org.yamcs.time.TimeService; import org.yamcs.utils.LoggingUtils; import org.yamcs.utils.TimeEncoding; import com.google.common.util.concurrent.AbstractService; /** * Sends raw packets on Tcp socket. * @author nm * */ public class TcpTcDataLink extends AbstractService implements Runnable, TcDataLink, SystemParametersProducer { protected SocketChannel socketChannel=null; protected String host="whirl"; protected int port=10003; protected CommandHistoryPublisher commandHistoryListener; protected Selector selector; SelectionKey selectionKey; protected CcsdsSeqAndChecksumFiller seqAndChecksumFiller=new CcsdsSeqAndChecksumFiller(); protected ScheduledThreadPoolExecutor timer; protected volatile boolean disabled=false; protected int minimumTcPacketLength = -1; //the minimum size of the CCSDS packets uplinked protected volatile long tcCount; private String sv_linkStatus_id, sp_dataCount_id; private SystemParametersCollector sysParamCollector; protected final Logger log; private String yamcsInstance; private String name; TimeService timeService; public TcpTcDataLink(String yamcsInstance, String name, String spec) throws ConfigurationException { log = LoggingUtils.getLogger(this.getClass(), yamcsInstance); YConfiguration c = YConfiguration.getConfiguration("tcp"); this.yamcsInstance = yamcsInstance; host = c.getString(spec, "tcHost"); port = c.getInt(spec, "tcPort"); this.name = name; if(c.containsKey(spec, "minimumTcPacketLength")) { minimumTcPacketLength = c.getInt(spec, "minimumTcPacketLength"); } else { log.debug("minimumTcPacketLength not defined, using the default value {}", minimumTcPacketLength); } timeService = YamcsServer.getTimeService(yamcsInstance); } protected TcpTcDataLink() { log=LoggerFactory.getLogger(this.getClass().getName()); } // dummy constructor which is automatically invoked by subclass constructors public TcpTcDataLink(String host, int port) { this.host=host; this.port=port; openSocket(); log=LoggerFactory.getLogger(this.getClass().getName()); } protected long getCurrentTime() { if(timeService!=null) { return timeService.getMissionTime(); } else { return TimeEncoding.fromUnixTime(System.currentTimeMillis()); } } @Override protected void doStart() { setupSysVariables(); this.timer=new ScheduledThreadPoolExecutor(1); timer.scheduleWithFixedDelay(this, 0, 10, TimeUnit.SECONDS); notifyStarted(); } protected void openSocket() { try { InetAddress address=InetAddress.getByName(host); socketChannel=SocketChannel.open(new InetSocketAddress(address,port)); socketChannel.configureBlocking(false); socketChannel.socket().setKeepAlive(true); selector = Selector.open(); selectionKey = 
socketChannel.register(selector,SelectionKey.OP_WRITE|SelectionKey.OP_READ); log.info("TC connection established to {}:{}", host, port); } catch (IOException e) { String exc = (e instanceof ConnectException) ? ((ConnectException) e).getMessage() : e.toString(); log.info("Cannot open TC connection to {}:{} '{}'. Retrying in 10s", host, port, exc.toString()); try { socketChannel.close(); } catch (Exception e1) {} try { selector.close(); } catch (Exception e1) {} socketChannel=null; } } protected void disconnect() { if(socketChannel==null) { return; } try { socketChannel.close(); selector.close(); socketChannel=null; } catch (IOException e) { log.warn("Exception caught when checking if the socket to {}:{} is open", host, port, e); } } /** * we check if the socket is open by trying a select on the read part of it * @return */ protected boolean isSocketOpen() { final ByteBuffer bb=ByteBuffer.allocate(16); if(socketChannel==null) { return false; } boolean connected=false; try { selector.select(); if(selectionKey.isReadable()) { int read = socketChannel.read(bb); if(read>0) { log.info("Data read on the TC socket to {}:{}!! : {}",host, port, bb); connected=true; } else if(read<0) { log.warn("TC socket to "+host+":"+port+" has been closed"); socketChannel.close(); selector.close(); socketChannel=null; connected=false; } } else if(selectionKey.isWritable()){ connected=true; } else { log.warn("The TC socket to "+host+":"+port+" is neither writable nor readable"); connected=false; } } catch (IOException e) { log.warn("Exception caught when checking if the socket to {}:{} is open:",host, port, e); connected=false; } return connected; } /** * Sends */ @Override public void sendTc(PreparedCommand pc) { if(disabled) { log.warn("TC disabled, ignoring command "+pc.getCommandId()); return; } ByteBuffer bb = null; if(pc.getBinary().length<minimumTcPacketLength) { //enforce the minimum packet length bb=ByteBuffer.allocate(minimumTcPacketLength); bb.put(pc.getBinary()); bb.putShort(4, (short)(minimumTcPacketLength - 7)); // fix packet length } else { bb=ByteBuffer.wrap(pc.getBinary()); bb.putShort(4, (short)(pc.getBinary().length - 7)); } int retries=5; boolean sent=false; int seqCount=seqAndChecksumFiller.fill(bb, pc.getCommandId().getGenerationTime()); bb.rewind(); while (!sent&&(retries>0)) { if (!isSocketOpen()) { openSocket(); } if(isSocketOpen()) { try { socketChannel.write(bb); tcCount++; sent=true; } catch (IOException e) { log.warn("Error writing to TC socket to {}:{} : {}", host, port, e.getMessage()); try { if(socketChannel.isOpen()) { socketChannel.close(); } selector.close(); socketChannel = null; } catch (IOException e1) { e1.printStackTrace(); } } } retries--; if(!sent && (retries>0)) { try { log.warn("Command not sent, retrying in 2 seconds"); Thread.sleep(2000); } catch (InterruptedException e) { log.warn("exception {} thrown when sleeping 2 sec", e.toString()); Thread.currentThread().interrupt(); } } } if(sent) { handleAcks(pc.getCommandId(), seqCount); } else { timer.schedule(new TcAckStatus(pc.getCommandId(), "Acknowledge_FSC_Status","NACK"), 100, TimeUnit.MILLISECONDS); } } protected void handleAcks(CommandId cmdId, int seqCount ) { timer.schedule(new TcAck(cmdId,"Final_Sequence_Count", Integer.toString(seqCount)), 200, TimeUnit.MILLISECONDS); timer.schedule(new TcAckStatus(cmdId,"Acknowledge_FSC","ACK: OK"), 400, TimeUnit.MILLISECONDS); timer.schedule(new TcAckStatus(cmdId,"Acknowledge_FRC","ACK: OK"), 800, TimeUnit.MILLISECONDS); timer.schedule(new TcAckStatus(cmdId,"Acknowledge_DASS","ACK:
OK"), 1200, TimeUnit.MILLISECONDS); timer.schedule(new TcAckStatus(cmdId,"Acknowledge_MCS","ACK: OK"), 1600, TimeUnit.MILLISECONDS); timer.schedule(new TcAckStatus(cmdId,"Acknowledge_A","ACK A: OK"), 2000, TimeUnit.MILLISECONDS); timer.schedule(new TcAckStatus(cmdId,"Acknowledge_B","ACK B: OK"), 3000, TimeUnit.MILLISECONDS); timer.schedule(new TcAckStatus(cmdId,"Acknowledge_C","ACK C: OK"), 4000, TimeUnit.MILLISECONDS); timer.schedule(new TcAckStatus(cmdId,"Acknowledge_D","ACK D: OK"), 10000, TimeUnit.MILLISECONDS); } @Override public void setCommandHistoryPublisher(CommandHistoryPublisher commandHistoryListener) { this.commandHistoryListener=commandHistoryListener; } @Override public String getLinkStatus() { if (disabled) { return "DISABLED"; } if(isSocketOpen()) { return "OK"; } else { return "UNAVAIL"; } } @Override public String getDetailedStatus() { if(disabled) return String.format("DISABLED (should connect to %s:%d)", host, port); if(isSocketOpen()) { return String.format("OK, connected to %s:%d", host, port); } else { return String.format("Not connected to %s:%d", host, port); } } @Override public void disable() { disabled=true; if(isRunning()) { disconnect(); } } @Override public void enable() { disabled=false; } @Override public boolean isDisabled() { return disabled; } @Override public void run() { if(!isRunning() || disabled) { return; } if (!isSocketOpen()) { openSocket(); } } @Override public void doStop() { disconnect(); notifyStopped(); } class TcAck implements Runnable { CommandId cmdId; String name; String value; TcAck(CommandId cmdId, String name, String value) { this.cmdId=cmdId; this.name=name; this.value=value; } @Override public void run() { commandHistoryListener.updateStringKey(cmdId,name,value); } } public class TcAckStatus extends TcAck { public TcAckStatus(CommandId cmdId, String name, String value) { super(cmdId, name, value); } @Override public void run() { long instant = getCurrentTime(); commandHistoryListener.updateStringKey(cmdId,name+"_Status",value); commandHistoryListener.updateTimeKey(cmdId,name+"_Time", instant); } } @Override public long getDataCount() { return tcCount; } protected void setupSysVariables() { this.sysParamCollector = SystemParametersCollector.getInstance(yamcsInstance); if(sysParamCollector!=null) { sysParamCollector.registerProvider(this, null); sv_linkStatus_id = sysParamCollector.getNamespace()+"/"+name+"/linkStatus"; sp_dataCount_id = sysParamCollector.getNamespace()+"/"+name+"/dataCount"; } else { log.info("System variables collector not defined for instance {} ", yamcsInstance); } } @Override public Collection<ParameterValue> getSystemParameters() { long time = getCurrentTime(); ParameterValue linkStatus = SystemParametersCollector.getPV(sv_linkStatus_id, time, getLinkStatus()); ParameterValue dataCount = SystemParametersCollector.getPV(sp_dataCount_id, time, getDataCount()); return Arrays.asList(linkStatus, dataCount); } public int getMiniminimumTcPacketLength() { return minimumTcPacketLength; } }
package org.pm4j.core.pm.impl; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.pm4j.core.exception.PmRuntimeException; import org.pm4j.core.pm.PmCommand; import org.pm4j.core.pm.PmCommand.CommandState; import org.pm4j.core.pm.PmCommandDecorator; import org.pm4j.core.pm.PmObject; import org.pm4j.core.pm.PmTab; import org.pm4j.core.pm.PmTabSet; import org.pm4j.core.pm.annotation.PmCommandCfg; import org.pm4j.core.pm.annotation.PmCommandCfg.BEFORE_DO; import org.pm4j.core.pm.impl.connector.PmTabSetConnector; import org.pm4j.navi.NaviLink; /** * Basic implementation of a {@link PmTabSet}. * <p> * Each child PM having the marker interface {@link PmTab} will be considered as a tab of this set. * <p> * The tabs are sorted as they are declared. * * @author Olaf Boede */ public class PmTabSetImpl extends PmObjectBase implements PmTabSet { private static final Log LOG = LogFactory.getLog(PmTabSetImpl.class); /** The set of tab switch command decorator definitions. */ private List<TabSwitchDecoratorDefintion> pmCmdDecoratorDefinitions = new ArrayList<TabSwitchDecoratorDefintion>(); /** PM of the currently active tab. */ private PmTab currentTabPm; /** * Creates the tab set PM. * * @param pmParent the PM hierarchy parent instance. */ public PmTabSetImpl(PmObject pmParent) { super(pmParent); } /** * Adds a {@link PmCommandDecorator} to be executed before a tab switch. * <p> * Alternatively you may override {@link #switchToTabPmImpl(PmCommand, PmTab, PmTab)}. * * @param fromTab The from-tab to define the decorator for. If it is <code>null</code>, the decorator will be active for all from-tabs. * @param toTab The to-tab to define the decorator for. If it is <code>null</code>, the decorator will be active for all to-tabs. * @param decorator The decorator logic to be executed before the tab switch. */ public void addTabSwitchCommandDecorator(PmTab fromTab, PmTab toTab, PmCommandDecorator decorator) { pmCmdDecoratorDefinitions.add(new TabSwitchDecoratorDefintion(fromTab, toTab, decorator)); } @Override public boolean switchToTabPm(PmTab toTab) { PmTab _fromTab = getCurrentTabPm(); if (_fromTab == toTab) { // nothing to do. 'successfully' done. return true; } // ensure that the to-tab is initialized (was an issue in domain specific // unit tests): PmInitApi.initPmTree(toTab); // Delegate to an undoable command. PmTabChangeCommand tabChangeCommand = new PmTabChangeCommand(this, _fromTab, toTab); for (TabSwitchDecoratorDefintion d : pmCmdDecoratorDefinitions) { if (d.isDecoratorForSwitch(_fromTab, toTab)) { tabChangeCommand.addCommandDecorator(d.getDecorator()); } } PmTabChangeCommand executedCommand = (PmTabChangeCommand) tabChangeCommand.doIt(); if (LOG.isDebugEnabled() && executedCommand.getCommandState() != CommandState.EXECUTED) { String msg = "The UI logic prevented a switch from tab " + PmUtil.getPmLogString(_fromTab) + " to " + PmUtil.getPmLogString(toTab) + "."; if (executedCommand.getVetoCommandDecorator() != null) { msg += " It has been prevented by the command decorator: " + executedCommand.getVetoCommandDecorator(); } LOG.debug(msg); } // If the UI logic prevents the tab navigation, no exception will be thrown. // So we check here if the tab navigation was really successfully performed. if (executedCommand.getCommandState() == CommandState.EXECUTED) { // The visible tab needs to be loaded/initialized. 
BroadcastPmEventProcessor.doDeferredEventsForVisiblePms(toTab); return true; } else { return false; } } // TODO: rename to beforeSwitch and add an afterSwitch; remove the command reference. protected boolean switchToTabPmImpl(@Deprecated PmCommand tabChangeCmd, PmTab fromTab, PmTab toTab) { return true; } /** * Gets called before a tab switch operation. Subclasses may control here if * the tab switch should be allowed.<br> * The default implementation always allows to switch the tab. * <p> * For more generic logic you may consider using * {@link #addTabSwitchCommandDecorator(PmTab, PmTab, PmCommandDecorator)}. * * @param fromTab * The tab to leave. * @param toTab * The tab to enter. * @param tabChangeCmd * The internally used tab change command. May be used for command confirmation scenarios. * * @return <code>true</code> if the switch is allowed.<br> * <code>false</code> prevents the tab switch. * */ protected boolean beforeSwitch(PmTab fromTab, PmTab toTab, PmCommand tabChangeCmd) { return switchToTabPmImpl(tabChangeCmd, fromTab, toTab); } /** * @return The currently active tab. */ @Override public PmTab getCurrentTabPm() { PmInitApi.initThisPmOnly(this); return (currentTabPm != null) ? currentTabPm : getFirstTabPm(); } /** * The default implementation just provides the set of all sub-elements. * <p> * Please override the implementation if that default behavior does not match. * * @return The set of tabs. */ @Override public List<PmTab> getTabPms() { return PmUtil.getPmChildrenOfType(this, PmTab.class); } @Override public int getTabIndex(PmTab pmTab) { List<PmTab> list = getTabPms(); for (int i=0; i<list.size(); ++i) { PmTab t = list.get(i); if (t.equals(pmTab)) { return i; } } // not found throw new PmRuntimeException(this, "The given tab does not belong to the tab set: " + pmTab); } /** * Provides the first tab within the tab set. * <p> * The default implementation just provides the first sub-element.<br> * It should be overridden if the first tab is not the first sub-element. * * @return The first tab of the tab set. */ protected PmTab getFirstTabPm() { List<PmTab> tabs = getTabPms(); if (tabs.size() > 0) { return tabs.get(0); } else { LOG.warn("Tabset '" + getPmRelativeName() + "' has no sub tab PMs."); return null; } } /** * @return A view technology specific tab set logic connector. */ private PmTabSetConnector getPmToTabSetViewConnector() { return (PmTabSetConnector) getPmToViewConnector(); } /** * Container for a tab switch specific command decorator definition. */ class TabSwitchDecoratorDefintion { private String fromName; private String toName; private PmCommandDecorator decorator; public TabSwitchDecoratorDefintion(PmTab fromTab, PmTab toTab, PmCommandDecorator decorator) { this(fromTab != null ? fromTab.getPmRelativeName() : null, toTab != null ? toTab.getPmRelativeName() : null, decorator); } public TabSwitchDecoratorDefintion(String fromTabName, String toTabName, PmCommandDecorator decorator) { this.fromName = fromTabName; this.toName = toTabName; this.decorator = decorator; } public boolean isDecoratorForSwitch(PmObject fromTab, PmObject toTab) { return ((toName == null) || (toTab != null && toTab.getPmRelativeName().equals(toName))) && ((fromName == null) || (fromTab != null) && fromTab.getPmRelativeName().equals(fromName)); } public PmCommandDecorator getDecorator() { return decorator; } } /** * The command that internally executes a tab switch. * <p> * It supports undo and command decorators. 
*/
  @PmCommandCfg(beforeDo=BEFORE_DO.DO_NOTHING)
  static class PmTabChangeCommand extends PmCommandImpl {
    private final PmTabSetImpl tabSet;
    private final PmTab fromTab;
    private final PmTab toTab;

    public PmTabChangeCommand(PmTabSetImpl tabSet, PmTab fromTab, PmTab toTab) {
      super(toTab);
      this.tabSet = tabSet;
      this.fromTab = fromTab;
      this.toTab = toTab;

      assert fromTab != null;
      assert toTab != null;
    }

    /**
     * The tab switch can only be executed if the usual preconditions are fulfilled
     * and the tab set's {@link PmTabSetImpl#beforeSwitch(PmTab, PmTab, PmCommand)}
     * logic allows the switch.
     */
    @Override
    protected boolean beforeDo() {
      boolean canDo = super.beforeDo();
      if (canDo) {
        canDo = tabSet.beforeSwitch(fromTab, toTab, this);
      }
      return canDo;
    }

    @Override
    protected void doItImpl() {
      PmTabSetConnector viewConnector = tabSet.getPmToTabSetViewConnector();
      if (viewConnector != null) {
        viewConnector.switchToTab(toTab);
      }
      // Only successfully executed tab switches need to be undone.
      setUndoCommand(new PmTabChangeCommand(tabSet, toTab, fromTab));
    }

    @Override
    protected NaviLink afterDo(boolean changeCommandHistory) {
      tabSet.currentTabPm = toTab;
      return super.afterDo(changeCommandHistory);
    }
  }

  /**
   * Provides a possibility to react on tab changes together with the information about the source and target tab.
   *
   * @author MMANZ
   */
  public static class PmTabSetCommandDecoratorAdapter implements PmCommandDecorator {

    @Override
    public final boolean beforeDo(PmCommand cmd) {
      PmTabChangeCommand tabChangeCommand = (PmTabChangeCommand) cmd;
      return beforeTabChange(tabChangeCommand.fromTab, tabChangeCommand.toTab);
    }

    /* (non-Javadoc)
     * @see org.pm4j.core.pm.PmCommandDecorator#afterDo(org.pm4j.core.pm.PmCommand)
     */
    @Override
    public final void afterDo(PmCommand cmd) {
      PmTabChangeCommand tabChangeCommand = (PmTabChangeCommand) cmd;
      afterTabChange(tabChangeCommand.fromTab, tabChangeCommand.toTab);
    }

    protected void afterTabChange(PmTab fromTab, PmTab toTab) {
      // do nothing
    }

    protected boolean beforeTabChange(PmTab fromTab, PmTab toTab) {
      return true;
    }
  }
}
package io.narayana.lra.coordinator.api; import io.narayana.lra.Current; import io.narayana.lra.coordinator.domain.model.LRAData; import io.narayana.lra.coordinator.domain.model.LRAStatusHolder; import io.narayana.lra.coordinator.domain.model.Transaction; import io.narayana.lra.coordinator.domain.service.LRAService; import io.narayana.lra.logging.LRALogger; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import io.swagger.annotations.ResponseHeader; import javax.enterprise.context.ApplicationScoped; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.inject.Inject; import javax.ws.rs.NotFoundException; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.POST; import javax.ws.rs.Produces; import javax.ws.rs.PUT; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.core.Context; import javax.ws.rs.core.Link; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLEncoder; import java.util.HashMap; import java.util.List; import java.util.Map; import org.eclipse.microprofile.lra.annotation.LRAStatus; import static io.narayana.lra.LRAConstants.CLIENT_ID_PARAM_NAME; import static io.narayana.lra.LRAConstants.COMPENSATE; import static io.narayana.lra.LRAConstants.COMPLETE; import static io.narayana.lra.LRAConstants.COORDINATOR_PATH_NAME; import static io.narayana.lra.LRAConstants.PARENT_LRA_PARAM_NAME; import static io.narayana.lra.LRAConstants.RECOVERY_COORDINATOR_PATH_NAME; import static io.narayana.lra.LRAConstants.STATUS; import static io.narayana.lra.LRAConstants.STATUS_PARAM_NAME; import static io.narayana.lra.LRAConstants.TIMELIMIT_PARAM_NAME; import static java.util.stream.Collectors.toList; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import static javax.ws.rs.core.Response.Status.PRECONDITION_FAILED; import static org.eclipse.microprofile.lra.annotation.ws.rs.LRA.LRA_HTTP_CONTEXT_HEADER; import static org.eclipse.microprofile.lra.annotation.ws.rs.LRA.LRA_HTTP_RECOVERY_HEADER; @ApplicationScoped @Path(COORDINATOR_PATH_NAME) @Api(value = COORDINATOR_PATH_NAME, tags = "LRA Coordinator") public class Coordinator { @Context private UriInfo context; @Inject // Will not work in an async scenario: CDI-452 private LRAService lraService; // Performing a GET on /lra-io.narayana.lra.coordinator returns a list of all LRAs. 
@GET @Path("/") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Returns all LRAs", notes = "Gets both active and recovering LRAs", response = LRAData.class, responseContainer = "List") public List<LRAData> getAllLRAs( @ApiParam(value = "Filter the returned LRAs to only those in the give state (see CompensatorStatus)", required = false) @QueryParam(STATUS_PARAM_NAME) @DefaultValue("") String state) { List<LRAStatusHolder> lras = lraService.getAll(state); if (lras == null) { LRALogger.i18NLogger.error_invalidQueryForGettingLraStatuses(state); throw new WebApplicationException(Response.status(BAD_REQUEST) .entity(String.format("Invalid query '%s' to get LRAs", state)).build()); } return lras.stream().map(Coordinator::convert).collect(toList()); } private static LRAData convert(LRAStatusHolder lra) { return new LRAData(lra.getLraId(), lra.getClientId(), lra.getStatus().name(), lra.isClosed(), lra.isCancelled(), lra.isRecovering(), lra.isActive(), lra.isTopLevel(), lra.getStartTime(), lra.getFinishTime()); } @GET @Path("{LraId}/status") @Produces(MediaType.TEXT_PLAIN) @ApiOperation(value = "Obtain the status of an LRA as a string", response = String.class) @ApiResponses({ @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 204, message = "The LRA exists and has not yet been asked to close or cancel " + " - compare this response with a 200 response.s"), @ApiResponse(code = 200, message = "The LRA exists. The status is reported in the content body.") }) public Response getLRAStatus( @ApiParam(value = "The unique identifier of the LRA", required = true) @PathParam("LraId")String lraId, @QueryParam("effectivelyActive") @DefaultValue("false") boolean isEffectivelyActive) throws NotFoundException { Transaction transaction = lraService.getTransaction(toURI(lraId)); LRAStatus status = transaction.getLRAStatus(); if (status == null) { return Response.noContent().build(); // 204 means the LRA is still active } if (isEffectivelyActive) { // effectively active means that LRA is either in LRAStatus.Active status or it's a nested LRA in one // of the final states (LRAStatus.Cancelled or LRAStatus.Closed) if (!transaction.isTopLevel() && (status == LRAStatus.Cancelled || status == LRAStatus.Closed)) { return Response.noContent().build(); } } return Response.ok(status.name()).build(); } @GET @Path("{LraId}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Obtain the status of an LRA as a JSON structure", response = String.class) @ApiResponses({ @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 204, message = "The LRA exists and has not yet been asked to close or cancel " + " - compare this response with a 200 response.s"), @ApiResponse(code = 200, message = "The LRA exists. The status is reported in the content body.") }) public LRAData getLRAInfo( @ApiParam(value = "The unique identifier of the LRA", required = true) @PathParam("LraId") String lraId) throws NotFoundException { return lraService.getLRA(toURI(lraId)); } /* @GET @Path("{LraId}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Obtain the status of an LRA as a string", response = String.class) @ApiResponses({ @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 200, message = "The LRA exists. 
A JSON representation of the state is reported in the content body.") }) public LRAStatus getDetailedLRAStatus( @ApiParam(value = "The unique identifier of the LRA", required = true) @PathParam("LraId")String lraId) throws NotFoundException { return new LRAStatus(lraService.getTransaction(toURI(lraId))); }*/ // Performing a GET on /lra-io.narayana.lra.coordinator/<LraId> returns 200 if the lra is still active. @GET @Path("/status/{LraId}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Indicates whether an LRA is active", response = Boolean.class) @ApiResponses({ @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 200, message = "If the LRA exists") }) public Boolean isActiveLRA( @ApiParam(value = "The unique identifier of the LRA", required = true) @PathParam("LraId")String lraId) throws NotFoundException { return lraService.getTransaction(toURI(lraId)).isActive(); } // Performing a POST on /lra-io.narayana.lra.coordinator/start?ClientID=<ClientID> will start a new lra with a default timeout and // return a lra URL of the form <machine>/lra-io.narayana.lra.coordinator/<LraId>. // Adding a query parameter, timeout=<timeout>, will start a new lra with the specified timeout. // If the lra is terminated because of a timeout, the lra URL is deleted and all further invocations on the URL will return 404. // The invoker can assume this was equivalent to a compensate operation. @POST @Path("start") @Produces({MediaType.APPLICATION_JSON, MediaType.TEXT_PLAIN}) @ApiOperation(value = "Start a new LRA", notes = "The LRA model uses a presumed nothing protocol: the coordinator must communicate\n" + "with Compensators in order to inform them of the LRA activity. Every time a\n" + "Compensator is enrolled with a LRA, the coordinator must make information about\n" + "it durable so that the Compensator can be contacted when the LRA terminates,\n" + "even in the event of subsequent failures. 
Compensators, clients and coordinators\n" + "cannot make any presumption about the state of the global transaction without\n" + "consulting the coordinator and all compensators, respectively.", response = String.class) @ApiResponses({ @ApiResponse(code = 201, message = "The request was successful and the response body contains the id of the new LRA"), @ApiResponse(code = 500, message = "A new LRA could not be started") }) public Response startLRA( @ApiParam(value = "Each client is expected to have a unique identity (which can be a URL).", required = false) @QueryParam(CLIENT_ID_PARAM_NAME) @DefaultValue("") String clientId, @ApiParam(value = "Specifies the maximum time in milli seconds that the LRA will exist for.\n" + "If the LRA is terminated because of a timeout, the LRA URL is deleted.\n" + "All further invocations on the URL will return 404.\n" + "The invoker can assume this was equivalent to a compensate operation.") @QueryParam(TIMELIMIT_PARAM_NAME) @DefaultValue("0") Long timelimit, @ApiParam(value = "The enclosing LRA if this new LRA is nested", required = false) @QueryParam(PARENT_LRA_PARAM_NAME) @DefaultValue("") String parentLRA, @HeaderParam(LRA_HTTP_CONTEXT_HEADER) String parentId) throws WebApplicationException { URI parentLRAUrl = null; if (parentLRA != null && !parentLRA.isEmpty()) { parentLRAUrl = toURI(parentLRA, "Invalid parent LRA id"); } String coordinatorUrl = String.format("%s%s", context.getBaseUri(), COORDINATOR_PATH_NAME); URI lraId = lraService.startLRA(coordinatorUrl, parentLRAUrl, clientId, timelimit); if (parentLRAUrl != null) { // register with the parentLRA as a participant Client client = ClientBuilder.newClient(); String compensatorUrl = null; URL url = null; try { url = lraId.toURL(); compensatorUrl = String.format("%s/%s", coordinatorUrl, URLEncoder.encode(url.toString(), "UTF-8")); } catch (UnsupportedEncodingException | MalformedURLException e) { LRALogger.i18NLogger.error_invalidFormatToEncodeUrl(url, e); throw new WebApplicationException("Invalid parent LRA id", e, BAD_REQUEST); } Response response; if (lraService.hasTransaction(parentLRAUrl)) { response = joinLRAViaBody(parentLRAUrl.toASCIIString(), timelimit, null, compensatorUrl); } else { response = client.target(parentLRA).request().put(Entity.text(compensatorUrl)); } if (response.getStatus() != Response.Status.OK.getStatusCode()) { return response; } } Current.push(lraId); return Response.status(Response.Status.CREATED) .entity(lraId) .header(LRA_HTTP_CONTEXT_HEADER, Current.getContexts()) .build(); } @PUT @Path("{LraId}/renew") @ApiOperation(value = "Update the TimeLimit for an existing LRA", notes = "LRAs can be automatically cancelled if they aren't closed or cancelled before the TimeLimit\n" + "specified at creation time is reached.\n" + "The time limit can be updated.\n") @ApiResponses({ @ApiResponse(code = 200, message = "If the LRA timelimit has been updated"), @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 412, message = "The LRA is not longer active (ie in the complete or compensate messages have been sent") }) public Response renewTimeLimit( @ApiParam(value = "The new time limit for the LRA", required = true) @QueryParam(TIMELIMIT_PARAM_NAME) @DefaultValue("0") Long timelimit, @PathParam("LraId")String lraId) throws NotFoundException { return Response.status(lraService.renewTimeLimit(toURI(lraId), timelimit)).build(); } @GET @Path("{NestedLraId}/status") public Response getNestedLRAStatus(@PathParam("NestedLraId")String 
nestedLraId) { if (!lraService.hasTransaction(nestedLraId)) { // it must have compensated TODO maybe it's better to keep nested LRAs in separate collection return Response.ok(LRAStatus.Cancelled.name()).build(); } Transaction lra = lraService.getTransaction(toURI(nestedLraId)); LRAStatus status = lra.getLRAStatus(); if (status == null || lra.getLRAStatus() == null) { LRALogger.i18NLogger.error_cannotGetStatusOfNestedLraURI(nestedLraId, lra.getId()); throw new WebApplicationException(Response.status(Response.Status.PRECONDITION_FAILED) .entity(String.format("LRA is in the wrong state for operation '%s': %s", "getNestedLRAStatus", "The LRA is still active")).build()); } return Response.ok(lra.getLRAStatus().name()).build(); } @PUT @Path("{NestedLraId}/complete") public Response completeNestedLRA(@PathParam("NestedLraId") String nestedLraId) { return endLRA(toURI(nestedLraId), false, true); } @PUT @Path("{NestedLraId}/compensate") public Response compensateNestedLRA(@PathParam("NestedLraId") String nestedLraId) { return endLRA(toURI(nestedLraId), true, true); } @PUT @Path("{NestedLraId}/forget") public Response forgetNestedLRA(@PathParam("NestedLraId") String nestedLraId) { lraService.remove(null, toURI(nestedLraId)); return Response.ok().build(); } // Performing a PUT on lra-coordinator/<LraId>/close will trigger the successful completion of the lra and all // compensators will be dropped by the io.narayana.lra.coordinator. // The complete message will be sent to the compensators. Question: is this message best effort or at least once? // Upon termination, the URL is implicitly deleted. If it no longer exists, then 404 will be returned. // The invoker cannot know for sure whether the lra completed or compensated without enlisting a participant. // TODO rework spec to allow an LRAStatus header everywhere @PUT @Path("{LraId}/close") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Attempt to close an LRA", notes = "Trigger the successful completion of the LRA. All" + " compensators will be dropped by the coordinator." + " The complete message will be sent to the compensators." + " Upon termination, the URL is implicitly deleted." + " The invoker cannot know for sure whether the lra completed or compensated without enlisting a participant.", response = Boolean.class) @ApiResponses({ @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 200, message = "The complete message was sent to all coordinators") }) public Response closeLRA( @ApiParam(value = "The unique identifier of the LRA", required = true) @PathParam("LraId")String txId) throws NotFoundException { return endLRA(toURI(txId), false, false); } @PUT @Path("{LraId}/cancel") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Attempt to cancel an LRA", notes = " Trigger the compensation of the LRA. All" + " compensators will be triggered by the coordinator (ie the compensate message will be sent to each compensators)." + " Upon termination, the URL is implicitly deleted." 
+ " The invoker cannot know for sure whether the lra completed or compensated without enlisting a participant.", response = Boolean.class) @ApiResponses({ @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 200, message = "The compensate message was sent to all coordinators") }) public Response cancelLRA( @ApiParam(value = "The unique identifier of the LRA", required = true) @PathParam("LraId")String lraId) throws NotFoundException { return endLRA(toURI(lraId), true, false); } private Response endLRA(URI lraId, boolean compensate, boolean fromHierarchy) throws NotFoundException { LRAStatusHolder status = lraService.endLRA(lraId, compensate, fromHierarchy); return Response.ok(status.getStatus().name()).build(); // return compensatorData == null // ? Response.ok().status(status.getHttpStatus()).build() // : Response.ok(compensatorData).status(status.getHttpStatus()).build(); } @PUT @Path("{LraId}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "A Compensator can join with the LRA at any time prior to the completion of an activity", response = String.class) @ResponseHeader(name = LRA_HTTP_RECOVERY_HEADER, response = String.class, description = "If the participant is successfully registered with the LRA then this header\n" + " will contain a unique resource reference for that participant:\n" + " - HTTP GET on the reference returns the original participant URL;\n" + " - HTTP PUT on the reference will overwrite the old participant URL with the new one supplied.") @ApiResponses({ @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 412, message = "The LRA is not longer active (ie in the complete or compensate messages have been sent"), @ApiResponse(code = 200, message = "The participant was successfully registered with the LRA and" + " the response body contains a unique resource reference for that participant:\n" + " - HTTP GET on the reference returns the original participant URL;\n" + " - HTTP PUT on the reference will overwrite the old participant URL with the new one supplied." ) }) public Response joinLRAViaBody( @ApiParam(value = "The unique identifier of the LRA", required = true) @PathParam("LraId")String lraId, @ApiParam(value = "The time limit (in seconds) that the Compensator can guarantee that it can compensate the work performed by the service." + " After this time period has elapsed, it may no longer be possible to undo the work within the scope of this (or any enclosing) LRA." + " It may therefore be necessary for the application or service to start other activities to explicitly try to compensate this work." + " The application or coordinator may use this information to control the lifecycle of a LRA.", required = true) @QueryParam(TIMELIMIT_PARAM_NAME) @DefaultValue("0") long timeLimit, @ApiParam(value = "The resource paths that the coordinator will use to complete or compensate and to request" + " the status of the participant. 
The link rel names are" +
                    " complete, compensate and status.", required = false)
            @HeaderParam("Link") @DefaultValue("") String compensatorLink,
            @ApiParam(value = "opaque data that will be stored with the coordinator and passed back to\n"
                    + "the participant when the LRA is closed or cancelled.\n")
                    String compensatorData) throws NotFoundException {
        // test to see if the join request contains any participant specific data
        boolean isLink = isLink(compensatorData);

        if (compensatorLink != null && !compensatorLink.isEmpty()) {
            return joinLRA(toURI(lraId), timeLimit, null, compensatorLink, compensatorData);
        }

        if (!isLink) { // interpret the content as a standard participant url
            compensatorData += "/";

            Map<String, String> terminateURIs = new HashMap<>();

            try {
                terminateURIs.put(COMPENSATE, new URL(compensatorData + "compensate").toExternalForm());
                terminateURIs.put(COMPLETE, new URL(compensatorData + "complete").toExternalForm());
                terminateURIs.put(STATUS, new URL(compensatorData + "status").toExternalForm());
            } catch (MalformedURLException e) {
                if (LRALogger.logger.isTraceEnabled()) {
                    LRALogger.logger.tracef(e, "Cannot join to LRA id '%s' with body as compensator url '%s' is invalid",
                            lraId, compensatorData);
                }

                return Response.status(PRECONDITION_FAILED).build();
            }

            // register with the coordinator
            // put the lra id in an http header
            StringBuilder linkHeaderValue = new StringBuilder();

            terminateURIs.forEach((k, v) -> makeLink(linkHeaderValue, "", k, v)); // or use Collectors.joining(",")

            compensatorData = linkHeaderValue.toString();
        }

        return joinLRA(toURI(lraId), timeLimit, null, compensatorData, null);
    }

    private static StringBuilder makeLink(StringBuilder b, String uriPrefix, String key, String value) {
        if (value == null) {
            return b;
        }

        String terminationUri = uriPrefix == null ? value : String.format("%s%s", uriPrefix, value);
        Link link = Link.fromUri(terminationUri).rel(key).type(MediaType.TEXT_PLAIN).build();

        if (b.length() != 0) {
            b.append(',');
        }

        return b.append(link);
    }

    private boolean isLink(String linkString) {
        try {
            Link.valueOf(linkString);
            return true;
        } catch (IllegalArgumentException e) {
            return false;
        }
    }

    private Response joinLRA(URI lraId, long timeLimit, String compensatorUrl, String linkHeader, String userData)
            throws NotFoundException {
        final String recoveryUrlBase = String.format("http://%s/%s",
                context.getRequestUri().getAuthority(), RECOVERY_COORDINATOR_PATH_NAME);

        StringBuilder recoveryUrl = new StringBuilder();

        int status = lraService.joinLRA(recoveryUrl, lraId, timeLimit, compensatorUrl, linkHeader, recoveryUrlBase, userData);

        try {
            return Response.status(status)
                    .entity(recoveryUrl)
                    .location(new URI(recoveryUrl.toString()))
                    .header(LRA_HTTP_RECOVERY_HEADER, recoveryUrl)
                    .build();
        } catch (URISyntaxException e) {
            LRALogger.i18NLogger.error_invalidRecoveryUrlToJoinLRAURI(recoveryUrl.toString(), lraId);

            throw new WebApplicationException("Invalid recovery URL", e, INTERNAL_SERVER_ERROR);
        }
    }

    // A participant can resign from a lra at any time prior to the completion of an activity by performing a
    // PUT on lra-coordinator/<LraId>/remove with the URL of the participant.
@PUT @Path("{LraId}/remove") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "A Compensator can resign from the LRA at any time prior to the completion of an activity", response = Boolean.class) @ApiResponses({ @ApiResponse(code = 404, message = "The coordinator has no knowledge of this LRA"), @ApiResponse(code = 412, message = "The LRA is not longer active (ie in the complete or compensate messages have been sent"), @ApiResponse(code = 200, message = "If the participant was successfully removed from the LRA") }) public Response leaveLRA( @ApiParam(value = "The unique identifier of the LRA", required = true) @PathParam("LraId") String lraId, String compensatorUrl) throws NotFoundException, URISyntaxException { String reqUri = context.getRequestUri().toString(); reqUri = reqUri.substring(0, reqUri.lastIndexOf('/')); int status = 0; status = lraService.leave(new URI(reqUri), compensatorUrl); return Response.status(status).build(); } private URI toURI(String lraId) { return toURI(lraId, "Invalid LRA id format"); } private URI toURI(String lraId, String message) { URL url; try { // see if it already in the correct format url = new URL(lraId); url.toURI(); } catch (Exception e) { try { url = new URL(String.format("%s%s/%s", context.getBaseUri(), COORDINATOR_PATH_NAME, lraId)); } catch (MalformedURLException e1) { LRALogger.i18NLogger.error_invalidStringFormatOfUrl(lraId, e1); throw new WebApplicationException(message, e1, BAD_REQUEST); } } try { return url.toURI(); } catch (URISyntaxException e) { LRALogger.i18NLogger.error_invalidStringFormatOfUrl(lraId, e); throw new WebApplicationException(message, e, BAD_REQUEST); } } }
package edu.umd.cs.piccolox.pswing; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Component; import java.awt.Container; import java.awt.Font; import java.awt.Graphics2D; import java.awt.RenderingHints; import java.awt.Shape; import java.awt.Stroke; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.awt.event.ContainerAdapter; import java.awt.event.ContainerEvent; import java.awt.event.ContainerListener; import java.awt.geom.Rectangle2D; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import javax.swing.JComponent; import javax.swing.RepaintManager; import edu.umd.cs.piccolo.PCamera; import edu.umd.cs.piccolo.PLayer; import edu.umd.cs.piccolo.PNode; import edu.umd.cs.piccolo.util.PBounds; import edu.umd.cs.piccolo.util.PPaintContext; /** * <b>PSwing</b> is used to add Swing Components to a Piccolo2D canvas. * <p> * Example: adding a swing JButton to a PCanvas: * * <pre> * PSwingCanvas canvas = new PSwingCanvas(); * JButton button = new JButton(&quot;Button&quot;); * swing = new PSwing(canvas, button); * canvas.getLayer().addChild(swing); * </pre> * * <p> * NOTE: PSwing has the current limitation that it does not listen for Container * events. This is only an issue if you create a PSwing and later add Swing * components to the PSwing's component hierarchy that do not have double * buffering turned off or have a smaller font size than the minimum font size * of the original PSwing's component hierarchy. * </p> * <p> * For instance, the following bit of code will give unexpected results: * * <pre> * JPanel panel = new JPanel(); * PSwing swing = new PSwing(panel); * JPanel newChild = new JPanel(); * newChild.setDoubleBuffered(true); * panel.add(newChild); * </pre> * * </p> * <p> * NOTE: PSwing cannot be correctly interacted with through multiple cameras. * There is no support for it yet. * </p> * <p> * NOTE: PSwing is java.io.Serializable. * </p> * <p> * <b>Warning:</b> Serialized objects of this class will not be compatible with * future Piccolo releases. The current serialization support is appropriate for * short term storage or RMI between applications running the same version of * Piccolo. A future release of Piccolo will provide support for long term * persistence. * </p> * * @author Sam R. Reid * @author Benjamin B. Bederson * @author Lance E. Good * */ public class PSwing extends PNode implements Serializable, PropertyChangeListener { /** Default serial version UID. */ private static final long serialVersionUID = 1L; /** Key for this object in the Swing component's client properties. */ public static final String PSWING_PROPERTY = "PSwing"; /** Temporary repaint bounds. */ private static final PBounds TEMP_REPAINT_BOUNDS2 = new PBounds(); /** Default Greek threshold, <code>0.3d</code>. */ private static final double DEFAULT_GREEK_THRESHOLD = 0.3d; /** The cutoff at which the Swing component is rendered greek. */ private double greekThreshold = DEFAULT_GREEK_THRESHOLD; /** Swing component for this Swing node. */ private JComponent component = null; /** Minimum font size. */ private double minFontSize = Double.MAX_VALUE; /** * Default stroke, <code>new BasicStroke()</code>. Cannot be made static * because BasicStroke is not serializable. */ private Stroke defaultStroke = new BasicStroke(); /** * Default font, 12 point <code>"SansSerif"</code>. 
Will be made final in * version 2.0. */ // public static final Font DEFAULT_FONT = new Font(Font.SANS_SERIF, // Font.PLAIN, 12); jdk 1.6+ private static final Font DEFAULT_FONT = new Font("Serif", Font.PLAIN, 12); /** Swing canvas for this swing node. */ private PSwingCanvas canvas; /** * Used to keep track of which nodes we've attached listeners to since no * built in support in PNode. */ private final ArrayList listeningTo = new ArrayList(); /** The parent listener for camera/canvas changes. */ private final PropertyChangeListener parentListener = new PropertyChangeListener() { /** {@inheritDoc} */ public void propertyChange(final PropertyChangeEvent evt) { final PNode parent = (PNode) evt.getNewValue(); clearListeners((PNode) evt.getOldValue()); if (parent == null) { updateCanvas(null); } else { listenForCanvas(parent); } } /** * Clear out all the listeners registered to make sure there are no * stray references. * * @param fromParent Parent to start with for clearing listeners */ private void clearListeners(final PNode fromParent) { if (fromParent != null && isListeningTo(fromParent)) { fromParent.removePropertyChangeListener(PNode.PROPERTY_PARENT, parentListener); listeningTo.remove(fromParent); clearListeners(fromParent.getParent()); } } }; private final PropertyChangeListener reshapeListener = new PropertyChangeListener() { public void propertyChange(final PropertyChangeEvent evt) { repaint(); } }; /** * Listens to container nodes for changes to its contents. Any additions * will automatically have double buffering turned off. */ private final ContainerListener doubleBufferRemover = new ContainerAdapter() { public void componentAdded(final ContainerEvent event) { Component childComponent = event.getChild(); if (childComponent != null && childComponent instanceof JComponent) { disableDoubleBuffering(((JComponent) childComponent)); } }; /** * Disables double buffering on every component in the hierarchy of the * targetComponent. * * I'm assuming that the intent of the is method is that it should be * called explicitly by anyone making changes to the hierarchy of the * Swing component graph. */ private void disableDoubleBuffering(final JComponent targetComponent) { targetComponent.setDoubleBuffered(false); for (int i = 0; i < targetComponent.getComponentCount(); i++) { final Component c = targetComponent.getComponent(i); if (c instanceof JComponent) { disableDoubleBuffering((JComponent) c); } } } }; /** * Create a new visual component wrapper for the specified Swing component. 
* * @param component Swing component to be wrapped */ public PSwing(final JComponent component) { this.component = component; component.putClientProperty(PSWING_PROPERTY, this); initializeComponent(component); component.revalidate(); component.addPropertyChangeListener(new PropertyChangeListener() { /** {@inheritDoc} */ public void propertyChange(final PropertyChangeEvent evt) { updateBounds(); } }); component.addComponentListener(new ComponentAdapter() { /** {@inheritDoc} */ public void componentHidden(final ComponentEvent e) { setVisible(false); } /** {@inheritDoc} */ public void componentShown(final ComponentEvent e) { setVisible(true); } }); updateBounds(); listenForCanvas(this); } /** * @deprecated by {@link #PSwing(JComponent)} * * @param swingCanvas canvas on which the PSwing node will be embedded * @param component not used */ public PSwing(final PSwingCanvas swingCanvas, final JComponent component) { this(component); } /** * Ensures the bounds of the underlying component are accurate, and sets the * bounds of this PNode. */ public void updateBounds() { // Avoid setBounds if it is unnecessary // TODO: should we make sure this is called at least once // TODO: does this sometimes need to be called when size already equals // preferred size, to relayout/update things? if (componentNeedsResizing()) { component.setBounds(0, 0, component.getPreferredSize().width, component.getPreferredSize().height); } setBounds(0, 0, component.getPreferredSize().width, component.getPreferredSize().height); } private boolean componentNeedsResizing() { return component.getWidth() != component.getPreferredSize().width || component.getHeight() != component.getPreferredSize().height; } /** * Determines if the Swing component should be rendered normally or as a * filled rectangle. * <p/> * The transform, clip, and composite will be set appropriately when this * object is rendered. It is up to this object to restore the transform, * clip, and composite of the Graphics2D if this node changes any of them. * However, the color, font, and stroke are unspecified by Piccolo. This * object should set those things if they are used, but they do not need to * be restored. * * @param renderContext Contains information about current render. */ public void paint(final PPaintContext renderContext) { final Graphics2D g2 = renderContext.getGraphics(); if (defaultStroke == null) { defaultStroke = new BasicStroke(); } g2.setStroke(defaultStroke); g2.setFont(DEFAULT_FONT); if (component.getParent() == null) { component.revalidate(); } if (shouldRenderGreek(renderContext)) { paintAsGreek(g2); } else { paint(g2); } } /** * Return true if this Swing node should render as greek given the specified * paint context. * * @param paintContext paint context * @return true if this Swing node should render as greek given the * specified paint context */ protected boolean shouldRenderGreek(final PPaintContext paintContext) { return paintContext.getScale() < greekThreshold || minFontSize * paintContext.getScale() < 0.5; } /** * Paints the Swing component as greek. 
* * @param g2 The graphics used to render the filled rectangle */ public void paintAsGreek(final Graphics2D g2) { final Color background = component.getBackground(); final Color foreground = component.getForeground(); final Rectangle2D rect = getBounds(); if (background != null) { g2.setColor(background); } g2.fill(rect); if (foreground != null) { g2.setColor(foreground); } g2.draw(rect); } /** {@inheritDoc} */ public void setVisible(final boolean visible) { super.setVisible(visible); component.setVisible(visible); } /** * Remove from the SwingWrapper; throws an exception if no canvas is * associated with this PSwing. */ public void removeFromSwingWrapper() { if (canvas != null && isComponentSwingWrapped()) { canvas.getSwingWrapper().remove(component); } } private boolean isComponentSwingWrapped() { return Arrays.asList(canvas.getSwingWrapper().getComponents()).contains(component); } /** * Renders the wrapped component to the graphics context provided. * * @param g2 graphics context for rendering the JComponent */ public void paint(final Graphics2D g2) { if (component.getBounds().isEmpty()) { // The component has not been initialized yet. return; } final PSwingRepaintManager manager = (PSwingRepaintManager) RepaintManager.currentManager(component); manager.lockRepaint(component); final RenderingHints oldHints = g2.getRenderingHints(); g2.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_OFF); component.paint(g2); g2.setRenderingHints(oldHints); manager.unlockRepaint(component); } /** * Repaints the specified portion of this visual component. Note that the * input parameter may be modified as a result of this call. * * @param repaintBounds bounds that need repainting */ public void repaint(final PBounds repaintBounds) { final Shape sh = getTransform().createTransformedShape(repaintBounds); TEMP_REPAINT_BOUNDS2.setRect(sh.getBounds2D()); repaintFrom(TEMP_REPAINT_BOUNDS2, this); } /** * Returns the Swing component that this visual component wraps. * * @return The Swing component wrapped by this PSwing node */ public JComponent getComponent() { return component; } /** * We need to turn off double buffering of Swing components within Piccolo * since all components contained within a native container use the same * buffer for double buffering. With normal Swing widgets this is fine, but * for Swing components within Piccolo this causes problems. This function * recurses the component tree rooted at c, and turns off any double * buffering in use. It also updates the minimum font size based on the font * size of c and adds a property change listener to listen for changes to * the font. * * @param c The Component to be recursively unDoubleBuffered */ private void initializeComponent(final Component c) { if (c.getFont() != null) { minFontSize = Math.min(minFontSize, c.getFont().getSize()); } c.addPropertyChangeListener("font", this); // Update shape when any property (such as text or font) changes. 
c.addPropertyChangeListener(reshapeListener); c.addComponentListener(new ComponentAdapter() { public void componentResized(final ComponentEvent e) { updateBounds(); } public void componentShown(final ComponentEvent e) { updateBounds(); } }); if (c instanceof Container) { initializeChildren((Container) c); ((Container) c).addContainerListener(doubleBufferRemover); } if (c instanceof JComponent) { ((JComponent) c).setDoubleBuffered(false); } } private void initializeChildren(final Container c) { final Component[] children = c.getComponents(); if (children != null) { for (int j = 0; j < children.length; j++) { initializeComponent(children[j]); } } } /** * Listens for changes in font on components rooted at this PSwing. * * @param evt property change event representing the change in font */ public void propertyChange(final PropertyChangeEvent evt) { final Component source = (Component) evt.getSource(); if (source.getFont() != null && component.isAncestorOf(source)) { minFontSize = Math.min(minFontSize, source.getFont().getSize()); } } private void readObject(final ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); initializeComponent(component); } /** * Attaches a listener to the specified node and all its parents to listen * for a change in the PSwingCanvas. Only PROPERTY_PARENT listeners are * added so this code wouldn't handle if a PLayer were viewed by a different * PCamera since that constitutes a child change. * * @param node The child node at which to begin a parent-based traversal for * adding listeners. */ private void listenForCanvas(final PNode node) { // need to get the full tree for this node PNode p = node; while (p != null) { listenToNode(p); final PNode parent = p; // System.out.println( "parent = " + parent.getClass() ); if (parent instanceof PLayer) { final PLayer player = (PLayer) parent; // System.out.println( "Found player: with " + // player.getCameraCount() + " cameras" ); for (int i = 0; i < player.getCameraCount(); i++) { final PCamera cam = player.getCamera(i); if (cam.getComponent() instanceof PSwingCanvas) { updateCanvas((PSwingCanvas) cam.getComponent()); break; } } } p = p.getParent(); } } /** * Attach a property change listener to the specified node, if one has not * already been attached. * * @param node the node to listen to for parent/pcamera/pcanvas changes */ private void listenToNode(final PNode node) { if (!isListeningTo(node)) { listeningTo.add(node); node.addPropertyChangeListener(PNode.PROPERTY_PARENT, parentListener); } } /** * Determine whether this PSwing is already listening to the specified node * for camera/canvas changes. * * @param node the node to check * @return true if this PSwing is already listening to the specified node * for camera/canvas changes */ private boolean isListeningTo(final PNode node) { for (int i = 0; i < listeningTo.size(); i++) { final PNode pNode = (PNode) listeningTo.get(i); if (pNode == node) { return true; } } return false; } /** * Removes this PSwing from previous PSwingCanvas (if any), and ensure that * this PSwing is attached to the new PSwingCanvas. * * @param newCanvas the new PSwingCanvas (may be null) */ private void updateCanvas(final PSwingCanvas newCanvas) { if (newCanvas == canvas) { return; } if (canvas != null) { canvas.removePSwing(this); } if (newCanvas != null) { canvas = newCanvas; canvas.addPSwing(this); updateBounds(); repaint(); canvas.invalidate(); canvas.revalidate(); canvas.repaint(); } } /** * Return the Greek threshold scale. 
When the scale will be below this * threshold the Swing component is rendered as 'Greek' instead of painting * the Swing component. Defaults to {@link #DEFAULT_GREEK_THRESHOLD}. * * @see PSwing#paintGreek(PPaintContext) * @return the current Greek threshold scale */ public double getGreekThreshold() { return greekThreshold; } /** * Set the Greek threshold in scale to <code>greekThreshold</code>. When the * scale will be below this threshold the Swing component is rendered as * 'Greek' instead of painting the Swing component.. * * @see PSwing#paintGreek(PPaintContext) * @param greekThreshold Greek threshold in scale */ public void setGreekThreshold(final double greekThreshold) { this.greekThreshold = greekThreshold; invalidatePaint(); } }
package VASSAL.tools.io; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FilterInputStream; import java.io.InputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.zip.CRC32; import java.util.zip.CheckedOutputStream; import java.util.zip.Checksum; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipInputStream; import java.util.zip.ZipOutputStream; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import VASSAL.Info; import VASSAL.tools.concurrent.CountingReadWriteLock; /** * @author Joel Uckelman * @since 3.2.0 */ public class ZipArchive implements FileArchive { private final File archiveFile; private ZipFile zipFile; private boolean modified = false; private boolean closed = true; private static class Entry { public ZipEntry ze; public File file; public Entry(ZipEntry ze, File file) { this.ze = ze; this.file = file; } @Override public String toString() { return getClass().getName() + "[file=\"" + file + "\", ze=\"" + ze + "\"]"; } } private final Map<String, Entry> entries = new HashMap<>(); private final ReadWriteLock rwl = new CountingReadWriteLock(); private final Lock r = rwl.readLock(); private final Lock w = rwl.writeLock(); /** * Opens a ZIP archive. * * @param path the name of the archive * @throws IOException */ public ZipArchive(String path) throws IOException { this(path, false); } /** * Opens a ZIP archive. * * @param file the name of the archive * @throws IOException */ public ZipArchive(File file) throws IOException { this(file, false); } /** * Opens a ZIP archive. * * @param path the name of the archive * @param truncate if <code>true</code>, truncate the archive file on open * @throws IOException */ public ZipArchive(String path, boolean truncate) throws IOException { this(new File(path), truncate); } /** * Opens a ZIP archive. * * @param file the name of the archive * @param truncate if <code>true</code>, truncate the archive file on open * @throws IOException */ public ZipArchive(File file, boolean truncate) throws IOException { if (file == null) throw new IllegalArgumentException(); this.archiveFile = file; if (truncate) { archiveFile.delete(); } } /** * Copies a ZIP archive. * * @param src the name of the source archive * @param dst the name of the destination archive * @throws IOException */ public ZipArchive(FileArchive src, String dst) throws IOException { this(src, new File(dst)); } /** * Copies a ZIP archive. 
*
   * @param src the name of the source archive
   * @param dst the name of the destination archive
   * @throws IOException
   */
  public ZipArchive(FileArchive src, File dst) throws IOException {
    this(dst, true);

    final byte[] buf = new byte[8192];

    // copy each entry to the new archive
    for (String name : src.getFiles()) {
      try (InputStream in = src.getInputStream(name);
           OutputStream out = getOutputStream(name)) {
        IOUtils.copy(in, out, buf);
      }
    }

    flush();
  }

  /** {@inheritDoc} */
  @Override
  public String getName() {
    return archiveFile.getPath();
  }

  /** {@inheritDoc} */
  @Override
  public File getFile() {
    return archiveFile;
  }

  /** {@inheritDoc} */
  @Override
  public boolean isClosed() {
    return closed;
  }

  /** {@inheritDoc} */
  @Override
  public boolean isModified() {
    return modified;
  }

  /**
   * {@inheritDoc}
   *
   * <b>Note:</b> It is imperative that the calling code ensures that this
   * stream is eventually closed, since the returned stream holds a read
   * lock on the archive.
   */
  @Override
  public InputStream getInputStream(String path) throws IOException {
    r.lock();
    try {
      openIfClosed();

      final Entry e = entries.get(path);
      if (e == null) {
        throw new FileNotFoundException(path + " not in archive");
      }

      InputStream in = null;
      if (e.file != null) {
        in = new FileInputStream(e.file);
      }
      else if (zipFile != null) {
        // NB: Undocumented, but ZipFile.getInputStream can return null!
        in = zipFile.getInputStream(e.ze);
      }

      if (in == null) {
        throw new FileNotFoundException(path + " not in archive");
      }

      return new ZipArchiveInputStream(in);
    }
    catch (IOException ex) {
      r.unlock();
      throw ex;
    }
  }

  /**
   * {@inheritDoc}
   *
   * <b>Note:</b> It is imperative that the calling code ensures that this
   * stream is eventually closed, since the returned stream holds a write
   * lock on the archive.
   */
  @Override
  public OutputStream getOutputStream(String path) throws IOException {
    return getOutputStream(path, true);
  }

  /**
   * Gets an {@link OutputStream} to write to the given file.
   *
   * <b>Note:</b> It is imperative that the calling code ensures that this
   * stream is eventually closed, since the returned stream holds a write
   * lock on the archive.
   *
   * @param path the path to the file in the archive
   * @param compress whether to compress the file
   * @return an <code>OutputStream</code> for the requested file
   * @throws IOException
   */
  public OutputStream getOutputStream(String path, boolean compress) throws IOException {
    w.lock();
    try {
      openIfClosed();

      modified = true;

      // set up new ZipEntry
      final ZipEntry ze = new ZipEntry(path);
      ze.setMethod(compress ?
ZipEntry.DEFLATED : ZipEntry.STORED); // create new temp file final String name = archiveFile.getName(); final String base = FilenameUtils.getBaseName(name); final String ext = FilenameUtils.getExtension(name); final File tf = File.createTempFile(base, ext, Info.getTempDir()); // set up new Entry final Entry e = new Entry(ze, tf); final Entry old = entries.put(path, e); // clean up old temp file if (old != null && old.file != null) { old.file.delete(); } return new ZipArchiveOutputStream( new FileOutputStream(e.file), new CRC32(), e.ze ); } catch (IOException ex) { w.unlock(); throw ex; } } /** {@inheritDoc} */ @Override public void add(String path, String extPath) throws IOException { add(path, new File(extPath)); } /** {@inheritDoc} */ @Override public void add(String path, File extPath) throws IOException { try (FileInputStream in = new FileInputStream(extPath)) { add(path, in); } } /** {@inheritDoc} */ @Override public void add(String path, byte[] bytes) throws IOException { add(path, new ByteArrayInputStream(bytes)); } /** {@inheritDoc} */ @Override public void add(String path, InputStream in) throws IOException { try (OutputStream out = getOutputStream(path)) { IOUtils.copy(in, out); } } /** {@inheritDoc} */ @Override public boolean remove(String path) throws IOException { w.lock(); try { openIfClosed(); final Entry e = entries.remove(path); if (e != null) { modified = true; if (e.file != null) { e.file.delete(); } } return e != null; } finally { w.unlock(); } } /** {@inheritDoc} */ @Override public void revert() throws IOException { w.lock(); try { if (!modified) { return; } // delete all temporary files for (Entry e : entries.values()) { if (e != null && e.file != null) { e.file.delete(); } } modified = false; } finally { w.unlock(); } } /** {@inheritDoc} */ @Override public void flush() throws IOException { w.lock(); try { if (modified) { writeToDisk(); } } finally { w.unlock(); } } /** {@inheritDoc} */ @Override public void close() throws IOException { w.lock(); try { if (closed) { return; } else if (modified) { writeToDisk(); } else if (zipFile != null) { zipFile.close(); zipFile = null; closed = true; entries.clear(); } } finally { w.unlock(); } } private void moveFile(Path src, Path dst) throws IOException { // Replace dst with src try { // attempt an atomic move Files.move(src, dst, StandardCopyOption.ATOMIC_MOVE); } catch (IOException ignore) { // Atomic move failed; this doesn't necessarily indicate a problem, as // some filesystems don't support atomic moves and atomic moves are // impossible when the source and destination aren't on the same // filesystem. 
try { // attempt to copy to the destination Files.copy(src, dst, StandardCopyOption.REPLACE_EXISTING); } catch (IOException e) { // copy failed final String fmt = "Unable to overwrite %s, so data written to %s instead: %s"; throw new IOException( String.format( fmt, dst.toAbsolutePath(), src.toAbsolutePath(), e.getMessage() ), e ); } try { // successful copy, so remove the source Files.delete(src); } catch (IOException e) { // successful copy, but removing the source failed final String fmt = "File %s saved, but unable to remove temporary file %s: %s"; throw new IOException( String.format( fmt, dst.toAbsolutePath(), src.toAbsolutePath(), e.getMessage() ), e ); } } } private void writeToDisk() throws IOException { // write all files to a temporary zip archive final String name = archiveFile.getName(); final String base = FilenameUtils.getBaseName(name); final String ext = FilenameUtils.getExtension(name); final File tmpFile = File.createTempFile(base, ext, Info.getTempDir()); try (OutputStream fout = new FileOutputStream(tmpFile); OutputStream bout = new BufferedOutputStream(fout); ZipOutputStream out = new ZipOutputStream(bout)) { out.setLevel(9); final byte[] buf = new byte[8192]; if (zipFile != null) { zipFile.close(); zipFile = null; // copy unmodified file into the temp archive try (InputStream fin = new FileInputStream(archiveFile); InputStream bin = new BufferedInputStream(fin); ZipInputStream in = new ZipInputStream(bin)) { ZipEntry ze = null; while ((ze = in.getNextEntry()) != null) { // skip modified or removed entries final Entry e = entries.get(ze.getName()); if (e == null || e.file != null) continue; // We can't reuse entries for compressed files because there's // no way to reset all fields to acceptable values. if (ze.getMethod() == ZipEntry.DEFLATED) { final ZipEntry nze = new ZipEntry(ze.getName()); nze.setTime(ze.getTime()); ze = nze; } out.putNextEntry(ze); IOUtils.copy(in, out, buf); entries.remove(ze.getName()); } } } for (Entry e : entries.values()) { // skip removed or unmodified files if (e == null || e.file == null) continue; // write new or modified file into the temp archive try (FileInputStream in = new FileInputStream(e.file)) { e.ze.setTime(e.file.lastModified()); out.putNextEntry(e.ze); IOUtils.copy(in, out, buf); } } } // Replace old archive with temp archive moveFile(tmpFile.toPath(), archiveFile.toPath()); // Delete all temporary files for (Entry e : entries.values()) { if (e != null && e.file != null) { e.file.delete(); } } closed = true; modified = false; entries.clear(); } /** {@inheritDoc} */ @Override public boolean contains(String path) throws IOException { r.lock(); try { openIfClosed(); return entries.containsKey(path); } finally { r.unlock(); } } /** {@inheritDoc} */ @Override public long getSize(String path) throws IOException { r.lock(); try { openIfClosed(); final Entry e = entries.get(path); if (e == null) { throw new FileNotFoundException(path + " not in archive"); } return e.file == null ? e.ze.getSize() : e.file.length(); } finally { r.unlock(); } } /** {@inheritDoc} */ @Override public long getMTime(String path) throws IOException { r.lock(); try { openIfClosed(); final Entry e = entries.get(path); if (e == null) { throw new FileNotFoundException(path + " not in archive"); } return e.file == null ? 
e.ze.getTime() : e.file.lastModified(); } finally { r.unlock(); } } /** {@inheritDoc} */ @Override public List<String> getFiles() throws IOException { r.lock(); try { openIfClosed(); return new ArrayList<>(entries.keySet()); } finally { r.unlock(); } } /** {@inheritDoc} */ @Override public List<String> getFiles(String root) throws IOException { if (root.length() == 0) { return getFiles(); } r.lock(); try { openIfClosed(); // FIXME: directories need not have entries in the ZipFile! // if (!entries.containsKey(root)) // throw new FileNotFoundException(root + " not in archive"); root += '/'; final ArrayList<String> names = new ArrayList<>(); for (String n : entries.keySet()) { if (n.startsWith(root)) { names.add(n); } } return names; } finally { r.unlock(); } } /** Rebuilds the {@link Entry}s from our underlying {@link ZipFile}. */ private synchronized void readEntries() throws IOException { entries.clear(); if (archiveFile.exists() && archiveFile.length() > 0) { zipFile = new ZipFile(archiveFile); zipFile.stream().forEach(e -> entries.put(e.getName(), new Entry(e, null))); } } /** Opens the archive if it is closed. */ private synchronized void openIfClosed() throws IOException { if (closed) { readEntries(); modified = false; closed = false; } } /** An {@link InputStream} which releases the read lock on close. */ private class ZipArchiveInputStream extends FilterInputStream { public ZipArchiveInputStream(InputStream in) { super(in); if (in == null) { throw new NullPointerException("in == null"); } } private boolean closed = false; @Override public void close() throws IOException { if (closed) { return; } try { super.close(); } finally { r.unlock(); closed = true; } } } /** * An {@link OutputStream} which calculates a checksum, counts bytes * written, and releases the write lock on close. */ private class ZipArchiveOutputStream extends CheckedOutputStream { private ZipEntry entry; private long count = 0; public ZipArchiveOutputStream(OutputStream out, Checksum cksum, ZipEntry e) { super(out, cksum); if (out == null) { throw new NullPointerException("out == null"); } if (cksum == null) { throw new NullPointerException("cksum == null"); } if (e == null) { throw new NullPointerException("e == null"); } entry = e; } @Override public void write(byte[] bytes, int off, int len) throws IOException { super.write(bytes, off, len); count += len; } @Override public void write(int b) throws IOException { super.write(b); ++count; } @Override public void flush() throws IOException { super.flush(); entry.setSize(count); entry.setCrc(getChecksum().getValue()); } private boolean closed = false; @Override public void close() throws IOException { if (closed) { return; } try { super.close(); } finally { w.unlock(); closed = true; } } } public static void main(String[] args) throws IOException { final ZipArchive archive = new ZipArchive("test.zip"); // write test archive.add("NOTES", "NOTES"); archive.add("README.txt", "README.txt"); archive.flush(); // read test try (InputStream in = archive.getInputStream("NOTES")) { IOUtils.copy(in, System.out); } archive.close(); } }
// This file is part of Serleena.
//
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.

package com.kyloth.serleena.presenters;

import com.kyloth.serleena.model.IExperience;
import com.kyloth.serleena.model.ITrack;
import com.kyloth.serleena.presentation.IExperienceActivationObserver;
import com.kyloth.serleena.presentation.IExperienceActivationSource;
import com.kyloth.serleena.presentation.ISerleenaActivity;
import com.kyloth.serleena.presentation.ITrackSelectionPresenter;
import com.kyloth.serleena.presentation.ITrackSelectionView;

/**
 * Concrete implementation of ITrackSelectionPresenter.
 *
 * @use Used only by the Activity, which keeps a reference to it. On creation, the Presenter registers itself with its View, passing itself as a parameter behind an interface.
 * @field view : ITrackSelectionView View associated with the presenter
 * @field activity : ISerleenaActivity Activity the presenter belongs to
 * @author Filippo Sestini <sestini.filippo@gmail.com>
 * @version 1.0.0
 */
public class TrackSelectionPresenter
        implements ITrackSelectionPresenter, IExperienceActivationObserver {

    private ISerleenaActivity activity;
    private ITrackSelectionView view;

    public TrackSelectionPresenter(ITrackSelectionView view,
                                   ISerleenaActivity activity,
                                   IExperienceActivationSource source)
            throws IllegalArgumentException {
        if (view == null)
            throw new IllegalArgumentException("Illegal null view");
        if (activity == null)
            throw new IllegalArgumentException("Illegal null activity");
        if (source == null)
            throw new IllegalArgumentException("Illegal null experience " +
                    "activation source");

        this.activity = activity;
        this.view = view;
        this.view.attachPresenter(this);
        source.attachObserver(this);
    }

    /**
     * Implements IPresenter.resume().
     *
     * Performs no operations, since there are no resources to acquire or
     * release.
     */
    @Override
    public void resume() { }

    /**
     * Implements IPresenter.pause().
     *
     * Performs no operations, since there are no resources to acquire or
     * release.
     */
    @Override
    public void pause() { }

    @Override
    public void activateTrack(ITrack track) throws IllegalArgumentException {
        if (track == null)
            throw new IllegalArgumentException("Illegal null track");
        activity.getSensorManager().getTrackCrossingManager().startTrack(track);
    }

    /**
     * Implements IExperienceActivationObserver.onExperienceActivated().
     *
     * The list represented by the view is populated with the Tracks of the
     * activated Experience.
     *
     * @param experience The activated Experience.
     */
    @Override
    public void onExperienceActivated(IExperience experience) {
        if (experience == null)
            throw new IllegalArgumentException("Illegal null experience");
        view.setTracks(experience.getTracks());
    }
}
package uk.ac.ic.wlgitbridge; import org.eclipse.jgit.api.errors.GitAPIException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import uk.ac.ic.wlgitbridge.application.GitBridgeApp; import uk.ac.ic.wlgitbridge.snapshot.servermock.server.MockSnapshotServer; import uk.ac.ic.wlgitbridge.snapshot.servermock.state.SnapshotAPIState; import uk.ac.ic.wlgitbridge.snapshot.servermock.state.SnapshotAPIStateBuilder; import uk.ac.ic.wlgitbridge.snapshot.servermock.util.FileUtil; import uk.ac.ic.wlgitbridge.util.Util; import java.io.*; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class WLGitBridgeIntegrationTest { private Runtime runtime = Runtime.getRuntime(); private Map<String, Map<String, SnapshotAPIState>> states = new HashMap<String, Map<String, SnapshotAPIState>>() {{ put("canCloneARepository", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/canCloneARepository/state/state.json")).build()); }}); put("canCloneMultipleRepositories", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/canCloneMultipleRepositories/state/state.json")).build()); }}); put("cannotCloneAProtectedProject", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/cannotCloneAProtectedProject/state/state.json")).build()); }}); put("canPullAModifiedTexFile", new HashMap<String, SnapshotAPIState>() {{ put("base", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullAModifiedTexFile/base/state.json")).build()); put("withModifiedTexFile", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullAModifiedTexFile/withModifiedTexFile/state.json")).build()); }}); put("canPullADeletedTexFile", new HashMap<String, SnapshotAPIState>() {{ put("base", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullADeletedTexFile/base/state.json")).build()); put("withDeletedTexFile", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullADeletedTexFile/withDeletedTexFile/state.json")).build()); }}); put("canPullAModifiedBinaryFile", new HashMap<String, SnapshotAPIState>() {{ put("base", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullAModifiedBinaryFile/base/state.json")).build()); put("withModifiedBinaryFile", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullAModifiedBinaryFile/withModifiedBinaryFile/state.json")).build()); }}); put("canPullADeletedBinaryFile", new HashMap<String, SnapshotAPIState>() {{ put("base", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullADeletedBinaryFile/base/state.json")).build()); put("withDeletedBinaryFile", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullADeletedBinaryFile/withDeletedBinaryFile/state.json")).build()); }}); put("canPullADuplicateBinaryFile", new HashMap<String, SnapshotAPIState>() {{ put("base", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullADuplicateBinaryFile/base/state.json")).build()); put("withDuplicateBinaryFile", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullADuplicateBinaryFile/withDuplicateBinaryFile/state.json")).build()); }}); put("canCloneDuplicateBinaryFiles", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/canCloneDuplicateBinaryFiles/state/state.json")).build()); }}); 
put("canPullUpdatedBinaryFiles", new HashMap<String, SnapshotAPIState>() {{ put("base", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullUpdatedBinaryFiles/base/state.json")).build()); put("withUpdatedBinaryFiles", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/state.json")).build()); }}); put("canPullAModifiedNestedFile", new HashMap<String, SnapshotAPIState>() {{ put("base", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullAModifiedNestedFile/base/state.json")).build()); put("withModifiedNestedFile", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullAModifiedNestedFile/withModifiedNestedFile/state.json")).build()); }}); put("canPullDeletedNestedFiles", new HashMap<String, SnapshotAPIState>() {{ put("base", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullDeletedNestedFiles/base/state.json")).build()); put("withDeletedNestedFiles", new SnapshotAPIStateBuilder(getResourceAsStream("/canPullDeletedNestedFiles/withDeletedNestedFiles/state.json")).build()); }}); put("canPushFilesSuccessfully", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/canPushFilesSuccessfully/state/state.json")).build()); }}); put("pushFailsOnFirstStageOutOfDate", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/pushFailsOnFirstStageOutOfDate/state/state.json")).build()); }}); put("pushFailsOnSecondStageOutOfDate", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/pushFailsOnSecondStageOutOfDate/state/state.json")).build()); }}); put("pushFailsOnInvalidFiles", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/pushFailsOnInvalidFiles/state/state.json")).build()); }}); put("pushFailsOnInvalidProject", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/pushFailsOnInvalidProject/state/state.json")).build()); }}); put("pushFailsOnUnexpectedError", new HashMap<String, SnapshotAPIState>() {{ put("state", new SnapshotAPIStateBuilder(getResourceAsStream("/pushFailsOnUnexpectedError/state/state.json")).build()); }}); put("pushSucceedsAfterRemovingInvalidFiles", new HashMap<String, SnapshotAPIState>() {{ put("invalidState", new SnapshotAPIStateBuilder(getResourceAsStream("/pushSucceedsAfterRemovingInvalidFiles/invalidState/state.json")).build()); put("validState", new SnapshotAPIStateBuilder(getResourceAsStream("/pushSucceedsAfterRemovingInvalidFiles/validState/state.json")).build()); }}); }}; @Rule public TemporaryFolder folder = new TemporaryFolder(); @Test public void canCloneARepository() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3857, getResource("/canCloneARepository").toFile()); server.start(); server.setState(states.get("canCloneARepository").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33857, 3857) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33857, dir); wlgb.stop(); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canCloneARepository/state/testproj"), testprojDir.toPath())); } @Test public void canCloneMultipleRepositories() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3858, getResource("/canCloneMultipleRepositories").toFile()); 
server.start(); server.setState(states.get("canCloneMultipleRepositories").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33858, 3858) }); wlgb.run(); File dir = folder.newFolder(); File testproj1Dir = cloneRepository("testproj1", 33858, dir); File testproj2Dir = cloneRepository("testproj2", 33858, dir); wlgb.stop(); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canCloneMultipleRepositories/state/testproj1"), testproj1Dir.toPath())); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canCloneMultipleRepositories/state/testproj2"), testproj2Dir.toPath())); } @Test public void canPullAModifiedTexFile() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3859, getResource("/canPullAModifiedTexFile").toFile()); server.start(); server.setState(states.get("canPullAModifiedTexFile").get("base")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33859, 3859) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33859, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullAModifiedTexFile/base/testproj"), testprojDir.toPath())); server.setState(states.get("canPullAModifiedTexFile").get("withModifiedTexFile")); Process gitWithModifiedTexFile = runtime.exec("git pull", null, testprojDir); int exitCodeWithModifiedTexFile = gitWithModifiedTexFile.waitFor(); wlgb.stop(); assertEquals(0, exitCodeWithModifiedTexFile); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullAModifiedTexFile/withModifiedTexFile/testproj"), testprojDir.toPath())); } @Test public void canPullADeletedTexFile() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3860, getResource("/canPullADeletedTexFile").toFile()); server.start(); server.setState(states.get("canPullADeletedTexFile").get("base")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33860, 3860) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33860, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullADeletedTexFile/base/testproj"), testprojDir.toPath())); server.setState(states.get("canPullADeletedTexFile").get("withDeletedTexFile")); Process gitWithDeletedTexFile = runtime.exec("git pull", null, testprojDir); int exitCodeWithDeletedTexFile = gitWithDeletedTexFile.waitFor(); wlgb.stop(); assertEquals(0, exitCodeWithDeletedTexFile); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullADeletedTexFile/withDeletedTexFile/testproj"), testprojDir.toPath())); } @Test public void canPullAModifiedBinaryFile() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3862, getResource("/canPullAModifiedBinaryFile").toFile()); server.start(); server.setState(states.get("canPullAModifiedBinaryFile").get("base")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33862, 3862) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33862, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullAModifiedBinaryFile/base/testproj"), testprojDir.toPath())); server.setState(states.get("canPullAModifiedBinaryFile").get("withModifiedBinaryFile")); Process gitWithModifiedBinaryFile = runtime.exec("git pull", null, testprojDir); int exitCodeWithModifiedBinaryFile = gitWithModifiedBinaryFile.waitFor(); wlgb.stop(); 
assertEquals(0, exitCodeWithModifiedBinaryFile); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullAModifiedBinaryFile/withModifiedBinaryFile/testproj"), testprojDir.toPath())); } @Test public void canPullADeletedBinaryFile() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3863, getResource("/canPullADeletedBinaryFile").toFile()); server.start(); server.setState(states.get("canPullADeletedBinaryFile").get("base")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33863, 3863) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33863, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullADeletedBinaryFile/base/testproj"), testprojDir.toPath())); server.setState(states.get("canPullADeletedBinaryFile").get("withDeletedBinaryFile")); Process gitWithDeletedBinaryFile = runtime.exec("git pull", null, testprojDir); int exitCodeWithDeletedBinaryFile = gitWithDeletedBinaryFile.waitFor(); wlgb.stop(); assertEquals(0, exitCodeWithDeletedBinaryFile); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullADeletedBinaryFile/withDeletedBinaryFile/testproj"), testprojDir.toPath())); } @Test public void canPullADuplicateBinaryFile() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(4001, getResource("/canPullADuplicateBinaryFile").toFile()); server.start(); server.setState(states.get("canPullADuplicateBinaryFile").get("base")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(44001, 4001) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 44001, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullADuplicateBinaryFile/base/testproj"), testprojDir.toPath())); server.setState(states.get("canPullADuplicateBinaryFile").get("withDuplicateBinaryFile")); Process gitWithDeletedBinaryFile = runtime.exec("git pull", null, testprojDir); int exitCodeWithDeletedBinaryFile = gitWithDeletedBinaryFile.waitFor(); wlgb.stop(); assertEquals(0, exitCodeWithDeletedBinaryFile); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullADuplicateBinaryFile/withDuplicateBinaryFile/testproj"), testprojDir.toPath())); } @Test public void canCloneDuplicateBinaryFiles() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(4002, getResource("/canCloneDuplicateBinaryFiles").toFile()); server.start(); server.setState(states.get("canCloneDuplicateBinaryFiles").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(44002, 4002) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 44002, dir); wlgb.stop(); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canCloneDuplicateBinaryFiles/state/testproj"), testprojDir.toPath())); } @Test public void canPullUpdatedBinaryFiles() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(4003, getResource("/canPullUpdatedBinaryFiles").toFile()); server.start(); server.setState(states.get("canPullUpdatedBinaryFiles").get("base")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(44003, 4003) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 44003, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullUpdatedBinaryFiles/base/testproj"), 
testprojDir.toPath())); server.setState(states.get("canPullUpdatedBinaryFiles").get("withUpdatedBinaryFiles")); Process gitWithDeletedBinaryFile = runtime.exec("git pull", null, testprojDir); int exitCodeWithDeletedBinaryFile = gitWithDeletedBinaryFile.waitFor(); wlgb.stop(); assertEquals(0, exitCodeWithDeletedBinaryFile); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/testproj"), testprojDir.toPath())); } @Test public void canPullAModifiedNestedFile() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3864, getResource("/canPullAModifiedNestedFile").toFile()); server.start(); server.setState(states.get("canPullAModifiedNestedFile").get("base")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33864, 3864) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33864, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullAModifiedNestedFile/base/testproj"), testprojDir.toPath())); server.setState(states.get("canPullAModifiedNestedFile").get("withModifiedNestedFile")); Process gitWithModifiedNestedFile = runtime.exec("git pull", null, testprojDir); int exitCodeWithModifiedNestedFile = gitWithModifiedNestedFile.waitFor(); wlgb.stop(); assertEquals(0, exitCodeWithModifiedNestedFile); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullAModifiedNestedFile/withModifiedNestedFile/testproj"), testprojDir.toPath())); } @Test public void canPullDeletedNestedFiles() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3865, getResource("/canPullDeletedNestedFiles").toFile()); server.start(); server.setState(states.get("canPullDeletedNestedFiles").get("base")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33865, 3865) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33865, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullDeletedNestedFiles/base/testproj"), testprojDir.toPath())); server.setState(states.get("canPullDeletedNestedFiles").get("withDeletedNestedFiles")); Process gitWithDeletedBinaryFile = runtime.exec("git pull", null, testprojDir); int exitCodeWithDeletedBinaryFile = gitWithDeletedBinaryFile.waitFor(); wlgb.stop(); assertEquals(0, exitCodeWithDeletedBinaryFile); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPullDeletedNestedFiles/withDeletedNestedFiles/testproj"), testprojDir.toPath())); } @Test public void canPushFilesSuccessfully() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3866, getResource("/canPushFilesSuccessfully").toFile()); server.start(); server.setState(states.get("canPushFilesSuccessfully").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33866, 3866) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33866, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/canPushFilesSuccessfully/state/testproj"), testprojDir.toPath())); runtime.exec("touch push.tex", null, testprojDir).waitFor(); runtime.exec("git add -A", null, testprojDir).waitFor(); runtime.exec("git commit -m \"push\"", null, testprojDir).waitFor(); Process gitPush = runtime.exec("git push", null, testprojDir); int pushExitCode = gitPush.waitFor(); wlgb.stop(); assertEquals(0, pushExitCode); } private static 
final String EXPECTED_OUT_PUSH_OUT_OF_DATE_FIRST = "To http://127.0.0.1:33867/testproj.git\n" + " ! [rejected] master -> master (non-fast-forward)\n" + "error: failed to push some refs to 'http://127.0.0.1:33867/testproj.git'\n" + "hint: Updates were rejected because the tip of your current branch is behind\n" + "hint: its remote counterpart. Integrate the remote changes (e.g.\n" + "hint: 'git pull ...') before pushing again.\n" + "hint: See the 'Note about fast-forwards' in 'git push --help' for details.\n"; @Test public void pushFailsOnFirstStageOutOfDate() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3867, getResource("/pushFailsOnFirstStageOutOfDate").toFile()); server.start(); server.setState(states.get("pushFailsOnFirstStageOutOfDate").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33867, 3867) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33867, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/pushFailsOnFirstStageOutOfDate/state/testproj"), testprojDir.toPath())); runtime.exec("touch push.tex", null, testprojDir).waitFor(); runtime.exec("git add -A", null, testprojDir).waitFor(); runtime.exec("git commit -m \"push\"", null, testprojDir).waitFor(); Process gitPush = runtime.exec("git push", null, testprojDir); int pushExitCode = gitPush.waitFor(); wlgb.stop(); assertEquals(1, pushExitCode); assertEquals(EXPECTED_OUT_PUSH_OUT_OF_DATE_FIRST, Util.fromStream(gitPush.getErrorStream(), 2)); } private static final String EXPECTED_OUT_PUSH_OUT_OF_DATE_SECOND = "To http://127.0.0.1:33868/testproj.git\n" + " ! [rejected] master -> master (non-fast-forward)\n" + "error: failed to push some refs to 'http://127.0.0.1:33868/testproj.git'\n" + "hint: Updates were rejected because the tip of your current branch is behind\n" + "hint: its remote counterpart. 
Integrate the remote changes (e.g.\n" + "hint: 'git pull ...') before pushing again.\n" + "hint: See the 'Note about fast-forwards' in 'git push --help' for details.\n"; @Test public void pushFailsOnSecondStageOutOfDate() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3868, getResource("/pushFailsOnSecondStageOutOfDate").toFile()); server.start(); server.setState(states.get("pushFailsOnSecondStageOutOfDate").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33868, 3868) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33868, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/pushFailsOnSecondStageOutOfDate/state/testproj"), testprojDir.toPath())); runtime.exec("touch push.tex", null, testprojDir).waitFor(); runtime.exec("git add -A", null, testprojDir).waitFor(); runtime.exec("git commit -m \"push\"", null, testprojDir).waitFor(); Process gitPush = runtime.exec("git push", null, testprojDir); int pushExitCode = gitPush.waitFor(); wlgb.stop(); assertEquals(1, pushExitCode); assertEquals(EXPECTED_OUT_PUSH_OUT_OF_DATE_SECOND, Util.fromStream(gitPush.getErrorStream(), 2)); } private static final List<String> EXPECTED_OUT_PUSH_INVALID_FILES = Arrays.asList( "remote: error: invalid files", "remote: hint: You have 4 invalid files in your Overleaf project:", "remote: hint: file1.invalid (error)", "remote: hint: file2.exe (invalid file extension)", "remote: hint: hello world.png (rename to: hello_world.png)", "remote: hint: an image.jpg (rename to: an_image.jpg)", "To http://127.0.0.1:33869/testproj.git", "! [remote rejected] master -> master (invalid files)", "error: failed to push some refs to 'http://127.0.0.1:33869/testproj.git'" ); @Test public void pushFailsOnInvalidFiles() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3869, getResource("/pushFailsOnInvalidFiles").toFile()); server.start(); server.setState(states.get("pushFailsOnInvalidFiles").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33869, 3869) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33869, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/pushFailsOnInvalidFiles/state/testproj"), testprojDir.toPath())); runtime.exec("touch push.tex", null, testprojDir).waitFor(); runtime.exec("git add -A", null, testprojDir).waitFor(); runtime.exec("git commit -m \"push\"", null, testprojDir).waitFor(); Process gitPush = runtime.exec("git push", null, testprojDir); int pushExitCode = gitPush.waitFor(); wlgb.stop(); assertEquals(1, pushExitCode); List<String> actual = Util.linesFromStream(gitPush.getErrorStream(), 2, "[K"); assertEquals(EXPECTED_OUT_PUSH_INVALID_FILES, actual); } private static final List<String> EXPECTED_OUT_PUSH_INVALID_PROJECT = Arrays.asList( "remote: error: invalid project", "remote: hint: project: no main file", "remote: hint: The project would have no (editable) main .tex file.", "To http://127.0.0.1:33870/testproj.git", "! 
[remote rejected] master -> master (invalid project)", "error: failed to push some refs to 'http://127.0.0.1:33870/testproj.git'" ); @Test public void pushFailsOnInvalidProject() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3870, getResource("/pushFailsOnInvalidProject").toFile()); server.start(); server.setState(states.get("pushFailsOnInvalidProject").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33870, 3870) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33870, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/pushFailsOnInvalidProject/state/testproj"), testprojDir.toPath())); runtime.exec("touch push.tex", null, testprojDir).waitFor(); runtime.exec("git add -A", null, testprojDir).waitFor(); runtime.exec("git commit -m \"push\"", null, testprojDir).waitFor(); Process gitPush = runtime.exec("git push", null, testprojDir); int pushExitCode = gitPush.waitFor(); wlgb.stop(); assertEquals(1, pushExitCode); List<String> actual = Util.linesFromStream(gitPush.getErrorStream(), 2, "[K"); assertEquals(EXPECTED_OUT_PUSH_INVALID_PROJECT, actual); } private static final List<String> EXPECTED_OUT_PUSH_UNEXPECTED_ERROR = Arrays.asList( "remote: error: Overleaf error", "remote: hint: There was an internal error with the Overleaf server.", "remote: hint: Please contact Overleaf.", "To http://127.0.0.1:33871/testproj.git", "! [remote rejected] master -> master (Overleaf error)", "error: failed to push some refs to 'http://127.0.0.1:33871/testproj.git'" ); /* this one prints a stack trace */ @Test public void pushFailsOnUnexpectedError() throws IOException, GitAPIException, InterruptedException { MockSnapshotServer server = new MockSnapshotServer(3871, getResource("/pushFailsOnUnexpectedError").toFile()); server.start(); server.setState(states.get("pushFailsOnUnexpectedError").get("state")); GitBridgeApp wlgb = new GitBridgeApp(new String[] { makeConfigFile(33871, 3871) }); wlgb.run(); File dir = folder.newFolder(); File testprojDir = cloneRepository("testproj", 33871, dir); assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/pushFailsOnUnexpectedError/state/testproj"), testprojDir.toPath())); runtime.exec("touch push.tex", null, testprojDir).waitFor(); runtime.exec("git add -A", null, testprojDir).waitFor(); runtime.exec("git commit -m \"push\"", null, testprojDir).waitFor(); Process gitPush = runtime.exec("git push", null, testprojDir); int pushExitCode = gitPush.waitFor(); wlgb.stop(); assertEquals(1, pushExitCode); List<String> actual = Util.linesFromStream(gitPush.getErrorStream(), 2, "[K"); assertEquals(EXPECTED_OUT_PUSH_UNEXPECTED_ERROR, actual); } private static final List<String> EXPECTED_OUT_PUSH_INVALID_EXE_FILE = Arrays.asList( "remote: error: invalid files", "remote: hint: You have 1 invalid files in your Overleaf project:", "remote: hint: file1.exe (invalid file extension)", "To http://127.0.0.1:33872/testproj.git", "! 
[remote rejected] master -> master (invalid files)",
        "error: failed to push some refs to 'http://127.0.0.1:33872/testproj.git'"
    );

    @Test
    public void pushSucceedsAfterRemovingInvalidFiles() throws IOException, GitAPIException, InterruptedException {
        MockSnapshotServer server = new MockSnapshotServer(3872, getResource("/pushSucceedsAfterRemovingInvalidFiles").toFile());
        server.start();
        server.setState(states.get("pushSucceedsAfterRemovingInvalidFiles").get("invalidState"));
        GitBridgeApp wlgb = new GitBridgeApp(new String[] {
                makeConfigFile(33872, 3872)
        });
        wlgb.run();
        File dir = folder.newFolder();
        File testprojDir = cloneRepository("testproj", 33872, dir);
        // try to push invalid file; it should fail
        assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/pushSucceedsAfterRemovingInvalidFiles/invalidState/testproj"), testprojDir.toPath()));
        assertEquals(0, runtime.exec("touch file1.exe", null, testprojDir).waitFor());
        assertEquals(0, runtime.exec("git add -A", null, testprojDir).waitFor());
        assertEquals(0, runtime.exec("git commit -m \"push\"", null, testprojDir).waitFor());
        Process gitPush = runtime.exec("git push", null, testprojDir);
        int pushExitCode = gitPush.waitFor();
        assertEquals(1, pushExitCode);
        List<String> actual = Util.linesFromStream(gitPush.getErrorStream(), 0, "[K");
        assertEquals(EXPECTED_OUT_PUSH_INVALID_EXE_FILE, actual);
        // remove invalid file and push again; it should succeed this time
        assertEquals(0, runtime.exec("git rm file1.exe", null, testprojDir).waitFor());
        assertEquals(0, runtime.exec("git commit -m remove_invalid_file", null, testprojDir).waitFor());
        server.setState(states.get("pushSucceedsAfterRemovingInvalidFiles").get("validState"));
        gitPush = runtime.exec("git push", null, testprojDir);
        pushExitCode = gitPush.waitFor();
        wlgb.stop();
        assertEquals(0, pushExitCode);
        assertTrue(FileUtil.gitDirectoriesAreEqual(getResource("/pushSucceedsAfterRemovingInvalidFiles/validState/testproj"), testprojDir.toPath()));
    }

    private File cloneRepository(String repositoryName, int port, File dir) throws IOException, InterruptedException {
        String repo = "git clone http://127.0.0.1:" + port + "/" + repositoryName + ".git";
        assertEquals(0, runtime.exec(repo, null, dir).waitFor());
        File repositoryDir = new File(dir, repositoryName);
        assertEquals(0, runtime.exec("git config user.name TEST", null, repositoryDir).waitFor());
        assertEquals(0, runtime.exec("git config user.email test@test.com", null, repositoryDir).waitFor());
        assertEquals(0, runtime.exec("git config push.default matching", null, repositoryDir).waitFor());
        return repositoryDir;
    }

    private String makeConfigFile(int port, int apiPort) throws IOException {
        File wlgb = folder.newFolder();
        File config = folder.newFile();
        PrintWriter writer = new PrintWriter(config);
        // NOTE: the oauth2Server value is truncated in the source and left as-is.
        writer.println("{\n" +
                "\t\"port\": " + port + ",\n" +
                "\t\"rootGitDirectory\": \"" + wlgb.getAbsolutePath() + "\",\n" +
                "\t\"apiBaseUrl\": \"http://127.0.0.1:" + apiPort + "/api/v0\",\n" +
                "\t\"username\": \"\",\n" +
                "\t\"password\": \"\",\n" +
                "\t\"postbackBaseUrl\": \"http://127.0.0.1:" + port + "\",\n" +
                "\t\"serviceName\": \"Overleaf\",\n" +
                "\t\"oauth2\": {\n" +
                "\t\t\"oauth2ClientID\": \"clientID\",\n" +
                "\t\t\"oauth2ClientSecret\": \"oauth2 client secret\",\n" +
                "\t\t\"oauth2Server\": \"https:\"\n" +
                "\t}\n" +
                "}\n");
        writer.close();
        return config.getAbsolutePath();
    }

    private Path getResource(String path) {
        return Paths.get("src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest" + path);
    }

    private InputStream getResourceAsStream(String path) {
        try {
            return new FileInputStream(getResource(path).toFile());
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    private static String withoutWhitespace(String s) {
        return s.replaceAll("\\s","");
    }
}
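// Illustrative sketch (not part of the test class above): the integration tests repeat the
// runtime.exec(...).waitFor() pattern for every git command. A small helper like this one --
// class and method names are hypothetical -- centralises the working directory and exit-code
// handling; only standard JDK APIs are assumed.
import java.io.File;
import java.io.IOException;

final class GitCommandSketch {

    /** Runs a command in the given directory and returns its exit code. */
    static int run(File workingDir, String... command) throws IOException, InterruptedException {
        Process process = new ProcessBuilder(command)
                .directory(workingDir)
                .inheritIO() // surface git output while debugging tests
                .start();
        return process.waitFor();
    }

    public static void main(String[] args) throws Exception {
        File repo = new File(".");
        int exitCode = run(repo, "git", "status");
        System.out.println("git status exited with " + exitCode);
    }
}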
package arez.processor; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import java.util.List; import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.type.ExecutableType; /** * Declaration of a reference. */ final class ReferenceDescriptor { @Nonnull private final ComponentDescriptor _componentDescriptor; @Nonnull private final String _name; @Nullable private ExecutableElement _method; @Nullable private ExecutableType _methodType; @Nullable private String _linkType; @Nullable private ObservableDescriptor _observable; @Nullable private ExecutableElement _idMethod; @Nullable private ExecutableType _idMethodType; @Nullable private String _inverseName; @Nullable private Multiplicity _inverseMultiplicity; ReferenceDescriptor( @Nonnull final ComponentDescriptor componentDescriptor, @Nonnull final String name ) { _componentDescriptor = Objects.requireNonNull( componentDescriptor ); _name = Objects.requireNonNull( name ); } void setIdMethod( @Nonnull final ExecutableElement method, @Nonnull final ExecutableType methodType ) { assert null == _idMethod; assert null == _idMethodType; _idMethod = Objects.requireNonNull( method ); _idMethodType = Objects.requireNonNull( methodType ); } void setObservable( @Nonnull final ObservableDescriptor observable ) { setIdMethod( observable.getGetter(), observable.getGetterType() ); assert null == _observable; _observable = observable; _observable.setReferenceDescriptor( this ); } void setMethod( @Nonnull final ExecutableElement method, @Nonnull final ExecutableType methodType, @Nonnull final String linkType, @Nullable final String inverseName, @Nullable final Multiplicity inverseMultiplicity ) { assert null == _method; assert null == _methodType; assert null == _linkType; assert null == _inverseName; assert null == _inverseMultiplicity; assert ( null == inverseName && null == inverseMultiplicity ) || ( null != inverseName && null != inverseMultiplicity ); _method = Objects.requireNonNull( method ); _methodType = Objects.requireNonNull( methodType ); _linkType = Objects.requireNonNull( linkType ); _inverseName = inverseName; _inverseMultiplicity = inverseMultiplicity; } @Nonnull String getLinkType() { assert null != _linkType; return _linkType; } @Nonnull ExecutableElement getMethod() { assert null != _method; return _method; } @Nonnull private ExecutableElement getIdMethod() { assert null != _idMethod; return _idMethod; } @Nonnull String getLinkMethodName() { return GeneratorUtil.FRAMEWORK_PREFIX + "link_" + _name; } @Nonnull private String getDelinkMethodName() { return GeneratorUtil.FRAMEWORK_PREFIX + "delink_" + _name; } @Nonnull String getFieldName() { return GeneratorUtil.REFERENCE_FIELD_PREFIX + _name; } boolean hasInverse() { return null != _inverseName; } @Nonnull Multiplicity getInverseMultiplicity() { assert null != _inverseMultiplicity; return _inverseMultiplicity; } void buildFields( @Nonnull final TypeSpec.Builder builder ) { assert null != _method; final FieldSpec.Builder field = FieldSpec.builder( TypeName.get( _method.getReturnType() ), getFieldName(), Modifier.PRIVATE ). 
addAnnotation( GeneratorUtil.NULLABLE_CLASSNAME ); builder.addField( field.build() ); } void buildMethods( @Nonnull final TypeSpec.Builder builder ) throws ArezProcessorException { builder.addMethod( buildReferenceMethod() ); builder.addMethod( buildLinkMethod() ); if ( hasInverse() ) { builder.addMethod( buildDelinkMethod() ); } } @Nonnull private MethodSpec buildReferenceMethod() throws ArezProcessorException { assert null != _method; assert null != _methodType; assert null != _idMethod; assert null != _idMethodType; assert null != _linkType; final String methodName = _method.getSimpleName().toString(); final MethodSpec.Builder builder = MethodSpec.methodBuilder( methodName ); ProcessorUtil.copyAccessModifiers( _method, builder ); ProcessorUtil.copyTypeParameters( _methodType, builder ); ProcessorUtil.copyWhitelistedAnnotations( _method, builder ); builder.addAnnotation( Override.class ); builder.returns( TypeName.get( _method.getReturnType() ) ); GeneratorUtil.generateNotDisposedInvariant( _componentDescriptor, builder, methodName ); final boolean isNullable = !getIdMethod().getReturnType().getKind().isPrimitive() && null == ProcessorUtil.findAnnotationByType( getIdMethod(), Constants.NONNULL_ANNOTATION_CLASSNAME ); if ( !"LAZY".equals( _linkType ) ) { final CodeBlock.Builder block = CodeBlock.builder(); block.beginControlFlow( "if ( $T.shouldCheckApiInvariants() )", GeneratorUtil.AREZ_CLASSNAME ); if ( isNullable ) { block.addStatement( "$T.apiInvariant( () -> null != $N || null == $N(), () -> \"Nullable reference method " + "named '$N' invoked on component named '\" + $N() + \"' and reference has not been " + "resolved yet is not lazy. Id = \" + $N() )", GeneratorUtil.GUARDS_CLASSNAME, getFieldName(), _idMethod.getSimpleName(), _method.getSimpleName(), _componentDescriptor.getComponentNameMethodName(), _idMethod.getSimpleName() ); } else { block.addStatement( "$T.apiInvariant( () -> null != $N, () -> \"Nonnull reference method named '$N' " + "invoked on component named '\" + $N() + \"' but reference has not been resolved yet " + "is not lazy. 
Id = \" + $N() )", GeneratorUtil.GUARDS_CLASSNAME, getFieldName(), _method.getSimpleName(), _componentDescriptor.getComponentNameMethodName(), _idMethod.getSimpleName() ); } block.endControlFlow(); builder.addCode( block.build() ); if ( null != _observable ) { if ( _observable.canReadOutsideTransaction() ) { builder.addStatement( "this.$N.reportObservedIfTrackingTransactionActive()", _observable.getFieldName() ); } else { builder.addStatement( "this.$N.reportObserved()", _observable.getFieldName() ); } } } else { if ( null == _observable ) { builder.addStatement( "this.$N()", getLinkMethodName() ); } else { final CodeBlock.Builder block = CodeBlock.builder(); block.beginControlFlow( "if ( null == this.$N )", getFieldName() ); block.addStatement( "this.$N()", getLinkMethodName() ); block.nextControlFlow( "else" ); if ( _observable.canReadOutsideTransaction() ) { block.addStatement( "this.$N.reportObservedIfTrackingTransactionActive()", _observable.getFieldName() ); } else { block.addStatement( "this.$N.reportObserved()", _observable.getFieldName() ); } block.endControlFlow(); builder.addCode( block.build() ); } } builder.addStatement( "return this.$N", getFieldName() ); return builder.build(); } @Nonnull private MethodSpec buildLinkMethod() throws ArezProcessorException { final String methodName = getLinkMethodName(); final MethodSpec.Builder builder = MethodSpec.methodBuilder( methodName ); builder.addModifiers( Modifier.PRIVATE ); GeneratorUtil.generateNotDisposedInvariant( _componentDescriptor, builder, methodName ); final boolean isNullable = !getIdMethod().getReturnType().getKind().isPrimitive() && null == ProcessorUtil.findAnnotationByType( getIdMethod(), Constants.NONNULL_ANNOTATION_CLASSNAME ); if ( "EAGER".equals( getLinkType() ) ) { /* * Linking under eager should always proceed and does not need a null check * as the link method only called when a link is required. */ builder.addStatement( "final $T id = this.$N()", getIdMethod().getReturnType(), getIdMethod().getSimpleName() ); if ( isNullable ) { final CodeBlock.Builder nestedBlock = CodeBlock.builder(); nestedBlock.beginControlFlow( "if ( null != id )" ); buildLookup( nestedBlock ); nestedBlock.nextControlFlow( "else" ); nestedBlock.addStatement( "this.$N = null", getFieldName() ); nestedBlock.endControlFlow(); builder.addCode( nestedBlock.build() ); } else { buildLookup( builder ); } } else { final CodeBlock.Builder block = CodeBlock.builder(); block.beginControlFlow( "if ( null == this.$N )", getFieldName() ); block.addStatement( "final $T id = this.$N()", getIdMethod().getReturnType(), getIdMethod().getSimpleName() ); if ( isNullable ) { final CodeBlock.Builder nestedBlock = CodeBlock.builder(); nestedBlock.beginControlFlow( "if ( null != id )" ); buildLookup( nestedBlock ); nestedBlock.endControlFlow(); block.add( nestedBlock.build() ); } else { buildLookup( block ); } block.endControlFlow(); builder.addCode( block.build() ); } return builder.build(); } private void buildLookup( @Nonnull final MethodSpec.Builder builder ) { builder.addStatement( "this.$N = this.$N().findById( $T.class, id )", getFieldName(), GeneratorUtil.LOCATOR_METHOD_NAME, getMethod().getReturnType() ); final CodeBlock.Builder block = CodeBlock.builder(); block.beginControlFlow( "if ( $T.shouldCheckApiInvariants() )", GeneratorUtil.AREZ_CLASSNAME ); block.addStatement( "$T.apiInvariant( () -> null != $N, () -> \"Reference method named '$N' " + "invoked on component named '\" + $N() + \"' missing related entity. 
Id = \" + $N() )", GeneratorUtil.GUARDS_CLASSNAME, getFieldName(), getMethod().getSimpleName(), _componentDescriptor.getComponentNameMethodName(), getIdMethod().getSimpleName() ); block.endControlFlow(); builder.addCode( block.build() ); if ( hasInverse() ) { assert null != _inverseName; final String linkMethodName = _inverseMultiplicity == Multiplicity.MANY ? GeneratorUtil.getInverseAddMethodName( _inverseName ) : GeneratorUtil.getInverseSetMethodName( _inverseName ); builder.addStatement( "( ($T) this.$N ).$N( this )", getArezClassName(), getFieldName(), linkMethodName ); } } private void buildLookup( @Nonnull final CodeBlock.Builder builder ) { builder.addStatement( "this.$N = this.$N().findById( $T.class, id )", getFieldName(), GeneratorUtil.LOCATOR_METHOD_NAME, getMethod().getReturnType() ); final CodeBlock.Builder block = CodeBlock.builder(); block.beginControlFlow( "if ( $T.shouldCheckApiInvariants() )", GeneratorUtil.AREZ_CLASSNAME ); block.addStatement( "$T.apiInvariant( () -> null != $N, () -> \"Reference method named '$N' " + "invoked on component named '\" + $N() + \"' missing related entity. Id = \" + $N() )", GeneratorUtil.GUARDS_CLASSNAME, getFieldName(), getMethod().getSimpleName(), _componentDescriptor.getComponentNameMethodName(), getIdMethod().getSimpleName() ); block.endControlFlow(); builder.add( block.build() ); if ( hasInverse() ) { assert null != _inverseName; final String linkMethodName = _inverseMultiplicity == Multiplicity.MANY ? GeneratorUtil.getInverseAddMethodName( _inverseName ) : GeneratorUtil.getInverseSetMethodName( _inverseName ); assert null != _method; builder.addStatement( "( ($T) this.$N ).$N( this )", getArezClassName(), getFieldName(), linkMethodName ); } } void buildDisposer( @Nonnull final MethodSpec.Builder builder ) { if ( hasInverse() ) { builder.addStatement( "this.$N()", getDelinkMethodName() ); } } @Nonnull private MethodSpec buildDelinkMethod() throws ArezProcessorException { assert null != _method; assert null != _inverseName; final String methodName = getDelinkMethodName(); final MethodSpec.Builder builder = MethodSpec.methodBuilder( methodName ); builder.addModifiers( Modifier.PRIVATE ); GeneratorUtil.generateNotDisposedInvariant( _componentDescriptor, builder, methodName ); final CodeBlock.Builder nestedBlock = CodeBlock.builder(); nestedBlock.beginControlFlow( "if ( null != $N )", getFieldName() ); assert null != _inverseName; assert null != _inverseName; final String delinkMethodName = _inverseMultiplicity == Multiplicity.MANY ? GeneratorUtil.getInverseRemoveMethodName( _inverseName ) : GeneratorUtil.getInverseUnsetMethodName( _inverseName ); nestedBlock.addStatement( "( ($T) this.$N ).$N( this )", getArezClassName(), getFieldName(), delinkMethodName ); nestedBlock.addStatement( "this.$N = null", getFieldName() ); nestedBlock.endControlFlow(); builder.addCode( nestedBlock.build() ); return builder.build(); } @Nonnull private ClassName getArezClassName() { assert null != _method; final ClassName other = (ClassName) TypeName.get( _method.getReturnType() ); final StringBuilder sb = new StringBuilder(); final String packageName = other.packageName(); if ( null != packageName ) { sb.append( packageName ); sb.append( "." 
); } final List<String> simpleNames = other.simpleNames(); final int end = simpleNames.size() - 1; for ( int i = 0; i < end; i++ ) { sb.append( simpleNames.get( i ) ); sb.append( "_" ); } sb.append( "Arez_" ); sb.append( simpleNames.get( end ) ); return ClassName.bestGuess( sb.toString() ); } void validate() throws ArezProcessorException { if ( null == _idMethod ) { assert null != _method; throw new ArezProcessorException( "@Reference exists but there is no corresponding @ReferenceId", _method ); } else if ( null == _method ) { throw new ArezProcessorException( "@ReferenceId exists but there is no corresponding @Reference", _idMethod ); } else if ( null != _observable && !_observable.hasSetter() ) { throw new ArezProcessorException( "@ReferenceId added to @Observable method but expectSetter = false on " + "property which is not compatible with @ReferenceId", _idMethod ); } } }
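// Illustrative sketch (plain Java, no Arez classes): the control flow that buildLinkMethod()
// above generates. For LAZY references the lookup is guarded by a null check on the cached
// field; for EAGER references it always runs and the related entity must exist. The names
// here (LOCATOR, Employer, employerId) are hypothetical stand-ins for the generated members.
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

class ReferenceLinkSketch {
    static class Employer { final int id; Employer(int id) { this.id = id; } }

    /** Hypothetical stand-in for the component's Locator. */
    static final Map<Integer, Employer> LOCATOR = new HashMap<>();

    private Employer employer;           // generated backing field for the reference
    private final int employerId = 42;   // value normally produced by the @ReferenceId method

    /** EAGER variant: always resolve, then assert the related entity exists. */
    void linkEager() {
        employer = LOCATOR.get(employerId);
        Objects.requireNonNull(employer, "Reference 'employer' missing related entity");
    }

    /** LAZY variant: resolve only when the cached reference is still null. */
    void linkLazy() {
        if (null == employer) {
            employer = LOCATOR.get(employerId);
        }
    }

    public static void main(String[] args) {
        LOCATOR.put(42, new Employer(42));
        ReferenceLinkSketch component = new ReferenceLinkSketch();
        component.linkLazy();               // resolves on first access
        System.out.println(component.employer.id);
    }
}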
package de.factoryfx.factory; import java.util.*; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.google.common.base.Throwables; import de.factoryfx.data.AttributeAndName; import de.factoryfx.data.Data; import de.factoryfx.data.attribute.Attribute; import de.factoryfx.factory.atrribute.*; import de.factoryfx.factory.log.FactoryLogEntry; import de.factoryfx.factory.log.FactoryLogEntryEvent; import de.factoryfx.factory.log.FactoryLogEntryEventType; /** * @param <L> liveobject created from this factory * @param <V> runtime visitor */ @JsonInclude(JsonInclude.Include.NON_NULL) @JsonTypeInfo(use=JsonTypeInfo.Id.MINIMAL_CLASS, include=JsonTypeInfo.As.PROPERTY, property="@class") public class FactoryBase<L,V> extends Data implements Iterable<FactoryBase<?, V>>{ public FactoryBase() { } @JsonIgnore private L createdLiveObject; @JsonIgnore private boolean started=false; @JsonIgnore boolean needRecreation =false; @JsonIgnore private FactoryLogEntry factoryLogEntry=new FactoryLogEntry(this); @JsonIgnore private L previousLiveObject; private void resetLog() { factoryLogEntry=new FactoryLogEntry(this); } private L instance() { if (needRecreation){ previousLiveObject = this.createdLiveObject; this.createdLiveObject = reCreate(previousLiveObject); needRecreation=false; started=false; } else { if (createdLiveObject==null){ createdLiveObject = create(); } } return createdLiveObject; } <U> U loggedAction(FactoryLogEntryEventType type, Supplier<U> action){ long start=System.nanoTime(); U result = action.get(); factoryLogEntry.events.add(new FactoryLogEntryEvent(type,System.nanoTime()-start)); return result; } private void loggedAction(FactoryLogEntryEventType type, Runnable action){ loggedAction(type, (Supplier<Void>) () -> { action.run(); return null; }); } L create(){ if (creator==null){ throw new IllegalStateException("no creator defined: "+getClass()); } return loggedAction(FactoryLogEntryEventType.CREATE, ()-> creator.get()); } private L reCreate(L previousLiveObject) { if (reCreatorWithPreviousLiveObject!=null){ return loggedAction(FactoryLogEntryEventType.RECREATE, ()-> { return reCreatorWithPreviousLiveObject.apply(previousLiveObject); }); } return create(); } private void start() { if (!started && starterWithNewLiveObject!=null && createdLiveObject!=null){//createdLiveObject is null e.g. 
if object ist not instanced in the parent factory loggedAction(FactoryLogEntryEventType.START, ()-> { starterWithNewLiveObject.accept(createdLiveObject); started=true; }); } } private void destroy(Set<FactoryBase<?,V>> previousFactories) { if (!previousFactories.contains(this) && destroyerWithPreviousLiveObject!=null){ loggedAction(FactoryLogEntryEventType.DESTROY, ()-> { destroyerWithPreviousLiveObject.accept(createdLiveObject); }); } if (previousLiveObject!=null && destroyerWithPreviousLiveObject!=null){ loggedAction(FactoryLogEntryEventType.DESTROY, ()-> { destroyerWithPreviousLiveObject.accept(previousLiveObject); }); } previousLiveObject=null; } private void determineRecreationNeed(Set<Data> changedData, ArrayDeque<FactoryBase<?,?>> path){ if (needRecreation){ return;//already checked } path.push(this); needRecreation =changedData.contains(this) || createdLiveObject==null; //null means newly added if (needRecreation){ for (FactoryBase factoryBase: path){ factoryBase.needRecreation =true; } } visitChildFactoriesAndViewsFlat(child -> child.determineRecreationNeed(changedData,path)); path.pop(); } private void loopDetector(){ collectChildFactoriesDeep(); } private Set<FactoryBase<?,V>> collectChildFactoriesDeep(){ final HashSet<FactoryBase<?, V>> result = new HashSet<>(); collectChildFactoriesDeep(this,result,new HashSet<>()); return result; } private void collectChildFactoriesDeep(FactoryBase<?,V> factory, Set<FactoryBase<?, V>> result, Set<FactoryBase<?, V>> stack){ if (result.add(factory)){ stack.add(factory); factory.visitChildFactoriesAndViewsFlat(child -> collectChildFactoriesDeep(child,result,stack)); stack.remove(factory); } else { if (stack.contains(factory)){ throw new IllegalStateException("Factories contains a cycle, circular dependencies are not supported cause it indicates a design flaw."); } } } private List<FactoryBase<?,V>> collectChildrenFactoriesFlat() { List<FactoryBase<?,V>> result = new ArrayList<>(); this.visitChildFactoriesAndViewsFlat(result::add); return result; } @Override public Iterator<FactoryBase<?, V>> iterator() { return collectChildrenFactoriesFlat().iterator(); } private String debugInfo(){ try { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("ID:\n "); stringBuilder.append(getId()); stringBuilder.append("\nAttributes:\n"); this.internal().visitAttributesFlat((attributeVariableName, attribute) -> { stringBuilder.append(" ").append(attribute.internal_getPreferredLabelText(Locale.ENGLISH)).append(": ").append(attribute.getDisplayText()).append("\n"); }); return stringBuilder.toString().trim(); } catch (Exception e) { return "can't create debuginfo text cause:\n"+ Throwables.getStackTraceAsString(e); } } private void runtimeQuery(V visitor) { if (executorWidthVisitorAndCurrentLiveObject!=null){ executorWidthVisitorAndCurrentLiveObject.accept(visitor,createdLiveObject); } } @SuppressWarnings("unchecked") private void visitChildFactoriesAndViewsFlat(Consumer<FactoryBase<?,V>> consumer) { for (AttributeAndName attributeAndName: this.internal().getAttributes()){ Attribute<?,?> attribute=attributeAndName.attribute; if (attribute instanceof FactoryReferenceAttribute) { FactoryBase<?, V> factory = (FactoryBase<?, V>)attribute.get(); if (factory!=null){ consumer.accept(factory); } } if (attribute instanceof FactoryReferenceListAttribute) { List<?> factories = ((FactoryReferenceListAttribute<?, ?>) attribute).get(); for (Object factory: factories){ consumer.accept((FactoryBase<?, V>)factory); } } if (attribute instanceof 
FactoryViewReferenceAttribute) { FactoryBase<?, V> factory = (FactoryBase<?, V>)attribute.get(); if (factory!=null){ consumer.accept(factory); } } if (attribute instanceof FactoryViewListReferenceAttribute) { List<?> factories = ((FactoryViewListReferenceAttribute<?, ?, ?>) attribute).get(); for (Object factory: factories){ consumer.accept((FactoryBase<?, V>)factory); } } if (attribute instanceof FactoryPolymorphicReferenceAttribute) { FactoryBase<?, V> factory = (FactoryBase<?, V>)attribute.get(); if (factory!=null){ consumer.accept(factory); } } } } final FactoryInternal<L,V> factoryInternal = new FactoryInternal<>(this); /** <b>internal methods should be only used from the framework.</b> * They may change in the Future. * There is no fitting visibility in java therefore this workaround. */ public FactoryInternal<L,V> internalFactory(){ return factoryInternal; } public static class FactoryInternal<L,V> { private final FactoryBase<L,V> factory; public FactoryInternal(FactoryBase<L, V> factory) { this.factory = factory; } /** create and prepare the liveobject*/ public L create(){ return factory.create(); } public FactoryLogEntry createFactoryLogEntry() { return factory.createFactoryLogEntry(false); } public FactoryLogEntry createFactoryLogEntryFlat(){ return factory.createFactoryLogEntry(true); } /**determine which live objects needs recreation*/ public void determineRecreationNeed(Set<Data> changedData) { factory.determineRecreationNeed(changedData,new ArrayDeque<>()); } public void resetLog() { factory.resetLog(); } /** start the liveObject e.g open a port*/ public void start() { factory.start(); } /** start the liveObject e.g open a port*/ public void destroy(Set<FactoryBase<?,V>> previousFactories) { factory.destroy(previousFactories); } /** execute visitor to get runtime information from the liveobject*/ public void runtimeQuery(V visitor) { factory.runtimeQuery(visitor); } public void visitChildFactoriesAndViewsFlat(Consumer<FactoryBase<?,V>> consumer) { factory.visitChildFactoriesAndViewsFlat(consumer); } public L instance() { return factory.instance(); } public void loopDetector() { factory.loopDetector(); } public Set<FactoryBase<?,V>> collectChildFactoriesDeep(){ return factory.collectChildFactoriesDeep(); } public HashMap<String,FactoryBase<?,V>> collectChildFactoriesDeepMap(){ final Set<FactoryBase<?, V>> factoryBases = factory.collectChildFactoriesDeep(); HashMap<String, FactoryBase<?, V>> result = new HashMap<>(); for (FactoryBase<?, V> factory: factoryBases){ result.put(factory.getId(),factory); } return result; } public List<FactoryBase<?,V>> collectChildrenFactoriesFlat() { return factory.collectChildrenFactoriesFlat(); } public String debugInfo() { return factory.debugInfo(); } } private FactoryLogEntry createFactoryLogEntry(boolean flat) { if (factoryLogEntry.hasEvents()){ if (!flat){ this.internalFactory().collectChildrenFactoriesFlat().forEach(child -> { factoryLogEntry.children.add(child.createFactoryLogEntry(flat)); }); factoryLogEntry.children.removeIf(Objects::isNull); } return factoryLogEntry; } return null; } Supplier<L> creator=null; Function<L,L> reCreatorWithPreviousLiveObject=null; Consumer<L> starterWithNewLiveObject=null; Consumer<L> destroyerWithPreviousLiveObject=null; BiConsumer<V,L> executorWidthVisitorAndCurrentLiveObject=null; private void setCreator(Supplier<L> creator){ this.creator=creator; } private void setReCreator(Function<L,L> reCreatorWithPreviousLiveObject ) { this.reCreatorWithPreviousLiveObject=reCreatorWithPreviousLiveObject; } private 
void setStarter(Consumer<L> starterWithNewLiveObject) {
        this.starterWithNewLiveObject = starterWithNewLiveObject;
    }

    private void setDestroyer(Consumer<L> destroyerWithPreviousLiveObject) {
        this.destroyerWithPreviousLiveObject = destroyerWithPreviousLiveObject;
    }

    private void setRuntimeQueryExecutor(BiConsumer<V,L> executorWidthVisitorAndCurrentLiveObject) {
        this.executorWidthVisitorAndCurrentLiveObject = executorWidthVisitorAndCurrentLiveObject;
    }

    final LiveCycleConfig<L,V> liveCycleConfig = new LiveCycleConfig<>(this);

    /**
     * Lifecycle configuration API.<br>
     * <br>
     * Update order:<br>
     * 1. recreate for changed factories, create for new ones<br>
     * 2. destroy removed and updated factories<br>
     * 3. start new live objects<br>
     * <br>
     * The goal is to keep the time between destroy and start as short as possible, because that
     * is essentially the application downtime. Slow operations should therefore be executed in
     * create.<br>
     * <br>
     * Should be used in the default constructor.
     */
    protected LiveCycleConfig<L,V> configLiveCycle(){
        return liveCycleConfig;
    }

    public static class LiveCycleConfig<L,V> {
        private final FactoryBase<L,V> factory;

        public LiveCycleConfig(FactoryBase<L, V> factory) {
            this.factory = factory;
        }

        /** Create and prepare the live object. */
        public void setCreator(Supplier<L> creator){
            factory.setCreator(creator);
        }

        /**
         * The factory data has changed, therefore a new live object is needed.<br>
         * previousLiveObject can be used to reuse resources like connection pools etc.<br>
         * The passed old live object is never null.
         */
        public void setReCreator(Function<L,L> reCreatorWithPreviousLiveObject ) {
            factory.setReCreator(reCreatorWithPreviousLiveObject);
        }

        /** Start the live object, e.g. open a port. */
        public void setStarter(Consumer<L> starterWithNewLiveObject) {
            factory.setStarter(starterWithNewLiveObject);
        }

        /** Finally free the live object, e.g. close a port. */
        public void setDestroyer(Consumer<L> destroyerWithPreviousLiveObject) {
            factory.setDestroyer(destroyerWithPreviousLiveObject);
        }

        /** Execute a visitor to get runtime information from the live objects. */
        public void setRuntimeQueryExecutor(BiConsumer<V,L> executorWidthVisitorAndCurrentLiveObject) {
            factory.setRuntimeQueryExecutor(executorWidthVisitorAndCurrentLiveObject);
        }
    }
}
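// Illustrative sketch (the FactoryBase and LiveCycleConfig classes are the ones defined above;
// the Server live object is hypothetical): a factory wires its lifecycle in the default
// constructor through configLiveCycle(), following the documented update order -- create or
// recreate first, destroy next, start last -- so slow work belongs in the creator.
import de.factoryfx.factory.FactoryBase;

/** Hypothetical live object managed by the factory below. */
class Server {
    void start() { /* e.g. open a port */ }
    void stop()  { /* e.g. close the port */ }
}

class ServerFactorySketch extends FactoryBase<Server, Void> {
    public ServerFactorySketch() {
        configLiveCycle().setCreator(Server::new);      // slow setup belongs here
        configLiveCycle().setStarter(Server::start);    // runs after old objects are destroyed
        configLiveCycle().setDestroyer(Server::stop);   // runs for removed or updated factories
    }
}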
package org.nohope.typetools; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.introspect.VisibilityChecker; import com.fasterxml.jackson.datatype.joda.JodaModule; import org.nohope.logging.Logger; import org.nohope.logging.LoggerFactory; import org.nohope.typetools.json.ColorModule; import java.io.IOException; import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.ANY; import static com.fasterxml.jackson.annotation.JsonInclude.Include.NON_EMPTY; import static com.fasterxml.jackson.databind.SerializationFeature.*; public final class JSON { public static final JSON JSON = new JSON(new ObjectMapper(), true); private static final Logger LOG = LoggerFactory.getLogger(JSON.class); private final ObjectMapper usualMapper; private final ObjectMapper prettyMapper; private JSON(final ObjectMapper mapper, final boolean builtin) { usualMapper = mapper.copy(); if (builtin) { usualMapper.registerModule(new JodaModule()); usualMapper.registerModule(new ColorModule()); usualMapper.setSerializationInclusion(NON_EMPTY); usualMapper.enableDefaultTypingAsProperty(ObjectMapper.DefaultTyping.NON_FINAL, "@class"); usualMapper.setVisibilityChecker(VisibilityChecker.Std.defaultInstance().withFieldVisibility(ANY)); } prettyMapper = usualMapper.copy(); prettyMapper.configure(INDENT_OUTPUT, true); prettyMapper.configure(WRITE_DATES_AS_TIMESTAMPS, false); prettyMapper.configure(FAIL_ON_EMPTY_BEANS, false); } public static JSON customize(final ObjectMapper mapper) { return customize(mapper, true); } public static JSON customize(final ObjectMapper mapper, final boolean builtin) { return new JSON(mapper, builtin); } public Object pretty(final Object obj) { return pretty(obj, defaultErrorMessage(obj)); } public Object jsonify(final Object obj) { return jsonify(obj, defaultErrorMessage(obj)); } /** * Serializes given object with Jackson, then deserializes into class required. * Not so fast, so shouldn't be used for deep copying. * * @throws IOException */ public <T> T copyAs(final Object source, final Class<T> clazz) throws IOException { final byte [] marshalled = usualMapper.writeValueAsBytes(source); return usualMapper.readValue(marshalled, clazz); } private Object jsonify(final Object obj, final String onErrorMessage) { return jsonifyWith(usualMapper, obj, onErrorMessage); } private Object pretty(final Object obj, final String onErrorMessage) { return jsonifyWith(prettyMapper, obj, onErrorMessage); } @SuppressWarnings("PMD.AvoidCatchingThrowable") private static Object jsonifyWith(final ObjectMapper mapper, final Object obj, final String onErrorMessage) { return new Object() { @Override public String toString() { try { return mapper.writeValueAsString(obj); } catch (final Throwable e) { LOG.error(e, "Unable to jsonify object of class {}", obj == null ? null : obj.getClass()); return onErrorMessage; } } }; } private static String defaultErrorMessage(final Object obj) { if (null != obj) { return "<? " + obj.getClass().getCanonicalName() + "/>"; } return "<?null />"; } }
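// Illustrative sketch: jsonify() and pretty() above return a wrapper whose toString() performs
// the Jackson serialization, so the cost is only paid when the result is actually rendered
// (for example by a logger whose level is enabled). The Config class here is hypothetical.
import org.nohope.typetools.JSON;

class JsonLoggingSketch {

    static class Config {
        final String host = "localhost";
        final int port = 8080;
    }

    public static void main(String[] args) {
        Config config = new Config();

        Object lazy = JSON.JSON.pretty(config);   // no serialization has happened yet
        // Jackson runs only when toString() is invoked, e.g. by a logger or concatenation:
        System.out.println("loaded config: " + lazy);

        // Compact form via the non-pretty mapper.
        System.out.println(JSON.JSON.jsonify(config));
    }
}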
package git4idea.vfs; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.util.NlsContexts; import com.intellij.openapi.util.Pair; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.VcsVFSListener; import com.intellij.openapi.vcs.changes.ChangeListManagerImpl; import com.intellij.openapi.vcs.update.RefreshVFsSynchronously; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.ui.AppUIUtil; import com.intellij.vcsUtil.VcsFileUtil; import com.intellij.vcsUtil.VcsUtil; import git4idea.GitUtil; import git4idea.GitVcs; import git4idea.commands.Git; import git4idea.commands.GitCommand; import git4idea.commands.GitLineHandler; import git4idea.i18n.GitBundle; import git4idea.index.GitStageManagerKt; import git4idea.util.GitFileUtils; import git4idea.util.GitVcsConsoleWriter; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.TestOnly; import java.io.File; import java.util.*; import static com.intellij.util.containers.ContainerUtil.map; import static com.intellij.util.containers.ContainerUtil.map2Map; public final class GitVFSListener extends VcsVFSListener { private GitVFSListener(@NotNull GitVcs vcs) { super(vcs); } @NotNull public static GitVFSListener createInstance(@NotNull GitVcs vcs) { GitVFSListener listener = new GitVFSListener(vcs); listener.installListeners(); return listener; } @NotNull @Override protected String getAddTitle() { return GitBundle.getString("vfs.listener.add.title"); } @NotNull @Override protected String getSingleFileAddTitle() { return GitBundle.getString("vfs.listener.add.single.title"); } @NotNull @Override protected String getSingleFileAddPromptTemplate() { return GitBundle.getString("vfs.listener.add.single.prompt"); } @Override protected void executeAdd(@NotNull final List<VirtualFile> addedFiles, @NotNull final Map<VirtualFile, VirtualFile> copiedFiles) { executeAddWithoutIgnores(addedFiles, copiedFiles, (notIgnoredAddedFiles, copiedFilesMap) -> originalExecuteAdd(notIgnoredAddedFiles, copiedFilesMap)); } @Override protected void executeAddWithoutIgnores(@NotNull List<VirtualFile> addedFiles, @NotNull Map<VirtualFile, VirtualFile> copyFromMap, @NotNull ExecuteAddCallback executeAddCallback) { saveUnsavedVcsIgnoreFiles(); final ProgressManager progressManager = ProgressManager.getInstance(); progressManager.run(new Task.Backgroundable(myProject, GitBundle.getString("vfs.listener.checking.ignored"), true) { @Override public void run(@NotNull ProgressIndicator pi) { // Filter added files before further processing Map<VirtualFile, List<VirtualFile>> sortedFiles = GitUtil.sortFilesByGitRootIgnoringMissing(myProject, addedFiles); final HashSet<VirtualFile> retainedFiles = new HashSet<>(); for (Map.Entry<VirtualFile, List<VirtualFile>> e : sortedFiles.entrySet()) { VirtualFile root = e.getKey(); List<VirtualFile> files = e.getValue(); pi.setText(root.getPresentableUrl()); try { retainedFiles.addAll(Git.getInstance().untrackedFiles(myProject, root, files)); } catch (VcsException ex) { GitVcsConsoleWriter.getInstance(myProject).showMessage(ex.getMessage()); } } addedFiles.retainAll(retainedFiles); AppUIUtil.invokeLaterIfProjectAlive(myProject, () -> executeAddCallback.executeAdd(addedFiles, copyFromMap)); } }); } /** * The version of execute add before overriding * * @param addedFiles 
the added files * @param copiedFiles the copied files */ private void originalExecuteAdd(List<VirtualFile> addedFiles, final Map<VirtualFile, VirtualFile> copiedFiles) { super.executeAdd(addedFiles, copiedFiles); } @Override protected void performAdding(@NotNull final Collection<VirtualFile> addedFiles, @NotNull final Map<VirtualFile, VirtualFile> copyFromMap) { // copied files (copyFromMap) are ignored, because they are included into added files. performAdding(map(addedFiles, VcsUtil::getFilePath)); } private void performAdding(Collection<? extends FilePath> filesToAdd) { performBackgroundOperation(filesToAdd, GitBundle.getString("add.adding"), new LongOperationPerRootExecutor() { @Override public void execute(@NotNull VirtualFile root, @NotNull List<? extends FilePath> files) throws VcsException { executeAdding(root, files); if (!myProject.isDisposed()) { VcsFileUtil.markFilesDirty(myProject, files); } } @Override public Collection<File> getFilesToRefresh() { return Collections.emptyList(); } }); } @NotNull @Override protected String getDeleteTitle() { return GitBundle.getString("vfs.listener.delete.title"); } @Override protected String getSingleFileDeleteTitle() { return GitBundle.getString("vfs.listener.delete.single.title"); } @Override protected String getSingleFileDeletePromptTemplate() { return GitBundle.getString("vfs.listener.delete.single.prompt"); } @Override protected void performDeletion(@NotNull final List<FilePath> filesToDelete) { performBackgroundOperation(filesToDelete, GitBundle.getString("remove.removing"), new LongOperationPerRootExecutor() { @Override public void execute(@NotNull VirtualFile root, @NotNull List<? extends FilePath> files) throws VcsException { executeDeletion(root, files); if (!myProject.isDisposed()) { VcsFileUtil.markFilesDirty(myProject, files); } } @Override public Collection<File> getFilesToRefresh() { return Collections.emptySet(); } }); } @Override protected void performMoveRename(@NotNull final List<MovedFileInfo> movedFiles) { List<FilePath> toAdd = new ArrayList<>(); List<FilePath> toRemove = new ArrayList<>(); List<MovedFileInfo> toForceMove = new ArrayList<>(); for (MovedFileInfo movedInfo : movedFiles) { String oldPath = movedInfo.myOldPath; String newPath = movedInfo.myNewPath; if (!movedInfo.isCaseSensitive() && GitUtil.isCaseOnlyChange(oldPath, newPath)) { toForceMove.add(movedInfo); } else { toRemove.add(VcsUtil.getFilePath(oldPath)); toAdd.add(VcsUtil.getFilePath(newPath)); } } Collection<FilePath> selectedToAdd; Collection<FilePath> selectedToRemove; if (isStageEnabled()) { selectedToAdd = selectFilePathsToAdd(toAdd); selectedToRemove = selectFilePathsToDelete(toRemove); } else { selectedToAdd = toAdd; selectedToRemove = toRemove; } LOG.debug("performMoveRename. 
\ntoAdd: " + toAdd + "\ntoRemove: " + toRemove + "\ntoForceMove: " + toForceMove); GitVcs.runInBackground(new Task.Backgroundable(myProject, GitBundle.getString("progress.title.moving.files")) { @Override public void run(@NotNull ProgressIndicator indicator) { try { List<FilePath> dirtyPaths = new ArrayList<>(); List<File> toRefresh = new ArrayList<>(); //perform adding for (Map.Entry<VirtualFile, List<FilePath>> toAddEntry : GitUtil.sortFilePathsByGitRootIgnoringMissing(myProject, selectedToAdd).entrySet()) { List<FilePath> files = toAddEntry.getValue(); executeAdding(toAddEntry.getKey(), files); dirtyPaths.addAll(files); } //perform deletion for (Map.Entry<VirtualFile, List<FilePath>> toRemoveEntry : GitUtil.sortFilePathsByGitRootIgnoringMissing(myProject, selectedToRemove).entrySet()) { List<FilePath> paths = toRemoveEntry.getValue(); executeDeletion(toRemoveEntry.getKey(), paths); dirtyPaths.addAll(paths); } //perform force move if needed Map<FilePath, MovedFileInfo> filesToForceMove = map2Map(toForceMove, info -> Pair.create(VcsUtil.getFilePath(info.myNewPath), info)); dirtyPaths.addAll(map(toForceMove, fileInfo -> VcsUtil.getFilePath(fileInfo.myOldPath))); for (Map.Entry<VirtualFile, List<FilePath>> toForceMoveEntry : GitUtil.sortFilePathsByGitRootIgnoringMissing(myProject, filesToForceMove.keySet()).entrySet()) { List<FilePath> paths = toForceMoveEntry.getValue(); toRefresh.addAll(executeForceMove(toForceMoveEntry.getKey(), paths, filesToForceMove)); dirtyPaths.addAll(paths); } VcsFileUtil.markFilesDirty(myProject, dirtyPaths); RefreshVFsSynchronously.refreshFiles(toRefresh); } catch (VcsException ex) { GitVcsConsoleWriter.getInstance(myProject).showMessage(ex.getMessage()); } } }); } private void executeAdding(@NotNull VirtualFile root, @NotNull List<? extends FilePath> files) throws VcsException { LOG.debug("Git: adding files: " + files); GitFileUtils.addPaths(myProject, root, files, false, false); } private void executeDeletion(@NotNull VirtualFile root, @NotNull List<? extends FilePath> files) throws VcsException { GitFileUtils.deletePaths(myProject, root, files, "--ignore-unmatch", "--cached", "-r"); } private Set<File> executeForceMove(@NotNull VirtualFile root, @NotNull List<? extends FilePath> files, @NotNull Map<FilePath, MovedFileInfo> filesToMove) { Set<File> toRefresh = new HashSet<>(); for (FilePath file : files) { MovedFileInfo info = filesToMove.get(file); GitLineHandler h = new GitLineHandler(myProject, root, GitCommand.MV); h.addParameters("-f"); h.addRelativePaths(VcsUtil.getFilePath(info.myOldPath), VcsUtil.getFilePath(info.myNewPath)); Git.getInstance().runCommand(h); toRefresh.add(new File(info.myOldPath)); toRefresh.add(new File(info.myNewPath)); } return toRefresh; } private boolean isStageEnabled() { return GitStageManagerKt.isStageAvailable(myProject); } @Override protected boolean isDirectoryVersioningSupported() { return false; } @Override protected boolean isRecursiveDeleteSupported() { return true; } @Override protected boolean isFileCopyingFromTrackingSupported() { return false; } @Override protected @NotNull Collection<FilePath> selectFilePathsToDelete(@NotNull final List<FilePath> deletedFiles) { if (isStageEnabled()) { return super.selectFilePathsToDelete(deletedFiles); } // For git asking about vcs delete does not make much sense. The result is practically identical. return deletedFiles; } private void performBackgroundOperation(@NotNull Collection<? 
extends FilePath> files, @NotNull @NlsContexts.ProgressTitle String operationTitle, @NotNull LongOperationPerRootExecutor executor) { Map<VirtualFile, List<FilePath>> sortedFiles = GitUtil.sortFilePathsByGitRootIgnoringMissing(myProject, files); GitVcs.runInBackground(new Task.Backgroundable(myProject, operationTitle) { @Override public void run(@NotNull ProgressIndicator indicator) { for (Map.Entry<VirtualFile, List<FilePath>> e : sortedFiles.entrySet()) { try { executor.execute(e.getKey(), e.getValue()); } catch (final VcsException ex) { GitVcsConsoleWriter.getInstance(myProject).showMessage(ex.getMessage()); } } RefreshVFsSynchronously.refreshFiles(executor.getFilesToRefresh()); } }); } private interface LongOperationPerRootExecutor { void execute(@NotNull VirtualFile root, @NotNull List<? extends FilePath> files) throws VcsException; Collection<File> getFilesToRefresh(); } @TestOnly public void waitForAllEventsProcessedInTestMode() { assert ApplicationManager.getApplication().isUnitTestMode(); ((ChangeListManagerImpl)myChangeListManager).waitEverythingDoneInTestMode(); myExternalFilesProcessor.waitForEventsProcessedInTestMode(); } }
package org.objectweb.proactive.examples.components.userguide.multicast; import java.util.HashMap; import java.util.Map; import org.objectweb.fractal.adl.Factory; import org.objectweb.fractal.api.Component; import org.objectweb.fractal.util.Fractal; import org.objectweb.proactive.api.PADeployment; import org.objectweb.proactive.core.component.adl.Launcher; import org.objectweb.proactive.core.descriptor.data.ProActiveDescriptor; public class Main { private static String descriptor = ""; public static void main(String[] args) { if (args.length == 1) { descriptor = args[0]; } else { descriptor = Main.class.getResource("../deploymentDescriptorOld.xml").toString(); } System.err.println("Launch multicast example"); Main.manualLauncher(); //Main.proactiveLauncher(); } private static void proactiveLauncher() { System.err.println("Begin Launcher"); String arg0 = "-fractal"; // using the fractal component model String arg1 = "org.objectweb.proactive.examples.components.userguide.multicast.adl.Launcher"; // which component definition to load String arg2 = "runnable"; String arg3 = descriptor; // the deployment descriptor for proactive try { Launcher.main(new String[] { arg0, arg1, arg2, arg3 }); } catch (Exception e) { e.printStackTrace(); } } private static void manualLauncher() { try { Factory f = org.objectweb.proactive.core.component.adl.FactoryFactory.getFactory(); Map<String, Object> context = new HashMap<String, Object>(); ProActiveDescriptor deploymentDescriptor = PADeployment.getProactiveDescriptor(descriptor); context.put("deployment-descriptor", deploymentDescriptor); deploymentDescriptor.activateMappings(); Component launcher = null; launcher = (Component) f.newComponent( "org.objectweb.proactive.examples.components.userguide.multicast.adl.Launcher", context); if (launcher == null) { System.err.println("Component Launcher creation failed!"); return; } Fractal.getLifeCycleController(launcher).startFc(); //root // System.out.println("Components started!"); ((java.lang.Runnable) launcher.getFcInterface("runnable")).run(); Thread.sleep(10000); deploymentDescriptor.killall(false); System.exit(0); } catch (Exception e) { e.printStackTrace(); } } }
package org.exist.backup; import org.apache.commons.codec.binary.Base64; import org.bouncycastle.crypto.digests.RIPEMD160Digest; import org.exist.TestUtils; import org.exist.security.Account; import org.exist.security.MessageDigester; import org.exist.security.SecurityManager; import org.exist.test.ExistWebServer; import org.exist.xmldb.*; import org.junit.ClassRule; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.xmldb.api.DatabaseManager; import org.xmldb.api.base.Collection; import org.xmldb.api.base.XMLDBException; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.nio.file.*; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import static java.nio.charset.StandardCharsets.UTF_8; import static org.exist.util.FileUtils.withUnixSep; import static org.junit.Assert.*; @RunWith(Parameterized.class) public class XMLDBRestoreTest { @Rule public final ExistWebServer existWebServer = new ExistWebServer(true, false, true, true); private static final String PORT_PLACEHOLDER = "${PORT}"; @ClassRule public static final TemporaryFolder tempFolder = new TemporaryFolder(); @Parameterized.Parameters(name = "{0}") public static java.util.Collection<Object[]> data() { return Arrays.asList(new Object[][]{ {"local", XmldbURI.EMBEDDED_SERVER_URI.toString()}, {"remote", "xmldb:exist://localhost:" + PORT_PLACEHOLDER + "/xmlrpc"}, }); } @Parameterized.Parameter public String apiName; @Parameterized.Parameter(value = 1) public String baseUri; private final String getBaseUri() { return baseUri.replace(PORT_PLACEHOLDER, Integer.toString(existWebServer.getPort())); } @Test public void restoreValidZipBackup() throws IOException, XMLDBException { final Path zipFile = createZipBackupWithValidContent(); final TestRestoreListener listener = new TestRestoreListener(); final XmldbURI rootUri = XmldbURI.create(getBaseUri()).append(XmldbURI.ROOT_COLLECTION_URI); restoreBackup(rootUri, zipFile, null, listener); assertEquals(5, listener.restored.size()); assertEquals(0, listener.warnings.size()); assertEquals(0, listener.errors.size()); } @Test public void restoreValidDirBackup() throws IOException, XMLDBException { final Path contentsFile = createBackupWithValidContent(); final TestRestoreListener listener = new TestRestoreListener(); final XmldbURI rootUri = XmldbURI.create(getBaseUri()).append(XmldbURI.ROOT_COLLECTION_URI); restoreBackup(rootUri, contentsFile, null, listener); assertEquals(5, listener.restored.size()); assertEquals(0, listener.warnings.size()); assertEquals(0, listener.errors.size()); } @Test public void restoreIsBestEffortAttempt() throws IOException, XMLDBException { final Path contentsFile = createBackupWithInvalidContent(); final TestRestoreListener listener = new TestRestoreListener(); final XmldbURI rootUri = XmldbURI.create(getBaseUri()).append(XmldbURI.ROOT_COLLECTION_URI); restoreBackup(rootUri, contentsFile, null, listener); assertEquals(3, listener.restored.size()); assertEquals(1, listener.warnings.size()); } @Test public void restoreBackupWithDifferentAdminPassword() throws IOException, XMLDBException { final String backupAdminPassword = UUID.randomUUID().toString(); final Path contentsFile = 
createBackupWithDifferentAdminPassword(backupAdminPassword); final TestRestoreListener listener = new TestRestoreListener(); final XmldbURI rootUri = XmldbURI.create(getBaseUri()).append(XmldbURI.ROOT_COLLECTION_URI); restoreBackup(rootUri, contentsFile, backupAdminPassword, listener); assertEquals(3, listener.restored.size()); assertEquals(0, listener.warnings.size()); assertEquals(0, listener.errors.size()); } @Test public void restoreUserWithoutGroupIsPlacedInNoGroup() throws IOException, XMLDBException { final String username = UUID.randomUUID().toString() + "-user"; final Path contentsFile = createBackupWithUserWithoutPrimaryGroup(username); final TestRestoreListener listener = new TestRestoreListener(); final XmldbURI rootUri = XmldbURI.create(getBaseUri()).append(XmldbURI.ROOT_COLLECTION_URI); restoreBackup(rootUri, contentsFile, null, listener); assertEquals(2, listener.restored.size()); assertEquals(0, listener.warnings.size()); assertEquals(0, listener.errors.size()); final Collection collection = DatabaseManager.getCollection(rootUri.toString(), TestUtils.ADMIN_DB_USER, TestUtils.ADMIN_DB_PWD); final EXistUserManagementService userManagementService = (EXistUserManagementService) collection.getService("UserManagementService", "1.0"); final Account account = userManagementService.getAccount(username); assertNotNull(account); assertEquals(SecurityManager.UNKNOWN_GROUP, account.getPrimaryGroup()); assertArrayEquals(new String[] { SecurityManager.UNKNOWN_GROUP }, account.getGroups()); } @Test public void restoreUserWithNoSuchGroupIsPlacedInNoGroup() throws IOException, XMLDBException { final String username = UUID.randomUUID().toString() + "-user"; final Path contentsFile = createBackupWithUserInNoSuchGroup(username); final TestRestoreListener listener = new TestRestoreListener(); final XmldbURI rootUri = XmldbURI.create(getBaseUri()).append(XmldbURI.ROOT_COLLECTION_URI); restoreBackup(rootUri, contentsFile, null, listener); assertEquals(2, listener.restored.size()); assertEquals(0, listener.warnings.size()); assertEquals(0, listener.errors.size()); final Collection collection = DatabaseManager.getCollection(rootUri.toString(), TestUtils.ADMIN_DB_USER, TestUtils.ADMIN_DB_PWD); final EXistUserManagementService userManagementService = (EXistUserManagementService) collection.getService("UserManagementService", "1.0"); final Account account = userManagementService.getAccount(username); assertNotNull(account); assertEquals(SecurityManager.UNKNOWN_GROUP, account.getPrimaryGroup()); assertArrayEquals(new String[] { SecurityManager.UNKNOWN_GROUP }, account.getGroups()); } /** * Restores users with groups from /db/system/security/exist */ @Ignore("Not yet supported") @Test public void restoreUserWithGroupsFromExistRealm() throws IOException, XMLDBException { final Path backupPath = tempFolder.newFolder().toPath(); final Path restorePath = backupPath.resolve("db").resolve("system").resolve("security").resolve("exist").resolve(BackupDescriptor.COLLECTION_DESCRIPTOR); restoreUserWithGroups(backupPath, restorePath, 8); } /** * Restores users with groups from /db/system/security */ @Ignore("Not yet supported") @Test public void restoreUserWithGroupsFromSecurityCollection() throws IOException, XMLDBException { final Path backupPath = tempFolder.newFolder().toPath(); final Path restorePath = backupPath.resolve("db").resolve("system").resolve("security").resolve(BackupDescriptor.COLLECTION_DESCRIPTOR); restoreUserWithGroups(backupPath, restorePath, 9); } /** * Restores users with groups from 
/db/system */ @Ignore("Not yet supported") @Test public void restoreUserWithGroupsFromSystemCollection() throws IOException, XMLDBException { final Path backupPath = tempFolder.newFolder().toPath(); final Path restorePath = backupPath.resolve("db").resolve("system").resolve(BackupDescriptor.COLLECTION_DESCRIPTOR); restoreUserWithGroups(backupPath, restorePath, 10); } /** * Restores users with groups from /db */ @Test public void restoreUserWithGroupsFromDbCollection() throws IOException, XMLDBException { final Path backupPath = tempFolder.newFolder().toPath(); final Path restorePath = backupPath.resolve("db").resolve(BackupDescriptor.COLLECTION_DESCRIPTOR); restoreUserWithGroups(backupPath, restorePath, 11); } private void restoreUserWithGroups(final Path backupPath, final Path restorePath, final int expectedRestoredCount) throws IOException, XMLDBException { final String username = UUID.randomUUID().toString() + "-user"; final String primaryGroup = username; // personal group final String group1 = UUID.randomUUID().toString() + "-group-1"; final String group2 = UUID.randomUUID().toString() + "-group-2"; final String group3 = UUID.randomUUID().toString() + "-group-3"; final TestRestoreListener listener = new TestRestoreListener(); final XmldbURI rootUri = XmldbURI.create(getBaseUri()).append(XmldbURI.ROOT_COLLECTION_URI); createBackupWithUserInGroups(backupPath, username, primaryGroup, group1, group2, group3); restoreBackup(rootUri, restorePath, null, listener); assertEquals(expectedRestoredCount, listener.restored.size()); assertEquals(0, listener.warnings.size()); assertEquals(0, listener.errors.size()); final Collection collection = DatabaseManager.getCollection(rootUri.toString(), TestUtils.ADMIN_DB_USER, TestUtils.ADMIN_DB_PWD); final EXistUserManagementService userManagementService = (EXistUserManagementService) collection.getService("UserManagementService", "1.0"); final Account account = userManagementService.getAccount(username); assertNotNull(account); assertEquals(primaryGroup, account.getPrimaryGroup()); assertArrayEquals(new String[] { primaryGroup, group1, group2, group3 }, account.getGroups()); } private static void restoreBackup(final XmldbURI uri, final Path backup, @Nullable final String backupPassword, final RestoreServiceTaskListener listener) throws XMLDBException { final Collection collection = DatabaseManager.getCollection(uri.toString(), TestUtils.ADMIN_DB_USER, TestUtils.ADMIN_DB_PWD); final EXistRestoreService restoreService = (EXistRestoreService) collection.getService("RestoreService", "1.0"); restoreService.restore(backup.normalize().toAbsolutePath().toString(), backupPassword, listener, false); } private static Path createZipBackupWithValidContent() throws IOException { final Path dbContentsFile = createBackupWithValidContent(); final Path dbDir = dbContentsFile.getParent(); final Path zipFile = File.createTempFile("backup", ".zip", tempFolder.getRoot()).toPath(); try (final ZipOutputStream out = new ZipOutputStream(Files.newOutputStream(zipFile))) { Files.walkFileTree(dbDir, new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException { final Path zipEntryPath = dbDir.relativize(file); final String zipEntryName = withUnixSep(zipEntryPath.toString()); final ZipEntry zipEntry = new ZipEntry(zipEntryName); out.putNextEntry(zipEntry); Files.copy(file, out); out.closeEntry(); return FileVisitResult.CONTINUE; } }); } return zipFile; } private static Path 
createBackupWithValidContent() throws IOException { final Path backupDir = tempFolder.newFolder().toPath(); final Path db = Files.createDirectories(backupDir.resolve("db")); final Path col1 = Files.createDirectories(db.resolve("col1")); final String dbContents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db\" owner=\"SYSTEM\" group=\"dba\" mode=\"755\" created=\"2019-05-15T15:58:39.385+04:00\" version=\"1\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <subcollection name=\"col1\" filename=\"col1\"/>\n" + "</collection>"; final String col1Contents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/col1\" owner=\"admin\" group=\"dba\" mode=\"755\" created=\"2019-05-15T15:58:39.385+04:00\" deduplicate-blobs=\"false\" version=\"2\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <resource type=\"XMLResource\" name=\"doc1.xml\" owner=\"admin\" group=\"dba\" mode=\"644\" created=\"2019-05-15T15:58:48.638+04:00\" modified=\"2019-05-15T15:58:48.638+04:00\" filename=\"doc1.xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + " <resource type=\"XMLResource\" name=\"doc2.xml\" owner=\"admin\" group=\"dba\" mode=\"644\" created=\"2019-05-15T15:58:48.638+04:00\" modified=\"2019-05-15T15:58:48.638+04:00\" filename=\"doc2.xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + " <resource type=\"XMLResource\" name=\"doc3.xml\" owner=\"admin\" group=\"dba\" mode=\"644\" created=\"2019-05-15T15:58:49.618+04:00\" modified=\"2019-05-15T15:58:49.618+04:00\" filename=\"doc3.xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + "</collection>"; final String doc1 = "<doc1/>"; final String doc2 = "<doc2/>"; final String doc3 = "<doc3/>"; final Path dbContentsFile = Files.write(db.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), dbContents.getBytes(UTF_8)); final Path col1ContentsFile = Files.write(col1.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), col1Contents.getBytes(UTF_8)); Files.write(col1.resolve("doc1.xml"), doc1.getBytes(UTF_8)); Files.write(col1.resolve("doc2.xml"), doc2.getBytes(UTF_8)); Files.write(col1.resolve("doc3.xml"), doc3.getBytes(UTF_8)); return dbContentsFile; } private static Path createBackupWithInvalidContent() throws IOException { final Path backupDir = tempFolder.newFolder().toPath(); final Path col1 = Files.createDirectories(backupDir.resolve("db").resolve("col1")); final String contents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/col1\" owner=\"admin\" group=\"dba\" mode=\"755\" created=\"2019-05-15T15:58:39.385+04:00\" deduplicate-blobs=\"false\" version=\"2\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <resource type=\"XMLResource\" name=\"doc1.xml\" owner=\"admin\" group=\"dba\" mode=\"644\" created=\"2019-05-15T15:58:48.638+04:00\" modified=\"2019-05-15T15:58:48.638+04:00\" filename=\"doc1.xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + " <resource type=\"XMLResource\" name=\"doc2.xml\" owner=\"admin\" group=\"dba\" mode=\"644\" created=\"2019-05-15T15:58:48.638+04:00\" modified=\"2019-05-15T15:58:48.638+04:00\" filename=\"doc2.xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + " <resource type=\"XMLResource\" name=\"doc3.xml\" owner=\"admin\" group=\"dba\" mode=\"644\" created=\"2019-05-15T15:58:49.618+04:00\" 
modified=\"2019-05-15T15:58:49.618+04:00\" filename=\"doc3.xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + "</collection>"; final String doc1 = "<doc1/>"; final String doc2 = "<doc2>invalid"; final String doc3 = "<doc3/>"; final Path contentsFile = Files.write(col1.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), contents.getBytes(UTF_8)); Files.write(col1.resolve("doc1.xml"), doc1.getBytes(UTF_8)); Files.write(col1.resolve("doc2.xml"), doc2.getBytes(UTF_8)); Files.write(col1.resolve("doc3.xml"), doc3.getBytes(UTF_8)); return contentsFile; } private static Path createBackupWithDifferentAdminPassword(final String backupPassword) throws IOException { final Path backupDir = tempFolder.newFolder().toPath(); final Path accountsCol = Files.createDirectories(backupDir.resolve("db").resolve("system").resolve("security").resolve("exist").resolve("accounts")); final String contents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/system/security/exist/accounts\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T19:51:06.258+04:00\" deduplicate-blobs=\"false\" version=\"2\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <resource type=\"XMLResource\" name=\"admin.xml\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T19:51:06.319+04:00\" modified=\"2019-05-15T20:49:40.153+04:00\" filename=\"admin.xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + " <resource type=\"XMLResource\" name=\"guest.xml\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T19:51:06.512+04:00\" modified=\"2019-05-15T19:51:06.566+04:00\" filename=\"guest.xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>" + "</collection>"; // password for `admin` final String backupPasswordHash = Base64.encodeBase64String(ripemd160(backupPassword)); final String backupPasswordDigest = MessageDigester.byteArrayToHex(ripemd160("admin:exist:" + backupPassword)); final String admin = "<account xmlns=\"http: final String guest = "<account xmlns=\"http: final Path contentsFile = Files.write(accountsCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), contents.getBytes(UTF_8)); Files.write(accountsCol.resolve("admin.xml"), admin.getBytes(UTF_8)); Files.write(accountsCol.resolve("guest.xml"), guest.getBytes(UTF_8)); return contentsFile; } private static Path createBackupWithUserWithoutPrimaryGroup(final String username) throws IOException { final Path backupDir = tempFolder.newFolder().toPath(); final Path accountsCol = Files.createDirectories(backupDir.resolve("db").resolve("system").resolve("security").resolve("exist").resolve("accounts")); final String contents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/system/security/exist/accounts\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T15:58:39.385+04:00\" deduplicate-blobs=\"false\" version=\"2\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <resource type=\"XMLResource\" name=\"" + username + ".xml\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T15:58:48.638+04:00\" modified=\"2019-05-15T15:58:48.638+04:00\" filename=\"" + username + ".xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + "</collection>"; // account with no primary group! 
final String backupPasswordHash = Base64.encodeBase64String(ripemd160(username)); final String backupPasswordDigest = MessageDigester.byteArrayToHex(ripemd160(username + ":exist:" + username)); final String invalidUserDoc = "<account xmlns=\"http://exist-db.org/Configuration\" id=\"999\">\n" + "<password>{RIPEMD160}" + backupPasswordHash + "</password>\n" + "<digestPassword>" + backupPasswordDigest + "</digestPassword>\n" + "<expired>false</expired>\n" + "<enabled>true</enabled>\n" + "<umask>022</umask>\n" + "<name>" + username + "</name>\n" + "</account>"; final Path contentsFile = Files.write(accountsCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), contents.getBytes(UTF_8)); Files.write(accountsCol.resolve(username + ".xml"), invalidUserDoc.getBytes(UTF_8)); return contentsFile; } private static Path createBackupWithUserInNoSuchGroup(final String username) throws IOException { final Path backupDir = tempFolder.newFolder().toPath(); final Path accountsCol = Files.createDirectories(backupDir.resolve("db").resolve("system").resolve("security").resolve("exist").resolve("accounts")); final String contents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/system/security/exist/accounts\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T15:58:39.385+04:00\" deduplicate-blobs=\"false\" version=\"2\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <resource type=\"XMLResource\" name=\"" + username + ".xml\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T15:58:48.638+04:00\" modified=\"2019-05-15T15:58:48.638+04:00\" filename=\"" + username + ".xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + "</collection>"; // account with no such group! final String backupPasswordHash = Base64.encodeBase64String(ripemd160(username)); final String backupPasswordDigest = MessageDigester.byteArrayToHex(ripemd160(username + ":exist:" + username)); final String invalidUserDoc = "<account xmlns=\"http://exist-db.org/Configuration\" id=\"999\">\n" + "<password>{RIPEMD160}" + backupPasswordHash + "</password>\n" + "<digestPassword>" + backupPasswordDigest + "</digestPassword>\n" + "<group name=\"no-such-group\"/>\n" + "<expired>false</expired>\n" + "<enabled>true</enabled>\n" + "<umask>022</umask>\n" + "<name>" + username + "</name>\n" + "</account>\n"; final Path contentsFile = Files.write(accountsCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), contents.getBytes(UTF_8)); Files.write(accountsCol.resolve(username + ".xml"), invalidUserDoc.getBytes(UTF_8)); return contentsFile; } private static void createBackupWithUserInGroups(final Path backupDir, final String username, final String... 
groupNames) throws IOException { final Path dbCol = Files.createDirectories(backupDir.resolve("db")); final Path systemCol = Files.createDirectories(dbCol.resolve("system")); final Path securityCol = Files.createDirectories(systemCol.resolve("security")); final Path existRealmCol = Files.createDirectories(securityCol.resolve("exist")); final Path groupsCol = Files.createDirectories(existRealmCol.resolve("groups")); final Path accountsCol = Files.createDirectories(existRealmCol.resolve("accounts")); final String dbContents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db\" version=\"1\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2021-01-28T04:06:13.166Z\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <subcollection name=\"system\" filename=\"system\"/>\n" + "</collection>"; final String systemContents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/system\" version=\"1\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2021-01-28T04:06:13.166Z\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <subcollection name=\"security\" filename=\"security\"/>\n" + "</collection>"; final String securityContents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/system/security\" version=\"1\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2021-01-28T04:06:13.166Z\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <subcollection name=\"exist\" filename=\"exist\"/>\n" + "</collection>"; final String existRealmContents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/system/security/exist\" version=\"1\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2021-01-28T04:06:13.166Z\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <subcollection name=\"accounts\" filename=\"accounts\"/>\n" + " <subcollection name=\"groups\" filename=\"groups\"/>\n" + "</collection>"; final StringBuilder groupsContents = new StringBuilder( "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/system/security/exist/groups\" version=\"1\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2021-01-28T04:06:13.172Z\">\n" + " <acl entries=\"0\" version=\"1\"/>\n"); for (final String groupName : groupNames) { groupsContents.append( " <resource type=\"XMLResource\" name=\"" + groupName + ".xml\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T15:58:48.638+04:00\" modified=\"2019-05-15T15:58:48.638+04:00\" filename=\"" + groupName + ".xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n"); } groupsContents.append( "</collection>"); final String accountsContents = "<collection xmlns=\"http://exist.sourceforge.net/NS/exist\" name=\"/db/system/security/exist/accounts\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T15:58:39.385+04:00\" deduplicate-blobs=\"false\" version=\"2\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " <resource type=\"XMLResource\" name=\"" + username + ".xml\" owner=\"SYSTEM\" group=\"dba\" mode=\"770\" created=\"2019-05-15T15:58:48.638+04:00\" modified=\"2019-05-15T15:58:48.638+04:00\" filename=\"" + username + ".xml\" mimetype=\"application/xml\">\n" + " <acl entries=\"0\" version=\"1\"/>\n" + " </resource>\n" + "</collection>"; // account with no such group! 
final String backupPasswordHash = Base64.encodeBase64String(ripemd160(username)); final String backupPasswordDigest = MessageDigester.byteArrayToHex(ripemd160(username + ":exist:" + username)); final StringBuilder userDoc = new StringBuilder( "<account xmlns=\"http://exist-db.org/Configuration\" id=\"999\">\n" + "<password>{RIPEMD160}" + backupPasswordHash + "</password>\n" + "<digestPassword>" + backupPasswordDigest + "</digestPassword>\n"); for (final String groupName : groupNames) { userDoc.append( "<group name=\"" + groupName + "\"/>\n"); } userDoc.append( "<expired>false</expired>\n" + "<enabled>true</enabled>\n" + "<umask>022</umask>\n" + "<name>" + username + "</name>\n" + "</account>\n"); final Path dbContentsFile = Files.write(dbCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), dbContents.getBytes(UTF_8)); final Path systemContentsFile = Files.write(systemCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), systemContents.getBytes(UTF_8)); final Path securityContentsFile = Files.write(securityCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), securityContents.getBytes(UTF_8)); final Path existRealmContentsFile = Files.write(existRealmCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), existRealmContents.getBytes(UTF_8)); final Path groupsContentsFile = Files.write(groupsCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), groupsContents.toString().getBytes(UTF_8)); final Path accountsContentsFile = Files.write(accountsCol.resolve(BackupDescriptor.COLLECTION_DESCRIPTOR), accountsContents.getBytes(UTF_8)); int groupId = 123; for (final String groupName : groupNames) { final String groupDoc = "<group xmlns=\"http://exist-db.org/Configuration\" id=\"" + (groupId++) + "\">\n" + " <manager name=\"admin\"/>\n" + " <metadata key=\"http://exist-db.org/security/description\">Group named: " + groupName + "</metadata>\n" + " <name>" + groupName + "</name>\n" + "</group>"; Files.write(groupsCol.resolve(groupName + ".xml"), groupDoc.getBytes(UTF_8)); } Files.write(accountsCol.resolve(username + ".xml"), userDoc.toString().getBytes(UTF_8)); } private static byte[] ripemd160(final String s) { final RIPEMD160Digest digester = new RIPEMD160Digest(); final byte[] data = s.getBytes(); digester.update(data, 0, data.length); final byte[] digest = new byte[digester.getDigestSize()]; digester.doFinal(digest, 0); return digest; } private static class TestRestoreListener extends AbstractRestoreServiceTaskListener { final List<String> restored = new ArrayList<>(); final List<String> warnings = new ArrayList<>(); final List<String> errors = new ArrayList<>(); @Override public void createdCollection(final String collection) { restored.add(collection); } @Override public void restoredResource(final String resource) { restored.add(resource); } @Override public void info(final String message) { } @Override public void warn(final String message) { warnings.add(message); } @Override public void error(final String message) { errors.add(message); } } }
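The account fixtures built by this test store each password in two forms: the <password> element holds "{RIPEMD160}" followed by the Base64 of RIPEMD-160(password), and the <digestPassword> element holds the hex of RIPEMD-160("username:exist:password"). The short standalone sketch below reproduces that computation using the same Bouncy Castle, commons-codec and MessageDigester calls the test itself uses; the username and password literals are arbitrary example values.

import org.apache.commons.codec.binary.Base64;
import org.bouncycastle.crypto.digests.RIPEMD160Digest;
import org.exist.security.MessageDigester;

public class BackupPasswordFieldsExample {

    // Same digest helper as in the test above.
    private static byte[] ripemd160(final String s) {
        final RIPEMD160Digest digester = new RIPEMD160Digest();
        final byte[] data = s.getBytes();
        digester.update(data, 0, data.length);
        final byte[] digest = new byte[digester.getDigestSize()];
        digester.doFinal(digest, 0);
        return digest;
    }

    public static void main(final String[] args) {
        final String username = "some-user";      // example value
        final String password = "some-password";  // example value

        // <password>{RIPEMD160}...</password> field: Base64 of RIPEMD-160(password)
        final String passwordHash = Base64.encodeBase64String(ripemd160(password));

        // <digestPassword>...</digestPassword> field: hex of RIPEMD-160("username:exist:password")
        final String digestPassword = MessageDigester.byteArrayToHex(ripemd160(username + ":exist:" + password));

        System.out.println("{RIPEMD160}" + passwordHash);
        System.out.println(digestPassword);
    }
}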