answer
stringlengths
17
10.2M
package squeek.veganoption.integration.tic;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.EnumChatFormatting;
import net.minecraftforge.oredict.OreDictionary;
import squeek.veganoption.content.ContentHelper;
import squeek.veganoption.content.modules.Bioplastic;
import squeek.veganoption.content.registry.CompostRegistry;
import squeek.veganoption.content.registry.CompostRegistry.FoodSpecifier;
import squeek.veganoption.helpers.LangHelper;
import squeek.veganoption.integration.IntegrationHandler;
import squeek.veganoption.integration.IntegratorBase;
import cpw.mods.fml.common.event.FMLInterModComms;

// TODO: Faux feather as a valid fletching material
/**
 * Tinkers' Construct (TiC) integration: registers VeganOption's bioplastic as a
 * TiC tool material/part material via FML inter-mod comms (IMC), blacklists
 * TiC foods from composting, and hooks plastic rods in as tool rods.
 */
public class TConstruct extends IntegratorBase
{
	public static final int MATID_PLASTIC = 1000; // what MFR uses
	public static final String MATNAME_PLASTIC = "Plastic"; // what MFR uses
	/**
	 * VO specific, necessary to make sure that VO's MaterialSet (shard/rod combo)
	 * doesn't overwrite the MaterialSet that uses {@link MATNAME_PLASTIC} added
	 * by the addPartBuilderMaterial IMC (in either VO or MFR)
	 */
	public static final String KEY_PLASTICROD_MATERIALSET = "BioplasticRodSet";
	// TiC registry names for items referenced below
	public static final String ITEMNAME_TOOLROD = "toolRod";
	public static final String ITEMNAME_JERKY = "jerky";
	public static final String ITEMNAME_GOLDENHEAD = "goldHead";
	public static final String ITEMNAME_DIAMONDAPPLE = "diamondApple";
	public static final String ITEMNAME_STRANGEFOOD = "strangeFood";

	/**
	 * Registers the plastic-material TiC tool rod under VO's plastic-rod
	 * ore dictionary name so it is interchangeable with VO's own rods.
	 */
	@Override
	public void oredict()
	{
		super.oredict();
		ItemStack plasticToolRod = new ItemStack(getItem(ITEMNAME_TOOLROD), 1, MATID_PLASTIC);
		OreDictionary.registerOre(ContentHelper.plasticRodOreDict, plasticToolRod);
	}

	/**
	 * Registers the plastic tool material and blacklists TiC's
	 * meat/diamond-based foods from VO's compost registry.
	 */
	@Override
	public void init()
	{
		super.init();
		registerPlasticToolMaterial();

		CompostRegistry.blacklist(new FoodSpecifier()
		{
			// full registry names of the TiC foods that must not compost
			private final Set<String> itemNameBlacklist = new HashSet<String>(
				Arrays.asList(
					fullItemName(ITEMNAME_JERKY),
					fullItemName(ITEMNAME_GOLDENHEAD),
					fullItemName(ITEMNAME_DIAMONDAPPLE),
					fullItemName(ITEMNAME_STRANGEFOOD)
				)
			);

			@Override
			public boolean matches(ItemStack itemStack)
			{
				// meat and diamonds are bad for composting
				String itemName = Item.itemRegistry.getNameForObject(itemStack.getItem());
				return itemNameBlacklist.contains(itemName);
			}
		});
	}

	/**
	 * Registers the plastic tool material with TiC via IMC, but only when MFR
	 * is absent (MFR already registers an identical material with the same id).
	 * Always registers a shard/rod MaterialSet so plastic rods are usable.
	 */
	public void registerPlasticToolMaterial()
	{
		NBTTagCompound tag = new NBTTagCompound();
		if (!IntegrationHandler.integrationExists(IntegrationHandler.MODID_MINEFACTORY_RELOADED))
		{
			// material values mirrored from MFR's plastic
			tag.setInteger("Id", MATID_PLASTIC);
			tag.setString("Name", MATNAME_PLASTIC);
			tag.setString("localizationString", LangHelper.prependModId("tic.material.plastic"));
			tag.setInteger("Durability", 1500);
			tag.setInteger("MiningSpeed", 600);
			tag.setInteger("HarvestLevel", 1);
			tag.setInteger("Attack", -1);
			tag.setFloat("HandleModifier", 0.1f);
			tag.setFloat("Bow_ProjectileSpeed", 4.2f);
			tag.setInteger("Bow_DrawSpeed", 20);
			tag.setFloat("Projectile_Mass", 0.25f);
			tag.setFloat("Projectile_Fragility", 0.5f);
			tag.setString("Style", EnumChatFormatting.GRAY.toString());
			tag.setInteger("Color", 0xFFADADAD);
			FMLInterModComms.sendMessage(modID, "addMaterial", tag);

			// second IMC: make bioplastic usable in the TiC part builder
			tag = new NBTTagCompound();
			tag.setInteger("MaterialId", MATID_PLASTIC);
			tag.setTag("Item", new ItemStack(Bioplastic.bioplastic).writeToNBT(new NBTTagCompound()));
			tag.setInteger("Value", 1);
			FMLInterModComms.sendMessage(modID, "addPartBuilderMaterial", tag);

			// without MFR, there is no need to register a shard, so just add the rod
			// note: this doesn't really do much afaik
			registerShardAndRod(KEY_PLASTICROD_MATERIALSET, null, new ItemStack(Bioplastic.plasticRod), MATID_PLASTIC);
		}
		else
		{
			// mfr registers plastic sheets as shards, so do the same
			registerShardAndRod(KEY_PLASTICROD_MATERIALSET, new ItemStack(Bioplastic.bioplastic), new ItemStack(Bioplastic.plasticRod), MATID_PLASTIC);
		}
	}

	// avoid the oredict lookup every getRealHandle call
	// (lazily populated cache of everything registered as a plastic rod)
	public static List<ItemStack> plasticRodItems = null;

	/**
	 * Allow plasticRod to be used as a tool rod directly by replacing it at build-time with an actual ToolRod
	 *
	 * Called from squeek.veganoption.asm.Hooks, which is called from tconstruct.tools.TinkerToolEvents.buildTool
	 * I really don't want to have the build depend on TiC, so I just hook in through ASM.
	 * Simply including ToolBuildEvent.java and listening for the event didn't seem to work.
	 *
	 * This seems to be the 'correct' way to allow usage of non-ToolRod items as ToolRods
	 */
	public static ItemStack getRealHandle(ItemStack itemStack)
	{
		if (plasticRodItems == null)
			plasticRodItems = OreDictionary.getOres(ContentHelper.plasticRodOreDict);

		for (ItemStack item : plasticRodItems)
		{
			if (OreDictionary.itemMatches(item, itemStack, false))
			{
				// swap in TiC's real toolRod item at the plastic material id
				Item toolRodItem = (Item) Item.itemRegistry.getObject(IntegrationHandler.MODID_TINKERS_CONSTRUCT + ":" + ITEMNAME_TOOLROD);
				if (toolRodItem != null)
				{
					return new ItemStack(toolRodItem, 1, MATID_PLASTIC);
				}
			}
		}
		// not a plastic rod (or TiC rod item missing): leave the stack untouched
		return itemStack;
	}

	public static final String patternBuilderClassName = "tconstruct.library.crafting.PatternBuilder";
	// reflective handles into TiC's PatternBuilder singleton, resolved once at class load;
	// both stay null (with a logged stack trace) if TiC's class layout differs
	public static Object PatternBuilder = null;
	public static Method registerMaterialSet = null;
	static
	{
		try
		{
			Class<?> patternBuilderClass = Class.forName(patternBuilderClassName);
			Field instanceField = patternBuilderClass.getDeclaredField("instance");
			PatternBuilder = instanceField.get(null);
			registerMaterialSet = patternBuilderClass.getDeclaredMethod("registerMaterialSet", String.class, ItemStack.class, ItemStack.class, int.class);
		}
		catch (Exception e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Invokes TiC's PatternBuilder.registerMaterialSet reflectively.
	 * NOTE(review): if the static initializer above failed, registerMaterialSet
	 * is null and this throws an NPE that is caught and printed here.
	 */
	public void registerShardAndRod(String key, ItemStack shard, ItemStack rod, int matID)
	{
		try
		{
			registerMaterialSet.invoke(PatternBuilder, key, shard, rod, matID);
		}
		catch (Exception e)
		{
			e.printStackTrace();
		}
	}
}
package org.lucee.mongodb;

import java.util.ArrayList;
import java.util.List;
import java.util.Iterator;
import java.util.Map.Entry;

import org.lucee.mongodb.support.DBCollectionImplSupport;
import org.lucee.mongodb.util.print;

import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.WriteConcern;

import lucee.runtime.PageContext;
import lucee.runtime.dump.DumpData;
import lucee.runtime.dump.DumpProperties;
import lucee.runtime.dump.DumpTable;
import lucee.runtime.exp.PageException;
import lucee.runtime.type.Array;
import lucee.runtime.type.Collection;
import lucee.runtime.type.Collection.Key;
import lucee.runtime.type.Struct;
import lucee.runtime.type.dt.DateTime;

/**
 * CFML-facing wrapper around a MongoDB {@link DBCollection}. All operations
 * are exposed as dynamic method calls dispatched by name in {@link #call};
 * property access is deliberately unsupported.
 */
public class DBCollectionImpl extends DBCollectionImplSupport {

	// the wrapped driver collection; all methods delegate to it
	private DBCollection coll;

	public DBCollectionImpl(DBCollection coll) {
		this.coll = coll;
	}

	@Override
	public Object get(PageContext pc, Key key, Object defaultValue) {
		throw new UnsupportedOperationException("there are no properties for this DBCollection!");
	}

	@Override
	public Object get(PageContext pc, Key key) throws PageException {
		throw new UnsupportedOperationException("there are no properties for this DBCollection!");
	}

	@Override
	public Object set(PageContext pc, Key propertyName, Object value) throws PageException {
		throw new UnsupportedOperationException("you cannot set a property to DBCollection!");
	}

	@Override
	public Object setEL(PageContext pc, Key propertyName, Object value) {
		throw new UnsupportedOperationException("you cannot set a property to DBCollection!");
	}

	/**
	 * Dispatches a dynamic CFML method call (e.g. {@code coll.find(...)}) to the
	 * matching MongoDB driver operation. Arguments are coerced from CFML types
	 * via the inherited caster/decision helpers; results are converted back with
	 * {@code toCFML}.
	 *
	 * @param pc current page context (unused by most operations)
	 * @param methodName name of the operation to run
	 * @param args positional CFML arguments
	 * @return the CFML-converted driver result, or null for void operations
	 * @throws PageException if the method is unknown, the argument count is
	 *         invalid, or an argument cannot be coerced
	 */
	@Override
	public Object call(PageContext pc, Key methodName, Object[] args) throws PageException {

		// aggregate
		if (methodName.equals("aggregate")) {
			int len = checkArgLength("aggregate", args, 1, -1); // no length limitation
			List<DBObject> pipeline = new ArrayList<DBObject>();
			// Pipeline array as single argument
			if (len == 1 && decision.isArray(args[0])) {
				Array arr = caster.toArray(args[0]);
				if (arr.size() == 0)
					throw exp.createApplicationException("the array passed to the function aggregate needs at least 1 element");
				Iterator<Object> it = arr.valueIterator();
				while (it.hasNext()) {
					pipeline.add(toDBObject(it.next()));
				}
			}
			else {
				// First argument is first operation, second argument is array of additional operations
				if (len == 2 && decision.isArray(args[1])) {
					Array arr = caster.toArray(args[1]);
					pipeline.add(toDBObject(args[0]));
					Iterator<Object> it = arr.valueIterator();
					while (it.hasNext()) {
						pipeline.add(toDBObject(it.next()));
					}
				}
				// N arguments of pipeline operations
				else {
					for (int i = 0; i < len; i++) {
						pipeline.add(toDBObject(args[i]));
					}
				}
			}
			return toCFML(coll.aggregate(pipeline));
		}
		// dataSize
		if (methodName.equals("dataSize")) {
			checkArgLength("dataSize", args, 0, 0);
			return toCFML(coll.getStats().get("size"));
		}
		// distinct
		if (methodName.equals("distinct")) {
			int len = checkArgLength("distinct", args, 1, 2);
			if (len == 1) {
				return toCFML(coll.distinct(caster.toString(args[0])));
			}
			else if (len == 2) {
				return toCFML(coll.distinct(caster.toString(args[0]), toDBObject(args[1])));
			}
		}
		// drop
		if (methodName.equals("drop")) {
			checkArgLength("drop", args, 0, 0);
			coll.drop();
			return null;
		}
		// dropIndex
		if (methodName.equals("dropIndex")) {
			checkArgLength("dropIndex", args, 1, 1);
			// argument may be an index keys document or an index name
			DBObject dbo = toDBObject(args[0], null);
			if (dbo != null) coll.dropIndex(dbo);
			else coll.dropIndex(caster.toString(args[0]));
			return null;
		}
		// dropIndexes
		if (methodName.equals("dropIndexes")) {
			int len = checkArgLength("dropIndexes", args, 0, 1);
			if (len == 0) {
				coll.dropIndexes();
				return null;
			}
			else if (len == 1) {
				coll.dropIndexes(caster.toString(args[0]));
				return null;
			}
		}
		// ensureIndex
		if (methodName.equals("ensureIndex")) {
			int len = checkArgLength("ensureIndex", args, 1, 3);
			if (len == 1) {
				// keys document or index name
				DBObject dbo = toDBObject(args[0], null);
				if (dbo != null) coll.ensureIndex(dbo);
				else coll.ensureIndex(caster.toString(args[0]));
				return null;
			}
			if (len == 2) {
				// (keys, options-document) or (keys, index-name)
				DBObject p1 = toDBObject(args[0]);
				DBObject p2 = toDBObject(args[1], null);
				if (p2 != null) coll.ensureIndex(p1, p2);
				else coll.ensureIndex(p1, caster.toString(args[1]));
				return null;
			}
			else if (len == 3) {
				// (keys, name, unique)
				coll.ensureIndex(toDBObject(args[0]), caster.toString(args[1]), caster.toBooleanValue(args[2]));
				return null;
			}
		}
		// getStats
		if (methodName.equals("getStats") || methodName.equals("stats")) {
			checkArgLength("getStats", args, 0, 0);
			return toCFML(coll.getStats());
		}
		// getIndexes
		if (methodName.equals("getIndexes") || methodName.equals("getIndexInfo")) {
			checkArgLength(methodName.getString(), args, 0, 0);
			return toCFML(coll.getIndexInfo());
		}
		// find
		if (methodName.equals("find")) {
			int len = checkArgLength("find", args, 0, 3);
			DBCursor cursor = null;
			if (len == 0) {
				cursor = coll.find();
			}
			else if (len == 1) {
				cursor = coll.find(toDBObject(args[0]));
			}
			else if (len == 2) {
				cursor = coll.find(toDBObject(args[0]), toDBObject(args[1]));
			}
			else if (len == 3) {
				// third argument is the number of documents to skip
				cursor = coll.find(toDBObject(args[0]), toDBObject(args[1])).skip(caster.toIntValue(args[2]));
			}
			return toCFML(cursor);
		}
		// findOne
		else if (methodName.equals("findOne")) {
			int len = checkArgLength("findOne", args, 0, 3);
			DBObject obj = null;
			if (len == 0) {
				obj = coll.findOne();
			}
			else if (len == 1) {
				// query document, or a plain value treated as an _id lookup
				DBObject arg1 = toDBObject(args[0], null);
				if (arg1 != null) obj = coll.findOne(arg1);
				else obj = coll.findOne(args[0]);
			}
			else if (len == 2) {
				DBObject arg1 = toDBObject(args[0], null);
				if (arg1 != null) obj = coll.findOne(arg1, toDBObject(args[1]));
				else obj = coll.findOne(args[0], toDBObject(args[1]));
			}
			else if (len == 3) {
				obj = coll.findOne(toDBObject(args[0]), toDBObject(args[1]), toDBObject(args[2]));
			}
			return toCFML(obj);
		}
		// findAndModify
		if (methodName.equals("findAndModify")) {
			int len = checkArgLength("findAndModify", args, 2, 3);
			DBObject obj = null;
			if (len == 2) {
				obj = coll.findAndModify(toDBObject(args[0]), toDBObject(args[1]));
			}
			if (len == 3) {
				obj = coll.findAndModify(toDBObject(args[0]), toDBObject(args[1]), toDBObject(args[2]));
			}
			// TODO more options
			return toCFML(obj);
		}
		// group
		if (methodName.equals("group")) {
			int len = checkArgLength("group", args, 1, 1);
			if (len == 1) {
				return toCFML(coll.group(toDBObject(args[0])));
			}
		}
		// insert
		if (methodName.equals("insert")) {
			checkArgLength("insert", args, 1, 1);
			return toCFML(coll.insert(toDBObjectArray(args[0])));
		}
		// mapReduce
		if (methodName.equals("mapReduce")) {
			int len = checkArgLength("mapReduce", args, 1, 1);
			if (len == 1) {
				return toCFML(coll.mapReduce(toDBObject(args[0])));
			}
		}
		// reIndex
		if (methodName.equals("reIndex") || methodName.equals("resetIndexCache")) {
			checkArgLength("resetIndexCache", args, 0, 0);
			coll.resetIndexCache();
			return null;
		}
		// remove
		if (methodName.equals("remove")) {
			checkArgLength("remove", args, 1, 1);
			return toCFML(coll.remove(toDBObject(args[0])));
		}
		// rename
		if (methodName.equals("rename") || methodName.equals("renameCollection")) {
			int len = checkArgLength(methodName.getString(), args, 1, 2);
			if (len == 1) {
				return toCFML(coll.rename(caster.toString(args[0])));
			}
			else if (len == 2) {
				// second argument: dropTarget flag
				return toCFML(coll.rename(caster.toString(args[0]), caster.toBooleanValue(args[1])));
			}
		}
		// save
		if (methodName.equals("save")) {
			checkArgLength("save", args, 1, 1);
			return toCFML(coll.save(toDBObject(args[0])));
		}
		// storageSize
		if (methodName.equals("storageSize")) {
			checkArgLength("storageSize", args, 0, 0);
			return toCFML(coll.getStats().get("storageSize"));
		}
		// totalIndexSize
		if (methodName.equals("totalIndexSize")) {
			checkArgLength("totalIndexSize", args, 0, 0);
			return toCFML(coll.getStats().get("totalIndexSize"));
		}
		// update
		if (methodName.equals("update")) {
			int len = checkArgLength("update", args, 2, 4);
			if (len == 2) {
				return toCFML(coll.update(toDBObject(args[0]), toDBObject(args[1])));
			}
			else if (len == 3) {
				// (query, update, upsert) — multi defaults to false
				return toCFML(coll.update(toDBObject(args[0]), toDBObject(args[1]), caster.toBooleanValue(args[2]), false));
			}
			else if (len == 4) {
				// (query, update, upsert, multi)
				return toCFML(coll.update(toDBObject(args[0]), toDBObject(args[1]), caster.toBooleanValue(args[2]), caster.toBooleanValue(args[3])));
			}
		}

		// unknown method name (or a known name that fell through a length check)
		String functionNames = "aggregate,dataSize,distinct,drop,dropIndex,dropIndexes,ensureIndex,stats,getIndexes,find,findOne,findAndModify,"
			+ "group,insert,mapReduce,reIndex,remove,rename,save,storageSize,totalIndexSize,update";
		throw exp.createApplicationException("function "+methodName+" does not exist existing functions are ["+functionNames+"]");
	}

	@Override
	public Object callWithNamedValues(PageContext pc, Key methodName, Struct args) throws PageException {
		throw new UnsupportedOperationException("named arguments are not supported yet!");
	}

	/**
	 * Renders the full collection as a dump table, one row per document.
	 * Fix: the source had the bare token sequence {@code maxlevel DBObject obj;}
	 * (a syntax error); the decrement {@code maxlevel--;} was restored so nested
	 * document dumps consume one level of the depth budget.
	 */
	@Override
	public DumpData toDumpData(PageContext pageContext, int maxlevel, DumpProperties dp) {
		DBCursor cursor = coll.find();
		Iterator<DBObject> it = cursor.iterator();
		DumpTable table = new DumpTable("struct", "#339933", "#8e714e", "#000000");
		table.setTitle("DBCollection");
		maxlevel--; // one level consumed by this table; children dump at maxlevel-1
		DBObject obj;
		while (it.hasNext()) {
			obj = it.next();
			table.appendRow(0, __toDumpData(toCFML(obj), pageContext, maxlevel, dp));
		}
		return table;
	}

	/** @return the wrapped driver collection */
	public DBCollection getDBCollection() {
		return coll;
	}
}
package restservices.publish;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.json.JSONObject;

import restservices.RestServices;
import restservices.consume.RestConsumeException;
import restservices.consume.RestConsumer;
import restservices.proxies.ServiceDefinition;
import restservices.publish.RestPublishException.RestExceptionType;
import restservices.util.Utils;

import com.google.common.collect.ImmutableMap;
import com.mendix.core.Core;
import com.mendix.core.CoreException;
import com.mendix.externalinterface.connector.RequestHandler;
import com.mendix.m2ee.api.IMxRuntimeRequest;
import com.mendix.m2ee.api.IMxRuntimeResponse;
import com.mendix.modules.webservices.WebserviceException;
import com.mendix.systemwideinterfaces.core.IContext;

import communitycommons.XPath;

/**
 * Central HTTP entry point of the RestServices module: registers itself with
 * the Mendix runtime under the REST path and dispatches incoming requests to
 * published services and microflows.
 */
public class RestServiceHandler extends RequestHandler{

	// singleton instance registered with the Mendix runtime
	private static RestServiceHandler instance = null;
	// set once start() has registered the handler; guards loadConfig
	private static boolean started = false;

	/**
	 * Registers the handler with the runtime (idempotent) and loads all
	 * persisted service definitions.
	 */
	public static void start(IContext context) throws Exception {
		if (instance == null) {
			RestServices.LOGPUBLISH.info("Starting RestServices module...");
			instance = new RestServiceHandler();
			Core.addRequestHandler(RestServices.PATH_REST, instance);
			started = true;
			loadConfig(context);
			RestServices.LOGPUBLISH.info("Starting RestServices module... DONE");
		}
	}

	// loads every stored ServiceDefinition; failures are logged, not thrown
	private static void loadConfig(IContext context) throws CoreException {
		for (ServiceDefinition def : XPath.create(context, ServiceDefinition.class).all()) {
			loadConfig(def, false);
		}
	}

	/**
	 * (Re)loads one service definition after running consistency checks.
	 *
	 * @param throwOnFailure when true, a failed check raises IllegalStateException
	 *        instead of only being logged
	 */
	public static void loadConfig(ServiceDefinition def, boolean throwOnFailure) {
		if (!started)
			return;

		RestServices.LOGPUBLISH.info("Loading service " + def.getName()+ "...");

		String errors = null;
		try {
			ConsistencyChecker.check(def);
		}
		catch(Exception e) {
			errors = "Failed to run consistency checks: " + e.getMessage();
		}

		if (errors != null) {
			String msg = "Failed to load service '" + def.getName() + "': \n" + errors;
			RestServices.LOGPUBLISH.error(msg);
			if (throwOnFailure)
				throw new IllegalStateException(msg);
		}
		else {
			RestServices.LOGPUBLISH.info("Reloading definition of service '" + def.getName() + "'");
			PublishedService service = new PublishedService(def);
			RestServices.registerService(service.getName(), service);
			RestServices.LOGPUBLISH.info("Loading service " + def.getName()+ "... DONE");
		}
	}

	/**
	 * Handles one incoming REST request: resolves the target service or
	 * published microflow from the first path part, authenticates, runs the
	 * operation inside a transaction, and maps failures to error pages.
	 * Transactions are rolled back on any exception; the request is always
	 * disposed in the finally block.
	 */
	@Override
	public void processRequest(IMxRuntimeRequest req, IMxRuntimeResponse resp, String path) {
		long start = System.currentTimeMillis();

		HttpServletRequest request = (HttpServletRequest) req.getOriginalRequest();
		HttpServletResponse response = (HttpServletResponse) resp.getOriginalResponse();

		String method = request.getMethod();
		String requestStr =  method + " " + path;

		URL u;
		try {
			u = new URL(request.getRequestURL().toString());
		} catch (MalformedURLException e1) {
			throw new IllegalStateException(e1);
		}
		// strip the leading "" and "rest" segments; parts[0] is the service name
		String[] basePath = u.getPath().split("/");
		String[] parts = Arrays.copyOfRange(basePath, 2, basePath.length);

		response.setCharacterEncoding(RestServices.UTF8);
		response.setHeader("Expires", "-1"); // disable caching of responses

		if (RestServices.LOGPUBLISH.isDebugEnabled())
			RestServices.LOGPUBLISH.debug("incoming request: " + Utils.getRequestUrl(request));

		RestServiceRequest rsr = new RestServiceRequest(request, response);
		try {
			PublishedService service = null;
			PublishedMicroflow mf = null;

			if (parts.length > 0) {
				// a path part may name either a service or a published microflow
				service = RestServices.getService(parts[0]);
				mf = RestServices.getPublishedMicroflow(parts[0]);
				if (service == null && mf == null)
					throw new RestPublishException(RestExceptionType.NOT_FOUND, "Unknown service: '" + parts[0] + "'");
			}

			// metadata requests are public; everything else requires the service's role
			if (service != null && !isMetaDataRequest(method, parts, rsr) && !rsr.authenticate(service.getRequiredRole(), getSessionFromRequest(req))){
				throw new RestPublishException(RestExceptionType.UNAUTHORIZED, "Unauthorized. Please provide valid credentials or set up a Mendix user session");
			}
			else if (mf != null && !rsr.authenticate(mf.getRequiredRole(), getSessionFromRequest(req))) {
				throw new RestPublishException(RestExceptionType.UNAUTHORIZED, "Unauthorized. Please provide valid credentials or set up a Mendix user session");
			}

			if (rsr.getContext() != null) {
				rsr.startTransaction();
				RestServiceRequest.setCurrentRequest(rsr);
			}

			if (mf != null) {
				if (isMetaDataRequest(method, parts, rsr))
					mf.serveDescription(rsr);
				else
					mf.execute(rsr);
			}
			else
				dispatch(method, parts, rsr, service);

			if (rsr.getContext() != null && rsr.getContext().isInTransaction())
				rsr.getContext().endTransaction();

			if (RestServices.LOGPUBLISH.isDebugEnabled())
				RestServices.LOGPUBLISH.debug("Served " + requestStr + " in " + (System.currentTimeMillis() - start) + "ms.");
		}
		catch(RestPublishException rre) {
			// expected publish-side failures: warn, roll back, serve typed error page
			RestServices.LOGPUBLISH.warn("Failed to serve " + requestStr + " " + rre.getType() + " " + rre.getMessage());
			rollback(rsr);
			serveErrorPage(rsr, rre.getStatusCode(), rre.getType().toString() + ": " + requestStr, rre.getMessage());
		}
		catch(Throwable e) {
			rollback(rsr);
			// WebserviceException root cause means invalid client data -> 400, else 500
			Throwable cause = ExceptionUtils.getRootCause(e);
			if (cause instanceof WebserviceException) {
				RestServices.LOGPUBLISH.warn("Invalid request " + requestStr + ": " +cause.getMessage());
				serveErrorPage(rsr, HttpStatus.SC_BAD_REQUEST, "Invalid request data at: " + requestStr, cause.getMessage());
			}
			else {
				RestServices.LOGPUBLISH.error("Failed to serve " + requestStr + ": " +e.getMessage(), e);
				serveErrorPage(rsr, HttpStatus.SC_INTERNAL_SERVER_ERROR, "Failed to serve: " + requestStr, "An internal server error occurred. Please check the application logs or contact a system administrator.");
			}
		}
		finally {
			rsr.dispose();
		}
	}

	// GET <service>?about is the public metadata request
	private boolean isMetaDataRequest(String method, String[] parts, RestServiceRequest rsr) {
		return "GET".equals(method) && parts.length == 1 && rsr.request.getParameter(RestServices.PARAM_ABOUT) != null;
	}

	/** Copies all request parameters into the target JSON object as strings. */
	public static void requestParamsToJsonMap(RestServiceRequest rsr, JSONObject target) {
		for (String param : rsr.request.getParameterMap().keySet())
			target.put(param, rsr.request.getParameter(param));
	}

	// rolls back the active transaction, if any (null-safe)
	private void rollback(RestServiceRequest rsr) {
		if (rsr != null && rsr.getContext() != null && rsr.getContext().isInTransaction())
			rsr.getContext().rollbackTransAction();
	}

	/**
	 * Resets the response and writes an error document in the negotiated
	 * content type (HTML, JSON or XML).
	 */
	private void serveErrorPage(RestServiceRequest rsr, int status, String title, String detail) {
		rsr.response.reset();
		rsr.response.setStatus(status);

		// request authentication
		if (status == HttpStatus.SC_UNAUTHORIZED)
			rsr.response.addHeader(RestServices.HEADER_WWWAUTHENTICATE, "Basic realm=\"Rest Services\"");

		rsr.startDoc();

		switch(rsr.getResponseContentType()) {
		default:
		case HTML:
			rsr.write("<h1>" + title + "</h1><p>" + detail + "</p><p>Status code:" + status + "</p>");
			break;
		case JSON:
		case XML:
			rsr.datawriter.value(new JSONObject(ImmutableMap.of("error", (Object) title, "status", status, "message", detail)));
			break;
		}

		rsr.endDoc();
	}

	/**
	 * Routes a service request by path depth:
	 * 0 parts: GET service overview; 1 part: listing/description/count (GET) or
	 * create (POST); 2 parts: single-object GET/PUT/DELETE keyed by parts[1];
	 * 3 parts: GET changes/list or changes/feed. Anything else -> 405.
	 */
	private void dispatch(String method, String[] parts, RestServiceRequest rsr, PublishedService service) throws Exception, IOException, CoreException, RestPublishException {
		boolean handled = false;
		boolean isGet = "GET".equals(method);

		switch(parts.length) {
		case 0:
			if (isGet) {
				handled = true;
				ServiceDescriber.serveServiceOverview(rsr);
			}
			break;
		case 1:
			if (isGet) {
				handled = true;
				if (rsr.request.getParameter(RestServices.PARAM_ABOUT) != null)
					new ServiceDescriber(rsr, service.def).serveServiceDescription();
				else if (rsr.request.getParameter(RestServices.PARAM_COUNT) != null)
					service.serveCount(rsr);
				else
					service.serveListing(rsr,
							"true".equals(rsr.getRequestParameter(RestServices.PARAM_DATA,"false")),
							Integer.valueOf(rsr.getRequestParameter(RestServices.PARAM_OFFSET, "-1")),
							Integer.valueOf(rsr.getRequestParameter(RestServices.PARAM_LIMIT, "-1")));
			}
			else if ("POST".equals(method)) {
				handled = true;
				JSONObject data;
				// form-encoded bodies become a JSON map of the params; otherwise parse raw body
				if (RestServices.CONTENTTYPE_FORMENCODED.equalsIgnoreCase(rsr.request.getContentType())) {
					data = new JSONObject();
					requestParamsToJsonMap(rsr, data);
				}
				else {
					String body = IOUtils.toString(rsr.request.getInputStream());
					data = new JSONObject(body);
				}
				service.servePost(rsr, data);
			}
			break;
		case 2:
			if (isGet) {
				handled = true;
				service.serveGet(rsr, Utils.urlDecode(parts[1]));
			}
			else if ("PUT" .equals(method)) {
				handled = true;
				String body = IOUtils.toString(rsr.request.getInputStream());
				service.servePut(rsr, Utils.urlDecode(parts[1]), new JSONObject(body), rsr.getETag());
			}
			else if ("DELETE".equals(method) && parts.length == 2) {
				handled = true;
				service.serveDelete(rsr, Utils.urlDecode(parts[1]), rsr.getETag());
			}
			break;
		case 3:
			if (isGet && "changes".equals(parts[1])) {
				handled = true;
				if ("list".equals(parts[2]))
					service.getChangeLogManager().serveChanges(rsr, false);
				else if ("feed".equals(parts[2]))
					service.getChangeLogManager().serveChanges(rsr, true);
				else
					throw new RestPublishException(RestExceptionType.NOT_FOUND, "changes/" + parts[2] + " is not a valid change request. Please use 'changes/list' or 'changes/feed'");
			}
		}

		if (!handled)
			throw new RestPublishException(RestExceptionType.METHOD_NOT_ALLOWED, "Unsupported operation: " + method + " on " + rsr.request.getPathInfo());
	}
}
package org.ccnx.ccn.io; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; import java.util.logging.Level; import javax.crypto.BadPaddingException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; import org.ccnx.ccn.CCNHandle; import org.ccnx.ccn.CCNInterestListener; import org.ccnx.ccn.ContentVerifier; import org.ccnx.ccn.config.SystemConfiguration; import org.ccnx.ccn.impl.security.crypto.ContentKeys; import org.ccnx.ccn.impl.support.DataUtils; import org.ccnx.ccn.impl.support.Log; import org.ccnx.ccn.io.content.Link.LinkObject; import org.ccnx.ccn.profiles.SegmentationProfile; import org.ccnx.ccn.profiles.VersioningProfile; import org.ccnx.ccn.profiles.security.access.AccessControlManager; import org.ccnx.ccn.profiles.security.access.AccessDeniedException; import org.ccnx.ccn.protocol.CCNTime; import org.ccnx.ccn.protocol.ContentName; import org.ccnx.ccn.protocol.ContentObject; import org.ccnx.ccn.protocol.Exclude; import org.ccnx.ccn.protocol.ExcludeComponent; import org.ccnx.ccn.protocol.Interest; import org.ccnx.ccn.protocol.KeyLocator; import org.ccnx.ccn.protocol.PublisherPublicKeyDigest; import org.ccnx.ccn.protocol.SignedInfo.ContentType; /** * This abstract class is the superclass of all classes representing an input stream of * bytes segmented and stored in CCN. * * @see SegmentationProfile for description of CCN segmentation */ public abstract class CCNAbstractInputStream extends InputStream implements ContentVerifier, CCNInterestListener { /** * Flags: * DONT_DEREFERENCE to prevent dereferencing in case we are attempting to read a link. */ protected CCNHandle _handle; /** * The Link we dereferenced to get here, if any. This may contain * a link dereferenced to get to it, and so on. 
*/ protected LinkObject _dereferencedLink = null; public enum FlagTypes { DONT_DEREFERENCE }; protected EnumSet<FlagTypes> _flags = EnumSet.noneOf(FlagTypes.class); /** * The segment we are currently reading from. */ protected ContentObject _currentSegment = null; /** * first segment of the stream we are reading, which is the GONE segment (see ContentType) if content is deleted. * this cached first segment is used to supply certain information it contains, such as for computing digest only * when required */ private ContentObject _firstSegment = null; /** * Internal stream used for buffering reads. May include filters. */ protected InputStream _segmentReadStream = null; /** * The name prefix of the segmented stream we are reading, up to (but not including) * a segment number. */ protected ContentName _baseName = null; protected PublisherPublicKeyDigest _publisher = null; /** * The segment number to start with. If not specified, is SegmentationProfile#baseSegment(). */ protected Long _startingSegmentNumber = null; /** * The timeout to use for segment retrieval. */ protected int _timeout = SystemConfiguration.getDefaultTimeout(); /** * Encryption/decryption handler. */ protected Cipher _cipher; protected ContentKeys _keys; /** * If this content uses Merkle Hash Trees or other bulk signatures to amortize * signature cost, we can amortize verification cost as well by caching verification * data as follows: store the currently-verified root signature, so we don't have to re-verify it; * and the verified root hash. For each piece of incoming content, see if it aggregates * to the same root, if so don't reverify signature. If not, assume it's part of * a new tree and change the root. */ protected byte [] _verifiedRootSignature = null; protected byte [] _verifiedProxy = null; protected boolean _atEOF = false; /** * Used for mark(int) and reset(). 
*/ protected int _readlimit = 0; protected int _markOffset = 0; protected long _markBlock = 0; protected ArrayList<ContentObject> inOrderSegments = new ArrayList<ContentObject>(); protected ArrayList<ContentObject> outOfOrderSegments = new ArrayList<ContentObject>(); protected long _nextPipelineSegment = -1; //this is the segment number of the next segment needed protected long _lastRequestedPipelineSegment = -1; //this is the segment number of the last interest we sent out protected long _lastInOrderSegment = -1; protected ContentName _basePipelineName = null; protected long _lastSegmentNumber = -1; protected ArrayList<Interest> _sentInterests = new ArrayList<Interest>(); private Thread waitingThread = null; private long waitingSegment; private long waitSleep = 0; private long _holes = 0; private long _totalReceived = 0; private long _pipelineStartTime; private Long readerReady = -1L; private double avgResponseTime = -1; private Thread processor = null; private long processingSegment = -1; private ArrayList<IncomingSegment> incoming = new ArrayList<IncomingSegment>(); /** * Set up an input stream to read segmented CCN content under a given name. * Note that this constructor does not currently retrieve any * data; data is not retrieved until read() is called. This will change in the future, and * this constructor will retrieve the first block. * * @param baseName Name to read from. If contains a segment number, will start to read from that * segment. * @param startingSegmentNumber Alternative specification of starting segment number. If * unspecified, will be SegmentationProfile#baseSegment(). * @param publisher The key we require to have signed this content. If null, will accept any publisher * (subject to higher-level verification). * @param keys The keys to use to decrypt this content. Null if content unencrypted, or another * process will be used to retrieve the keys. * @param handle The CCN handle to use for data retrieval. 
If null, the default handle * given by CCNHandle#getHandle() will be used. * @throws IOException Not currently thrown, will be thrown when constructors retrieve first block. */ public CCNAbstractInputStream( ContentName baseName, Long startingSegmentNumber, PublisherPublicKeyDigest publisher, ContentKeys keys, EnumSet<FlagTypes> flags, CCNHandle handle) throws IOException { super(); if (null == baseName) { throw new IllegalArgumentException("baseName cannot be null!"); } _handle = handle; if (null == _handle) { _handle = CCNHandle.getHandle(); } _publisher = publisher; if (null != keys) { keys.requireDefaultAlgorithm(); _keys = keys; } if (null != flags) { _flags = flags; } // So, we assume the name we get in is up to but not including the sequence // numbers, whatever they happen to be. If a starting segment is given, we // open from there, otherwise we open from the leftmost number available. // We assume by the time you've called this, you have a specific version or // whatever you want to open -- this doesn't crawl versions. If you don't // offer a starting segment index, but instead offer the name of a specific // segment, this will use that segment as the starting segment. _baseName = baseName; if (SegmentationProfile.isSegment(baseName)) { _startingSegmentNumber = SegmentationProfile.getSegmentNumber(baseName); _baseName = baseName.parent(); } else { _startingSegmentNumber = SegmentationProfile.baseSegment(); } if (startingSegmentNumber != null) { _startingSegmentNumber = startingSegmentNumber; } //TODO this base name does not include the version!!!!!!!!! Log.info(Log.FAC_IO, "CCNAbstractInputStream: {0} segment {1}", _baseName, _startingSegmentNumber); startPipeline(); } /** * Set up an input stream to read segmented CCN content starting with a given * ContentObject that has already been retrieved. * @param startingSegment The first segment to read from. If this is not the * first segment of the stream, reading will begin from this point. 
* We assume that the signature on this segment was verified by our caller. * @param keys The keys to use to decrypt this content. Null if content unencrypted, or another * process will be used to retrieve the keys. * @param any flags necessary for processing this stream; have to hand in in constructor in case * first segment provided, so can apply to that segment * @param handle The CCN handle to use for data retrieval. If null, the default handle * given by CCNHandle#getHandle() will be used. * @throws IOException */ public CCNAbstractInputStream(ContentObject startingSegment, ContentKeys keys, EnumSet<FlagTypes> flags, CCNHandle handle) throws IOException { super(); _handle = handle; if (null == _handle) { _handle = CCNHandle.getHandle(); } if (null != keys) { keys.requireDefaultAlgorithm(); _keys = keys; } if (null != flags) { _flags = flags; } _baseName = SegmentationProfile.segmentRoot(startingSegment.name()); try { _startingSegmentNumber = SegmentationProfile.getSegmentNumber(startingSegment.name()); } catch (NumberFormatException nfe) { throw new IOException("Stream starter segment name does not contain a valid segment number, so the stream does not know what content to start with."); } setFirstSegment(startingSegment); Log.info(Log.FAC_IO, "CCNAbstractInputStream: {0} segment {1}", _baseName, _startingSegmentNumber); startPipeline(); } private void startPipeline() { synchronized (inOrderSegments) { Log.info(Log.FAC_PIPELINE, "PIPELINE: starting pipelining"); _pipelineStartTime = System.currentTimeMillis(); if (SystemConfiguration.PIPELINE_STATS) System.out.println("plot "+(System.currentTimeMillis() - _pipelineStartTime)+" inOrder: "+inOrderSegments.size() +" outOfOrder: "+outOfOrderSegments.size() + " interests: "+_sentInterests.size() +" holes: "+_holes + " received: "+_totalReceived+" ["+_baseName+"].1"+ " toProcess "+incoming.size()); long segmentToGet = -1; Interest interest = null; if(_basePipelineName == null) { _basePipelineName = _baseName.clone(); 
		}

		Log.info(Log.FAC_PIPELINE, "PIPELINE: BaseName for pipeline: {0} base name: {1}", _basePipelineName, _baseName);

		if (_currentSegment!=null) {
			Log.info(Log.FAC_PIPELINE, "PIPELINE: we already have the first segment... start from there: {0}", _currentSegment.name());
			//we already have the starting segment...

			//is the first segment the last one?
			if (SegmentationProfile.isLastSegment(_currentSegment)) {
				//this is the last segment... don't pipeline
				Log.info(Log.FAC_PIPELINE, "PIPELINE: we already have the last segment... don't need to pipeline (returning)");
				return;
			} else {
				//this isn't the last segment, start up pipelining... only ask for next segment to start
				Log.info(Log.FAC_PIPELINE, "PIPELINE: this isn't the last segment... need to start up pipelining");
			}
		} else {
			Log.info(Log.FAC_PIPELINE, "PIPELINE: need to get the first segment: startingSegmentNumber={0}",_startingSegmentNumber);
		}

		segmentToGet = nextSegmentNumber();
		_nextPipelineSegment = segmentToGet;

		//check here if it is an instance of a versioned stream. if so, and the basename doesn't have a version in it, do not send the interest
		if (this instanceof CCNVersionedInputStream && !VersioningProfile.hasTerminalVersion(_basePipelineName)) {
			Log.info(Log.FAC_PIPELINE, "this is a versioned stream without a terminal version, skip sending non-versioned interest");
		} else {
			Log.info(Log.FAC_PIPELINE, "this is not a versioned stream or it is a versioned stream without the version set in the base name, go ahead and get the first segment");
			interest = SegmentationProfile.segmentInterest(_basePipelineName, segmentToGet, _publisher);
			try {
				// userTime marks when the interest was expressed, used later for RTT estimation
				interest.userTime = System.currentTimeMillis();
				_handle.expressInterest(interest, this);
				_sentInterests.add(interest);
				_lastRequestedPipelineSegment = segmentToGet;
				Log.info(Log.FAC_PIPELINE, "PIPELINE: expressed interest for segment {0} in startPipeline(): {1}", segmentToGet, interest);
			} catch(IOException e) {
				//could not express interest for next segment...
logging the error Log.warning(Log.FAC_PIPELINE, "Failed to express interest for pipelining segments in CCNAbstractInputStream: Interest = {0}", interest.name()); } } } } private void receivePipelineContent(ContentObject co) { long returnedSegment = SegmentationProfile.getSegmentNumber(co.name()); ArrayList<Interest> toRemove = new ArrayList<Interest>(); //is there a reader ready? long rr; synchronized(readerReady) { rr = readerReady; } //while(rr > -1) { if(rr > -1) { //there is a reader waiting Log.info(Log.FAC_PIPELINE, "PIPELINE: there is a reader waiting, we should wait unless we have their segment"); if(returnedSegment == rr) { //this is the segment they want, we should just finish Log.info(Log.FAC_PIPELINE, "PIPELINE: we are working on their segment... we should finish!"); //break; } else { if (haveSegmentBuffered(rr)) { //we have their segment //this isn't their segment, but the one they want is here. we should defer Log.info(Log.FAC_PIPELINE, "PIPELINE: we are deferring until they are done"); try { inOrderSegments.wait(); //readerReady.wait(); synchronized(readerReady) { rr = readerReady; } } catch (InterruptedException e) { Log.info(Log.FAC_PIPELINE, "PIPELINE: we can go back to processing"); //break; } } else { //we don't have their segment, we should keep going Log.info(Log.FAC_PIPELINE, "PIPELINE: we don't have their segment, keep processing this one."); } } } //are we at the last segment? 
synchronized(inOrderSegments) { if (SegmentationProfile.isLastSegment(co)) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we just got the last segment..."); _lastSegmentNumber = returnedSegment; } long segNum; //synchronized (_sentInterests) { for(Interest i: _sentInterests) { segNum = SegmentationProfile.getSegmentNumber(i.name()); if(segNum == returnedSegment || (_lastSegmentNumber > -1 && segNum > _lastSegmentNumber)) { if(Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { Log.info(Log.FAC_PIPELINE, "PIPELINE: cancelling interest for segment "+SegmentationProfile.getSegmentNumber(i.name())+" Interest: "+i); } _handle.cancelInterest(i, this); toRemove.add(i); } } _sentInterests.removeAll(toRemove); toRemove.clear(); } synchronized(inOrderSegments) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: received pipeline segment: {0}", co.name()); /* synchronized(readySegment) { //can we help the reader with a shortcut if (readySegment == null) { //need to set the ready segment if(inOrderSegments.size() > 0) readySegment = inOrderSegments.get(0); } } */ if (returnedSegment == _nextPipelineSegment) { _totalReceived++; if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we got the segment ({0}) we were expecting!", returnedSegment); if(waitingSegment!=-1) if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: someone is waiting for segment: {0}", waitingSegment); //this is the next segment in order inOrderSegments.add(co); _lastInOrderSegment = returnedSegment; //do we have any out of order segments to move over? 
if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { Log.info(Log.FAC_PIPELINE, "PIPELINE: before checking ooos:" ); printSegments(); } if (outOfOrderSegments.size() > 0 ) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we have out of order segments to check"); //this was a hole.. cancel its other interests while (outOfOrderSegments.size() > 0 ) { if(SegmentationProfile.getSegmentNumber(outOfOrderSegments.get(0).name()) == nextInOrderSegmentNeeded()) { _lastInOrderSegment = SegmentationProfile.getSegmentNumber(outOfOrderSegments.get(0).name()); inOrderSegments.add(outOfOrderSegments.remove(0)); } else { //the first one isn't what we wanted.. if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { Log.info(Log.FAC_PIPELINE, "PIPELINE: we have "+SegmentationProfile.getSegmentNumber(outOfOrderSegments.get(0).name())+" but need "+nextInOrderSegmentNeeded()+" breaking from loop, we don't have the one we need"); } break; } } } if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { Log.info(Log.FAC_PIPELINE, "PIPELINE: after checking ooos: "); printSegments(); } //if we had out of order segments, we might still want to advance the pipeline... } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we got segment {0} an Out of Order segment... we were expecting segment {1}", returnedSegment, _nextPipelineSegment); //this segment is out of order //make sure it wasn't a previous segment that we don't need any more... if (_nextPipelineSegment > returnedSegment) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: this is a previous segment... 
drop"); } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: this is a pipeline segment, add to outOfOrderSegment queue"); _totalReceived++; _holes++; int i = 0; for (ContentObject c:outOfOrderSegments) { if(returnedSegment < SegmentationProfile.getSegmentNumber(c.name())) break; i++; } outOfOrderSegments.add(i, co); //now we have a hole to fill if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we got a segment out of order, need to fill a hole at "+nextInOrderSegmentNeeded()); attemptHoleFilling(_nextPipelineSegment); } } _nextPipelineSegment = nextInOrderSegmentNeeded(); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: the next segment needed is {0}", _nextPipelineSegment); synchronized(incoming) { processingSegment = -1; } if(waitingThread!=null && returnedSegment == waitingSegment) { inOrderSegments.notifyAll(); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: notifyAll: min sleep {0}", (System.currentTimeMillis()-waitSleep)); try { inOrderSegments.wait(); } catch (InterruptedException e) { //back to me... keep going } } } } private void advancePipeline(boolean attemptHoleFilling) { synchronized(inOrderSegments) { //first check if we have tokens to spend on interests... 
boolean doneAdvancing = false; //check outstanding interests if(Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { String s = "have interests out for segments: ["; for(Interest i: _sentInterests) s = s + " "+SegmentationProfile.getSegmentNumber(i.name()); s = s + " ]"; if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: "+s); for(Interest i: _sentInterests) if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: {0}", i.name()); } Interest i = null; while (_sentInterests.size() + inOrderSegments.size() + outOfOrderSegments.size() < SystemConfiguration.PIPELINE_SIZE && !doneAdvancing) { //we have tokens to use i = null; if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: _lastSegmentNumber = {0}", _lastSegmentNumber); //if we haven't gotten a valid base segment, we do not want to advance the pipeline. if (_lastRequestedPipelineSegment == SegmentationProfile.baseSegment()) { Log.info(Log.FAC_PIPELINE, "PIPELINE: the last segment number is the base segment, need to make sure we have received the base segment before we press on"); //the last thing we asked for was the base segment... have we gotten it yet? if (_lastInOrderSegment == -1) { Log.info(Log.FAC_PIPELINE, "PIPELINE: _lastInOrderSegment == -1, we have not received the base segment, do not advance the pipeline"); return; } else { Log.info(Log.FAC_PIPELINE, "PIPELINE: _lastInOrderSegment == {0}, we have received the base segment, we can advance the pipeline!", _lastInOrderSegment); } } if (_lastSegmentNumber == -1) { //we don't have the last segment already... 
i = SegmentationProfile.segmentInterest(_basePipelineName, _lastRequestedPipelineSegment + 1, _publisher); try { i.userTime = System.currentTimeMillis(); _handle.expressInterest(i, this); _sentInterests.add(i); _lastRequestedPipelineSegment++; if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: requested segment "+_lastRequestedPipelineSegment +" ("+(SystemConfiguration.PIPELINE_SIZE - _sentInterests.size())+" tokens)"); } catch (IOException e) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.WARNING)) Log.warning(Log.FAC_PIPELINE, "failed to express interest for CCNAbstractInputStream pipeline"); } } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: setting doneAdvancing to true"); doneAdvancing = true; } } } } private void attemptHoleFilling() { synchronized(inOrderSegments) { if(outOfOrderSegments.size() > 0) { long firstOOO = SegmentationProfile.getSegmentNumber(outOfOrderSegments.get(0).name()); long holeCheck = _nextPipelineSegment; while (holeCheck < firstOOO) { attemptHoleFilling(holeCheck); holeCheck++; } } } } private void attemptHoleFilling(long hole) { //holes... just ask for the next segment we are expecting if we haven't already if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: checking for a hole at segment: {0}", hole); //first check the incoming segments to see if it is here already synchronized (incoming) { for (IncomingSegment i: incoming) if(i.segmentNumber == hole) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: segment {0} is already here, just needs to be processed", hole); return; } if(processingSegment != -1 && hole == processingSegment) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: the segment is being processed... 
not a hole."); return; } } Interest i = SegmentationProfile.segmentInterest(_basePipelineName, hole, _publisher); int index = -1; int index2 = -1; long elapsed1 = -1; long elapsed2 = -1; Interest expressed; try { synchronized (inOrderSegments) { // see if this interest is already there index = _sentInterests.indexOf(i); if (index > -1) { expressed = _sentInterests.get(index); elapsed1 = System.currentTimeMillis() - expressed.userTime; if(elapsed1 == -1) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: base segment is there, but the express time is -1, it must be getting processed"); return; } if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: base interest is already there, try adding excludes elapsed time = {0} interest: {1}", elapsed1, expressed); } else { // base interest isn't there, the the exclude could be. } //for (int attempt = 1; attempt < SystemConfiguration.PIPELINE_SEGMENTATTEMPTS; attempt++) { int attempt = 1; Exclude ex = new Exclude(); ex.add(new byte[][]{SegmentationProfile.getSegmentNumberNameComponent(hole+attempt)}); i.exclude(ex); Interest toDelete = null; index2 = 0; long excludedSegment = -1; ExcludeComponent ec = null; long tempseg = -1; for(Interest expInt: _sentInterests) { tempseg = SegmentationProfile.getSegmentNumber(expInt.name()); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: checking if this interest {0} is for our hole at {1}", tempseg, hole); if (tempseg == hole) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: this is a match! does it have excludes?"); //this is the interest we want to look at if(expInt.exclude()!=null) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: yep! 
it is a holefilling attempt"); ec = (ExcludeComponent)expInt.exclude().value(0); excludedSegment = SegmentationProfile.getSegmentNumber(ec.getBytes()); attempt = (int) (excludedSegment - hole); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: this is attempt: {0}", attempt); if (attempt < SystemConfiguration.PIPELINE_SEGMENTATTEMPTS) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we have more attempts that we can try... "); toDelete = expInt; ex = new Exclude(); ex.add(new byte[][]{SegmentationProfile.getSegmentNumberNameComponent(hole+attempt+1)}); i.exclude(ex); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: going to express the next attempt: {0}", i); } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we have tried as many times as we can... break here"); return; } } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: this isn't a holefilling attempt, must be the base interest"); } break; } else { //if this is for a segment after ours, break if (tempseg > hole) break; } index2++; } if(toDelete!=null) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we can try again to fill the hole!"); expressed = toDelete; if(expressed.userTime == -1) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: hole filling segment is there, but the express time is -1, it must be getting processed"); return; } else { elapsed2 = System.currentTimeMillis() - expressed.userTime; if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: elapsed2 time {0}", elapsed2); if(elapsed2 > avgResponseTime * SystemConfiguration.PIPELINE_RTTFACTOR && avgResponseTime > -1) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: expressing the next 
interest! {0}", i); i.userTime = System.currentTimeMillis(); _handle.expressInterest(i, this); _sentInterests.add(index2, i); _handle.cancelInterest(toDelete, this); _sentInterests.remove(toDelete); adjustAvgResponseTimeForHole(); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { Log.info(Log.FAC_PIPELINE, "PIPELINE: expressed: {0} deleted: {1}", i, toDelete); Log.info(Log.FAC_PIPELINE, "PIPELINE: current expressed interests: "); for(Interest p: _sentInterests) Log.info(Log.FAC_PIPELINE, "PIPELINE: {0}", p); } return; } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: need to give the earlier attempt a chance to work"); return; } } } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we don't have any holefilling attempts... for {0}", hole); if (index == -1) { // the base interest wasn't even there (neither was the // hole filling one) i.exclude(null); } } if((elapsed1 > avgResponseTime * 2 && avgResponseTime > -1) || (avgResponseTime == -1 && elapsed1 > SystemConfiguration.INTEREST_REEXPRESSION_DEFAULT)) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { if(i.exclude() == null) Log.info(Log.FAC_PIPELINE, "PIPELINE: adding the base interest or the first holefilling attempt!!! {0}", i); else Log.info(Log.FAC_PIPELINE, "PIPELINE: adding the first holefilling attempt! 
{0}", i); } i.userTime = System.currentTimeMillis(); _handle.expressInterest(i, this); if (index != -1) _sentInterests.add(index, i); else _sentInterests.add(i); // remove the first instance after we express and insert the new // interest if (index != -1) { _handle.cancelInterest(_sentInterests.remove(index+1), this); adjustAvgResponseTimeForHole(); } if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: requested segment {0} to fill hole: {1} with Interest: {2}", hole, i.name(), i); return; } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we need to wait longer to see if the original interest will return the segment: avgResponseTime: {0}", avgResponseTime); } } } catch (IOException e) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.WARNING)) Log.warning(Log.FAC_PIPELINE, "failed to express interest for CCNAbstractInputStream pipeline"); } } private void adjustAvgResponseTimeForHole() { synchronized(incoming) { avgResponseTime = 0.9 * avgResponseTime + 0.1 * (SystemConfiguration.PIPELINE_RTTFACTOR * avgResponseTime); } } private void printSegments() { String s = "inOrder: ["; for(ContentObject c: inOrderSegments) s += " "+SegmentationProfile.getSegmentNumber(c.name()); s += " ] outOrder: ["; for(ContentObject c: outOfOrderSegments) s += " "+SegmentationProfile.getSegmentNumber(c.name()); s += "]"; Log.info(Log.FAC_PIPELINE, "PIPELINE: " + s); } private long nextInOrderSegmentNeeded() { synchronized(inOrderSegments) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { if (_currentSegment==null) Log.info(Log.FAC_PIPELINE, "PIPELINE: current segment: - lastInOrderSegment number {0} _startingSegmentNumber {1}", _lastInOrderSegment, _startingSegmentNumber); else Log.info(Log.FAC_PIPELINE, "PIPELINE: current segment: "+SegmentationProfile.getSegmentNumber(_currentSegment.name()) + " lastInOrderSegment number "+_lastInOrderSegment + " _startingSegmentNumber "+_startingSegmentNumber); if 
(outOfOrderSegments.size() > 0) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { Log.info(Log.FAC_PIPELINE, "PIPELINE: we have out of order segments..."); printSegments(); } } } if (_lastInOrderSegment != -1) return _lastInOrderSegment +1; else return _startingSegmentNumber; } } private boolean haveSegmentBuffered(long segmentNumber) { synchronized(inOrderSegments) { ContentObject co = null; for (int i = 0; i < inOrderSegments.size(); i++) { co = inOrderSegments.get(i); if (SegmentationProfile.getSegmentNumber(co.name()) == segmentNumber) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: have segment {0} in iOS, return true.", segmentNumber); return true; } } for (int i = 0; i < outOfOrderSegments.size(); i++) { co = outOfOrderSegments.get(i); if (SegmentationProfile.getSegmentNumber(co.name()) == segmentNumber) { //this is the segment we wanted if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: segment {0} is in our oOOS queue, return true", segmentNumber); return true; } else { if(SegmentationProfile.getSegmentNumber(co.name()) > segmentNumber) { //we have a hole to fill... if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: our out of order segments are past the requested segment... 
we have a hole"); //attemptHoleFilling(); attemptHoleFilling(segmentNumber); break; } } } return false; } } private ContentObject getPipelineSegment(long segmentNumber) throws IOException{ synchronized(inOrderSegments) { ContentObject co = null; while (inOrderSegments.size() > 0) { co = inOrderSegments.remove(0); if (SegmentationProfile.getSegmentNumber(co.name()) == segmentNumber) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: had segment {0} in iOS, setting current.", segmentNumber); _currentSegment = co; if (inOrderSegments.size() > 0 || segmentNumber == 1) advancePipeline(false); else advancePipeline(true); return co; } } while (outOfOrderSegments.size() > 0) { co = outOfOrderSegments.get(0); if (SegmentationProfile.getSegmentNumber(co.name()) == segmentNumber) { //this is the segment we wanted if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: segment {0} was in our oOOS queue", segmentNumber); outOfOrderSegments.remove(0); _currentSegment = co; return co; } else { if(SegmentationProfile.getSegmentNumber(co.name()) > segmentNumber) { //we have a hole to fill... if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: our out of order segments are past the requested segment... we have a hole"); break; } else { outOfOrderSegments.remove(0); } } } if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { Log.info(Log.FAC_PIPELINE, "PIPELINE: we do not have the segment yet... 
was it requested?"); Log.info(Log.FAC_PIPELINE, "PIPELINE: need segment: {0} _lastRequestedPipelineSegment: {1}", segmentNumber, _lastRequestedPipelineSegment); String s = "current interests out for segments: ["; for(Interest i: _sentInterests) s += " "+SegmentationProfile.getSegmentNumber(i.name()); s += "]"; Log.info(Log.FAC_PIPELINE, "PIPELINE: "+s); } //need to actually get the requested segment if it hasn't been asked for //this is needed for seek, skip, etc //if we haven't requested the segment... should we ditch everything we have? probably if (requestedSegment(segmentNumber)) { //we already requested it. just wait for it to come in attemptHoleFilling(segmentNumber); } else { //we haven't requested it... send request and ditch what we have Interest interest = SegmentationProfile.segmentInterest(_basePipelineName, segmentNumber, _publisher); try { interest.userTime = System.currentTimeMillis(); _handle.expressInterest(interest, this); cancelInterests(); _sentInterests.add(interest); resetPipelineState(); _lastRequestedPipelineSegment = segmentNumber; _nextPipelineSegment = segmentNumber; if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we hadn't asked for segment {0} asking now... 
{1}", segmentNumber, interest);
			} catch (IOException e) {
				if (Log.isLoggable(Log.FAC_PIPELINE, Level.WARNING))
					Log.warning(Log.FAC_PIPELINE, "failed to express interest for CCNAbstractInputStream pipeline");
			}
		}

		//check outstanding interests
		if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) {
			String s = "have interests out for segments: [";
			for(Interest i: _sentInterests)
				s += " "+SegmentationProfile.getSegmentNumber(i.name());
			s += "]";
			Log.info(Log.FAC_PIPELINE, "PIPELINE: "+s);
		}
	}
	return null;
}

/**
 * Cancel every outstanding pipeline interest with the handle and forget them.
 * Used when the pipeline is redirected to a segment we had not requested
 * (see getPipelineSegment), so stale interests do not deliver unwanted data.
 */
private void cancelInterests() {
	synchronized(inOrderSegments) {
		for (Interest i: _sentInterests) {
			_handle.cancelInterest(i, this);
			if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
				Log.info(Log.FAC_PIPELINE, "PIPELINE: canceling interest: {0}", i);
		}
		_sentInterests.clear();
	}
}

/**
 * Drop all buffered segments and reset pipeline bookkeeping to its initial state.
 * Does not touch the outstanding-interest list; callers cancel those separately.
 */
private void resetPipelineState() {
	synchronized(inOrderSegments) {
		inOrderSegments.clear();
		outOfOrderSegments.clear();
		_nextPipelineSegment = -1;
		_lastRequestedPipelineSegment = -1;
		_lastInOrderSegment = -1;
		_lastSegmentNumber = -1;
		_currentSegment = null;
	}
}

/**
 * Check whether the given segment number is already accounted for: either sitting
 * in the incoming queue waiting to be processed, currently being processed, or
 * covered by an outstanding interest.
 * @param number the segment number to look for
 * @return true if the segment was already requested/received, false otherwise
 */
private boolean requestedSegment(long number) {
	synchronized(incoming) {
		// received but not yet processed?
		for(IncomingSegment i: incoming)
			if (i.segmentNumber == number) {
				if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
					Log.info(Log.FAC_PIPELINE, "PIPELINE: we already asked for it and it is just waiting to be processed");
				return true;
			}
		// being handled right now by the processor thread?
		if (processingSegment!=-1 && processingSegment == number) {
			if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
				Log.info(Log.FAC_PIPELINE, "PIPELINE: someone is processing it right now!");
			return true;
		}
	}
	synchronized(inOrderSegments) {
		// still in flight as an expressed interest?
		for (Interest i: _sentInterests)
			if(SegmentationProfile.getSegmentNumber(i.name()) == number)
				return true;
		return false;
	}
}

private void setPipelineName(ContentName n) {
	//we need to set the base name for pipelining...
we might not have had the version (or the full name) _basePipelineName = n.clone(); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: setting _basePipelineName {0}", _basePipelineName); synchronized(inOrderSegments) { //need to remove interest for first segment of old name ArrayList<Interest> remove = new ArrayList<Interest>(); for(Interest i: _sentInterests) { if(SegmentationProfile.segmentRoot(i.name()).equals(_basePipelineName)) { //the name matches, keep it } else { //name doesn't match... remove it remove.add(i); } } for(Interest i: remove) { _handle.cancelInterest(i,this); _sentInterests.remove(i); } } } public boolean readerReadyCheck(long nextSegment) { synchronized(inOrderSegments) { //is there a reader ready? long rr; synchronized(readerReady) { rr = readerReady; } if(rr > -1) { //there is a reader waiting if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: there is a reader waiting, we should wait unless we have their segment"); if (nextSegment == rr) { //this is the segment they want, we should just finish if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we are working on their segment... we should finish!"); return false; //break; } else { if (haveSegmentBuffered(rr)) { //we have their segment //this isn't their segment, but the one they want is here. 
we should defer if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we are deferring until they are done"); return true; } else { //we don't have their segment, we should keep going if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we don't have their segment, keep processing this one."); return false; } } } return false; } } public Interest handleContent(ContentObject result, Interest interest) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: in handleContent for {0} at {1}", result.name(), System.currentTimeMillis()); long starttime = System.currentTimeMillis(); IncomingSegment is; synchronized(incoming) { if(avgResponseTime == -1) { avgResponseTime = starttime - interest.userTime; } else { //do not include hole filling responses, they will be extra fast //if (interest.exclude()==null) avgResponseTime = 0.9 * avgResponseTime + 0.1 * (starttime - interest.userTime); } interest.userTime = -1; if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: in handleContent after reading {0} avgResponseTime {1}", result.name(), avgResponseTime); is = new IncomingSegment(result, interest); int index = 0; for (IncomingSegment i: incoming) { if (i.segmentNumber > is.segmentNumber) break; index++; } incoming.add(index, is); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { String s = "segments to process: ["; for (IncomingSegment i: incoming) s += " "+i.segmentNumber; s += " ]"; Log.info(Log.FAC_PIPELINE, "PIPELINE: " + s); } if (processor == null) { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: processor was null, setting it to me."); //no threads are actively processing content... 
this one will processor = Thread.currentThread(); //synchronized(inOrderSegments) { is = incoming.remove(0); //_sentInterests.add(is.interest); processingSegment = SegmentationProfile.getSegmentNumber(is.content.name()); } else { if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { Log.info(Log.FAC_PIPELINE, "PIPELINE: processor not null, returning"); //another thread is already processing... just dump my content object and return Log.info(Log.FAC_PIPELINE, "PIPELINE: {0} done with handleContent after reading {1}", (System.currentTimeMillis() - starttime), result.name()); } return null; } } //this thread will continue processing the incoming content objects until they are empty synchronized(inOrderSegments){ while (is != null) { //was this a content object we were looking for? //synchronized(inOrderSegments) { if (SystemConfiguration.PIPELINE_STATS) System.out.println("plot "+(System.currentTimeMillis() - _pipelineStartTime)+" inOrder: "+inOrderSegments.size() +" outOfOrder: "+outOfOrderSegments.size() + " interests: "+_sentInterests.size() +" holes: "+_holes + " received: "+_totalReceived+" ["+_baseName+"].2"+ " toProcess "+incoming.size()); if (_sentInterests.remove(is.interest)) { //we had this interest outstanding... if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we were expecting this data! we had outstanding interests: {0}", is.interest); } else { //we must have canceled the interest... drop content object if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: we must have canceled the interest, dropping ContentObject(s). old interest: {0}", is.interest); //does this match one of our other interests? Interest checkInterest; is.interest = null; for (int i = 0; i < _sentInterests.size(); i++) { checkInterest = _sentInterests.get(i); if (checkInterest.matches(result)) { //we found a match! 
if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: the incoming packet's interest is gone, but it matches another interest, using that"); is.interest = checkInterest; break; } } if (is.interest == null) is = null; } //verify the content object if (verify(result)) { //this content verified } else { //content didn't verify, don't hand it up... //TODO content that fails verification needs to be handled better. need to express a new interest if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "Dropping content object due to failed verification: {0} Need to add interest re-expression with exclude", is.content.name()); _sentInterests.remove(is.interest); is = null; } if (is != null) receivePipelineContent(is.content); synchronized(incoming) { if (incoming.size() == 0) { processor = null; is = null; if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: that was the last one, resetting processor to null"); } else { is = incoming.remove(0); //_sentInterests.add(is.interest); processingSegment = SegmentationProfile.getSegmentNumber(is.content.name()); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: processing first segment in incoming arraylist, segment {0}", is.segmentNumber); } if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) { String s = "segments to process: ["; for (IncomingSegment i: incoming) s += " "+i.segmentNumber; s += " ]"; Log.info(Log.FAC_PIPELINE, "PIPELINE: " + s); } } advancePipeline(false); }//try holding lock more consistently to control how notify is done } //while loop for processing incoming segments attemptHoleFilling(); if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO)) Log.info(Log.FAC_PIPELINE, "PIPELINE: {0} done with handleContent after reading {1}", (System.currentTimeMillis() - starttime), result.name()); return null; } /** * Set the timeout that will be used for all content retrievals on this stream. 
* Default is 5 seconds. * @param timeout Milliseconds */ public void setTimeout(int timeout) { _timeout = timeout; } /** * Add flags to this stream. Adds to existing flags. */ public void addFlags(EnumSet<FlagTypes> additionalFlags) { _flags.addAll(additionalFlags); } /** * Add a flag to this stream. Adds to existing flags. */ public void addFlag(FlagTypes additionalFlag) { _flags.add(additionalFlag); } /** * Set flags on this stream. Replaces existing flags. */ public void setFlags(EnumSet<FlagTypes> flags) { if (null == flags) { _flags.clear(); } else { _flags = flags; } } /** * Clear the flags on this stream. */ public void clearFlags() { _flags.clear(); } /** * Remove a flag from this stream. */ public void removeFlag(FlagTypes flag) { _flags.remove(flag); } /** * Check whether this stream has a particular flag set. */ public boolean hasFlag(FlagTypes flag) { return _flags.contains(flag); } /** * @return The name used to retrieve segments of this stream (not including the segment number). */ public ContentName getBaseName() { return _baseName; } /** * @return The version of the stream being read, if its name is versioned. */ public CCNTime getVersion() { if (null == _baseName) return null; return VersioningProfile.getTerminalVersionAsTimestampIfVersioned(_baseName); } /** * Returns the digest of the first segment of this stream. * Together with firstSegmentNumber() and getBaseName() this method may be used to * identify the stream content unambiguously. 
* * @return The digest of the first segment of this stream * @throws NoMatchingContentException if no content available * @throws IOException on communication error */ public byte[] getFirstDigest() throws NoMatchingContentFoundException, IOException { if (null == _firstSegment) { ContentObject firstSegment = getFirstSegment(); setFirstSegment(firstSegment); // sets _firstSegment, does link dereferencing } return _firstSegment.digest(); } @Override public int read() throws IOException { byte [] b = new byte[1]; if (read(b, 0, 1) < 0) { return -1; } return (0x000000FF & b[0]); } @Override public int read(byte[] b) throws IOException { return read(b, 0, b.length); } @Override public int read(byte[] buf, int offset, int len) throws IOException { if (null == buf) throw new NullPointerException("Buffer cannot be null!"); return readInternal(buf, offset, len); } /** * Actual mechanism used to trigger segment retrieval and perform content reads. * Subclasses define different schemes for retrieving content across segments. * @param buf As in read(byte[], int, int). * @param offset As in read(byte[], int, int). * @param len As in read(byte[], int, int). * @return As in read(byte[], int, int). * @throws IOException if a segment cannot be retrieved, or there is an error in lower-level * segment retrieval mechanisms. Uses subclasses of IOException to help provide * more information. In particular, throws NoMatchingContentFoundException when * no content found within the timeout given. */ protected abstract int readInternal(byte [] buf, int offset, int len) throws IOException; /** * Called to set the first segment when opening a stream. This does initialization * and setup particular to the first segment of a stream. Subclasses should not override * unless they really know what they are doing. Calls #setCurrentSegment(ContentObject) * for the first segment. If the content is encrypted, and keys are not provided * for this stream, they are looked up according to the namespace. 
Note that this
	 * assumes that all segments of a given piece of content are either encrypted or not.
	 * @param newSegment Must not be null
	 * @throws IOException If newSegment is null or decryption keys set up incorrectly
	 */
	protected void setFirstSegment(ContentObject newSegment) throws IOException {
		if (null == newSegment) {
			throw new NoMatchingContentFoundException("Cannot find first segment of " + getBaseName());
		}
		LinkObject theLink = null;
		// NOTE(review): each loop iteration below replaces _baseName with the link target's
		// segment root, so getBaseName() changes after dereferencing -- intentional per the
		// comment below, but callers holding the old name should be aware.
		while (newSegment.isType(ContentType.LINK) && (!hasFlag(FlagTypes.DONT_DEREFERENCE))) {
			// Automated dereferencing. Want to make a link object to read in this link, then
			// dereference it to get the segment we really want. We then fix up the _baseName,
			// and continue like nothing ever happened.
			theLink = new LinkObject(newSegment, _handle);
			pushDereferencedLink(theLink); // set _dereferencedLink to point to the new link, pushing
			// old ones down the stack if necessary
			// dereference will check for link cycles
			newSegment = _dereferencedLink.dereference(_timeout);
			if (Log.isLoggable(Log.FAC_IO, Level.INFO))
				Log.info(Log.FAC_IO, "CCNAbstractInputStream: dereferencing link {0} to {1}, resulting data {2}",
						theLink.getVersionedName(), theLink.link(), ((null == newSegment) ? "null" : newSegment.name()));
			if (newSegment == null) {
				// TODO -- catch error states. Do we throw exception or return null?
				// Set error states -- when do we find link cycle and set the error on the link?
				// Clear error state when update is successful.
				// Two cases -- link loop or data not found.
				if (_dereferencedLink.hasError()) {
					if (_dereferencedLink.getError() instanceof LinkCycleException) {
						// Leave the link set on the input stream, so that caller can explore errors.
						if (Log.isLoggable(Log.FAC_IO, Level.WARNING)) {
							Log.warning(Log.FAC_IO, "Hit link cycle on link {0} pointing to {1}, cannot dereference. See this.dereferencedLink() for more information!",
									_dereferencedLink.getVersionedName(), _dereferencedLink.link().targetName());
						}
					}
					// Might also cover NoMatchingContentFoundException here...for now, just return null
					// so can call it more than once.
					throw _dereferencedLink.getError();
				} else {
					throw new NoMatchingContentFoundException("Cannot find first segment of " + getBaseName() + ", which is a link pointing to " + _dereferencedLink.link().targetName());
				}
			}
			_baseName = SegmentationProfile.segmentRoot(newSegment.name());
			// go around again,
		}
		_firstSegment = newSegment;
		if (newSegment.isType(ContentType.GONE)) {
			if (Log.isLoggable(Log.FAC_IO, Level.INFO))
				Log.info(Log.FAC_IO, "setFirstSegment: got gone segment: {0}", newSegment.name());
		} else if (newSegment.isType(ContentType.ENCR) && (null == _keys)) {
			// The block is encrypted and we don't have keys
			// Get the content name without the segment parent
			ContentName contentName = SegmentationProfile.segmentRoot(newSegment.name());
			// Attempt to retrieve the keys for this namespace
			_keys = AccessControlManager.keysForInput(contentName, newSegment.signedInfo().getPublisherKeyID(), _handle);
			if (_keys == null)
				throw new AccessDeniedException("Cannot find keys to decrypt content.");
		}
		setCurrentSegment(newSegment);
	}

	/**
	 * Set up current segment for reading, including preparation for decryption if necessary.
	 * Called after getSegment/getFirstSegment/getNextSegment, which take care of verifying
	 * the segment for us. Assumes newSegment has been verified.
	 * @throws IOException If decryption keys set up incorrectly
	 */
	protected void setCurrentSegment(ContentObject newSegment) throws IOException {
		// Clear state first so a failure part-way through leaves no stale segment visible.
		_currentSegment = null;
		_segmentReadStream = null;
		if (null == newSegment) {
			if (Log.isLoggable(Log.FAC_IO, Level.INFO))
				Log.info(Log.FAC_IO, "FINDME: Setting current segment to null! Did a segment fail to verify?");
			return;
		}
		_currentSegment = newSegment;
		// Should we only set these on the first retrieval?
// getSegment will ensure we get a requested publisher (if we have one) for the
		// first segment; once we have a publisher, it will ensure that future segments match it.
		_publisher = newSegment.signedInfo().getPublisherKeyID();
		if (deletionInformation() != newSegment) { // want pointer ==, not equals() here
			// if we're decrypting, then set it up now
			if (_keys != null) {
				// We only do automated lookup of keys on first segment. Otherwise
				// we assume we must have the keys or don't try to decrypt.
				try {
					// Reuse of current segment OK. Don't expect to have two separate readers
					// independently use this stream without state confusion anyway.
					// Assume getBaseName() returns name without segment information.
					// Log verification only on highest log level (won't execute on lower logging level).
					if (Log.isLoggable(Log.FAC_IO, Level.FINEST)) {
						if (!SegmentationProfile.segmentRoot(_currentSegment.name()).equals(getBaseName())) {
							Log.finest(Log.FAC_IO, "ASSERT: getBaseName()={0} does not match segmentless part of _currentSegment.name()={1}",
									getBaseName(), SegmentationProfile.segmentRoot(_currentSegment.name()));
						}
					}
					_cipher = _keys.getSegmentDecryptionCipher(getBaseName(), _publisher,
							SegmentationProfile.getSegmentNumber(_currentSegment.name()));
				} catch (InvalidKeyException e) {
					Log.warning(Log.FAC_IO, "InvalidKeyException: " + e.getMessage());
					throw new IOException("InvalidKeyException: " + e.getMessage());
				} catch (InvalidAlgorithmParameterException e) {
					Log.warning(Log.FAC_IO, "InvalidAlgorithmParameterException: " + e.getMessage());
					throw new IOException("InvalidAlgorithmParameterException: " + e.getMessage());
				}
				// Let's optimize random access to this buffer (e.g. as used by the decoders) by
				// decrypting a whole ContentObject at a time. It's not a huge security risk,
				// and right now we can't rewind the buffers so if we do try to decode out of
				// an encrypted block we constantly restart from the beginning and redecrypt
				// the content.
				// Previously we used our own UnbufferedCipherInputStream class directly as
				// our _segmentReadStream for encrypted data, as Java's CipherInputStreams
				// assume block-oriented boundaries for decryption, and buffer incorrectly as a result.
				// If we want to go back to incremental decryption, putting a small cache into that
				// class to optimize going backwards would help.
				// Unless we use a compressing cipher, the maximum data length for decrypted data
				// is _currentSegment.content().length. But we might as well make something
				// general that will handle all cases. There may be a more efficient way to
				// do this; want to minimize copies.
				byte [] bodyData = _cipher.update(_currentSegment.content());
				byte[] tailData;
				try {
					tailData = _cipher.doFinal();
				} catch (IllegalBlockSizeException e) {
					Log.warning(Log.FAC_IO, "IllegalBlockSizeException: " + e.getMessage());
					throw new IOException("IllegalBlockSizeException: " + e.getMessage());
				} catch (BadPaddingException e) {
					Log.warning(Log.FAC_IO, "BadPaddingException: " + e.getMessage());
					throw new IOException("BadPaddingException: " + e.getMessage());
				}
				// Pick whichever of update()/doFinal() produced data; concatenate if both did.
				if ((null == tailData) || (0 == tailData.length)) {
					_segmentReadStream = new ByteArrayInputStream(bodyData);
				} else if ((null == bodyData) || (0 == bodyData.length)) {
					_segmentReadStream = new ByteArrayInputStream(tailData);
				} else {
					byte [] allData = new byte[bodyData.length + tailData.length];
					// Still avoid 1.6 array ops
					System.arraycopy(bodyData, 0, allData, 0, bodyData.length);
					System.arraycopy(tailData, 0, allData, bodyData.length, tailData.length);
					_segmentReadStream = new ByteArrayInputStream(allData);
				}
			} else {
				if (_currentSegment.signedInfo().getType().equals(ContentType.ENCR)) {
					// We only do automated lookup of keys on first segment.
					Log.warning(Log.FAC_IO, "Asked to read encrypted content, but not given a key to decrypt it. Decryption happening at higher level?");
				}
				_segmentReadStream = new ByteArrayInputStream(_currentSegment.content());
			}
		}
	}

	/**
	 * Rewinds read buffers for current segment to beginning of the segment.
	 * @throws IOException
	 */
	protected void rewindSegment() throws IOException {
		if (null == _currentSegment) {
			if (Log.isLoggable(Log.FAC_IO, Level.INFO))
				Log.info(Log.FAC_IO, "Cannot rewind null segment.");
		}
		// NOTE(review): when _currentSegment is null we only log above and still fall through;
		// setCurrentSegment(null) returns leaving _segmentReadStream null, so the reset() below
		// would throw NPE -- confirm callers never reach here without a current segment.
		if (null == _segmentReadStream) {
			setCurrentSegment(_currentSegment);
		}
		_segmentReadStream.reset(); // will reset to 0 if mark not called
	}

	/**
	 * Retrieves a specific segment of this stream, indicated by segment number.
	 * Three navigation options: get first (leftmost) segment, get next segment,
	 * or get a specific segment.
	 * Have to assume that everyone is using our segment number encoding. Probably
	 * easier to ask raw streams to use that encoding (e.g. for packet numbers)
	 * than to flag streams as to whether they are using integers or segments.
	 * @param number Segment number to retrieve. See SegmentationProfile for numbering.
	 * 		If we already have this segment as #currentSegmentNumber(), will just
	 * 		return the current segment, and will not re-retrieve it from the network.
	 * @throws IOException If no matching content found (actually throws NoMatchingContentFoundException)
	 *  	or if there is an error at lower layers.
	 **/
	protected ContentObject getSegment(long number) throws IOException {
		long ttgl = System.currentTimeMillis();
		// Record which segment the reader is waiting on before taking the pipeline lock.
		synchronized(readerReady){
			readerReady = number;
		}
		synchronized (inOrderSegments) {
			if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
				Log.info(Log.FAC_PIPELINE, "PIPELINE: time to get lock in getSegment (number) "+(System.currentTimeMillis() - ttgl));
			// check if the base name was updated (in case we didn't have the version) for pipelining
			if (_baseName.equals(_basePipelineName)) {
				// we already have the base name...
if (SystemConfiguration.PIPELINE_STATS)
					System.out.println("plot " + (System.currentTimeMillis() - _pipelineStartTime) + " inOrder: " + inOrderSegments.size() + " outOfOrder: " + outOfOrderSegments.size() + " interests: " + _sentInterests.size() + " holes: " + _holes + " received: " + _totalReceived + " [" + _baseName + "].3"+ " toProcess "+incoming.size());
			} else {
				// we don't have the base name... set for pipelining.
				setPipelineName(_baseName);
				startPipeline();
			}
			if (_currentSegment != null) {
				// what segment do we have right now? maybe we already have it
				if (currentSegmentNumber() == number) {
					// we already have this segment... just use it
					return _currentSegment;
				}
			}
			ContentObject co = getPipelineSegment(number);
			if (co != null) {
				if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
					Log.info(Log.FAC_PIPELINE, "PIPELINE: we had segment {0} already!!", number);
				advancePipeline(false);
				synchronized(readerReady) {
					//readerReady.notifyAll();
					readerReady = -1L;
					inOrderSegments.notifyAll();
				}
				return co;
			} else {
				if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
					Log.info(Log.FAC_PIPELINE, "PIPELINE: we don't have segment {0} pipelined... blocking", number);
			}
			// the segment was not available... we need to wait until the
			// pipeline gets it in
			synchronized(inOrderSegments) {
				long start = System.currentTimeMillis();
				long sleep = 0;
				long sleepCheck = 0;
				Log.info(Log.FAC_PIPELINE, "PIPELINE: _timeout = {0}", _timeout);
				waitingThread = Thread.currentThread();
				waitingSegment = number;
				// Wait in short slices (bounded by avgResponseTime or SHORT_TIMEOUT) up to
				// _timeout total, attempting hole filling between waits.
				// NOTE(review): InterruptedException is swallowed (break commented out) and the
				// interrupt status is not restored -- confirm this is intended.
				while (sleep < _timeout) {
					try{
						start = System.currentTimeMillis();
						waitSleep = start;
						sleepCheck = _timeout - sleep;
						if(avgResponseTime > 0 && avgResponseTime < (long)SystemConfiguration.SHORT_TIMEOUT) {
							if(avgResponseTime > sleepCheck)
								inOrderSegments.wait(sleepCheck);
							else
								inOrderSegments.wait((long)avgResponseTime);
						} else {
							if((long)SystemConfiguration.SHORT_TIMEOUT > sleepCheck)
								inOrderSegments.wait(sleepCheck);
							else
								inOrderSegments.wait((long)SystemConfiguration.SHORT_TIMEOUT);
						}
					} catch(InterruptedException e1) {
						if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
							Log.info(Log.FAC_PIPELINE, "PIPELINE: awake: interrupted! {0}", sleep);
						//break;
					}
					sleep += System.currentTimeMillis() - start;
					if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
						Log.info(Log.FAC_PIPELINE, "PIPELINE: slept for {0} ms total", sleep);
					if(haveSegmentBuffered(number))
						break;
					else {
						attemptHoleFilling(number);
					}
				}
				if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
					Log.info(Log.FAC_PIPELINE, "PIPELINE: awake: done sleeping {0}", sleep);
				waitingThread = null;
				waitingSegment = -1;
				co = getPipelineSegment(number);
				synchronized(readerReady) {
					//readerReady.notifyAll();
					readerReady = -1L;
					inOrderSegments.notifyAll();
				}
			}
			if (co != null) {
				if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
					Log.info(Log.FAC_PIPELINE, "PIPELINE: we had segment {0} already!!", number);
				return co;
			} else {
				if (Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
					Log.info(Log.FAC_PIPELINE, "PIPELINE: we don't have segment {0} pipelined... what happened?", number);
			}
			if(Log.isLoggable(Log.FAC_PIPELINE, Level.INFO))
				Log.info(Log.FAC_PIPELINE, "PIPELINE: Cannot get segment " + number + " of file {0} expected segment: {1}.", _baseName, SegmentationProfile.segmentName(_baseName, number));
			throw new IOException("Cannot get segment " + number + " of file "+ _baseName + " expected segment: "+ SegmentationProfile.segmentName(_baseName, number));
		}
	}

	/**
	 * Checks whether we might have a next segment.
	 * @return Returns false if this content is marked as GONE (see ContentType), or if we have
	 * 		retrieved the segment marked as the last one, or, in a very rare case, if we're
	 * 		reading content that does not have segment markers.
	 */
	protected boolean hasNextSegment() throws IOException {
		// We're looking at content marked GONE
		if (isGone()) {
			if (Log.isLoggable(Log.FAC_IO, Level.FINER))
				Log.finer(Log.FAC_IO, "getNextSegment: We have a gone segment, no next segment. Gone segment: {0}", _firstSegment.name());
			return false;
		}
		if (null == _currentSegment) {
			if (Log.isLoggable(Log.FAC_IO, Level.SEVERE))
				Log.severe(Log.FAC_IO, "hasNextSegment() called when we have no current segment!");
			throw new IOException("hasNextSegment() called when we have no current segment!");
		}
		// Check to see if finalBlockID is the current segment. If so, there should
		// be no next segment. (If the writer makes a mistake and guesses the wrong
		// value for finalBlockID, they won't put that wrong value in the segment they're
		// guessing itself -- unless they want to try to extend a "closed" stream.
		// Normally by the time they write that segment, they either know they're done or not.
		if (null != _currentSegment.signedInfo().getFinalBlockID()) {
			if (Arrays.equals(_currentSegment.signedInfo().getFinalBlockID(), _currentSegment.name().lastComponent())) {
				if (Log.isLoggable(Log.FAC_IO, Level.FINER)) {
					Log.finer(Log.FAC_IO, "getNextSegment: there is no next segment. We have segment: " +
							DataUtils.printHexBytes(_currentSegment.name().lastComponent()) + " which is marked as the final segment.");
				}
				return false;
			}
		}
		if (!SegmentationProfile.isSegment(_currentSegment.name())) {
			if (Log.isLoggable(Log.FAC_IO, Level.INFO))
				Log.info(Log.FAC_IO, "Unsegmented content: {0}. No next segment.", _currentSegment.name());
			return false;
		}
		return true;
	}

	/**
	 * Retrieve the next segment of the stream. Convenience method, uses #getSegment(long).
	 * @return the next segment, if found.
	 * @throws IOException
	 */
	protected ContentObject getNextSegment() throws IOException {
		if (null == _currentSegment) {
			if (Log.isLoggable(Log.FAC_IO, Level.FINE))
				Log.fine(Log.FAC_IO, "getNextSegment: no current segment, getting first segment.");
			ContentObject firstSegment = getFirstSegment();
			setFirstSegment(firstSegment);
			return firstSegment;
		}
		if (Log.isLoggable(Log.FAC_IO, Level.FINE))
			Log.fine(Log.FAC_IO, "getNextSegment: getting segment after {0}", _currentSegment.name());
		// TODO: This should call setCurrentSegment, no?
		return getSegment(nextSegmentNumber());
	}

	/**
	 * Retrieves the first segment of the stream, based on specified startingSegmentNumber
	 * (see #CCNAbstractInputStream(ContentName, Long, PublisherPublicKeyDigest, ContentKeys, CCNHandle)).
	 * @return the first segment, if found.
	 * @throws IOException If can't get a valid starting segment number
	 */
	public ContentObject getFirstSegment() throws IOException {
		if (null != _firstSegment) {
			return _firstSegment;
		} else if (null != _startingSegmentNumber) {
			ContentObject firstSegment = getSegment(_startingSegmentNumber);
			if (Log.isLoggable(Log.FAC_IO, Level.FINE)) {
				Log.fine(Log.FAC_IO, "getFirstSegment: segment number: " + _startingSegmentNumber + " got segment? " + ((null == firstSegment) ? "no " : firstSegment.name()));
			}
			// Do not call setFirstSegment() here because that should only be done when
			// we are initializing since it does one-time processing including changing the
			// current segment. Callers to this method may be simply needing the first segment
			// without changing current.
			return firstSegment;
		} else {
			throw new IOException("Stream does not have a valid starting segment number.");
		}
	}

	/**
	 * Method to determine whether a retrieved block is the first segment of this stream (as
	 * specified by startingSegmentNumber, (see #CCNAbstractInputStream(ContentName, Long, PublisherPublicKeyDigest, ContentKeys, CCNHandle)).
	 * Overridden by subclasses to implement narrower constraints on names. Once first
	 * segment is retrieved, further segments can be identified just by segment-naming
	 * conventions (see SegmentationProfile).
	 *
	 * @param desiredName The expected name prefix for the stream.
	 * 		For CCNAbstractInputStream, assume that desiredName contains the name up to but not including
	 * 		segmentation information.
	 * @param segment The potential first segment.
	 * @return True if it is the first segment, false otherwise.
	 */
	protected boolean isFirstSegment(ContentName desiredName, ContentObject segment) {
		if ((null != segment) && (SegmentationProfile.isSegment(segment.name()))) {
			if (Log.isLoggable(Log.FAC_IO, Level.FINER))
				Log.finer(Log.FAC_IO, "is {0} a first segment of {1}", segment.name(), desiredName);
			// In theory, the segment should be at most a versioning component different from desiredName.
			// In the case of complex segmented objects (e.g. a KeyDirectory), where there is a version,
			// then some name components, then a segment, desiredName should contain all of those other
			// name components -- you can't use the usual versioning mechanisms to pull first segment anyway.
			if (!desiredName.equals(SegmentationProfile.segmentRoot(segment.name()))) {
				if (Log.isLoggable(Log.FAC_IO, Level.FINE))
					Log.fine(Log.FAC_IO, "Desired name :{0} is not a prefix of segment: {1}",desiredName, segment.name());
				return false;
			}
			if (null != _startingSegmentNumber) {
				return (_startingSegmentNumber.longValue() == SegmentationProfile.getSegmentNumber(segment.name()));
			} else {
				return SegmentationProfile.isFirstSegment(segment.name());
			}
		}
		return false;
	}

	/**
	 * If we traversed a link to get this object, make it available.
	 */
	public synchronized LinkObject getDereferencedLink() { return _dereferencedLink; }

	/**
	 * Use only if you know what you are doing.
	 */
	protected synchronized void setDereferencedLink(LinkObject dereferencedLink) { _dereferencedLink = dereferencedLink; }

	/**
	 * Add a LinkObject to the stack we had to dereference to get here.
	 */
	protected synchronized void pushDereferencedLink(LinkObject dereferencedLink) {
		if (null == dereferencedLink) {
			return;
		}
		if (null != _dereferencedLink) {
			if (null != dereferencedLink.getDereferencedLink()) {
				if (Log.isLoggable(Log.FAC_IO, Level.WARNING)) {
					Log.warning(Log.FAC_IO, "Merging two link stacks -- {0} already has a dereferenced link from {1}. Behavior unpredictable.",
							dereferencedLink.getVersionedName(), dereferencedLink.getDereferencedLink().getVersionedName());
				}
			}
			// Push the existing stack beneath the newly traversed link.
			dereferencedLink.pushDereferencedLink(_dereferencedLink);
		}
		setDereferencedLink(dereferencedLink);
	}

	/**
	 * Verifies the signature on a segment using cached bulk signature data (from Merkle Hash Trees)
	 * if it is available.
	 * TODO -- check to see if it matches desired publisher.
	 * @param segment the segment whose signature to verify in the context of this stream.
	 */
	public boolean verify(ContentObject segment) {
		// First we verify.
		// Low-level verify just checks that signer actually signed.
		// High-level verify checks trust.
		try {
			// We could have several options here. This segment could be simply signed.
			// or this could be part of a Merkle Hash Tree. If the latter, we could
			// already have its signing information.
			if (null == segment.signature().witness()) {
				return segment.verify(_handle.keyManager());
			}
			// Compare to see whether this segment matches the root signature we previously verified, if
			// not, verify and store the current signature.
			// We need to compute the proxy regardless.
			byte [] proxy = segment.computeProxy();
			// OK, if we have an existing verified signature, and it matches this segment's
			// signature, the proxy ought to match as well.
			if ((null != _verifiedRootSignature) && (Arrays.equals(_verifiedRootSignature, segment.signature().signature()))) {
				if ((null == proxy) || (null == _verifiedProxy) || (!Arrays.equals(_verifiedProxy, proxy))) {
					if (Log.isLoggable(Log.FAC_VERIFY, Level.WARNING)) {
						Log.warning(Log.FAC_VERIFY, "VERIFICATION FAILURE: Found segment of stream: " + segment.name() + " whose digest fails to verify; segment length: " + segment.contentLength());
						Log.info("Verification failure: " + segment.name() + " timestamp: " + segment.signedInfo().getTimestamp() + " content length: " + segment.contentLength() +
								" proxy: " + DataUtils.printBytes(proxy) + " expected proxy: " + DataUtils.printBytes(_verifiedProxy) + " ephemeral digest: " + DataUtils.printBytes(segment.digest()));
						SystemConfiguration.outputDebugObject(segment);
					}
					return false;
				}
			} else {
				// Verifying a new segment. See if the signature verifies, otherwise store the signature
				// and proxy.
				if (!ContentObject.verify(proxy, segment.signature().signature(), segment.signedInfo(), segment.signature().digestAlgorithm(), _handle.keyManager())) {
					if (Log.isLoggable(Log.FAC_VERIFY, Level.WARNING)) {
						Log.warning(Log.FAC_VERIFY, "VERIFICATION FAILURE: Found segment of stream: " + segment.name().toString() + " whose signature fails to verify; segment length: " + segment.contentLength() + ".");
						Log.info("Verification failure: " + segment.name() + " timestamp: " + segment.signedInfo().getTimestamp() + " content length: " + segment.contentLength() +
								" proxy: " + DataUtils.printBytes(proxy) + " expected proxy: " + DataUtils.printBytes(_verifiedProxy) + " ephemeral digest: " + DataUtils.printBytes(segment.digest()));
						SystemConfiguration.outputDebugObject(segment);
					}
					return false;
				} else {
					// Remember current verifiers
					_verifiedRootSignature = segment.signature().signature();
					_verifiedProxy = proxy;
				}
			}
			if (Log.isLoggable(Log.FAC_IO, Level.INFO))
				Log.info(Log.FAC_IO, "Got segment: {0}, verified.", segment.name());
		} catch (Exception e) {
			// Any exception during verification is treated as a verification failure.
			Log.warning(Log.FAC_IO, "Got an " + e.getClass().getName() + " exception attempting to verify segment: " + segment.name().toString() + ", treat as failure to verify.");
			Log.warningStackTrace(e);
			return false;
		}
		return true;
	}

	/**
	 * Returns the first segment number for this stream.
	 * @return The index of the first segment of stream data.
	 */
	public long firstSegmentNumber() {
		// NOTE(review): assumes _startingSegmentNumber is non-null here -- would NPE otherwise.
		return _startingSegmentNumber.longValue();
	}

	/**
	 * Returns the segment number for the next segment.
	 * Default segmentation generates sequentially-numbered stream
	 * segments but this method may be overridden in subclasses to
	 * perform re-assembly on streams that have been segmented differently.
	 * @return The index of the next segment of stream data.
	 */
	public long nextSegmentNumber() {
		if (null == _currentSegment) {
			return _startingSegmentNumber.longValue();
		} else {
			return segmentNumber() + 1;
		}
	}

	/**
	 * @return Returns the segment number of the current segment if we have one, otherwise
	 * the expected startingSegmentNumber.
	 */
	public long segmentNumber() {
		if (null == _currentSegment) {
			// Auto-unboxed; same non-null assumption as firstSegmentNumber().
			return _startingSegmentNumber;
		} else {
			// This needs to work on streaming content that is not traditional fragments.
			// The segmentation profile tries to do that, though it is seeming like the
			// new segment representation means we will have to assume that representation
			// even for stream content.
			return SegmentationProfile.getSegmentNumber(_currentSegment.name());
		}
	}

	/**
	 * @return Returns the segment number of the current segment if we have one, otherwise -1.
	 */
	protected long currentSegmentNumber() {
		if (null == _currentSegment) {
			return -1; // make sure we don't match inappropriately
		}
		return segmentNumber();
	}

	/**
	 * Checks to see whether this content has been marked as GONE (deleted). Will retrieve the first
	 * segment if we do not already have it in order to make this determination.
	 * @return true if stream is GONE.
	 * @throws NoMatchingContentFound exception if no first segment found
	 * @throws IOException if there is other difficulty retrieving the first segment.
	 */
	public boolean isGone() throws NoMatchingContentFoundException, IOException {
		// TODO: once first segment is always read in constructor this code will change
		if (null == _firstSegment) {
			ContentObject firstSegment = getFirstSegment();
			setFirstSegment(firstSegment); // sets _firstSegment, does link dereferencing,
			// throws NoMatchingContentFoundException if firstSegment is null.
			// this way all retry behavior is localized in the various versions of getFirstSegment.
			// Previously what would happen is getFirstSegment would be called by isGone, return null,
			// and we'd have a second chance to catch it on the call to update if things were slow. But
			// that means we would get a more general update on a gone object.
		}
		if (_firstSegment.isType(ContentType.GONE)) {
			return true;
		} else {
			return false;
		}
	}

	/**
	 * Return the single segment of a stream marked as GONE. This method
	 * should be called only after checking isGone() == true otherwise it
	 * may return the wrong result.
	 * @return the GONE segment or null if state unknown or stream is not marked GONE
	 */
	public ContentObject deletionInformation() {
		if (null != _firstSegment && _firstSegment.isType(ContentType.GONE))
			return _firstSegment;
		else
			return null;
	}

	/**
	 * Callers may need to access information about this stream's publisher.
	 * We eventually should (TODO) ensure that all the segments we're reading
	 * match in publisher information, and cache the verified publisher info.
	 * (In particular once we're doing trust calculations, to ensure we do them
	 * only once per stream.)
	 * But we do verify each segment, so start by pulling what's in the current segment.
	 * @return the publisher of the data in the stream (either as requested, or once we have
	 * data, as observed).
	 */
	public PublisherPublicKeyDigest publisher() {
		return _publisher;
	}

	/**
	 * @return the key locator for this stream's publisher.
	 * @throw IOException if unable to obtain content (NoMatchingContentFoundException)
	 */
	public KeyLocator publisherKeyLocator() throws IOException {
		if (null == _firstSegment) {
			ContentObject firstSegment = getFirstSegment();
			setFirstSegment(firstSegment);
		}
		return _firstSegment.signedInfo().getKeyLocator();
	}

	/**
	 * @return the name of the current segment held by this string, or "null". Used for debugging.
	 */
	public String currentSegmentName() {
		return ((null == _currentSegment) ?
"null" : _currentSegment.name().toString());
	}

	@Override
	public int available() throws IOException {
		if (null == _segmentReadStream)
			return 0;
		return _segmentReadStream.available();
	}

	/**
	 * @return Whether this stream believes it is at eof (has read past the end of the
	 * last segment of the stream).
	 */
	public boolean eof() {
		//Log.finest(Log.FAC_IO, "Checking eof: there yet? " + _atEOF);
		return _atEOF;
	}

	@Override
	public void close() throws IOException {
		Log.info(Log.FAC_IO, "CCNAbstractInputStream: close {0}: shutting down pipelining", _baseName);
		//now that we have pipelining, we need to cancel our interests and clean up
		//cancel our outstanding interests
		cancelInterests();
		resetPipelineState();
	}

	@Override
	public synchronized void mark(int readlimit) {
		// Shouldn't have a problem if we are GONE, and don't want to
		// deal with exceptions raised by a call to isGone.
		_readlimit = readlimit;
		_markBlock = segmentNumber();
		if (null == _segmentReadStream) {
			_markOffset = 0;
		} else {
			try {
				_markOffset = _currentSegment.contentLength() - _segmentReadStream.available();
				if (_segmentReadStream.markSupported()) {
					_segmentReadStream.mark(readlimit);
				}
			} catch (IOException e) {
				// mark() cannot throw IOException per the InputStream contract, so wrap it.
				throw new RuntimeException(e);
			}
		}
		if (Log.isLoggable(Log.FAC_IO, Level.FINEST))
			Log.finest(Log.FAC_IO, "mark: block: " + segmentNumber() + " offset: " + _markOffset);
	}

	@Override
	public boolean markSupported() {
		return true;
	}

	@Override
	public synchronized void reset() throws IOException {
		if (isGone())
			return; // TODO: when first block is read in constructor this check can be removed
		if (_currentSegment == null) {
			setFirstSegment(getFirstSegment());
			setCurrentSegment(getSegment(_markBlock));
		} else if (currentSegmentNumber() == _markBlock) {
			//already have the correct segment
			if (tell() == _markOffset){
				//already have the correct offset
			} else {
				// Reset and skip.
				if (_segmentReadStream.markSupported()) {
					_segmentReadStream.reset();
					if (Log.isLoggable(Log.FAC_IO, Level.FINEST))
						Log.finest(Log.FAC_IO, "reset within block: block: " + segmentNumber() + " offset: " + _markOffset + " eof? " + _atEOF);
					return;
				} else {
					setCurrentSegment(_currentSegment);
				}
			}
		} else {
			// getSegment doesn't pull segment if we already have the right one
			setCurrentSegment(getSegment(_markBlock));
		}
		_segmentReadStream.skip(_markOffset);
		_atEOF = false;
		if (Log.isLoggable(Log.FAC_IO, Level.FINEST))
			Log.finest(Log.FAC_IO, "reset: block: " + segmentNumber() + " offset: " + _markOffset + " eof? " + _atEOF);
	}

	@Override
	public long skip(long n) throws IOException {
		if (isGone())
			return 0;
		if (Log.isLoggable(Log.FAC_IO, Level.FINER))
			Log.finer(Log.FAC_IO, "in skip("+n+")");
		if (n < 0) {
			return 0;
		}
		// Delegates to readInternal with a null buffer: bytes are consumed, not copied.
		return readInternal(null, 0, (int)n);
	}

	/**
	 * @return Currently returns 0. Can be optionally overridden by subclasses.
	 * @throws IOException
	 */
	protected int segmentCount() throws IOException {
		return 0;
	}

	/**
	 * Seek a stream to a specific byte offset from the start. Tries to avoid retrieving
	 * extra segments.
	 * @param position
	 * @throws IOException
	 */
	public void seek(long position) throws IOException {
		if (isGone())
			return; // can't seek gone stream
		if (Log.isLoggable(Log.FAC_IO, Level.FINER)) {
			Log.finer(Log.FAC_IO, "Seeking stream to {0}", position);
		}
		// TODO: when first block is read in constructor this check can be removed
		if ((_currentSegment == null) || (!SegmentationProfile.isFirstSegment(_currentSegment.name()))) {
			setFirstSegment(getFirstSegment());
			skip(position);
		} else if (position > tell()) {
			// we are on the first segment already, just move forward
			skip(position - tell());
		} else {
			// we are on the first segment already, just rewind back to the beginning
			rewindSegment();
			skip(position);
		}
	}

	/**
	 * @return Returns position in byte offset. For CCNAbstractInputStream, provide an inadequate
	 * base implementation that returns the offset into the current segment (not the stream as
	 * a whole).
	 * @throws IOException
	 */
	public long tell() throws IOException {
		if (isGone())
			return 0;
		return _currentSegment.contentLength() - _segmentReadStream.available();
	}

	/**
	 * @return Total length of the stream, if known, otherwise -1.
	 * @throws IOException
	 */
	public long length() throws IOException {
		return -1;
	}

	// Pairs a received ContentObject with the Interest that retrieved it, plus its
	// parsed segment number; used by the pipelining machinery's incoming queue.
	private class IncomingSegment {
		public ContentObject content;
		public Interest interest;
		public long segmentNumber;

		private IncomingSegment(ContentObject co, Interest i) {
			content = co;
			interest = i;
			segmentNumber = SegmentationProfile.getSegmentNumber(co.name());
		}
	}
}
package org.jpos.iso.channel;

import org.jpos.iso.*;
import org.jpos.iso.packager.XMLPackager;

import java.io.BufferedReader;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.Socket;

/**
 * Implements an ISOChannel able to exchange <b>jPOS generated</b>
 * (or compliant) XML based ISO-8583 messages
 * @author <a href="mailto:apr@cs.com.uy">Alejandro P. Revilla</a>
 * @version $Id$
 *
 * @see ISOMsg
 * @see ISOException
 * @see ISOChannel
 */
public class XMLChannel extends BaseChannel {
    // Line-oriented reader over the socket input stream; created in connect(),
    // released in disconnect().
    BufferedReader reader = null;

    /**
     * Public constructor (used by Class.forName("...").newInstance())
     */
    public XMLChannel () {
        super();
    }

    /**
     * Constructs client ISOChannel
     * @param host server TCP Address
     * @param port server port number
     * @param p an ISOPackager
     * @see ISOPackager
     */
    public XMLChannel (String host, int port, ISOPackager p) {
        super(host, port, p);
    }

    /**
     * Construct server ISOChannel
     * @param p an ISOPackager
     * @see ISOPackager
     * @exception IOException
     */
    public XMLChannel (ISOPackager p) throws IOException {
        super(p);
    }

    /**
     * constructs a server ISOChannel associated with a Server Socket
     * @param p an ISOPackager
     * @param serverSocket where to accept a connection
     * @exception IOException
     * @see ISOPackager
     */
    public XMLChannel (ISOPackager p, ServerSocket serverSocket)
        throws IOException
    {
        super(p, serverSocket);
    }

    /**
     * Reads lines until the outermost closing isomsg tag has been seen,
     * tracking nesting depth in {@code sp} so inner isomsg elements don't
     * terminate the read early.
     * NOTE(review): {@code contains} fires at most once per line, so this
     * assumes the peer writes at most one isomsg open/close tag per line
     * (jPOS pretty-printed output) — confirm against the sending side.
     * NOTE(review): lines are appended without their newline separators
     * (readLine strips them), and getBytes() uses the platform default
     * charset — verify both match the packager's expectations.
     * @return a byte array with the received message
     * @exception IOException
     * @throws EOFException when the peer closes the stream or sends nothing
     */
    protected byte[] streamReceive() throws IOException {
        int sp = 0;
        StringBuilder sb = new StringBuilder();
        while (reader != null) {
            String s = reader.readLine();
            if (s == null)
                throw new EOFException();
            sb.append (s);
            if (s.contains("<" + XMLPackager.ISOMSG_TAG))
                sp++;
            if (s.contains("</" + XMLPackager.ISOMSG_TAG + ">") && --sp <= 0)
                break;
        }
        if (sb.length() == 0)
            throw new EOFException();
        return sb.toString().getBytes();
    }

    /** @return 0 — XML framing needs no length header. */
    protected int getHeaderLength() {
        // XML Channel does not support header
        return 0;
    }

    /** No-op: XML framing needs no message header. */
    protected void sendMessageHeader(ISOMsg m, int len) {
        // XML Channel does not support header
    }

    /**
     * Connects and wraps the channel's input stream in the line reader
     * used by streamReceive(). serverIn is set up by super.connect().
     */
    protected void connect (Socket socket) throws IOException {
        super.connect (socket);
        reader = new BufferedReader (new InputStreamReader (serverIn));
    }

    /** Disconnects and releases the line reader. */
    public void disconnect () throws IOException {
        super.disconnect ();
        if (reader != null)
            reader.close ();
        reader = null;
    }
}
package opendap.wcs.v2_0.http; import opendap.coreServlet.ReqInfo; import opendap.wcs.v2_0.*; import org.jdom.Document; import org.jdom.Element; import org.jdom.input.SAXBuilder; import org.jdom.output.Format; import org.jdom.output.XMLOutputter; import org.slf4j.Logger; import org.xml.sax.EntityResolver; import org.xml.sax.InputSource; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.*; import java.net.URLDecoder; public class XmlRequestHandler implements opendap.coreServlet.DispatchHandler, WcsResponder { protected Logger log; protected HttpServlet dispatchServlet; protected boolean _initialized; protected String _prefix; protected Element _config; public XmlRequestHandler() { super(); log = org.slf4j.LoggerFactory.getLogger(getClass()); } public void init(HttpServlet servlet, Element config) throws Exception { if (_initialized) return; dispatchServlet = servlet; _config = config; ingestPrefix(); _initialized = true; } private void ingestPrefix() throws Exception{ String msg; Element e = _config.getChild("prefix"); if(e!=null) _prefix = e.getTextTrim(); if(_prefix.startsWith("/")) _prefix = _prefix.substring(1, _prefix.length()); while(_prefix.endsWith("/")){ _prefix = _prefix.substring(0,_prefix.length()-2); } log.info("Initialized. 
prefix="+ _prefix); } public boolean requestCanBeHandled(HttpServletRequest request) throws Exception { return wcsRequestDispatch(request, null, false); } public void handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception { wcsRequestDispatch(request, response, true); } public long getLastModified(HttpServletRequest req) { return -1; } public void destroy() { } private boolean wcsRequestDispatch(HttpServletRequest request, HttpServletResponse response, boolean sendResponse) throws InterruptedException { String relativeURL = ReqInfo.getLocalUrl(request); if (relativeURL.startsWith("/")) relativeURL = relativeURL.substring(1, relativeURL.length()); boolean isWcsEndPoint = false; if (relativeURL != null) { if (relativeURL.startsWith(_prefix)) { isWcsEndPoint = true; if (sendResponse){ try { handleWcsRequest(request, response); } catch (WcsException wcse) { log.error(wcse.getMessage()); WcsExceptionReport er = new WcsExceptionReport(wcse); handleWcsError(er,response); } } } } return isWcsEndPoint; } public void handleWcsRequest(HttpServletRequest request, HttpServletResponse response) throws WcsException, InterruptedException { BufferedReader sis = getRequestReader(request); String encoding = getEncoding(request); Document wcsRequestDoc = parseWcsRequest(sis, encoding); Element wcsRequest = wcsRequestDoc.getRootElement(); String serviceUrl = Util.getServiceUrlString(request, _prefix); handleWcsRequest(wcsRequest,serviceUrl,response); } public void handleWcsRequest(Element wcsRequest, String serviceUrl, HttpServletResponse response) throws WcsException, InterruptedException { Document wcsResponse; switch (getRequestType(wcsRequest)) { case WCS.GET_CAPABILITIES: GetCapabilitiesRequest getCapabilitiesRequest = new GetCapabilitiesRequest(wcsRequest); wcsResponse = getCapabilities(getCapabilitiesRequest, serviceUrl); sendWcsResponse(wcsResponse,response); break; case WCS.DESCRIBE_COVERAGE: DescribeCoverageRequest wcsDCR = new 
DescribeCoverageRequest(wcsRequest); wcsResponse = describeCoverage(wcsDCR); sendWcsResponse(wcsResponse,response); break; case WCS.GET_COVERAGE: GetCoverageRequest getCoverageRequest = new GetCoverageRequest(wcsRequest); /* if (getCoverageRequest.isStore()) { wcsResponse = getStoredCoverage(getCoverageRequest); sendWcsResponse(wcsResponse,response); } else { sendCoverageResponse(getCoverageRequest, response); } */ break; default: throw new WcsException("The request document was invalid. " + "The root element was name: '" + wcsRequest.getName() + "' in namespace: '" + wcsRequest.getNamespace().getURI() + "'.", WcsException.MISSING_PARAMETER_VALUE, "wcs:GetCapabilities,wcs:DescribeCoverage,wcs:GetCoverage"); } } public void handleWcsError(WcsExceptionReport er, HttpServletResponse response) { XMLOutputter xmlo = new XMLOutputter(Format.getPrettyFormat()); try { ServletOutputStream os = response.getOutputStream(); xmlo.output(er.getReport(),os); } catch (IOException e) { log.error("FAILED to transmit WcsException to client. Message: ",e.getMessage()); } } public void sendWcsResponse(Document wcsResponse, HttpServletResponse response) throws WcsException { XMLOutputter xmlo = new XMLOutputter(Format.getPrettyFormat()); try { response.setContentType("text/xml"); ServletOutputStream os = response.getOutputStream(); xmlo.output(wcsResponse, os); } catch (IOException e) { throw new WcsException(e.getMessage(), WcsException.NO_APPLICABLE_CODE); } } public class NoOpEntityResolver implements EntityResolver { public InputSource resolveEntity(String publicId, String systemId) { return new InputSource(new StringReader("")); } } public Document parseWcsRequest(BufferedReader sis, String encoding) throws WcsException { String sb = ""; String reqDoc = ""; int length; while (sb != null) { try { sb = sis.readLine(); if (sb != null) { length = sb.length() + reqDoc.length(); if (length > WCS.MAX_REQUEST_LENGTH) { throw new WcsException("Post Body (WCS Request Document) too long. 
Try again with something smaller.", WcsException.INVALID_PARAMETER_VALUE, "WCS Request Document"); } reqDoc += sb; } } catch (IOException e) { throw new WcsException("Failed to read WCS Request Document. Mesg: " + e.getMessage(), WcsException.INVALID_PARAMETER_VALUE, "WCS Request Document"); } } try { reqDoc = URLDecoder.decode(reqDoc, encoding); } catch (UnsupportedEncodingException e) { throw new WcsException("Failed to URLDecode Wcs Request Document. Attempted with encoding '" + encoding + "' Message: " + e.getMessage(), WcsException.INVALID_PARAMETER_VALUE, "WCS Request Document."); } Document requestDoc; try { // Parse the XML doc into a Document object. SAXBuilder saxBuilder = new SAXBuilder(); // I added these next two bits to stop ENTITY resolution, // which is important for security reasons - ndp saxBuilder.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); saxBuilder.setEntityResolver(new NoOpEntityResolver()); ByteArrayInputStream baos = new ByteArrayInputStream(reqDoc.getBytes()); requestDoc = saxBuilder.build(baos); return requestDoc; } catch (Exception e) { throw new WcsException("Failed to parse WCS request. Message: " + e.getMessage(), WcsException.INVALID_PARAMETER_VALUE, "WCS Request Document"); } } public Document getCapabilities(GetCapabilitiesRequest wcsRequest, String serviceUrl) throws InterruptedException, WcsException { return CapabilitiesRequestProcessor.processGetCapabilitiesRequest(wcsRequest, serviceUrl); } public Document describeCoverage(DescribeCoverageRequest wcsRequest) throws InterruptedException, WcsException { return DescribeCoverageRequestProcessor.processDescribeCoveragesRequest(wcsRequest); } /** * * @param req A GetCoverageREquest object. * @param response The HttpServletResponse to which the coverage will be sent. * @throws WcsException When bad things happen. * @throws InterruptedException When it gets interrupted. 
*/ public void sendCoverageResponse(GetCoverageRequest req, HttpServletResponse response) throws InterruptedException, WcsException, IOException { CoverageRequestProcessor.sendCoverageResponse(req, response, false ); } public static int getRequestType(Element req) throws WcsException{ if(req == null){ throw new WcsException("Poorly formatted WCS request. Missing " + "root element of document.", WcsException.MISSING_PARAMETER_VALUE,"request"); } String name = req.getName(); if(name.equals("GetCapabilities")){ return WCS.GET_CAPABILITIES; } else if(name.equals("DescribeCoverage")){ return WCS.DESCRIBE_COVERAGE; } else if(name.equals("GetCoverage")){ return WCS.GET_COVERAGE; } else { throw new WcsException("The request document was invalid. " + "The root element was name: '"+name+"' in namespace: '"+req.getNamespace().getURI()+"'.", WcsException.MISSING_PARAMETER_VALUE,"wcs:GetCapabilities,wcs:DescribeCoverage,wcs:GetCoverage"); } } public String getEncoding(HttpServletRequest request){ String encoding = request.getCharacterEncoding(); if(encoding==null) encoding = "UTF-8"; return encoding; } public BufferedReader getRequestReader(HttpServletRequest request) throws WcsException { BufferedReader sis; try { sis = request.getReader(); } catch (IOException e) { throw new WcsException("Failed to retrieve WCS Request document input stream. Message: " + e.getMessage(), WcsException.INVALID_PARAMETER_VALUE, "WCS Request Document"); } return sis; } }
package org.antlr.stringtemplate;

import java.io.*;
import java.util.*;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import org.antlr.stringtemplate.language.*;
import antlr.*;
import antlr.collections.AST;
import antlr.collections.ASTEnumeration;

/** A <TT>StringTemplate</TT> is a "document" with holes in it where you can stick
 *  values. <TT>StringTemplate</TT> breaks up your template into chunks of text and
 *  attribute expressions.  <TT>StringTemplate</TT> ignores everything outside
 *  of attribute expressions, treating it as just text to spit
 *  out when you call <TT>StringTemplate.toString()</TT>.
 */
public class StringTemplate {
	public static final String VERSION = "3.1b1";

	// Region definition kinds; see regionDefType below.
	public static final int REGION_IMPLICIT = 1;
	/** <@r>...<@end> */
	public static final int REGION_EMBEDDED = 2;
	/** @t.r() ::= "..." defined manually by coder */
	public static final int REGION_EXPLICIT = 3;

	/** An automatically created aggregate of properties.
	 *
	 *  I often have lists of things that need to be formatted, but the list
	 *  items are actually pieces of data that are not already in an object.  I
	 *  need ST to do something like:
	 *
	 *  Ter=3432
	 *  Tom=32234
	 *  ....
	 *
	 *  using template:
	 *
	 *  $items:{$attr.name$=$attr.type$}$
	 *
	 *  This example will call getName() on the objects in items attribute, but
	 *  what if they aren't objects?  I have perhaps two parallel arrays
	 *  instead of a single array of objects containing two fields.  One
	 *  solution is allow Maps to be handled like properties so that it.name
	 *  would fail getName() but then see that it's a Map and do
	 *  it.get("name") instead.
	 *
	 *  This very clean approach is espoused by some, but the problem is that
	 *  it's a hole in my separation rules.  People can put the logic in the
	 *  view because you could say: "go get bob's data" in the view:
	 *
	 *  Bob's Phone: $db.bob.phone$
	 *
	 *  A view should not be part of the program and hence should never be able
	 *  to go ask for a specific person's data.
	 *
	 *  After much thought, I finally decided on a simple solution.  I've
	 *  added setAttribute variants that pass in multiple property values,
	 *  with the property names specified as part of the name using a special
	 *  attribute name syntax: "name.{propName1,propName2,...}".  This
	 *  object is a special kind of HashMap that hopefully prevents people
	 *  from passing a subclass or other variant that they have created as
	 *  it would be a loophole.  Anyway, the ASTExpr.getObjectProperty()
	 *  method looks for Aggregate as a special case and does a get() instead
	 *  of getPropertyName.
	 */
	public static final class Aggregate {
		protected HashMap properties = new HashMap();

		/** Allow StringTemplate to add values, but prevent the end
		 *  user from doing so.
		 */
		protected void put(String propName, Object propValue) {
			properties.put(propName, propValue);
		}

		public Object get(String propName) {
			return properties.get(propName);
		}

		public String toString() {
			return properties.toString();
		}
	}

	/** Just an alias for ArrayList, but this way I can track whether a
	 *  list is something ST created or it's an incoming list.
	 */
	public static final class STAttributeList extends ArrayList {
		public STAttributeList(int size) {
			super(size);
		}
		public STAttributeList() {
			super();
		}
	}

	public static final String ANONYMOUS_ST_NAME = "anonymous";

	/** track probable issues like setting attribute that is not referenced. */
	static boolean lintMode = false;

	// Populated only in lint mode; see checkForTrouble()/trackAttributeReference().
	protected List referencedAttributes = null;

	/** What's the name of this template? */
	protected String name = ANONYMOUS_ST_NAME;

	private static int templateCounter=0;

	// Synchronized so concurrently created templates get unique IDs.
	private static synchronized int getNextTemplateCounter() {
		templateCounter++;
		return templateCounter;
	}

	/** reset the template ID counter to 0; public so that testing routine
	 *  can access but not really of interest to the user.
	 */
	public static void resetTemplateCounter() {
		templateCounter = 0;
	}

	// Unique per-instance ID assigned at construction time.
	protected int templateID = getNextTemplateCounter();

	/** Enclosing instance if I'm embedded within another template.
	 *  IF-subtemplates are considered embedded as well.
	 */
	protected StringTemplate enclosingInstance = null;

	/** A list of embedded templates */
	protected List embeddedInstances = null;

	/** If this template is an embedded template such as when you apply
	 *  a template to an attribute, then the arguments passed to this
	 *  template represent the argument context--a set of values
	 *  computed by walking the argument assignment list.  For example,
	 *  <name:bold(item=name, foo="x")> would result in an
	 *  argument context of {[item=name], [foo="x"]} for this
	 *  template.  This template would be the bold() template and
	 *  the enclosingInstance would point at the template that held
	 *  that <name:bold(...)> template call.  When you want to get
	 *  an attribute value, you first check the attributes for the
	 *  'self' template then the arg context then the enclosingInstance
	 *  like resolving variables in pascal-like language with nested
	 *  procedures.
	 *
	 *  With multi-valued attributes such as <faqList:briefFAQDisplay()>
	 *  attribute "i" is set to 1..n.
	 */
	protected Map argumentContext = null;

	/** If this template is embedded in another template, the arguments
	 *  must be evaluated just before each application when applying
	 *  template to a list of values.  The "it" attribute must change
	 *  with each application so that $names:bold(item=it)$ works.  If
	 *  you evaluate once before starting the application loop then it
	 *  has a single fixed value.  Eval.g saves the AST rather than evaluating
	 *  before invoking applyListOfAlternatingTemplates().  Each iteration
	 *  of a template application to a multi-valued attribute, these args
	 *  are re-evaluated with an initial context of {[it=...], [i=...]}.
	 */
	protected StringTemplateAST argumentsAST = null;

	/** When templates are defined in a group file format, the attribute
	 *  list is provided including information about attribute cardinality
	 *  such as present, optional, ...  When this information is available,
	 *  rawSetAttribute should do a quick existence check as should the
	 *  invocation of other templates.  So if you ref bold(item="foo") but
	 *  item is not defined in bold(), then an exception should be thrown.
	 *  When actually rendering the template, the cardinality is checked.
	 *  This is a Map<String,FormalArgument>.
	 */
	protected LinkedHashMap formalArguments = FormalArgument.UNKNOWN;

	/** How many formal arguments to this template have default values
	 *  specified?
	 */
	protected int numberOfDefaultArgumentValues = 0;

	/** Normally, formal parameters hide any attributes inherited from the
	 *  enclosing template with the same name.  This is normally what you
	 *  want, but makes it hard to invoke another template passing in all
	 *  the data.  Use notation now: <otherTemplate(...)> to say "pass in
	 *  all data".  Works great.  Can also say <otherTemplate(foo="xxx",...)>
	 */
	protected boolean passThroughAttributes = false;

	/** What group originally defined the prototype for this template?
	 *  This affects the set of templates I can refer to.  super.t() must
	 *  always refer to the super of the original group.
	 *
	 *  group base;
	 *  t ::= "base";
	 *
	 *  group sub;
	 *  t ::= "super.t()2"
	 *
	 *  group subsub;
	 *  t ::= "super.t()3"
	 */
	protected StringTemplateGroup nativeGroup;

	/** This template was created as part of what group?  Even if this
	 *  template was created from a prototype in a supergroup, its group
	 *  will be the subgroup.  That's the way polymorphism works.
	 */
	protected StringTemplateGroup group;

	/** If this template is defined within a group file, what line number?
	 */
	protected int groupFileLine;

	/** Where to report errors */
	StringTemplateErrorListener listener = null;

	/** The original, immutable pattern/language (not really used again after
	 *  initial "compilation", setup/parsing).
	 */
	protected String pattern;

	/** Map an attribute name to its value(s).  These values are set by outside
	 *  code via st.setAttribute(name, value).  StringTemplate is like self in
	 *  that a template is both the "class def" and "instance".  When you
	 *  create a StringTemplate or setTemplate, the text is broken up into chunks
	 *  (i.e., compiled down into a series of chunks that can be evaluated later).
	 *  You can have multiple
	 */
	protected Map attributes;

	/** A Map<Class,Object> that allows people to register a renderer for
	 *  a particular kind of object to be displayed in this template.  This
	 *  overrides any renderer set for this template's group.
	 *
	 *  Most of the time this map is not used because the StringTemplateGroup
	 *  has the general renderer map for all templates in that group.
	 *  Sometimes though you want to override the group's renderers.
	 */
	protected Map attributeRenderers;

	/** A list of alternating string and ASTExpr references.
	 *  This is compiled to when the template is loaded/defined and walked to
	 *  write out a template instance.
	 */
	protected List chunks;

	/** If someone refs <@r()> in template t, an implicit
	 *
	 *   @t.r() ::= ""
	 *
	 *  is defined, but you can overwrite this def by defining your
	 *  own.  We need to prevent more than one manual def though.  Between
	 *  this var and isEmbeddedRegion we can determine these cases.
	 */
	protected int regionDefType;

	/** Does this template come from a <@region>...<@end> embedded in
	 *  another template?
	 */
	protected boolean isRegion;

	/** Set of implicit and embedded regions for this template */
	protected Set regions;

	// Fallback group used when a template is created without one.
	public static StringTemplateGroup defaultGroup =
		new StringTemplateGroup("defaultGroup", ".");

	/** Create a blank template with no pattern and no attributes */
	public StringTemplate() {
		group = defaultGroup; // make sure has a group even if default
	}

	/** Create an anonymous template.  It has no name just
	 *  chunks (which point to this anonymous template) and attributes.
	 */
	public StringTemplate(String template) {
		this(null, template);
	}

	public StringTemplate(String template, Class lexer) {
		this();
		setGroup(new StringTemplateGroup("defaultGroup", lexer));
		setTemplate(template);
	}

	/** Create an anonymous template with no name, but with a group */
	public StringTemplate(StringTemplateGroup group, String template) {
		this();
		if ( group!=null ) {
			setGroup(group);
		}
		setTemplate(template);
	}

	public StringTemplate(StringTemplateGroup group,
						  String template,
						  HashMap attributes)
	{
		this(group,template);
		this.attributes = attributes;
	}

	/** Make the 'to' template look exactly like the 'from' template
	 *  except for the attributes.  This is like creating an instance
	 *  of a class in that the executable code is the same (the template
	 *  chunks), but the instance data is blank (the attributes).  Do
	 *  not copy the enclosingInstance pointer since you will want this
	 *  template to eval in a context different from the examplar.
	 */
	protected void dup(StringTemplate from, StringTemplate to) {
		// Shallow copy: 'to' shares the compiled chunks, formal args, groups,
		// listener, and region metadata of 'from'; attributes are NOT copied.
		to.attributeRenderers = from.attributeRenderers;
		to.pattern = from.pattern;
		to.chunks = from.chunks;
		to.formalArguments = from.formalArguments;
		to.numberOfDefaultArgumentValues = from.numberOfDefaultArgumentValues;
		to.name = from.name;
		to.group = from.group;
		to.nativeGroup = from.nativeGroup;
		to.listener = from.listener;
		to.regions = from.regions;
		to.isRegion = from.isRegion;
		to.regionDefType = from.regionDefType;
	}

	/** Make an instance of this template; it contains an exact copy of
	 *  everything (except the attributes and enclosing instance pointer).
	 *  So the new template refers to the previously compiled chunks of this
	 *  template but does not have any attribute values.
	 */
	public StringTemplate getInstanceOf() {
		StringTemplate t = null;
		if ( nativeGroup!=null ) {
			// create a template using the native group for this template
			// but it's "group" is set to this.group by dup after creation so
			// polymorphism still works.
			t = nativeGroup.createStringTemplate();
		}
		else {
			t = group.createStringTemplate();
		}
		dup(this, t);
		return t;
	}

	public StringTemplate getEnclosingInstance() {
		return enclosingInstance;
	}

	// Walks the enclosing-instance chain to its root.
	public StringTemplate getOutermostEnclosingInstance() {
		if ( enclosingInstance!=null ) {
			return enclosingInstance.getOutermostEnclosingInstance();
		}
		return this;
	}

	public void setEnclosingInstance(StringTemplate enclosingInstance) {
		// Direct self-embedding is rejected (note: longer cycles are not
		// detected here).
		if ( this==enclosingInstance ) {
			throw new IllegalArgumentException("cannot embed template "+getName()+" in itself");
		}
		// set the parent for this template
		this.enclosingInstance = enclosingInstance;
		// make the parent track this template as an embedded template
		if ( enclosingInstance!=null ) {
			this.enclosingInstance.addEmbeddedInstance(this);
		}
	}

	public void addEmbeddedInstance(StringTemplate embeddedInstance) {
		// Lazily created; most templates embed nothing.
		if ( this.embeddedInstances==null ) {
			this.embeddedInstances = new LinkedList();
		}
		this.embeddedInstances.add(embeddedInstance);
	}

	public Map getArgumentContext() {
		return argumentContext;
	}

	public void setArgumentContext(Map ac) {
		argumentContext = ac;
	}

	public StringTemplateAST getArgumentsAST() {
		return argumentsAST;
	}

	public void setArgumentsAST(StringTemplateAST argumentsAST) {
		this.argumentsAST = argumentsAST;
	}

	public String getName() {
		return name;
	}

	// Name of the root template in the enclosing-instance chain.
	public String getOutermostName() {
		if ( enclosingInstance!=null ) {
			return enclosingInstance.getOutermostName();
		}
		return getName();
	}

	public void setName(String name) {
		this.name = name;
	}

	public StringTemplateGroup getGroup() {
		return group;
	}

	public void setGroup(StringTemplateGroup group) {
		this.group = group;
	}

	public StringTemplateGroup getNativeGroup() {
		return nativeGroup;
	}

	public void setNativeGroup(StringTemplateGroup nativeGroup) {
		this.nativeGroup = nativeGroup;
	}

	/** Return the outermost template's group file line number */
	public int getGroupFileLine() {
		if ( enclosingInstance!=null ) {
			return enclosingInstance.getGroupFileLine();
		}
		return groupFileLine;
	}

	public void setGroupFileLine(int groupFileLine) {
		this.groupFileLine = groupFileLine;
	}

	// Setting the pattern immediately "compiles" it into chunks.
	public void setTemplate(String template) {
		this.pattern = template;
		breakTemplateIntoChunks();
	}

	public String getTemplate() {
		return pattern;
	}

	public void setErrorListener(StringTemplateErrorListener listener) {
		this.listener = listener;
	}

	// Falls back to the group's listener when none is set on this template.
	public StringTemplateErrorListener getErrorListener() {
		if ( listener==null ) {
			return group.getErrorListener();
		}
		return listener;
	}

	public void reset() {
		attributes = new HashMap(); // just throw out table and make new one
	}

	public void setPredefinedAttributes() {
		if ( !inLintMode() ) {
			return; // only do this method so far in lint mode
		}
	}

	public void removeAttribute(String name) {
		attributes.remove(name);
	}

	/** Set an attribute for this template.  If you set the same
	 *  attribute more than once, you get a multi-valued attribute.
	 *  If you send in a StringTemplate object as a value, it's
	 *  enclosing instance (where it will inherit values from) is
	 *  set to 'this'.  This would be the normal case, though you
	 *  can set it back to null after this call if you want.
	 *  If you send in a List plus other values to the same
	 *  attribute, they all get flattened into one List of values.
	 *  This will be a new list object so that incoming objects are
	 *  not altered.
	 *  If you send in an array, it is converted to an ArrayIterator.
	 */
	public void setAttribute(String name, Object value) {
		if ( value==null || name==null ) {
			return;
		}
		if ( name.indexOf('.')>=0 ) {
			throw new IllegalArgumentException("cannot have '.' in attribute names");
		}
		if ( attributes==null ) {
			attributes = new HashMap();
		}
		if ( value instanceof StringTemplate ) {
			((StringTemplate)value).setEnclosingInstance(this);
		}
		else {
			// convert value if array
			value = ASTExpr.convertArrayToList(value);
		}
		// convert plain collections
		// get exactly in this scope (no enclosing)
		Object o = this.attributes.get(name);
		if ( o==null ) { // new attribute
			rawSetAttribute(this.attributes, name, value);
			return;
		}
		// it will be a multi-value attribute
		//System.out.println("exists: "+name+"="+o);
		STAttributeList v = null;
		if ( o.getClass() == STAttributeList.class ) { // already a list made by ST
			v = (STAttributeList)o;
		}
		else if ( o instanceof List ) { // existing attribute is non-ST List
			// must copy to an ST-managed list before adding new attribute
			List listAttr = (List)o;
			v = new STAttributeList(listAttr.size());
			v.addAll(listAttr);
			rawSetAttribute(this.attributes, name, v); // replace attribute w/list
		}
		else {
			// non-list second attribute, must convert existing to ArrayList
			v = new STAttributeList(); // make list to hold multiple values
			// make it point to list now
			rawSetAttribute(this.attributes, name, v); // replace attribute w/list
			v.add(o);  // add previous single-valued attribute
		}
		if ( value instanceof List ) {
			// flatten incoming list into existing
			if ( v!=value ) { // avoid weird cyclic add
				v.addAll((List)value);
			}
		}
		else {
			v.add(value);
		}
	}

	/** Convenience method to box ints */
	public void setAttribute(String name, int value) {
		setAttribute(name, new Integer(value));
	}

	/** Set an aggregate attribute with two values.  The attribute name
	 *  must have the format: "name.{propName1,propName2}".
	 */
	public void setAttribute(String aggrSpec, Object v1, Object v2) {
		setAttribute(aggrSpec, new Object[] {v1,v2});
	}

	public void setAttribute(String aggrSpec, Object v1, Object v2, Object v3) {
		setAttribute(aggrSpec, new Object[] {v1,v2,v3});
	}

	public void setAttribute(String aggrSpec, Object v1, Object v2, Object v3, Object v4) {
		setAttribute(aggrSpec, new Object[] {v1,v2,v3,v4});
	}

	public void setAttribute(String aggrSpec, Object v1, Object v2, Object v3, Object v4, Object v5) {
		setAttribute(aggrSpec, new Object[] {v1,v2,v3,v4,v5});
	}

	/** Create an aggregate from the list of properties in aggrSpec and fill
	 *  with values from values array.  This is not publically visible because
	 *  it conflicts semantically with setAttribute("foo",new Object[] {...});
	 */
	protected void setAttribute(String aggrSpec, Object[] values) {
		List properties = new ArrayList();
		String aggrName = parseAggregateAttributeSpec(aggrSpec, properties);
		if ( values==null || properties.size()==0 ) {
			throw new IllegalArgumentException("missing properties or values for '"+aggrSpec+"'");
		}
		if ( values.length != properties.size() ) {
			throw new IllegalArgumentException("number of properties in '"+aggrSpec+"' != number of values");
		}
		Aggregate aggr = new Aggregate();
		for (int i = 0; i < values.length; i++) {
			Object value = values[i];
			if ( value instanceof StringTemplate ) {
				((StringTemplate)value).setEnclosingInstance(this);
			}
			else {
				value = ASTExpr.convertArrayToList(value);
			}
			aggr.put((String)properties.get(i), value);
		}
		setAttribute(aggrName, aggr);
	}

	/** Split "aggrName.{propName1,propName2}" into list [propName1,propName2]
	 *  and the aggrName.  Space is allowed around ','.
	 */
	protected String parseAggregateAttributeSpec(String aggrSpec, List properties) {
		int dot = aggrSpec.indexOf('.');
		if ( dot<=0 ) {
			throw new IllegalArgumentException("invalid aggregate attribute format: "+
				aggrSpec);
		}
		String aggrName = aggrSpec.substring(0, dot);
		String propString = aggrSpec.substring(dot+1, aggrSpec.length());
		boolean error = true;
		// Hand-rolled scan for "{p1, p2, ...}"; 'error' is cleared only when
		// the closing '}' is reached via well-formed comma-separated names.
		StringTokenizer tokenizer = new StringTokenizer(propString, "{,}", true);
		match:
		if ( tokenizer.hasMoreTokens() ) {
			String token = tokenizer.nextToken(); // advance to {
			token = token.trim();
			if ( token.equals("{") ) {
				token = tokenizer.nextToken();    // advance to first prop name
				token = token.trim();
				properties.add(token);
				token = tokenizer.nextToken();    // advance to a comma
				token = token.trim();
				while ( token.equals(",") ) {
					token = tokenizer.nextToken();    // advance to a prop name
					token = token.trim();
					properties.add(token);
					token = tokenizer.nextToken();    // advance to a "," or "}"
					token = token.trim();
				}
				if ( token.equals("}") ) {
					error = false;
				}
			}
		}
		if ( error ) {
			throw new IllegalArgumentException("invalid aggregate attribute format: "+
				aggrSpec);
		}
		return aggrName;
	}

	/** Map a value to a named attribute.  Throw NoSuchElementException if
	 *  the named attribute is not formally defined in self's specific template
	 *  and a formal argument list exists.
	 */
	protected void rawSetAttribute(Map attributes,
								   String name,
								   Object value)
	{
		if ( formalArguments!=FormalArgument.UNKNOWN &&
			getFormalArgument(name)==null )
		{
			// a normal call to setAttribute with unknown attribute
			throw new NoSuchElementException("no such attribute: "+name+
											 " in template context "+
											 getEnclosingInstanceStackString());
		}
		if ( value == null ) {
			return;
		}
		attributes.put(name, value);
	}

	/** Argument evaluation such as foo(x=y), x must
	 *  be checked against foo's argument list not this's (which is
	 *  the enclosing context).  So far, only eval.g uses arg self as
	 *  something other than "this".
	 */
	public void rawSetArgumentAttribute(StringTemplate embedded,
										Map attributes,
										String name,
										Object value)
	{
		if ( embedded.formalArguments!=FormalArgument.UNKNOWN &&
			 embedded.getFormalArgument(name)==null )
		{
			throw new NoSuchElementException("template "+embedded.getName()+
											 " has no such attribute: "+name+
											 " in template context "+
											 getEnclosingInstanceStackString());
		}
		if ( value == null ) {
			return;
		}
		attributes.put(name, value);
	}

	public Object getAttribute(String name) {
		return get(this,name);
	}

	/** Walk the chunks, asking them to write themselves out according
	 *  to attribute values of 'this.attributes'.  This is like evaluating or
	 *  interpreting the StringTemplate as a program using the
	 *  attributes.  The chunks will be identical (point at same list)
	 *  for all instances of this template.
	 */
	public int write(StringTemplateWriter out) throws IOException {
		if ( group.debugTemplateOutput ) {
			group.emitTemplateStartDebugString(this,out);
		}
		int n = 0;
		setPredefinedAttributes();
		setDefaultArgumentValues();
		for (int i=0; chunks!=null && i<chunks.size(); i++) {
			Expr a = (Expr)chunks.get(i);
			int chunkN = a.write(this, out);
			// expr-on-first-line-with-no-output NEWLINE => NEWLINE
			if ( chunkN==0 && i==0 && (i+1)<chunks.size() &&
				 chunks.get(i+1) instanceof NewlineRef )
			{
				//System.out.println("found pure first-line-blank \\n pattern");
				i++; // skip next NEWLINE;
				continue;
			}
			// NEWLINE expr-with-no-output NEWLINE => NEWLINE
			// Indented $...$ have the indent stored with the ASTExpr
			// so the indent does not come out as a StringRef
			if ( chunkN==0 && (i-1)>=0 && chunks.get(i-1) instanceof NewlineRef &&
				 (i+1)<chunks.size() && chunks.get(i+1) instanceof NewlineRef )
			{
				//System.out.println("found pure \\n blank \\n pattern");
				i++; // make it skip over the next chunk, the NEWLINE
			}
			n += chunkN;
		}
		if ( group.debugTemplateOutput ) {
			group.emitTemplateStopDebugString(this,out);
		}
		if ( lintMode ) {
			checkForTrouble();
		}
		return n;
	}

	/** Resolve an attribute reference.
     *  It can be in four possible places:
     *
     *  1. the attribute list for the current template
     *  2. if self is an embedded template, somebody invoked us possibly
     *     with arguments--check the argument context
     *  3. if self is an embedded template, the attribute list for the enclosing
     *     instance (recursively up the enclosing instance chain)
     *  4. if nothing is found in the enclosing instance chain, then it might
     *     be a map defined in the group or its supergroup etc...
     *
     *  Attribute references are checked for validity.  If an attribute has
     *  a value, its validity was checked before template rendering.
     *  If the attribute has no value, then we must check to ensure it is a
     *  valid reference.  Somebody could reference any random value like $xyz$;
     *  formal arg checks before rendering cannot detect this--only the ref
     *  can initiate a validity check.  So, if no value, walk up the enclosed
     *  template tree again, this time checking formal parameters not
     *  attributes Map.  The formal definition must exist even if no value.
     *
     *  To avoid infinite recursion in toString(), we have another condition
     *  to check regarding attribute values.  If your template has a formal
     *  argument, foo, then foo will hide any value available from "above"
     *  in order to prevent infinite recursion.
     *
     *  This method is not static so people can override functionality.
     */
    public Object get(StringTemplate self, String attribute) {
        //System.out.println("### get("+self.getEnclosingInstanceStackString()+", "+attribute+")");
        //System.out.println("attributes="+(self.attributes!=null?self.attributes.keySet().toString():"none"));
        if ( self==null ) {
            return null;
        }
        if ( lintMode ) {
            self.trackAttributeReference(attribute); // record ref for checkForTrouble()
        }
        // is it here?
        Object o = null;
        if ( self.attributes!=null ) {
            o = self.attributes.get(attribute);
        }
        // nope, check argument context in case embedded
        if ( o==null ) {
            Map argContext = self.getArgumentContext();
            if ( argContext!=null ) {
                o = argContext.get(attribute);
            }
        }
        if ( o==null &&
             !self.passThroughAttributes &&
             self.getFormalArgument(attribute)!=null ) {
            // if you've defined attribute as formal arg for this
            // template and it has no value, do not look up the
            // enclosing dynamic scopes.  This avoids potential infinite
            // recursion.
            return null;
        }
        // not locally defined, check enclosingInstance if embedded
        if ( o==null && self.enclosingInstance!=null ) {
            /*
            System.out.println("looking for "+getName()+"."+attribute+" in super="+
                enclosingInstance.getName());
            */
            Object valueFromEnclosing = get(self.enclosingInstance, attribute);
            if ( valueFromEnclosing==null ) {
                // may throw NoSuchElementException for invalid references
                checkNullAttributeAgainstFormalArguments(self, attribute);
            }
            o = valueFromEnclosing;
        }
        // not found and no enclosing instance to look at
        else if ( o==null && self.enclosingInstance==null ) {
            // It might be a map in the group or supergroup...
            // note: group maps are only consulted at the outermost instance
            o = self.group.getMap(attribute);
        }
        return o;
    }

    /** Walk a template, breaking it into a list of
     *  chunks: Strings and actions/expressions.
     */
    protected void breakTemplateIntoChunks() {
        //System.out.println("parsing template: "+pattern);
        if ( pattern==null ) {
            return;
        }
        try {
            // instead of creating a specific template lexer, use
            // an instance of the class specified by the user.
            // The default is DefaultTemplateLexer.
            // The only constraint is that you use an ANTLR lexer
            // so I can use the special ChunkToken.
            Class lexerClass = group.getTemplateLexerClass();
            Constructor ctor =
                lexerClass.getConstructor(
                    new Class[] {StringTemplate.class,Reader.class}
                );
            CharScanner chunkStream =
                (CharScanner) ctor.newInstance(
                    new Object[] {this,new StringReader(pattern)}
                );
            chunkStream.setTokenObjectClass("org.antlr.stringtemplate.language.ChunkToken");
            TemplateParser chunkifier = new TemplateParser(chunkStream);
            chunkifier.template(this); // side effect: populates this.chunks
            //System.out.println("chunks="+chunks);
        }
        catch (Exception e) {
            // report with as much naming context as is available
            String name = "<unknown>";
            String outerName = getOutermostName();
            if ( getName()!=null ) {
                name = getName();
            }
            if ( outerName!=null && !name.equals(outerName) ) {
                name = name+" nested in "+outerName;
            }
            error("problem parsing template '"+name+"'", e);
        }
    }

    /** Parse a single $...$ action chunk into an expression AST; returns
     *  null (after reporting via error()) if the action cannot be parsed.
     */
    public ASTExpr parseAction(String action) {
        //System.out.println("parse action "+action);
        ActionLexer lexer =
            new ActionLexer(new StringReader(action.toString()));
        ActionParser parser = new ActionParser(lexer, this);
        parser.setASTNodeClass("org.antlr.stringtemplate.language.StringTemplateAST");
        lexer.setTokenObjectClass("org.antlr.stringtemplate.language.StringTemplateToken");
        ASTExpr a = null;
        try {
            Map options = parser.action();
            AST tree = parser.getAST();
            if ( tree!=null ) {
                if ( tree.getType()==ActionParser.CONDITIONAL ) {
                    a = new ConditionalExpr(this,tree); // $if(...)$ chunk
                }
                else {
                    a = new ASTExpr(this,tree,options);
                }
            }
        }
        catch (RecognitionException re) {
            error("Can't parse chunk: "+action.toString(), re);
        }
        catch (TokenStreamException tse) {
            error("Can't parse chunk: "+action.toString(), tse);
        }
        return a;
    }

    public int getTemplateID() {
        return templateID;
    }

    public Map getAttributes() {
        return attributes;
    }

    /** Get a list of the strings and subtemplates and attribute
     *  refs in a template.
*/
    public List getChunks() {
        return chunks;
    }

    /** Append a parsed chunk; the chunk list is created lazily. */
    public void addChunk(Expr e) {
        if ( chunks==null ) {
            chunks = new ArrayList();
        }
        chunks.add(e);
    }

    public void setAttributes(Map attributes) {
        this.attributes = attributes;
    }

    // F o r m a l  A r g  S t u f f

    public Map getFormalArguments() {
        return formalArguments;
    }

    public void setFormalArguments(LinkedHashMap args) {
        formalArguments = args;
    }

    /** Set any default argument values that were not set by the
     *  invoking template or by setAttribute directly.  Note
     *  that the default values may be templates.  Their evaluation
     *  context is the template itself and, hence, can see attributes
     *  within the template, any arguments, and any values inherited
     *  by the template.
     *
     *  Default values are stored in the argument context rather than
     *  the template attributes table just for consistency's sake.
     */
    public void setDefaultArgumentValues() {
        if ( numberOfDefaultArgumentValues==0 ) {
            return; // nothing to do; avoids allocating an argument context
        }
        if ( argumentContext==null ) {
            argumentContext = new HashMap();
        }
        if ( formalArguments!=FormalArgument.UNKNOWN ) {
            Set argNames = formalArguments.keySet();
            for (Iterator it = argNames.iterator(); it.hasNext();) {
                String argName = (String) it.next();
                // use the default value then
                FormalArgument arg =
                    (FormalArgument)formalArguments.get(argName);
                if ( arg.defaultValueST!=null ) {
                    Object existingValue = getAttribute(argName);
                    if ( existingValue==null ) { // value unset?
                        // if no value for attribute, set arg context
                        // to the default value.  We don't need an instance
                        // here because no attributes can be set in
                        // the arg templates by the user.
                        argumentContext.put(argName, arg.defaultValueST);
                    }
                }
            }
        }
    }

    /** From this template upward in the enclosing template tree,
     *  recursively look for the formal parameter.
     */
    public FormalArgument lookupFormalArgument(String name) {
        FormalArgument arg = getFormalArgument(name);
        if ( arg==null && enclosingInstance!=null ) {
            arg = enclosingInstance.lookupFormalArgument(name);
        }
        return arg;
    }

    public FormalArgument getFormalArgument(String name) {
        return (FormalArgument)formalArguments.get(name);
    }

    public void defineEmptyFormalArgumentList() {
        setFormalArguments(new LinkedHashMap());
    }

    public void defineFormalArgument(String name) {
        defineFormalArgument(name,null);
    }

    public void defineFormalArguments(List names) {
        if ( names==null ) {
            return;
        }
        for (int i = 0; i < names.size(); i++) {
            String name = (String) names.get(i);
            defineFormalArgument(name);
        }
    }

    public void defineFormalArgument(String name, StringTemplate defaultValue) {
        if ( defaultValue!=null ) {
            numberOfDefaultArgumentValues++;
        }
        FormalArgument a = new FormalArgument(name,defaultValue);
        if ( formalArguments==FormalArgument.UNKNOWN ) {
            // replace the "unknown" sentinel with a real arg list
            formalArguments = new LinkedHashMap();
        }
        formalArguments.put(name, a);
    }

    /** Normally if you call template y from x, y cannot see any attributes
     *  of x that are defined as formal parameters of y.  Setting this
     *  passThroughAttributes to true, will override that and allow a
     *  template to see through the formal arg list to inherited values.
     */
    public void setPassThroughAttributes(boolean passThroughAttributes) {
        this.passThroughAttributes = passThroughAttributes;
    }

    /** Specify a complete map of what object classes should map to which
     *  renderer objects.
     */
    public void setAttributeRenderers(Map renderers) {
        this.attributeRenderers = renderers;
    }

    /** Register a renderer for all objects of a particular type.  This
     *  overrides any renderer set in the group for this class type.
     */
    public void registerRenderer(Class attributeClassType, AttributeRenderer renderer) {
        if ( attributeRenderers==null ) {
            attributeRenderers = new HashMap();
        }
        attributeRenderers.put(attributeClassType, renderer);
    }

    /** What renderer is registered for this attributeClassType for
     *  this template.  If not found, the template's group is queried.
     */
    public AttributeRenderer getAttributeRenderer(Class attributeClassType) {
        AttributeRenderer renderer = null;
        if ( attributeRenderers!=null ) {
            renderer = (AttributeRenderer)attributeRenderers.get(attributeClassType);
        }
        if ( renderer!=null ) {
            // found it!
            return renderer;
        }
        // we have no renderer overrides for the template or none for class arg
        // check parent template if we are embedded
        if ( enclosingInstance!=null ) {
            return enclosingInstance.getAttributeRenderer(attributeClassType);
        }
        // else check group
        return group.getAttributeRenderer(attributeClassType);
    }

    // U T I L I T Y  R O U T I N E S

    public void error(String msg) {
        error(msg, null);
    }

    /** Route a warning to the error listener, or stderr if none is set. */
    public void warning(String msg) {
        if ( getErrorListener()!=null ) {
            getErrorListener().warning(msg);
        }
        else {
            System.err.println("StringTemplate: warning: "+msg);
        }
    }

    /** Route an error (with optional cause) to the error listener, or
     *  stderr if none is set; InvocationTargetExceptions are unwrapped
     *  so the stack trace shows the real cause.
     */
    public void error(String msg, Throwable e) {
        if ( getErrorListener()!=null ) {
            getErrorListener().error(msg,e);
        }
        else {
            if ( e!=null ) {
                System.err.println("StringTemplate: error: "+msg+": "+e.toString());
                if ( e instanceof InvocationTargetException ) {
                    e = ((InvocationTargetException)e).getTargetException();
                }
                e.printStackTrace(System.err);
            }
            else {
                System.err.println("StringTemplate: error: "+msg);
            }
        }
    }

    /** Make StringTemplate check your work as it evaluates templates.
     *  Problems are sent to error listener.   Currently warns when
     *  you set attributes that are not used.
     */
    public static void setLintMode(boolean lint) {
        StringTemplate.lintMode = lint;
    }

    public static boolean inLintMode() {
        return lintMode;
    }

    /** Indicates that 'name' has been referenced in this template.
*/
    protected void trackAttributeReference(String name) {
        if ( referencedAttributes==null ) {
            referencedAttributes = new ArrayList();
        }
        referencedAttributes.add(name);
    }

    /** Look up the enclosing instance chain (and include this) to see
     *  if st is a template already in the enclosing instance chain.
     */
    public static boolean isRecursiveEnclosingInstance(StringTemplate st) {
        if ( st==null ) {
            return false;
        }
        StringTemplate p = st.enclosingInstance;
        if ( p==st ) {
            return true; // self-recursive
        }
        // now look for indirect recursion
        while ( p!=null ) {
            if ( p==st ) {
                return true;
            }
            p = p.enclosingInstance;
        }
        return false;
    }

    /** Dump this instance and every enclosing instance (one per line) with
     *  their attributes, for debugging; cycles are detected and truncated.
     */
    public String getEnclosingInstanceStackTrace() {
        StringBuffer buf = new StringBuffer();
        Set seen = new HashSet();
        StringTemplate p = this;
        while ( p!=null ) {
            if ( seen.contains(p) ) {
                // already visited: enclosing-instance cycle; stop here
                buf.append(p.getTemplateDeclaratorString());
                buf.append(" (start of recursive cycle)");
                buf.append("\n");
                buf.append("...");
                break;
            }
            seen.add(p);
            buf.append(p.getTemplateDeclaratorString());
            if ( p.attributes!=null ) {
                buf.append(", attributes=[");
                int i = 0;
                for (Iterator iter = p.attributes.keySet().iterator(); iter.hasNext();) {
                    String attrName = (String) iter.next();
                    if ( i>0 ) {
                        buf.append(", ");
                    }
                    i++;
                    buf.append(attrName);
                    Object o = p.attributes.get(attrName);
                    if ( o instanceof StringTemplate ) {
                        // render nested templates as <name()@id>
                        StringTemplate st = (StringTemplate)o;
                        buf.append("=");
                        buf.append("<");
                        buf.append(st.getName());
                        buf.append("()@");
                        buf.append(String.valueOf(st.getTemplateID()));
                        buf.append(">");
                    }
                    else if ( o instanceof List ) {
                        // only the template elements of a list are shown
                        buf.append("=List[..");
                        List list = (List)o;
                        int n=0;
                        for (int j = 0; j < list.size(); j++) {
                            Object listValue = list.get(j);
                            if ( listValue instanceof StringTemplate ) {
                                if ( n>0 ) {
                                    buf.append(", ");
                                }
                                n++;
                                StringTemplate st = (StringTemplate)listValue;
                                buf.append("<");
                                buf.append(st.getName());
                                buf.append("()@");
                                buf.append(String.valueOf(st.getTemplateID()));
                                buf.append(">");
                            }
                        }
                        buf.append("..]");
                    }
                }
                buf.append("]");
            }
            if ( p.referencedAttributes!=null ) {
                buf.append(", references=");
                buf.append(p.referencedAttributes);
            }
            // NOTE(review): this appends a trailing '>' even though
            // getTemplateDeclaratorString() already ends with '>' --
            // looks asymmetric; confirm the intended output format.
            buf.append(">\n");
            p = p.enclosingInstance;
        }
        /*
        if ( enclosingInstance!=null ) {
            buf.append(enclosingInstance.getEnclosingInstanceStackTrace());
        }
        */
        return buf.toString();
    }

    /** Return "&lt;name(args)@id&gt;" for this instance. */
    public String getTemplateDeclaratorString() {
        StringBuffer buf = new StringBuffer();
        buf.append("<");
        buf.append(getName());
        buf.append("(");
        buf.append(formalArguments.keySet());
        buf.append(")@");
        buf.append(String.valueOf(getTemplateID()));
        buf.append(">");
        return buf.toString();
    }

    /** Template name, optionally followed by the attribute name set. */
    protected String getTemplateHeaderString(boolean showAttributes) {
        if ( showAttributes ) {
            StringBuffer buf = new StringBuffer();
            buf.append(getName());
            if ( attributes!=null ) {
                buf.append(attributes.keySet());
            }
            return buf.toString();
        }
        return getName();
    }

    /** A reference to an attribute with no value, must be compared against
     *  the formal parameter to see if it exists; if it exists all is well,
     *  but if not, throw an exception.
     *
     *  Don't do the check if no formal parameters exist for this template;
     *  ask enclosing.
     */
    protected void checkNullAttributeAgainstFormalArguments(
            StringTemplate self,
            String attribute) {
        if ( self.getFormalArguments()==FormalArgument.UNKNOWN ) {
            // bypass unknown arg lists
            if ( self.enclosingInstance!=null ) {
                checkNullAttributeAgainstFormalArguments(
                    self.enclosingInstance, attribute);
            }
            return;
        }
        FormalArgument formalArg = self.lookupFormalArgument(attribute);
        if ( formalArg == null ) {
            throw new NoSuchElementException("no such attribute: "+attribute+
                " in template context "+getEnclosingInstanceStackString());
        }
    }

    /** Executed after evaluating a template.  For now, checks for setting
     *  of attributes not reference.
*/
    protected void checkForTrouble() {
        // we have table of set values and list of values referenced
        // compare, looking for SET BUT NOT REFERENCED ATTRIBUTES
        if ( attributes==null ) {
            return;
        }
        Set names = attributes.keySet();
        Iterator iter = names.iterator();
        // if in names and not in referenced attributes, trouble
        while ( iter.hasNext() ) {
            String name = (String)iter.next();
            if ( referencedAttributes!=null &&
                 !referencedAttributes.contains(name) ) {
                warning(getName()+": set but not used: "+name);
            }
        }
        // can do the reverse, but will have lots of false warnings :(
    }

    /** If an instance of x is enclosed in a y which is in a z, return
     *  a String of these instance names in order from topmost to lowest;
     *  here that would be "[z y x]".
     */
    public String getEnclosingInstanceStackString() {
        List names = new LinkedList();
        StringTemplate p = this;
        while ( p!=null ) {
            String name = p.getName();
            // prepend so the outermost template ends up first;
            // "(...)" marks instances with pass-through attributes
            names.add(0,name+(p.passThroughAttributes?"(...)":""));
            p = p.enclosingInstance;
        }
        // drop the commas from List.toString(): "[z, y, x]" -> "[z y x]"
        return names.toString().replaceAll(",","");
    }

    public boolean isRegion() {
        return isRegion;
    }

    public void setIsRegion(boolean isRegion) {
        this.isRegion = isRegion;
    }

    /** Record that this template defines or references region 'name'. */
    public void addRegionName(String name) {
        if ( regions==null ) {
            regions = new HashSet();
        }
        regions.add(name);
    }

    /** Does this template ref or embed region name? */
    public boolean containsRegionName(String name) {
        if ( regions==null ) {
            return false;
        }
        return regions.contains(name);
    }

    public int getRegionDefType() {
        return regionDefType;
    }

    public void setRegionDefType(int regionDefType) {
        this.regionDefType = regionDefType;
    }

    /** Recursive debug dump of this template's chunks and attributes;
     *  nested StringTemplate attribute values are expanded in place.
     */
    public String toDebugString() {
        StringBuffer buf = new StringBuffer();
        buf.append("template-"+getTemplateDeclaratorString()+":");
        buf.append("chunks=");
        if ( chunks!=null ) {
            buf.append(chunks.toString());
        }
        buf.append("attributes=[");
        if ( attributes!=null ) {
            Set attrNames = attributes.keySet();
            int n=0;
            for (Iterator iter = attrNames.iterator(); iter.hasNext();) {
                if ( n>0 ) {
                    buf.append(',');
                }
                String name = (String) iter.next();
                buf.append(name+"=");
                Object value = attributes.get(name);
                if ( value instanceof StringTemplate ) {
                    buf.append(((StringTemplate)value).toDebugString());
                }
                else {
                    buf.append(value);
                }
                n++;
            }
            // NOTE(review): the closing "]" is only emitted when
            // attributes!=null, so "attributes=[" may be left unclosed --
            // confirm whether that asymmetry is intended.
            buf.append("]");
        }
        return buf.toString();
    }

    /** Don't print values, just report the nested structure with attribute names.
     *  Follow (nest) attributes that are templates only.
*/ public String toStructureString() { return toStructureString(0); } public String toStructureString(int indent) { StringBuffer buf = new StringBuffer(); for (int i=1; i<=indent; i++) { // indent buf.append(" "); } buf.append(getName()); buf.append(attributes.keySet()); buf.append(":\n"); if ( attributes!=null ) { Set attrNames = attributes.keySet(); for (Iterator iter = attrNames.iterator(); iter.hasNext();) { String name = (String) iter.next(); Object value = attributes.get(name); if ( value instanceof StringTemplate ) { // descend buf.append(((StringTemplate)value).toStructureString(indent+1)); } else { if ( value instanceof List ) { List alist = (List)value; for (int i = 0; i < alist.size(); i++) { Object o = (Object) alist.get(i); if ( o instanceof StringTemplate ) { // descend buf.append(((StringTemplate)o).toStructureString(indent+1)); } } } else if ( value instanceof Map ) { Map m = (Map)value; Collection mvalues = m.values(); for (Iterator iterator = mvalues.iterator(); iterator.hasNext();) { Object o = (Object) iterator.next(); if ( o instanceof StringTemplate ) { // descend buf.append(((StringTemplate)o).toStructureString(indent+1)); } } } } } } return buf.toString(); } /* public String getDOTForDependencyGraph(boolean showAttributes) { StringBuffer buf = new StringBuffer(); buf.append("digraph prof {\n"); HashMap edges = new HashMap(); this.getDependencyGraph(edges, showAttributes); Set sourceNodes = edges.keySet(); // for each source template for (Iterator it = sourceNodes.iterator(); it.hasNext();) { String src = (String) it.next(); Set targetNodes = (Set)edges.get(src); // for each target template for (Iterator it2 = targetNodes.iterator(); it2.hasNext();) { String trg = (String) it2.next(); buf.append('"'); buf.append(src); buf.append('"'); buf.append("->"); buf.append('"'); buf.append(trg); buf.append("\"\n"); } } buf.append("}"); return buf.toString(); } */ /** Generate a DOT file for displaying the template enclosure graph; e.g., digraph prof { 
"t1" -> "t2" "t1" -> "t3" "t4" -> "t5" } */ public StringTemplate getDOTForDependencyGraph(boolean showAttributes) { String structure = "digraph StringTemplateDependencyGraph {\n" + "node [shape=$shape$, $if(width)$width=$width$,$endif$" + " $if(height)$height=$height$,$endif$ fontsize=$fontsize$];\n" + "$edges:{e|\"$e.src$\" -> \"$e.trg$\"\n}$" + "}\n"; StringTemplate graphST = new StringTemplate(structure); HashMap edges = new HashMap(); this.getDependencyGraph(edges, showAttributes); Set sourceNodes = edges.keySet(); // for each source template for (Iterator it = sourceNodes.iterator(); it.hasNext();) { String src = (String) it.next(); Set targetNodes = (Set)edges.get(src); // for each target template for (Iterator it2 = targetNodes.iterator(); it2.hasNext();) { String trg = (String) it2.next(); graphST.setAttribute("edges.{src,trg}", src, trg); } } graphST.setAttribute("shape", "none"); graphST.setAttribute("fontsize", "11"); graphST.setAttribute("height", "0"); // make height return graphST; } /** Get a list of n->m edges where template n contains template m. * The map you pass in is filled with edges: key->value. Useful * for having DOT print out an enclosing template graph. It * finds all direct template invocations too like <foo()> but not * indirect ones like <(name)()>. * * Ack, I just realized that this is done statically and hence * cannot see runtime arg values on statically included templates. * Hmm...someday figure out to do this dynamically as if we were * evaluating the templates. There will be extra nodes in the tree * because we are static like method and method[...] with args. 
*/
    public void getDependencyGraph(Map edges, boolean showAttributes) {
        String srcNode = this.getTemplateHeaderString(showAttributes);
        if ( attributes!=null ) {
            Set attrNames = attributes.keySet();
            for (Iterator iter = attrNames.iterator(); iter.hasNext();) {
                String name = (String) iter.next();
                Object value = attributes.get(name);
                if ( value instanceof StringTemplate ) {
                    recordEdgeAndDescend(edges, srcNode, (StringTemplate)value, showAttributes);
                }
                else {
                    if ( value instanceof List ) {
                        // only template elements of the list contribute edges
                        List alist = (List)value;
                        for (int i = 0; i < alist.size(); i++) {
                            Object o = (Object) alist.get(i);
                            if ( o instanceof StringTemplate ) {
                                recordEdgeAndDescend(edges, srcNode, (StringTemplate)o, showAttributes);
                            }
                        }
                    }
                    else if ( value instanceof Map ) {
                        // only template map values contribute edges
                        Map m = (Map)value;
                        Collection mvalues = m.values();
                        for (Iterator iterator = mvalues.iterator(); iterator.hasNext();) {
                            Object o = (Object) iterator.next();
                            if ( o instanceof StringTemplate ) {
                                recordEdgeAndDescend(edges, srcNode, (StringTemplate)o, showAttributes);
                            }
                        }
                    }
                }
            }
        }
        // look in chunks too for template refs
        for (int i = 0; chunks!=null && i < chunks.size(); i++) {
            Expr expr = (Expr) chunks.get(i);
            if ( expr instanceof ASTExpr ) {
                ASTExpr e = (ASTExpr)expr;
                AST tree = e.getAST();
                // search the expression AST for include nodes
                AST includeAST =
                    new CommonAST(new CommonToken(ActionEvaluator.INCLUDE,"include"));
                ASTEnumeration it = tree.findAllPartial(includeAST);
                while (it.hasMoreNodes()) {
                    AST t = (AST) it.nextNode();
                    String templateInclude = t.getFirstChild().getText();
                    // FIX: removed leftover "found include ..." debug println
                    // that polluted stdout of library users.
                    putToMultiValuedMap(edges,srcNode,templateInclude);
                    StringTemplateGroup group = getGroup();
                    if ( group!=null ) {
                        StringTemplate st = group.getInstanceOf(templateInclude);
                        if ( st!=null ) {
                            // descend into the reference template
                            st.getDependencyGraph(edges, showAttributes);
                        }
                    }
                }
            }
        }
    }

    /** Record an edge srcNode -> target header and recurse into target.
     *  Factored out of getDependencyGraph to avoid triplicating the
     *  template/list/map descend logic.
     */
    private void recordEdgeAndDescend(Map edges,
                                      String srcNode,
                                      StringTemplate target,
                                      boolean showAttributes) {
        String targetNode = target.getTemplateHeaderString(showAttributes);
        putToMultiValuedMap(edges,srcNode,targetNode);
        target.getDependencyGraph(edges,showAttributes); // descend
    }

    /** Manage a hash table like it has multiple unique values.  Map<Object,Set>. */
    protected void putToMultiValuedMap(Map map, Object key, Object value) {
        HashSet bag = (HashSet)map.get(key);
        if ( bag==null ) {
            bag = new HashSet();
            map.put(key, bag);
        }
        bag.add(value);
    }

    /** Dump chunks and attributes to stdout for debugging. */
    public void printDebugString() {
        System.out.println("template-"+getName()+":");
        System.out.print("chunks=");
        // FIX: chunks may be null (see addChunk/write); the original
        // called chunks.toString() unconditionally and threw NPE.
        System.out.println(chunks==null ? "null" : chunks.toString());
        if ( attributes==null ) {
            return;
        }
        System.out.print("attributes=[");
        Set attrNames = attributes.keySet();
        int n=0;
        for (Iterator iter = attrNames.iterator(); iter.hasNext();) {
            if ( n>0 ) {
                System.out.print(',');
            }
            String name = (String) iter.next();
            Object value = attributes.get(name);
            if ( value instanceof StringTemplate ) {
                System.out.print(name+"=");
                ((StringTemplate)value).printDebugString();
            }
            else {
                if ( value instanceof List ) {
                    // FIX: cast to the List interface rather than ArrayList;
                    // other List implementations caused ClassCastException
                    // (toDebugString and getDependencyGraph already cast to List).
                    List alist = (List)value;
                    for (int i = 0; i < alist.size(); i++) {
                        Object o = (Object) alist.get(i);
                        System.out.print(name+"["+i+"] is "+o.getClass().getName()+"=");
                        if ( o instanceof StringTemplate ) {
                            ((StringTemplate)o).printDebugString();
                        }
                        else {
                            System.out.println(o);
                        }
                    }
                }
                else {
                    System.out.print(name+"=");
                    System.out.println(value);
                }
            }
            n++;
        }
        System.out.print("]\n");
    }

    public String toString() {
        return toString(StringTemplateWriter.NO_WRAP);
    }

    /** Render this template to a String, wrapping lines at lineWidth. */
    public String toString(int lineWidth) {
        StringWriter out = new StringWriter();
        // Write the output to a StringWriter
        StringTemplateWriter wr = group.getStringTemplateWriter(out);
        wr.setLineWidth(lineWidth);
        try {
            write(wr);
        }
        catch (IOException io) {
            error("Got IOException writing to writer "+wr.getClass().getName());
        }
        // reset so next toString() does not wrap; normally this is a new writer
        // each time, but just in case they override the group to reuse the
        // writer.
        wr.setLineWidth(StringTemplateWriter.NO_WRAP);
        return out.toString();
    }
}
package org.biojava.utils.io;

import java.io.IOException;
import java.io.RandomAccessFile;
import java.io.Reader;

/**
 * <code>RandomAccessReader</code> extends <code>Reader</code> to
 * provide a means to create buffered <code>Reader</code>s from
 * <code>RandomAccessFile</code>s.
 *
 * <p>NOTE(review): bytes are widened to chars with a plain cast, so only
 * single-byte encodings (ASCII/Latin-1) round-trip correctly -- confirm
 * this is the intended behaviour for callers.</p>
 *
 * @author Keith James
 * @since 1.2
 */
public class RandomAccessReader extends Reader
{
    private static final int DEFAULT_BUFFER_SIZE = 1 << 13;

    private RandomAccessFile raf;

    // char view of the buffered window (filled by fill())
    private char [] buffer;
    // raw bytes read from the file before the byte->char widening
    private byte [] bytes;

    private int bufferPos = 0;      // index of next char to hand out
    private int bufferEnd = 0;      // number of valid chars in buffer
    private long raPtrPos = 0;      // file pointer just past the buffered window
    private boolean atEOF = false;  // set once a read hits end-of-file

    /**
     * Creates a new <code>RandomAccessReader</code> wrapping the
     * <code>RandomAccessFile</code> and using a default-sized buffer
     * (8192 bytes).
     *
     * @param raf a <code>RandomAccessFile</code> to wrap.
     *
     * @exception IOException if an error occurs.
     */
    public RandomAccessReader(RandomAccessFile raf)
        throws IOException
    {
        this(raf, DEFAULT_BUFFER_SIZE);
    }

    /**
     * Creates a new <code>RandomAccessReader</code> wrapping the
     * <code>RandomAccessFile</code> and using a buffer of the
     * specified size.
     *
     * @param raf a <code>RandomAccessFile</code> to wrap.
     * @param sz an <code>int</code> buffer size.
     *
     * @exception IOException if an error occurs.
     */
    public RandomAccessReader(RandomAccessFile raf, int sz)
        throws IOException
    {
        super();
        this.raf = raf;
        buffer = new char [sz];
        bytes  = new byte [sz];
        resetBuffer();
    }

    /**
     * <code>close</code> closes the underlying
     * <code>RandomAccessFile</code>.  Subsequent reads throw
     * <code>IOException</code> (see {@link #fill}).
     *
     * @exception IOException if an error occurs.
     */
    public void close() throws IOException
    {
        raf.close();
        raf = null;
    }

    /**
     * <code>length</code> returns the length of the underlying
     * <code>RandomAccessFile</code>.
     *
     * @return a <code>long</code>.
     *
     * @exception IOException if an error occurs.
     */
    public long length() throws IOException
    {
        return raf.length();
    }

    /**
     * <code>read</code> reads one byte from the underlying
     * <code>RandomAccessFile</code>.
     *
     * @return an <code>int</code>, -1 if the end of the stream has
     * been reached.
     *
     * @exception IOException if an error occurs.
     */
    public final int read() throws IOException
    {
        if (atEOF)
            return -1;

        // refill the window when it is exhausted
        if (bufferPos >= bufferEnd)
            if (fill() < 0)
                return -1;

        if (bufferEnd == 0)
            return -1;
        else
            return buffer[bufferPos++];
    }

    /**
     * <code>read</code> reads from the underlying
     * <code>RandomAccessFile</code> into an array.
     *
     * @param cbuf a <code>char []</code> array to read into.
     * @param off an <code>int</code> offset in the array at which to
     * start storing chars.
     * @param len an <code>int</code> maximum number of char to read.
     *
     * @return an <code>int</code> number of chars read, or -1 if the
     * end of the stream has been reached.
     *
     * @exception IOException if an error occurs.
     */
    public int read(char [] cbuf, int off, int len) throws IOException
    {
        if (atEOF)
            return -1;

        int remainder = bufferEnd - bufferPos;

        // If there are enough chars in the buffer to handle this
        // call, use those
        if (len <= remainder)
        {
            System.arraycopy(buffer, bufferPos, cbuf, off, len);
            bufferPos += len;
            return len;
        }

        // Otherwise start getting more chars from the delegate
        for (int i = 0; i < len; i++)
        {
            // Read from our own method which checks the buffer
            // first
            int c = read();
            if (c != -1)
            {
                cbuf[off + i] = (char) c;
            }
            else
            {
                // Need to remember that EOF was reached to return -1
                // next read
                atEOF = true;
                // FIX: the Reader contract requires -1 (not 0) when the
                // end of the stream is hit before any char is delivered.
                return (i == 0) ? -1 : i;
            }
        }
        return len;
    }

    /**
     * <code>getFilePointer</code> returns the effective position of
     * the pointer in the underlying <code>RandomAccessFile</code>.
     *
     * @return a <code>long</code> offset.
     *
     * @exception IOException if an error occurs.
     */
    public long getFilePointer() throws IOException
    {
        // raPtrPos is past the buffered window; back off by the unread part
        return raPtrPos - bufferEnd + bufferPos;
    }

    /**
     * <code>seek</code> moves the pointer to the specified position.
     *
     * @param pos a <code>long</code> offset.
     *
     * @exception IOException if an error occurs.
     */
    public void seek(long pos) throws IOException
    {
        // If we seek backwards after reaching EOF, we are no longer
        // at EOF.
        if (pos < raf.length())
            atEOF = false;

        int p = (int) (raPtrPos - pos);

        // Check if we can seek within the buffer
        if (p >= 0 && p <= bufferEnd)
        {
            bufferPos = bufferEnd - p;
        }
        // Otherwise delegate to do a "real" seek and clean the
        // dirty buffer
        else
        {
            raf.seek(pos);
            resetBuffer();
        }
    }

    /**
     * <code>fill</code> fills the buffer from the
     * <code>RandomAccessFile</code>.
     *
     * @return an <code>int</code> number of bytes read, or -1 at EOF.
     *
     * @exception IOException if an error occurs or the reader was closed.
     */
    private int fill() throws IOException
    {
        if (raf == null)
            throw new IOException("Random access file closed");

        // Read bytes from random access delegate.
        // FIX: read up to the actual buffer capacity; the original used
        // DEFAULT_BUFFER_SIZE here, which threw IndexOutOfBoundsException
        // whenever the reader was constructed with a smaller buffer size.
        int b = raf.read(bytes, 0, bytes.length);

        // Copy and cast bytes read to char buffer
        for (int i = b; --i >= 0;)
            buffer[i] = (char) bytes[i];

        // If read any bytes
        if (b >= 0)
        {
            raPtrPos += b;
            bufferPos = 0;
            bufferEnd = b;
        }

        // Return number bytes read
        return b;
    }

    /**
     * <code>resetBuffer</code> resets the buffer when the pointer
     * leaves its boundaries.
     *
     * @exception IOException if an error occurs.
     */
    private void resetBuffer() throws IOException
    {
        bufferPos = 0;
        bufferEnd = 0;
        raPtrPos = raf.getFilePointer();
    }
}
package org.griphyn.cPlanner.classes; import org.griphyn.cPlanner.partitioner.graph.Bag; import org.griphyn.cPlanner.common.PegasusProperties; import org.griphyn.cPlanner.common.LogManager; import org.griphyn.cPlanner.poolinfo.PoolInfoProvider; import org.griphyn.common.catalog.TransformationCatalog; import org.griphyn.common.catalog.ReplicaCatalog; import org.griphyn.common.catalog.transformation.Mapper; /** * A bag of objects that needs to be passed to various refiners. * It contains handles to the various catalogs, the properties and the * planner options. * * @author Karan Vahi * @version $Revision$ */ public class PegasusBag implements Bag { /** * Array storing the names of the attributes that are stored with the * site. */ public static final String PEGASUS_INFO[] = { "pegasus-properties", "planner-options", "replica-catalog", "site-catalog", "transformation-catalog", "pegasus-logger" }; /** * The constant to be passed to the accessor functions to get or set the * PegasusProperties. */ public static final Integer PEGASUS_PROPERTIES = new Integer( 0 ); /** * The constant to be passed to the accessor functions to get or set the * options passed to the planner. */ public static final Integer PLANNER_OPTIONS = new Integer( 1 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the replica catalog */ public static final Integer REPLICA_CATALOG = new Integer( 2 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the site catalog. */ public static final Integer SITE_CATALOG = new Integer( 3 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the transformation catalog. */ public static final Integer TRANSFORMATION_CATALOG = new Integer( 4 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the Transformation Mapper. 
*/ public static final Integer TRANSFORMATION_MAPPER = new Integer( 5 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the Logging manager */ public static final Integer PEGASUS_LOGMANAGER = new Integer( 6 ); /** * The handle to the <code>PegasusProperties</code>. */ private PegasusProperties mProps; /** * The options passed to the planner. */ private PlannerOptions mPOptions; /** * The handle to the replica catalog. */ private ReplicaCatalog mRCHandle; /** * The handle to the site catalog. */ private PoolInfoProvider mSCHandle; /** * The handle to the transformation catalog. */ private TransformationCatalog mTCHandle; /** * The handle to the Transformation Mapper. */ private Mapper mTCMapper; /** * The handle to the LogManager. */ private LogManager mLogger; /** * The default constructor. */ public PegasusBag() { } /** * Adds an object to the underlying bag corresponding to a particular key. * * @param key the key with which the value has to be associated. * @param value the value to be associated with the key. * * @return boolean indicating if insertion was successful. * */ public boolean add( Object key, Object value ) { //to denote if object is of valid type or not. 
boolean valid = true; int k = getIntValue( key ); switch ( k ) { case 0: //PEGASUS_PROPERTIES if ( value != null && value instanceof PegasusProperties) mProps = (PegasusProperties) value; else valid = false; break; case 1: //PLANNER_OPTIONS if ( value != null && value instanceof PlannerOptions ) mPOptions = ( PlannerOptions ) value; else valid = false; break; case 2: //REPLICA_CATALOG: if ( value != null && value instanceof ReplicaCatalog ) mRCHandle = ( ReplicaCatalog ) value; else valid = false; break; case 3: //SITE_CATALOG: if ( value != null && value instanceof PoolInfoProvider ) mSCHandle = ( PoolInfoProvider ) value; else valid = false; break; case 4: //TRANSFORMATION_CATALOG: if ( value != null && value instanceof TransformationCatalog ) mTCHandle = ( TransformationCatalog ) value; else valid = false; break; case 5: //TRANSFORMATION_MAPPER if ( value != null && value instanceof Mapper ) mTCMapper = ( Mapper ) value; else valid = false; break; case 6: //PEGASUS_LOGGER if ( value != null && value instanceof LogManager ) mLogger = ( LogManager ) value; else valid = false; break; default: throw new RuntimeException( " Wrong Pegasus Bag key. Please use one of the predefined Integer key types"); } //if object is not null , and valid == false //throw exception if( !valid && value != null ){ throw new RuntimeException( "Invalid object passed for key " + PEGASUS_INFO[ k ]); } return valid; } /** * Returns true if the namespace contains a mapping for the specified key. * * @param key The key that you want to search for in the bag. * * @return boolean */ public boolean containsKey(Object key) { int k = -1; try{ k = ( (Integer) key).intValue(); } catch( Exception e ){} return ( k >= this.PEGASUS_PROPERTIES.intValue() && k <= this.TRANSFORMATION_CATALOG.intValue() ); } /** * Returns an objects corresponding to the key passed. * * @param key the key corresponding to which the objects need to be * returned. 
* * @return the object that is found corresponding to the key or null. */ public Object get( Object key ) { int k = getIntValue( key ); switch( k ){ case 0: return this.mProps; case 1: return this.mPOptions; case 2: return this.mRCHandle; case 3: return this.mSCHandle; case 4: return this.mTCHandle; case 5: //TRANSFORMATION_MAPPER return this.mTCMapper; case 6: //PEGASUS_LOGMANAGER return this.mLogger; default: throw new RuntimeException( " Wrong Pegasus Bag key. Please use one of the predefined Integer key types"); } } /** * A convenice method to get PegasusProperties * * @return the handle to the properties. */ public PegasusProperties getPegasusProperties(){ return ( PegasusProperties )get( PegasusBag.PEGASUS_PROPERTIES ); } /** * A convenice method to get Logger/ * * @return the handle to the logger. */ public LogManager getLogger(){ return ( LogManager )get( PegasusBag.PEGASUS_LOGMANAGER ); } /** * A convenience method to get the intValue for the object passed. * * @param key the key to be converted * * @return the int value if object an integer, else -1 */ private int getIntValue( Object key ){ int k = -1; try{ k = ( (Integer) key).intValue(); } catch( Exception e ){} return k; } }
package org.nutz.dao.impl.entity;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Map;
import java.util.Map.Entry;

import javax.sql.DataSource;

import org.nutz.dao.entity.Entity;
import org.nutz.dao.entity.annotation.ColType;
import org.nutz.dao.impl.EntityHolder;
import org.nutz.dao.impl.entity.field.NutMappingField;
import org.nutz.dao.jdbc.JdbcExpert;
import org.nutz.dao.jdbc.Jdbcs;
import org.nutz.dao.jdbc.ValueAdaptor;
import org.nutz.dao.util.Daos;
import org.nutz.lang.Mirror;
import org.nutz.lang.eject.EjectFromMap;
import org.nutz.lang.inject.InjectToMap;
import org.nutz.log.Log;
import org.nutz.log.Logs;

/**
 * Builds an {@link Entity} on the fly from a Map, using key-name
 * conventions to describe the table:
 * <ul>
 * <li>{@code "#name"} — entity meta entry, stored via {@code getMetas()}</li>
 * <li>{@code ".field.option"} — per-field configuration (type, coltype,
 *     adaptor, width); consumed when the plain {@code field} key is seen</li>
 * <li>leading {@code '+'} — auto-increment (and id, if int-like)</li>
 * <li>leading {@code '!'} — not-null</li>
 * <li>leading {@code '*'} — id (if int-like) or name field otherwise</li>
 * </ul>
 */
public class MapEntityMaker {

    private static final Log log = Logs.get();

    /** SQL dialect helper; set via {@link #init}. */
    protected JdbcExpert expert;

    /** Connection source used only for enum-field setup; set via {@link #init}. */
    protected DataSource dataSource;

    /**
     * Creates an entity description for the given table from the sample map.
     *
     * @param tableName table (and view) name for the entity
     * @param map       sample row whose keys/values describe the columns
     * @return the assembled entity
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public <T extends Map<String, ?>> Entity<T> make(String tableName, T map) {
        final NutEntity<T> en = new NutEntity(map.getClass());
        en.setTableName(tableName);
        en.setViewName(tableName);
        boolean check = false;
        for (Entry<String, ?> entry : map.entrySet()) {
            String key = entry.getKey();
            // Keys starting with "#" carry entity metadata, not columns.
            // (Bug fix: the string literal of this condition was corrupted
            // in the previous revision; restored to the "#" convention.)
            if (key.startsWith("#")) {
                en.getMetas().put(key.substring(1), entry.getValue().toString());
                continue;
            } else if (key.startsWith(".")) {
                // ".field.option" keys configure other fields; handled below
                // when the plain field key is processed.
                continue;
            }
            Object value = entry.getValue();
            Mirror<?> mirror = Mirror.me(value);
            NutMappingField ef = new NutMappingField(en);
            // Strip leading markers, possibly several ('+', '!', '*').
            while (true) {
                if (key.startsWith("+")) {
                    ef.setAsAutoIncreasement();
                    if (mirror != null && mirror.isIntLike())
                        ef.setAsId();
                    key = key.substring(1);
                } else if (key.startsWith("!")) {
                    ef.setAsNotNull();
                    key = key.substring(1);
                } else if (key.startsWith("*")) {
                    key = key.substring(1);
                    if (mirror != null && mirror.isIntLike())
                        ef.setAsId();
                    else
                        ef.setAsName();
                } else {
                    break;
                }
            }
            ef.setName(key);
            String columnName = key;
            if (Daos.FORCE_UPPER_COLUMN_NAME) {
                ef.setColumnName(columnName.toUpperCase());
            } else {
                ef.setColumnName(columnName);
            }
            if (Daos.FORCE_WRAP_COLUMN_NAME) {
                ef.setColumnNameInSql(expert.wrapKeyword(columnName, true));
            } else if (Daos.CHECK_COLUMN_NAME_KEYWORD) {
                ef.setColumnNameInSql(expert.wrapKeyword(columnName, false));
            }
            // Field Java type: explicit ".key.type" override, else the
            // runtime class of the sample value.
            if (map.containsKey("." + key + ".type")) {
                ef.setType((Class) map.get("." + key + ".type"));
            } else {
                ef.setType(null == value ? Object.class : value.getClass());
            }
            // Column type: explicit override or guessed from the Java type.
            if (map.containsKey("." + key + ".coltype")) {
                ef.setColumnType((ColType) map.get("." + key + ".coltype"));
            } else {
                Jdbcs.guessEntityFieldColumnType(ef);
            }
            if (map.containsKey("." + key + ".adaptor")) {
                ef.setAdaptor((ValueAdaptor) map.get("." + key + ".adaptor"));
            } else {
                ef.setAdaptor(expert.getAdaptor(ef));
            }
            if (map.containsKey("." + key + ".width")) {
                ef.setWidth((int) map.get("." + key + ".width"));
            }
            ef.setInjecting(new InjectToMap(key));
            // Eject with the ORIGINAL (unstripped) key so values are read
            // back from the same map entry they were declared under.
            ef.setEjecting(new EjectFromMap(entry.getKey()));
            // If the dialect auto-increments but cannot report generated
            // keys, fetch the id with an after-insert macro instead.
            if (ef.isAutoIncreasement() && ef.isId()
                && expert.isSupportAutoIncrement()
                && !expert.isSupportGeneratedKeys()) {
                en.addAfterInsertMacro(expert.fetchPojoId(en, ef));
            }
            en.addMappingField(ef);
            if (mirror != null && !check)
                check = mirror.isEnum();
        }
        en.checkCompositeFields(null);
        // When enum values were seen, consult the live schema via
        // setupEntityField — presumably to settle the enum column mapping;
        // errors are logged and ignored (best effort).
        if (check) {
            Connection conn = null;
            try {
                try {
                    conn = dataSource.getConnection();
                    expert.setupEntityField(conn, en);
                } finally {
                    if (conn != null)
                        conn.close();
                }
            } catch (SQLException e) {
                log.debug(e.getMessage(), e);
            }
        }
        return en;
    }

    /**
     * Wires in the collaborators this maker needs.
     *
     * @param datasource connection source for enum-field setup
     * @param expert     SQL dialect helper
     * @param holder     entity holder (currently unused by this maker)
     */
    public void init(DataSource datasource, JdbcExpert expert, EntityHolder holder) {
        this.expert = expert;
        this.dataSource = datasource;
    }
}
package org.torproject.onionoo;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TimeZone;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.lang.StringEscapeUtils;
import org.torproject.descriptor.BridgePoolAssignment;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorFile;
import org.torproject.descriptor.DescriptorReader;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.ExitList;
import org.torproject.descriptor.ExitListEntry;
import org.torproject.descriptor.ServerDescriptor;

/* Write updated detail data files to disk and delete files of relays or
 * bridges that fell out of the summary list.
 *
 * The parts of details files coming from server descriptors always come
 * from the last known descriptor of a relay or bridge, not from the
 * descriptor that was last referenced in a network status. */
public class DetailDataWriter {

  /* Current relays, keyed by fingerprint (the key is used as file name
   * and "fingerprint" field below). */
  private SortedMap<String, Node> relays;
  public void setCurrentRelays(SortedMap<String, Node> currentRelays) {
    this.relays = currentRelays;
  }

  /* Current bridges, keyed the same way. */
  private SortedMap<String, Node> bridges;
  public void setCurrentBridges(SortedMap<String, Node> currentBridges) {
    this.bridges = currentBridges;
  }

  /* Reverse-DNS lookup tuning: per-request timeout, overall budget for
   * all lookups, re-lookup age threshold, and worker thread count. */
  private static final long RDNS_LOOKUP_MAX_REQUEST_MILLIS = 10L * 1000L;
  private static final long RDNS_LOOKUP_MAX_DURATION_MILLIS = 5L * 60L
      * 1000L;
  private static final long RDNS_LOOKUP_MAX_AGE_MILLIS = 12L * 60L * 60L
      * 1000L;
  private static final int RDNS_LOOKUP_WORKERS_NUM = 5;
  private Set<String> rdnsLookupJobs;
  private Map<String, String> rdnsLookupResults;
  private long startedRdnsLookups;
  private List<RdnsLookupWorker> rdnsLookupWorkers;

  /* Queue reverse-DNS jobs for relays whose last lookup is older than
   * RDNS_LOOKUP_MAX_AGE_MILLIS and start the daemon worker threads. */
  public void startReverseDomainNameLookups() {
    this.startedRdnsLookups = System.currentTimeMillis();
    this.rdnsLookupJobs = new HashSet<String>();
    for (Node relay : relays.values()) {
      if (relay.getLastRdnsLookup() < this.startedRdnsLookups
          - RDNS_LOOKUP_MAX_AGE_MILLIS) {
        this.rdnsLookupJobs.add(relay.getAddress());
      }
    }
    this.rdnsLookupResults = new HashMap<String, String>();
    this.rdnsLookupWorkers = new ArrayList<RdnsLookupWorker>();
    for (int i = 0; i < RDNS_LOOKUP_WORKERS_NUM; i++) {
      RdnsLookupWorker rdnsLookupWorker = new RdnsLookupWorker();
      this.rdnsLookupWorkers.add(rdnsLookupWorker);
      rdnsLookupWorker.setDaemon(true);
      rdnsLookupWorker.start();
    }
  }

  /* Wait for all workers to finish, then copy their results onto the
   * relay nodes. */
  public void finishReverseDomainNameLookups() {
    for (RdnsLookupWorker rdnsLookupWorker : this.rdnsLookupWorkers) {
      try {
        rdnsLookupWorker.join();
      } catch (InterruptedException e) {
        /* This is not something that we can take care of. Just leave the
         * worker thread alone. */
      }
    }
    synchronized (this.rdnsLookupResults) {
      for (Node relay : relays.values()) {
        if (this.rdnsLookupResults.containsKey(relay.getAddress())) {
          relay.setHostName(this.rdnsLookupResults.get(
              relay.getAddress()));
          relay.setLastRdnsLookup(this.startedRdnsLookups);
        }
      }
    }
  }

  /* Worker thread: pulls one address at a time from rdnsLookupJobs,
   * delegates the actual lookup to a per-request thread so each lookup
   * is bounded by RDNS_LOOKUP_MAX_REQUEST_MILLIS, and stops once the
   * overall RDNS_LOOKUP_MAX_DURATION_MILLIS budget is spent. */
  private class RdnsLookupWorker extends Thread {
    public void run() {
      while (System.currentTimeMillis() - RDNS_LOOKUP_MAX_DURATION_MILLIS
          <= startedRdnsLookups) {
        String rdnsLookupJob = null;
        synchronized (rdnsLookupJobs) {
          /* Take an arbitrary single job off the shared set. */
          for (String job : rdnsLookupJobs) {
            rdnsLookupJob = job;
            rdnsLookupJobs.remove(job);
            break;
          }
        }
        if (rdnsLookupJob == null) {
          break;
        }
        RdnsLookupRequest request = new RdnsLookupRequest(this,
            rdnsLookupJob);
        request.setDaemon(true);
        request.start();
        try {
          Thread.sleep(RDNS_LOOKUP_MAX_REQUEST_MILLIS);
        } catch (InterruptedException e) {
          /* Getting interrupted should be the default case. */
        }
        String hostName = request.getHostName();
        if (hostName != null) {
          synchronized (rdnsLookupResults) {
            rdnsLookupResults.put(rdnsLookupJob, hostName);
          }
        }
      }
    }
  }

  /* One blocking InetAddress lookup; interrupts the parent worker when
   * done so the worker doesn't sleep for the full request timeout. */
  private class RdnsLookupRequest extends Thread {
    RdnsLookupWorker parent;
    String address, hostName;
    public RdnsLookupRequest(RdnsLookupWorker parent, String address) {
      this.parent = parent;
      this.address = address;
    }
    public void run() {
      try {
        String result =
            InetAddress.getByName(this.address).getHostName();
        synchronized (this) {
          this.hostName = result;
        }
      } catch (UnknownHostException e) {
        /* We'll try again the next time. */
      }
      this.parent.interrupt();
    }
    public synchronized String getHostName() {
      return hostName;
    }
  }

  /* Most recently published server descriptor per relay fingerprint. */
  private Map<String, ServerDescriptor> relayServerDescriptors =
      new HashMap<String, ServerDescriptor>();
  public void readRelayServerDescriptors() {
    DescriptorReader reader =
        DescriptorSourceFactory.createDescriptorReader();
    reader.addDirectory(new File(
        "in/relay-descriptors/server-descriptors"));
    /* Don't remember which server descriptors we already parsed. If we
     * parse a server descriptor now and first learn about the relay in a
     * later consensus, we'll never write the descriptor content anywhere.
     * The result would be details files containing no descriptor parts
     * until the relay publishes the next descriptor. */
    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
    while (descriptorFiles.hasNext()) {
      DescriptorFile descriptorFile = descriptorFiles.next();
      if (descriptorFile.getException() != null) {
        System.out.println("Could not parse "
            + descriptorFile.getFileName());
        descriptorFile.getException().printStackTrace();
      }
      if (descriptorFile.getDescriptors() != null) {
        for (Descriptor descriptor : descriptorFile.getDescriptors()) {
          if (descriptor instanceof ServerDescriptor) {
            ServerDescriptor serverDescriptor =
                (ServerDescriptor) descriptor;
            String fingerprint = serverDescriptor.getFingerprint();
            /* Keep only the newest descriptor per fingerprint. */
            if (!this.relayServerDescriptors.containsKey(fingerprint) ||
                this.relayServerDescriptors.get(fingerprint).
                getPublishedMillis()
                < serverDescriptor.getPublishedMillis()) {
              this.relayServerDescriptors.put(fingerprint,
                  serverDescriptor);
            }
          }
        }
      }
    }
  }

  /* Compute per-relay advertised-bandwidth/consensus-weight fractions
   * and guard/middle/exit selection probabilities from the consensus
   * Wxx bandwidth weights (scaled by 1/10000). */
  public void calculatePathSelectionProbabilities(
      SortedMap<String, Integer> bandwidthWeights) {
    boolean consensusContainsBandwidthWeights = false;
    double wgg = 0.0, wgd = 0.0, wmg = 0.0, wmm = 0.0, wme = 0.0,
        wmd = 0.0, wee = 0.0, wed = 0.0;
    if (bandwidthWeights != null) {
      /* Only use the weights if all eight keys are present. */
      SortedSet<String> weightKeys = new TreeSet<String>(Arrays.asList(
          "Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
      weightKeys.removeAll(bandwidthWeights.keySet());
      if (weightKeys.isEmpty()) {
        consensusContainsBandwidthWeights = true;
        wgg = ((double) bandwidthWeights.get("Wgg")) / 10000.0;
        wgd = ((double) bandwidthWeights.get("Wgd")) / 10000.0;
        wmg = ((double) bandwidthWeights.get("Wmg")) / 10000.0;
        wmm = ((double) bandwidthWeights.get("Wmm")) / 10000.0;
        wme = ((double) bandwidthWeights.get("Wme")) / 10000.0;
        wmd = ((double) bandwidthWeights.get("Wmd")) / 10000.0;
        wee = ((double) bandwidthWeights.get("Wee")) / 10000.0;
        wed = ((double) bandwidthWeights.get("Wed")) / 10000.0;
      }
    } else {
      System.err.println("Could not determine most recent Wxx parameter "
          + "values, probably because we didn't parse a consensus in "
          + "this execution. All relays' guard/middle/exit weights are "
          + "going to be 0.0.");
    }
    /* First pass: collect per-relay weights and totals. */
    SortedMap<String, Double>
        advertisedBandwidths = new TreeMap<String, Double>(),
        consensusWeights = new TreeMap<String, Double>(),
        guardWeights = new TreeMap<String, Double>(),
        middleWeights = new TreeMap<String, Double>(),
        exitWeights = new TreeMap<String, Double>();
    double totalAdvertisedBandwidth = 0.0;
    double totalConsensusWeight = 0.0;
    double totalGuardWeight = 0.0;
    double totalMiddleWeight = 0.0;
    double totalExitWeight = 0.0;
    for (Map.Entry<String, Node> e : this.relays.entrySet()) {
      String fingerprint = e.getKey();
      Node relay = e.getValue();
      if (!relay.getRunning()) {
        continue;
      }
      boolean isExit = relay.getRelayFlags().contains("Exit") &&
          !relay.getRelayFlags().contains("BadExit");
      boolean isGuard = relay.getRelayFlags().contains("Guard");
      if (this.relayServerDescriptors.containsKey(fingerprint)) {
        ServerDescriptor serverDescriptor =
            this.relayServerDescriptors.get(fingerprint);
        /* Advertised bandwidth is min(rate, burst, observed). */
        double advertisedBandwidth = (double) Math.min(Math.min(
            serverDescriptor.getBandwidthBurst(),
            serverDescriptor.getBandwidthObserved()),
            serverDescriptor.getBandwidthRate());
        advertisedBandwidths.put(fingerprint, advertisedBandwidth);
        totalAdvertisedBandwidth += advertisedBandwidth;
      }
      double consensusWeight = (double) relay.getConsensusWeight();
      consensusWeights.put(fingerprint, consensusWeight);
      totalConsensusWeight += consensusWeight;
      if (consensusContainsBandwidthWeights) {
        /* Apply the Wxx weight matching the relay's flag combination. */
        double guardWeight = consensusWeight,
            middleWeight = consensusWeight,
            exitWeight = consensusWeight;
        if (isGuard && isExit) {
          guardWeight *= wgd;
          middleWeight *= wmd;
          exitWeight *= wed;
        } else if (isGuard) {
          guardWeight *= wgg;
          middleWeight *= wmg;
          exitWeight = 0.0;
        } else if (isExit) {
          guardWeight = 0.0;
          middleWeight *= wme;
          exitWeight *= wee;
        } else {
          guardWeight = 0.0;
          middleWeight *= wmm;
          exitWeight = 0.0;
        }
        guardWeights.put(fingerprint, guardWeight);
        middleWeights.put(fingerprint, middleWeight);
        exitWeights.put(fingerprint, exitWeight);
        totalGuardWeight += guardWeight;
        totalMiddleWeight += middleWeight;
        totalExitWeight += exitWeight;
      }
    }
    /* Second pass: store each relay's share of the totals. */
    for (Map.Entry<String, Node> e : this.relays.entrySet()) {
      String fingerprint = e.getKey();
      Node relay = e.getValue();
      if (advertisedBandwidths.containsKey(fingerprint)) {
        relay.setAdvertisedBandwidthFraction(advertisedBandwidths.get(
            fingerprint) / totalAdvertisedBandwidth);
      }
      if (consensusWeights.containsKey(fingerprint)) {
        relay.setConsensusWeightFraction(consensusWeights.get(fingerprint)
            / totalConsensusWeight);
      }
      if (guardWeights.containsKey(fingerprint)) {
        relay.setGuardProbability(guardWeights.get(fingerprint)
            / totalGuardWeight);
      }
      if (middleWeights.containsKey(fingerprint)) {
        relay.setMiddleProbability(middleWeights.get(fingerprint)
            / totalMiddleWeight);
      }
      if (exitWeights.containsKey(fingerprint)) {
        relay.setExitProbability(exitWeights.get(fingerprint)
            / totalExitWeight);
      }
    }
  }

  /* Timestamp of this run; used to drop exit-list entries older than
   * 24 hours. */
  private long now = System.currentTimeMillis();

  /* Recent exit-list entries per relay fingerprint. */
  private Map<String, Set<ExitListEntry>> exitListEntries =
      new HashMap<String, Set<ExitListEntry>>();
  public void readExitLists() {
    DescriptorReader reader =
        DescriptorSourceFactory.createDescriptorReader();
    reader.addDirectory(new File(
        "in/exit-lists"));
    reader.setExcludeFiles(new File("status/exit-list-history"));
    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
    while (descriptorFiles.hasNext()) {
      DescriptorFile descriptorFile = descriptorFiles.next();
      if (descriptorFile.getException() != null) {
        System.out.println("Could not parse "
            + descriptorFile.getFileName());
        descriptorFile.getException().printStackTrace();
      }
      if (descriptorFile.getDescriptors() != null) {
        for (Descriptor descriptor : descriptorFile.getDescriptors()) {
          if (descriptor instanceof ExitList) {
            ExitList exitList = (ExitList) descriptor;
            for (ExitListEntry exitListEntry :
                exitList.getExitListEntries()) {
              /* Ignore scan results older than 24 hours. */
              if (exitListEntry.getScanMillis() <
                  this.now - 24L * 60L * 60L * 1000L) {
                continue;
              }
              String fingerprint = exitListEntry.getFingerprint();
              if (!this.exitListEntries.containsKey(fingerprint)) {
                this.exitListEntries.put(fingerprint,
                    new HashSet<ExitListEntry>());
              }
              this.exitListEntries.get(fingerprint).add(exitListEntry);
            }
          }
        }
      }
    }
  }

  /* Most recently published server descriptor per bridge fingerprint. */
  private Map<String, ServerDescriptor> bridgeServerDescriptors =
      new HashMap<String, ServerDescriptor>();
  public void readBridgeServerDescriptors() {
    DescriptorReader reader =
        DescriptorSourceFactory.createDescriptorReader();
    reader.addDirectory(new File(
        "in/bridge-descriptors/server-descriptors"));
    /* Don't remember which server descriptors we already parsed. If we
     * parse a server descriptor now and first learn about the relay in a
     * later status, we'll never write the descriptor content anywhere.
     * The result would be details files containing no descriptor parts
     * until the bridge publishes the next descriptor. */
    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
    while (descriptorFiles.hasNext()) {
      DescriptorFile descriptorFile = descriptorFiles.next();
      if (descriptorFile.getException() != null) {
        System.out.println("Could not parse "
            + descriptorFile.getFileName());
        descriptorFile.getException().printStackTrace();
      }
      if (descriptorFile.getDescriptors() != null) {
        for (Descriptor descriptor : descriptorFile.getDescriptors()) {
          if (descriptor instanceof ServerDescriptor) {
            ServerDescriptor serverDescriptor =
                (ServerDescriptor) descriptor;
            String fingerprint = serverDescriptor.getFingerprint();
            /* Keep only the newest descriptor per fingerprint. */
            if (!this.bridgeServerDescriptors.containsKey(fingerprint) ||
                this.bridgeServerDescriptors.get(fingerprint).
                getPublishedMillis()
                < serverDescriptor.getPublishedMillis()) {
              this.bridgeServerDescriptors.put(fingerprint,
                  serverDescriptor);
            }
          }
        }
      }
    }
  }

  /* Bridge pool assignment string per bridge fingerprint; later files
   * overwrite earlier ones. */
  private Map<String, String> bridgePoolAssignments =
      new HashMap<String, String>();
  public void readBridgePoolAssignments() {
    DescriptorReader reader =
        DescriptorSourceFactory.createDescriptorReader();
    reader.addDirectory(new File("in/bridge-pool-assignments"));
    reader.setExcludeFiles(new File("status/bridge-poolassign-history"));
    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
    while (descriptorFiles.hasNext()) {
      DescriptorFile descriptorFile = descriptorFiles.next();
      if (descriptorFile.getException() != null) {
        System.out.println("Could not parse "
            + descriptorFile.getFileName());
        descriptorFile.getException().printStackTrace();
      }
      if (descriptorFile.getDescriptors() != null) {
        for (Descriptor descriptor : descriptorFile.getDescriptors()) {
          if (descriptor instanceof BridgePoolAssignment) {
            BridgePoolAssignment bridgePoolAssignment =
                (BridgePoolAssignment) descriptor;
            for (Map.Entry<String, String> e :
                bridgePoolAssignment.getEntries().entrySet()) {
              String fingerprint = e.getKey();
              String details = e.getValue();
              this.bridgePoolAssignments.put(fingerprint, details);
            }
          }
        }
      }
    }
  }

  /* Entry point: rewrite all relay and bridge details files, then
   * delete files for nodes no longer in the summary. */
  public void writeDetailDataFiles() {
    SortedMap<String, File> remainingDetailsFiles =
        this.listAllDetailsFiles();
    remainingDetailsFiles = this.updateRelayDetailsFiles(
        remainingDetailsFiles);
    remainingDetailsFiles = this.updateBridgeDetailsFiles(
        remainingDetailsFiles);
    this.deleteDetailsFiles(remainingDetailsFiles);
  }

  private File detailsFileDirectory = new File("out/details");

  /* List existing details files; names of length 40 are assumed to be
   * fingerprints (hex SHA-1). */
  private SortedMap<String, File> listAllDetailsFiles() {
    SortedMap<String, File> result = new TreeMap<String, File>();
    if (detailsFileDirectory.exists() &&
        detailsFileDirectory.isDirectory()) {
      for (File file : detailsFileDirectory.listFiles()) {
        if (file.getName().length() == 40) {
          result.put(file.getName(), file);
        }
      }
    }
    return result;
  }

  /* Escape a string for embedding in the hand-built JSON below; single
   * quotes are un-escaped again because JSON doesn't escape them. */
  private static String escapeJSON(String s) {
    return StringEscapeUtils.escapeJavaScript(s).replaceAll("\\\\'",
        "'");
  }

  /* Rewrite the details file of every current relay, merging the
   * descriptor-derived part kept from the existing file (when still
   * newest) with a freshly generated network-status part.  Returns the
   * files that were not updated. */
  private SortedMap<String, File> updateRelayDetailsFiles(
      SortedMap<String, File> remainingDetailsFiles) {
    SortedMap<String, File> result =
        new TreeMap<String, File>(remainingDetailsFiles);
    SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
        "yyyy-MM-dd HH:mm:ss");
    dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    for (Map.Entry<String, Node> relay : this.relays.entrySet()) {
      String fingerprint = relay.getKey();

      /* Read details file for this relay if it exists. */
      String descriptorParts = null;
      long publishedMillis = -1L;
      if (result.containsKey(fingerprint)) {
        File detailsFile = result.remove(fingerprint);
        try {
          BufferedReader br = new BufferedReader(new FileReader(
              detailsFile));
          String line;
          boolean copyDescriptorParts = false;
          StringBuilder sb = new StringBuilder();
          while ((line = br.readLine()) != null) {
            if (line.startsWith("\"desc_published\":")) {
              /* Extract the fixed-width timestamp value in place. */
              String published = line.substring(
                  "\"desc_published\":\"".length(),
                  "\"desc_published\":\"1970-01-01 00:00:00".length());
              publishedMillis = dateTimeFormat.parse(published).
                  getTime();
              copyDescriptorParts = true;
            }
            if (copyDescriptorParts) {
              sb.append(line + "\n");
            }
          }
          br.close();
          if (sb.length() > 0) {
            descriptorParts = sb.toString();
          }
        } catch (IOException e) {
          System.err.println("Could not read '"
              + detailsFile.getAbsolutePath() + "'. Skipping");
          e.printStackTrace();
          publishedMillis = -1L;
          descriptorParts = null;
        } catch (ParseException e) {
          System.err.println("Could not read '"
              + detailsFile.getAbsolutePath() + "'. Skipping");
          e.printStackTrace();
          publishedMillis = -1L;
          descriptorParts = null;
        }
      }

      /* Generate new descriptor-specific part if we have a more recent
       * descriptor or if the part we read didn't contain a last_restarted
       * line. */
      /* NOTE(review): the code below only compares published times; the
       * last_restarted condition mentioned above is not checked here —
       * verify whether that is intentional. */
      if (this.relayServerDescriptors.containsKey(fingerprint) &&
          (this.relayServerDescriptors.get(fingerprint).
          getPublishedMillis() > publishedMillis)) {
        ServerDescriptor descriptor = this.relayServerDescriptors.get(
            fingerprint);
        StringBuilder sb = new StringBuilder();
        String publishedDateTime = dateTimeFormat.format(
            descriptor.getPublishedMillis());
        String lastRestartedString = dateTimeFormat.format(
            descriptor.getPublishedMillis()
            - descriptor.getUptime() * 1000L);
        int bandwidthRate = descriptor.getBandwidthRate();
        int bandwidthBurst = descriptor.getBandwidthBurst();
        int observedBandwidth = descriptor.getBandwidthObserved();
        int advertisedBandwidth = Math.min(bandwidthRate,
            Math.min(bandwidthBurst, observedBandwidth));
        sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
            + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
            + "\"bandwidth_rate\":" + bandwidthRate + ",\n"
            + "\"bandwidth_burst\":" + bandwidthBurst + ",\n"
            + "\"observed_bandwidth\":" + observedBandwidth + ",\n"
            + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
            + "\"exit_policy\":[");
        int written = 0;
        for (String exitPolicyLine : descriptor.getExitPolicyLines()) {
          sb.append((written++ > 0 ? "," : "") + "\n \"" + exitPolicyLine
              + "\"");
        }
        sb.append("\n]");
        if (descriptor.getContact() != null) {
          sb.append(",\n\"contact\":\""
              + escapeJSON(descriptor.getContact()) + "\"");
        }
        if (descriptor.getPlatform() != null) {
          sb.append(",\n\"platform\":\""
              + escapeJSON(descriptor.getPlatform()) + "\"");
        }
        if (descriptor.getFamilyEntries() != null) {
          sb.append(",\n\"family\":[");
          written = 0;
          for (String familyEntry : descriptor.getFamilyEntries()) {
            sb.append((written++ > 0 ? "," : "") + "\n \"" + familyEntry
                + "\"");
          }
          sb.append("\n]");
        }
        /* The descriptor part carries the closing brace of the whole
         * details document. */
        sb.append("\n}\n");
        descriptorParts = sb.toString();
      }

      /* Generate network-status-specific part. */
      Node entry = relay.getValue();
      String nickname = entry.getNickname();
      String address = entry.getAddress();
      SortedSet<String> orAddresses = new TreeSet<String>(
          entry.getOrAddressesAndPorts());
      orAddresses.add(address + ":" + entry.getOrPort());
      StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
      int addressesWritten = 0;
      for (String orAddress : orAddresses) {
        orAddressesAndPortsBuilder.append(
            (addressesWritten++ > 0 ? "," : "") + "\"" + orAddress
            + "\"");
      }
      String running = entry.getRunning() ? "true" : "false";
      int dirPort = entry.getDirPort();
      String countryCode = entry.getCountryCode();
      String latitude = entry.getLatitude();
      String longitude = entry.getLongitude();
      String countryName = entry.getCountryName();
      String regionName = entry.getRegionName();
      String cityName = entry.getCityName();
      String aSNumber = entry.getASNumber();
      String aSName = entry.getASName();
      long consensusWeight = entry.getConsensusWeight();
      String hostName = entry.getHostName();
      double advertisedBandwidthFraction =
          entry.getAdvertisedBandwidthFraction();
      double consensusWeightFraction =
          entry.getConsensusWeightFraction();
      double guardProbability = entry.getGuardProbability();
      double middleProbability = entry.getMiddleProbability();
      double exitProbability = entry.getExitProbability();
      String defaultPolicy = entry.getDefaultPolicy();
      String portList = entry.getPortList();
      StringBuilder sb = new StringBuilder();
      sb.append("{\"version\":1,\n"
          + "\"nickname\":\"" + nickname + "\",\n"
          + "\"fingerprint\":\"" + fingerprint + "\",\n"
          + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
          + "]");
      if (dirPort != 0) {
        sb.append(",\n\"dir_address\":\"" + address + ":" + dirPort
            + "\"");
      }
      sb.append(",\n\"running\":" + running + ",\n");
      SortedSet<String> relayFlags = entry.getRelayFlags();
      if (!relayFlags.isEmpty()) {
        sb.append("\"flags\":[");
        int written = 0;
        for (String relayFlag : relayFlags) {
          sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag
              + "\"");
        }
        sb.append("]");
      }
      /* Optional fields are only written when present. */
      if (countryCode != null) {
        sb.append(",\n\"country\":\"" + countryCode + "\"");
      }
      if (latitude != null) {
        sb.append(",\n\"latitude\":" + latitude);
      }
      if (longitude != null) {
        sb.append(",\n\"longitude\":" + longitude);
      }
      if (countryName != null) {
        sb.append(",\n\"country_name\":\"" + escapeJSON(countryName)
            + "\"");
      }
      if (regionName != null) {
        sb.append(",\n\"region_name\":\"" + escapeJSON(regionName)
            + "\"");
      }
      if (cityName != null) {
        sb.append(",\n\"city_name\":\"" + escapeJSON(cityName) + "\"");
      }
      if (aSNumber != null) {
        sb.append(",\n\"as_number\":\"" + escapeJSON(aSNumber) + "\"");
      }
      if (aSName != null) {
        sb.append(",\n\"as_name\":\"" + escapeJSON(aSName) + "\"");
      }
      if (consensusWeight >= 0L) {
        sb.append(",\n\"consensus_weight\":"
            + String.valueOf(consensusWeight));
      }
      if (hostName != null) {
        sb.append(",\n\"host_name\":\"" + escapeJSON(hostName) + "\"");
      }
      if (advertisedBandwidthFraction >= 0.0) {
        sb.append(String.format(
            ",\n\"advertised_bandwidth_fraction\":%.9f",
            advertisedBandwidthFraction));
      }
      if (consensusWeightFraction >= 0.0) {
        sb.append(String.format(",\n\"consensus_weight_fraction\":%.9f",
            consensusWeightFraction));
      }
      if (guardProbability >= 0.0) {
        sb.append(String.format(",\n\"guard_probability\":%.9f",
            guardProbability));
      }
      if (middleProbability >= 0.0) {
        sb.append(String.format(",\n\"middle_probability\":%.9f",
            middleProbability));
      }
      if (exitProbability >= 0.0) {
        sb.append(String.format(",\n\"exit_probability\":%.9f",
            exitProbability));
      }
      if (defaultPolicy != null && (defaultPolicy.equals("accept") ||
          defaultPolicy.equals("reject")) && portList != null) {
        sb.append(",\n\"exit_policy_summary\":{\"" + defaultPolicy
            + "\":[");
        int portsWritten = 0;
        for (String portOrPortRange : portList.split(",")) {
          sb.append((portsWritten++ > 0 ? "," : "")
              + "\"" + portOrPortRange + "\"");
        }
        sb.append("]}");
      }

      /* Add exit addresses if at least one of them is distinct from the
       * onion-routing addresses. */
      if (exitListEntries.containsKey(fingerprint)) {
        for (ExitListEntry exitListEntry :
            exitListEntries.get(fingerprint)) {
          entry.addExitAddress(exitListEntry.getExitAddress());
        }
      }
      if (!entry.getExitAddresses().isEmpty()) {
        sb.append(",\n\"exit_addresses\":[");
        int written = 0;
        for (String exitAddress : entry.getExitAddresses()) {
          sb.append((written++ > 0 ? "," : "") + "\"" + exitAddress
              + "\"");
        }
        sb.append("]");
      }
      String statusParts = sb.toString();

      /* Write details file to disk. */
      File detailsFile = new File(detailsFileDirectory, fingerprint);
      try {
        detailsFile.getParentFile().mkdirs();
        BufferedWriter bw = new BufferedWriter(new FileWriter(
            detailsFile));
        bw.write(statusParts);
        if (descriptorParts != null) {
          /* The descriptor part already ends with the closing brace. */
          bw.write(",\n" + descriptorParts);
        } else {
          bw.write("\n}\n");
        }
        bw.close();
      } catch (IOException e) {
        System.err.println("Could not write details file '"
            + detailsFile.getAbsolutePath() + "'. This file may now be "
            + "broken. Ignoring.");
        e.printStackTrace();
      }
    }

    /* Return the files that we didn't update. */
    return result;
  }

  /* Same update pass for bridges; additionally preserves the existing
   * "pool_assignment" line when re-reading the old file. */
  private SortedMap<String, File> updateBridgeDetailsFiles(
      SortedMap<String, File> remainingDetailsFiles) {
    SortedMap<String, File> result =
        new TreeMap<String, File>(remainingDetailsFiles);
    SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
        "yyyy-MM-dd HH:mm:ss");
    dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    for (Map.Entry<String, Node> bridge : this.bridges.entrySet()) {
      String fingerprint = bridge.getKey();

      /* Read details file for this bridge if it exists. */
      String descriptorParts = null, bridgePoolAssignment = null;
      long publishedMillis = -1L;
      if (result.containsKey(fingerprint)) {
        File detailsFile = result.remove(fingerprint);
        try {
          BufferedReader br = new BufferedReader(new FileReader(
              detailsFile));
          String line;
          boolean copyDescriptorParts = false;
          StringBuilder sb = new StringBuilder();
          while ((line = br.readLine()) != null) {
            if (line.startsWith("\"desc_published\":")) {
              String published = line.substring(
                  "\"desc_published\":\"".length(),
                  "\"desc_published\":\"1970-01-01 00:00:00".length());
              publishedMillis = dateTimeFormat.parse(published).
                  getTime();
              copyDescriptorParts = true;
            } else if (line.startsWith("\"pool_assignment\":")) {
              bridgePoolAssignment = line;
              copyDescriptorParts = false;
            } else if (line.equals("}")) {
              copyDescriptorParts = false;
            }
            if (copyDescriptorParts) {
              sb.append(line + "\n");
            }
          }
          br.close();
          descriptorParts = sb.toString();
          /* Strip a trailing comma and/or newline left by the copy. */
          if (descriptorParts.endsWith(",\n")) {
            descriptorParts = descriptorParts.substring(0,
                descriptorParts.length() - 2);
          } else if (descriptorParts.endsWith("\n")) {
            descriptorParts = descriptorParts.substring(0,
                descriptorParts.length() - 1);
          }
        } catch (IOException e) {
          System.err.println("Could not read '"
              + detailsFile.getAbsolutePath() + "'. Skipping");
          e.printStackTrace();
          publishedMillis = -1L;
          descriptorParts = null;
        } catch (ParseException e) {
          System.err.println("Could not read '"
              + detailsFile.getAbsolutePath() + "'. Skipping");
          e.printStackTrace();
          publishedMillis = -1L;
          descriptorParts = null;
        }
      }

      /* Generate new descriptor-specific part if we have a more recent
       * descriptor. */
      if (this.bridgeServerDescriptors.containsKey(fingerprint) &&
          this.bridgeServerDescriptors.get(fingerprint).
getPublishedMillis() > publishedMillis) { ServerDescriptor descriptor = this.bridgeServerDescriptors.get( fingerprint); StringBuilder sb = new StringBuilder(); String publishedDateTime = dateTimeFormat.format( descriptor.getPublishedMillis()); String lastRestartedString = dateTimeFormat.format( descriptor.getPublishedMillis() - descriptor.getUptime() * 1000L); int advertisedBandwidth = Math.min(descriptor.getBandwidthRate(), Math.min(descriptor.getBandwidthBurst(), descriptor.getBandwidthObserved())); sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n" + "\"last_restarted\":\"" + lastRestartedString + "\",\n" + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n" + "\"platform\":\"" + escapeJSON(descriptor.getPlatform()) + "\""); descriptorParts = sb.toString(); } /* Look up bridge pool assignment. */ if (this.bridgePoolAssignments.containsKey(fingerprint)) { bridgePoolAssignment = "\"pool_assignment\":\"" + this.bridgePoolAssignments.get(fingerprint) + "\""; } /* Generate network-status-specific part. */ Node entry = bridge.getValue(); String nickname = entry.getNickname(); String running = entry.getRunning() ? "true" : "false"; String address = entry.getAddress(); SortedSet<String> orAddresses = new TreeSet<String>( entry.getOrAddressesAndPorts()); orAddresses.add(address + ":" + entry.getOrPort()); StringBuilder orAddressesAndPortsBuilder = new StringBuilder(); int addressesWritten = 0; for (String orAddress : orAddresses) { orAddressesAndPortsBuilder.append( (addressesWritten++ > 0 ? 
"," : "") + "\"" + orAddress + "\""); } StringBuilder sb = new StringBuilder(); sb.append("{\"version\":1,\n" + "\"nickname\":\"" + nickname + "\",\n" + "\"hashed_fingerprint\":\"" + fingerprint + "\",\n" + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString() + "],\n" + "\"running\":" + running + ","); SortedSet<String> relayFlags = entry.getRelayFlags(); if (!relayFlags.isEmpty()) { sb.append("\n\"flags\":["); int written = 0; for (String relayFlag : relayFlags) { sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag + "\""); } sb.append("]"); } /* Append descriptor and bridge pool assignment parts. */ if (descriptorParts != null) { sb.append(",\n" + descriptorParts); } if (bridgePoolAssignment != null) { sb.append(",\n" + bridgePoolAssignment); } sb.append("\n}\n"); String detailsLines = sb.toString(); /* Write details file to disk. */ File detailsFile = new File(detailsFileDirectory, fingerprint); try { detailsFile.getParentFile().mkdirs(); BufferedWriter bw = new BufferedWriter(new FileWriter( detailsFile)); bw.write(detailsLines); bw.close(); } catch (IOException e) { System.err.println("Could not write details file '" + detailsFile.getAbsolutePath() + "'. This file may now be " + "broken. Ignoring."); e.printStackTrace(); } } /* Return the files that we didn't update. */ return result; } private void deleteDetailsFiles( SortedMap<String, File> remainingDetailsFiles) { for (File detailsFile : remainingDetailsFiles.values()) { detailsFile.delete(); } } }
package org.torproject.onionoo;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TimeZone;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.commons.lang.StringEscapeUtils;
import org.torproject.descriptor.BridgePoolAssignment;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.DescriptorFile;
import org.torproject.descriptor.DescriptorReader;
import org.torproject.descriptor.DescriptorSourceFactory;
import org.torproject.descriptor.ExitList;
import org.torproject.descriptor.ExitListEntry;
import org.torproject.descriptor.ServerDescriptor;

/* Write updated detail data files to disk and delete files of relays or
 * bridges that fell out of the summary list.
 *
 * The parts of details files coming from server descriptors always come
 * from the last known descriptor of a relay or bridge, not from the
 * descriptor that was last referenced in a network status. */
public class DetailDataWriter {

  /* Current relays, keyed by fingerprint. */
  private SortedMap<String, Node> relays;

  /* Sets the current relays; must be called before any of the read/write
   * methods that iterate over relays. */
  public void setCurrentRelays(SortedMap<String, Node> currentRelays) {
    this.relays = currentRelays;
  }

  /* Current bridges, keyed by (hashed) fingerprint. */
  private SortedMap<String, Node> bridges;

  /* Sets the current bridges; must be called before
   * updateBridgeDetailsFiles() runs. */
  public void setCurrentBridges(SortedMap<String, Node> currentBridges) {
    this.bridges = currentBridges;
  }

  /* Time a worker waits for a single reverse-DNS request: 10 seconds. */
  private static final long RDNS_LOOKUP_MAX_REQUEST_MILLIS = 10L * 1000L;

  /* Overall budget for all reverse-DNS lookups: 5 minutes. */
  private static final long RDNS_LOOKUP_MAX_DURATION_MILLIS =
      5L * 60L * 1000L;

  /* Re-resolve an address only if its last lookup is older than 12
   * hours. */
  private static final long RDNS_LOOKUP_MAX_AGE_MILLIS =
      12L * 60L * 60L * 1000L;

  /* Number of concurrent lookup worker threads. */
  private static final int RDNS_LOOKUP_WORKERS_NUM = 5;

  /* Addresses still waiting to be resolved; shared work queue guarded by
   * synchronizing on the set itself. */
  private Set<String> rdnsLookupJobs;

  /* Address -> resolved host name; guarded by synchronizing on the map
   * itself. */
  private Map<String, String> rdnsLookupResults;

  /* Timestamp when the current lookup round started. */
  private long startedRdnsLookups;

  /* Worker threads started for the current lookup round. */
  private List<RdnsLookupWorker> rdnsLookupWorkers;

  /* Kicks off reverse-DNS lookups for all relay addresses whose last
   * lookup is older than RDNS_LOOKUP_MAX_AGE_MILLIS, using a pool of
   * daemon worker threads. Call finishReverseDomainNameLookups() to
   * collect the results. */
  public void startReverseDomainNameLookups() {
    this.startedRdnsLookups = System.currentTimeMillis();
    this.rdnsLookupJobs = new HashSet<String>();
    for (Node relay : relays.values()) {
      if (relay.getLastRdnsLookup() < this.startedRdnsLookups
          - RDNS_LOOKUP_MAX_AGE_MILLIS) {
        this.rdnsLookupJobs.add(relay.getAddress());
      }
    }
    this.rdnsLookupResults = new HashMap<String, String>();
    this.rdnsLookupWorkers = new ArrayList<RdnsLookupWorker>();
    for (int i = 0; i < RDNS_LOOKUP_WORKERS_NUM; i++) {
      RdnsLookupWorker rdnsLookupWorker = new RdnsLookupWorker();
      this.rdnsLookupWorkers.add(rdnsLookupWorker);
      rdnsLookupWorker.setDaemon(true);
      rdnsLookupWorker.start();
    }
  }

  /* Waits for all lookup workers to terminate, then copies any resolved
   * host names into the corresponding relay Nodes and records the lookup
   * time. */
  public void finishReverseDomainNameLookups() {
    for (RdnsLookupWorker rdnsLookupWorker : this.rdnsLookupWorkers) {
      try {
        rdnsLookupWorker.join();
      } catch (InterruptedException e) {
        /* This is not something that we can take care of. Just leave the
         * worker thread alone. */
        /* NOTE(review): the interrupt status is not restored here
         * (Thread.currentThread().interrupt()); confirm callers don't
         * rely on it. */
      }
    }
    synchronized (this.rdnsLookupResults) {
      for (Node relay : relays.values()) {
        if (this.rdnsLookupResults.containsKey(relay.getAddress())) {
          relay.setHostName(this.rdnsLookupResults.get(
              relay.getAddress()));
          relay.setLastRdnsLookup(this.startedRdnsLookups);
        }
      }
    }
  }

  /* Worker thread that repeatedly takes one address from the shared job
   * set and resolves it via a separate, time-bounded request thread.
   * Stops when the job set is empty or the overall lookup deadline
   * (RDNS_LOOKUP_MAX_DURATION_MILLIS after the round started) has
   * passed. */
  private class RdnsLookupWorker extends Thread {
    public void run() {
      while (System.currentTimeMillis() - RDNS_LOOKUP_MAX_DURATION_MILLIS
          <= startedRdnsLookups) {
        String rdnsLookupJob = null;
        synchronized (rdnsLookupJobs) {
          /* Take an arbitrary single job from the set; break immediately
           * after removal to avoid iterating a modified collection. */
          for (String job : rdnsLookupJobs) {
            rdnsLookupJob = job;
            rdnsLookupJobs.remove(job);
            break;
          }
        }
        if (rdnsLookupJob == null) {
          break;
        }
        RdnsLookupRequest request = new RdnsLookupRequest(this,
            rdnsLookupJob);
        request.setDaemon(true);
        request.start();
        try {
          /* Wait at most RDNS_LOOKUP_MAX_REQUEST_MILLIS; the request
           * thread interrupts us early when it finishes. */
          Thread.sleep(RDNS_LOOKUP_MAX_REQUEST_MILLIS);
        } catch (InterruptedException e) {
          /* Getting interrupted should be the default case. */
        }
        String hostName = request.getHostName();
        if (hostName != null) {
          synchronized (rdnsLookupResults) {
            rdnsLookupResults.put(rdnsLookupJob, hostName);
          }
        }
      }
    }
  }

  /* Single reverse-DNS request, run on its own thread so the worker can
   * bound the wait time; interrupts its parent worker when done. */
  private class RdnsLookupRequest extends Thread {
    RdnsLookupWorker parent;
    String address, hostName;
    public RdnsLookupRequest(RdnsLookupWorker parent, String address) {
      this.parent = parent;
      this.address = address;
    }
    public void run() {
      try {
        String result =
            InetAddress.getByName(this.address).getHostName();
        synchronized (this) {
          this.hostName = result;
        }
      } catch (UnknownHostException e) {
        /* We'll try again the next time. */
      }
      this.parent.interrupt();
    }
    /* Returns the resolved host name, or null if the lookup failed or
     * has not completed yet. */
    public synchronized String getHostName() {
      return hostName;
    }
  }

  /* Most recently published server descriptor per relay fingerprint. */
  private Map<String, ServerDescriptor> relayServerDescriptors =
      new HashMap<String, ServerDescriptor>();

  /* Reads all relay server descriptors from disk, keeping only the most
   * recently published descriptor per fingerprint. */
  public void readRelayServerDescriptors() {
    DescriptorReader reader =
        DescriptorSourceFactory.createDescriptorReader();
    reader.addDirectory(new File(
        "in/relay-descriptors/server-descriptors"));
    /* Don't remember which server descriptors we already parsed. If we
     * parse a server descriptor now and first learn about the relay in a
     * later consensus, we'll never write the descriptor content anywhere.
     * The result would be details files containing no descriptor parts
     * until the relay publishes the next descriptor. */
    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
    while (descriptorFiles.hasNext()) {
      DescriptorFile descriptorFile = descriptorFiles.next();
      if (descriptorFile.getException() != null) {
        System.out.println("Could not parse "
            + descriptorFile.getFileName());
        descriptorFile.getException().printStackTrace();
      }
      if (descriptorFile.getDescriptors() != null) {
        for (Descriptor descriptor : descriptorFile.getDescriptors()) {
          if (descriptor instanceof ServerDescriptor) {
            ServerDescriptor serverDescriptor =
                (ServerDescriptor) descriptor;
            String fingerprint = serverDescriptor.getFingerprint();
            /* Keep only the newest descriptor per fingerprint. */
            if (!this.relayServerDescriptors.containsKey(fingerprint) ||
                this.relayServerDescriptors.get(fingerprint).
                getPublishedMillis()
                < serverDescriptor.getPublishedMillis()) {
              this.relayServerDescriptors.put(fingerprint,
                  serverDescriptor);
            }
          }
        }
      }
    }
  }

  /* Computes, for every running relay, its fraction of total advertised
   * bandwidth and consensus weight, and — if the consensus contained all
   * eight Wxx bandwidth-weight parameters — its guard/middle/exit path
   * selection probabilities. Results are stored on the relay Nodes. */
  public void calculatePathSelectionProbabilities(
      SortedMap<String, Integer> bandwidthWeights) {
    boolean consensusContainsBandwidthWeights = false;
    double wgg = 0.0, wgd = 0.0, wmg = 0.0, wmm = 0.0, wme = 0.0,
        wmd = 0.0, wee = 0.0, wed = 0.0;
    if (bandwidthWeights != null) {
      /* Only use the weights if all eight parameters are present. */
      SortedSet<String> weightKeys = new TreeSet<String>(Arrays.asList(
          "Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
      weightKeys.removeAll(bandwidthWeights.keySet());
      if (weightKeys.isEmpty()) {
        consensusContainsBandwidthWeights = true;
        /* Consensus weights are given in units of 1/10000. */
        wgg = ((double) bandwidthWeights.get("Wgg")) / 10000.0;
        wgd = ((double) bandwidthWeights.get("Wgd")) / 10000.0;
        wmg = ((double) bandwidthWeights.get("Wmg")) / 10000.0;
        wmm = ((double) bandwidthWeights.get("Wmm")) / 10000.0;
        wme = ((double) bandwidthWeights.get("Wme")) / 10000.0;
        wmd = ((double) bandwidthWeights.get("Wmd")) / 10000.0;
        wee = ((double) bandwidthWeights.get("Wee")) / 10000.0;
        wed = ((double) bandwidthWeights.get("Wed")) / 10000.0;
      }
    } else {
      System.err.println("Could not determine most recent Wxx parameter "
          + "values, probably because we didn't parse a consensus in "
          + "this execution. All relays' guard/middle/exit weights are "
          + "going to be 0.0.");
    }
    SortedMap<String, Double>
        advertisedBandwidths = new TreeMap<String, Double>(),
        consensusWeights = new TreeMap<String, Double>(),
        guardWeights = new TreeMap<String, Double>(),
        middleWeights = new TreeMap<String, Double>(),
        exitWeights = new TreeMap<String, Double>();
    double totalAdvertisedBandwidth = 0.0;
    double totalConsensusWeight = 0.0;
    double totalGuardWeight = 0.0;
    double totalMiddleWeight = 0.0;
    double totalExitWeight = 0.0;
    /* First pass: accumulate per-relay weights and the totals. */
    for (Map.Entry<String, Node> e : this.relays.entrySet()) {
      String fingerprint = e.getKey();
      Node relay = e.getValue();
      if (!relay.getRunning()) {
        continue;
      }
      boolean isExit = relay.getRelayFlags().contains("Exit") &&
          !relay.getRelayFlags().contains("BadExit");
      boolean isGuard = relay.getRelayFlags().contains("Guard");
      if (this.relayServerDescriptors.containsKey(fingerprint)) {
        ServerDescriptor serverDescriptor =
            this.relayServerDescriptors.get(fingerprint);
        /* Advertised bandwidth is the minimum of rate, burst, and
         * observed bandwidth. */
        double advertisedBandwidth = (double) Math.min(Math.min(
            serverDescriptor.getBandwidthBurst(),
            serverDescriptor.getBandwidthObserved()),
            serverDescriptor.getBandwidthRate());
        advertisedBandwidths.put(fingerprint, advertisedBandwidth);
        totalAdvertisedBandwidth += advertisedBandwidth;
      }
      double consensusWeight = (double) relay.getConsensusWeight();
      consensusWeights.put(fingerprint, consensusWeight);
      totalConsensusWeight += consensusWeight;
      if (consensusContainsBandwidthWeights) {
        /* Scale the consensus weight by the Wxx parameter matching this
         * relay's position (guard+exit / guard-only / exit-only /
         * middle-only). */
        double guardWeight = consensusWeight,
            middleWeight = consensusWeight,
            exitWeight = consensusWeight;
        if (isGuard && isExit) {
          guardWeight *= wgd;
          middleWeight *= wmd;
          exitWeight *= wed;
        } else if (isGuard) {
          guardWeight *= wgg;
          middleWeight *= wmg;
          exitWeight = 0.0;
        } else if (isExit) {
          guardWeight = 0.0;
          middleWeight *= wme;
          exitWeight *= wee;
        } else {
          guardWeight = 0.0;
          middleWeight *= wmm;
          exitWeight = 0.0;
        }
        guardWeights.put(fingerprint, guardWeight);
        middleWeights.put(fingerprint, middleWeight);
        exitWeights.put(fingerprint, exitWeight);
        totalGuardWeight += guardWeight;
        totalMiddleWeight += middleWeight;
        totalExitWeight += exitWeight;
      }
    }
    /* Second pass: store each relay's fraction of the totals. */
    for (Map.Entry<String, Node> e : this.relays.entrySet()) {
      String fingerprint = e.getKey();
      Node relay = e.getValue();
      if (advertisedBandwidths.containsKey(fingerprint)) {
        relay.setAdvertisedBandwidthFraction(advertisedBandwidths.get(
            fingerprint) / totalAdvertisedBandwidth);
      }
      if (consensusWeights.containsKey(fingerprint)) {
        relay.setConsensusWeightFraction(consensusWeights.get(fingerprint)
            / totalConsensusWeight);
      }
      if (guardWeights.containsKey(fingerprint)) {
        relay.setGuardProbability(guardWeights.get(fingerprint)
            / totalGuardWeight);
      }
      if (middleWeights.containsKey(fingerprint)) {
        relay.setMiddleProbability(middleWeights.get(fingerprint)
            / totalMiddleWeight);
      }
      if (exitWeights.containsKey(fingerprint)) {
        relay.setExitProbability(exitWeights.get(fingerprint)
            / totalExitWeight);
      }
    }
  }

  /* Reference time used to discard stale exit list entries. */
  private long now = System.currentTimeMillis();

  /* Exit list entries per relay fingerprint, restricted to scans from
   * the last 24 hours. */
  private Map<String, Set<ExitListEntry>> exitListEntries =
      new HashMap<String, Set<ExitListEntry>>();

  /* Reads exit lists from disk, collecting exit addresses scanned within
   * the last 24 hours, grouped by relay fingerprint. */
  public void readExitLists() {
    DescriptorReader reader =
        DescriptorSourceFactory.createDescriptorReader();
    reader.addDirectory(new File(
        "in/exit-lists"));
    reader.setExcludeFiles(new File("status/exit-list-history"));
    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
    while (descriptorFiles.hasNext()) {
      DescriptorFile descriptorFile = descriptorFiles.next();
      if (descriptorFile.getException() != null) {
        System.out.println("Could not parse "
            + descriptorFile.getFileName());
        descriptorFile.getException().printStackTrace();
      }
      if (descriptorFile.getDescriptors() != null) {
        for (Descriptor descriptor : descriptorFile.getDescriptors()) {
          if (descriptor instanceof ExitList) {
            ExitList exitList = (ExitList) descriptor;
            for (ExitListEntry exitListEntry :
                exitList.getExitListEntries()) {
              /* Skip entries scanned more than 24 hours ago. */
              if (exitListEntry.getScanMillis() <
                  this.now - 24L * 60L * 60L * 1000L) {
                continue;
              }
              String fingerprint = exitListEntry.getFingerprint();
              if (!this.exitListEntries.containsKey(fingerprint)) {
                this.exitListEntries.put(fingerprint,
                    new HashSet<ExitListEntry>());
              }
              this.exitListEntries.get(fingerprint).add(exitListEntry);
            }
          }
        }
      }
    }
  }

  /* Most recently published server descriptor per bridge fingerprint. */
  private Map<String, ServerDescriptor> bridgeServerDescriptors =
      new HashMap<String, ServerDescriptor>();

  /* Reads all bridge server descriptors from disk, keeping only the most
   * recently published descriptor per fingerprint. */
  public void readBridgeServerDescriptors() {
    DescriptorReader reader =
        DescriptorSourceFactory.createDescriptorReader();
    reader.addDirectory(new File(
        "in/bridge-descriptors/server-descriptors"));
    /* Don't remember which server descriptors we already parsed. If we
     * parse a server descriptor now and first learn about the relay in a
     * later status, we'll never write the descriptor content anywhere.
     * The result would be details files containing no descriptor parts
     * until the bridge publishes the next descriptor. */
    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
    while (descriptorFiles.hasNext()) {
      DescriptorFile descriptorFile = descriptorFiles.next();
      if (descriptorFile.getException() != null) {
        System.out.println("Could not parse "
            + descriptorFile.getFileName());
        descriptorFile.getException().printStackTrace();
      }
      if (descriptorFile.getDescriptors() != null) {
        for (Descriptor descriptor : descriptorFile.getDescriptors()) {
          if (descriptor instanceof ServerDescriptor) {
            ServerDescriptor serverDescriptor =
                (ServerDescriptor) descriptor;
            String fingerprint = serverDescriptor.getFingerprint();
            /* Keep only the newest descriptor per fingerprint. */
            if (!this.bridgeServerDescriptors.containsKey(fingerprint) ||
                this.bridgeServerDescriptors.get(fingerprint).
                getPublishedMillis()
                < serverDescriptor.getPublishedMillis()) {
              this.bridgeServerDescriptors.put(fingerprint,
                  serverDescriptor);
            }
          }
        }
      }
    }
  }

  /* Bridge pool assignment details per bridge fingerprint; later files
   * overwrite earlier ones. */
  private Map<String, String> bridgePoolAssignments =
      new HashMap<String, String>();

  /* Reads bridge pool assignment files from disk and stores the
   * assignment string per bridge fingerprint. */
  public void readBridgePoolAssignments() {
    DescriptorReader reader =
        DescriptorSourceFactory.createDescriptorReader();
    reader.addDirectory(new File("in/bridge-pool-assignments"));
    reader.setExcludeFiles(new File("status/bridge-poolassign-history"));
    Iterator<DescriptorFile> descriptorFiles = reader.readDescriptors();
    while (descriptorFiles.hasNext()) {
      DescriptorFile descriptorFile = descriptorFiles.next();
      if (descriptorFile.getException() != null) {
        System.out.println("Could not parse "
            + descriptorFile.getFileName());
        descriptorFile.getException().printStackTrace();
      }
      if (descriptorFile.getDescriptors() != null) {
        for (Descriptor descriptor : descriptorFile.getDescriptors()) {
          if (descriptor instanceof BridgePoolAssignment) {
            BridgePoolAssignment bridgePoolAssignment =
                (BridgePoolAssignment) descriptor;
            for (Map.Entry<String, String> e :
                bridgePoolAssignment.getEntries().entrySet()) {
              String fingerprint = e.getKey();
              String details = e.getValue();
              this.bridgePoolAssignments.put(fingerprint, details);
            }
          }
        }
      }
    }
  }

  /* Entry point for writing: updates relay and bridge details files and
   * deletes files of nodes that fell out of the summary list. */
  public void writeDetailDataFiles() {
    SortedMap<String, File> remainingDetailsFiles =
        this.listAllDetailsFiles();
    remainingDetailsFiles = this.updateRelayDetailsFiles(
        remainingDetailsFiles);
    remainingDetailsFiles = this.updateBridgeDetailsFiles(
        remainingDetailsFiles);
    this.deleteDetailsFiles(remainingDetailsFiles);
  }

  /* Output directory for details files, one file per fingerprint. */
  private File detailsFileDirectory = new File("out/details");

  /* Lists all existing details files, keyed by file name. Only
   * 40-character names are considered (the length of a hex
   * fingerprint). */
  private SortedMap<String, File> listAllDetailsFiles() {
    SortedMap<String, File> result = new TreeMap<String, File>();
    if (detailsFileDirectory.exists() &&
        detailsFileDirectory.isDirectory()) {
      for (File file : detailsFileDirectory.listFiles()) {
        if (file.getName().length() == 40) {
          result.put(file.getName(), file);
        }
      }
    }
    return result;
  }

  /* Escapes a string for embedding in the hand-built JSON output;
   * unescapes single quotes, which escapeJavaScript escapes but JSON
   * does not. */
  private static String escapeJSON(String s) {
    return StringEscapeUtils.escapeJavaScript(s).replaceAll("\\\\'",
        "'");
  }

  /* Rewrites the details file of every current relay. The
   * network-status-specific part is regenerated each time; the
   * descriptor-specific part is carried over from the existing file
   * unless a more recently published server descriptor is known.
   * Returns the files that were not updated (relays no longer in the
   * summary). */
  private SortedMap<String, File> updateRelayDetailsFiles(
      SortedMap<String, File> remainingDetailsFiles) {
    SortedMap<String, File> result =
        new TreeMap<String, File>(remainingDetailsFiles);
    SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
        "yyyy-MM-dd HH:mm:ss");
    dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    for (Map.Entry<String, Node> relay : this.relays.entrySet()) {
      String fingerprint = relay.getKey();

      /* Read details file for this relay if it exists. */
      String descriptorParts = null;
      long publishedMillis = -1L;
      if (result.containsKey(fingerprint)) {
        File detailsFile = result.remove(fingerprint);
        try {
          BufferedReader br = new BufferedReader(new FileReader(
              detailsFile));
          String line;
          boolean copyDescriptorParts = false;
          StringBuilder sb = new StringBuilder();
          while ((line = br.readLine()) != null) {
            if (line.startsWith("\"desc_published\":")) {
              /* Extract the fixed-width timestamp that follows the
               * key. */
              String published = line.substring(
                  "\"desc_published\":\"".length(),
                  "\"desc_published\":\"1970-01-01 00:00:00".length());
              publishedMillis = dateTimeFormat.parse(published).getTime();
              copyDescriptorParts = true;
            }
            if (copyDescriptorParts) {
              sb.append(line + "\n");
            }
          }
          /* NOTE(review): close() is skipped if an exception is thrown
           * above; a finally block (or try-with-resources) would avoid
           * leaking the file handle. */
          br.close();
          if (sb.length() > 0) {
            descriptorParts = sb.toString();
          }
        } catch (IOException e) {
          System.err.println("Could not read '"
              + detailsFile.getAbsolutePath() + "'. Skipping");
          e.printStackTrace();
          publishedMillis = -1L;
          descriptorParts = null;
        } catch (ParseException e) {
          System.err.println("Could not read '"
              + detailsFile.getAbsolutePath() + "'. Skipping");
          e.printStackTrace();
          publishedMillis = -1L;
          descriptorParts = null;
        }
      }

      /* Generate new descriptor-specific part if we have a more recent
       * descriptor or if the part we read didn't contain a last_restarted
       * line. */
      /* NOTE(review): the code only compares publication times; the
       * "didn't contain a last_restarted line" case mentioned above is
       * not checked — confirm whether the comment or the code is
       * outdated. */
      if (this.relayServerDescriptors.containsKey(fingerprint) &&
          (this.relayServerDescriptors.get(fingerprint).
          getPublishedMillis() > publishedMillis)) {
        ServerDescriptor descriptor = this.relayServerDescriptors.get(
            fingerprint);
        StringBuilder sb = new StringBuilder();
        String publishedDateTime = dateTimeFormat.format(
            descriptor.getPublishedMillis());
        String lastRestartedString = dateTimeFormat.format(
            descriptor.getPublishedMillis()
            - descriptor.getUptime() * 1000L);
        int bandwidthRate = descriptor.getBandwidthRate();
        int bandwidthBurst = descriptor.getBandwidthBurst();
        int observedBandwidth = descriptor.getBandwidthObserved();
        /* Advertised bandwidth is the minimum of rate, burst, and
         * observed bandwidth. */
        int advertisedBandwidth = Math.min(bandwidthRate,
            Math.min(bandwidthBurst, observedBandwidth));
        sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
            + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
            + "\"bandwidth_rate\":" + bandwidthRate + ",\n"
            + "\"bandwidth_burst\":" + bandwidthBurst + ",\n"
            + "\"observed_bandwidth\":" + observedBandwidth + ",\n"
            + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
            + "\"exit_policy\":[");
        int written = 0;
        for (String exitPolicyLine : descriptor.getExitPolicyLines()) {
          sb.append((written++ > 0 ? "," : "") + "\n \"" + exitPolicyLine
              + "\"");
        }
        sb.append("\n]");
        if (descriptor.getContact() != null) {
          sb.append(",\n\"contact\":\""
              + escapeJSON(descriptor.getContact()) + "\"");
        }
        if (descriptor.getPlatform() != null) {
          sb.append(",\n\"platform\":\""
              + escapeJSON(descriptor.getPlatform()) + "\"");
        }
        if (descriptor.getFamilyEntries() != null) {
          sb.append(",\n\"family\":[");
          written = 0;
          for (String familyEntry : descriptor.getFamilyEntries()) {
            sb.append((written++ > 0 ? "," : "") + "\n \"" + familyEntry
                + "\"");
          }
          sb.append("\n]");
        }
        sb.append("\n}\n");
        descriptorParts = sb.toString();
      }

      /* Generate network-status-specific part. */
      Node entry = relay.getValue();
      String nickname = entry.getNickname();
      String address = entry.getAddress();
      SortedSet<String> orAddresses = new TreeSet<String>(
          entry.getOrAddressesAndPorts());
      orAddresses.add(address + ":" + entry.getOrPort());
      StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
      int addressesWritten = 0;
      for (String orAddress : orAddresses) {
        orAddressesAndPortsBuilder.append(
            (addressesWritten++ > 0 ? "," : "") + "\""
            + orAddress.toLowerCase() + "\"");
      }
      String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
      String firstSeen = dateTimeFormat.format(
          entry.getFirstSeenMillis());
      String running = entry.getRunning() ? "true" : "false";
      int dirPort = entry.getDirPort();
      String countryCode = entry.getCountryCode();
      String latitude = entry.getLatitude();
      String longitude = entry.getLongitude();
      String countryName = entry.getCountryName();
      String regionName = entry.getRegionName();
      String cityName = entry.getCityName();
      String aSNumber = entry.getASNumber();
      String aSName = entry.getASName();
      long consensusWeight = entry.getConsensusWeight();
      String hostName = entry.getHostName();
      double advertisedBandwidthFraction =
          entry.getAdvertisedBandwidthFraction();
      double consensusWeightFraction =
          entry.getConsensusWeightFraction();
      double guardProbability = entry.getGuardProbability();
      double middleProbability = entry.getMiddleProbability();
      double exitProbability = entry.getExitProbability();
      String defaultPolicy = entry.getDefaultPolicy();
      String portList = entry.getPortList();
      StringBuilder sb = new StringBuilder();
      sb.append("{\"version\":1,\n"
          + "\"nickname\":\"" + nickname + "\",\n"
          + "\"fingerprint\":\"" + fingerprint + "\",\n"
          + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
          + "]");
      if (dirPort != 0) {
        sb.append(",\n\"dir_address\":\"" + address + ":" + dirPort
            + "\"");
      }
      sb.append(",\n\"last_seen\":\"" + lastSeen + "\"");
      sb.append(",\n\"first_seen\":\"" + firstSeen + "\"");
      sb.append(",\n\"running\":" + running);
      SortedSet<String> relayFlags = entry.getRelayFlags();
      if (!relayFlags.isEmpty()) {
        sb.append(",\n\"flags\":[");
        int written = 0;
        for (String relayFlag : relayFlags) {
          sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag
              + "\"");
        }
        sb.append("]");
      }
      /* Optional fields are only written when known. */
      if (countryCode != null) {
        sb.append(",\n\"country\":\"" + countryCode + "\"");
      }
      if (latitude != null) {
        sb.append(",\n\"latitude\":" + latitude);
      }
      if (longitude != null) {
        sb.append(",\n\"longitude\":" + longitude);
      }
      if (countryName != null) {
        sb.append(",\n\"country_name\":\"" + escapeJSON(countryName)
            + "\"");
      }
      if (regionName != null) {
        sb.append(",\n\"region_name\":\"" + escapeJSON(regionName)
            + "\"");
      }
      if (cityName != null) {
        sb.append(",\n\"city_name\":\"" + escapeJSON(cityName)
            + "\"");
      }
      if (aSNumber != null) {
        sb.append(",\n\"as_number\":\"" + escapeJSON(aSNumber)
            + "\"");
      }
      if (aSName != null) {
        sb.append(",\n\"as_name\":\"" + escapeJSON(aSName) + "\"");
      }
      if (consensusWeight >= 0L) {
        sb.append(",\n\"consensus_weight\":"
            + String.valueOf(consensusWeight));
      }
      if (hostName != null) {
        sb.append(",\n\"host_name\":\"" + escapeJSON(hostName)
            + "\"");
      }
      if (advertisedBandwidthFraction >= 0.0) {
        sb.append(String.format(
            ",\n\"advertised_bandwidth_fraction\":%.9f",
            advertisedBandwidthFraction));
      }
      if (consensusWeightFraction >= 0.0) {
        sb.append(String.format(",\n\"consensus_weight_fraction\":%.9f",
            consensusWeightFraction));
      }
      if (guardProbability >= 0.0) {
        sb.append(String.format(",\n\"guard_probability\":%.9f",
            guardProbability));
      }
      if (middleProbability >= 0.0) {
        sb.append(String.format(",\n\"middle_probability\":%.9f",
            middleProbability));
      }
      if (exitProbability >= 0.0) {
        sb.append(String.format(",\n\"exit_probability\":%.9f",
            exitProbability));
      }
      if (defaultPolicy != null && (defaultPolicy.equals("accept") ||
          defaultPolicy.equals("reject")) && portList != null) {
        sb.append(",\n\"exit_policy_summary\":{\"" + defaultPolicy
            + "\":[");
        int portsWritten = 0;
        for (String portOrPortRange : portList.split(",")) {
          sb.append((portsWritten++ > 0 ? "," : "")
              + "\"" + portOrPortRange + "\"");
        }
        sb.append("]}");
      }

      /* Add exit addresses if at least one of them is distinct from the
       * onion-routing addresses. */
      if (exitListEntries.containsKey(fingerprint)) {
        for (ExitListEntry exitListEntry :
            exitListEntries.get(fingerprint)) {
          entry.addExitAddress(exitListEntry.getExitAddress());
        }
      }
      if (!entry.getExitAddresses().isEmpty()) {
        sb.append(",\n\"exit_addresses\":[");
        int written = 0;
        for (String exitAddress : entry.getExitAddresses()) {
          sb.append((written++ > 0 ? "," : "") + "\""
              + exitAddress.toLowerCase() + "\"");
        }
        sb.append("]");
      }
      String statusParts = sb.toString();

      /* Write details file to disk. */
      File detailsFile = new File(detailsFileDirectory, fingerprint);
      try {
        detailsFile.getParentFile().mkdirs();
        BufferedWriter bw = new BufferedWriter(new FileWriter(
            detailsFile));
        bw.write(statusParts);
        /* The descriptor part, when present, already ends with the
         * closing brace; otherwise close the JSON object here. */
        if (descriptorParts != null) {
          bw.write(",\n" + descriptorParts);
        } else {
          bw.write("\n}\n");
        }
        bw.close();
      } catch (IOException e) {
        System.err.println("Could not write details file '"
            + detailsFile.getAbsolutePath() + "'. This file may now be "
            + "broken. Ignoring.");
        e.printStackTrace();
      }
    }

    /* Return the files that we didn't update. */
    return result;
  }

  /* Rewrites the details file of every current bridge, analogous to
   * updateRelayDetailsFiles() but with bridge-specific fields (hashed
   * fingerprint, pool assignment) and without geolocation or path
   * selection data. Returns the files that were not updated. */
  private SortedMap<String, File> updateBridgeDetailsFiles(
      SortedMap<String, File> remainingDetailsFiles) {
    SortedMap<String, File> result =
        new TreeMap<String, File>(remainingDetailsFiles);
    SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
        "yyyy-MM-dd HH:mm:ss");
    dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    for (Map.Entry<String, Node> bridge : this.bridges.entrySet()) {
      String fingerprint = bridge.getKey();

      /* Read details file for this bridge if it exists. */
      String descriptorParts = null, bridgePoolAssignment = null;
      long publishedMillis = -1L;
      if (result.containsKey(fingerprint)) {
        File detailsFile = result.remove(fingerprint);
        try {
          BufferedReader br = new BufferedReader(new FileReader(
              detailsFile));
          String line;
          boolean copyDescriptorParts = false;
          StringBuilder sb = new StringBuilder();
          while ((line = br.readLine()) != null) {
            if (line.startsWith("\"desc_published\":")) {
              /* Extract the fixed-width timestamp that follows the
               * key. */
              String published = line.substring(
                  "\"desc_published\":\"".length(),
                  "\"desc_published\":\"1970-01-01 00:00:00".length());
              publishedMillis = dateTimeFormat.parse(published).getTime();
              copyDescriptorParts = true;
            } else if (line.startsWith("\"pool_assignment\":")) {
              bridgePoolAssignment = line;
              copyDescriptorParts = false;
            } else if (line.equals("}")) {
              copyDescriptorParts = false;
            }
            if (copyDescriptorParts) {
              sb.append(line + "\n");
            }
          }
          /* NOTE(review): close() is skipped if an exception is thrown
           * above; a finally block (or try-with-resources) would avoid
           * leaking the file handle. */
          br.close();
          descriptorParts = sb.toString();
          /* Strip the trailing separator/newline left over from the
           * line-by-line copy. */
          if (descriptorParts.endsWith(",\n")) {
            descriptorParts = descriptorParts.substring(0,
                descriptorParts.length() - 2);
          } else if (descriptorParts.endsWith("\n")) {
            descriptorParts = descriptorParts.substring(0,
                descriptorParts.length() - 1);
          }
        } catch (IOException e) {
          System.err.println("Could not read '"
              + detailsFile.getAbsolutePath() + "'. Skipping");
          e.printStackTrace();
          publishedMillis = -1L;
          descriptorParts = null;
        } catch (ParseException e) {
          System.err.println("Could not read '"
              + detailsFile.getAbsolutePath() + "'. Skipping");
          e.printStackTrace();
          publishedMillis = -1L;
          descriptorParts = null;
        }
      }

      /* Generate new descriptor-specific part if we have a more recent
       * descriptor. */
      if (this.bridgeServerDescriptors.containsKey(fingerprint) &&
          this.bridgeServerDescriptors.get(fingerprint).
          getPublishedMillis() > publishedMillis) {
        ServerDescriptor descriptor = this.bridgeServerDescriptors.get(
            fingerprint);
        StringBuilder sb = new StringBuilder();
        String publishedDateTime = dateTimeFormat.format(
            descriptor.getPublishedMillis());
        String lastRestartedString = dateTimeFormat.format(
            descriptor.getPublishedMillis()
            - descriptor.getUptime() * 1000L);
        /* Advertised bandwidth is the minimum of rate, burst, and
         * observed bandwidth. */
        int advertisedBandwidth = Math.min(descriptor.getBandwidthRate(),
            Math.min(descriptor.getBandwidthBurst(),
            descriptor.getBandwidthObserved()));
        sb.append("\"desc_published\":\"" + publishedDateTime + "\",\n"
            + "\"last_restarted\":\"" + lastRestartedString + "\",\n"
            + "\"advertised_bandwidth\":" + advertisedBandwidth + ",\n"
            + "\"platform\":\"" + escapeJSON(descriptor.getPlatform())
            + "\"");
        descriptorParts = sb.toString();
      }

      /* Look up bridge pool assignment. */
      if (this.bridgePoolAssignments.containsKey(fingerprint)) {
        bridgePoolAssignment = "\"pool_assignment\":\""
            + this.bridgePoolAssignments.get(fingerprint) + "\"";
      }

      /* Generate network-status-specific part. */
      Node entry = bridge.getValue();
      String nickname = entry.getNickname();
      String lastSeen = dateTimeFormat.format(entry.getLastSeenMillis());
      String firstSeen = dateTimeFormat.format(
          entry.getFirstSeenMillis());
      String running = entry.getRunning() ? "true" : "false";
      String address = entry.getAddress();
      SortedSet<String> orAddresses = new TreeSet<String>(
          entry.getOrAddressesAndPorts());
      orAddresses.add(address + ":" + entry.getOrPort());
      StringBuilder orAddressesAndPortsBuilder = new StringBuilder();
      int addressesWritten = 0;
      for (String orAddress : orAddresses) {
        orAddressesAndPortsBuilder.append(
            (addressesWritten++ > 0 ? "," : "") + "\""
            + orAddress.toLowerCase() + "\"");
      }
      StringBuilder sb = new StringBuilder();
      sb.append("{\"version\":1,\n"
          + "\"nickname\":\"" + nickname + "\",\n"
          + "\"hashed_fingerprint\":\"" + fingerprint + "\",\n"
          + "\"or_addresses\":[" + orAddressesAndPortsBuilder.toString()
          + "],\n\"last_seen\":\"" + lastSeen + "\",\n\"first_seen\":\""
          + firstSeen + "\",\n\"running\":" + running);
      SortedSet<String> relayFlags = entry.getRelayFlags();
      if (!relayFlags.isEmpty()) {
        sb.append(",\n\"flags\":[");
        int written = 0;
        for (String relayFlag : relayFlags) {
          sb.append((written++ > 0 ? "," : "") + "\"" + relayFlag
              + "\"");
        }
        sb.append("]");
      }

      /* Append descriptor and bridge pool assignment parts. */
      if (descriptorParts != null && descriptorParts.length() != 0) {
        sb.append(",\n" + descriptorParts);
      }
      if (bridgePoolAssignment != null) {
        sb.append(",\n" + bridgePoolAssignment);
      }
      sb.append("\n}\n");
      String detailsLines = sb.toString();

      /* Write details file to disk. */
      File detailsFile = new File(detailsFileDirectory, fingerprint);
      try {
        detailsFile.getParentFile().mkdirs();
        BufferedWriter bw = new BufferedWriter(new FileWriter(
            detailsFile));
        bw.write(detailsLines);
        bw.close();
      } catch (IOException e) {
        System.err.println("Could not write details file '"
            + detailsFile.getAbsolutePath() + "'. This file may now be "
            + "broken. Ignoring.");
        e.printStackTrace();
      }
    }

    /* Return the files that we didn't update. */
    return result;
  }

  /* Deletes details files of relays/bridges that fell out of the
   * summary list. */
  private void deleteDetailsFiles(
      SortedMap<String, File> remainingDetailsFiles) {
    for (File detailsFile : remainingDetailsFiles.values()) {
      detailsFile.delete();
    }
  }
}
package org.usfirst.frc.team4536.robot; import edu.wpi.first.wpilibj.Solenoid; /* *Suggested improvement: There needs to be comments in this class! Caleb */ public class Platform { Solenoid rightSolenoid; Solenoid leftSolenoid; public Platform(int rightSolenoidChannel, int leftSolenoidChannel) { rightSolenoid = new Solenoid(rightSolenoidChannel); leftSolenoid = new Solenoid(leftSolenoidChannel); } /* *Suggested improvement: change method name from "get" to something more intuitive, such as "isExtended" or "isRetracted." Caleb */ public boolean get() { return rightSolenoid.get(); // true = extended, false = retracted. This code works because the values must be opposite of each other and only come in 2 combinations.x } public void extend() { rightSolenoid.set(true); leftSolenoid.set(false); } public void retract() { rightSolenoid.set(false); leftSolenoid.set(true); } public void flip() { rightSolenoid.set(!rightSolenoid.get()); leftSolenoid.set(!leftSolenoid.get()); } }
package org.carrot2.text;

import java.util.*;

/**
 * Maps a unique integer index to each added unique token ({@link CharSequence}).
 * Indices are assigned consecutively starting from zero, in first-seen order.
 */
public final class CharSequenceIntMap {

    /**
     * Mutable character sequence reused for token ID lookups, so that
     * {@link #getIndex(CharSequence)} does not allocate on every call.
     */
    private final MutableCharArray buffer = new MutableCharArray("");

    /**
     * A map of previously seen character sequences to their index codes.
     */
    // NOTE(review): replaced Guava's Maps.newHashMap() with a plain HashMap —
    // java.util is already imported and no Guava-specific behavior was used.
    private final Map<MutableCharArray, Integer> tokenImages =
        new HashMap<MutableCharArray, Integer>();

    /**
     * Fetch an index for an existing or new {@link CharSequence}.
     */
    public int getIndex(CharSequence charSequence)
    {
        buffer.reset(charSequence);
        return getIndex(buffer);
    }

    /**
     * Fetch an index for an existing or new {@link MutableCharArray}. This method does
     * not create an intermediate object if not necessary (as does
     * {@link #getIndex(CharSequence)}).
     */
    public int getIndex(MutableCharArray charSequence)
    {
        Integer code = tokenImages.get(charSequence);
        if (code == null)
        {
            // First time we see this token: assign the next consecutive index
            // and store a defensive copy (the argument is mutable).
            code = tokenImages.size();
            tokenImages.put(new MutableCharArray(charSequence), code);
        }
        return code;
    }

    /**
     * Returns unique images of tokens at the moment of making the call, positioned
     * in the array by their assigned index.
     */
    public MutableCharArray [] getTokenImages()
    {
        final MutableCharArray [] result = new MutableCharArray [tokenImages.size()];
        for (final Map.Entry<MutableCharArray, Integer> entry : tokenImages.entrySet())
        {
            result[entry.getValue()] = entry.getKey();
        }
        return result;
    }

    /**
     * @return Returns the current size of the unique images set.
     */
    public int getSize()
    {
        return tokenImages.size();
    }
}
// DisplayViewEvent.java package imagej.display.event; import imagej.display.DisplayView; /** * An event on a display view. * * @author Lee Kamentsky */ public class DisplayViewEvent extends DisplayEvent { private final DisplayView displayView; public DisplayViewEvent(final DisplayView displayView) { super(displayView.getDisplay()); this.displayView = displayView; } public DisplayView getDisplayView() { return displayView; } }
package com.benny.openlauncher.core.widget; import android.appwidget.AppWidgetHostView; import android.content.Context; import android.view.MotionEvent; //Important!! ReadMe //We are now using the old method to detect widget long press, this fixed all the "randomly disappearing" behaviour of widgets //However, you will need to move a bit to trigger the long press, when dragging. But this can be useful, as we can implement a //popup menu of the widget when it was being pressed. public class WidgetView extends AppWidgetHostView { private OnTouchListener onTouchListener; private OnLongClickListener longClick; private long down; public WidgetView(Context context) { super(context); } @Override public void setOnTouchListener(OnTouchListener onTouchListener) { this.onTouchListener = onTouchListener; } @Override public void setOnLongClickListener(OnLongClickListener l) { this.longClick = l; } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { if (onTouchListener != null) onTouchListener.onTouch(this, ev); switch (ev.getActionMasked()) { case MotionEvent.ACTION_DOWN: down = System.currentTimeMillis(); break; case MotionEvent.ACTION_MOVE: boolean upVal = System.currentTimeMillis() - down > 300L; if (upVal) { longClick.onLongClick(WidgetView.this); } break; } return false; } } //Back up // private static final int LONG_PRESS_TIMEOUT = 500; // private final int THRESHOLD; // private OnLongClickListener longClick; // private boolean hasPerformedLongPress; // private float longPressDownX; // private float longPressDownY; // private CheckForLongPress pendingCheckForLongPress; // private OnTouchListener onTouchListener = null; // public WidgetView(Context context) { // super(context); // THRESHOLD = Tool.dp2px(5, context); // @Override // public void setOnLongClickListener(OnLongClickListener l) { // this.longClick = l; // @Override // public boolean onInterceptTouchEvent(MotionEvent ev) { // if (onTouchListener != null && onTouchListener.onTouch(this, ev)) { // 
return true; // // Consume any touch events for ourselves after longpress is triggered // if (hasPerformedLongPress) { // hasPerformedLongPress = false; // return true; // //L.d("onInterceptTouchEvent: ev = %s | x = %f | y = %f", ev.getAction(), ev.getX(), ev.getY()); // // Watch for longpress events at this level to make sure // // users can always pick up this widget // switch (ev.getAction()) { // case MotionEvent.ACTION_DOWN: { // longPressDownX = ev.getX(); // longPressDownY = ev.getY(); // Tool.print("Shit, pressed"); // postCheckForLongClick(); // break; // case MotionEvent.ACTION_UP: // case MotionEvent.ACTION_CANCEL: // cancelLongPressInternally(); // break; // case MotionEvent.ACTION_MOVE: // float diffX = Math.abs(longPressDownX - ev.getX()); // float diffY = Math.abs(longPressDownY - ev.getY()); // //L.d("onInterceptTouchEvent: diffX = %f | diffY = %f | THRESHOLD = %d", diffX, diffY, THRESHOLD); // if (diffX >= THRESHOLD || diffY >= THRESHOLD) { // cancelLongPressInternally(); // break; // // Otherwise continue letting touch events fall through to children // return false; // @Override // public void cancelLongPress() { // super.cancelLongPress(); // cancelLongPressInternally(); // private void cancelLongPressInternally() { // Tool.print("Shit, cancel long press"); // hasPerformedLongPress = false; // removeCallbacks(pendingCheckForLongPress); // @Override // public int getDescendantFocusability() { // return ViewGroup.FOCUS_BLOCK_DESCENDANTS; // private boolean onLongPress() { // return longClick.onLongClick(this); // @Override // public final void setOnTouchListener(OnTouchListener onTouchListener) { // this.onTouchListener = onTouchListener; // private void postCheckForLongClick() { // hasPerformedLongPress = false; // if (pendingCheckForLongPress == null) { // pendingCheckForLongPress = new CheckForLongPress(); // pendingCheckForLongPress.rememberWindowAttachCount(); // postDelayed(pendingCheckForLongPress, LONG_PRESS_TIMEOUT); // Tool.print("Shit, 
posted a delay"); //private class CheckForLongPress implements Runnable { // private int mOriginalWindowAttachCount; // public void run() { // if (getParent() != null && mOriginalWindowAttachCount == getWindowAttachCount() && !hasPerformedLongPress) { // Tool.print("Shit in Runnable"); // if (onLongPress()) { // hasPerformedLongPress = true; // void rememberWindowAttachCount() { // mOriginalWindowAttachCount = getWindowAttachCount();
package com.sequenceiq.cloudbreak.conf; import java.io.IOException; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.jasypt.encryption.pbe.PBEStringCleanablePasswordEncryptor; import org.jasypt.encryption.pbe.StandardPBEStringEncryptor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.http.HttpStatus; import org.springframework.http.client.ClientHttpResponse; import org.springframework.security.access.expression.method.DefaultMethodSecurityExpressionHandler; import org.springframework.security.access.expression.method.MethodSecurityExpressionHandler; import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; import org.springframework.security.config.annotation.method.configuration.GlobalMethodSecurityConfiguration; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; import org.springframework.security.oauth2.config.annotation.web.configurers.ResourceServerSecurityConfigurer; import org.springframework.security.oauth2.provider.OAuth2Authentication; import org.springframework.security.oauth2.provider.token.RemoteTokenServices; import org.springframework.security.web.authentication.preauth.AbstractPreAuthenticatedProcessingFilter; import 
org.springframework.web.client.DefaultResponseErrorHandler; import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.RestTemplate; import org.springframework.web.filter.OncePerRequestFilter; import com.sequenceiq.cloudbreak.domain.CbUser; import com.sequenceiq.cloudbreak.service.user.UserDetailsService; import com.sequenceiq.cloudbreak.service.user.UserFilterField; @Configuration public class SecurityConfig { @Configuration @EnableGlobalMethodSecurity(prePostEnabled = true) protected static class MethodSecurityConfig extends GlobalMethodSecurityConfiguration { @Autowired private UserDetailsService userDetailsService; @Autowired private OwnerBasedPermissionEvaluator ownerBasedPermissionEvaluator; @Bean MethodSecurityExpressionHandler expressionHandler() { DefaultMethodSecurityExpressionHandler expressionHandler = new DefaultMethodSecurityExpressionHandler(); ownerBasedPermissionEvaluator.setUserDetailsService(userDetailsService); expressionHandler.setPermissionEvaluator(ownerBasedPermissionEvaluator); return expressionHandler; } @Override protected MethodSecurityExpressionHandler createExpressionHandler() { return expressionHandler(); } } @Configuration @EnableResourceServer protected static class ResourceServerConfiguration extends ResourceServerConfigurerAdapter { public static final Logger LOGGER = LoggerFactory.getLogger(ResourceServerConfiguration.class); @Value("${cb.client.id}") private String clientId; @Value("${cb.client.secret}") private String clientSecret; @Value("${cb.identity.server.url}") private String identityServerUrl; @Autowired private UserDetailsService userDetailsService; @Bean RemoteTokenServices remoteTokenServices() { RemoteTokenServices rts = new RemoteTokenServices(); rts.setClientId(clientId); rts.setClientSecret(clientSecret); rts.setCheckTokenEndpointUrl(identityServerUrl + "/check_token"); rts.setRestTemplate(createRestTemplate()); return rts; } @Bean PBEStringCleanablePasswordEncryptor 
encryptor() { StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor(); encryptor.setPassword(clientSecret); return encryptor; } @Override public void configure(ResourceServerSecurityConfigurer resources) throws Exception { resources.resourceId("cloudbreak"); resources.tokenServices(remoteTokenServices()); } @Override public void configure(HttpSecurity http) throws Exception { http.csrf() .disable() .headers() .contentTypeOptions() .and() .addFilterAfter(new ScimAccountGroupReaderFilter(userDetailsService), AbstractPreAuthenticatedProcessingFilter.class) .authorizeRequests() .antMatchers("/user/blueprints").access("#oauth2.hasScope('cloudbreak.blueprints')") .antMatchers("/account/blueprints").access("#oauth2.hasScope('cloudbreak.blueprints')") .antMatchers("/stacks/*/cluster/**").access("#oauth2.hasScope('cloudbreak.stacks') or #oauth2.hasScope('cloudbreak.autoscale')")
package org.jeeventstore.core.store; import javax.ejb.EJB; import org.jeeventstore.core.notifier.EventStoreCommitNotifier; import org.jeeventstore.core.persistence.EventStorePersistence; import org.jeeventstore.core.ReadableEventStream; import org.jeeventstore.core.WritableEventStream; /** * The EventStoreService orchestrates the creation of event streams. * * @author Alexander Langer */ public class EventStoreService implements EventStore { @EJB private EventStoreCommitNotifier persistenceNotifier; @EJB private EventStorePersistence persistence; @Override public ReadableEventStream openStreamForReading(String bucketId, String streamId) { return this.openStreamForReading(bucketId, streamId, Long.MAX_VALUE); } @Override public ReadableEventStream openStreamForReading(String bucketId, String streamId, long version) { return OptimisticEventStream.createReadable(bucketId, streamId, version, persistence); } @Override public WritableEventStream createStream(String bucketId, String streamId) { return this.openStreamForWriting(bucketId, streamId, 0l); } @Override public WritableEventStream openStreamForWriting( String bucketId, String streamId, long version) { NotifyingPersistenceDecorator deco = new NotifyingPersistenceDecorator(persistence, persistenceNotifier); return OptimisticEventStream.createWritable(bucketId, streamId, version, deco); } @Override public boolean existsStream(String bucketId, String streamId) { return persistence.existsStream(bucketId, streamId); } }
package org.javarosa.xml.util; /** * @author ctsims */ public class UnfullfilledRequirementsException extends Exception { private int severity; private int requirement; /** * Version Numbers if version is incompatible * */ private int maR, miR, maA, miA; public UnfullfilledRequirementsException(String message, int severity) { this(message, severity, -1, -1, -1, -1, -1); } public UnfullfilledRequirementsException(String message, int severity, int requirement) { this(message, severity, requirement, -1, -1, -1, -1); } /** * Constructor for unfulfilled version requirements. * * @param message * @param severity * @param requirement * @param requiredMajor * @param requiredMinor * @param availableMajor * @param availableMinor */ public UnfullfilledRequirementsException(String message, int severity, int requirement, int requiredMajor, int requiredMinor, int availableMajor, int availableMinor) { super(message); this.severity = severity; this.requirement = requirement; this.maR = requiredMajor; this.miR = requiredMinor; this.maA = availableMajor; this.miA = availableMinor; } /** * @return A human readable version string describing the required version */ public String getRequiredVersionString() { return maR + "." + miR; } /** * @return A human readable version string describing the available version */ public String getAvailableVesionString() { return maA + "." + miA; } public int getSeverity() { return severity; } public int getRequirementCode() { return requirement; } }
package com.gentics.mesh.test.context;

import static com.gentics.mesh.test.util.MeshAssert.failingLatch;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.apache.commons.io.FileUtils;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;

import com.syncleus.ferma.tx.Tx;

import com.gentics.mesh.Mesh;
import com.gentics.mesh.cli.BootstrapInitializerImpl;
import com.gentics.mesh.core.cache.PermissionStore;
import com.gentics.mesh.core.data.impl.DatabaseHelper;
import com.gentics.mesh.core.data.search.IndexHandler;
import com.gentics.mesh.core.verticle.migration.MigrationStatusHandler;
import com.gentics.mesh.core.verticle.migration.node.NodeMigrationVerticle;
import com.gentics.mesh.crypto.KeyStoreHelper;
import com.gentics.mesh.dagger.DaggerTestMeshComponent;
import com.gentics.mesh.dagger.MeshComponent;
import com.gentics.mesh.dagger.MeshInternal;
import com.gentics.mesh.etc.RouterStorage;
import com.gentics.mesh.etc.config.ElasticSearchOptions;
import com.gentics.mesh.etc.config.MeshOptions;
import com.gentics.mesh.graphdb.spi.Database;
import com.gentics.mesh.impl.MeshFactoryImpl;
import com.gentics.mesh.rest.RestAPIVerticle;
import com.gentics.mesh.rest.client.MeshRestClient;
import com.gentics.mesh.search.DummySearchProvider;
import com.gentics.mesh.test.TestDataProvider;
import com.gentics.mesh.test.TestSize;
import com.gentics.mesh.test.util.TestUtils;
import com.gentics.mesh.util.UUIDUtil;

import io.vertx.core.DeploymentOptions;
import io.vertx.core.Vertx;
import io.vertx.core.json.JsonObject;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
import io.vertx.core.shareddata.LocalMap;

/**
 * JUnit {@link TestWatcher} rule that manages the Mesh test environment
 * lifecycle: it boots the dagger context and database once per suite
 * ({@link #starting(Description)} with {@code description.isSuite()}),
 * prepares test data, REST endpoints and search index handlers per test, and
 * tears everything down again in {@link #finished(Description)}. Behavior is
 * driven by the {@link MeshTestSetting} annotation on the test class.
 */
public class MeshTestContext extends TestWatcher {

    private static final Logger log = LoggerFactory.getLogger(MeshTestContext.class);

    // Per-JVM-run config directory; timestamped so parallel/sequential runs do not clash.
    private static final String CONF_PATH = "target/config-" + System.currentTimeMillis();

    // Folders created via newFolder(); deleted again in cleanupFolders().
    private List<File> tmpFolders = new ArrayList<>();

    private MeshComponent meshDagger;

    private TestDataProvider dataProvider;

    // Only set when the dagger-provided search provider is the dummy implementation.
    private DummySearchProvider dummySearchProvider;

    private Vertx vertx;

    protected int port;

    private MeshRestClient client;

    private RestAPIVerticle restVerticle;

    private NodeMigrationVerticle nodeMigrationVerticle;

    // Vert.x deployment ids collected during setup so undeployAndReset() can undeploy them.
    private List<String> deploymentIds = new ArrayList<>();

    private RouterStorage routerStorage;

    /**
     * Suite-level invocation boots the dagger context/database once; per-test
     * invocation sets up test data, index handlers, REST endpoints and clears
     * the migration status map (cluster-wide or local, depending on settings).
     */
    @Override
    protected void starting(Description description) {
        try {
            MeshTestSetting settings = getSettings(description);
            // Setup the dagger context and orientdb,es once
            if (description.isSuite()) {
                removeDataDirectory();
                removeConfigDirectory();
                MeshOptions options = init(settings);
                initDagger(options, settings.testSize());
                meshDagger.boot().registerEventHandlers();
            } else {
                if (!settings.inMemoryDB()) {
                    DatabaseHelper.init(meshDagger.database());
                }
                setupData();
                if (settings.useElasticsearch()) {
                    setupIndexHandlers();
                }
                if (settings.startServer()) {
                    setupRestEndpoints();
                }
                if (settings.clusterMode()) {
                    // Clear the shared migration status map across the cluster and
                    // wait (bounded) for the async clear to complete.
                    CompletableFuture<Void> fut = new CompletableFuture<>();
                    vertx.sharedData().getClusterWideMap(MigrationStatusHandler.MIGRATION_DATA_MAP_KEY, rh -> {
                        rh.result().clear(ch -> {
                            fut.complete(null);
                        });
                    });
                    fut.get(10, TimeUnit.SECONDS);
                } else {
                    LocalMap<Object, Object> map = vertx.sharedData().getLocalMap(MigrationStatusHandler.MIGRATION_DATA_MAP_KEY);
                    if (map != null) {
                        map.clear();
                    }
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Mirrors {@link #starting(Description)}: suite-level teardown removes the
     * data/config directories; per-test teardown undeploys verticles, closes
     * the client, clears the search provider and resets the database.
     */
    @Override
    protected void finished(Description description) {
        try {
            MeshTestSetting settings = getSettings(description);
            if (description.isSuite()) {
                removeDataDirectory();
                removeConfigDirectory();
            } else {
                cleanupFolders();
                if (settings.startServer()) {
                    undeployAndReset();
                    closeClient();
                }
                if (settings.useElasticsearch()) {
                    meshDagger.searchProvider().clear();
                } else {
                    meshDagger.dummySearchProvider().clear();
                }
                resetDatabase(settings);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Deletes the config directory and points mesh at it for the next run.
    // NOTE(review): the system property is set after the delete — presumably
    // intentional so the next boot recreates a fresh directory; confirm.
    private void removeConfigDirectory() throws IOException {
        FileUtils.deleteDirectory(new File(CONF_PATH));
        System.setProperty("mesh.confDirName", CONF_PATH);
    }

    // Removes the on-disk graph data directory used by non-in-memory runs.
    private void removeDataDirectory() throws IOException {
        FileUtils.deleteDirectory(new File("data"));
    }

    protected void setupIndexHandlers() throws Exception {
        // We need to call init() again in order create missing indices for the created test data
        for (IndexHandler<?> handler : meshDagger.indexHandlerRegistry().getHandlers()) {
            handler.init().await();
        }
    }

    /**
     * Locate the {@link MeshTestSetting} annotation for the given description,
     * preferring the test class annotation over the description's own.
     *
     * @param description
     */
    protected MeshTestSetting getSettings(Description description) {
        Class<?> testClass = description.getTestClass();
        if (testClass != null) {
            return testClass.getAnnotation(MeshTestSetting.class);
        }
        return description.getAnnotation(MeshTestSetting.class);
    }

    /**
     * Deploys the node migration verticle and the REST API verticle on a random
     * port (each awaited via a latch), then logs the REST client in.
     */
    private void setupRestEndpoints() throws Exception {
        Mesh.mesh().getOptions().getUploadOptions().setByteLimit(Long.MAX_VALUE);
        port = com.gentics.mesh.test.util.TestUtils.getRandomPort();
        vertx = Mesh.vertx();
        routerStorage.addProjectRouter(TestDataProvider.PROJECT_NAME);
        JsonObject config = new JsonObject();
        config.put("port", port);
        // Start node migration verticle
        DeploymentOptions options = new DeploymentOptions();
        options.setWorker(true);
        CountDownLatch latch = new CountDownLatch(1);
        nodeMigrationVerticle = meshDagger.nodeMigrationVerticle();
        vertx.deployVerticle(nodeMigrationVerticle, options, rh -> {
            String deploymentId = rh.result();
            deploymentIds.add(deploymentId);
            latch.countDown();
        });
        failingLatch(latch);
        // Start rest verticle
        CountDownLatch latch2 = new CountDownLatch(1);
        restVerticle = MeshInternal.get().restApiVerticle();
        vertx.deployVerticle(restVerticle, new DeploymentOptions().setConfig(config), rh -> {
            String deploymentId = rh.result();
            deploymentIds.add(deploymentId);
            latch2.countDown();
        });
        failingLatch(latch2);
        // Setup the rest client
        try (Tx tx = db().tx()) {
            client = MeshRestClient.create("localhost", getPort(), false, Mesh.vertx());
            client.setLogin(getData().user().getUsername(), getData().getUserInfo().getPassword());
            client.login().toBlocking().value();
        }
        if (dummySearchProvider != null) {
            dummySearchProvider.clear();
        }
    }

    private Database db() {
        return meshDagger.database();
    }

    public int getPort() {
        return port;
    }

    public Vertx getVertx() {
        return vertx;
    }

    /**
     * Setup the test data.
     *
     * @throws Exception
     */
    private void setupData() throws Exception {
        // Mass-insert intent speeds up the bulk creation of test fixtures.
        meshDagger.database().setMassInsertIntent();
        meshDagger.boot().createSearchIndicesAndMappings();
        dataProvider.setup();
        meshDagger.database().resetIntent();
    }

    // Undeploys every verticle deployed during setupRestEndpoints().
    private void undeployAndReset() throws Exception {
        for (String id : deploymentIds) {
            vertx.undeploy(id);
        }
    }

    private void closeClient() throws Exception {
        if (client != null) {
            client.close();
        }
    }

    /**
     * Clear the test data. In-memory databases are simply cleared; on-disk
     * databases are stopped, their directory deleted and the connection pool
     * re-created.
     *
     * @param settings
     * @throws Exception
     */
    private void resetDatabase(MeshTestSetting settings) throws Exception {
        BootstrapInitializerImpl.clearReferences();
        long start = System.currentTimeMillis();
        if (settings.inMemoryDB()) {
            MeshInternal.get().database().clear();
        } else {
            MeshInternal.get().database().stop();
            File dbDir = new File(Mesh.mesh().getOptions().getStorageOptions().getDirectory());
            FileUtils.deleteDirectory(dbDir);
            MeshInternal.get().database().setupConnectionPool();
        }
        long duration = System.currentTimeMillis() - start;
        log.info("Clearing DB took {" + duration + "} ms.");
        if (dummySearchProvider != null) {
            dummySearchProvider.reset();
        }
    }

    // Deletes all folders created via newFolder() and invalidates the permission cache.
    private void cleanupFolders() throws IOException {
        for (File folder : tmpFolders) {
            FileUtils.deleteDirectory(folder);
        }
        // if (Mesh.mesh().getOptions().getSearchOptions().getDirectory() != null) {
        // FileUtils.deleteDirectory(new File(Mesh.mesh().getOptions().getSearchOptions().getDirectory()));
        PermissionStore.invalidate();
    }

    public TestDataProvider getData() {
        return dataProvider;
    }

    public DummySearchProvider getDummySearchProvider() {
        return dummySearchProvider;
    }

    /**
     * Initialise mesh options: clustering, keystore, upload/tmp/image-cache/
     * backup/export folders, HTTP port, graph storage directory and search
     * options — all derived from the given settings.
     *
     * @param settings
     * @throws Exception
     */
    public MeshOptions init(MeshTestSetting settings) throws Exception {
        MeshFactoryImpl.clear();
        MeshOptions options = new MeshOptions();

        // Clustering options
        if (settings.clusterMode()) {
            options.getClusterOptions().setEnabled(true);
            options.setInitCluster(true);
            options.getClusterOptions().setClusterName("cluster" + System.currentTimeMillis());
        }

        // Setup the keystore
        File keystoreFile = new File("target", "keystore_" + UUIDUtil.randomUUID() + ".jceks");
        keystoreFile.deleteOnExit();
        String keystorePassword = "finger";
        if (!keystoreFile.exists()) {
            KeyStoreHelper.gen(keystoreFile.getAbsolutePath(), keystorePassword);
        }
        options.getAuthenticationOptions().setKeystorePassword(keystorePassword);
        options.getAuthenticationOptions().setKeystorePath(keystoreFile.getAbsolutePath());
        options.setNodeName("testNode");

        String uploads = newFolder("testuploads");
        options.getUploadOptions().setDirectory(uploads);

        String targetTmpDir = newFolder("tmpdir");
        options.getUploadOptions().setTempDirectory(targetTmpDir);

        String imageCacheDir = newFolder("image_cache");
        options.getImageOptions().setImageCacheDirectory(imageCacheDir);

        String backupPath = newFolder("backups");
        options.getStorageOptions().setBackupDirectory(backupPath);

        String exportPath = newFolder("exports");
        options.getStorageOptions().setExportDirectory(exportPath);

        options.getHttpServerOptions().setPort(TestUtils.getRandomPort());

        // The database provider will switch to in memory mode when no directory has been specified.
        String graphPath = null;
        if (!settings.inMemoryDB() || settings.clusterMode()) {
            graphPath = "target/graphdb_" + UUIDUtil.randomUUID();
            File directory = new File(graphPath);
            directory.deleteOnExit();
            directory.mkdirs();
        }
        options.getStorageOptions().setDirectory(graphPath);

        ElasticSearchOptions searchOptions = new ElasticSearchOptions();
        if (settings.useElasticsearch()) {
            searchOptions.setDirectory("target/elasticsearch_data_" + System.currentTimeMillis());
        } else {
            searchOptions.setDirectory(null);
        }
        searchOptions.setHttpEnabled(settings.startESServer());
        options.setSearchOptions(searchOptions);
        Mesh.mesh(options);
        return options;
    }

    /**
     * Create a new folder which will be automatically be deleted once the rule finishes.
     *
     * @param prefix
     * @return
     * @throws IOException
     */
    private String newFolder(String prefix) throws IOException {
        String path = "target/" + prefix + "_" + UUIDUtil.randomUUID();
        File directory = new File(path);
        FileUtils.deleteDirectory(directory);
        directory.deleteOnExit();
        directory.mkdirs();
        tmpFolders.add(directory);
        return path;
    }

    /**
     * Initialise the mesh dagger context and inject the dependencies within the test.
     *
     * @param options
     *
     * @throws Exception
     */
    public void initDagger(MeshOptions options, TestSize size) throws Exception {
        log.info("Initializing dagger context");
        meshDagger = DaggerTestMeshComponent.create();
        MeshInternal.set(meshDagger);
        dataProvider = new TestDataProvider(size, meshDagger.boot(), meshDagger.database());
        routerStorage = meshDagger.routerStorage();
        if (meshDagger.searchProvider() instanceof DummySearchProvider) {
            dummySearchProvider = meshDagger.dummySearchProvider();
        }
        try {
            meshDagger.boot().init(Mesh.mesh(), false, options, null);
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    public MeshRestClient getClient() {
        return client;
    }
}
package com.yahoo.vespa.flags; import com.yahoo.component.Vtag; import com.yahoo.vespa.defaults.Defaults; import com.yahoo.vespa.flags.custom.PreprovisionCapacity; import java.util.List; import java.util.Optional; import java.util.TreeMap; import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID; import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME; import static com.yahoo.vespa.flags.FetchVector.Dimension.NODE_TYPE; import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION; /** * Definitions of feature flags. * * <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag} * or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p> * * <ol> * <li>The unbound flag</li> * <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding * an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li> * <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should * declare this in the unbound flag definition in this file (referring to * {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g. * {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li> * </ol> * * <p>Once the code is in place, you can override the flag value. This depends on the flag source, but typically * there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p> * * @author hakonhall */ public class Flags { private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>(); public static final UnboundIntFlag DROP_CACHES = defineIntFlag("drop-caches", 3, "The int value to write into /proc/sys/vm/drop_caches for each tick. 
" + "1 is page cache, 2 is dentries inodes, 3 is both page cache and dentries inodes, etc.", "Takes effect on next tick.", HOSTNAME); public static final UnboundBooleanFlag ENABLE_CROWDSTRIKE = defineFeatureFlag( "enable-crowdstrike", true, "Whether to enable CrowdStrike.", "Takes effect on next host admin tick", HOSTNAME); public static final UnboundBooleanFlag ENABLE_NESSUS = defineFeatureFlag( "enable-nessus", true, "Whether to enable Nessus.", "Takes effect on next host admin tick", HOSTNAME); public static final UnboundListFlag<String> DISABLED_HOST_ADMIN_TASKS = defineListFlag( "disabled-host-admin-tasks", List.of(), String.class, "List of host-admin task names (as they appear in the log, e.g. root>main>UpgradeTask) that should be skipped", "Takes effect on next host admin tick", HOSTNAME, NODE_TYPE); public static final UnboundStringFlag DOCKER_VERSION = defineStringFlag( "docker-version", "1.13.1-91.git07f3374", "The version of the docker to use of the format VERSION-REL: The YUM package to be installed will be " + "2:docker-VERSION-REL.el7.centos.x86_64 in AWS (and without '.centos' otherwise). " + "If docker-version is not of this format, it must be parseable by YumPackageName::fromString.", "Takes effect on next tick.", HOSTNAME); public static final UnboundLongFlag THIN_POOL_GB = defineLongFlag( "thin-pool-gb", -1, "The size of the disk reserved for the thin pool with dynamic provisioning in AWS, in base-2 GB. " + "If <0, the default is used (which may depend on the zone and node type).", "Takes effect immediately (but used only during provisioning).", NODE_TYPE); public static final UnboundDoubleFlag CONTAINER_CPU_CAP = defineDoubleFlag( "container-cpu-cap", 0, "Hard limit on how many CPUs a container may use. This value is multiplied by CPU allocated to node, so " + "to cap CPU at 200%, set this to 2, etc.", "Takes effect on next node agent tick. 
Change is orchestrated, but does NOT require container restart", HOSTNAME, APPLICATION_ID);

    public static final UnboundBooleanFlag INCLUDE_SIS_IN_TRUSTSTORE = defineFeatureFlag(
            "include-sis-in-truststore", false,
            "Whether to use the trust store backed by Athenz and (in public) Service Identity certificates in " +
            "host-admin and/or Docker containers",
            "Takes effect on restart of host-admin (for host-admin), and restart of Docker container.",
            // For host-admin, HOSTNAME and NODE_TYPE is available
            // For Docker containers, HOSTNAME and APPLICATION_ID is available
            // WARNING: Having different sets of dimensions is DISCOURAGED in general, but needed for here since
            // trust store for host-admin is determined before having access to application ID from node repo.
            HOSTNAME, NODE_TYPE, APPLICATION_ID);

    public static final UnboundStringFlag TLS_INSECURE_MIXED_MODE = defineStringFlag(
            "tls-insecure-mixed-mode", "tls_client_mixed_server",
            "TLS insecure mixed mode. Allowed values: ['plaintext_client_mixed_server', 'tls_client_mixed_server', 'tls_client_tls_server']",
            "Takes effect on restart of Docker container",
            NODE_TYPE, APPLICATION_ID, HOSTNAME);

    public static final UnboundStringFlag TLS_INSECURE_AUTHORIZATION_MODE = defineStringFlag(
            "tls-insecure-authorization-mode", "log_only",
            "TLS insecure authorization mode. Allowed values: ['disable', 'log_only', 'enforce']",
            "Takes effect on restart of Docker container",
            NODE_TYPE, APPLICATION_ID, HOSTNAME);

    public static final UnboundBooleanFlag USE_ADAPTIVE_DISPATCH = defineFeatureFlag(
            "use-adaptive-dispatch", false,
            "Should adaptive dispatch be used over round robin",
            "Takes effect at redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag REBOOT_INTERVAL_IN_DAYS = defineIntFlag(
            "reboot-interval-in-days", 30,
            "No reboots are scheduled 0x-1x reboot intervals after the previous reboot, while reboot is " +
            "scheduled evenly distributed in the 1x-2x range (and naturally guaranteed at the 2x boundary).",
            "Takes effect on next run of NodeRebooter");

    public static final UnboundBooleanFlag ENABLE_DYNAMIC_PROVISIONING = defineFeatureFlag(
            "enable-dynamic-provisioning", false,
            "Provision a new docker host when we otherwise can't allocate a docker node",
            "Takes effect on next deployment",
            APPLICATION_ID);

    public static final UnboundListFlag<PreprovisionCapacity> PREPROVISION_CAPACITY = defineListFlag(
            "preprovision-capacity", List.of(), PreprovisionCapacity.class,
            "List of node resources and their count that should be present in zone to receive new deployments. When a " +
            "preprovisioned is taken, new will be provisioned within next iteration of maintainer.",
            "Takes effect on next iteration of HostProvisionMaintainer.");

    public static final UnboundBooleanFlag USE_ADVERTISED_RESOURCES = defineFeatureFlag(
            "use-advertised-resources", true,
            "When enabled, will use advertised host resources rather than actual host resources, ignore host resource " +
            "reservation, and fail with exception unless requested resource match advertised host resources exactly.",
            "Takes effect on next iteration of HostProvisionMaintainer.",
            APPLICATION_ID);

    // NOTE(review): this description looks copy-pasted from an unrelated flag — elsewhere in this codebase the same
    // flag id is described as "Default limit for when to apply termwise query evaluation". Confirm and fix the text.
    public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
            "default-term-wise-limit", 1.0,
            "Node resource memory in Gb for admin cluster nodes",
            "Takes effect at redeployment",
            APPLICATION_ID);

    public static final UnboundBooleanFlag HOST_HARDENING = defineFeatureFlag(
            "host-hardening", false,
            "Whether to enable host hardening Linux baseline.",
            "Takes effect on next tick or on host-admin restart (may vary where used).",
            HOSTNAME);

    // Note: the flag id is "zookeeper-server-version" but the value is major.minor only (e.g. "3.5").
    public static final UnboundStringFlag ZOOKEEPER_SERVER_MAJOR_MINOR_VERSION = defineStringFlag(
            "zookeeper-server-version", "3.5",
            "The version of ZooKeeper server to use (major.minor, not full version)",
            "Takes effect on restart of Docker container",
            NODE_TYPE, APPLICATION_ID, HOSTNAME);

    public static final UnboundStringFlag TLS_FOR_ZOOKEEPER_QUORUM_COMMUNICATION = defineStringFlag(
            "tls-for-zookeeper-quorum-communication", "OFF",
            "How to setup TLS for ZooKeeper quorum communication. Valid values are OFF, PORT_UNIFICATION, TLS_WITH_PORT_UNIFICATION, TLS_ONLY",
            "Takes effect on restart of config server",
            NODE_TYPE, HOSTNAME);

    public static final UnboundStringFlag TLS_FOR_ZOOKEEPER_CLIENT_SERVER_COMMUNICATION = defineStringFlag(
            "tls-for-zookeeper-client-server-communication", "OFF",
            "How to setup TLS for ZooKeeper client/server communication. Valid values are OFF, PORT_UNIFICATION, TLS_WITH_PORT_UNIFICATION, TLS_ONLY",
            "Takes effect on restart of config server",
            NODE_TYPE, HOSTNAME);

    public static final UnboundBooleanFlag USE_OLD_METRICS_CHECKS = defineFeatureFlag(
            "use-old-metrics-checks", true,
            "Whether to use old metrics checks",
            "Takes effect on next host admin tick",
            NODE_TYPE, HOSTNAME, APPLICATION_ID);

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, String description,
                                                       String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundBooleanFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundStringFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, String description,
                                               String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundIntFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundLongFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundDoubleFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass, String description,
                                                              String modificationEffect, FetchVector.Dimension... dimensions) {
        // The lambda captures jacksonClass so the generic factory signature (id, default, vector) still fits.
        return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass),
                flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass, String description,
                                                        String modificationEffect, FetchVector.Dimension... dimensions) {
        // The lambda captures elementClass so the generic factory signature (id, default, vector) still fits.
        return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec),
                flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** Factory abstraction so {@link #define} can build any concrete unbound flag type. */
    @FunctionalInterface
    private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> {
        U create(FlagId id, T defaultVale, FetchVector defaultFetchVector);
    }

    /**
     * Defines a Flag.
     *
     * @param factory            Factory for creating unbound flag of type U
     * @param flagId             The globally unique FlagId.
     * @param defaultValue       The default value if none is present after resolution.
     * @param description        Description of how the flag is used.
     * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc.
     * @param dimensions         What dimensions will be set in the {@link FetchVector} when fetching
     *                           the flag value in
     *                           {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}.
     *                           For instance, if APPLICATION is one of the dimensions here, you should make sure
     *                           APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag
     *                           from the FlagSource.
     * @param <T>                The boxed type of the flag value, e.g. Boolean for flags guarding features.
     * @param <U>                The type of the unbound flag, e.g. UnboundBooleanFlag.
     * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and
     *         {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment
     *         is typically implicit.
     */
    private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory,
                                                                String flagId,
                                                                T defaultValue,
                                                                String description,
                                                                String modificationEffect,
                                                                FetchVector.Dimension[] dimensions) {
        FlagId id = new FlagId(flagId);
        FetchVector vector = new FetchVector()
                .with(HOSTNAME, Defaults.getDefaults().vespaHostname())
                // Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0
                // (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0.
                .with(VESPA_VERSION, Vtag.currentVersion.toFullString());
        U unboundFlag = factory.create(id, defaultValue, vector);
        FlagDefinition definition = new FlagDefinition(unboundFlag, description, modificationEffect, dimensions);
        flags.put(id, definition);
        return unboundFlag;
    }

    /** Returns an immutable snapshot of all flag definitions registered so far. */
    public static List<FlagDefinition> getAllFlags() {
        return List.copyOf(flags.values());
    }

    /** Returns the definition for the given flag id, or empty if no such flag has been defined. */
    public static Optional<FlagDefinition> getFlag(FlagId flagId) {
        return Optional.ofNullable(flags.get(flagId));
    }

    /**
     * Allows the statically defined flags to be controlled in a test.
     *
     * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block,
     * the flags starts out as cleared. Flags can be defined, etc. When leaving the block, the flags from
     * before the block is reinserted.
     *
     * <p>NOT thread-safe. Tests using this cannot run in parallel.
     */
    public static Replacer clearFlagsForTesting() {
        return new Replacer();
    }

    /** Swaps the static flag registry for an empty one; restores the original on {@link #close()}. */
    public static class Replacer implements AutoCloseable {
        // Guards against two Replacer instances being active at once (e.g. parallel tests).
        private static volatile boolean flagsCleared = false;

        // The registry as it was before this Replacer was created; restored on close().
        private final TreeMap<FlagId, FlagDefinition> savedFlags;

        private Replacer() {
            verifyAndSetFlagsCleared(true);
            this.savedFlags = Flags.flags;
            Flags.flags = new TreeMap<>();
        }

        @Override
        public void close() {
            verifyAndSetFlagsCleared(false);
            Flags.flags = savedFlags;
        }

        /**
         * Used to implement a simple verification that Replacer is not used by multiple threads.
         * For instance two different tests running in parallel cannot both use Replacer.
         */
        private static void verifyAndSetFlagsCleared(boolean newValue) {
            if (flagsCleared == newValue) {
                throw new IllegalStateException("clearFlagsForTesting called while already cleared - running tests in parallell!?");
            }
            flagsCleared = newValue;
        }
    }
}
package com.yahoo.vespa.flags;

import com.yahoo.component.Vtag;
import com.yahoo.vespa.defaults.Defaults;

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Optional;
import java.util.TreeMap;

import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.CONSOLE_USER_EMAIL;
import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME;
import static com.yahoo.vespa.flags.FetchVector.Dimension.NODE_TYPE;
import static com.yahoo.vespa.flags.FetchVector.Dimension.TENANT_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION;
import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID;

/**
 * Definitions of feature flags.
 *
 * <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag}
 * or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p>
 *
 * <ol>
 *     <li>The unbound flag</li>
 *     <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding
 *     an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li>
 *     <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should
 *     declare this in the unbound flag definition in this file (referring to
 *     {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g.
 *     {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li>
 * </ol>
 *
 * <p>Once the code is in place, you can override the flag value. This depends on the flag source, but typically
 * there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p>
 *
 * @author hakonhall
 */
public class Flags {

    // Registry of all defined flags; swapped out wholesale by Replacer in tests, hence volatile and non-final.
    private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>();

    public static final UnboundBooleanFlag FORCE_DISK_ENCRYPTION = defineFeatureFlag(
            "force-disk-encryption", true,
            List.of("hakonhall"), "2021-10-01", "2021-11-01",
            "Enable new conditions for when to encrypt disk.",
            "Takes effect on next host admin tick.");

    public static final UnboundBooleanFlag MAP_USER_NAMESPACE = defineFeatureFlag(
            "map-user-namespace", false,
            List.of("freva"), "2021-10-18", "2021-12-01",
            "Whether host-admin should start containers with mapped UID/GID, will also chown all files under container storage.",
            "Takes effect on next container restart.",
            APPLICATION_ID, NODE_TYPE, HOSTNAME);

    public static final UnboundBooleanFlag USE_CGROUPS_V2 = defineFeatureFlag(
            "use-cgroups-v2", false,
            List.of("freva"), "2021-10-27", "2021-12-01",
            "Whether a host should use CGroups v2",
            "Will attempt to switch on next host admin tick (requires reboot).",
            NODE_TYPE, HOSTNAME);

    public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
            "default-term-wise-limit", 1.0,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Default limit for when to apply termwise query evaluation",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag FEED_SEQUENCER_TYPE = defineStringFlag(
            "feed-sequencer-type", "LATENCY",
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Selects type of sequenced executor used for feeding in proton, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
            "Takes effect at redeployment (requires restart)",
            ZONE_ID, APPLICATION_ID);

    // Fix: owners were a single comma-joined string, List.of("geirst, baldersheim"); now two separate elements
    // like every other multi-owner flag in this file.
    public static final UnboundIntFlag FEED_TASK_LIMIT = defineIntFlag(
            "feed-task-limit", 1000,
            List.of("geirst", "baldersheim"), "2021-10-14", "2022-01-01",
            "The task limit used by the executors handling feed in proton",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    // Fix: same comma-joined owners-string defect as FEED_TASK_LIMIT.
    public static final UnboundIntFlag MAX_UNCOMMITTED_MEMORY = defineIntFlag(
            "max-uncommitted-memory", 130000,
            List.of("geirst", "baldersheim"), "2021-10-21", "2022-01-01",
            "Max amount of memory holding updates to an attribute before we do a commit.",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag RESPONSE_SEQUENCER_TYPE = defineStringFlag(
            "response-sequencer-type", "ADAPTIVE",
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Selects type of sequenced executor used for mbus responses, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag RESPONSE_NUM_THREADS = defineIntFlag(
            "response-num-threads", 2,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Number of threads used for mbus responses, default is 2, negative number = numcores/4",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_COMMUNICATIONMANAGER_THREAD = defineFeatureFlag(
            "skip-communicationmanager-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the communicationmanager thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_MBUS_REQUEST_THREAD = defineFeatureFlag(
            "skip-mbus-request-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the mbus request thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_MBUS_REPLY_THREAD = defineFeatureFlag(
            "skip-mbus-reply-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the mbus reply thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag USE_THREE_PHASE_UPDATES = defineFeatureFlag(
            "use-three-phase-updates", false,
            List.of("vekterli"), "2020-12-02", "2022-01-01",
            "Whether to enable the use of three-phase updates when bucket replicas are out of sync.",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag HIDE_SHARED_ROUTING_ENDPOINT = defineFeatureFlag(
            "hide-shared-routing-endpoint", false,
            List.of("tokle", "bjormel"), "2020-12-02", "2022-01-01",
            "Whether the controller should hide shared routing layer endpoint",
            "Takes effect immediately",
            APPLICATION_ID);

    public static final UnboundBooleanFlag USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE = defineFeatureFlag(
            "async-message-handling-on-schedule", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Optionally deliver async messages in own thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag FEED_CONCURRENCY = defineDoubleFlag(
            "feed-concurrency", 0.5,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "How much concurrency should be allowed for feed",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag DISK_BLOAT_FACTOR = defineDoubleFlag(
            "disk-bloat-factor", 0.2,
            List.of("baldersheim"), "2021-10-08", "2022-01-01",
            "Amount of bloat allowed before compacting file",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag DOCSTORE_COMPRESSION_LEVEL = defineIntFlag(
            "docstore-compression-level", 3,
            List.of("baldersheim"), "2021-10-08", "2022-01-01",
            "Default compression level used for document store",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag NUM_DEPLOY_HELPER_THREADS = defineIntFlag(
            "num-model-builder-threads", -1,
            List.of("balder"), "2021-09-09", "2022-01-01",
            "Number of threads used for speeding up building of models.",
            "Takes effect on first (re)start of config server");

    public static final UnboundBooleanFlag ENABLE_FEED_BLOCK_IN_DISTRIBUTOR = defineFeatureFlag(
            "enable-feed-block-in-distributor", true,
            List.of("geirst"), "2021-01-27", "2021-11-01",
            "Enables blocking of feed in the distributor if resource usage is above limit on at least one content node",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag CONTAINER_DUMP_HEAP_ON_SHUTDOWN_TIMEOUT = defineFeatureFlag(
            "container-dump-heap-on-shutdown-timeout", false,
            List.of("baldersheim"), "2021-09-25", "2022-01-01",
            "Will trigger a heap dump during if container shutdown times out",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag CONTAINER_SHUTDOWN_TIMEOUT = defineDoubleFlag(
            "container-shutdown-timeout", 50.0,
            List.of("baldersheim"), "2021-09-25", "2022-01-01",
            "Timeout for shutdown of a jdisc container",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundListFlag<String> ALLOWED_ATHENZ_PROXY_IDENTITIES = defineListFlag(
            "allowed-athenz-proxy-identities", List.of(), String.class,
            List.of("bjorncs", "tokle"), "2021-02-10", "2021-12-01",
            "Allowed Athenz proxy identities",
            "takes effect at redeployment");

    public static final UnboundBooleanFlag GENERATE_NON_MTLS_ENDPOINT = defineFeatureFlag(
            "generate-non-mtls-endpoint", true,
            List.of("tokle"), "2021-02-18", "2021-12-01",
            "Whether to generate the non-mtls endpoint",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag MAX_ACTIVATION_INHIBITED_OUT_OF_SYNC_GROUPS = defineIntFlag(
            "max-activation-inhibited-out-of-sync-groups", 0,
            List.of("vekterli"), "2021-02-19", "2021-11-01",
            "Allows replicas in up to N content groups to not be activated " +
            "for query visibility if they are out of sync with a majority of other replicas",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_CONCURRENT_MERGES_PER_NODE = defineIntFlag(
            "max-concurrent-merges-per-node", 128,
            List.of("balder", "vekterli"), "2021-06-06", "2022-01-01",
            "Specifies max concurrent merges per content node.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_MERGE_QUEUE_SIZE = defineIntFlag(
            "max-merge-queue-size", 1024,
            List.of("balder", "vekterli"), "2021-06-06", "2022-01-01",
            "Specifies max size of merge queue.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag IGNORE_MERGE_QUEUE_LIMIT = defineFeatureFlag(
            "ignore-merge-queue-limit", false,
            List.of("vekterli", "geirst"), "2021-10-06", "2021-12-01",
            "Specifies if merges that are forwarded (chained) from another content node are always " +
            "allowed to be enqueued even if the queue is otherwise full.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag LARGE_RANK_EXPRESSION_LIMIT = defineIntFlag(
            "large-rank-expression-limit", 8192,
            List.of("baldersheim"), "2021-06-09", "2022-01-01",
            "Limit for size of rank expressions distributed by filedistribution",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag MAX_ENCRYPTING_HOSTS = defineIntFlag(
            "max-encrypting-hosts", 0,
            List.of("mpolden", "hakonhall"), "2021-05-27", "2021-11-01",
            "The maximum number of hosts allowed to encrypt their disk concurrently",
            "Takes effect on next run of HostEncrypter, but any currently encrypting hosts will not be cancelled when reducing the limit");

    public static final UnboundListFlag<String> DEFER_APPLICATION_ENCRYPTION = defineListFlag(
            "defer-application-encryption", List.of(), String.class,
            List.of("mpolden", "hakonhall"), "2021-06-23", "2021-11-01",
            "List of applications where encryption of their host should be deferred",
            "Takes effect on next run of HostEncrypter");

    public static final UnboundDoubleFlag MIN_NODE_RATIO_PER_GROUP = defineDoubleFlag(
            "min-node-ratio-per-group", 0.0,
            List.of("geirst", "vekterli"), "2021-07-16", "2021-12-01",
            "Minimum ratio of nodes that have to be available (i.e. not Down) in any hierarchic content cluster group for the group to be Up",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundListFlag<String> ALLOWED_SERVICE_VIEW_APIS = defineListFlag(
            "allowed-service-view-apis", List.of("state/v1/"), String.class,
            List.of("mortent"), "2021-08-05", "2022-01-01",
            "Apis allowed to proxy through the service view api",
            "Takes effect immediately");

    public static final UnboundBooleanFlag SEPARATE_TENANT_IAM_ROLES = defineFeatureFlag(
            "separate-tenant-iam-roles", false,
            List.of("mortent"), "2021-08-12", "2022-01-01",
            "Create separate iam roles for tenant",
            "Takes effect on redeploy",
            TENANT_ID);

    public static final UnboundIntFlag METRICSPROXY_NUM_THREADS = defineIntFlag(
            "metricsproxy-num-threads", 2,
            List.of("balder"), "2021-09-01", "2022-01-01",
            "Number of threads for metrics proxy",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag ENABLED_HORIZON_DASHBOARD = defineFeatureFlag(
            "enabled-horizon-dashboard", false,
            List.of("olaa"), "2021-09-13", "2021-12-31",
            "Enable Horizon dashboard",
            "Takes effect immediately",
            TENANT_ID, CONSOLE_USER_EMAIL
    );

    public static final UnboundBooleanFlag ENABLE_ONPREM_TENANT_S3_ARCHIVE = defineFeatureFlag(
            "enable-onprem-tenant-s3-archive", false,
            List.of("bjorncs"), "2021-09-14", "2021-12-31",
            "Enable tenant S3 buckets in cd/main. Must be set on controller cluster only.",
            "Takes effect immediately",
            ZONE_ID, TENANT_ID
    );

    public static final UnboundBooleanFlag DELETE_UNMAINTAINED_CERTIFICATES = defineFeatureFlag(
            "delete-unmaintained-certificates", false,
            List.of("andreer"), "2021-09-23", "2021-11-11",
            "Whether to delete certificates that are known by provider but not by controller",
            "Takes effect on next run of EndpointCertificateMaintainer"
    );

    public static final UnboundBooleanFlag ENABLE_TENANT_DEVELOPER_ROLE = defineFeatureFlag(
            "enable-tenant-developer-role", false,
            List.of("bjorncs"), "2021-09-23", "2021-12-31",
            "Enable tenant developer Athenz role in cd/main. Must be set on controller cluster only.",
            "Takes effect immediately",
            TENANT_ID
    );

    public static final UnboundIntFlag MAX_CONNECTION_LIFE_IN_HOSTED = defineIntFlag(
            "max-connection-life-in-hosted", 45,
            List.of("bjorncs"), "2021-09-30", "2021-12-31",
            "Max connection life for connections to jdisc endpoints in hosted",
            "Takes effect at redeployment",
            APPLICATION_ID);

    public static final UnboundBooleanFlag ENABLE_ROUTING_REUSE_PORT = defineFeatureFlag(
            "enable-routing-reuse-port", false,
            List.of("mortent"), "2021-09-29", "2021-12-31",
            "Enable reuse port in routing configuration",
            "Takes effect on container restart",
            HOSTNAME
    );

    public static final UnboundBooleanFlag ENABLE_TENANT_OPERATOR_ROLE = defineFeatureFlag(
            "enable-tenant-operator-role", false,
            List.of("bjorncs"), "2021-09-29", "2021-12-31",
            "Enable tenant specific operator roles in public systems. For controllers only.",
            "Takes effect on subsequent maintainer invocation",
            TENANT_ID
    );

    public static final UnboundIntFlag DISTRIBUTOR_MERGE_BUSY_WAIT = defineIntFlag(
            "distributor-merge-busy-wait", 10,
            List.of("geirst", "vekterli"), "2021-10-04", "2021-12-31",
            "Number of seconds that scheduling of new merge operations in the distributor should be inhibited " +
            "towards a content node that has indicated merge busy",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag DISTRIBUTOR_ENHANCED_MAINTENANCE_SCHEDULING = defineFeatureFlag(
            "distributor-enhanced-maintenance-scheduling", false,
            List.of("vekterli", "geirst"), "2021-10-14", "2022-01-31",
            "Enable enhanced maintenance operation scheduling semantics on the distributor",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag ASYNC_APPLY_BUCKET_DIFF = defineFeatureFlag(
            "async-apply-bucket-diff", false,
            List.of("geirst", "vekterli"), "2021-10-22", "2022-01-31",
            "Whether portions of apply bucket diff handling will be performed asynchronously",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag JDK_VERSION = defineStringFlag(
            "jdk-version", "11",
            List.of("hmusum"), "2021-10-25", "2021-11-25",
            "JDK version to use inside containers",
            "Takes effect on restart of Docker container",
            APPLICATION_ID);

    public static final UnboundBooleanFlag LEGACY_ENDPOINT_IN_CERTIFICATE = defineFeatureFlag(
            "legacy-endpoint-in-certificate", false,
            List.of("mpolden"), "2021-10-26", "2021-12-01",
            "Whether to include legacy endpoint names in issued certificates",
            "Takes effect on deployment through controller",
            APPLICATION_ID);

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, List<String> owners,
                                                       String createdAt, String expiresAt, String description,
                                                       String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundBooleanFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, List<String> owners,
                                                     String createdAt, String expiresAt, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundStringFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, List<String> owners,
                                               String createdAt, String expiresAt, String description,
                                               String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundIntFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, List<String> owners,
                                                 String createdAt, String expiresAt, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundLongFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, List<String> owners,
                                                     String createdAt, String expiresAt, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundDoubleFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass,
                                                              List<String> owners, String createdAt, String expiresAt,
                                                              String description, String modificationEffect,
                                                              FetchVector.Dimension... dimensions) {
        // The lambda captures jacksonClass so the generic factory signature (id, default, vector) still fits.
        return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass),
                flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass,
                                                        List<String> owners, String createdAt, String expiresAt,
                                                        String description, String modificationEffect,
                                                        FetchVector.Dimension... dimensions) {
        // The lambda captures elementClass so the generic factory signature (id, default, vector) still fits.
        return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec),
                flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** Factory abstraction so {@link #define} can build any concrete unbound flag type. */
    @FunctionalInterface
    private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> {
        U create(FlagId id, T defaultVale, FetchVector defaultFetchVector);
    }

    /**
     * Defines a Flag.
     *
     * @param factory            Factory for creating unbound flag of type U
     * @param flagId             The globally unique FlagId.
     * @param defaultValue       The default value if none is present after resolution.
     * @param owners             Usernames of the flag's owners.
     * @param createdAt          ISO-8601 date (yyyy-MM-dd) the flag was created.
     * @param expiresAt          ISO-8601 date (yyyy-MM-dd) the flag is expected to be removed.
     * @param description        Description of how the flag is used.
     * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc.
     * @param dimensions         What dimensions will be set in the {@link FetchVector} when fetching
     *                           the flag value in
     *                           {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}.
     *                           For instance, if APPLICATION is one of the dimensions here, you should make sure
     *                           APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag
     *                           from the FlagSource.
     * @param <T>                The boxed type of the flag value, e.g. Boolean for flags guarding features.
     * @param <U>                The type of the unbound flag, e.g. UnboundBooleanFlag.
     * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and
     *         {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment
     *         is typically implicit.
     */
    private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory,
                                                                String flagId,
                                                                T defaultValue,
                                                                List<String> owners,
                                                                String createdAt,
                                                                String expiresAt,
                                                                String description,
                                                                String modificationEffect,
                                                                FetchVector.Dimension[] dimensions) {
        FlagId id = new FlagId(flagId);
        FetchVector vector = new FetchVector()
                .with(HOSTNAME, Defaults.getDefaults().vespaHostname())
                // Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0
                // (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0.
                .with(VESPA_VERSION, Vtag.currentVersion.toFullString());
        U unboundFlag = factory.create(id, defaultValue, vector);
        FlagDefinition definition = new FlagDefinition(
                unboundFlag, owners, parseDate(createdAt), parseDate(expiresAt), description, modificationEffect, dimensions);
        flags.put(id, definition);
        return unboundFlag;
    }

    /** Parses an ISO-8601 date (yyyy-MM-dd) to the Instant at the start of that day, UTC. */
    private static Instant parseDate(String rawDate) {
        return DateTimeFormatter.ISO_DATE.parse(rawDate, LocalDate::from).atStartOfDay().toInstant(ZoneOffset.UTC);
    }

    /** Returns an immutable snapshot of all flag definitions registered so far. */
    public static List<FlagDefinition> getAllFlags() {
        return List.copyOf(flags.values());
    }

    /** Returns the definition for the given flag id, or empty if no such flag has been defined. */
    public static Optional<FlagDefinition> getFlag(FlagId flagId) {
        return Optional.ofNullable(flags.get(flagId));
    }

    /**
     * Allows the statically defined flags to be controlled in a test.
     *
     * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block,
     * the flags starts out as cleared. Flags can be defined, etc. When leaving the block, the flags from
     * before the block is reinserted.
     *
     * <p>NOT thread-safe. Tests using this cannot run in parallel.
     */
    public static Replacer clearFlagsForTesting(FlagId... flagsToKeep) {
        return new Replacer(flagsToKeep);
    }

    /** Swaps the static flag registry for an (almost) empty one; restores the original on {@link #close()}. */
    public static class Replacer implements AutoCloseable {
        // Guards against two Replacer instances being active at once (e.g. parallel tests).
        private static volatile boolean flagsCleared = false;

        // The registry as it was before this Replacer was created; restored on close().
        private final TreeMap<FlagId, FlagDefinition> savedFlags;

        private Replacer(FlagId... flagsToKeep) {
            verifyAndSetFlagsCleared(true);
            this.savedFlags = Flags.flags;
            Flags.flags = new TreeMap<>();
            // Carry over any flags the test explicitly wants to keep visible.
            List.of(flagsToKeep).forEach(id -> Flags.flags.put(id, savedFlags.get(id)));
        }

        @Override
        public void close() {
            verifyAndSetFlagsCleared(false);
            Flags.flags = savedFlags;
        }

        /**
         * Used to implement a simple verification that Replacer is not used by multiple threads.
         * For instance two different tests running in parallel cannot both use Replacer.
         */
        private static void verifyAndSetFlagsCleared(boolean newValue) {
            if (flagsCleared == newValue) {
                // Fix: corrected "parallell" typo in the error message.
                throw new IllegalStateException("clearFlagsForTesting called while already cleared - running tests in parallel!?");
            }
            flagsCleared = newValue;
        }
    }
}
package com.yahoo.vespa.flags;

import com.yahoo.component.Vtag;
import com.yahoo.vespa.defaults.Defaults;

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Optional;
import java.util.TreeMap;

import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.CONSOLE_USER_EMAIL;
import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME;
import static com.yahoo.vespa.flags.FetchVector.Dimension.TENANT_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION;
import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID;

/**
 * Definitions of feature flags.
 *
 * <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag}
 * or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p>
 *
 * <ol>
 *     <li>The unbound flag</li>
 *     <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding
 *     an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li>
 *     <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should
 *     declare this in the unbound flag definition in this file (referring to
 *     {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g.
 *     {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li>
 * </ol>
 *
 * <p>Once the code is in place, you can override the flag value. This depends on the flag source, but typically
 * there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p>
 *
 * @author hakonhall
 */
public class Flags {

    // All statically defined flags, keyed by id. Swapped out wholesale by Replacer in tests,
    // hence non-final and volatile.
    private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>();

    public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
            "default-term-wise-limit", 1.0,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Default limit for when to apply termwise query evaluation",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag FEED_SEQUENCER_TYPE = defineStringFlag(
            "feed-sequencer-type", "LATENCY",
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Selects type of sequenced executor used for feeding, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag RESPONSE_SEQUENCER_TYPE = defineStringFlag(
            "response-sequencer-type", "ADAPTIVE",
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Selects type of sequenced executor used for mbus responses, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag RESPONSE_NUM_THREADS = defineIntFlag(
            "response-num-threads", 2,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Number of threads used for mbus responses, default is 2, negative number = numcores/4",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_COMMUNICATIONMANAGER_THREAD = defineFeatureFlag(
            "skip-communicationmanager-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the communicationmanager thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_MBUS_REQUEST_THREAD = defineFeatureFlag(
            "skip-mbus-request-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the mbus request thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_MBUS_REPLY_THREAD = defineFeatureFlag(
            "skip-mbus-reply-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the mbus reply thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag USE_THREE_PHASE_UPDATES = defineFeatureFlag(
            "use-three-phase-updates", false,
            List.of("vekterli"), "2020-12-02", "2021-11-01",
            "Whether to enable the use of three-phase updates when bucket replicas are out of sync.",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag HIDE_SHARED_ROUTING_ENDPOINT = defineFeatureFlag(
            "hide-shared-routing-endpoint", false,
            List.of("tokle", "bjormel"), "2020-12-02", "2021-11-01",
            "Whether the controller should hide shared routing layer endpoint",
            "Takes effect immediately",
            APPLICATION_ID);

    public static final UnboundBooleanFlag USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE = defineFeatureFlag(
            "async-message-handling-on-schedule", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Optionally deliver async messages in own thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag FEED_CONCURRENCY = defineDoubleFlag(
            "feed-concurrency", 0.5,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "How much concurrency should be allowed for feed",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag NUM_DEPLOY_HELPER_THREADS = defineIntFlag(
            "num-model-builder-threads", -1,
            List.of("balder"), "2021-09-09", "2021-11-01",
            "Number of threads used for speeding up building of models.",
            "Takes effect on first (re)start of config server");

    public static final UnboundBooleanFlag ENABLE_FEED_BLOCK_IN_DISTRIBUTOR = defineFeatureFlag(
            "enable-feed-block-in-distributor", true,
            List.of("geirst"), "2021-01-27", "2021-11-01",
            "Enables blocking of feed in the distributor if resource usage is above limit on at least one content node",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag CONTAINER_DUMP_HEAP_ON_SHUTDOWN_TIMEOUT = defineFeatureFlag(
            "container-dump-heap-on-shutdown-timeout", false,
            List.of("baldersheim"), "2021-09-25", "2021-11-01",
            "Will trigger a heap dump during if container shutdown times out",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag CONTAINER_SHUTDOWN_TIMEOUT = defineDoubleFlag(
            "container-shutdown-timeout", 50.0,
            List.of("baldersheim"), "2021-09-25", "2021-11-01",
            "Timeout for shutdown of a jdisc container",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundListFlag<String> ALLOWED_ATHENZ_PROXY_IDENTITIES = defineListFlag(
            "allowed-athenz-proxy-identities", List.of(), String.class,
            List.of("bjorncs", "tokle"), "2021-02-10", "2021-12-01",
            "Allowed Athenz proxy identities",
            "takes effect at redeployment");

    public static final UnboundBooleanFlag GENERATE_NON_MTLS_ENDPOINT = defineFeatureFlag(
            "generate-non-mtls-endpoint", true,
            List.of("tokle"), "2021-02-18", "2021-12-01",
            "Whether to generate the non-mtls endpoint",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag MAX_ACTIVATION_INHIBITED_OUT_OF_SYNC_GROUPS = defineIntFlag(
            "max-activation-inhibited-out-of-sync-groups", 0,
            List.of("vekterli"), "2021-02-19", "2021-11-01",
            "Allows replicas in up to N content groups to not be activated " +
            "for query visibility if they are out of sync with a majority of other replicas",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag NUM_DISTRIBUTOR_STRIPES = defineIntFlag(
            "num-distributor-stripes", 0,
            List.of("geirst", "vekterli"), "2021-04-20", "2021-11-01",
            "Specifies the number of stripes used by the distributor. When 0, legacy single stripe behavior is used.",
            "Takes effect after distributor restart",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_CONCURRENT_MERGES_PER_NODE = defineIntFlag(
            "max-concurrent-merges-per-node", 128,
            List.of("balder", "vekterli"), "2021-06-06", "2021-11-01",
            "Specifies max concurrent merges per content node.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_MERGE_QUEUE_SIZE = defineIntFlag(
            "max-merge-queue-size", 1024,
            List.of("balder", "vekterli"), "2021-06-06", "2021-11-01",
            "Specifies max size of merge queue.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag LARGE_RANK_EXPRESSION_LIMIT = defineIntFlag(
            "large-rank-expression-limit", 8192,
            List.of("baldersheim"), "2021-06-09", "2021-11-01",
            "Limit for size of rank expressions distributed by filedistribution",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag MAX_ENCRYPTING_HOSTS = defineIntFlag(
            "max-encrypting-hosts", 0,
            List.of("mpolden", "hakonhall"), "2021-05-27", "2021-11-01",
            "The maximum number of hosts allowed to encrypt their disk concurrently",
            "Takes effect on next run of HostEncrypter, but any currently encrypting hosts will not be cancelled when reducing the limit");

    public static final UnboundBooleanFlag REQUIRE_CONNECTIVITY_CHECK = defineFeatureFlag(
            "require-connectivity-check", true,
            List.of("arnej"), "2021-06-03", "2021-12-01",
            "Require that config-sentinel connectivity check passes with good quality before starting services",
            "Takes effect on next restart",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundListFlag<String> DEFER_APPLICATION_ENCRYPTION = defineListFlag(
            "defer-application-encryption", List.of(), String.class,
            List.of("mpolden", "hakonhall"), "2021-06-23", "2021-11-01",
            "List of applications where encryption of their host should be deferred",
            "Takes effect on next run of HostEncrypter");

    public static final UnboundDoubleFlag MIN_NODE_RATIO_PER_GROUP = defineDoubleFlag(
            "min-node-ratio-per-group", 0.0,
            List.of("geirst", "vekterli"), "2021-07-16", "2021-12-01",
            "Minimum ratio of nodes that have to be available (i.e. not Down) in any hierarchic content cluster group for the group to be Up",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundListFlag<String> ALLOWED_SERVICE_VIEW_APIS = defineListFlag(
            "allowed-service-view-apis", List.of("state/v1/"), String.class,
            List.of("mortent"), "2021-08-05", "2021-11-01",
            "Apis allowed to proxy through the service view api",
            "Takes effect immediately");

    public static final UnboundBooleanFlag SEPARATE_TENANT_IAM_ROLES = defineFeatureFlag(
            "separate-tenant-iam-roles", false,
            List.of("mortent"), "2021-08-12", "2021-11-01",
            "Create separate iam roles for tenant",
            "Takes effect on redeploy",
            TENANT_ID);

    public static final UnboundIntFlag METRICSPROXY_NUM_THREADS = defineIntFlag(
            "metricsproxy-num-threads", 2,
            List.of("balder"), "2021-09-01", "2021-11-01",
            "Number of threads for metrics proxy",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag NEW_LOCATION_BROKER_LOGIC = defineFeatureFlag(
            "new-location-broker-logic", true,
            List.of("arnej"), "2021-09-07", "2021-12-31",
            "Use new implementation of internal logic in service location broker",
            "Takes effect immediately",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag ENABLED_HORIZON_DASHBOARD = defineFeatureFlag(
            "enabled-horizon-dashboard", false,
            List.of("olaa"), "2021-09-13", "2021-12-31",
            "Enable Horizon dashboard",
            "Takes effect immediately",
            TENANT_ID, CONSOLE_USER_EMAIL);

    public static final UnboundBooleanFlag ENABLE_ONPREM_TENANT_S3_ARCHIVE = defineFeatureFlag(
            "enable-onprem-tenant-s3-archive", false,
            List.of("bjorncs"), "2021-09-14", "2021-12-31",
            "Enable tenant S3 buckets in cd/main. Must be set on controller cluster only.",
            "Takes effect immediately",
            ZONE_ID, TENANT_ID);

    public static final UnboundBooleanFlag USE_APPLICATION_LOCK_IN_MAINTENANCE_DEPLOYMENT = defineFeatureFlag(
            "use-application-lock-in-maintenance-deployment", true,
            List.of("hmusum"), "2021-09-16", "2021-10-16",
            "Whether to use application node repository lock when doing maintenance deployment.",
            "Takes effect immediately",
            APPLICATION_ID);

    public static final UnboundBooleanFlag ENABLE_TENANT_DEVELOPER_ROLE = defineFeatureFlag(
            "enable-tenant-developer-role", false,
            List.of("bjorncs"), "2021-09-23", "2021-12-31",
            "Enable tenant developer Athenz role in cd/main. Must be set on controller cluster only.",
            "Takes effect immediately",
            TENANT_ID);

    public static final UnboundIntFlag MAX_CONNECTION_LIFE_IN_HOSTED = defineIntFlag(
            "max-connection-life-in-hosted", 45,
            List.of("bjorncs"), "2021-09-30", "2021-12-31",
            "Max connection life for connections to jdisc endpoints in hosted",
            "Takes effect at redeployment",
            APPLICATION_ID);

    public static final UnboundBooleanFlag ENABLE_ROUTING_REUSE_PORT = defineFeatureFlag(
            "enable-routing-reuse-port", false,
            List.of("mortent"), "2021-09-29", "2021-12-31",
            "Enable reuse port in routing configuration",
            "Takes effect on container restart",
            HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_TENANT_OPERATOR_ROLE = defineFeatureFlag(
            "enable-tenant-operator-role", false,
            List.of("bjorncs"), "2021-09-29", "2021-12-31",
            "Enable tenant specific operator roles in public systems. For controllers only.",
            "Takes effect on subsequent maintainer invocation",
            TENANT_ID);

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, List<String> owners,
                                                       String createdAt, String expiresAt, String description,
                                                       String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundBooleanFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, List<String> owners,
                                                     String createdAt, String expiresAt, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundStringFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, List<String> owners,
                                               String createdAt, String expiresAt, String description,
                                               String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundIntFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, List<String> owners,
                                                 String createdAt, String expiresAt, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundLongFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, List<String> owners,
                                                     String createdAt, String expiresAt, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundDoubleFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass, List<String> owners,
                                                              String createdAt, String expiresAt, String description,
                                                              String modificationEffect, FetchVector.Dimension... dimensions) {
        return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass),
                flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass, List<String> owners,
                                                        String createdAt, String expiresAt, String description,
                                                        String modificationEffect, FetchVector.Dimension... dimensions) {
        return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec),
                flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    @FunctionalInterface
    private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> {
        U create(FlagId id, T defaultVale, FetchVector defaultFetchVector);
    }

    /**
     * Defines a Flag.
     *
     * @param factory            Factory for creating unbound flag of type U
     * @param flagId             The globally unique FlagId.
     * @param defaultValue       The default value if none is present after resolution.
     * @param owners             The usernames responsible for this flag.
     * @param createdAt          ISO-8601 date the flag was created.
     * @param expiresAt          ISO-8601 date the flag is expected to be removed.
     * @param description        Description of how the flag is used.
     * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc.
     * @param dimensions         What dimensions will be set in the {@link FetchVector} when fetching
     *                           the flag value in
     *                           {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}.
     *                           For instance, if APPLICATION is one of the dimensions here, you should make sure
     *                           APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag
     *                           from the FlagSource.
     * @param <T>                The boxed type of the flag value, e.g. Boolean for flags guarding features.
     * @param <U>                The type of the unbound flag, e.g. UnboundBooleanFlag.
     * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and
     *         {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment
     *         is typically implicit.
     */
    private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory,
                                                                String flagId,
                                                                T defaultValue,
                                                                List<String> owners,
                                                                String createdAt,
                                                                String expiresAt,
                                                                String description,
                                                                String modificationEffect,
                                                                FetchVector.Dimension[] dimensions) {
        FlagId id = new FlagId(flagId);
        FetchVector vector = new FetchVector()
                .with(HOSTNAME, Defaults.getDefaults().vespaHostname())
                // Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0
                // (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0.
                .with(VESPA_VERSION, Vtag.currentVersion.toFullString());
        U unboundFlag = factory.create(id, defaultValue, vector);
        FlagDefinition definition = new FlagDefinition(
                unboundFlag, owners, parseDate(createdAt), parseDate(expiresAt), description, modificationEffect, dimensions);
        flags.put(id, definition);
        return unboundFlag;
    }

    /** Parses an ISO-8601 date (e.g. "2021-09-30") as the start of that day in UTC. */
    private static Instant parseDate(String rawDate) {
        return DateTimeFormatter.ISO_DATE.parse(rawDate, LocalDate::from).atStartOfDay().toInstant(ZoneOffset.UTC);
    }

    /** Returns a snapshot of all registered flag definitions. */
    public static List<FlagDefinition> getAllFlags() {
        return List.copyOf(flags.values());
    }

    /** Returns the definition registered for the given id, or empty if the id is unknown. */
    public static Optional<FlagDefinition> getFlag(FlagId flagId) {
        return Optional.ofNullable(flags.get(flagId));
    }

    /**
     * Allows the statically defined flags to be controlled in a test.
     *
     * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block,
     * the flags start out as cleared. Flags can be defined, etc. When leaving the block, the flags from
     * before the block are reinserted.
     *
     * <p>NOT thread-safe. Tests using this cannot run in parallel.
     *
     * @throws IllegalArgumentException if any of {@code flagsToKeep} is not a defined flag
     */
    public static Replacer clearFlagsForTesting(FlagId... flagsToKeep) {
        return new Replacer(flagsToKeep);
    }

    /**
     * Swaps the static flag map for an (almost) empty one for the duration of a test,
     * restoring the original map on {@link #close()}.
     */
    public static class Replacer implements AutoCloseable {
        // Guards against two Replacer instances being active at once (e.g. tests running in parallel).
        private static volatile boolean flagsCleared = false;

        // The flag map that was active before this Replacer was created; restored on close().
        private final TreeMap<FlagId, FlagDefinition> savedFlags;

        private Replacer(FlagId... flagsToKeep) {
            verifyAndSetFlagsCleared(true);
            this.savedFlags = Flags.flags;
            Flags.flags = new TreeMap<>();
            for (FlagId id : List.of(flagsToKeep)) {
                FlagDefinition definition = savedFlags.get(id);
                // Fail fast on unknown ids: silently storing null here would later make
                // getAllFlags() throw NPE, since List.copyOf rejects null elements.
                if (definition == null) {
                    throw new IllegalArgumentException("clearFlagsForTesting: no such flag: " + id);
                }
                Flags.flags.put(id, definition);
            }
        }

        @Override
        public void close() {
            verifyAndSetFlagsCleared(false);
            Flags.flags = savedFlags;
        }

        /**
         * Used to implement a simple verification that Replacer is not used by multiple threads.
         * For instance two different tests running in parallel cannot both use Replacer.
         */
        private static void verifyAndSetFlagsCleared(boolean newValue) {
            if (flagsCleared == newValue) {
                // Distinguish the two failure modes instead of blaming parallel tests in both cases.
                throw new IllegalStateException(newValue
                        ? "clearFlagsForTesting called while flags are already cleared - running tests in parallel!?"
                        : "Replacer closed more than once");
            }
            flagsCleared = newValue;
        }
    }
}
package cpw.mods.fml.client; import static org.lwjgl.opengl.GL11.*; import java.awt.image.BufferedImage; import java.awt.Dimension; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.IdentityHashMap; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.logging.Level; import java.util.logging.Logger; import javax.imageio.ImageIO; import org.lwjgl.opengl.GL11; import net.minecraft.client.Minecraft; import net.minecraft.src.BaseMod; import net.minecraft.src.BiomeGenBase; import net.minecraft.src.Block; import net.minecraft.src.ClientRegistry; import net.minecraft.src.CommonRegistry; import net.minecraft.src.EntityItem; import net.minecraft.src.EntityPlayer; import net.minecraft.src.GameSettings; import net.minecraft.src.GuiScreen; import net.minecraft.src.IBlockAccess; import net.minecraft.src.IChunkProvider; import net.minecraft.src.IInventory; import net.minecraft.src.Item; import net.minecraft.src.ItemStack; import net.minecraft.src.KeyBinding; import net.minecraft.src.ModTextureStatic; import net.minecraft.src.NetClientHandler; import net.minecraft.src.NetworkManager; import net.minecraft.src.Packet; import net.minecraft.src.Packet1Login; import net.minecraft.src.Packet250CustomPayload; import net.minecraft.src.Packet3Chat; import net.minecraft.src.Profiler; import net.minecraft.src.Render; import net.minecraft.src.RenderBlocks; import net.minecraft.src.RenderEngine; import net.minecraft.src.RenderManager; import net.minecraft.src.RenderPlayer; import net.minecraft.src.StringTranslate; import net.minecraft.src.TextureFX; import net.minecraft.src.TexturePackBase; import net.minecraft.src.World; import 
net.minecraft.src.WorldType; import argo.jdom.JdomParser; import argo.jdom.JsonNode; import cpw.mods.fml.common.FMLCommonHandler; import cpw.mods.fml.common.IFMLSidedHandler; import cpw.mods.fml.common.IKeyHandler; import cpw.mods.fml.common.Loader; import cpw.mods.fml.common.ModContainer; import cpw.mods.fml.common.ModContainer.TickType; import cpw.mods.fml.common.ModMetadata; import cpw.mods.fml.common.ReflectionHelper; import cpw.mods.fml.common.modloader.ModLoaderHelper; import cpw.mods.fml.common.modloader.ModLoaderModContainer; /** * Handles primary communication from hooked code into the system * * The FML entry point is {@link #onPreLoad(MinecraftServer)} called from * {@link MinecraftServer} * * Obfuscated code should focus on this class and other members of the "server" * (or "client") code * * The actual mod loading is handled at arms length by {@link Loader} * * It is expected that a similar class will exist for each target environment: * Bukkit and Client side. * * It should not be directly modified. 
* * @author cpw * */ public class FMLClientHandler implements IFMLSidedHandler { /** * The singleton */ private static final FMLClientHandler INSTANCE = new FMLClientHandler(); /** * A reference to the server itself */ private Minecraft client; /** * A handy list of the default overworld biomes */ private BiomeGenBase[] defaultOverworldBiomes; private int nextRenderId = 30; private TexturePackBase fallbackTexturePack; private NetClientHandler networkClient; private ModContainer animationCallbackMod; // Cached lookups private HashMap<String, ArrayList<OverrideInfo>> overrideInfo = new HashMap<String, ArrayList<OverrideInfo>>(); private HashMap<Integer, BlockRenderInfo> blockModelIds = new HashMap<Integer, BlockRenderInfo>(); private HashMap<KeyBinding, ModContainer> keyBindings = new HashMap<KeyBinding, ModContainer>(); private HashSet<OverrideInfo> animationSet = new HashSet<OverrideInfo>(); private List<TextureFX> addedTextureFX = new ArrayList<TextureFX>(); private boolean firstTick; /** * Called to start the whole game off from * {@link MinecraftServer#startServer} * * @param minecraftServer */ public void onPreLoad(Minecraft minecraft) { client = minecraft; ReflectionHelper.detectObfuscation(World.class); FMLCommonHandler.instance().registerSidedDelegate(this); CommonRegistry.registerRegistry(new ClientRegistry()); Loader.instance().loadMods(); } /** * Called a bit later on during initialization to finish loading mods * Also initializes key bindings * */ public void onLoadComplete() { client.field_6315_n.func_1065_b(); Loader.instance().initializeMods(); for (ModContainer mod : Loader.getModList()) { mod.gatherRenderers(RenderManager.field_1233_a.getRendererList()); for (Render r : RenderManager.field_1233_a.getRendererList().values()) { r.func_4009_a(RenderManager.field_1233_a); } } // Load the key bindings into the settings table GameSettings gs = client.field_6304_y; KeyBinding[] modKeyBindings = harvestKeyBindings(); KeyBinding[] allKeys = new 
KeyBinding[gs.field_1564_t.length + modKeyBindings.length]; System.arraycopy(gs.field_1564_t, 0, allKeys, 0, gs.field_1564_t.length); System.arraycopy(modKeyBindings, 0, allKeys, gs.field_1564_t.length, modKeyBindings.length); gs.field_1564_t = allKeys; gs.func_6519_a(); // Mark this as a "first tick" firstTick = true; } public KeyBinding[] harvestKeyBindings() { List<IKeyHandler> allKeys=FMLCommonHandler.instance().gatherKeyBindings(); KeyBinding[] keys=new KeyBinding[allKeys.size()]; int i=0; for (IKeyHandler key : allKeys) { keys[i++]=(KeyBinding)key.getKeyBinding(); keyBindings.put((KeyBinding) key.getKeyBinding(), key.getOwningContainer()); } return keys; } /** * Every tick just before world and other ticks occur */ public void onPreWorldTick() { if (client.field_6324_e != null) { FMLCommonHandler.instance().worldTickStart(); FMLCommonHandler.instance().tickStart(TickType.WORLDGUI, 0.0f, client.field_6313_p); } } /** * Every tick just after world and other ticks occur */ public void onPostWorldTick() { if (client.field_6324_e != null) { FMLCommonHandler.instance().worldTickEnd(); FMLCommonHandler.instance().tickEnd(TickType.WORLDGUI, 0.0f, client.field_6313_p); } } public void onWorldLoadTick() { if (client.field_6324_e != null) { if (firstTick) { loadTextures(fallbackTexturePack); firstTick = false; } FMLCommonHandler.instance().tickStart(TickType.WORLDLOADTICK); FMLCommonHandler.instance().tickStart(TickType.GUILOADTICK); } } public void onRenderTickStart(float partialTickTime) { if (client.field_6324_e != null) { FMLCommonHandler.instance().tickStart(TickType.RENDER, partialTickTime); FMLCommonHandler.instance().tickStart(TickType.GUI, partialTickTime, client.field_6313_p); } } public void onRenderTickEnd(float partialTickTime) { if (client.field_6324_e != null) { FMLCommonHandler.instance().tickEnd(TickType.RENDER, partialTickTime); FMLCommonHandler.instance().tickEnd(TickType.GUI, partialTickTime, client.field_6313_p); } } /** * Get the server instance * 
* @return */ public Minecraft getClient() { return client; } /** * Get a handle to the client's logger instance * The client actually doesn't have one- so we return null */ public Logger getMinecraftLogger() { return null; } /** * Called from ChunkProvider when a chunk needs to be populated * * To avoid polluting the worldgen seed, we generate a new random from the * world seed and generate a seed from that * * @param chunkProvider * @param chunkX * @param chunkZ * @param world * @param generator */ public void onChunkPopulate(IChunkProvider chunkProvider, int chunkX, int chunkZ, World world, IChunkProvider generator) { Random fmlRandom = new Random(world.func_22138_q()); long xSeed = fmlRandom.nextLong() >> 2 + 1L; long zSeed = fmlRandom.nextLong() >> 2 + 1L; fmlRandom.setSeed((xSeed * chunkX + zSeed * chunkZ) ^ world.func_22138_q()); for (ModContainer mod : Loader.getModList()) { if (mod.generatesWorld()) { mod.getWorldGenerator().generate(fmlRandom, chunkX, chunkZ, world, generator, chunkProvider); } } } /** * Is the offered class and instance of BaseMod and therefore a ModLoader * mod? */ public boolean isModLoaderMod(Class<?> clazz) { return BaseMod.class.isAssignableFrom(clazz); } /** * Load the supplied mod class into a mod container */ public ModContainer loadBaseModMod(Class<?> clazz, File canonicalFile) { @SuppressWarnings("unchecked") Class<? extends BaseMod> bmClazz = (Class<? 
extends BaseMod>) clazz; return new ModLoaderModContainer(bmClazz, canonicalFile); } /** * Called to notify that an item was picked up from the world * * @param entityItem * @param entityPlayer */ public void notifyItemPickup(EntityItem entityItem, EntityPlayer entityPlayer) { for (ModContainer mod : Loader.getModList()) { if (mod.wantsPickupNotification()) { mod.getPickupNotifier().notifyPickup(entityItem, entityPlayer); } } } /** * Attempt to dispense the item as an entity other than just as a the item * itself * * @param world * @param x * @param y * @param z * @param xVelocity * @param zVelocity * @param item * @return */ public boolean tryDispensingEntity(World world, double x, double y, double z, byte xVelocity, byte zVelocity, ItemStack item) { for (ModContainer mod : Loader.getModList()) { if (mod.wantsToDispense() && mod.getDispenseHandler().dispense(x, y, z, xVelocity, zVelocity, world, item)) { return true; } } return false; } /** * @return the instance */ public static FMLClientHandler instance() { return INSTANCE; } /** * Build a list of default overworld biomes * * @return */ public BiomeGenBase[] getDefaultOverworldBiomes() { if (defaultOverworldBiomes == null) { ArrayList<BiomeGenBase> biomes = new ArrayList<BiomeGenBase>(20); for (int i = 0; i < 23; i++) { if ("Sky".equals(BiomeGenBase.field_35486_a[i].field_6504_m) || "Hell".equals(BiomeGenBase.field_35486_a[i].field_6504_m)) { continue; } biomes.add(BiomeGenBase.field_35486_a[i]); } defaultOverworldBiomes = new BiomeGenBase[biomes.size()]; biomes.toArray(defaultOverworldBiomes); } return defaultOverworldBiomes; } /** * Called when an item is crafted * * @param player * @param craftedItem * @param craftingGrid */ public void onItemCrafted(EntityPlayer player, ItemStack craftedItem, IInventory craftingGrid) { for (ModContainer mod : Loader.getModList()) { if (mod.wantsCraftingNotification()) { mod.getCraftingHandler().onCrafting(player, craftedItem, craftingGrid); } } } /** * Called when an item is 
smelted * * @param player * @param smeltedItem */ public void onItemSmelted(EntityPlayer player, ItemStack smeltedItem) { for (ModContainer mod : Loader.getModList()) { if (mod.wantsCraftingNotification()) { mod.getCraftingHandler().onSmelting(player, smeltedItem); } } } /** * Called when a chat packet is received * * @param chat * @param player * @return true if you want the packet to stop processing and not echo to * the rest of the world */ public boolean handleChatPacket(Packet3Chat chat) { for (ModContainer mod : Loader.getModList()) { if (mod.wantsNetworkPackets() && mod.getNetworkHandler().onChat(chat)) { return true; } } return false; } public void handleServerLogin(Packet1Login loginPacket, NetClientHandler handler, NetworkManager networkManager) { this.networkClient=handler; Packet250CustomPayload packet = new Packet250CustomPayload(); packet.field_44012_a = "REGISTER"; packet.field_44011_c = FMLCommonHandler.instance().getPacketRegistry(); packet.field_44010_b = packet.field_44011_c.length; if (packet.field_44010_b > 0) { networkManager.func_972_a(packet); } for (ModContainer mod : Loader.getModList()) { mod.getNetworkHandler().onServerLogin(handler); } } /** * Called when a packet 250 packet is received from the player * * @param packet * @param player */ public void handlePacket250(Packet250CustomPayload packet) { if ("REGISTER".equals(packet.field_44012_a) || "UNREGISTER".equals(packet.field_44012_a)) { handleServerRegistration(packet); return; } ModContainer mod = FMLCommonHandler.instance().getModForChannel(packet.field_44012_a); if (mod != null) { mod.getNetworkHandler().onPacket250Packet(packet); } } /** * Handle register requests for packet 250 channels * * @param packet */ private void handleServerRegistration(Packet250CustomPayload packet) { if (packet.field_44011_c == null) { return; } try { for (String channel : new String(packet.field_44011_c, "UTF8").split("\0")) { // Skip it if we don't know it if 
(FMLCommonHandler.instance().getModForChannel(channel) == null) { continue; } if ("REGISTER".equals(packet.field_44012_a)) { FMLCommonHandler.instance().activateChannel(client.field_6322_g,channel); } else { FMLCommonHandler.instance().deactivateChannel(client.field_6322_g,channel); } } } catch (UnsupportedEncodingException e) { getMinecraftLogger().warning("Received invalid registration packet"); } } /** * Are we a server? */ @Override public boolean isServer() { return false; } /** * Are we a client? */ @Override public boolean isClient() { return true; } @Override public File getMinecraftRootDirectory() { return client.field_6297_D; } /** * @param player */ public void announceLogout(EntityPlayer player) { for (ModContainer mod : Loader.getModList()) { if (mod.wantsPlayerTracking()) { mod.getPlayerTracker().onPlayerLogout(player); } } } /** * @param p_28168_1_ */ public void announceDimensionChange(EntityPlayer player) { for (ModContainer mod : Loader.getModList()) { if (mod.wantsPlayerTracking()) { mod.getPlayerTracker().onPlayerChangedDimension(player); } } } /** * @param biome */ public void addBiomeToDefaultWorldGenerator(BiomeGenBase biome) { WorldType.field_48635_b.addNewBiome(biome); } /** * Return the minecraft instance */ @Override public Object getMinecraftInstance() { return client; } /* (non-Javadoc) * @see cpw.mods.fml.common.IFMLSidedHandler#getCurrentLanguage() */ @Override public String getCurrentLanguage() { return StringTranslate.func_20162_a().func_44024_c(); } public Properties getCurrentLanguageTable() { return StringTranslate.func_20162_a().getTranslationTable(); } /** * @param armor * @return */ public int addNewArmourRendererPrefix(String armor) { return RenderPlayer.addNewArmourPrefix(armor); } public void addNewTextureOverride(String textureToOverride, String overridingTexturePath, int location) { if (!overrideInfo.containsKey(textureToOverride)) { overrideInfo.put(textureToOverride, new ArrayList<OverrideInfo>()); } 
ArrayList<OverrideInfo> list = overrideInfo.get(textureToOverride); OverrideInfo info = new OverrideInfo(); info.index = location; info.override = overridingTexturePath; info.texture = textureToOverride; list.add(info); FMLCommonHandler.instance().getFMLLogger().log(Level.FINE, String.format("Overriding %s @ %d with %s. %d slots remaining",textureToOverride, location, overridingTexturePath, SpriteHelper.freeSlotCount(textureToOverride))); } /** * @param mod * @param inventoryRenderer * @return */ public int obtainBlockModelIdFor(BaseMod mod, boolean inventoryRenderer) { ModLoaderModContainer mlmc=ModLoaderHelper.registerRenderHelper(mod); int renderId=nextRenderId++; BlockRenderInfo bri=new BlockRenderInfo(renderId, inventoryRenderer, mlmc); blockModelIds.put(renderId, bri); return renderId; } /** * @param renderEngine * @param path * @return */ public BufferedImage loadImageFromTexturePack(RenderEngine renderEngine, String path) throws IOException { InputStream image=client.field_6298_C.field_6534_a.func_6481_a(path); if (image==null) { throw new RuntimeException(String.format("The requested image path %s is not found",path)); } BufferedImage result=ImageIO.read(image); if (result==null) { throw new RuntimeException(String.format("The requested image path %s appears to be corrupted",path)); } return result; } /** * @param player * @param gui */ public void displayGuiScreen(EntityPlayer player, GuiScreen gui) { if (client.field_22009_h==player && gui != null) { client.func_6272_a(gui); } } /** * @param mod * @param keyHandler * @param allowRepeat */ public void registerKeyHandler(BaseMod mod, KeyBinding keyHandler, boolean allowRepeat) { ModLoaderModContainer mlmc=ModLoaderHelper.registerKeyHelper(mod); mlmc.addKeyHandler(new KeyBindingHandler(keyHandler, allowRepeat, mlmc)); } /** * @param renderer * @param world * @param x * @param y * @param z * @param block * @param modelId * @return */ public boolean renderWorldBlock(RenderBlocks renderer, IBlockAccess world, 
int x, int y, int z, Block block, int modelId) { if (!blockModelIds.containsKey(modelId)) { return false; } BlockRenderInfo bri = blockModelIds.get(modelId); return bri.renderWorldBlock(world, x, y, z, block, modelId, renderer); } /** * @param renderer * @param block * @param metadata * @param modelID */ public void renderInventoryBlock(RenderBlocks renderer, Block block, int metadata, int modelID) { if (!blockModelIds.containsKey(modelID)) { return; } BlockRenderInfo bri=blockModelIds.get(modelID); bri.renderInventoryBlock(block, metadata, modelID, renderer); } /** * @param p_1219_0_ * @return */ public boolean renderItemAsFull3DBlock(int modelId) { BlockRenderInfo bri = blockModelIds.get(modelId); if (bri!=null) { return bri.shouldRender3DInInventory(); } return false; } public void registerTextureOverrides(RenderEngine renderer) { for (ModContainer mod : Loader.getModList()) { registerAnimatedTexturesFor(mod); } for (OverrideInfo animationOverride : animationSet) { renderer.func_1066_a(animationOverride.textureFX); addedTextureFX.add(animationOverride.textureFX); FMLCommonHandler.instance().getFMLLogger().finer(String.format("Registered texture override %d (%d) on %s (%d)", animationOverride.index, animationOverride.textureFX.field_1126_b, animationOverride.textureFX.getClass().getSimpleName(), animationOverride.textureFX.field_1128_f)); } for (String fileToOverride : overrideInfo.keySet()) { for (OverrideInfo override : overrideInfo.get(fileToOverride)) { try { BufferedImage image=loadImageFromTexturePack(renderer, override.override); ModTextureStatic mts=new ModTextureStatic(override.index, 1, override.texture, image); renderer.func_1066_a(mts); addedTextureFX.add(mts); FMLCommonHandler.instance().getFMLLogger().finer(String.format("Registered texture override %d (%d) on %s (%d)", override.index, mts.field_1126_b, override.texture, mts.field_1128_f)); } catch (IOException e) { FMLCommonHandler.instance().getFMLLogger().throwing("FMLClientHandler", 
"registerTextureOverrides", e); } } } } /** * @param mod */ private void registerAnimatedTexturesFor(ModContainer mod) { this.animationCallbackMod=mod; mod.requestAnimations(); this.animationCallbackMod=null; } public String getObjectName(Object instance) { String objectName; if (instance instanceof Item) { objectName=((Item)instance).func_20009_a(); } else if (instance instanceof Block) { objectName=((Block)instance).func_20013_i(); } else if (instance instanceof ItemStack) { objectName=Item.field_233_c[((ItemStack)instance).field_1617_c].func_21011_b((ItemStack)instance); } else { throw new IllegalArgumentException(String.format("Illegal object for naming %s",instance)); } objectName+=".name"; return objectName; } /* (non-Javadoc) * @see cpw.mods.fml.common.IFMLSidedHandler#readMetadataFrom(java.io.InputStream, cpw.mods.fml.common.ModContainer) */ @Override public ModMetadata readMetadataFrom(InputStream input, ModContainer mod) throws Exception { JsonNode root=new JdomParser().func_27366_a(new InputStreamReader(input)); List<JsonNode> lst=root.func_27217_b(); JsonNode modinfo = null; for (JsonNode tmodinfo : lst) { if (mod.getName().equals(tmodinfo.func_27213_a("modid"))) { modinfo = tmodinfo; break; } } if (modinfo == null) { FMLCommonHandler.instance().getFMLLogger().fine(String.format("Unable to process JSON modinfo file for %s", mod.getName())); return null; } ModMetadata meta=new ModMetadata(mod); try { meta.name=modinfo.func_27213_a("name"); meta.description=modinfo.func_27213_a("description"); meta.version=modinfo.func_27213_a("version"); meta.credits=modinfo.func_27213_a("credits"); List authors=modinfo.func_27217_b("authors"); StringBuilder sb=new StringBuilder(); for (int i=0; i<authors.size(); i++) { meta.authorList.add(((JsonNode)authors.get(i)).func_27216_b()); } meta.logoFile=modinfo.func_27213_a("logoFile"); meta.url=modinfo.func_27213_a("url"); meta.updateUrl=modinfo.func_27213_a("updateUrl"); meta.parent=modinfo.func_27213_a("parent"); List 
screenshots=modinfo.func_27217_b("screenshots"); meta.screenshots=new String[screenshots.size()]; for (int i=0; i<screenshots.size(); i++) { meta.screenshots[i]=((JsonNode)screenshots.get(i)).func_27216_b(); } } catch (Exception e) { FMLCommonHandler.instance().getFMLLogger().log(Level.FINE, String.format("An error occured reading the info file for %s",mod.getName()), e); } return meta; } public void pruneOldTextureFX(TexturePackBase var1, List<TextureFX> effects) { ListIterator<TextureFX> li = addedTextureFX.listIterator(); while (li.hasNext()) { TextureFX tex = li.next(); if (tex instanceof FMLTextureFX) { if (((FMLTextureFX)tex).unregister(client.field_6315_n, effects)) { li.remove(); } } else { effects.remove(tex); li.remove(); } } } /** * @param p_6531_1_ */ public void loadTextures(TexturePackBase texturePack) { registerTextureOverrides(client.field_6315_n); } /** * @param field_6539_c */ public void onEarlyTexturePackLoad(TexturePackBase fallback) { if (client==null) { // We're far too early- let's wait this.fallbackTexturePack=fallback; } else { loadTextures(fallback); } } /** * @param packet */ public void sendPacket(Packet packet) { if (this.networkClient!=null) { this.networkClient.func_847_a(packet); } } /** * @param anim */ public void addAnimation(TextureFX anim) { if (animationCallbackMod==null) { return; } OverrideInfo info=new OverrideInfo(); info.index=anim.field_1126_b; info.imageIndex=anim.field_1128_f; info.textureFX=anim; if (animationSet.contains(info)) { animationSet.remove(info); } animationSet.add(info); } @Override public void profileStart(String profileLabel) { Profiler.func_40663_a(profileLabel); } @Override public void profileEnd() { Profiler.func_40662_b(); } public void preGameLoad(String user, String sessionToken) { // Currently this does nothing, but it's possible I could relaunch Minecraft in a new classloader if I wished Minecraft.fmlReentry(user, sessionToken); } public void onTexturePackChange(RenderEngine engine, 
TexturePackBase texturepack, List<TextureFX> effects) { FMLClientHandler.instance().pruneOldTextureFX(texturepack, effects); for (TextureFX tex : effects) { if (tex instanceof ITextureFX) { ((ITextureFX)tex).onTexturePackChanged(engine, texturepack, getTextureDimensions(tex)); } } FMLClientHandler.instance().loadTextures(texturepack); } private HashMap<Integer, Dimension> textureDims = new HashMap<Integer, Dimension>(); private IdentityHashMap<TextureFX, Integer> effectTextures = new IdentityHashMap<TextureFX, Integer>(); public void setTextureDimensions(int id, int width, int height, List<TextureFX> effects) { Dimension dim = new Dimension(width, height); textureDims.put(id, dim); for (TextureFX tex : effects) { if (getEffectTexture(tex) == id && tex instanceof ITextureFX) { ((ITextureFX)tex).onTextureDimensionsUpdate(width, height); } } } public Dimension getTextureDimensions(TextureFX effect) { return getTextureDimensions(getEffectTexture(effect)); } public Dimension getTextureDimensions(int id) { return textureDims.get(id); } public int getEffectTexture(TextureFX effect) { Integer id = effectTextures.get(effect); if (id != null) { return id; } int old = GL11.glGetInteger(GL_TEXTURE_BINDING_2D); effect.func_782_a(client.field_6315_n); id = GL11.glGetInteger(GL_TEXTURE_BINDING_2D); GL11.glBindTexture(GL_TEXTURE_2D, old); effectTextures.put(effect, id); return id; } public boolean onUpdateTextureEffect(TextureFX effect) { Logger log = FMLCommonHandler.instance().getFMLLogger(); ITextureFX ifx = (effect instanceof ITextureFX ? ((ITextureFX)effect) : null); if (ifx != null && ifx.getErrored()) { return false; } String name = effect.getClass().getSimpleName(); Profiler.func_40663_a(name); try { effect.func_783_a(); } catch (Exception e) { log.warning(String.format("Texture FX %s has failed to animate. 
Likely caused by a texture pack change that they did not respond correctly to", name)); if (ifx != null) { ifx.setErrored(true); } Profiler.func_40662_b(); return false; } Profiler.func_40662_b(); Dimension dim = getTextureDimensions(effect); int target = ((dim.width >> 4) * (dim.height >> 4)) << 2; if (effect.field_1127_a.length != target) { log.warning(String.format("Detected a texture FX sizing discrepancy in %s (%d, %d)", name, effect.field_1127_a.length, target)); if (ifx != null) { ifx.setErrored(true); } return false; } return true; } public void onPreRegisterEffect(TextureFX effect) { Dimension dim = getTextureDimensions(effect); if (effect instanceof ITextureFX) { ((ITextureFX)effect).onTextureDimensionsUpdate(dim.width, dim.height); } } }
package org.specsy.core;

import fi.jumi.api.drivers.SuiteNotifier;

import java.util.concurrent.Executor;

/**
 * Runs one execution path of a {@link Spec} and schedules a follow-up
 * {@code SpecRun} for every path that the spec postponed while executing.
 */
public class SpecRun implements Runnable {

    private final Spec spec;
    private final Path pathToExecute;
    private final SuiteNotifier notifier;
    private final Executor executor;

    /**
     * Creates a run that starts from the root path of the spec.
     */
    public SpecRun(Spec spec, SuiteNotifier notifier, Executor executor) {
        this(spec, Path.ROOT, notifier, executor);
    }

    private SpecRun(Spec spec, Path pathToExecute, SuiteNotifier notifier, Executor executor) {
        this.spec = spec;
        this.pathToExecute = pathToExecute;
        this.notifier = notifier;
        this.executor = executor;
    }

    @Override
    public void run() {
        // Execute this run's path; the context collects the paths the spec
        // chose to defer, each of which becomes its own asynchronous run.
        Context executed = executeCurrentPath();
        for (Path postponed : executed.postponedPaths()) {
            executor.execute(new SpecRun(spec, postponed, notifier, executor));
        }
    }

    // Runs the spec against a fresh context bound to this run's path.
    private Context executeCurrentPath() {
        Context ctx = new Context(pathToExecute, notifier);
        spec.run(ctx);
        return ctx;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder(getClass().getName());
        sb.append('(').append(spec).append(", ").append(pathToExecute).append(')');
        return sb.toString();
    }
}
package hr.fer.zemris.vhdllab.service;

import hr.fer.zemris.vhdllab.dao.DAOException;
import hr.fer.zemris.vhdllab.model.File;
import hr.fer.zemris.vhdllab.model.Project;
import hr.fer.zemris.vhdllab.vhdl.CompilationResult;
import hr.fer.zemris.vhdllab.vhdl.SimulationResult;
import hr.fer.zemris.vhdllab.vhdl.VHDLGenerator;

import java.util.List;

/**
 * An interface representing a VHDL Laboratory Manager. It defines the
 * communication between the web layer and the service layer: project and file
 * management, compilation, simulation, and VHDL source generation.
 */
public interface VHDLLabManager {

	/**
	 * Loads a project. An exception will be thrown if a project with the
	 * specified identifier does not exist.
	 * @param projectId identifier of the project
	 * @return the requested project; this will never be <code>null</code>
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public Project loadProject(Long projectId) throws ServiceException;

	/**
	 * Checks whether the specified project exists.
	 * @param projectId identifier of the project
	 * @return <code>true</code> if the project exists, <code>false</code> otherwise
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public boolean existsProject(Long projectId) throws ServiceException;

	/**
	 * Finds all projects owned by the specified user. The return value will
	 * never be <code>null</code>, although it can be an empty list.
	 * @param userId identifier of the user
	 * @return list of the user's projects
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public List<Project> findProjectsByUser(Long userId) throws ServiceException;

	/**
	 * Saves a project using the underlying persistence layer.
	 * @param p project to save; must not be <code>null</code>
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public void saveProject(Project p) throws ServiceException;

	/**
	 * Renames the project with the given identifier.
	 * @param projectId identifier of the project
	 * @param newName a new name for the project
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public void renameProject(Long projectId, String newName) throws ServiceException;

	/**
	 * Retrieves the file with the specified identifier. An exception will be
	 * thrown if a file with the specified identifier does not exist.
	 * @param fileId identifier of the file
	 * @return the requested file; this will never be <code>null</code>
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public File loadFile(Long fileId) throws ServiceException;

	/**
	 * Checks whether the specified project contains a file with the given name.
	 * @param projectId identifier of the project
	 * @param fileName name of the file
	 * @return <code>true</code> if the file exists, <code>false</code> otherwise
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public boolean existsFile(Long projectId, String fileName) throws ServiceException;

	/**
	 * Creates a new file which is a member of the specified project and has
	 * the given name and type. Content will be set to <code>null</code>.
	 * Be aware that a project cannot contain two files with the same name, so
	 * creation in that case will result in a {@linkplain ServiceException}
	 * (this is left to implementations to enforce).
	 * @param project project to which this file will be added
	 * @param fileName name for the file
	 * @param fileType type for the file
	 * @return the created file
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public File createNewFile(Project project, String fileName, String fileType) throws ServiceException;

	/**
	 * Sets new content for the file.
	 * @param fileId identifier of the file
	 * @param content content for the file
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public void saveFile(Long fileId, String content) throws ServiceException;

	/**
	 * Renames a file.
	 * @param fileId identifier of the file
	 * @param newName new name
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public void renameFile(Long fileId, String newName) throws ServiceException;

	/**
	 * Obtains the file type of the specified file.
	 * @param fileId identifier of the file
	 * @return file type
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public String getFileType(Long fileId) throws ServiceException;

	/**
	 * Obtains the file name of the specified file.
	 * @param fileId identifier of the file
	 * @return file name
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public String getFileName(Long fileId) throws ServiceException;

	/**
	 * Compiles the specified file.
	 * @param fileId identifier of the file
	 * @return compilation result
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public CompilationResult compile(Long fileId) throws ServiceException;

	/**
	 * Performs a simulation. The specified file must be of a simulatable type
	 * (such as {@linkplain File#FT_VHDLTB}).
	 * @param fileId identifier of the file
	 * @return simulation status
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public SimulationResult runSimulation(Long fileId) throws ServiceException;

	/**
	 * Generates VHDL for the specified file. Please note that this method can
	 * return the content of the specified file directly (if the file type is,
	 * e.g., {@linkplain File#FT_VHDLSOURCE}). This method will dispatch the
	 * task of VHDL source generation to the appropriate
	 * {@linkplain VHDLGenerator}. If no generator for the file type exists, a
	 * {@linkplain ServiceException} will be thrown.
	 * @param file file for which VHDL must be generated
	 * @return VHDL source for the specified file
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public String generateVHDL(File file) throws ServiceException;

	/**
	 * Generates VHDL for the specified testbench file. The file given as
	 * argument must be of type {@linkplain File#FT_VHDLTB}.
	 * @param file file for which VHDL must be generated
	 * @return VHDL source for the specified file
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public String generateTestbenchVHDL(File file) throws ServiceException;

	/**
	 * Generates VHDL for the specified structural-schema file. The file given
	 * as argument must be of type {@linkplain File#FT_STRUCT_SCHEMA}.
	 * (NOTE(review): method name is misspelled — "Shema" — but it is part of
	 * the published interface, so it cannot be renamed here.)
	 * @param file file for which VHDL must be generated
	 * @return VHDL source for the specified file
	 * @throws ServiceException if any exception occurs (such as {@linkplain DAOException})
	 */
	public String generateShemaVHDL(File file) throws ServiceException;
}
package com.exedio.cope.console;

import java.io.IOException;
import java.io.PrintStream;
import java.util.List;

import javax.servlet.http.HttpServletRequest;

import com.exedio.cope.Item;
import com.exedio.cope.Model;
import com.exedio.cope.Query;
import com.exedio.cope.pattern.Media;

/**
 * Console page ("cop") that lists items carrying a particular {@link Media}
 * feature. The media feature is identified in the request by its type id and
 * feature name, carried in the {@code mt} / {@code mn} request parameters.
 */
final class MediaCop extends ConsoleCop
{
	// Request-parameter names identifying the media feature.
	private static final String MEDIA_TYPE = "mt";
	private static final String MEDIA_NAME = "mn";

	// The media feature this page displays items for.
	final Media media;

	MediaCop(final Media media)
	{
		// Page title is "media - <TypeID>.<featureName>".
		super("media - " + media.getType().getID() + '.' + media.getName());

		this.media = media;
		// Persist the feature identity into this cop's URL parameters so the
		// page can be reconstructed from a request (see getMediaCop).
		addParameter(MEDIA_TYPE, media.getType().getID());
		addParameter(MEDIA_NAME, media.getName());
	}

	/**
	 * Reconstructs a MediaCop from the request parameters, or returns
	 * <code>null</code> when the request does not address a media page
	 * (no {@code mt} parameter present).
	 */
	static MediaCop getMediaCop(final Model model, final HttpServletRequest request)
	{
		final String typeID = request.getParameter(MEDIA_TYPE);
		return (typeID==null) ? null : new MediaCop((Media)model.findTypeByID(typeID).getFeature(request.getParameter(MEDIA_NAME)));
	}

	final void writeBody(final PrintStream out, final Model model, final HttpServletRequest request) throws IOException
	{
		try
		{
			// All queries must run inside a transaction; named after this
			// class for diagnostics.
			model.startTransaction(getClass().getName());
			// Select items whose media data is present
			// (isNull feature is not null), capped at the first 50.
			final Query<? extends Item> q = media.getType().newQuery(media.getIsNull().isNotNull());
			q.setLimit(0, 50);
			final List<? extends Item> items = q.search();
			Console_Jspm.writeBody(this, out, items);
			model.commit();
		}
		finally
		{
			// Roll back if anything above threw before commit.
			model.rollbackIfNotCommitted();
		}
	}
}
package test.dr.distibutions;

import dr.math.distributions.InverseGaussianDistribution;
import dr.math.interfaces.OneVariableFunction;
import dr.math.iterations.BisectionZeroFinder;
import junit.framework.TestCase;

/**
 * Tests for {@link InverseGaussianDistribution}: pdf, mean, variance, shape,
 * and round-tripping of cdf/quantile, including reference values computed
 * with R.
 *
 * @author Wai Lok Sibon Li
 */
public class InverseGaussianDistributionTest extends TestCase {

    // Distribution under test; re-parameterized by each test via
    // setMean/setShape.
    InverseGaussianDistribution invGaussian;

    public void setUp() {
        invGaussian = new InverseGaussianDistribution(1.0, 2.0);
    }

    /**
     * Compares pdf(x) against the closed-form inverse-Gaussian density
     * sqrt(S / (2*pi*x^3)) * exp(-S*(x-M)^2 / (2*M^2*x)) for random
     * mean/shape/x, then against one reference value computed in R.
     */
    public void testPdf() {
        System.out.println("Testing 10000 random pdf calls");
        for (int i = 0; i < 10000; i++) {
            double M = Math.random() * 10.0 + 0.1;
            double S = Math.random() * 5.0 + 0.01;
            double x = Math.random() * 10;
            invGaussian.setMean(M);
            invGaussian.setShape(S);
            //double pdf = 1.0 / (x * S * Math.sqrt(2 * Math.PI)) * Math.exp(-Math.pow(Math.log(x) - M, 2) / (2 * S * S));
            double pdf = Math.sqrt(S/(2.0 * Math.PI * x * x * x)) * Math.exp((-1.0 * S * Math.pow((x - M), 2))/(2 * M * M * x));
            assertEquals(pdf, invGaussian.pdf(x), 1e-10);
        }
        /* Test with an example using R */
        invGaussian.setMean(2.835202292812448);
        invGaussian.setShape(3.539139491639669);
        assertEquals(0.1839934, invGaussian.pdf(2.540111), 1e-6);
    }

    // mean() should simply return the configured mean parameter.
    public void testMean() {
        for (int i = 0; i < 1000; i++) {
            double M = Math.random() * 10.0 + 0.1;
            double S = Math.random() * 5.0 + 0.01;
            invGaussian.setMean(M);
            invGaussian.setShape(S);
            assertEquals(M, invGaussian.mean(), 1e-10);
        }
    }

    // Inverse-Gaussian variance is mean^3 / shape.
    public void testVariance() {
        for (int i = 0; i < 1000; i++) {
            double M = Math.random() * 10.0 + 0.1;
            double S = Math.random() * 5.0 + 0.01;
            invGaussian.setMean(M);
            invGaussian.setShape(S);
            double variance = (M * M * M) / S;
            assertEquals(variance, invGaussian.variance(), 1e-8);
        }
    }

    // getShape() should round-trip the configured shape parameter.
    // NOTE(review): the printed message mentions quantile(0.5) calls but the
    // loop only exercises getShape() — the message appears stale.
    public void testShape() {
        System.out.println("Testing 10000 random quantile(0.5) calls");
        for (int i = 0; i < 10000; i++) {
            double M = Math.random() * 10.0 + 0.1;
            double S = Math.random() * 5.0 + 0.01;
            invGaussian.setMean(M);
            invGaussian.setShape(S);
            assertEquals(S, invGaussian.getShape(), 1e-10);
        }
    }

    /**
     * Round-trips cdf(quantile(p)) == p for random shapes, plus several
     * R-derived reference points. A looser tolerance is used near shape
     * ~351 — presumably a known hard case for the implementation
     * (see the explicit setShape(351.756...) probe at the top).
     */
    public void testCDFAndQuantile() {
        invGaussian.setMean(1.0);
        invGaussian.setShape(351.7561121947152);
        double q = invGaussian.quantile(0.20811009197062338);
        assertEquals(0.20811009197062338, invGaussian.cdf(q), 3.0e-3);
        for (int i = 0; i < 10000; i++) {
            double M = 1.0;
            double S = Math.random() * 1000.0 + 0.01;
            invGaussian.setMean(M);
            invGaussian.setShape(S);
            double p = Math.random()*0.98 + 0.01;
            double quantile = invGaussian.quantile(p);
            //System.out.println(quantile + "\t" + p + "\t" + M + "\t" + S);
            double cdf = invGaussian.cdf(quantile);
            if(((int)S)==351) {
                // Looser tolerance for the problematic shape range.
                assertEquals(p, cdf, 1.0e-2);
            }
            else {
                assertEquals(p, cdf, 1.0e-3);
            }
        }
        /* Test with examples using R */
        invGaussian.setMean(5);
        invGaussian.setShape(0.5);
        assertEquals(0.75, invGaussian.cdf(3.022232), 1e-5);
        invGaussian.setMean(1.0);
        invGaussian.setShape(17.418709855826197);
        double q2 = invGaussian.quantile(0.27959422055126726);
        double p_hat = invGaussian.cdf(q2);
        assertEquals(0.27959422055126726, p_hat, 1.0e-3);
        invGaussian.setMean(1.0);
        invGaussian.setShape(0.4078303443934461);
        assertEquals(0.05514379243099207, invGaussian.cdf(invGaussian.quantile(0.05514379243099207)), 1.0e-3);
    }

    /**
     * Cross-checks quantile(p) against an independent numeric inversion of
     * the cdf via bisection root-finding on cdf(x) - p over [0.01, 100].
     */
    public void testCDFAndQuantile2() {
        final InverseGaussianDistribution f = new InverseGaussianDistribution(1, 1);
        for (double i = 0.01; i < 0.95; i += 0.01) {
            final double y = i;
            BisectionZeroFinder zeroFinder = new BisectionZeroFinder(new OneVariableFunction() {
                public double value(double x) {
                    return f.cdf(x) - y;
                }
            }, 0.01, 100);
            zeroFinder.setMaximumIterations(100);
            zeroFinder.evaluate();
            assertEquals(f.quantile(i), zeroFinder.getResult(), 1e-3);
        }
    }

    /**
     * Round-trips cdf(quantile(p)) == p over a fixed grid of shape values
     * spanning roughly e^-4.6 .. e^6.9 (the values look like exp() of an
     * evenly spaced grid — TODO confirm) with the mean left at its
     * setUp() value.
     */
    public void testCDFAndQuantile3() {
        double[] shapes = {0.010051836, 0.011108997, 0.01227734, 0.013568559, 0.014995577,
                0.016572675, 0.018315639, 0.020241911, 0.022370772, 0.024723526, 0.027323722,
                0.030197383, 0.03337327, 0.036883167, 0.040762204, 0.045049202, 0.049787068,
                0.05502322, 0.060810063, 0.067205513, 0.074273578, 0.082084999, 0.090717953,
                0.100258844, 0.110803158, 0.122456428, 0.135335283, 0.149568619, 0.165298888,
                0.182683524, 0.201896518, 0.22313016, 0.246596964, 0.272531793, 0.301194212,
                0.332871084, 0.367879441, 0.40656966, 0.449328964, 0.496585304, 0.548811636,
                0.60653066, 0.670320046, 0.740818221, 0.818730753, 0.904837418, 1,
                1.105170918, 1.221402758, 1.349858808, 1.491824698, 1.648721271, 1.8221188,
                2.013752707, 2.225540928, 2.459603111, 2.718281828, 3.004166024, 3.320116923,
                3.669296668, 4.055199967, 4.48168907, 4.953032424, 5.473947392, 6.049647464,
                6.685894442, 7.389056099, 8.166169913, 9.025013499, 9.974182455, 11.02317638,
                12.18249396, 13.46373804, 14.87973172, 16.44464677, 18.17414537, 20.08553692,
                22.19795128, 24.5325302, 27.11263892, 29.96410005, 33.11545196, 36.59823444,
                40.44730436, 44.70118449, 49.40244911, 54.59815003, 60.3402876, 66.68633104,
                73.6997937, 81.45086866, 90.0171313, 99.48431564, 109.9471725, 121.5104175,
                134.2897797, 148.4131591, 164.0219073, 181.2722419, 200.33681, 221.4064162,
                244.6919323, 270.4264074, 298.867401, 330.2995599, 365.0374679, 403.4287935,
                445.8577701, 492.7490411, 544.5719101, 601.8450379, 665.141633, 735.0951892,
                812.4058252, 897.8472917, 992.2747156};
        for (double shape : shapes) {
            invGaussian.setShape(shape);
            for (double p = 0.01; p < 0.99; p += 0.01) {
                double q = invGaussian.quantile(p);
                double p_hat = invGaussian.cdf(q);
                assertEquals(p, p_hat, 1.0e-3);
            }
        }
    }
}
package com.akiban.server.encoding;

import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Tests the YEAR encoder. The test elements pair the decoded display string
 * with the stored numeric value; from the data, storage is an offset from
 * 1900 ("0000" maps to 0, "1901" to 1, "2155" to 255).
 */
public class YearEncoderTest extends LongEncoderTestBase {

    public YearEncoderTest() {
        super(EncoderFactory.YEAR,
              new TestElement[] {
                  new TestElement("0000", 0),
                  new TestElement("1901", 1),
                  new TestElement("1950", 50),
                  new TestElement("2000", 100),
                  new TestElement("2028", 128),
                  new TestElement("2029", 129),
                  new TestElement("2155", 255),
                  // Boxed inputs should encode identically to primitives.
                  // Fix: use valueOf instead of the deprecated Integer(int)/
                  // Long(long) constructors (removed in modern JDKs).
                  new TestElement("2011", Integer.valueOf(111)),
                  new TestElement("1986", Long.valueOf(86))
              });
    }

    /** Short numeric strings are taken literally and zero-padded, not expanded to a century. */
    @Test
    public void partiallySpecified() {
        assertEquals("0002", encodeAndDecode("2"));
        assertEquals("0020", encodeAndDecode("20"));
        assertEquals("0201", encodeAndDecode("201"));
        assertEquals("2011", encodeAndDecode("2011"));
    }

    /** A numeric prefix followed by garbage must be rejected. */
    @Test(expected=IllegalArgumentException.class)
    public void invalidNumber() {
        encodeAndDecode("20111zebra");
    }

    /** Entirely non-numeric input must be rejected. */
    @Test(expected=IllegalArgumentException.class)
    public void noNumbers() {
        encodeAndDecode("zebra");
    }
}
package com.github.bot.curiosone.core.nlp; // SUPPRESS CHECKSTYLE AvoidStarImport import static org.junit.Assert.*; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.Test; public class WordTest { @Test public void testInstantiation() { Word w = new Word("colors", "color", new HashSet<>()); assertTrue(w instanceof Word); } @Test public void testGetText() { Word w = new Word("colors", "color", new HashSet<>()); assertEquals("colors", w.getText()); w = new Word("United States", "united_states", new HashSet<>()); assertEquals("United States", w.getText()); w = new Word("This is definitely not a word! But it works!", "42", new HashSet<>()); assertEquals("This is definitely not a word! But it works!", w.getText()); } @Test public void testGetLemma() { Word w = new Word("music", "music", new HashSet<>()); assertEquals("music", w.getLemma()); w = new Word("United Kingdom", "united_kingdom", new HashSet<>()); assertEquals("united_kingdom", w.getLemma()); w = new Word("This is definitely not a word! 
// --- Unit tests for Word's getLemma/getMeanings accessors and its equals/hashCode contract. ---
// NOTE(review): the package/import/class header and the opening of the first test method
// lie before this chunk; only the tail of that method is visible below.

// Tail of a test method (presumably a getLemma test) whose beginning is outside this chunk.
But it works!", "42", new HashSet<>());
	assertEquals("42", w.getLemma());
}

/** A word built with an empty meaning set reports no meanings. */
@Test
public void testGetMeanings() {
	Word w = new Word("totallyRANDOM", "totRand", new HashSet<>());
	assertEquals(0, w.getMeanings().size());
	Set<Meaning> m = new HashSet<>(
			Arrays.asList(
					new Meaning(POS.N, LEX.PLANT),
					new Meaning(POS.N, LEX.FOOD)));
	w = new Word("flower", "flower", m);
	// NOTE(review): the assertions below check the locally-built set `m`, not
	// w.getMeanings() — they would pass even if the constructor discarded the
	// meanings. Consider asserting on w.getMeanings() instead.
	assertTrue(m.contains(new Meaning(POS.N, LEX.PLANT)));
	assertTrue(m.contains(new Meaning(POS.N, LEX.FOOD)));
	m = new HashSet<>(
			Arrays.asList(
					new Meaning(POS.N, LEX.LOCATION),
					new Meaning(POS.N, LEX.OBJECT),
					new Meaning(POS.N, LEX.SHAPE)));
	w = new Word("sun", "sun", m);
	// NOTE(review): same issue — these test `m`, not the Word under test.
	assertTrue(m.contains(new Meaning(POS.N, LEX.OBJECT)));
	assertTrue(m.contains(new Meaning(POS.N, LEX.LOCATION)));
	assertTrue(m.contains(new Meaning(POS.N, LEX.SHAPE)));
}

/** equals must be reflexive: every word equals itself. */
@Test
public void testEqualsReflexive() {
	Word w = new Word("hi", "hi", new HashSet<>(Arrays.asList(new Meaning(POS.INTERJ, LEX.GENERIC))));
	assertEquals(w, w);
	w = new Word("The", "the", new HashSet<>(Arrays.asList(new Meaning(POS.DET, LEX.DEFINITE_ARTICLE),
			new Meaning(POS.AP, LEX.CONTACT))));
	assertEquals(w, w);
	w = new Word("42_The_Answer!", "42", new HashSet<>(Arrays.asList(new Meaning(POS.UNKN, LEX.CREATION),
			new Meaning(POS.APP, LEX.EMOTION),
			new Meaning(POS.VPP, LEX.MOTION))));
	assertEquals(w, w);
}

/** equals must be symmetric: w.equals(ww) iff ww.equals(w). */
@Test
public void testEqualsSymmetric() {
	Word w = new Word("symmetry", "symmetry", new HashSet<>());
	Word ww = new Word("symmetry", "symmetry", new HashSet<>());
	assertTrue(w.equals(ww) && ww.equals(w));
	w = new Word("watch", "watch", new HashSet<>(Arrays.asList(
			new Meaning(POS.V, LEX.PERCEPTION),
			new Meaning(POS.V, LEX.SOCIAL),
			new Meaning(POS.N, LEX.TIME))));
	ww = new Word("watch", "watch", new HashSet<>(Arrays.asList(
			new Meaning(POS.V, LEX.PERCEPTION),
			new Meaning(POS.V, LEX.SOCIAL),
			new Meaning(POS.N, LEX.TIME))));
	assertTrue(w.equals(ww) && ww.equals(w));
	w = new Word("car", "car", new HashSet<>(Arrays.asList(
			new Meaning(POS.N, LEX.OBJECT))));
	ww = new Word("car", "car", new HashSet<>(Arrays.asList(
			new Meaning(POS.N, LEX.OBJECT))));
	assertTrue(w.equals(ww) && ww.equals(w));
}

/** equals must be transitive across three equal instances. */
@Test
public void testEqualsTransitive() {
	Word w = new Word("symmetry", "symmetry", new HashSet<>());
	Word ww = new Word("symmetry", "symmetry", new HashSet<>());
	Word www = new Word("symmetry", "symmetry", new HashSet<>());
	assertTrue(w.equals(ww) && ww.equals(www) && www.equals(w));
	w = new Word("each other", "each_other", new HashSet<>(
			Arrays.asList(new Meaning(POS.PRON, LEX.RECIPROCAL))));
	ww = new Word("each other", "each_other", new HashSet<>(
			Arrays.asList(new Meaning(POS.PRON, LEX.RECIPROCAL))));
	www = new Word("each other", "each_other", new HashSet<>(
			Arrays.asList(new Meaning(POS.PRON, LEX.RECIPROCAL))));
	assertTrue(w.equals(ww) && ww.equals(www) && www.equals(w));
	w = new Word("YOU", "you", new HashSet<>(Arrays.asList(
			new Meaning(POS.PRON, LEX.RECIPROCAL),
			new Meaning(POS.PREP, LEX.GENERIC))));
	ww = new Word("YOU", "you", new HashSet<>(Arrays.asList(
			new Meaning(POS.PRON, LEX.RECIPROCAL),
			new Meaning(POS.PREP, LEX.GENERIC))));
	www = new Word("YOU", "you", new HashSet<>(Arrays.asList(
			new Meaning(POS.PRON, LEX.RECIPROCAL),
			new Meaning(POS.PREP, LEX.GENERIC))));
	assertTrue(w.equals(ww) && ww.equals(www) && www.equals(w));
}

/**
 * equals must be consistent, and (per the assertions below) apparently depends on
 * the surface text but not on the meaning set — equal text with different meanings
 * still compares equal.
 */
@Test
public void testEqualsConsistent() {
	Word w = new Word("consistent", "consistent", new HashSet<>());
	Word ww = new Word("consistent", "consistent", new HashSet<>());
	assertEquals(w, ww);
	// Case differences make words unequal.
	ww = new Word("CONSISTENT", "consistent", new HashSet<>());
	assertNotEquals(w, ww);
	w = new Word("testMeOut!", "testMeOut!", new HashSet<>());
	ww = new Word("testMeOut!", "testMeOut!", new HashSet<>());
	assertEquals(w, ww);
	// Same text/lemma but different meanings: still equal — meanings are ignored by equals.
	ww = new Word("testMeOut!", "testMeOut!", new HashSet<>(Arrays.asList(
			new Meaning(POS.NEG, LEX.CREATION))));
	assertEquals(w, ww);
	w = new Word("testMeOut!", "testMeOut!", new HashSet<>());
	ww = new Word("testMeOut!", "testMeOut!", new HashSet<>());
	assertEquals(w, ww);
	// Different surface text (same lemma) is unequal.
	ww = new Word("TEST ME OUTT!!!", "testMeOut!", new HashSet<>(Arrays.asList(
			new Meaning(POS.NEG, LEX.CREATION))));
	assertNotEquals(w, ww);
}

/** equals(null) must be false. */
@Test
public void testEqualsNullComparison() {
	Word w = new Word("null value", "null", new HashSet<>());
	assertNotEquals(null, w);
	w = new Word("test", "test", new HashSet<>(Arrays.asList(
			new Meaning(POS.N, LEX.PROCESS))));
	assertNotEquals(null, w);
	w = new Word("BOTH", "both", new HashSet<>(Arrays.asList(
			new Meaning(POS.N, LEX.PROCESS),
			new Meaning(POS.N, LEX.QUANTITY))));
	assertNotEquals(null, w);
}

/** A Word never equals an instance of an unrelated type. */
@Test
public void testEqualsOtherObj() {
	Word w = new Word("object", "OBJ", new HashSet<>());
	assertNotEquals(w, Arrays.asList("ob", "je", "ct"));
	w = new Word("gold", "AU", new HashSet<>(Arrays.asList(
			new Meaning(POS.N, LEX.SUBSTANCE))));
	assertNotEquals(w, new StringBuffer("gold"));
	w = new Word("gold", "AU", new HashSet<>(Arrays.asList(
			new Meaning(POS.N, LEX.SUBSTANCE),
			new Meaning(POS.V, LEX.TIME))));
	// NOTE(review): new Double(...) is deprecated since Java 9 — Double.valueOf(42.42)
	// (or autoboxing) would be preferred.
	assertNotEquals(w, new Double(42.42));
}

/** Equal objects must have equal hash codes. */
@Test
public void testEqualsHashCodeContract() {
	Word w = new Word("contract", "contract", new HashSet<>(Arrays.asList(
			new Meaning(POS.N, LEX.MOTIVE))));
	Word ww = new Word("contract", "contract", new HashSet<>());
	assertEquals(w, ww);
	assertEquals(w.hashCode(), ww.hashCode());
	w = new Word("hello", "hello", new HashSet<>());
	ww = new Word("hello", "hello", new HashSet<>());
	assertEquals(w, ww);
	assertEquals(w.hashCode(), ww.hashCode());
	w = new Word("HELLO", "hello", new HashSet<>());
	ww = new Word("hello", "hello", new HashSet<>());
	assertNotEquals(w, ww);
	// NOTE(review): the hashCode contract does NOT require unequal objects to have
	// different hash codes — this assertion is implementation-specific and could
	// fail on a legal hashCode change (collision).
	assertNotEquals(w.hashCode(), ww.hashCode());
	w = new Word("BEAutiFUL", "beautiful", new HashSet<>(Arrays.asList(
			new Meaning(POS.AP, LEX.PERSONAL_SUBJECTIVE))));
	ww = new Word("yo!", "yo", new HashSet<>());
	assertNotEquals(w, ww);
	assertNotEquals(w.hashCode(), ww.hashCode());
}

/** hashCode agreement must hold across three equal instances. */
@Test
public void testHashCodeTransitive() {
	Word w = new Word("Mark", "mark", new HashSet<>(Arrays.asList(new Meaning(POS.N, LEX.PERSON))));
	Word ww = new Word("Mark", "mark", new HashSet<>(Arrays.asList(new Meaning(POS.N, LEX.PERSON))));
	Word www = new Word("Mark", "mark", new HashSet<>(Arrays.asList(new Meaning(POS.N, LEX.PERSON))));
	assertTrue(w.hashCode() == ww.hashCode() && ww.hashCode() == www.hashCode()
			&& www.hashCode() == w.hashCode());
	w = new Word("42", "fortytwo", new HashSet<>());
	ww = new Word("42", "fortytwo", new HashSet<>());
	www = new Word("42", "fortytwo", new HashSet<>());
	assertTrue(w.hashCode() == ww.hashCode() && ww.hashCode() == www.hashCode()
			&& www.hashCode() == w.hashCode());
	w = new Word("", "", new HashSet<>(Arrays.asList(new Meaning(POS.NP, LEX.ACT))));
	ww = new Word("", "", new HashSet<>(Arrays.asList(new Meaning(POS.NP, LEX.ACT))));
	www = new Word("", "", new HashSet<>(Arrays.asList(new Meaning(POS.NP, LEX.ACT))));
	assertTrue(w.hashCode() == ww.hashCode() && ww.hashCode() == www.hashCode()
			&& www.hashCode() == w.hashCode());
}

/**
 * Repeated hashCode calls on equal words agree; meanings do not affect the hash.
 * NOTE(review): method name has a typo — should be testHashCodeConsistent; the
 * final assertion checks equals() rather than hashCode(), which looks unintended.
 */
@Test
public void testHashCodConsistent() {
	Word w = new Word("color", "color", new HashSet<>());
	Word ww = new Word("color", "color", new HashSet<>());
	assertEquals(w.hashCode(), ww.hashCode());
	ww = new Word("color", "color", new HashSet<>(Arrays.asList(new Meaning(POS.N, LEX.SUBSTANCE))));
	assertEquals(w.hashCode(), ww.hashCode());
	ww = new Word("COLOR!", "color", new HashSet<>());
	assertNotEquals(w, ww);
}
}
package com.manmoe.example.test; import com.google.common.base.Predicate; import com.manmoe.example.model.PopupPage; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.remote.RemoteWebDriver; import org.openqa.selenium.remote.SessionId; import org.openqa.selenium.support.ui.WebDriverWait; import org.testng.ITestResult; import org.testng.annotations.BeforeMethod; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import us.monoid.web.Resty; import java.io.IOException; import static org.mockito.Mockito.*; import static org.testng.Assert.*; public class FirespottingITTest { /** * Our test object. */ private FirespottingIT firespottingIT; private PopupPage popupPage = mock(PopupPage.class); /** * Method for setting up the test environment. */ @BeforeMethod public void setUp() { this.firespottingIT = spy(new FirespottingIT()); this.firespottingIT.popupPage = this.popupPage; } /** * Let's check, if we set up our test environment properly. */ @Test public void testSetUp() { RemoteWebDriver remoteWebDriver = mock(RemoteWebDriver.class); doReturn(remoteWebDriver).when(firespottingIT).getWebDriver(); // run test firespottingIT.setUp(); // check, if all went well assertNotNull(firespottingIT.popupPage); } /** * Check the tear down. 
*/ @Test public void testTearDown() throws IOException { // insert mock to test object firespottingIT.popupPage = this.popupPage; // mock rest client firespottingIT.restClient = mock(Resty.class); // mock some objects for session key RemoteWebDriver remoteWebDriver = mock(RemoteWebDriver.class); SessionId sessionId = mock(SessionId.class); when(popupPage.getDriver()).thenReturn(remoteWebDriver); when(remoteWebDriver.getSessionId()).thenReturn(sessionId); when(sessionId.toString()).thenReturn("72345863"); doReturn("sauceUsername").when(firespottingIT).getSystemVariable("SAUCE_USERNAME"); doReturn("sauceKey").when(firespottingIT).getSystemVariable("SAUCE_ACCESS_KEY"); doReturn("platform").when(firespottingIT).getSystemVariable("PLATFORM"); doReturn("travisBuildNr").when(firespottingIT).getSystemVariable("TRAVIS_BUILD_NUMBER"); // run test method firespottingIT.tearDown(); // is the method called to tear down correctly? verify(popupPage, atLeastOnce()).tearDown(); // verify rest client actions if environment variables are set // @TODO: add better verification! (no more anyStrings; check the values!) verify(firespottingIT.restClient, atLeastOnce()).authenticate(anyString(), anyString(), anyString().toCharArray()); verify(firespottingIT.restClient, atLeastOnce()).withHeader("Content-Type", "application/json"); } @Test(dataProvider = "reportDataProvider") public void testReport(boolean testSuccess) { ITestResult itResultMock = mock(ITestResult.class); when(itResultMock.isSuccess()).thenReturn(testSuccess); // set initial value of test result firespottingIT.testResult = true; // run test method firespottingIT.report(itResultMock); assertEquals(firespottingIT.testResult, testSuccess); } @DataProvider public Object[][] reportDataProvider() { return new Object[][] { {true}, {false} }; } /** * We check the isInstalled method. 
*/ @Test public void testIsInstalled() { firespottingIT.popupPage = this.popupPage; when(popupPage.getId()).thenReturn("testId"); firespottingIT.isInstalled(); } @Test public void testPopupTest() { when(popupPage.getTitle()).thenReturn("Firespotting!"); firespottingIT.testPopup(); verify(popupPage, atLeastOnce()).open(); verify(popupPage, atLeastOnce()).getTitle(); } @Test public void testEntryTest() { String linkText = "linkText"; // mocking for every entry for (int i = 1; i <= FirespottingIT.ENTRY_LIST_LENGTH; i++) { when(popupPage.getEntryTitle(i)).thenReturn(linkText + i); } // call test method firespottingIT.testEntry(); // verifying for every entry for (int i = 1; i <= FirespottingIT.ENTRY_LIST_LENGTH; i++) { verify(popupPage, atLeastOnce()).clickOnEntryLink(linkText + i); verify(popupPage, atLeastOnce()).getBack(); } } @Test public void testIssuesTest() { RemoteWebDriver driver = mock(RemoteWebDriver.class); WebDriver.Navigation navigation = mock(WebDriver.Navigation.class); WebElement issuesElement = mock(WebElement.class); // popupPage.getDriver().navigate()--> .refresh(); follows in verifying section when(popupPage.getDriver()).thenReturn(driver); when(driver.navigate()).thenReturn(navigation); // popupPage.getIssues()-->.click(); follows in verifying section when(popupPage.getIssues()).thenReturn(issuesElement); when(driver.getTitle()).thenReturn(FirespottingIT.ISSUES_PAGE_TITLE); firespottingIT.testIssues(); verify(popupPage, atLeastOnce()).open(); verify(navigation, atLeastOnce()).refresh(); verify(issuesElement, atLeastOnce()).click(); } @Test public void testRefreshTest() throws InterruptedException { WebElement refreshLink = mock(WebElement.class); WebDriverWait driverWait = mock(WebDriverWait.class); when(popupPage.getRefreshLink()).thenReturn(refreshLink); when(popupPage.getTitle()).thenReturn("Firespotting!"); doReturn(driverWait).when(firespottingIT).createWebDriverWait(any(WebDriver.class), eq(FirespottingIT.TIME_TO_WAIT_FOR_REFRESH)); // run 
test method firespottingIT.testRefresh(); verify(popupPage, atLeastOnce()).open(); verify(refreshLink, atLeastOnce()).click(); verify(driverWait, atLeastOnce()).until(any(Predicate.class)); } @Test public void testOpenOptionsTest() { RemoteWebDriver webDriverMock = mock(RemoteWebDriver.class); WebDriver.Navigation navigateMock = mock(WebDriver.Navigation.class); WebElement optionsLinkMock = mock(WebElement.class); // popupPage.getDriver().navigate().refresh(); when(popupPage.getDriver()).thenReturn(webDriverMock); when(webDriverMock.navigate()).thenReturn(navigateMock); // popupPage.getOptionsLink().click(); when(popupPage.getOptionsLink()).thenReturn(optionsLinkMock); // popupPage.getTitle() when(popupPage.getTitle()).thenReturn("Options"); // run test method firespottingIT.testOpenOptions(); verify(popupPage, atLeastOnce()).open(); verify(navigateMock, atLeastOnce()).refresh(); verify(optionsLinkMock, atLeastOnce()).click(); verify(popupPage, atLeastOnce()).switchToNewTab(); } }
package com.uwetrottmann.trakt5.services;

import com.uwetrottmann.trakt5.BaseTestCase;
import com.uwetrottmann.trakt5.TestData;
import com.uwetrottmann.trakt5.entities.BaseEpisode;
import com.uwetrottmann.trakt5.entities.BaseSeason;
import com.uwetrottmann.trakt5.entities.BaseShow;
import com.uwetrottmann.trakt5.entities.Comment;
import com.uwetrottmann.trakt5.entities.Credits;
import com.uwetrottmann.trakt5.entities.Ratings;
import com.uwetrottmann.trakt5.entities.Show;
import com.uwetrottmann.trakt5.entities.Stats;
import com.uwetrottmann.trakt5.entities.Translation;
import com.uwetrottmann.trakt5.entities.TrendingShow;
import com.uwetrottmann.trakt5.enums.Extended;
import com.uwetrottmann.trakt5.enums.Type;
import org.junit.Test;

import java.io.IOException;
import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for the trakt "shows" endpoints. These run against the
 * live API via {@code executeCall} (inherited from {@link BaseTestCase}), so
 * results depend on current server data.
 */
public class ShowsTest extends BaseTestCase {

    /** Popular shows: page 2, default page size; every entry must be fully populated. */
    @Test
    public void test_popular() throws IOException {
        List<Show> shows = executeCall(getTrakt().shows().popular(2, null, null));
        assertThat(shows).isNotNull();
        assertThat(shows.size()).isLessThanOrEqualTo(DEFAULT_PAGE_SIZE);
        for (Show show : shows) {
            assertShowNotNull(show);
        }
    }

    /** Trending shows: each entry carries a watcher count plus a populated show. */
    @Test
    public void test_trending() throws IOException {
        List<TrendingShow> shows = executeCall(getTrakt().shows().trending(1, null, null));
        assertThat(shows).isNotNull();
        assertThat(shows.size()).isLessThanOrEqualTo(DEFAULT_PAGE_SIZE);
        for (TrendingShow show : shows) {
            assertThat(show.watchers).isNotNull();
            assertShowNotNull(show.show);
        }
    }

    /** Asserts the minimal fields every returned show must have. */
    private void assertShowNotNull(Show show) {
        assertThat(show).isNotNull();
        assertThat(show.title).isNotEmpty();
        assertThat(show.ids).isNotNull();
        assertThat(show.ids.trakt).isNotNull();
        assertThat(show.year).isNotNull();
    }

    /** Summary lookup by slug returns the expected test show. */
    @Test
    public void test_summary_slug() throws IOException {
        Show show = executeCall(getTrakt().shows().summary(TestData.SHOW_SLUG, Extended.FULL));
        assertTestShow(show);
    }

    /** Summary lookup by numeric trakt id (passed as a string) returns the same show. */
    @Test
    public void test_summary_trakt_id() throws IOException {
        Show show = executeCall(
                getTrakt().shows().summary(String.valueOf(TestData.SHOW_TRAKT_ID), Extended.FULL));
        assertTestShow(show);
    }

    /** Verifies every id mapping of the canonical test show. */
    private void assertTestShow(Show show) {
        assertThat(show).isNotNull();
        assertThat(show.title).isEqualTo(TestData.SHOW_TITLE);
        assertThat(show.year).isEqualTo(TestData.SHOW_YEAR);
        assertThat(show.ids).isNotNull();
        assertThat(show.ids.trakt).isEqualTo(TestData.SHOW_TRAKT_ID);
        assertThat(show.ids.slug).isEqualTo(TestData.SHOW_SLUG);
        assertThat(show.ids.imdb).isEqualTo(TestData.SHOW_IMDB_ID);
        assertThat(show.ids.tmdb).isEqualTo(TestData.SHOW_TMDB_ID);
        assertThat(show.ids.tvdb).isEqualTo(TestData.SHOW_TVDB_ID);
        assertThat(show.ids.tvrage).isEqualTo(TestData.SHOW_TVRAGE_ID);
    }

    /** All translations of a show expose a non-empty language code. */
    @Test
    public void test_translations() throws IOException {
        List<Translation> translations = executeCall(getTrakt().shows().translations("breaking-bad"));
        assertThat(translations).isNotNull();
        for (Translation translation : translations) {
            assertThat(translation.language).isNotEmpty();
        }
    }

    /** A single-language translation request returns exactly that language. */
    @Test
    public void test_translation() throws IOException {
        List<Translation> translations = executeCall(getTrakt().shows().translation("breaking-bad", "de"));
        // we know that Breaking Bad has a German translation, otherwise this test would fail
        assertThat(translations).isNotNull();
        assertThat(translations).hasSize(1);
        assertThat(translations.get(0).language).isEqualTo("de");
    }

    /** First page of comments respects the default page size. */
    @Test
    public void test_comments() throws IOException {
        List<Comment> comments = executeCall(getTrakt().shows().comments(TestData.SHOW_SLUG, 1, null, null));
        assertThat(comments).isNotNull();
        assertThat(comments.size()).isLessThanOrEqualTo(DEFAULT_PAGE_SIZE);
    }

    /** Cast and crew checks are delegated to BaseTestCase helpers. */
    @Test
    public void test_people() throws IOException {
        Credits credits = executeCall(getTrakt().shows().people(TestData.SHOW_SLUG));
        assertCast(credits, Type.PERSON);
        assertCrew(credits, Type.PERSON);
    }

    /** Rating distribution sanity check (helper inherited from BaseTestCase). */
    @Test
    public void test_ratings() throws IOException {
        Ratings ratings = executeCall(getTrakt().shows().ratings(TestData.SHOW_SLUG));
        assertRatings(ratings);
    }

    /** Show statistics sanity check (helper inherited from BaseTestCase). */
    @Test
    public void test_stats() throws IOException {
        Stats stats = executeCall(getTrakt().shows().stats(TestData.SHOW_SLUG));
        assertShowStats(stats);
    }

    /** Collection progress for the authenticated user must have a collected timestamp. */
    @Test
    public void test_collected_progress() throws IOException {
        BaseShow show = executeCall(getTrakt()
                .shows()
                .collectedProgress(TestData.SHOW_SLUG, null, null, null)
        );
        assertThat(show).isNotNull();
        assertThat(show.last_collected_at).isNotNull();
        assertProgress(show);
    }

    /** Watched progress for the authenticated user must have a watched timestamp. */
    @Test
    public void test_watched_progress() throws IOException {
        BaseShow show = executeCall(getTrakt()
                .shows()
                .watchedProgress(TestData.SHOW_SLUG, null, null, null)
        );
        assertThat(show).isNotNull();
        assertThat(show.last_watched_at).isNotNull();
        assertProgress(show);
    }

    /**
     * Common progress assertions.
     * NOTE(review): these hard-code live-data facts (5 seasons, 10 episodes in
     * season 1, >30 aired) for the test show — the assertions break whenever
     * the show airs new content or the test account's history changes.
     */
    private void assertProgress(BaseShow show) {
        assertThat(show.aired).isGreaterThan(30);
        assertThat(show.completed).isGreaterThanOrEqualTo(1);

        // Killjoys has 5 aired seasons
        assertThat(show.seasons).isNotNull();
        assertThat(show.seasons).hasSize(5);

        BaseSeason season = show.seasons.get(0);
        assertThat(season.number).isEqualTo(1);
        // all aired
        assertThat(season.aired).isEqualTo(10);
        // always at least 1 watched
        assertThat(season.completed).isGreaterThanOrEqualTo(1);

        // episode 1 should always be watched
        assertThat(season.episodes).isNotNull();
        BaseEpisode episode = season.episodes.get(0);
        assertThat(episode.number).isEqualTo(1);
        assertThat(episode.completed).isTrue();
    }

    /** Related shows are populated and respect the default page size. */
    @Test
    public void test_related() throws IOException {
        List<Show> related = executeCall(getTrakt().shows().related(TestData.SHOW_SLUG, 1, null, null));
        assertThat(related).isNotNull();
        assertThat(related.size()).isLessThanOrEqualTo(DEFAULT_PAGE_SIZE);
        for (Show show : related) {
            assertShowNotNull(show);
        }
    }
}
package ecplugins.openstack;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.BeforeClass;
import org.junit.Test;
import org.openstack4j.api.OSClient;
import org.openstack4j.model.compute.Keypair;
import org.openstack4j.model.heat.Stack;
import org.openstack4j.openstack.OSFactory;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

/**
 * End-to-end tests for the EC-OpenStack plugin: procedures are launched via the
 * Commander REST API and their effects verified directly against OpenStack
 * through openstack4j. All endpoints/credentials come from system properties.
 *
 * NOTE(review): DefaultHttpClient is deprecated in later HttpClient versions,
 * and credentials are embedded in request URLs (user:password@host) — they can
 * leak into logs; consider header-based auth.
 */
@SuppressWarnings("HardCodedStringLiteral")
public class OpenStackProvisionTest {

	// Direct OpenStack client used to verify plugin results out-of-band.
	private static OSClient m_osClient;
	private final static String COMMANDER_SERVER = System.getProperty("COMMANDER_SERVER");
	private final static String COMMANDER_USER = System.getProperty("COMMANDER_USER");
	private final static String COMMANDER_PASSWORD = System.getProperty("COMMANDER_PASSWORD");
	private final static String IDENTITY_URL = System.getProperty("OPENSTACK_IDENTITY_URL");
	private final static String USER = System.getProperty("OPENSTACK_USER");
	private final static String PASSWORD = System.getProperty("OPENSTACK_PASSWORD");
	private final static String TENANTID = System.getProperty("OPENSTACK_TENANTID");
	private final static String PLUGIN_VERSION = System.getProperty("PLUGIN_VERSION");
	// Poll interval in milliseconds for stack-deletion waits.
	private final static long WAIT_TIME = 100;

	/**
	 * Authenticates against OpenStack once, then recreates the plugin
	 * configuration from scratch (delete + create) for a clean test run.
	 */
	@BeforeClass
	public static void setup() {
		m_osClient = OSFactory.builder()
				.endpoint(IDENTITY_URL)
				.credentials(USER, PASSWORD)
				.tenantId(TENANTID)
				.authenticate();
		deleteConfiguration();
		createConfiguration();
	}

	/**
	 * Runs the CreateKeyPair procedure and verifies the keypair exists in
	 * OpenStack afterwards.
	 */
	@Test
	public void testkeyPairCreation() {
		String keyNameToCreate = "automatedTest-testkeyPairCreation";

		// Clean the environment / clean result from previous runs
		m_osClient.compute().keypairs().delete(keyNameToCreate);

		JSONObject jo = new JSONObject();
		try {
			jo.put("projectName", "EC-OpenStack-" + PLUGIN_VERSION);
			jo.put("procedureName", "CreateKeyPair");
			JSONArray actualParameterArray = new JSONArray();
			actualParameterArray.put(new JSONObject()
					.put("value", "hp")
					.put("actualParameterName", "connection_config"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "keyname")
					.put("value", keyNameToCreate));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "tenant_id")
					.put("value", TENANTID));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "tag")
					.put("value", "1"));
			jo.put("actualParameter", actualParameterArray);
		} catch (JSONException e) {
			e.printStackTrace();
		}

		String jobId = callRunProcedure(jo);
		String response = waitForJob(jobId);

		// Check job status
		assertEquals("Job completed without errors", "success", response);

		// Get the key pair from OpenStack
		Keypair keypair = m_osClient.compute().keypairs().get(keyNameToCreate);

		// Assert keypair is not null
		assertNotNull(keypair);

		// Grab the keypair name and check its name
		assertEquals("Keypair name is set correctly", keyNameToCreate, keypair.getName());
	}

	/**
	 * Exercises the full stack lifecycle: CreateStack, UpdateStack and
	 * DeleteStack, verifying state in OpenStack after each procedure.
	 */
	@Test
	public void testOrchestrationServices() {
		String stackNameToCreate = "automatedTest-testStackCreation";
		Stack stackFromOpenstack = null;
		String stackId = "";

		// Clean the environment / clean result from previous runs
		System.out.println("Cleaning up the environment.");
		for (Stack stack : m_osClient.heat().stacks().list()) {
			if (stack.getName().equalsIgnoreCase(stackNameToCreate)) {
				System.out.println("Found the stack with name [" + stackNameToCreate + "] already exists.Deleting it.");
				m_osClient.heat().stacks().delete(stackNameToCreate, stack.getId());

				// wait for stack to get completely deleted.
				System.out.println("Waiting for stack to get completely deleted.");
				Stack details = m_osClient.heat().stacks().getDetails(stackNameToCreate, stack.getId());
				try {
					// NOTE(review): if getDetails() ever returns null once the stack is
					// gone, this loop throws an NPE instead of terminating cleanly.
					while (!details.getStatus().toString().equalsIgnoreCase("DELETE_COMPLETE")) {
						Thread.sleep(WAIT_TIME);
						details = m_osClient.heat().stacks().getDetails(stackNameToCreate, stack.getId());
					}
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
				System.out.println("Stack [" + stackNameToCreate + "] deleted successfully.");
			}
		}
		System.out.println("Cleaned up the environment.");

		{
			// limit the variable scope so that same variable names like param1, param2 ...
			// can be used in the same Junit test.
			// Scope : Create Stack

			// Make image_id and key name configurable.
			// NOTE(review): the "&gt;" inside user_data looks like an HTML-escaping
			// artifact of ">" (shell redirection) — confirm the intended template.
			String template = "{\"heat_template_version\": \"2013-05-23\",\"description\": \"Simple template to test heat commands\", \"parameters\": { \"flavor\": { \"default\": \"m1.tiny\",\"type\": \"string\"}},\"resources\": {\"StackInstance\": {\"type\":\"OS::Nova::Server\",\"properties\": { \"key_name\": \"secondKey\",\"flavor\": {\"get_param\": \"flavor\"},\"image\": \"f6289218-995b-4471-a6e0-8f437f506ecc\",\"user_data\": \"#!/bin/bash -xv\\necho \\\"hello world\\\" &gt; /root/hello-world.txt\\n\"}}}}";
			JSONObject param1 = new JSONObject();
			JSONObject param2 = new JSONObject();
			JSONObject param3 = new JSONObject();
			JSONObject param4 = new JSONObject();
			JSONObject param5 = new JSONObject();
			JSONObject param6 = new JSONObject();
			JSONObject jo = new JSONObject();
			try {
				jo.put("projectName", "EC-OpenStack-" + PLUGIN_VERSION);
				jo.put("procedureName", "CreateStack");
				param1.put("value", "hp");
				param1.put("actualParameterName", "connection_config");
				param2.put("actualParameterName", "tenant_id");
				param2.put("value", TENANTID);
				param3.put("actualParameterName", "stack_name");
				param3.put("value", stackNameToCreate);
				param4.put("actualParameterName", "template");
				param4.put("value", template);
				param5.put("actualParameterName", "template_url");
				param5.put("value", "");
				param6.put("actualParameterName", "tag");
				param6.put("value", "1");
				JSONArray actualParameterArray = new JSONArray();
				actualParameterArray.put(param1);
				actualParameterArray.put(param2);
				actualParameterArray.put(param3);
				actualParameterArray.put(param4);
				actualParameterArray.put(param5);
				actualParameterArray.put(param6);
				jo.put("actualParameter", actualParameterArray);
			} catch (JSONException e) {
				e.printStackTrace();
			}

			System.out.println("Creating stack [" + stackNameToCreate + "] with template : ." + template);
			String jobId = callRunProcedure(jo);
			String response = waitForJob(jobId);

			// Check job status
			assertEquals("Job completed without errors", "success", response);

			// Get the stack from OpenStack
			for (Stack stack : m_osClient.heat().stacks().list()) {
				if (stack.getName().equalsIgnoreCase(stackNameToCreate)) {
					stackFromOpenstack = stack;
					stackId = stackFromOpenstack.getId();
				}
			}

			// Assert stack is not null
			assertNotNull(stackFromOpenstack);

			// Grab the stack details and verify it.
			assertEquals("Stack name is set correctly", stackNameToCreate, stackFromOpenstack.getName());
			assertEquals("Stack status is correct", "CREATE_COMPLETE", stackFromOpenstack.getStatus().toString());
		} // end Scope : Create Stack

		{
			// Scope : Update Stack

			// Make image_id and key name configurable.
			String template = "{\"heat_template_version\": \"2013-05-23\",\"description\": \"Simple template to test heat commands\", \"parameters\": { \"flavor\": { \"default\": \"m1.tiny\",\"type\": \"string\"}},\"resources\": {\"StackInstance\": {\"type\":\"OS::Nova::Server\",\"properties\": { \"key_name\": \"secondKey\",\"flavor\": {\"get_param\": \"flavor\"},\"image\": \"f6289218-995b-4471-a6e0-8f437f506ecc\",\"user_data\": \"#!/bin/bash -xv\\necho \\\"hello world\\\" &gt; /root/hello-world.txt\\n\"}}}}";
			System.out.println("Updating stack to template : " + template);

			// Assert that before update of stack, updated time is null
			assertNull(m_osClient.heat().stacks().getDetails(stackNameToCreate, stackId).getUpdatedTime());

			JSONObject param1 = new JSONObject();
			JSONObject param2 = new JSONObject();
			JSONObject param3 = new JSONObject();
			JSONObject param4 = new JSONObject();
			JSONObject param5 = new JSONObject();
			JSONObject param6 = new JSONObject();
			JSONObject param7 = new JSONObject();
			JSONObject jo = new JSONObject();
			try {
				jo.put("projectName", "EC-OpenStack-" + PLUGIN_VERSION);
				jo.put("procedureName", "UpdateStack");
				param1.put("value", "hp");
				param1.put("actualParameterName", "connection_config");
				param2.put("actualParameterName", "tenant_id");
				param2.put("value", TENANTID);
				param3.put("actualParameterName", "stack_name");
				param3.put("value", stackNameToCreate);
				param4.put("actualParameterName", "stack_id");
				param4.put("value", stackId);
				param5.put("actualParameterName", "template");
				param5.put("value", template);
				param6.put("actualParameterName", "template_url");
				param6.put("value", "");
				param7.put("actualParameterName", "tag");
				param7.put("value", "1");
				JSONArray actualParameterArray = new JSONArray();
				actualParameterArray.put(param1);
				actualParameterArray.put(param2);
				actualParameterArray.put(param3);
				actualParameterArray.put(param4);
				actualParameterArray.put(param5);
				actualParameterArray.put(param6);
				actualParameterArray.put(param7);
				jo.put("actualParameter", actualParameterArray);
			} catch (JSONException e) {
				e.printStackTrace();
			}

			System.out.println("Updating stack [" + stackNameToCreate + "] to template : " + template);
			String jobId = callRunProcedure(jo);
			String response = waitForJob(jobId);

			// Check job status
			assertEquals("Job completed without errors", "success", response);

			// Assert that after updation of stack , updated time is not null
			assertNotNull(m_osClient.heat().stacks().getDetails(stackNameToCreate, stackId).getUpdatedTime());
			assertEquals("UPDATE_COMPLETE", m_osClient.heat().stacks().getDetails(stackNameToCreate, stackId).getStatus().toString());
		} // end Scope : Update Stack

		{
			// Scope : Delete Stack
			JSONObject param1 = new JSONObject();
			JSONObject param2 = new JSONObject();
			JSONObject param3 = new JSONObject();
			JSONObject param4 = new JSONObject();
			JSONObject param5 = new JSONObject();
			JSONObject jo = new JSONObject();
			try {
				jo.put("projectName", "EC-OpenStack-" + PLUGIN_VERSION);
				jo.put("procedureName", "DeleteStack");
				param1.put("value", "hp");
				param1.put("actualParameterName", "connection_config");
				param2.put("actualParameterName", "tenant_id");
				param2.put("value", TENANTID);
				param3.put("actualParameterName", "stack_name");
				param3.put("value", stackNameToCreate);
				param4.put("actualParameterName", "stack_id");
				param4.put("value", stackId);
				param5.put("actualParameterName", "tag");
				param5.put("value", "1");
				JSONArray actualParameterArray = new JSONArray();
				actualParameterArray.put(param1);
				actualParameterArray.put(param2);
				actualParameterArray.put(param3);
				actualParameterArray.put(param4);
				actualParameterArray.put(param5);
				jo.put("actualParameter", actualParameterArray);
			} catch (JSONException e) {
				e.printStackTrace();
			}

			System.out.println("Deleting stack [" + stackNameToCreate + "].");
			String jobId = callRunProcedure(jo);
			String response = waitForJob(jobId);

			// Check job status
			assertEquals("Job completed without errors", "success", response);

			// Assert that the stack with name "automatedTest-testStackCreation" no longer exists.
			stackFromOpenstack = null;
			for (Stack stack : m_osClient.heat().stacks().list()) {
				if (stack.getName().equalsIgnoreCase(stackNameToCreate)) {
					stackFromOpenstack = stack;
				}
			}
			assertNull(stackFromOpenstack);
		} // end Scope : Delete Stack
	}

	/**
	 * callRunProcedure: posts a runProcedure request to the Commander REST API.
	 *
	 * @param jo request payload (projectName, procedureName, actualParameter)
	 * @return the jobId of the job launched by runProcedure, or "" on failure
	 */
	public static String callRunProcedure(JSONObject jo) {
		HttpClient httpClient = new DefaultHttpClient();
		JSONObject result = null;
		try {
			HttpPost httpPostRequest = new HttpPost("http://" + COMMANDER_USER + ":" + COMMANDER_PASSWORD
					+ "@" + COMMANDER_SERVER + ":8000/rest/v1.0/jobs?request=runProcedure");
			StringEntity input = new StringEntity(jo.toString());
			input.setContentType("application/json");
			httpPostRequest.setEntity(input);
			HttpResponse httpResponse = httpClient.execute(httpPostRequest);
			result = new JSONObject(EntityUtils.toString(httpResponse.getEntity()));
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			httpClient.getConnectionManager().shutdown();
		}
		if (result != null) {
			try {
				return result.getString("jobId");
			} catch (JSONException e) {
				e.printStackTrace();
			}
		}
		return "";
	}

	/**
	 * getProperty: reads a Commander property value over REST.
	 *
	 * @param path a property path
	 * @return the value of the property, or "" on failure
	 */
	public static String getProperty(String path) {
		HttpClient httpClient = new DefaultHttpClient();
		JSONObject result = null;
		try {
			HttpGet httpPostRequest = new HttpGet("http://" + COMMANDER_USER + ":" + COMMANDER_PASSWORD
					+ "@" + COMMANDER_SERVER + ":8000/rest/v1.0/properties/" + path);
			HttpResponse httpResponse = httpClient.execute(httpPostRequest);
			result = new JSONObject(EntityUtils.toString(httpResponse.getEntity()));
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			httpClient.getConnectionManager().shutdown();
		}
		if (result != null) {
			try {
				return result.getJSONObject("property").getString("value");
			} catch (JSONException e) {
				e.printStackTrace();
			}
		}
		return "";
	}

	/**
	 * waitForJob: Waits for job to be completed and reports outcome.
	 *
	 * NOTE(review): this polls in a tight loop with no delay — it hammers the
	 * server; a Thread.sleep(WAIT_TIME) inside the loop would be kinder. Also,
	 * performHTTPGet returns null on failure, which would NPE here, and there is
	 * no timeout — a stuck job hangs the test forever.
	 *
	 * @param jobId id of the job to wait for
	 * @return outcome of job ("success"/"error"...), or "" on JSON failure
	 */
	public static String waitForJob(String jobId) {
		String url = "http://" + COMMANDER_USER + ":" + COMMANDER_PASSWORD + "@" + COMMANDER_SERVER
				+ ":8000/rest/v1.0/jobs/" + jobId + "?request=getJobStatus";
		JSONObject jsonObject = performHTTPGet(url);
		try {
			while (!jsonObject.getString("status").equalsIgnoreCase("completed")) {
				jsonObject = performHTTPGet(url);
			}
			return jsonObject.getString("outcome");
		} catch (JSONException e) {
			e.printStackTrace();
		}
		return "";
	}

	/**
	 * Wrapper around a HTTP GET to a REST service.
	 *
	 * @param url fully-qualified URL (includes credentials)
	 * @return JSONObject parsed from the response body, or null on any failure
	 */
	private static JSONObject performHTTPGet(String url) {
		HttpClient httpClient = new DefaultHttpClient();
		HttpResponse httpResponse = null;
		try {
			HttpGet httpGetRequest = new HttpGet(url);
			httpResponse = httpClient.execute(httpGetRequest);
			return new JSONObject(EntityUtils.toString(httpResponse.getEntity()));
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			httpClient.getConnectionManager().shutdown();
		}
		return null;
	}

	/**
	 * Delete the openstack configuration used for this test suite (clear previous runs).
	 */
	private static void deleteConfiguration() {
		String jobId = "";
		try {
			JSONObject jo = new JSONObject()
					.put("projectName", "EC-OpenStack-" + PLUGIN_VERSION)
					.put("procedureName", "DeleteConfiguration");
			JSONArray actualParameterArray = new JSONArray();
			actualParameterArray.put(new JSONObject()
					.put("value", "hp")
					.put("actualParameterName", "config"));
			jo.put("actualParameter", actualParameterArray);
			jobId = callRunProcedure(jo);
		} catch (JSONException e) {
			e.printStackTrace();
		}

		// Block on job completion
		waitForJob(jobId);

		// Do not check job status. Delete will error if it does not exist
		// which is OK since that is the expected state.
	}

	/**
	 * Create the openstack configuration used for this test suite.
	 */
	private static void createConfiguration() {
		String response = "";
		try {
			JSONObject parentJSONObject = new JSONObject()
					.put("projectName", "EC-OpenStack-" + PLUGIN_VERSION)
					.put("procedureName", "CreateConfiguration");
			JSONArray actualParameterArray = new JSONArray();
			actualParameterArray.put(new JSONObject()
					.put("value", "hp")
					.put("actualParameterName", "config"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "identity_service_url")
					.put("value", "https://region-a.geo-1.identity.hpcloudsvc.com:35357/"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "compute_service_url")
					.put("value", "https://region-b.geo-1.compute.hpcloudsvc.com/"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "api_version")
					.put("value", "2"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "keystone_api_version")
					.put("value", "2.0"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "debug_level")
					.put("value", "1"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "credential")
					.put("value", "hp"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "resource")
					.put("value", "local"));
			actualParameterArray.put(new JSONObject()
					.put("actualParameterName", "workspace")
					.put("value", "default"));
			parentJSONObject.put("actualParameter", actualParameterArray);
			JSONArray credentialArray = new JSONArray();
			credentialArray.put(new JSONObject()
					.put("credentialName", "hp")
					.put("userName", USER)
					.put("password", PASSWORD));
			parentJSONObject.put("credential", credentialArray);
			String jobId = callRunProcedure(parentJSONObject);
			response = waitForJob(jobId);
		} catch (JSONException e) {
			e.printStackTrace();
		}

		// Check job status
		assertEquals("Job completed without errors", "success", response);
	}
}
package edu.yu.einstein.wasp.util; //import static org.junit.Assert.*; import org.testng.annotations.Test; import org.testng.Assert; import org.testng.annotations.*; public class StringHelperTest { @BeforeClass public void setUp() throws Exception { } @AfterClass public void tearDown() throws Exception { } @Test public void testGetLoginFromFormattedNameAndLogin() { String expected = new String("Test"); Assert.assertEquals(expected, StringHelper.getLoginFromFormattedNameAndLogin("This is a (Test)"),"Failed in StringHelper.getLoginFromFormattedNameAndLogin()"); //fail("Not yet implemented"); } }
package net.peerindex.s3stream;

import com.amazonaws.AmazonClientException;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.*;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Charsets;
import com.google.common.base.Throwables;
import com.google.common.hash.Hashing;
import com.google.common.io.CharStreams;
import org.hamcrest.CustomTypeSafeMatcher;
import org.hamcrest.Matcher;
import org.junit.Test;

import javax.xml.bind.DatatypeConverter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static com.google.common.base.Preconditions.checkState;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.*;

/**
 * Interaction tests for {@link S3StreamFactory} / {@link S3Stream}, driven
 * against a mocked {@link AmazonS3Client}. Each test verifies the exact
 * multipart-upload calls (initiate / uploadPart / complete / abort) the
 * stream is expected to issue.
 *
 * @author Enno Shioji (enno.shioji@peerindex.com)
 */
public class S3StreamFactoryTest {
    // Upload id returned by every mocked initiateMultipartUpload call.
    private static final String MOCK_UPLOAD_ID = "upload";

    /**
     * A stream that is closed without any writes should abort the multipart
     * upload rather than completing an empty one.
     */
    @Test
    public void noWrites() throws Exception {
        MetricRegistry metricRegistry = new MetricRegistry();
        AmazonS3Client client = mockClient();
        // Args after the client appear to be: time unit, timeout, file-size
        // threshold, part-size threshold, continue-on-failure, and pool/queue
        // sizes — TODO confirm against S3StreamFactory's constructor.
        final S3StreamFactory subject =
                new S3StreamFactory(metricRegistry, client, TimeUnit.SECONDS, 300, 30 * 1024 * 1024, 11 * 1024 * 1024, false, true, 4, 10);
        S3Stream stream = subject.newStream("test", "test/", Charsets.UTF_8);
        stream.close();
        Matcher<InitiateMultipartUploadRequest> initReqMatcher = matchInitReq();
        Matcher<AbortMultipartUploadRequest> abortReqMatcher = matchAbortReq();
        verify(client, times(1)).initiateMultipartUpload(argThat(initReqMatcher));
        verify(client, times(1)).abortMultipartUpload(argThat(abortReqMatcher));
        verifyNoMoreInteractions(client);
    }

    /**
     * A small write should yield exactly one part upload followed by a
     * completion of the multipart upload.
     */
    @Test
    public void smallFile() throws Exception {
        MetricRegistry fake = new MetricRegistry();
        AmazonS3Client cl = mockClient();
        final S3StreamFactory subject =
                new S3StreamFactory(fake, cl, TimeUnit.SECONDS, 300, 30 * 1024 * 1024, 11 * 1024 * 1024, false, true, 4, 10);
        S3Stream stream = subject.newStream("test", "test/", Charsets.UTF_8);
        try {
            stream.write("AA");
        } finally {
            stream.close();
        }
        Matcher<UploadPartRequest> matcher = matchUploadPart("AA");
        verify(cl, times(1)).initiateMultipartUpload(argThat(matchInitReq()));
        verify(cl, times(1)).uploadPart(argThat(matcher));
        verify(cl, times(1)).completeMultipartUpload(argThat(matchCompleteReq("test", "test/")));
        verifyNoMoreInteractions(cl);
    }

    /**
     * Writing more than the configured file-size threshold should roll over
     * to a second object: two initiations/completions and three part uploads
     * in total for ~10.8MB of data against a 10MB threshold.
     */
    @Test
    public void twoFiles() throws Exception {
        MetricRegistry fake = new MetricRegistry();
        AmazonS3Client cl = mockClient();
        final S3StreamFactory subject =
                new S3StreamFactory(fake, cl, TimeUnit.SECONDS, 300, 10 * 1024 * 1024, 5 * 1024 * 1024, false, true, 4, 10);
        S3Stream stream = subject.newStream("test", "test/", Charsets.UTF_8);
        try {
            // 300,000 rows * 36 chars ≈ 10.8 MB of 'A's.
            String row = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
            for (int i = 0; i < 300000; i++) {
                stream.write(row);
            }
        } finally {
            stream.close();
        }
        Matcher<UploadPartRequest> matcher = matchUploadPart('A');
        verify(cl, times(2)).initiateMultipartUpload(argThat(matchInitReq()));
        verify(cl, times(3)).uploadPart(argThat(matcher));
        verify(cl, times(2)).completeMultipartUpload(argThat(matchCompleteReq("test", "test/")));
        verifyNoMoreInteractions(cl);
    }

    /**
     * With continue-on-failure disabled, a simulated uploadPart failure must
     * surface as an IOException on write, and close() must also fail; all
     * started uploads are aborted, none completed.
     */
    @Test
    public void failOnFailure() throws Exception {
        MetricRegistry fake = new MetricRegistry();
        AmazonS3Client cl = mockFailingClient();
        final S3StreamFactory subject =
                new S3StreamFactory(fake, cl, TimeUnit.SECONDS, 300, 10 * 1024 * 1024, 5 * 1024 * 1024, false, true, 4, 10);
        S3Stream stream = subject.newStream("test", "test/", Charsets.UTF_8);
        try {
            String row = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
            for (int i = 0; i < 300000; i++) {
                stream.write(row);
            }
            fail();
        } catch (IOException e) {
            // The root cause must be the simulated AmazonClientException.
            assertEquals("simulated failure", Throwables.getRootCause(e).getMessage());
        }
        try {
            stream.close();
            fail();
        } catch (IOException e) {
            assertTrue(true);
        }
        Matcher<UploadPartRequest> matcher = matchUploadPart('A');
        verify(cl, times(2)).initiateMultipartUpload(argThat(matchInitReq()));
        verify(cl, times(1)).uploadPart(argThat(matcher));
        verify(cl, times(2)).abortMultipartUpload(argThat(matchAbortReq()));
        verifyNoMoreInteractions(cl);
    }

    /**
     * With continue-on-failure enabled (first boolean flag = true), the first
     * failing part is abandoned (aborted) but writing continues and the
     * subsequent object completes successfully.
     */
    @Test
    public void continueOnFailure() throws Exception {
        MetricRegistry fake = new MetricRegistry();
        AmazonS3Client cl = mockFailingClient();
        final S3StreamFactory subject =
                new S3StreamFactory(fake, cl, TimeUnit.SECONDS, 300, 10 * 1024 * 1024, 5 * 1024 * 1024, true, true, 4, 10);
        S3Stream stream = subject.newStream("test", "test/", Charsets.UTF_8);
        try {
            String row = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
            for (int i = 0; i < 300000; i++) {
                stream.write(row);
            }
        } finally {
            stream.close();
        }
        Matcher<UploadPartRequest> matcher = matchUploadPart('A');
        verify(cl, times(2)).initiateMultipartUpload(argThat(matchInitReq()));
        verify(cl, times(2)).uploadPart(argThat(matcher));
        verify(cl, times(2)).abortMultipartUpload(argThat(matchAbortReq()));
        verify(cl, times(1)).completeMultipartUpload(argThat(matchCompleteReq("test", "test/")));
        verifyNoMoreInteractions(cl);
    }

    /** Writing after close() on a never-written stream must throw IOException. */
    @Test(expected = IOException.class)
    public void writeOnClosed() throws Exception {
        MetricRegistry fake = new MetricRegistry();
        AmazonS3Client cl = mockFailingClient();
        final S3StreamFactory subject =
                new S3StreamFactory(fake, cl, TimeUnit.SECONDS, 300, 10 * 1024 * 1024, 5 * 1024 * 1024, true, true, 4, 10);
        S3Stream stream = subject.newStream("test", "test/", Charsets.UTF_8);
        stream.close();
        stream.write("AA");
    }

    /** Writing after close() on a stream that had writes must also throw. */
    @Test(expected = IOException.class)
    public void writeOnWrittenAndClosed() throws Exception {
        MetricRegistry fake = new MetricRegistry();
        AmazonS3Client cl = mockFailingClient();
        final S3StreamFactory subject =
                new S3StreamFactory(fake, cl, TimeUnit.SECONDS, 300, 10 * 1024 * 1024, 5 * 1024 * 1024, true, true, 4, 10);
        S3Stream stream = subject.newStream("test", "test/", Charsets.UTF_8);
        stream.write("AA");
        stream.close();
        stream.write("AA");
    }

    /**
     * Matches an abort request carrying the mock upload id, the "test"
     * bucket, and a key under "test/" that is not itself a "directory" key.
     */
    private CustomTypeSafeMatcher<AbortMultipartUploadRequest> matchAbortReq() {
        return new CustomTypeSafeMatcher<AbortMultipartUploadRequest>("") {
            @Override
            protected boolean matchesSafely(AbortMultipartUploadRequest item) {
                boolean ok = true;
                ok &= MOCK_UPLOAD_ID.equals(item.getUploadId());
                ok &= "test".equals(item.getBucketName());
                ok &= item.getKey().startsWith("test/");
                ok &= !item.getKey().endsWith("/");
                return ok;
            }
        };
    }

    /** Matches an initiate request on bucket "test" with a key under "test/". */
    private CustomTypeSafeMatcher<InitiateMultipartUploadRequest> matchInitReq() {
        return new CustomTypeSafeMatcher<InitiateMultipartUploadRequest>("") {
            @Override
            protected boolean matchesSafely(InitiateMultipartUploadRequest item) {
                boolean ok = true;
                ok &= "test".equals(item.getBucketName());
                ok &= item.getKey().startsWith("test/");
                ok &= !item.getKey().endsWith("/");
                return ok;
            }
        };
    }

    /** Matches a completion request for the given bucket/prefix and mock upload id. */
    private Matcher<CompleteMultipartUploadRequest> matchCompleteReq(final String bucketName, final String prefix) {
        return new CustomTypeSafeMatcher<CompleteMultipartUploadRequest>("") {
            @Override
            protected boolean matchesSafely(CompleteMultipartUploadRequest subject) {
                boolean ok = subject.getBucketName().equals(bucketName);
                ok &= subject.getKey().startsWith(prefix);
                ok &= subject.getUploadId().equals(MOCK_UPLOAD_ID);
                return ok;
            }
        };
    }

    /**
     * Matches an uploadPart request whose entire UTF-8 payload consists of
     * the single character {@code c}. Consumes the request's input stream.
     */
    private Matcher<UploadPartRequest> matchUploadPart(final char c) {
        return new CustomTypeSafeMatcher<UploadPartRequest>("") {
            @Override
            protected boolean matchesSafely(UploadPartRequest subject) {
                try {
                    String read = CharStreams.toString(new InputStreamReader(subject.getInputStream(), Charsets.UTF_8));
                    for (int i = 0, n = read.length(); i < n; i++) {
                        checkState(read.charAt(i) == c);
                    }
                    return true;
                } catch (IOException e) {
                    throw new AssertionError(e);
                }
            }
        };
    }

    /**
     * Matches an uploadPart request by comparing its declared MD5 digest to
     * the base64-encoded MD5 of the expected content (avoids consuming the
     * stream).
     */
    private Matcher<UploadPartRequest> matchUploadPart(final String content) {
        return new CustomTypeSafeMatcher<UploadPartRequest>("") {
            @Override
            protected boolean matchesSafely(UploadPartRequest subject) {
                String base64encodedMD5 =
                        DatatypeConverter.printBase64Binary(Hashing.md5().hashBytes(content.getBytes(Charsets.UTF_8)).asBytes());
                return subject.getMd5Digest().equals(base64encodedMD5);
            }
        };
    }

    /**
     * Mock client whose uploadPart throws once ("simulated failure") and then
     * succeeds; initiateMultipartUpload always returns the mock upload id.
     */
    private AmazonS3Client mockFailingClient() {
        AmazonS3Client client = mock(AmazonS3Client.class, RETURNS_DEEP_STUBS);
        InitiateMultipartUploadResult retOnInit = mock(InitiateMultipartUploadResult.class, RETURNS_DEEP_STUBS);
        when(retOnInit.getUploadId()).thenReturn(MOCK_UPLOAD_ID);
        when(client.initiateMultipartUpload(any(InitiateMultipartUploadRequest.class))).thenReturn(retOnInit);
        UploadPartResult uploadPartResult = mock(UploadPartResult.class);
        when(client.uploadPart(any(UploadPartRequest.class))).thenThrow(new AmazonClientException("simulated failure")).thenReturn(uploadPartResult);
        return client;
    }

    /** Mock client whose initiateMultipartUpload always returns the mock upload id. */
    private AmazonS3Client mockClient() {
        AmazonS3Client client = mock(AmazonS3Client.class, RETURNS_DEEP_STUBS);
        InitiateMultipartUploadResult retOnInit = mock(InitiateMultipartUploadResult.class, RETURNS_DEEP_STUBS);
        when(retOnInit.getUploadId()).thenReturn(MOCK_UPLOAD_ID);
        when(client.initiateMultipartUpload(any(InitiateMultipartUploadRequest.class))).thenReturn(retOnInit);
        return client;
    }
}
package nom.bdezonia.zorbage.algorithm; import static org.junit.Assert.assertEquals; import org.junit.Test; import nom.bdezonia.zorbage.groups.G; import nom.bdezonia.zorbage.type.data.float64.real.Float64Member; import nom.bdezonia.zorbage.type.storage.IndexedDataSource; import nom.bdezonia.zorbage.type.storage.array.ArrayStorage; /** * * @author Barry DeZonia * */ public class TestReplace { @Test public void test() { IndexedDataSource<?,Float64Member> list = ArrayStorage.allocateDoubles( new double[] {1,2,1,4,2,6,1,7,1}); Float64Member value = G.DBL.construct(); Replace.compute(G.DBL, new Float64Member(4), new Float64Member(11), list); assertEquals(9, list.size()); list.get(0, value); assertEquals(1, value.v(), 0); list.get(1, value); assertEquals(2, value.v(), 0); list.get(2, value); assertEquals(1, value.v(), 0); list.get(3, value); assertEquals(11, value.v(), 0); list.get(4, value); assertEquals(2, value.v(), 0); list.get(5, value); assertEquals(6, value.v(), 0); list.get(6, value); assertEquals(1, value.v(), 0); list.get(7, value); assertEquals(7, value.v(), 0); list.get(8, value); assertEquals(1, value.v(), 0); Replace.compute(G.DBL, new Float64Member(1), new Float64Member(99), list); assertEquals(9, list.size()); list.get(0, value); assertEquals(99, value.v(), 0); list.get(1, value); assertEquals(2, value.v(), 0); list.get(2, value); assertEquals(99, value.v(), 0); list.get(3, value); assertEquals(11, value.v(), 0); list.get(4, value); assertEquals(2, value.v(), 0); list.get(5, value); assertEquals(6, value.v(), 0); list.get(6, value); assertEquals(99, value.v(), 0); list.get(7, value); assertEquals(7, value.v(), 0); list.get(8, value); assertEquals(99, value.v(), 0); } }
package jdit.testing.dao;

import com.google.common.collect.ImmutableList;
import jdit.testing.domain.ImmutableChessGame;
import jdit.testing.domain.ImmutableChessPlayer;
import jdit.testing.domain.mapper.ChessGameMapper;
import jdit.testing.domain.mapper.ChessPlayerMapper;
import org.skife.jdbi.v2.sqlobject.SqlQuery;
import org.skife.jdbi.v2.sqlobject.customizers.Mapper;

/**
 * JDBI v2 SQL-object DAO for reading chess test fixtures.
 *
 * The {@code @SqlQuery} values look like classpath locators for external
 * .sql files rather than inline SQL — presumably resolved by a locator
 * configured elsewhere; confirm against the JDBI setup.
 */
public interface ChessDao {

    /** Returns all chess games, mapped via {@link ChessGameMapper}. */
    @SqlQuery("chessDao/get-chess-games.sql")
    @Mapper(ChessGameMapper.class)
    ImmutableList<ImmutableChessGame> getChessGames();

    /** Returns all chess players, mapped via {@link ChessPlayerMapper}. */
    @SqlQuery("chessDao/get-chess-players.sql")
    @Mapper(ChessPlayerMapper.class)
    ImmutableList<ImmutableChessPlayer> getChessPlayers();
}
package org.mariadb.jdbc.integration; import static org.junit.jupiter.api.Assertions.*; import java.sql.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.junit.jupiter.api.*; import org.mariadb.jdbc.Common; import org.mariadb.jdbc.Connection; import org.mariadb.jdbc.Statement; public class StatementTest extends Common { @AfterAll public static void drop() throws SQLException { Statement stmt = sharedConn.createStatement(); stmt.execute("DROP TABLE IF EXISTS StatementTest"); stmt.execute("DROP TABLE IF EXISTS executeGenerated"); stmt.execute("DROP TABLE IF EXISTS executeGenerated2"); stmt.execute("DROP TABLE IF EXISTS testAffectedRow"); } @BeforeAll public static void beforeAll2() throws SQLException { drop(); Statement stmt = sharedConn.createStatement(); stmt.execute("CREATE TABLE StatementTest (t1 int not null primary key auto_increment, t2 int)"); stmt.execute( "CREATE TABLE executeGenerated (t1 int not null primary key auto_increment, t2 int)"); stmt.execute( "CREATE TABLE executeGenerated2 (t1 int not null primary key auto_increment, t2 int)"); stmt.execute("CREATE TABLE testAffectedRow(id int)"); stmt.execute("FLUSH TABLES"); } @Test public void getConnection() throws SQLException { Statement stmt = sharedConn.createStatement(); assertEquals(ResultSet.TYPE_FORWARD_ONLY, stmt.getResultSetType()); assertEquals(ResultSet.CONCUR_READ_ONLY, stmt.getResultSetConcurrency()); assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, stmt.getResultSetHoldability()); assertEquals(sharedConn, stmt.getConnection()); stmt = sharedConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE); assertEquals(ResultSet.TYPE_SCROLL_INSENSITIVE, stmt.getResultSetType()); assertEquals(ResultSet.CONCUR_UPDATABLE, stmt.getResultSetConcurrency()); assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, stmt.getResultSetHoldability()); stmt = sharedConn.createStatement( ResultSet.TYPE_SCROLL_INSENSITIVE, 
ResultSet.CONCUR_UPDATABLE, ResultSet.CLOSE_CURSORS_AT_COMMIT); assertEquals(ResultSet.TYPE_SCROLL_INSENSITIVE, stmt.getResultSetType()); assertEquals(ResultSet.CONCUR_UPDATABLE, stmt.getResultSetConcurrency()); // not supported assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, stmt.getResultSetHoldability()); } @Test public void execute() throws SQLException { Statement stmt = sharedConn.createStatement(); assertTrue(stmt.execute("SELECT 1", Statement.RETURN_GENERATED_KEYS)); ResultSet rs = stmt.getGeneratedKeys(); Assertions.assertNull(rs.getWarnings()); assertFalse(rs.next()); assertNotNull(stmt.getResultSet()); assertEquals(-1, stmt.getUpdateCount()); assertFalse(stmt.getMoreResults()); assertEquals(-1, stmt.getUpdateCount()); assertFalse(stmt.execute("DO 1")); Assertions.assertNull(stmt.getResultSet()); assertEquals(0, stmt.getUpdateCount()); assertFalse(stmt.getMoreResults()); assertEquals(-1, stmt.getUpdateCount()); assertTrue(stmt.execute("SELECT 1", new int[] {1, 2})); rs = stmt.getGeneratedKeys(); assertFalse(rs.next()); assertTrue(stmt.execute("SELECT 1", new String[] {"test", "test2"})); rs = stmt.getGeneratedKeys(); assertFalse(rs.next()); stmt.close(); } @Test public void executeGenerated() throws SQLException { Statement stmt = sharedConn.createStatement(); assertFalse(stmt.execute("INSERT INTO executeGenerated(t2) values (100)")); SQLException e = Assertions.assertThrows(SQLException.class, () -> stmt.getGeneratedKeys()); assertTrue(e.getMessage().contains("Cannot return generated keys")); assertFalse( stmt.execute( "INSERT INTO executeGenerated(t2) values (100)", Statement.RETURN_GENERATED_KEYS)); ResultSet rs = stmt.getGeneratedKeys(); assertTrue(rs.next()); assertEquals(2, rs.getInt(1)); } @Test public void executeGeneratedBatch() throws SQLException { Statement stmt = sharedConn.createStatement(); stmt.addBatch("INSERT INTO executeGenerated2(t2) values (110)"); stmt.addBatch("INSERT INTO executeGenerated2(t2) values (120)"); int[] res = 
stmt.executeBatch(); assertArrayEquals(new int[] {1, 1}, res); ResultSet rs = stmt.getGeneratedKeys(); assertTrue(rs.next()); assertEquals(1, rs.getInt(1)); assertTrue(rs.next()); assertEquals(2, rs.getInt(1)); assertFalse(rs.next()); } @Test public void executeUpdate() throws SQLException { Statement stmt = sharedConn.createStatement(); stmt.execute("INSERT INTO StatementTest(t1, t2) values (1, 110), (2, 120)"); assertEquals( 2, stmt.executeUpdate("UPDATE StatementTest SET t2 = 130 WHERE t2 > 100 AND t2 < 200")); assertEquals(2, stmt.getUpdateCount()); assertFalse(stmt.getMoreResults()); assertEquals(-1, stmt.getUpdateCount()); assertEquals( 2, stmt.executeUpdate( "UPDATE StatementTest SET t2 = 150 WHERE t2 > 100 AND t2 < 200", new int[] {1, 2})); assertEquals(2, stmt.getUpdateCount()); assertEquals( 2, stmt.executeUpdate( "UPDATE StatementTest SET t2 = 150 WHERE t2 > 100 AND t2 < 200", new String[] {"test", "test2"})); assertEquals(2, stmt.getUpdateCount()); try { stmt.executeUpdate("SELECT 1"); Assertions.fail(); } catch (SQLException sqle) { assertTrue( sqle.getMessage() .contains("the given SQL statement produces an unexpected ResultSet object")); } assertEquals(0, stmt.executeUpdate("DO 1")); } @Test public void executeLargeUpdate() throws SQLException { Statement stmt = sharedConn.createStatement(); stmt.execute("INSERT INTO StatementTest(t1, t2) values (10, 210), (12, 220)"); assertEquals(2, stmt.executeLargeUpdate("UPDATE StatementTest SET t2 = 230 WHERE t2 > 200")); assertEquals(2L, stmt.getLargeUpdateCount()); assertFalse(stmt.getMoreResults()); assertEquals(-1L, stmt.getLargeUpdateCount()); assertEquals( 2, stmt.executeLargeUpdate( "UPDATE StatementTest SET t2 = 250 WHERE t2 > 200", new int[] {1, 2})); assertEquals(2L, stmt.getLargeUpdateCount()); assertEquals( 2, stmt.executeLargeUpdate( "UPDATE StatementTest SET t2 = 250 WHERE t2 > 200", new String[] {"test", "test2"})); assertEquals(2L, stmt.getLargeUpdateCount()); try { 
stmt.executeLargeUpdate("SELECT 1"); Assertions.fail(); } catch (SQLException sqle) { assertTrue( sqle.getMessage() .contains("the given SQL statement produces an unexpected ResultSet object")); } assertEquals(0, stmt.executeLargeUpdate("DO 1")); } @Test public void executeQuery() throws SQLException { Statement stmt = sharedConn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT 1"); assertTrue(rs.next()); rs = stmt.executeQuery("DO 1"); assertFalse(rs.next()); } @Test public void close() throws SQLException { Assumptions.assumeTrue(isMariaDBServer()); Statement stmt = sharedConn.createStatement(); assertFalse(stmt.isClosed()); ResultSet rs = stmt.executeQuery("select * FROM mysql.user LIMIT 1"); rs.next(); Object[] objs = new Object[45]; for (int i = 0; i < 45; i++) { objs[i] = rs.getObject(i + 1); } rs = stmt.executeQuery("SELECT * FROM seq_1_to_10000"); assertFalse(rs.isClosed()); stmt.close(); assertTrue(stmt.isClosed()); assertTrue(rs.isClosed()); assertThrowsContains( SQLException.class, () -> stmt.clearBatch(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.isPoolable(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.setPoolable(true), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.closeOnCompletion(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.isCloseOnCompletion(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getResultSetConcurrency(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getFetchSize(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getMoreResults(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> 
stmt.execute("ANY"), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.executeUpdate("ANY"), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.executeQuery("ANY"), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.executeBatch(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getConnection(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getMoreResults(1), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.cancel(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getMaxRows(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getLargeMaxRows(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.setMaxRows(1), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.setEscapeProcessing(true), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getQueryTimeout(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getUpdateCount(), "Cannot do an operation on a closed statement"); assertThrowsContains( SQLException.class, () -> stmt.getLargeUpdateCount(), "Cannot do an operation on a closed statement"); } @Test public void maxRows() throws SQLException { Assumptions.assumeTrue(isMariaDBServer()); Statement stmt = sharedConn.createStatement(); assertEquals(0, stmt.getMaxRows()); try { stmt.setMaxRows(-1); Assertions.fail(); } catch (SQLException e) { assertTrue(e.getMessage().contains("max rows cannot be negative")); } stmt.setMaxRows(10); assertEquals(10, 
stmt.getMaxRows()); ResultSet rs = stmt.executeQuery("SELECT * FROM seq_1_to_10000"); int i = 0; while (rs.next()) { i++; assertEquals(i, rs.getInt(1)); } assertEquals(10, i); stmt.setQueryTimeout(2); rs = stmt.executeQuery("SELECT * FROM seq_1_to_10000"); i = 0; while (rs.next()) { i++; assertEquals(i, rs.getInt(1)); } assertEquals(10, i); } @Test public void largeMaxRows() throws SQLException { Assumptions.assumeTrue(isMariaDBServer()); Statement stmt = sharedConn.createStatement(); assertEquals(0L, stmt.getLargeMaxRows()); try { stmt.setLargeMaxRows(-1); Assertions.fail(); } catch (SQLException e) { assertTrue(e.getMessage().contains("max rows cannot be negative")); } stmt.setLargeMaxRows(10); assertEquals(10L, stmt.getLargeMaxRows()); ResultSet rs = stmt.executeQuery("SELECT * FROM seq_1_to_10000"); int i = 0; while (rs.next()) { i++; assertEquals(i, rs.getInt(1)); } assertEquals(10, i); stmt.setQueryTimeout(2); rs = stmt.executeQuery("SELECT * FROM seq_1_to_10000"); i = 0; while (rs.next()) { i++; assertEquals(i, rs.getInt(1)); } assertEquals(10, i); } @Test public void checkFixedData() throws SQLException { Statement stmt = sharedConn.createStatement(); assertFalse(stmt.isPoolable()); stmt.setPoolable(true); assertFalse(stmt.isPoolable()); assertFalse(stmt.isWrapperFor(String.class)); assertFalse(stmt.isWrapperFor(null)); assertTrue(stmt.isWrapperFor(Statement.class)); stmt.unwrap(java.sql.Statement.class); assertThrowsContains( SQLException.class, () -> stmt.unwrap(String.class), "he receiver is not a wrapper and does not implement the interface"); assertThrowsContains( SQLException.class, () -> stmt.setCursorName(""), "Cursors are not supported"); assertEquals(ResultSet.FETCH_FORWARD, stmt.getFetchDirection()); stmt.setFetchDirection(ResultSet.FETCH_REVERSE); assertEquals(ResultSet.FETCH_FORWARD, stmt.getFetchDirection()); assertEquals(ResultSet.CONCUR_READ_ONLY, stmt.getResultSetConcurrency()); assertEquals(ResultSet.TYPE_FORWARD_ONLY, 
stmt.getResultSetType()); assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, stmt.getResultSetHoldability()); assertEquals(0, stmt.getMaxFieldSize()); stmt.setMaxFieldSize(100); assertEquals(0, stmt.getMaxFieldSize()); } @Test public void getMoreResults() throws SQLException { Assumptions.assumeTrue(isMariaDBServer()); Statement stmt = sharedConn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT * FROM seq_1_to_10000"); assertFalse(stmt.getMoreResults(Statement.KEEP_CURRENT_RESULT)); assertFalse(rs.isClosed()); rs = stmt.executeQuery("SELECT * FROM seq_1_to_10000"); stmt.getMoreResults(Statement.CLOSE_CURRENT_RESULT); assertTrue(rs.isClosed()); stmt.close(); } @Test @Timeout(20) public void queryTimeout() throws Exception { Assumptions.assumeTrue( isMariaDBServer() && !"maxscale".equals(System.getenv("srv")) && !"skysql".equals(System.getenv("srv")) && !"skysql-ha".equals(System.getenv("srv"))); Statement stmt = sharedConn.createStatement(); assertThrowsContains( SQLException.class, () -> stmt.setQueryTimeout(-1), "Query timeout cannot be negative"); assertThrowsContains( SQLTimeoutException.class, () -> { stmt.setQueryTimeout(1); assertEquals(1, stmt.getQueryTimeout()); stmt.execute( "select * from information_schema.columns as c1, information_schema.tables, information_schema" + ".tables as t2"); }, "Query execution was interrupted (max_statement_time exceeded)"); } @Test public void smallQueryTimeout() throws Exception { Statement stmt = sharedConn.createStatement(); stmt.setQueryTimeout(1); stmt.execute("SELECT 1"); stmt.setMaxRows(1); stmt.execute("SELECT 1"); stmt.setQueryTimeout(0); stmt.execute("SELECT 1"); } @Test public void escaping() throws Exception { try (Connection con = (Connection) DriverManager.getConnection(mDefUrl + "&dumpQueriesOnException=true")) { Statement stmt = con.createStatement(); assertThrowsContains( SQLException.class, () -> stmt.executeQuery( "select {fn timestampdiff(SQL_TSI_HOUR, '2003-02-01','2003-05-01')} df df "), 
"select {fn timestampdiff" + "(SQL_TSI_HOUR, '2003-02-01','2003-05-01')} df df "); stmt.setEscapeProcessing(true); assertThrowsContains( SQLException.class, () -> stmt.executeQuery( "select {fn timestampdiff(SQL_TSI_HOUR, '2003-02-01','2003-05-01')} df df "), "select timestampdiff(HOUR, '2003-02-01','2003-05-01') df df "); } } @Test public void testWarnings() throws SQLException { Assumptions.assumeTrue( !"skysql".equals(System.getenv("srv")) && !"skysql-ha".equals(System.getenv("srv"))); Assumptions.assumeTrue(isMariaDBServer()); Statement stmt = sharedConn.createStatement(); // connection level Assertions.assertNull(sharedConn.getWarnings()); stmt.executeQuery("select now() = 1"); SQLWarning warning = sharedConn.getWarnings(); assertTrue(warning.getMessage().contains("ncorrect datetime value: '1'")); stmt.executeQuery("select now() = 1"); sharedConn.clearWarnings(); Assertions.assertNull(sharedConn.getWarnings()); // statement level ResultSet rs = stmt.executeQuery("select now() = 1"); warning = rs.getWarnings(); assertTrue(warning.getMessage().contains("ncorrect datetime value: '1'")); rs = stmt.executeQuery("select now() = 1"); rs.clearWarnings(); Assertions.assertNull(rs.getWarnings()); stmt.executeQuery("select now() = 1"); warning = stmt.getWarnings(); assertTrue(warning.getMessage().contains("ncorrect datetime value: '1'")); stmt.executeQuery("select now() = 1"); stmt.clearWarnings(); Assertions.assertNull(stmt.getWarnings()); } @Test public void cancel() throws Exception { Assumptions.assumeTrue( isMariaDBServer() && !"maxscale".equals(System.getenv("srv")) && !"skysql".equals(System.getenv("srv")) && !"skysql-ha".equals(System.getenv("srv"))); Statement stmt = sharedConn.createStatement(); stmt.cancel(); // will do nothing ExecutorService exec = Executors.newFixedThreadPool(1); assertThrowsContains( SQLTimeoutException.class, () -> { exec.execute(new CancelThread(stmt)); stmt.execute( "select * from information_schema.columns as c1, 
information_schema.tables, information_schema" + ".tables as t2"); exec.shutdown(); }, "Query execution was interrupted"); } @Test public void fetch() throws SQLException { Assumptions.assumeTrue(isMariaDBServer()); Statement stmt = sharedConn.createStatement(); assertThrowsContains(SQLException.class, () -> stmt.setFetchSize(-10), "invalid fetch size"); stmt.setFetchSize(10); assertEquals(10, stmt.getFetchSize()); ResultSet rs = stmt.executeQuery("select * FROM seq_1_to_10000"); for (int i = 1; i <= 10000; i++) { assertTrue(rs.next()); assertEquals(i, rs.getInt(1)); } assertFalse(rs.next()); } @Test public void fetchUnFinishedSameStatement() throws SQLException { Assumptions.assumeTrue(isMariaDBServer()); Statement stmt = sharedConn.createStatement(); stmt.setFetchSize(10); assertEquals(10, stmt.getFetchSize()); ResultSet rs = stmt.executeQuery("select * FROM seq_1_to_1000"); for (int i = 1; i <= 500; i++) { assertTrue(rs.next()); assertEquals(i, rs.getInt(1)); } ResultSet rs2 = stmt.executeQuery("select * FROM seq_1_to_1000"); for (int i = 501; i <= 1000; i++) { assertTrue(rs.next()); assertEquals(i, rs.getInt(1)); } assertFalse(rs.next()); for (int i = 1; i <= 1000; i++) { assertTrue(rs2.next()); assertEquals(i, rs2.getInt(1)); } assertFalse(rs2.next()); } @Test public void fetchUnFinishedOtherStatement() throws SQLException { Assumptions.assumeTrue(isMariaDBServer()); Statement stmt = sharedConn.createStatement(); stmt.setFetchSize(5); assertEquals(5, stmt.getFetchSize()); ResultSet rs = stmt.executeQuery("select * FROM seq_1_to_20"); for (int i = 1; i <= 10; i++) { assertTrue(rs.next()); assertEquals(i, rs.getInt(1)); } Statement stmt2 = sharedConn.createStatement(); ResultSet rs2 = stmt2.executeQuery("select * FROM seq_1_to_20"); for (int i = 11; i <= 20; i++) { assertTrue(rs.next()); assertEquals(i, rs.getInt(1)); } assertFalse(rs.next()); for (int i = 1; i <= 20; i++) { assertTrue(rs2.next()); assertEquals(i, rs2.getInt(1)); } assertFalse(rs2.next()); } 
/**
 * getMoreResults() on a statement with an unread streaming result must discard it cleanly,
 * leaving the connection usable for other statements.
 */
@Test
public void fetchUnfinished() throws SQLException {
  Assumptions.assumeTrue(isMariaDBServer());
  Statement stmt = sharedConn.createStatement();
  stmt.setFetchSize(1);
  stmt.executeQuery("select * FROM seq_1_to_20");
  assertFalse(stmt.getMoreResults());
  Statement stmt2 = sharedConn.createStatement();
  ResultSet rs = stmt2.executeQuery("SELECT 1");
  rs.next();
  assertEquals(1, rs.getInt(1));
}

/**
 * Closing a statement mid-stream must close its open result set (and be idempotent),
 * while other statements on the same connection keep working.
 */
@Test
public void fetchClose() throws SQLException {
  Assumptions.assumeTrue(isMariaDBServer());
  Statement stmt = sharedConn.createStatement();
  stmt.setFetchSize(10);
  assertEquals(10, stmt.getFetchSize());
  ResultSet rs = stmt.executeQuery("select * FROM seq_1_to_1000");
  for (int i = 1; i <= 500; i++) {
    assertTrue(rs.next());
    assertEquals(i, rs.getInt(1));
  }
  stmt.close();
  assertTrue(rs.isClosed());
  stmt.close(); // second close must be a no-op
  Statement stmt2 = sharedConn.createStatement();
  ResultSet rs2 = stmt2.executeQuery("select * FROM seq_1_to_1000");
  for (int i = 1; i <= 1000; i++) {
    assertTrue(rs2.next());
    assertEquals(i, rs2.getInt(1));
  }
  assertFalse(rs2.next());
}

/** Runs the batch scenario with the default connection and with allowLocalInfile enabled. */
@Test
public void executeBatchBasic() throws SQLException {
  executeBatchBasic(sharedConn);
  try (Connection con = createCon("allowLocalInfile=true")) {
    executeBatchBasic(con);
  }
}

/**
 * executeBatch() contract: empty batch returns an empty array, null SQL is rejected,
 * update counts are reported per statement, the batch is consumed after execution,
 * clearBatch() empties it, and a syntax error surfaces as BatchUpdateException.
 */
private void executeBatchBasic(Connection con) throws SQLException {
  Statement stmt = con.createStatement();
  assertArrayEquals(new int[0], stmt.executeBatch());
  stmt.clearBatch();
  stmt.execute("DROP TABLE IF EXISTS executeBatchBasic");
  stmt.execute(
      "CREATE TABLE executeBatchBasic (t1 int not null primary key auto_increment, t2 int)");
  assertThrowsContains(
      SQLException.class, () -> stmt.addBatch(null), "null cannot be set to addBatch(String sql)");
  stmt.addBatch("INSERT INTO executeBatchBasic(t2) VALUES (55)");
  stmt.setEscapeProcessing(true);
  stmt.addBatch("INSERT INTO executeBatchBasic(t2) VALUES (56)");
  int[] ret = stmt.executeBatch();
  Assertions.assertArrayEquals(new int[] {1, 1}, ret);
  // the batch must be consumed by the previous call
  ret = stmt.executeBatch();
  Assertions.assertArrayEquals(new int[0], ret);
  // NOTE(review): "executeLargeBatchBasic" here (and below) looks like a copy-paste of the
  // large-batch test's table name — likely meant "executeBatchBasic". Behavior is probably
  // unchanged (first add is cleared, second batch fails on "WRONG QUERY" anyway) — confirm.
  stmt.addBatch("INSERT INTO executeLargeBatchBasic(t2) VALUES (57)");
  stmt.clearBatch();
  ret = stmt.executeBatch();
  Assertions.assertArrayEquals(new int[0], ret);
  assertArrayEquals(new int[0], stmt.executeBatch());
  stmt.addBatch("INSERT INTO executeLargeBatchBasic(t2) VALUES (57)");
  stmt.addBatch("WRONG QUERY");
  assertThrowsContains(
      BatchUpdateException.class, () -> stmt.executeBatch(), "You have an error in your SQL syntax");
}

/** Runs the large-batch scenario with the default connection and with allowLocalInfile enabled. */
@Test
public void executeLargeBatchBasic() throws SQLException {
  executeLargeBatchBasic(sharedConn);
  try (Connection con = createCon("allowLocalInfile=true")) {
    executeLargeBatchBasic(con);
  }
}

/** Same contract as {@code executeBatchBasic(Connection)} but for executeLargeBatch()/long[]. */
private void executeLargeBatchBasic(Connection con) throws SQLException {
  Statement stmt = con.createStatement();
  assertArrayEquals(new long[0], stmt.executeLargeBatch());
  stmt.clearBatch();
  stmt.execute("DROP TABLE IF EXISTS executeLargeBatchBasic");
  stmt.execute(
      "CREATE TABLE executeLargeBatchBasic (t1 int not null primary key auto_increment, t2 int)");
  stmt.addBatch("INSERT INTO executeLargeBatchBasic(t2) VALUES (55)");
  stmt.addBatch("INSERT INTO executeLargeBatchBasic(t2) VALUES (56)");
  long[] ret = stmt.executeLargeBatch();
  Assertions.assertArrayEquals(new long[] {1, 1}, ret);
  ret = stmt.executeLargeBatch();
  Assertions.assertArrayEquals(new long[0], ret);
  stmt.addBatch("INSERT INTO executeLargeBatchBasic(t2) VALUES (57)");
  stmt.clearBatch();
  ret = stmt.executeLargeBatch();
  Assertions.assertArrayEquals(new long[0], ret);
  ret = stmt.executeLargeBatch();
  Assertions.assertArrayEquals(new long[0], ret);
  stmt.addBatch("INSERT INTO executeLargeBatchBasic(t2) VALUES (57)");
  stmt.addBatch("WRONG QUERY");
  assertThrowsContains(
      BatchUpdateException.class,
      () -> stmt.executeLargeBatch(),
      "You have an error in your SQL syntax");
}

/** defaultFetchSize connection property must seed both plain and prepared statements. */
@Test
public void fetchSize() throws SQLException {
  assertEquals(0, sharedConn.createStatement().getFetchSize());
  try (Connection con = createCon("&defaultFetchSize=10")) {
    assertEquals(10, con.createStatement().getFetchSize());
    try (PreparedStatement prep = con.prepareStatement("SELECT ?")) {
      assertEquals(10, prep.getFetchSize());
    }
  }
}

/**
 * getMoreResults() over a multi-result stored procedure, with streaming (fetchSize) results:
 * default mode closes the current result, KEEP_CURRENT_RESULT keeps it readable, and closing
 * the statement closes any open result.
 */
@Test
public void moreResults() throws SQLException {
  Assumptions.assumeTrue(isMariaDBServer());
  Statement stmt = sharedConn.createStatement();
  stmt.execute("DROP PROCEDURE IF EXISTS multi");
  stmt.setFetchSize(3);
  stmt.execute(
      "CREATE PROCEDURE multi() BEGIN SELECT * from seq_1_to_10; SELECT * FROM seq_1_to_1000;SELECT 2; END");
  stmt.execute("CALL multi()");
  assertTrue(stmt.getMoreResults());
  ResultSet rs = stmt.getResultSet();
  int i = 1;
  while (rs.next()) {
    assertEquals(i++, rs.getInt(1));
  }
  assertEquals(1001, i);

  stmt.setFetchSize(3);
  rs = stmt.executeQuery("CALL multi()");
  assertFalse(rs.isClosed());
  stmt.setFetchSize(0); // force more result to load all remaining result-set
  assertTrue(stmt.getMoreResults());
  assertTrue(rs.isClosed()); // default CLOSE_CURRENT_RESULT behaviour
  rs = stmt.getResultSet();
  i = 1;
  while (rs.next()) {
    assertEquals(i++, rs.getInt(1));
  }

  stmt.setFetchSize(3);
  rs = stmt.executeQuery("CALL multi()");
  assertFalse(rs.isClosed());
  stmt.setFetchSize(0); // force more result to load all remaining result-set
  assertTrue(stmt.getMoreResults(java.sql.Statement.KEEP_CURRENT_RESULT));
  assertFalse(rs.isClosed()); // first result kept open and fully readable
  i = 1;
  while (rs.next()) {
    assertEquals(i++, rs.getInt(1));
  }
  assertEquals(11, i);
  rs = stmt.getResultSet();
  i = 1;
  while (rs.next()) {
    assertEquals(i++, rs.getInt(1));
  }
  assertEquals(1001, i);

  rs = stmt.executeQuery("CALL multi()");
  stmt.close();
  assertTrue(rs.isClosed());
}

/** closeOnCompletion(): closing the last open result set must close the statement itself. */
@Test
public void closeOnCompletion() throws SQLException {
  Statement stmt = sharedConn.createStatement();
  assertFalse(stmt.isCloseOnCompletion());
  stmt.closeOnCompletion();
  assertTrue(stmt.isCloseOnCompletion());
  assertFalse(stmt.isClosed());
  ResultSet rs = stmt.executeQuery("SELECT 1");
  assertFalse(rs.isClosed());
  assertFalse(stmt.isClosed());
  rs.close();
  assertTrue(rs.isClosed());
  assertTrue(stmt.isClosed());
}

/** Helper thread: waits ~100ms, then cancels the statement it was given (used by cancel()). */
private static class CancelThread implements Runnable {

  private final java.sql.Statement stmt;

  public CancelThread(java.sql.Statement stmt) {
    this.stmt = stmt;
  }

  @Override
  public void run() {
    try {
      Thread.sleep(100);
      stmt.cancel();
    } catch (SQLException | InterruptedException e) {
      e.printStackTrace();
    }
  }
}

/** Runs the affected-rows scenario with useAffectedRows both disabled and enabled. */
@Test
public void testAffectedRow() throws SQLException {
  testAffectedRow(false);
  testAffectedRow(true);
}

/**
 * With useAffectedRows=true the server reports only rows actually CHANGED by the UPDATE (2:
 * the rows whose id was not already 1); with the default it reports all MATCHED rows (4).
 */
private void testAffectedRow(boolean useAffectedRows) throws SQLException {
  try (Connection con = createCon("&useAffectedRows=" + useAffectedRows)) {
    java.sql.Statement stmt = con.createStatement();
    stmt.execute("TRUNCATE testAffectedRow");
    stmt.execute("START TRANSACTION");
    stmt.execute("INSERT INTO testAffectedRow values (1), (1), (2), (3)");
    int rowCount = stmt.executeUpdate("UPDATE testAffectedRow set id = 1");
    assertEquals(useAffectedRows ? 2 : 4, rowCount);
    con.rollback();
  }
}
}
package uk.co.eluinhost.ultrahardcore.commands; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.World; import org.bukkit.command.CommandSender; import org.bukkit.entity.Entity; import org.bukkit.entity.Player; import uk.co.eluinhost.commands.Command; import uk.co.eluinhost.commands.CommandRequest; import uk.co.eluinhost.ultrahardcore.borders.BorderCreator; import uk.co.eluinhost.ultrahardcore.borders.SessionManager; import uk.co.eluinhost.ultrahardcore.borders.types.CylinderBorder; import uk.co.eluinhost.ultrahardcore.config.ConfigNodes; import uk.co.eluinhost.ultrahardcore.config.PermissionNodes; import uk.co.eluinhost.ultrahardcore.exceptions.worldedit.TooManyBlocksException; import uk.co.eluinhost.ultrahardcore.config.ConfigManager; public class BorderCommand { @Command(trigger = "genborder", identifier = "BorderCommand", permission = PermissionNodes.GENERATE_BORDER) public void onBorderCommand(CommandRequest request){ //TODO this } @Command(trigger = "undo", identifier = "BorderUndoCommand", minArgs = 0, maxArgs = 1, permission = PermissionNodes.GENERATE_BORDER, parentID = "BorderCommand") public void onBorderUndoCommand(CommandRequest request){ CommandSender sender = request.getSender(); String world; if (request.getArgs().size() == 1) { if (!(sender instanceof Player)) { sender.sendMessage("You need to specify a world to undo when not ran as a player"); return; } world = ((Entity) sender).getWorld().getName(); } else { world = request.getFirstArg(); } SessionManager sessionManager = SessionManager.getInstance(); if (sessionManager.undoLastSession(world)) { sender.sendMessage(ChatColor.GOLD + "Undone successfully!"); } else { sender.sendMessage(ChatColor.GOLD + "Nothing left to undo!"); } } @Command(trigger = "types", identifier = "BorderTypesCommand", minArgs = 0, maxArgs = 0, permission = PermissionNodes.GENERATE_BORDER, parentID = "BorderCommand") public void onBorderTypesCommand(CommandRequest request){ //TODO this } private static 
final String SYNTAX = "/generateborder radius world[:x,z] typeID[:blockid:meta] OR /generateborder undo/types [world]"; public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { if ("generateborder".equals("")) { if (args.length != 3) { sender.sendMessage(ChatColor.RED + "Invalid syntax: " + SYNTAX); return true; } int radius; try { radius = Integer.parseInt(args[0]); } catch (NumberFormatException ignored) { sender.sendMessage(ChatColor.RED + "Unknown radius size: " + args[0]); return true; } int x; int z; World w; if (args[1].contains(":")) { String[] parts = args[1].split(":"); if (parts.length != 2) { sender.sendMessage(ChatColor.RED + "Invalid world name/coordinates, syntax for world is worldname:x,z"); return true; } String[] parts2 = parts[1].split(","); if (parts2.length != 2) { sender.sendMessage(ChatColor.RED + "Invalid world name/coordinates, syntax for world is worldname:x,z"); return true; } try { x = Integer.parseInt(parts2[0]); z = Integer.parseInt(parts2[1]); args[1] = parts[0]; } catch (NumberFormatException e) { sender.sendMessage(ChatColor.RED + "One or more world coordinates not a number, world syntax is worldname:x,z"); return true; } w = Bukkit.getWorld(args[1]); if (w == null) { sender.sendMessage(ChatColor.RED + "World " + args[1] + " not found!"); return true; } } else { w = Bukkit.getWorld(args[1]); if (w == null) { sender.sendMessage(ChatColor.RED + "World " + args[1] + " not found!"); return true; } x = w.getSpawnLocation().getBlockX(); z = w.getSpawnLocation().getBlockZ(); } String[] blockinfo; if (args[2].contains(":")) { String[] blockinfos = args[2].split(":"); if (blockinfos.length != 3) { sender.sendMessage(ChatColor.RED + "Unknown block ID and meta, syntax: " + SYNTAX); return true; } blockinfo = blockinfos; } else { blockinfo = new String[]{ args[2], ConfigManager.getInstance().getConfig().getString(ConfigNodes.BORDER_BLOCK), 
ConfigManager.getInstance().getConfig().getString(ConfigNodes.BORDER_BLOCK_META) }; } int borderID; try { borderID = Integer.parseInt(blockinfo[1]); } catch (NumberFormatException e) { sender.sendMessage(ChatColor.RED + "Unknown number " + blockinfo[1] + " for block ID"); return true; } int metaID; try { metaID = Integer.parseInt(blockinfo[2]); } catch (NumberFormatException e) { sender.sendMessage(ChatColor.RED + "Unknown number " + blockinfo[2] + " for block meta"); return true; } //TODO BLOCKER put right border in here based on blockinfo[0] BorderCreator creator = new BorderCreator(new CylinderBorder()); creator.setBlockID(borderID); creator.setBlockMeta(metaID); creator.setCenter(null); //TODO generate the location creator.setRadius(radius); try { creator.createBorder(); } catch (TooManyBlocksException ignored) { sender.sendMessage(ChatColor.RED + "Error, hit max changable blocks"); return true; } sender.sendMessage(ChatColor.GOLD + "World border created successfully"); return true; } return false; } }
package nl.mpi.kinnate.ui;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.net.URI;
import java.net.URISyntaxException;
import javax.swing.JButton;
import javax.swing.JInternalFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import nl.mpi.arbil.data.AbstractTreeHelper;
import nl.mpi.arbil.data.ArbilDataNodeLoader;
import nl.mpi.arbil.data.ArbilNode;
import nl.mpi.arbil.data.ArbilTreeHelper;
import nl.mpi.arbil.ui.ArbilNodeSearchPanel;
import nl.mpi.arbil.ui.ArbilSplitPanel;
import nl.mpi.arbil.ui.ArbilTable;
import nl.mpi.arbil.ui.ArbilTableModel;
import nl.mpi.arbil.ui.GuiHelper;
import nl.mpi.kinnate.kindata.VisiblePanelSetting;
import nl.mpi.kinnate.svg.GraphPanel;

/**
 * Panel showing an archive tree (local, remote or the MPI corpus root) from which
 * entities can be dragged onto a kin diagram, plus a button that opens an archive
 * search panel over the currently selected tree nodes.
 */
public class ArchiveEntityLinkerPanel extends JPanel implements ActionListener {

    private KinTree archiveTree;
    private JButton nextButton;
    // kept from construction; currently only used to seed loadTreeNodes
    private TreeType treeType;
    private VisiblePanelSetting panelSetting;

    /** Which corpus tree this panel displays. */
    public enum TreeType {

        RemoteTree, LocalTree, MpiTree
    }

    /**
     * Builds the panel: a scrollable, drag-enabled archive tree with a
     * "Search Selected" button underneath, then populates the tree.
     */
    public ArchiveEntityLinkerPanel(VisiblePanelSetting panelSetting, KinDiagramPanel kinDiagramPanel, GraphPanel graphPanel, KinDragTransferHandler dragTransferHandler, TreeType treeType) {
        this.treeType = treeType;
        this.panelSetting = panelSetting;
        archiveTree = new KinTree(kinDiagramPanel, graphPanel);
        this.setLayout(new BorderLayout());
        JPanel treePanel = new JPanel(new BorderLayout());
        this.add(treePanel, BorderLayout.CENTER);
        nextButton = new JButton("Search Selected");
        nextButton.setActionCommand("Search");
        nextButton.addActionListener(this);
        treePanel.add(new JScrollPane(archiveTree), BorderLayout.CENTER);
        treePanel.add(nextButton, BorderLayout.PAGE_END);
        archiveTree.setTransferHandler(dragTransferHandler);
        archiveTree.setDragEnabled(true);
        loadTreeNodes(treeType);
    }

    /**
     * Populates the tree's root children for the requested tree type and sets this
     * panel's name accordingly. URI problems are logged, not rethrown.
     */
    private void loadTreeNodes(TreeType treeType) {
        try {
            ArbilNode[] allEntities;
            AbstractTreeHelper treeHelper = ArbilTreeHelper.getSingleInstance();
            switch (treeType) {
                case LocalTree:
                    allEntities = treeHelper.getLocalCorpusNodes();
                    this.setName("Local Corpus");
                    break;
                case RemoteTree:
                    allEntities = treeHelper.getRemoteCorpusNodes();
                    this.setName("Remote Corpus");
                    break;
                case MpiTree:
                default:
                    // single well-known root node of the MPI corpus
                    ArbilNode imdiCorporaNode = ArbilDataNodeLoader.getSingleInstance().getArbilDataNode(null, new URI("http://corpus1.mpi.nl/IMDI/metadata/IMDI.imdi"));
                    allEntities = new ArbilNode[]{imdiCorporaNode};
                    this.setName("Nijmegen Corpus");
                    break;
            }
            archiveTree.rootNodeChildren = allEntities;
            archiveTree.requestResort();
        } catch (URISyntaxException exception) {
            GuiHelper.linorgBugCatcher.logError(exception);
        }
    }

    /**
     * Builds a search panel over the tree's current selection and registers it as a
     * target panel. (Renamed from the misspelled, misleading "getSeachPanel" — the
     * method creates and shows a panel, it returns nothing.)
     */
    private void openSearchPanel() {
        JPanel searchPanel = new JPanel(new BorderLayout());
        String frameTitle = "Archive Search";
        ArbilTableModel resultsTableModel = new ArbilTableModel();
        ArbilTable imdiTable = new ArbilTable(resultsTableModel, frameTitle);
        ArbilSplitPanel imdiSplitPanel = new ArbilSplitPanel(imdiTable);
        // todo: take care of main window actions such as pack that might cause odd visuals
        JInternalFrame searchFrame = new JInternalFrame();
        searchPanel.add(new ArbilNodeSearchPanel(searchFrame, resultsTableModel, archiveTree.getSelectedNodes()), BorderLayout.PAGE_START);
        searchPanel.add(imdiSplitPanel, BorderLayout.CENTER);
        JButton closeSearch = new JButton("Close Search");
        closeSearch.setActionCommand("Close Search");
        closeSearch.addActionListener(this);
        searchPanel.add(closeSearch, BorderLayout.PAGE_END);
        imdiSplitPanel.setSplitDisplay();
        searchPanel.setName(this.getName() + " Search");
        panelSetting.addTargetPanel(searchPanel, true);
    }

    /**
     * Handles the "Search" and "Close Search" buttons. The commands are mutually
     * exclusive, so they are dispatched with else-if.
     */
    public void actionPerformed(ActionEvent ae) {
        if (ae.getActionCommand().equals("Search")) {
            openSearchPanel();
        } else if (ae.getActionCommand().equals("Close Search")) {
            // the close button sits directly in the search panel, so its parent is that panel
            panelSetting.removeTargetPanel(((Component) ae.getSource()).getParent());
        }
    }
}
package com.almende.dialog.adapter; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.logging.Logger; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import com.almende.dialog.Settings; import com.almende.dialog.accounts.AdapterConfig; import com.almende.dialog.accounts.Dialog; import com.almende.dialog.accounts.Recording; import com.almende.dialog.agent.AdapterAgent; import com.almende.dialog.agent.DialogAgent; import com.almende.dialog.model.Answer; import com.almende.dialog.model.MediaProperty.MediaPropertyKey; import com.almende.dialog.model.MediaProperty.MediumType; import com.almende.dialog.model.Question; import com.almende.dialog.model.QuestionEventRunner; import com.almende.dialog.model.Session; import com.almende.dialog.model.ddr.DDRRecord; import com.almende.dialog.model.ddr.DDRRecord.CommunicationStatus; import com.almende.dialog.util.DDRUtils; import com.almende.dialog.util.ServerUtils; import com.askfast.commons.entity.AdapterProviders; import com.askfast.commons.entity.TTSInfo; import com.askfast.commons.utils.PhoneNumberUtils; import com.askfast.commons.utils.TimeUtils; import com.askfast.strowger.sdk.StrowgerRestClient; import com.askfast.strowger.sdk.actions.Action; import com.askfast.strowger.sdk.actions.Hangup; import com.askfast.strowger.sdk.actions.Include; import com.askfast.strowger.sdk.actions.Play; import com.askfast.strowger.sdk.actions.StrowgerAction; import com.askfast.strowger.sdk.model.Peer; import com.askfast.strowger.sdk.model.StatusCallback; import com.askfast.strowger.sdk.model.StrowgerRequest; import com.askfast.strowger.sdk.resources.Call; import com.askfast.strowger.sdk.resources.Dial; 
import com.google.i18n.phonenumbers.PhoneNumberUtil.PhoneNumberFormat;

@Path("strowger")
public class TPAdapter {

    protected static final Logger log = Logger.getLogger(TPAdapter.class.getName());
    private static final int LOOP_DETECTION = 10;
    protected String TIMEOUT_URL = "timeout";
    //protected String EXCEPTION_URL="exception";

    /**
     * Initiates a call to all the numbers in the addressNameMap and returns a
     * Map of <adress, SessionKey>
     *
     * @param addressNameMap
     *            Map with address (e.g. phonenumber or email) as Key and name
     *            as value. The name is useful for email and not used for SMS
     *            etc
     * @param dialogIdOrUrl
     *            If a String with leading "http" is found its considered as a
     *            url. Else a Dialog of this id is tried t The URL on which a
     *            GET HTTPRequest is performed and expected a question JSON
     * @param config
     *            the adapterConfig which is used to perform this broadcast
     * @param accountId
     *            AccoundId initiating this broadcast. All costs are applied to
     *            this accountId
     * @param applicationId
     *            This is set in the DialogAgent and should match that with the
     *            applicationId of the twillo account
     * @return A Map of <adress, SessionKey>
     * @throws Exception
     */
    public static HashMap<String, String> dial(Map<String, String> addressNameMap, String dialogIdOrUrl,
                                               AdapterConfig config, String accountId, String applicationId,
                                               String bearerToken) throws Exception {

        HashMap<String, Session> sessionMap = new HashMap<String, Session>();
        HashMap<String, String> result = new HashMap<String, String>();
        // If it is a broadcast don't provide the remote address because it is deceiving.
        String loadAddress = "";
        if (addressNameMap == null || addressNameMap.isEmpty()) {
            throw new Exception("No address given. Error in call request");
        }
        else if (addressNameMap.size() == 1) {
            loadAddress = addressNameMap.keySet().iterator().next();
            loadAddress = PhoneNumberUtils.formatNumber(loadAddress, null);
        }
        //create a session for the first remote address
        // (a seed session is needed before the dialog URL and question can be resolved)
        String firstRemoteAddress = loadAddress != null && !loadAddress.trim().isEmpty() ? new String(loadAddress)
                                                                                         : new String(addressNameMap.keySet().iterator().next());
        firstRemoteAddress = PhoneNumberUtils.formatNumber(firstRemoteAddress, null);
        Session session = Session.getOrCreateSession(config, firstRemoteAddress);
        session.setAccountId(accountId);
        session.killed = false;
        session.setDirection("outbound");
        session.setType(AdapterAgent.ADAPTER_TYPE_CALL);
        session.addExtras(AdapterConfig.ADAPTER_PROVIDER_KEY, AdapterProviders.TP.toString());
        session.setAdapterID(config.getConfigId());
        session.setAccountId(accountId);
        session.addExtras(DialogAgent.BEARER_TOKEN_KEY, bearerToken);
        session.setRemoteAddress(firstRemoteAddress);
        session.storeSession();
        String url = Dialog.getDialogURL(dialogIdOrUrl, accountId, session);
        // getDialogURL may have touched the stored session, so reload before mutating further
        session = session.reload();
        session.setStartUrl(url);
        session.storeSession();
        //create a ddr record
        DDRRecord ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(config, accountId, firstRemoteAddress, 1,
                                                                              url, session);
        //session = session.reload();
        //fetch the question
        Question question = Question.fromURL(url, loadAddress, config.getFormattedMyAddress(),
                                             ddrRecord != null ? ddrRecord.getId() : null, session, null);
        if (question != null) {
            for (String address : addressNameMap.keySet()) {
                String formattedAddress = PhoneNumberUtils.formatNumber(address, PhoneNumberFormat.E164);
                if (formattedAddress != null && PhoneNumberUtils.isValidPhoneNumber(formattedAddress)) {
                    //ignore the address for which the session is already created.
                    if (!formattedAddress.equals(session.getRemoteAddress())) {
                        //create a new session for every call request
                        session = Session.createSession(config, formattedAddress);
                    }
                    session.killed = false;
                    session.setStartUrl(url);
                    session.setAccountId(accountId);
                    session.setDirection("outbound");
                    session.setRemoteAddress(formattedAddress);
                    session.setType(AdapterAgent.ADAPTER_TYPE_CALL);
                    session.addExtras(AdapterConfig.ADAPTER_PROVIDER_KEY, AdapterProviders.TP.toString());
                    session.setAdapterID(config.getConfigId());
                    session.setQuestion(question);
                    session.setDdrRecordId(ddrRecord != null ? ddrRecord.getId() : null);
                    //update session with account credentials
                    session.addExtras(AdapterConfig.ACCESS_TOKEN_KEY, config.getAccessToken());
                    session.addExtras(AdapterConfig.ACCESS_TOKEN_SECRET_KEY, config.getAccessTokenSecret());
                    //update the startTime of the session
                    session.setStartTimestamp(String.valueOf(TimeUtils.getServerCurrentTimeInMillis()));
                    session.storeSession();
                    if(ddrRecord != null) {
                        ddrRecord.addStatusForAddress(formattedAddress, CommunicationStatus.SENT);
                        ddrRecord.createOrUpdate();
                    }
                    String extSession = "";
                    if (!ServerUtils.isInUnitTestingEnvironment()) {
                        StrowgerRestClient client = new StrowgerRestClient( config.getAccessToken(),
                                                                            config.getAccessTokenSecret() );
                        // Make a call
                        // NOTE(review): the status callback and control URLs below point at the
                        // /rest/twilio/* endpoints although this resource is @Path("strowger") —
                        // looks copied from the Twilio adapter; confirm these are intended.
                        StatusCallback callback = new StatusCallback( URI.create("http://" + Settings.HOST +
                                                                                 "/dialoghandler/rest/twilio/cc"),
                                                                      Arrays.asList( "initiated", "ringing", "answered",
                                                                                     "completed", "aborted" ) );
                        Dial dial = new Dial();
                        dial.addPeer( new Peer( formattedAddress, callback ) );
                        dial.setCallerId( config.getMyAddress() );
                        dial.setControlUrl( URI.create( "http://" + Settings.HOST + "/dialoghandler/rest/twilio/new" ) );
                        extSession = client.initiateCall( config.getMyAddress(), dial );
                        log.info(String.format("Call triggered with external id: %s", extSession));
                        session.setExternalSession(extSession);
                        session.storeSession();
                    }
                    sessionMap.put(formattedAddress, session);
                    result.put(formattedAddress, session.getKey());
                }
                else {
                    // invalid number: report per-address error, mark the ddr and drop any session
                    result.put(address, String.format(DialogAgent.INVALID_ADDRESS_MESSAGE, address));
                    log.severe(String.format("To address is invalid: %s. Ignoring.. ", address));
                    if(ddrRecord != null) {
                        ddrRecord.addStatusForAddress(address, CommunicationStatus.ERROR);
                        ddrRecord.createOrUpdate();
                    }
                    sessionMap.remove(formattedAddress);
                    session.dropIfRemoteAddressMatches(formattedAddress);
                }
            }
        }
        else {
            // no question could be fetched: mark every address as errored and abort the broadcast
            log.severe(DialogAgent.getQuestionNotFetchedMessage(dialogIdOrUrl));
            if(ddrRecord != null) {
                ddrRecord.setStatusForAddresses(addressNameMap.keySet(), CommunicationStatus.ERROR);
                ddrRecord.addAdditionalInfo(DDRUtils.DDR_MESSAGE_KEY,
                                            DialogAgent.getQuestionNotFetchedMessage(dialogIdOrUrl));
                ddrRecord.createOrUpdate();
            }
            session.drop();
            throw new Exception(DialogAgent.getQuestionNotFetchedMessage(dialogIdOrUrl));
        }
        if(ddrRecord != null) {
            ddrRecord.setToAddress(addressNameMap);
            ddrRecord.setSessionKeysFromMap(sessionMap);
            ddrRecord.createOrUpdate();
        }
        return result;
    }

    /**
     * Handles incoming new calls.
     * @param CallSid
     * @param AccountSid
     * @param localID
     * @param remoteID
     * @param direction
     * @param forwardedFrom
     * @param callStatus
     * @param isTest
     * @return Twilio response
     */
    @Path("new")
    @POST
    @Produces("application/json")
    public Response getNewDialogPost(@QueryParam("isTest") Boolean isTest, String json) {

        StrowgerRequest req = StrowgerRequest.fromJson( json );
        Call call = req.getData();
        String callId = call.getId();
        String remoteID = call.getCalled();
        String localID = call.getCaller();
        String direction = call.getType();
        log.info("call started:" + call.getType() + ":" + remoteID + ":" + localID);
        Map<String, String> extraParams = new HashMap<String, String>();
        String url = "";
        Session session = Session.getSessionByExternalKey(callId);
        AdapterConfig config = null;
        String formattedRemoteId = null;
        DDRRecord ddrRecord = null;
        if (direction.equals("inbound")) {
            //swap the remote and the local numbers if its inbound
            String tmpLocalId = new String(localID);
            localID = new String(remoteID);
            remoteID = tmpLocalId;
            config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_CALL, localID);
            formattedRemoteId = PhoneNumberUtils.formatNumber(remoteID, null);
            //create a session for incoming only. If the session already exists it is a failover call by twilio.
            if (session == null) {
                session = Session.createSession(config, formattedRemoteId);
                session.setAccountId(config.getOwner());
                session.setExternalSession(callId);
                if (isTest != null && Boolean.TRUE.equals(isTest)) {
                    session.setAsTestSession();
                }
                session.storeSession();
                url = config.getURLForInboundScenario(session);
                try {
                    ddrRecord = DDRUtils.createDDRRecordOnIncomingCommunication(config, config.getOwner(),
                                                                                formattedRemoteId, url, session);
                }
                catch (Exception e) {
                    e.printStackTrace();
                }
            }
            else {
                // when it's a failover call also reuse the ddr record.
                ddrRecord = session.getDDRRecord();
            }
        }
        else {
            // anything that is not inbound is treated as outbound; the session (and its
            // start url / ddr record) was created by dial()
            direction = "outbound";
            config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_CALL, localID);
            try {
                if (session != null) {
                    url = Dialog.getDialogURL(session.getStartUrl(), session.getAccountId(), session);
                    ddrRecord = session.getDDRRecord();
                }
            }
            catch (UnsupportedEncodingException e) {
                e.printStackTrace();
            }
        }
        if (session != null) {
            session.setStartUrl(url);
            session.setDirection(direction);
            // NOTE(review): formattedRemoteId is only set in the inbound branch, so for
            // outbound calls this overwrites the session's remote address with null — confirm.
            session.setRemoteAddress(formattedRemoteId);
            session.setType(AdapterAgent.ADAPTER_TYPE_CALL);
            // NOTE(review): provider is set to TWILIO here while dial() tags sessions with
            // AdapterProviders.TP — looks inconsistent for this TP adapter; confirm.
            session.addExtras(AdapterConfig.ADAPTER_PROVIDER_KEY, AdapterProviders.TWILIO.toString());
            session.setAdapterID(config.getConfigId());
            //fetch the question
            Question question = session.getQuestion();
            if (question == null) {
                question = Question.fromURL(url, formattedRemoteId, config.getFormattedMyAddress(),
                                            ddrRecord != null ? ddrRecord.getId() : null, session, extraParams);
            }
            if (!ServerUtils.isValidBearerToken(session, config)) {
                // not enough credit / invalid token: play the insufficient-credit message and exit
                TTSInfo ttsInfo = ServerUtils.getTTSInfoFromSession(question, session);
                String insufficientCreditMessage = ServerUtils.getInsufficientMessage(ttsInfo.getLanguage());
                return Response.ok(renderExitQuestion(question, Arrays.asList(insufficientCreditMessage), session))
                                                .build();
            }
            // Check if we were able to load a question
            if (question == null) {
                //If not load a default error message
                question = Question.getError(config.getPreferred_language());
            }
            session.setQuestion(question);
            session.setDdrRecordId(ddrRecord != null ? ddrRecord.getId() : null);
            session.storeSession();
            if (session.getQuestion() != null) {
                return handleQuestion(question, config, formattedRemoteId, session, extraParams);
            }
            else {
                return Response.ok().build();
            }
        }
        else {
            log.severe(String.format("CallSid: %s From: %s to: %s direction: %s has no sessions", callId, localID,
                                     remoteID, direction));
            return Response.ok("No sessions found.").build();
        }
    }

    /**
     * The answer inputs are redirected to this endpoint
     *
     * @param answer_id
     *            This is generally not associated with a twilio answer
     * @param answer_input
     *            The actual answer given for a previous question
     * @param localID
     *            The from address of this call
     * @param remoteID
     *            The to address of this call
     * @param direction
     *            "inbound" or "outbound-dial"
     * @param recordingUrl
     *            Url for the voice recording if previous question was of type
     *            OPEN_AUDIO
     * @param dialCallStatus
     *            The call status
     * @param callSid
     *            The external id for this call. This can also be the parent
     *            externalId if previous question was a referral
     * @return
     */
    @Path("answer")
    @POST
    @Produces("application/json")
    public Response answer(String json) {

        StrowgerRequest req = StrowgerRequest.fromJson( json );
        Call call = req.getData();
        String callId = call.getId();
        String localID = call.getCaller();
        String remoteID = call.getCalled();
        String direction = call.getType();
        String answer_input = call.getDigits();
        String recordingUrl = call.getAudioUrl();
        StrowgerAction strowger = new StrowgerAction();
        try {
            // DTMF input arrives url-encoded
            answer_input = answer_input != null ? URLDecoder.decode(answer_input, "UTF-8") : answer_input;
        }
        catch (UnsupportedEncodingException e) {
            log.warning(String.format("Answer input decode failed for: %s", answer_input));
        }
        if (direction.equals("inbound")) {
            // swap local/remote for inbound, mirroring getNewDialogPost
            String tmpLocalId = new String(localID);
            localID = new String(remoteID);
            remoteID = tmpLocalId;
        }
        Session session = Session.getSessionByExternalKey(callId);
        if (session != null) {
            if (recordingUrl != null) {
                // for OPEN_AUDIO questions the stored recording replaces the answer text
                answer_input = storeAudioFile(recordingUrl.replace(".wav", "") + ".wav", session.getAccountId(),
                                              session.getDdrRecordId(), session.getAdapterID());
            }
            //TODO: update call status
            //add a tag in the session saying its picked up
            session.setCallPickedUpStatus(true);
            session.storeSession();
            Question question = session.getQuestion();
            log.info(String.format("Question before answer is: %s", ServerUtils.serializeWithoutException(question)));
            if (question != null) {
                String responder = session.getRemoteAddress();
                if (session.killed) {
                    log.warning("session is killed");
                    return Response.status(Response.Status.BAD_REQUEST).build();
                }
                // capture the expanded question text BEFORE advancing the dialog
                String answerForQuestion = question.getQuestion_expandedtext(session);
                question = question.answer(responder, null, answer_input, session);
                log.info(String.format("Question after answer is: %s", ServerUtils.serializeWithoutException(question)));
                session.setQuestion(question);
                session.storeSession();
                //check if ddr is in session. save the answer in the ddr
                if (session.getDdrRecordId() != null) {
                    try {
                        DDRRecord ddrRecord = DDRRecord.getDDRRecord(session.getDdrRecordId(), session.getAccountId());
                        if (ddrRecord != null) {
                            ddrRecord.addAdditionalInfo(DDRRecord.ANSWER_INPUT_KEY + ":" + answerForQuestion,
                                                        answer_input);
                            ddrRecord.createOrUpdateWithLog(session);
                        }
                    }
                    catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                //the answered event is triggered if there are no next requests to process and the previous question
                //was not an exit question (which would also give a null question on question.answer())
                if(question != null && !"exit".equalsIgnoreCase(question.getType())) {
                    session.setCallConnectedStatus(true);
                    answered(direction, remoteID, localID, session.getKey());
                }
                else {
                    session.setCallConnectedStatus(false);
                }
                session.storeSession();
                return handleQuestion(question, session.getAdapterConfig(), responder, session, null);
            }
            else {
                log.warning("No question found in session!");
            }
        }
        else {
            log.warning("No session found for external call id: " + callId);
        }
        // fallthrough: reply with an empty action document
        String reply = strowger.toJson();
        return Response.ok(reply).build();
    }

    /**
     * Fired when no answer input arrived in time: raises a "timeout" event on the
     * current question and retries it up to its RETRY_LIMIT before hanging up.
     */
    @Path("timeout")
    @POST
    @Produces("application/json")
    public Response timeout(String json) throws Exception {

        StrowgerRequest req = StrowgerRequest.fromJson( json );
        Call call = req.getData();
        String callId = call.getId();
        String localID = call.getCaller();
        String remoteID = call.getCalled();
        String direction = call.getType();
        //swap local and remote ids if its an incoming call
        if (direction.equals("inbound")) {
            String tmpLocalId = new String(localID);
            localID = new String(remoteID);
            remoteID = tmpLocalId;
        }
        Session session = Session.getSessionByExternalKey(callId);
        if (session != null) {
            //TODO: update call status
            Question question = session.getQuestion();
            String responder = session.getRemoteAddress();
            if (session.killed) {
                return Response.status(Response.Status.BAD_REQUEST).build();
            }
            HashMap<String, Object> extras = new HashMap<String, Object>();
            extras.put("sessionKey",
session.getKey()); extras.put("requester", session.getLocalAddress()); if(session.getCallStatus()!=null) { extras.put( "callStatus", session.getCallStatus() ); } question = question.event("timeout", "No answer received", extras, responder, session); session.setQuestion(question); if (question != null) { String retryLimit = question.getMediaPropertyValue(MediumType.BROADSOFT, MediaPropertyKey.RETRY_LIMIT); retryLimit = retryLimit != null ? retryLimit : String.valueOf(Question.DEFAULT_MAX_QUESTION_LOAD); Integer retryCount = session.getRetryCount(); retryCount = retryCount != null ? retryCount : 0; if (retryCount < Integer.parseInt(retryLimit)) { session.setRetryCount(++retryCount); } else { //hangup so set question to null question = null; } } else { log.warning("No question found for this session :" + session.getKey()); } session.storeSession(); return handleQuestion(question, session.getAdapterConfig(), responder, session, null); } else { log.warning("Strange that no session is found for external call id: " + callId); } StrowgerAction strowger = new StrowgerAction(); String reply = strowger.toJson(); return Response.ok(reply).build(); } @Path("cc") @POST public Response receiveCCMessage(String json) { Call call = Call.fromJson( json ); String callId = call.getId(); String localID = call.getCaller(); String remoteID = call.getCalled(); String direction = call.getType(); String status = call.getStatus(); log.info("Received twiliocc status: " + status); if (direction.equals("outbound-api")) { direction = "outbound"; } else if (direction.equals("inbound")) { String tmpLocalId = new String(localID); localID = remoteID; remoteID = tmpLocalId; } AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_CALL, localID); Session session = Session.getSessionByExternalKey(callId); if (session != null) { //update session with call timings if (status.equals("completed")) { finalizeCall(config, session, callId, remoteID); } } log.info("Session key: or 
external sid" + session != null ? session.getKey() : callId); return Response.ok("").build(); } public void answered(String direction, String remoteID, String localID, String sessionKey) { log.info("call answered with:" + direction + "_" + remoteID + "_" + localID); Session session = Session.getSession(sessionKey); //for direction = transfer (redirect event), json should not be null //make sure that the answered call is not triggered twice if (session != null && session.getQuestion() != null && !isEventTriggered("answered", session)) { //update the communication status to received status DDRRecord ddrRecord = session.getDDRRecord(); if (ddrRecord != null && !"inbound".equals(session.getDirection())) { ddrRecord.addStatusForAddress(session.getRemoteAddress(), CommunicationStatus.RECEIVED); ddrRecord.createOrUpdate(); } String responder = session.getRemoteAddress(); String referredCalledId = session.getAllExtras().get("referredCalledId"); HashMap<String, Object> timeMap = new HashMap<String, Object>(); timeMap.put("referredCalledId", referredCalledId); timeMap.put("sessionKey", sessionKey); if (session.getParentSessionKey() != null) { timeMap.put(Session.PARENT_SESSION_KEY, session.getParentSessionKey()); } timeMap.put("requester", session.getLocalAddress()); QuestionEventRunner questionEventRunner = new QuestionEventRunner(session.getQuestion(), "answered", "Answered", responder, timeMap, session); Thread questionEventRunnerThread = new Thread(questionEventRunner); questionEventRunnerThread.start(); } } /** * Retrieve call information and with that: - update ddr record - destroy * session - send hangup * * @param config * @param session * @param callSid * @param direction * @param remoteID */ private void finalizeCall(AdapterConfig config, Session session, String callSid, String remoteID) { // TODO: Implement } private Response handleQuestion(Question question, AdapterConfig adapterConfig, String remoteID, Session session, Map<String, String> extraParams) { String 
result = new StrowgerAction().toJson(); Return res = formQuestion(question, adapterConfig.getConfigId(), remoteID, null, session, extraParams); if (question != null && !question.getType().equalsIgnoreCase("comment")) question = res.question; // if the adapter is a trial adapter, add a introductory node log.info("question formed at handleQuestion is: " + ServerUtils.serializeWithoutException(question)); log.info("prompts formed at handleQuestion is: " + res.prompts); if (question != null) { question.generateIds(); session.setQuestion(question); session.setRemoteAddress(remoteID); session.storeSession(); if (question.getType().equalsIgnoreCase("closed")) { // TODO: Implement handling of closed questions } else if (question.getType().equalsIgnoreCase("open")) { // TODO: Implement handling of open questions } else if (question.getType().equalsIgnoreCase("referral")) { // TODO: Implement handling of referral questions } else if (question.getType().equalsIgnoreCase("exit")) { result = renderExitQuestion(question, res.prompts, session); } else if (question.getType().equalsIgnoreCase("conference")) { // TODO: Implement handling of conference questions } else if (res.prompts.size() > 0) { result = renderComment(question, res.prompts, session); } } else if (res.prompts.size() > 0) { result = renderComment(null, res.prompts, session); } else { log.info("Going to hangup? 
So clear Session?"); } log.info("Sending json: " + result); return Response.status(Status.OK).entity(result).build(); } /** * check if for this session an * @param eventName * @param session * @return */ private static boolean isEventTriggered(String eventName, Session session) { if (session != null) { if (session.getAllExtras().get("event_" + eventName) != null) { String timestamp = TimeUtils.getStringFormatFromDateTime(Long.parseLong(session.getAllExtras() .get("event_" + eventName)), null); log.warning(eventName + "event already triggered before for this session at: " + timestamp); return true; } } return false; } public class Return{ ArrayList<String> prompts; Question question; public Return( ArrayList<String> prompts, Question question ) { this.prompts = prompts; this.question = question; } } public Return formQuestion(Question question, String adapterID, String address, String ddrRecordId, Session session, Map<String, String> extraParams) { ArrayList<String> prompts = new ArrayList<String>(); for ( int count = 0; count <= LOOP_DETECTION; count++ ) { if ( question == null ) break; log.info( "Going to form question of type: " + question.getType() ); if ( question.getType() == null ) { question = null; break; } String preferred_language = question.getPreferred_language(); question.setPreferred_language( preferred_language ); String qText = question.getQuestion_text(); if ( qText != null && !qText.equals( "" ) ) { prompts.add( qText ); } if ( question.getType().equalsIgnoreCase( "closed" ) ) { for ( Answer ans : question.getAnswers() ) { String answer = ans.getAnswer_text(); if ( answer != null && !answer.equals( "" ) && !answer.startsWith( "dtmfKey: prompts.add( answer ); } } break; //Jump from forloop } else if ( question.getType().equalsIgnoreCase( "comment" ) ) { // If it is a comment directly read the next question, because we can append the prompts. 
//question = question.answer( null, adapterID, null, null, sessionKey );
break;
}
else if (question.getType().equalsIgnoreCase("referral")) {
    if (question.getUrl() != null && question.getUrl().size() == 1 &&
        !question.getUrl().get(0).startsWith("tel:")) {
        // Non-telephone referral: fetch the next question inline and keep looping.
        String localAddress = null;
        if (session != null) {
            localAddress = session.getAdapterConfig() != null
                           ? session.getAdapterConfig().getFormattedMyAddress()
                           : session.getLocalAddress();
        }
        question = Question.fromURL(question.getUrl().get(0), address, localAddress, ddrRecordId,
                                    session, extraParams);
        //question = question.answer(null, null, null);
        // break;
    }
    else {
        // Break out because we are going to reconnect
        break;
    }
}
else {
    break; //Jump from forloop (open questions, etc.)
}
}
return new Return(prompts, question);
}

/**
 * Renders a comment question: plays all prompts and, when answers exist,
 * includes the answer endpoint so input is routed back to this handler.
 */
protected String renderComment(Question question, ArrayList<String> prompts, Session session) {

    StrowgerAction strowger = new StrowgerAction();
    addPrompts(prompts, strowger, question, session);
    if (question != null && question.getAnswers() != null && !question.getAnswers().isEmpty()) {
        Include include = new Include(URI.create(getAnswerUrl()));
        strowger.addAction(include);
    }
    return strowger.toJson();
}

/** Renders an exit question: play the remaining prompts, then hang up. */
protected String renderExitQuestion(Question question, List<String> prompts, Session session) {

    StrowgerAction strowger = new StrowgerAction();
    addPrompts(prompts, strowger, question, session);
    strowger.addAction(new Hangup());
    return strowger.toJson();
}

/**
 * Appends one Play action per prompt: URLs are played directly, anything else
 * is converted to audio through the TTS service first.
 */
protected void addPrompts(List<String> prompts, StrowgerAction strowger, Question question, Session session) {

    for (String prompt : prompts) {
        Action actionToAppend = null;
        // A single startsWith("http") also matches "https"; the original second check was redundant.
        if (prompt.startsWith("http")) {
            actionToAppend = new Play(URI.create(prompt));
        }
        else {
            String url = ServerUtils.getTTSURL(formatPrompt(prompt), question, session);
            actionToAppend = new Play(URI.create(url));
        }
        strowger.addAction(actionToAppend);
    }
}

/**
 * Store an incoming audio file and return the download url
 *
 * @param url       source url of the recording
 * @param accountId owning account
 * @param ddrId     ddr record to associate
 * @param adapterId adapter that produced the recording
 * @return downloadUrl
 */
private String storeAudioFile(String url, String accountId, String ddrId, String adapterId) {

    String uuid = UUID.randomUUID().toString();
    Recording recording = Recording.createRecording(new Recording(uuid, accountId, url, "audio/wav", ddrId,
                                                                  adapterId));
    return "http://" + Settings.HOST + "/account/" + accountId + "/recording/" + recording.getId() + ".wav";
}

/**
 * Returns the formatted prompt as needed by Strowger
 *
 * @param prompt raw prompt, possibly prefixed and URL-encoded
 * @return the decoded prompt text
 */
private String formatPrompt(String prompt) {

    try {
        // NOTE(review): this literal was truncated in this copy of the file; restored as
        // the "text://" prompt prefix that this handler strips before handing the text
        // to TTS (mirrors the http:// check in addPrompts) — please verify against VCS.
        prompt = prompt.replace("text://", "");
        prompt = URLDecoder.decode(prompt, "UTF-8");
    }
    catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }
    return prompt;
}

/** Absolute URL of the answer endpoint on this host. */
public String getAnswerUrl() {

    return "http://" + Settings.HOST + "/dialoghandler/rest/strowger/answer";
}
}
package com.liferay.lms.upgrade;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import com.liferay.lms.model.LmsPrefs;
import com.liferay.lms.model.Module;
import com.liferay.lms.service.LmsPrefsLocalServiceUtil;
import com.liferay.lms.service.ModuleLocalServiceUtil;
import com.liferay.portal.kernel.dao.db.DB;
import com.liferay.portal.kernel.dao.db.DBFactoryUtil;
import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.search.Indexer;
import com.liferay.portal.kernel.search.IndexerRegistryUtil;
import com.liferay.portal.kernel.upgrade.UpgradeProcess;
import com.liferay.portal.kernel.util.ContentTypes;
import com.liferay.portal.model.Company;
import com.liferay.portal.model.ResourceConstants;
import com.liferay.portal.service.CompanyLocalServiceUtil;
import com.liferay.portal.service.ResourceActionLocalServiceUtil;
import com.liferay.portal.service.ResourcePermissionLocalServiceUtil;
import com.liferay.portlet.asset.service.AssetEntryLocalServiceUtil;

/**
 * Upgrade step to LMS portlet version 3.7.0: adds the denied-inscription
 * columns to lms_course, creates the course-type tables, refreshes module
 * asset entries / search index, and grants ADD_ACTIVITY to the editor role
 * for every learning-activity type in every company.
 */
public class UpgradeVersion_3_7_0 extends UpgradeProcess {

	private static Log log = LogFactoryUtil.getLog(UpgradeVersion_3_7_0.class);

	// FIX: made final; also reused below instead of a duplicated string literal.
	private static final String ADD_ACTIVITY = "ADD_ACTIVITY";

	// All learning-activity types that need the ADD_ACTIVITY action/permission.
	// FIX: replaces eight copy-pasted checkResourceActions/addResourcePermission blocks.
	private static final String[] LEARNING_ACTIVITY_TYPES = {
		"com.liferay.lms.learningactivity.ResourceExternalLearningActivityType",
		"com.liferay.lms.learningactivity.ResourceInternalLearningActivityType",
		"com.liferay.lms.learningactivity.SurveyLearningActivityType",
		"com.liferay.lms.learningactivity.TaskEvaluationLearningActivityType",
		"com.liferay.lms.learningactivity.TaskOfflineLearningActivityType",
		"com.liferay.lms.learningactivity.TaskOnlineLearningActivityType",
		"com.liferay.lms.learningactivity.TaskP2PLearningActivityType",
		"com.liferay.lms.learningactivity.TestLearningActivityType"
	};

	@Override
	public int getThreshold() {
		return 370;
	}

	@Override
	protected void doUpgrade() throws Exception {
		log.info("Actualizando version a 3.7");

		String alterCourseDeniedInscription = "ALTER TABLE `lms_course` "+
				"ADD COLUMN `deniedInscription` TINYINT(4) NULL DEFAULT NULL AFTER `welcomeSubject`;";
		String alterCourseDeniedInscriptionSubject = "ALTER TABLE `lms_course` "+
				"ADD COLUMN `deniedInscriptionSubject` VARCHAR(75) NULL DEFAULT NULL AFTER `deniedInscription`;";
		String alterCourseDeniedInscriptionMsg = "ALTER TABLE `lms_course` "+
				"ADD COLUMN `deniedInscriptionMsg` LONGTEXT NULL AFTER `deniedInscriptionSubject`;";
		String createTableCourseType = "CREATE TABLE IF NOT EXISTS `lms_coursetype` ("+
				"`courseTypeId` BIGINT(20) NOT NULL,"+
				"`companyId` BIGINT(20) NULL DEFAULT NULL,"+
				"`userId` BIGINT(20) NULL DEFAULT NULL,"+
				"`groupId` BIGINT(20) NULL DEFAULT NULL,"+
				"`userName` VARCHAR(75) NULL DEFAULT NULL,"+
				"`createDate` DATETIME NULL DEFAULT NULL,"+
				"`modifiedDate` DATETIME NULL DEFAULT NULL,"+
				"`name` LONGTEXT NULL,"+
				"`description` LONGTEXT NULL,"+
				"`iconId` BIGINT(20) NULL DEFAULT NULL,"+
				"PRIMARY KEY (`courseTypeId`),"+
				"INDEX `IX_B3E69260` (`companyId`),"+
				"INDEX `IX_9A6B92AC` (`courseTypeId`)"+
				")"+
				"COLLATE='utf8_general_ci' ENGINE=InnoDB;";
		String createTableCourseTypeCalificacionType = "CREATE TABLE IF NOT EXISTS `lms_coursetypecalificationtype` ("+
				"`courseTypeCalificationTypeId` BIGINT(20) NOT NULL,"+
				"`courseTypeId` BIGINT(20) NULL DEFAULT NULL,"+
				"`calificationType` BIGINT(20) NULL DEFAULT NULL,"+
				"PRIMARY KEY (`courseTypeCalificationTypeId`),"+
				"INDEX `IX_EB924F40` (`courseTypeCalificationTypeId`),"+
				"INDEX `IX_C2333876` (`courseTypeId`)"+
				")"+
				"COLLATE='utf8_general_ci' ENGINE=InnoDB ;";
		String createTableCourseTypeCourseEval = "CREATE TABLE IF NOT EXISTS `lms_coursetypecourseeval` ("+
				"`courseTypeEvalutationTypeId` BIGINT(20) NOT NULL,"+
				"`courseTypeId` BIGINT(20) NULL DEFAULT NULL,"+
				"`courseEvalId` VARCHAR(75) NULL DEFAULT NULL,"+
				"PRIMARY KEY (`courseTypeEvalutationTypeId`),"+
				"INDEX `IX_B8D93ACB` (`courseTypeEvalutationTypeId`),"+
				"INDEX `IX_AAF2A5A3` (`courseTypeId`)"+
				")"+
				"COLLATE='utf8_general_ci' ENGINE=InnoDB;";
		String createTableCourseTypeInscriptionType = "CREATE TABLE IF NOT EXISTS `lms_coursetypeinscriptiontype` ("+
				"`courseTypeInscriptionTypeId` BIGINT(20) NOT NULL,"+
				"`courseTypeId` BIGINT(20) NULL DEFAULT NULL,"+
				"`inscriptionType` BIGINT(20) NULL DEFAULT NULL,"+
				"PRIMARY KEY (`courseTypeInscriptionTypeId`),"+
				"INDEX `IX_E4E5DA7A` (`courseTypeId`),"+
				"INDEX `IX_14979B52` (`courseTypeInscriptionTypeId`)"+
				")"+
				"COLLATE='utf8_general_ci' ENGINE=InnoDB;";
		String createTableCourseTypeLearningActivity = "CREATE TABLE IF NOT EXISTS `lms_coursetypelearningactivity` ("+
				"`courseTypeLearningActivityId` BIGINT(20) NOT NULL,"+
				"`courseTypeId` BIGINT(20) NULL DEFAULT NULL,"+
				"`learningActivityTypeId` BIGINT(20) NULL DEFAULT NULL,"+
				"PRIMARY KEY (`courseTypeLearningActivityId`),"+
				"INDEX `IX_B878E519` (`courseTypeId`),"+
				"INDEX `IX_AC299F06` (`courseTypeLearningActivityId`)"+
				")"+
				"COLLATE='utf8_general_ci' ENGINE=InnoDB;";
		String createTableCourseTypeTemplate = "CREATE TABLE IF NOT EXISTS `lms_coursetypetemplate` ("+
				"`courseTypeTemplateId` BIGINT(20) NOT NULL,"+
				"`courseTypeId` BIGINT(20) NULL DEFAULT NULL,"+
				"`templateId` BIGINT(20) NULL DEFAULT NULL,"+
				"PRIMARY KEY (`courseTypeTemplateId`),"+
				"INDEX `IX_A40BAD46` (`courseTypeId`),"+
				"INDEX `IX_5BD857E0` (`courseTypeTemplateId`)"+
				")"+
				"COLLATE='utf8_general_ci' ENGINE=InnoDB;";

		DB db = DBFactoryUtil.getDB();
		runSql(db, "Alter table lms_course -->> Add deniedDescription", alterCourseDeniedInscription);
		runSql(db, "Alter table lms_course -->> Add deniedDescriptionSubject", alterCourseDeniedInscriptionSubject);
		runSql(db, "Alter table lms_course -->> Add deniedInscriptionMsg", alterCourseDeniedInscriptionMsg);
		runSql(db, "Create table lms_coursetype", createTableCourseType);
		runSql(db, "Create table lms_coursetypecalificationtype", createTableCourseTypeCalificacionType);
		runSql(db, "Create table lms_coursetypecourseeval", createTableCourseTypeCourseEval);
		runSql(db, "Create table lms_coursetypeinscriptiontype", createTableCourseTypeInscriptionType);
		runSql(db, "Create table lms_coursetypelearningactivity", createTableCourseTypeLearningActivity);
		runSql(db, "Create table lms_coursetypetemplate", createTableCourseTypeTemplate);

		log.info("::::::::::::ASSET MODULES:::::::::::::::::::::");
		Indexer indexer = IndexerRegistryUtil.nullSafeGetIndexer(Module.class);
		for (Module module : ModuleLocalServiceUtil.getModules(-1, -1)) {
			try {
				AssetEntryLocalServiceUtil.updateEntry(module.getUserId(), module.getGroupId(),
						Module.class.getName(), module.getModuleId(), module.getUuid(), 0, null, null, true,
						module.getStartDate(), module.getEndDate(), new Date(System.currentTimeMillis()), null,
						ContentTypes.TEXT_HTML, module.getTitle(), module.getDescription(),
						module.getDescription(), null, null, 0, 0, null, false);
				indexer.reindex(module);
			}
			catch (Exception e) {
				log.error("Error updating asset entry for module " + module.getModuleId(), e);
			}
		}

		//Comprobamos que los permisos existan
		try {
			List<String> actionIds = new ArrayList<String>();
			actionIds.add(ADD_ACTIVITY);
			for (String className : LEARNING_ACTIVITY_TYPES) {
				ResourceActionLocalServiceUtil.checkResourceActions(className, actionIds);
			}
		}
		catch (Exception e) {
			log.error("Error checking resource actions", e);
		}

		List<Company> listCompanies = CompanyLocalServiceUtil.getCompanies();
		for (Company company : listCompanies) {
			log.info("Permisos para company: " + company.getCompanyId());
			try {
				LmsPrefs lmsPrefs = LmsPrefsLocalServiceUtil.getLmsPrefs(company.getCompanyId());
				long editorRoleId = lmsPrefs.getEditorRole();
				for (String className : LEARNING_ACTIVITY_TYPES) {
					addActivityPermission(company.getCompanyId(), className, editorRoleId);
				}
			}
			catch (PortalException | SystemException e) {
				log.error("Error granting permissions for company " + company.getCompanyId(), e);
			}
		}
	}

	// Runs one DDL statement, logging (not propagating) failures, like the original
	// per-statement try/catch blocks did — columns/tables may already exist.
	private void runSql(DB db, String message, String sql) {
		log.info(message);
		try {
			db.runSQL(sql);
		}
		catch (IOException | SQLException e) {
			log.error("SQL failed: " + message, e);
		}
	}

	// Grants ADD_ACTIVITY on one learning-activity type to the editor role;
	// failures are logged per type so the remaining grants still run.
	private void addActivityPermission(long companyId, String className, long editorRoleId) {
		try {
			ResourcePermissionLocalServiceUtil.addResourcePermission(companyId, className,
					ResourceConstants.SCOPE_GROUP_TEMPLATE, "0", editorRoleId, ADD_ACTIVITY);
		}
		catch (PortalException | SystemException e) {
			log.error("Could not add " + ADD_ACTIVITY + " permission for " + className, e);
		}
	}
}
package com.exedio.cope.lib;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;

import bak.pcj.map.IntKeyOpenHashMap;

import com.exedio.cope.lib.pattern.Qualifier;
import com.exedio.cope.lib.search.Condition;
import com.exedio.cope.lib.util.ReactivationConstructorDummy;

/**
 * Runtime metadata for one persistent item class: its attributes, features,
 * unique constraints, qualifiers, type hierarchy and backing database table.
 */
public final class Type implements Selectable
{
	// Global registry: item java class -> Type; filled by the constructor.
	private static final HashMap typesByClass = new HashMap();

	final Class javaClass;
	// Unqualified class name; also used as the table name.
	private final String id;
	private final Type supertype;

	private final Attribute[] declaredAttributes;
	private final List declaredAttributeList;
	private final Attribute[] attributes;
	private final List attributeList;
	private final Feature[] declaredFeatures;
	private final List declaredFeatureList;
	private final Feature[] features;
	private final List featureList;
	private final HashMap featuresByName = new HashMap();
	private final UniqueConstraint[] uniqueConstraints;
	private final List uniqueConstraintList;
	private final Qualifier[] qualifiers;
	private final List qualifierList;

	// Lazily created by registerSubType; null means "no subtypes yet".
	private ArrayList subTypes = null;

	// Set in initialize()/materialize(); null until the type joins a model.
	private Model model;
	private Table table;
	private PrimaryKeyIterator primaryKeyIterator;

	private final Constructor creationConstructor;
	private static final Class[] creationConstructorParams;
	static
	{
		try
		{
			// The creation constructor takes a single AttributeValue[] parameter;
			// the array class is looked up via its JVM descriptor name.
			creationConstructorParams = new Class[]{Class.forName("[L"+AttributeValue.class.getName()+';')};
		}
		catch(ClassNotFoundException e)
		{
			e.printStackTrace();
			throw new NestingRuntimeException(e);
		}
	}

	private final Constructor reactivationConstructor;
	private static final Class[] reactivationConstructorParams = new Class[]{ReactivationConstructorDummy.class, int.class};

	public static final Type findByJavaClass(final Class javaClass)
	{
		return (Type)typesByClass.get(javaClass);
	}

	public Type(final Class javaClass)
	{
		this.javaClass = javaClass;
		if(!Item.class.isAssignableFrom(javaClass))
			throw new IllegalArgumentException(javaClass.toString()+" is not a subclass of Item");
		typesByClass.put(javaClass, this);
		{
			final String className = javaClass.getName();
			final int pos = className.lastIndexOf('.');
			this.id = className.substring(pos+1).intern();
		}

		// supertype
		final Class superClass = javaClass.getSuperclass();
		if(superClass.equals(Item.class))
			supertype = null;
		else
		{
			supertype = findByJavaClass(superClass);
			if(supertype==null)
				throw new NullPointerException(superClass.getName());
			supertype.registerSubType(this);
		}

		// declaredAttributes
		// Scan the static final fields of the item class via reflection and
		// initialize each cope feature found there with its field name.
		final Field[] fields = javaClass.getDeclaredFields();
		final ArrayList attributesTemp = new ArrayList(fields.length);
		final ArrayList featuresTemp = new ArrayList(fields.length);
		final ArrayList uniqueConstraintsTemp = new ArrayList(fields.length);
		final ArrayList qualifiersTemp = new ArrayList(fields.length);
		final int expectedModifier = Modifier.STATIC | Modifier.FINAL;
		try
		{
			for(int i = 0; i<fields.length; i++)
			{
				final Field field = fields[i];
				if((field.getModifiers()&expectedModifier)==expectedModifier)
				{
					final Class fieldType = field.getType();
					if(Attribute.class.isAssignableFrom(fieldType))
					{
						field.setAccessible(true);
						final Attribute attribute = (Attribute)field.get(null);
						if(attribute==null)
							throw new RuntimeException(field.getName());
						attribute.initialize(this, field.getName());
						attributesTemp.add(attribute);
						featuresTemp.add(attribute);
						featuresByName.put(attribute.getName(), attribute);
						final UniqueConstraint uniqueConstraint = attribute.getSingleUniqueConstaint();
						if(uniqueConstraint!=null)
						{
							uniqueConstraint.initialize(this, field.getName());
							uniqueConstraintsTemp.add(uniqueConstraint);
						}
					}
					else if(ComputedFunction.class.isAssignableFrom(fieldType))
					{
						field.setAccessible(true);
						final ComputedFunction function = (ComputedFunction)field.get(null);
						if(function==null)
							throw new RuntimeException(field.getName());
						function.initialize(this, field.getName());
						featuresTemp.add(function);
						featuresByName.put(function.getName(), function);
					}
					else if(UniqueConstraint.class.isAssignableFrom(fieldType))
					{
						field.setAccessible(true);
						final UniqueConstraint uniqueConstraint = (UniqueConstraint)field.get(null);
						if(uniqueConstraint==null)
							throw new RuntimeException(field.getName());
						uniqueConstraint.initialize(this, field.getName());
						uniqueConstraintsTemp.add(uniqueConstraint);
					}
					else if(MediaAttributeVariant.class.isAssignableFrom(fieldType))
					{
						field.setAccessible(true);
						final MediaAttributeVariant variant = (MediaAttributeVariant)field.get(null);
						if(variant==null)
							throw new RuntimeException(field.getName());
						variant.initialize(this, field.getName());
					}
					else if(Qualifier.class.isAssignableFrom(fieldType))
					{
						field.setAccessible(true);
						final Qualifier qualifier = (Qualifier)field.get(null);
						if(qualifier==null)
							throw new RuntimeException(field.getName());
						qualifier.initialize();
						qualifier.getQualifyUnique().setQualifier(qualifier);
						qualifiersTemp.add(qualifier);
					}
				}
			}
		}
		catch(IllegalAccessException e)
		{
			throw new NestingRuntimeException(e);
		}
		this.declaredAttributes = (Attribute[])attributesTemp.toArray(new Attribute[attributesTemp.size()]);
		this.declaredAttributeList = Collections.unmodifiableList(Arrays.asList(this.declaredAttributes));
		this.declaredFeatures = (Feature[])featuresTemp.toArray(new Feature[featuresTemp.size()]);
		this.declaredFeatureList = Collections.unmodifiableList(Arrays.asList(this.declaredFeatures));
		this.uniqueConstraints = (UniqueConstraint[])uniqueConstraintsTemp.toArray(new UniqueConstraint[uniqueConstraintsTemp.size()]);
		this.uniqueConstraintList = Collections.unmodifiableList(Arrays.asList(this.uniqueConstraints));
		this.qualifiers = (Qualifier[])qualifiersTemp.toArray(new Qualifier[qualifiersTemp.size()]);
		this.qualifierList = Collections.unmodifiableList(Arrays.asList(this.qualifiers));

		// attributes
if(supertype==null) { attributes = this.declaredAttributes; features = this.declaredFeatures; } else { { final Attribute[] supertypeAttributes = supertype.attributes; attributes = new Attribute[supertypeAttributes.length+this.declaredAttributes.length]; System.arraycopy(supertypeAttributes, 0, attributes, 0, supertypeAttributes.length); System.arraycopy(this.declaredAttributes, 0, attributes, supertypeAttributes.length, this.declaredAttributes.length); } { final Feature[] supertypeFeatures = supertype.features; features = new Attribute[supertypeFeatures.length+this.declaredFeatures.length]; System.arraycopy(supertypeFeatures, 0, features, 0, supertypeFeatures.length); System.arraycopy(this.declaredFeatures, 0, features, supertypeFeatures.length, this.declaredFeatures.length); } } this.attributeList = Collections.unmodifiableList(Arrays.asList(attributes)); this.featureList = Collections.unmodifiableList(Arrays.asList(features)); try { creationConstructor = javaClass.getDeclaredConstructor(creationConstructorParams); creationConstructor.setAccessible(true); } catch(NoSuchMethodException e) { throw new NestingRuntimeException(e); } try { reactivationConstructor = javaClass.getDeclaredConstructor(reactivationConstructorParams); reactivationConstructor.setAccessible(true); } catch(NoSuchMethodException e) { throw new NestingRuntimeException(e); } } final void registerSubType(final Type subType) { if(this.model!=null) throw new RuntimeException(); if(subTypes==null) subTypes = new ArrayList(); subTypes.add(subType); } final void initialize(final Model model) { if(model==null) throw new RuntimeException(); if(this.model!=null) throw new RuntimeException(); if(this.table!=null) throw new RuntimeException(); if(this.primaryKeyIterator!=null) throw new RuntimeException(); this.model = model; } final void materialize(final Database database) { if(database==null) throw new RuntimeException(); if(this.model==null) throw new RuntimeException(); if(this.table!=null) throw new 
RuntimeException(); if(this.primaryKeyIterator!=null) throw new RuntimeException(); this.table = new Table(database, id); if(supertype!=null) { primaryKeyIterator = supertype.getPrimaryKeyIterator(); new ItemColumn(table, supertype.getJavaClass()); } else { primaryKeyIterator = new PrimaryKeyIterator(table); new IntegerColumn(table); } if(subTypes!=null) { final ArrayList typeIDs = new ArrayList(); addRecursive(subTypes, typeIDs, 15); table.addTypeColumn(typeIDs); } for(int i = 0; i<declaredAttributes.length; i++) declaredAttributes[i].materialize(table); for(int i = 0; i<uniqueConstraints.length; i++) uniqueConstraints[i].materialize(database); this.table.setUniqueConstraints(this.uniqueConstraintList); } private static final void addRecursive(final List subTypes, final ArrayList typeIDs, int levelLimit) { if(levelLimit<=0) throw new RuntimeException(typeIDs.toString()); levelLimit for(Iterator i = subTypes.iterator(); i.hasNext(); ) { final Type type = (Type)i.next(); typeIDs.add(type.getID()); addRecursive(type.getSubTypes(), typeIDs, levelLimit); } } public final Class getJavaClass() { return javaClass; } public final String getID() { return id; } public final Model getModel() { if(model==null) throw new RuntimeException("model not set for type "+id+", probably you forgot to put this type into the model."); return model; } final Table getTable() { if(model==null) throw new RuntimeException(); return table; } /** * Returns the type representing the {@link Class#getSuperclass() superclass} * of this type's {@link #getJavaClass() java class}. * If this type has no super type * (i.e. the superclass of this type's java class is {@link Item}), * then null is returned. */ public final Type getSupertype() { return supertype; } public final List getSubTypes() { return subTypes==null ? Collections.EMPTY_LIST : Collections.unmodifiableList(subTypes); } /** * Returns the list of persistent attributes declared by the this type. * This excludes inherited attributes. 
* The elements in the list returned are ordered by their occurrence in the source code.
 * This method returns an empty list if the type declares no attributes.
 * <p>
 * If you want to get all persistent attributes of this type,
 * including attributes inherited from super types,
 * use {@link #getAttributes}.
 * <p>
 * Naming of this method is inspired by Java Reflection API
 * method {@link Class#getDeclaredFields() getDeclaredFields}.
 */
public final List getDeclaredAttributes()
{
	return declaredAttributeList;
}

/**
 * Returns the list of accessible persistent attributes of this type.
 * This includes inherited attributes.
 * The elements in the list returned are ordered by their type,
 * with types higher in type hierarchy coming first,
 * and within each type by their occurrence in the source code.
 * This method returns an empty list if the type has no accessible attributes.
 * <p>
 * If you want to get persistent attributes declared by this type only,
 * excluding attributes inherited from super types,
 * use {@link #getDeclaredAttributes}.
 */
public final List getAttributes()
{
	return attributeList;
}

public final List getDeclaredFeatures()
{
	return declaredFeatureList;
}

public final List getFeatures()
{
	return featureList;
}

// Lookup by feature name; returns null for unknown names.
public final Feature getFeature(final String name)
{
	return (Feature)featuresByName.get(name);
}

public final List getUniqueConstraints()
{
	return uniqueConstraintList;
}

public final List getQualifiers()
{
	return qualifierList;
}

private static final AttributeValue[] EMPTY_ATTRIBUTE_VALUES = new AttributeValue[]{};

// Instantiates a new persistent item via the cached creation constructor;
// a null argument is normalized to an empty value array.
public final Item newItem(final AttributeValue[] initialAttributeValues)
{
	try
	{
		return
			(Item)creationConstructor.newInstance(
				new Object[]{
					initialAttributeValues!=null
					? initialAttributeValues
					: EMPTY_ATTRIBUTE_VALUES
				}
			);
	}
	catch(InstantiationException e)
	{
		throw new NestingRuntimeException(e);
	}
	catch(IllegalAccessException e)
	{
		throw new NestingRuntimeException(e);
	}
	catch(InvocationTargetException e)
	{
		throw new NestingRuntimeException(e);
	}
}

/**
 * Searches for items of this type, that match the given condition.
 * <p>
 * Returns an unmodifiable collection.
 * Any attempts to modify the returned collection, whether direct or via its iterator,
 * result in an <code>UnsupportedOperationException</code>.
 * @param condition the condition the searched items must match.
 */
public final Collection search(final Condition condition)
{
	return Cope.search(new Query(this, condition));
}

// Returns the single item matching the condition, null when there is none,
// and throws when the condition matches more than one item.
final Item searchUnique(final Condition condition)
{
	final Iterator searchResult = search(condition).iterator();
	if(searchResult.hasNext())
	{
		final Item result = (Item)searchResult.next();
		if(searchResult.hasNext())
			throw new RuntimeException(condition.toString());
		else
			return result;
	}
	else
		return null;
}

private String toStringCache = null;

public final String toString()
{
	if(toStringCache!=null)
		return toStringCache;

	final StringBuffer buf = new StringBuffer();
	buf.append(javaClass.getName());
	for(int i = 0; i<uniqueConstraints.length; i++)
	{
		buf.append(' ');
		buf.append(uniqueConstraints[i].toString());
	}
	toStringCache = buf.toString();
	return toStringCache;
}

PrimaryKeyIterator getPrimaryKeyIterator()
{
	if(primaryKeyIterator==null)
		throw new RuntimeException();
	return primaryKeyIterator;
}

void onDropTable()
{
	rows.clear();
	primaryKeyIterator.flushPK();
}

// Cache of active rows keyed by primary key (primitive-int map).
private final IntKeyOpenHashMap rows = new IntKeyOpenHashMap();

/**
 * Returns an item of this type and the given pk, if it's already active.
 * Returns null, if either there is no such item with the given pk, or
 * such an item is not active.
 */
Row getRow(final int pk)
{
	return (Row)rows.get(pk);
}

void putRow(final Row row)
{
	if(rows.put(row.pk, row)!=null)
		throw new RuntimeException();
}

void removeRow(final Row row)
{
	if(rows.remove(row.pk)!=row)
		throw new RuntimeException();
}

static final ReactivationConstructorDummy REACTIVATION_DUMMY = new ReactivationConstructorDummy();

// Recreates an item object for an existing pk via the reactivation constructor.
private Item createItemObject(final int pk)
{
	try
	{
		return
			(Item)reactivationConstructor.newInstance(
				new Object[]{
					REACTIVATION_DUMMY,
					new Integer(pk)
				}
			);
	}
	catch(InstantiationException e)
	{
		throw new NestingRuntimeException(e, id);
	}
	catch(IllegalAccessException e)
	{
		throw new NestingRuntimeException(e, id);
	}
	catch(InvocationTargetException e)
	{
		throw new NestingRuntimeException(e, id);
	}
}

// Returns the active item for pk when cached, otherwise reactivates a fresh one.
Item getItem(final int pk)
{
	final Row row = getRow(pk);
	if(row!=null)
		return row.item;
	else
		return createItemObject(pk);
}

// Orders types by their ID (unqualified class name).
static final Comparator COMPARATOR = new Comparator()
{
	public int compare(Object o1, Object o2)
	{
		final Type t1 = (Type)o1;
		final Type t2 = (Type)o2;
		return t1.id.compareTo(t2.id);
	}
};

// Sentinel for "no primary key assigned".
static final int NOT_A_PK = Integer.MIN_VALUE;

}
package com.exedio.cope.instrument;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

import com.exedio.cope.FinalViolationException;
import com.exedio.cope.Item;
import com.exedio.cope.util.ClassComparator;

/**
 * Instrumentor-side representation of a persistent cope type:
 * wraps a parsed {@link JavaClass}, collects its features, and resolves
 * the super-/subtype hierarchy after the build stage has finished.
 */
final class CopeType
{
	private static final String TAG_PREFIX = CopeFeature.TAG_PREFIX;

	// Javadoc tags recognized on the class comment, controlling code generation.
	static final String TAG_TYPE = TAG_PREFIX + "type";
	static final String TAG_INITIAL_CONSTRUCTOR = TAG_PREFIX + "constructor";
	static final String TAG_GENERIC_CONSTRUCTOR = TAG_PREFIX + "generic.constructor";
	static final String TAG_REACTIVATION_CONSTRUCTOR = TAG_PREFIX + "reactivation.constructor";

	// Global registry, populated by the constructor.
	private static final HashMap<JavaClass, CopeType> copeTypeByJavaClass = new HashMap<JavaClass, CopeType>();

	/**
	 * Returns the CopeType registered for the given java class,
	 * or null if none has been created yet.
	 */
	static final CopeType getCopeType(final JavaClass javaClass)
	{
		final CopeType result = copeTypeByJavaClass.get(javaClass);
		//System.out.println("getCopeClass "+javaClass.getFullName()+" "+(result==null?"NULL":result.getName()));
		return result;
	}

	final JavaClass javaClass;
	final String name;
	final int accessModifier;

	// Options parsed from the class doc comment (one per recognized tag).
	final Option typeOption;
	final Option initialConstructorOption;
	final Option genericConstructorOption;
	final Option reactivationConstructorOption;

	// Features in registration order, plus a by-name index (sorted).
	private final ArrayList<CopeFeature> features = new ArrayList<CopeFeature>();
	private final TreeMap<String, CopeFeature> featureMap = new TreeMap<String, CopeFeature>();

	/**
	 * Creates a CopeType for the given java class, parses the doc-comment
	 * options, and registers this instance globally, with the class's
	 * name space, and with the file's repository.
	 */
	public CopeType(final JavaClass javaClass)
		throws InjectorParseException
	{
		this.javaClass = javaClass;
		this.name = javaClass.name;
		this.accessModifier = javaClass.getAccessModifier();
		copeTypeByJavaClass.put(javaClass, this);
		final String docComment = javaClass.getDocComment();
		this.typeOption = new Option(Injector.findDocTagLine(docComment, TAG_TYPE), false);
		this.initialConstructorOption = new Option(Injector.findDocTagLine(docComment, TAG_INITIAL_CONSTRUCTOR), false);
		this.genericConstructorOption = new Option(Injector.findDocTagLine(docComment, TAG_GENERIC_CONSTRUCTOR), false);
		this.reactivationConstructorOption = new Option(Injector.findDocTagLine(docComment, TAG_REACTIVATION_CONSTRUCTOR), false);
		//System.out.println("copeTypeByJavaClass "+javaClass.getName());
		javaClass.nameSpace.importStatic(Item.class);
		javaClass.file.repository.add(this);
	}

	public boolean isAbstract()
	{
		return javaClass.isAbstract();
	}

	public boolean isInterface()
	{
		return javaClass.isInterface();
	}

	// Hierarchy links; resolved in endBuildStage(), not before.
	private CopeType supertype;
	private ArrayList<CopeType> subtypes = new ArrayList<CopeType>();

	/**
	 * Called once the build stage is over: resolves the supertype from the
	 * extends clause (null when extending Item directly or nothing),
	 * registers this type with its supertype, and forwards the event to
	 * all features.
	 */
	void endBuildStage()
	{
		assert !javaClass.file.repository.isBuildStage();
		assert !javaClass.file.repository.isGenerateStage();

		final String extname = javaClass.classExtends;
		if(extname==null)
		{
			supertype = null;
		}
		else
		{
			final Class externalType = javaClass.file.findTypeExternally(extname);
			if(externalType==Item.class)
			{
				// extending Item directly means: no cope supertype
				supertype = null;
			}
			else
			{
				supertype = javaClass.file.repository.getCopeType(extname);
				supertype.addSubtype(this);
			}
		}
		for(final CopeFeature feature : getFeatures())
			feature.endBuildStage();
	}

	void addSubtype(final CopeType subtype)
	{
		assert !javaClass.file.repository.isBuildStage();
		assert !javaClass.file.repository.isGenerateStage();

		subtypes.add(subtype);
	}

	/** Returns the cope supertype, or null if this type extends Item directly. */
	public CopeType getSuperclass()
	{
		assert !javaClass.file.repository.isBuildStage();

		return supertype;
	}

	public List<CopeType> getSubtypes()
	{
		assert !javaClass.file.repository.isBuildStage();

		return subtypes;
	}

	/** A type allows subtypes if it is abstract or already has registered subtypes. */
	boolean allowSubTypes()
	{
		assert !javaClass.file.repository.isBuildStage();

		return isAbstract() || !getSubtypes().isEmpty();
	}

	// Relations in which this type participates, split by role.
	private final ArrayList<CopeRelation> sourceRelations = new ArrayList<CopeRelation>();
	private final ArrayList<CopeRelation> targetRelations = new ArrayList<CopeRelation>();

	/**
	 * Registers a relation; {@code source} selects whether this type
	 * is the relation's source or its target.
	 */
	void addRelation(final CopeRelation relation, final boolean source)
	{
		assert !javaClass.file.repository.isBuildStage();
		assert !javaClass.file.repository.isGenerateStage();
		(source ? sourceRelations : targetRelations).add(relation);
	}

	/** Returns an unmodifiable view of the source or target relations. */
	List<CopeRelation> getRelations(final boolean source)
	{
		assert !javaClass.file.repository.isBuildStage();
		assert javaClass.file.repository.isGenerateStage();
		return Collections.unmodifiableList(source ? sourceRelations : targetRelations);
	}

	/**
	 * Registers a feature of this type. Feature names must be unique;
	 * a collision is an internal error (assertion).
	 */
	public void register(final CopeFeature feature)
	{
		assert !javaClass.file.repository.isBuildStage();
		assert !javaClass.file.repository.isGenerateStage();

		features.add(feature);
		final Object collision = featureMap.put(feature.name, feature);
		assert collision==null : feature.name;
	}

	/** Returns the feature with the given name, or null. */
	public CopeFeature getFeature(final String name)
	{
		assert !javaClass.file.repository.isBuildStage();

		return featureMap.get(name);
	}

	public List<CopeFeature> getFeatures()
	{
		assert !javaClass.file.repository.isBuildStage();

		return Collections.unmodifiableList(features);
	}

	public boolean hasInitialConstructor()
	{
		return initialConstructorOption.exists;
	}

	/**
	 * Computes the modifier of the generated initial constructor:
	 * the widest (largest) access modifier among this type's own modifier
	 * and those of all initial features, possibly overridden by the
	 * constructor option from the doc comment.
	 */
	public int getInitialConstructorModifier()
	{
		int inheritedModifier = accessModifier;
		for(final CopeFeature initialFeature : getInitialFeatures())
		{
			final int intialFeatureAccessModifier = initialFeature.accessModifier;
			if(inheritedModifier<intialFeatureAccessModifier)
				inheritedModifier = intialFeatureAccessModifier;
		}

		return initialConstructorOption.getModifier(JavaFeature.toReflectionModifier(inheritedModifier));
	}

	// Lazily computed caches; both are filled together by
	// makeInitialFeaturesAndConstructorExceptions().
	private ArrayList<CopeFeature> initialFeatures = null;
	private TreeSet<Class> constructorExceptions = null;

	/**
	 * Fills both caches: initial features are inherited from the supertype
	 * plus this type's own initial non-qualifier features; constructor
	 * exceptions are the union of the setter exceptions of those features,
	 * minus FinalViolationException (final attributes are only written in
	 * the constructor, so that exception cannot occur there).
	 */
	private final void makeInitialFeaturesAndConstructorExceptions()
	{
		initialFeatures = new ArrayList<CopeFeature>();
		constructorExceptions = new TreeSet<Class>(ClassComparator.getInstance());

		final CopeType superclass = getSuperclass();
		if(superclass!=null)
		{
			initialFeatures.addAll(superclass.getInitialFeatures());
			constructorExceptions.addAll(superclass.getConstructorExceptions());
		}

		for(final CopeFeature feature : getFeatures())
		{
			if(!(feature instanceof CopeQualifier))
			{
				if(feature.isInitial())
				{
					initialFeatures.add(feature);
					constructorExceptions.addAll(feature.getSetterExceptions());
				}
			}
		}

		constructorExceptions.remove(FinalViolationException.class);
	}

	/** Returns the features set by the generated initial constructor (lazily computed). */
	public final List<CopeFeature> getInitialFeatures()
	{
		if(initialFeatures == null)
			makeInitialFeaturesAndConstructorExceptions();
		return initialFeatures;
	}

	/**
	 * Returns all exceptions, the generated constructor of this class should throw.
	 * This is the unification of throws clauses of all the setters of the
	 * {@link #getInitialFeatures() initial attributes},
	 * but without the FinalViolationException,
	 * because final attributes can only be written in the constructor.
	 */
	public final SortedSet<Class> getConstructorExceptions()
	{
		if(constructorExceptions == null)
			makeInitialFeaturesAndConstructorExceptions();
		return constructorExceptions;
	}
}
package com.phonegap;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Configuration;
import android.graphics.Color;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.webkit.JsResult;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebStorage;
import android.webkit.WebView;
import android.webkit.WebSettings.LayoutAlgorithm;
import android.widget.LinearLayout;
import android.os.Build.*;

/**
 * Main PhoneGap activity: hosts a full-screen WebView and bridges it to the
 * native device APIs (geolocation, accelerometer, camera, contacts, files,
 * network, compass, storage, crypto) via addJavascriptInterface.
 */
public class DroidGap extends Activity {

	private static final String LOG_TAG = "DroidGap";
	protected WebView appView;
	private LinearLayout root;

	// Native plugin bridges, created in bindBrowser() and exposed to JS.
	private PhoneGap gap;
	private GeoBroker geo;
	private AccelListener accel;
	private CameraLauncher launcher;
	private ContactManager mContacts;
	private FileUtils fs;
	private NetworkManager netMan;
	private CompassListener mCompass;
	private Storage cupcakeStorage; // only created on Android 1.x ("cupcake") — see bindBrowser()
	private CryptoHandler crypto;

	/** Called when the activity is first created. */
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		getWindow().requestFeature(Window.FEATURE_NO_TITLE);
		getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN,
				WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);

		// This builds the view. We could probably get away with NOT having a LinearLayout, but I like having a bucket!
		// Container gets weight 0, the WebView gets weight 1 (fills the remaining space).
		LinearLayout.LayoutParams containerParams =
			new LinearLayout.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT,
					ViewGroup.LayoutParams.FILL_PARENT, 0.0F);

		LinearLayout.LayoutParams webviewParams =
			new LinearLayout.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT,
					ViewGroup.LayoutParams.FILL_PARENT, 1.0F);

		root = new LinearLayout(this);
		root.setOrientation(LinearLayout.VERTICAL);
		root.setBackgroundColor(Color.BLACK);
		root.setLayoutParams(containerParams);

		appView = new WebView(this);
		appView.setLayoutParams(webviewParams);

		WebViewReflect.checkCompatibility();

		// Android 2.x gets the Eclair chrome client (quota + console support);
		// everything else gets the basic GapClient.
		// NOTE(review): a plain startsWith on the release string also matches
		// e.g. "20.x" — confirm this is acceptable for the supported range.
		if (android.os.Build.VERSION.RELEASE.startsWith("2."))
			appView.setWebChromeClient(new EclairClient(this));
		else
		{
			appView.setWebChromeClient(new GapClient(this));
		}

		appView.setInitialScale(100);
		appView.setVerticalScrollBarEnabled(false);

		WebSettings settings = appView.getSettings();
		settings.setJavaScriptEnabled(true);
		settings.setJavaScriptCanOpenWindowsAutomatically(true);
		settings.setLayoutAlgorithm(LayoutAlgorithm.NORMAL);

		// Database storage path is derived from this class's package name.
		Package pack = this.getClass().getPackage();
		String appPackage = pack.getName();

		WebViewReflect.setStorage(settings, true, "/data/data/" + appPackage + "/app_database/");

		// Turn on DOM storage!
		WebViewReflect.setDomStorage(settings);
		// Turn off native geolocation object in browser - we use our own :)
		WebViewReflect.setGeolocationEnabled(settings, false);

		/* Bind the appView object to the gap class methods */
		// Must happen before setContentView: bindBrowser may create cupcakeStorage,
		// which is configured with the package name right below.
		bindBrowser(appView);
		if(cupcakeStorage != null)
			cupcakeStorage.setStorage(appPackage);

		root.addView(appView);
		setContentView(root);
	}

	@Override
	public void onConfigurationChanged(Configuration newConfig) {
		//don't reload the current page when the orientation is changed
		super.onConfigurationChanged(newConfig);
	}

	/**
	 * Creates all native plugin bridges and registers them as JavaScript
	 * interfaces on the given WebView. On Android 1.x a Storage shim
	 * ("droidStorage") is additionally installed.
	 */
	private void bindBrowser(WebView appView)
	{
		gap = new PhoneGap(this, appView);
		geo = new GeoBroker(appView, this);
		accel = new AccelListener(this, appView);
		launcher = new CameraLauncher(appView, this);
		mContacts = new ContactManager(this, appView);
		fs = new FileUtils(appView);
		netMan = new NetworkManager(this, appView);
		mCompass = new CompassListener(this, appView);
		crypto = new CryptoHandler(appView);

		// This creates the new javascript interfaces for PhoneGap
		appView.addJavascriptInterface(gap, "DroidGap");
		appView.addJavascriptInterface(geo, "Geo");
		appView.addJavascriptInterface(accel, "Accel");
		appView.addJavascriptInterface(launcher, "GapCam");
		appView.addJavascriptInterface(mContacts, "ContactHook");
		appView.addJavascriptInterface(fs, "FileUtil");
		appView.addJavascriptInterface(netMan, "NetworkManager");
		appView.addJavascriptInterface(mCompass, "CompassHook");
		appView.addJavascriptInterface(crypto, "GapCrypto");

		if (android.os.Build.VERSION.RELEASE.startsWith("1."))
		{
			cupcakeStorage = new Storage(appView);
			appView.addJavascriptInterface(cupcakeStorage, "droidStorage");
		}
	}

	/** Loads the given URL into the hosted WebView. */
	public void loadUrl(String url)
	{
		appView.loadUrl(url);
	}

	/**
	 * Provides a hook for calling "alert" from javascript. Useful for
	 * debugging your javascript.
	 */
	public class GapClient extends WebChromeClient {

		Context mCtx;
		public GapClient(Context ctx)
		{
			mCtx = ctx;
		}

		// Logs the JS alert message and shows it in a native dialog;
		// the JsResult is confirmed immediately (the dialog is informational).
		@Override
		public boolean onJsAlert(WebView view, String url, String message, JsResult result) {
			Log.d(LOG_TAG, message);
			// This shows the dialog box. This can be commented out for dev
			AlertDialog.Builder alertBldr = new AlertDialog.Builder(mCtx);
			GapOKDialog okHook = new GapOKDialog();
			GapCancelDialog cancelHook = new GapCancelDialog();
			alertBldr.setMessage(message);
			alertBldr.setTitle("Alert");
			alertBldr.setCancelable(true);
			alertBldr.setPositiveButton("OK", okHook);
			alertBldr.setNegativeButton("Cancel", cancelHook);
			alertBldr.show();
			result.confirm();
			return true;
		}

		/*
		 * This is the Code for the OK Button
		 */
		public class GapOKDialog implements DialogInterface.OnClickListener {

			public void onClick(DialogInterface dialog, int which) {
				// TODO Auto-generated method stub
				dialog.dismiss();
			}

		}

		public class GapCancelDialog implements DialogInterface.OnClickListener {

			public void onClick(DialogInterface dialog, int which) {
				// TODO Auto-generated method stub
				dialog.dismiss();
			}

		}

	}

	/**
	 * Chrome client for Android 2.x (Eclair): adds Web SQL quota handling
	 * and console logging on top of GapClient.
	 */
	public final class EclairClient extends GapClient
	{
		private String TAG = "PhoneGapLog";
		private long MAX_QUOTA = 100 * 1024 * 1024; // hard ceiling: 100 MB per origin

		public EclairClient(Context ctx) {
			super(ctx);
			// TODO Auto-generated constructor stub
		}

		// Grants quota in 1 MB increments up to MAX_QUOTA; beyond that the
		// current quota is re-confirmed, which forces an error in the page.
		public void onExceededDatabaseQuota(String url, String databaseIdentifier, long currentQuota, long estimatedSize,
				long totalUsedQuota, WebStorage.QuotaUpdater quotaUpdater)
		{
			Log.d(TAG, "event raised onExceededDatabaseQuota estimatedSize: " + Long.toString(estimatedSize) + " currentQuota: " + Long.toString(currentQuota) + " totalUsedQuota: " + Long.toString(totalUsedQuota));

			if( estimatedSize < MAX_QUOTA)
			{
				//increase for 1Mb
				long newQuota = currentQuota + 1024*1024;
				Log.d(TAG, "calling quotaUpdater.updateQuota newQuota: " + Long.toString(newQuota) );
				quotaUpdater.updateQuota(newQuota);
			}
			else
			{
				// Set the quota to whatever it is and force an error
				// TODO: get docs on how to handle this properly
				quotaUpdater.updateQuota(currentQuota);
			}
		}

		// This is a test of console.log, because we don't have this in Android 2.01
		public void addMessageToConsole(String message, int lineNumber, String sourceID)
		{
			Log.d(TAG, sourceID + ": Line " + Integer.toString(lineNumber) + " : " + message);
		}

		public void onConsoleMessage(String message, int lineNumber, String sourceID)
		{
			Log.d(TAG, sourceID + ": Line " + Integer.toString(lineNumber) + " : " + message);
		}

	}

	// Routes hardware back/menu/search keys to JS handlers instead of the
	// default activity behavior (back does NOT finish the activity).
	public boolean onKeyDown(int keyCode, KeyEvent event)
	{
		if (keyCode == KeyEvent.KEYCODE_BACK)
		{
			appView.loadUrl("javascript:keyEvent.backTrigger()");
			return true;
		}

		if (keyCode == KeyEvent.KEYCODE_MENU)
		{
			appView.loadUrl("javascript:keyEvent.menuTrigger()");
			return true;
		}

		if (keyCode == KeyEvent.KEYCODE_SEARCH)
		{
			appView.loadUrl("javascript:keyEvent.searchTrigger()");
			return true;
		}

		return false;
	}

	// This is required to start the camera activity! It has to come from the previous activity
	public void startCamera(int quality)
	{
		Intent i = new Intent(this, CameraPreview.class);
		i.setAction("android.intent.action.PICK");
		i.putExtra("quality", quality);
		startActivityForResult(i, 0);
	}

	// Receives the camera result: forwards the picture data to the
	// CameraLauncher on success, reports failure otherwise.
	protected void onActivityResult(int requestCode, int resultCode, Intent intent)
	{
		String data;
		super.onActivityResult(requestCode, resultCode, intent);
		if (resultCode == RESULT_OK)
		{
			data = intent.getStringExtra("picture");
			// Send the graphic back to the class that needs it
			launcher.processPicture(data);
		}
		else
		{
			launcher.failPicture("Did not complete!");
		}
	}

	/** Returns the hosted WebView. */
	public WebView getView()
	{
		return this.appView;
	}

}
package org.intermine.web;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import org.intermine.objectstore.query.BagConstraint;
import org.intermine.objectstore.query.ResultsInfo;
import org.intermine.metadata.Model;
import org.intermine.util.CollectionUtil;

import java.io.StringReader;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;

/**
 * Class to represent a path-based query.
 *
 * @author Mark Woodbridge
 * @author Thomas Riley
 */
public class PathQuery
{
    private static final Logger LOG = Logger.getLogger(PathQuery.class);

    protected Model model;
    // path (String) -> PathNode; insertion order is significant (see addNode)
    protected LinkedHashMap nodes = new LinkedHashMap();
    // select list: paths (Strings)
    protected List view = new ArrayList();
    protected ResultsInfo info;
    // exceptions collected while building/deserialising the query
    protected ArrayList problems = new ArrayList();
    // AND/OR expression over constraint codes; null when absent
    protected LogicExpression constraintLogic = null;
    // name (String) -> alternative select list (List of Strings), sorted by name
    protected Map alternativeViews = new TreeMap();

    /**
     * Construct a new instance of PathQuery.
     * @param model the Model on which to base this query
     */
    public PathQuery(Model model) {
        this.model = model;
    }

    /**
     * Get the constraint logic expression.
     * @return the constraint logic expression, or null if none is set
     */
    public String getConstraintLogic() {
        if (constraintLogic == null) {
            return null;
        } else {
            return constraintLogic.toString();
        }
    }

    /**
     * Set the constraint logic expression. This expresses the AND and OR
     * relation between constraints. An unparseable expression is logged
     * and otherwise ignored (the previous expression is kept).
     * @param constraintLogic the constraint logic expression
     */
    public void setConstraintLogic(String constraintLogic) {
        if (constraintLogic == null) {
            this.constraintLogic = null;
            return;
        }
        try {
            this.constraintLogic = new LogicExpression(constraintLogic);
        } catch (IllegalArgumentException err) {
            LOG.error("Failed to parse constraintLogic: " + constraintLogic, err);
        }
    }

    /**
     * Make sure that the logic expression is valid for the current query. Remove
     * any unknown constraint codes and add any constraints that aren't included
     * (using the default operator). With at most one constraint the expression
     * is cleared entirely.
     * @param defaultOperator the default logical operator
     */
    public void syncLogicExpression(String defaultOperator) {
        if (getAllConstraints().size() <= 1) {
            setConstraintLogic(null);
        } else {
            Set codes = getConstraintCodes();
            if (constraintLogic != null) {
                // limit to the actual variables
                constraintLogic.removeAllVariablesExcept(getConstraintCodes());
                // add anything that isn't there
                codes.removeAll(constraintLogic.getVariableNames());
            }
            addCodesToLogic(codes, defaultOperator);
        }
    }

    /**
     * Get all constraint codes.
     * @return all present constraint codes
     */
    private Set getConstraintCodes() {
        Set codes = new HashSet();
        for (Iterator iter = getAllConstraints().iterator(); iter.hasNext(); ) {
            codes.add(((Constraint) iter.next()).getCode());
        }
        return codes;
    }

    /**
     * Gets the value of model
     * @return the value of model
     */
    public Model getModel() {
        return model;
    }

    /**
     * Gets the value of nodes
     * @return the value of nodes
     */
    public Map getNodes() {
        return nodes;
    }

    /**
     * Get a PathNode by path.
     * @param path a path
     * @return the PathNode for path path, or null if absent
     */
    public PathNode getNode(String path) {
        return (PathNode) nodes.get(path);
    }

    /**
     * Get all constraints, collected from every node.
     * @return all constraints
     */
    public List getAllConstraints() {
        ArrayList list = new ArrayList();
        for (Iterator iter = nodes.values().iterator(); iter.hasNext(); ) {
            PathNode node = (PathNode) iter.next();
            list.addAll(node.getConstraints());
        }
        return list;
    }

    /**
     * Sets the value of view
     * @param view the value of view
     */
    public void setView(List view) {
        this.view = view;
    }

    /**
     * Gets the value of view
     * @return the value of view
     */
    public List getView() {
        return view;
    }

    /**
     * Get alternative select list by name.
     * @param name view name
     * @return List of Strings, or null if no such view
     */
    public List getAlternativeView(String name) {
        return (List) alternativeViews.get(name);
    }

    /**
     * Get alternative select lists as an unmodifiable Map from name to List.
     * @return alternative select lists
     */
    public Map getAlternativeViews() {
        return Collections.unmodifiableMap(alternativeViews);
    }

    /**
     * Add an alternative select list.
     * @param name view name
     * @param alternateView the select list
     */
    public void addAlternativeView(String name, List alternateView) {
        alternativeViews.put(name, alternateView);
    }

    /**
     * Remove alternative select list by name
     * @param name view name
     */
    public void removeAlternativeView(String name) {
        alternativeViews.remove(name);
    }

    /**
     * Get info regarding this query
     * @return the info
     */
    public ResultsInfo getInfo() {
        return info;
    }

    /**
     * Set info about this query
     * @param info the info
     */
    public void setInfo(ResultsInfo info) {
        this.info = info;
    }

    /**
     * Provide a list of the names of bags mentioned in the query
     * (the values of every constraint whose operator is a bag operator).
     * @return the list of bag names
     */
    public List getBagNames() {
        List bagNames = new ArrayList();
        for (Iterator i = nodes.values().iterator(); i.hasNext();) {
            PathNode node = (PathNode) i.next();
            for (Iterator j = node.getConstraints().iterator(); j.hasNext();) {
                Constraint c = (Constraint) j.next();
                if (BagConstraint.VALID_OPS.contains(c.getOp())) {
                    bagNames.add(c.getValue());
                }
            }
        }
        return bagNames;
    }

    /**
     * Add a node to the query using a path, adding parent nodes if necessary.
     * Problems with the path (unknown class, bad field) are recorded in
     * {@link #getProblems} rather than thrown.
     * @param path the path for the new Node
     * @return the PathNode that was added to the nodes Map
     */
    public PathNode addNode(String path) {
        PathNode node;

        // the new node will be inserted after this one or at the end if null
        String previousNodePath = null;

        if (path.indexOf(".") == -1) {
            node = new PathNode(path);
            // Check whether starting point exists
            try {
                MainHelper.getQualifiedTypeName(path, model);
            } catch (ClassNotFoundException err) {
                problems.add(err);
            }
        } else {
            String prefix = path.substring(0, path.lastIndexOf("."));
            if (nodes.containsKey(prefix)) {
                // insert after the last existing node sharing this prefix,
                // keeping related paths adjacent in the LinkedHashMap
                Iterator pathsIter = nodes.keySet().iterator();
                while (pathsIter.hasNext()) {
                    String pathFromMap = (String) pathsIter.next();
                    if (pathFromMap.startsWith(prefix)) {
                        previousNodePath = pathFromMap;
                    }
                }

                PathNode parent = (PathNode) nodes.get(prefix);
                String fieldName = path.substring(path.lastIndexOf(".") + 1);
                node = new PathNode(parent, fieldName);
                try {
                    node.setModel(model);
                } catch (Exception err) {
                    problems.add(err);
                }
            } else {
                // parent missing: create it first, then retry this path
                addNode(prefix);
                return addNode(path);
            }
        }

        nodes = CollectionUtil.linkedHashMapAdd(nodes, previousNodePath, path, node);

        return node;
    }

    /**
     * Get the exceptions generated while deserialising this path query query.
     * @return exceptions relating to this path query
     */
    public Exception[] getProblems() {
        return (Exception[]) problems.toArray(new Exception[0]);
    }

    /**
     * Find out whether the path query is valid against the current model.
     * @return true if query is valid, false if not
     */
    public boolean isValid() {
        return (problems.size() == 0);
    }

    /**
     * Clone this PathQuery
     * @return a PathQuery
     */
    public Object clone() {
        PathQuery query = new PathQuery(model);
        for (Iterator i = nodes.entrySet().iterator(); i.hasNext();) {
            Map.Entry entry = (Map.Entry) i.next();
            query.getNodes().put(entry.getKey(), clone(query, (PathNode) entry.getValue()));
        }
        query.getView().addAll(view);
        if (problems != null) {
            query.problems = new ArrayList(problems);
        }
        for (Iterator i = getAlternativeViews().entrySet().iterator(); i.hasNext();) {
            Map.Entry entry = (Map.Entry) i.next();
            query.addAlternativeView((String) entry.getKey(), (List) entry.getValue());
        }
        query.setConstraintLogic(getConstraintLogic());
        return query;
    }

    /**
     * Clone a PathNode
     * @param query PathQuery containing cloned PathNode
     * @param node a PathNode
     * @return a copy of the PathNode
     */
    protected PathNode clone(PathQuery query, PathNode node) {
        PathNode newNode;
        // NOTE(review): the parent is looked up in THIS query's nodes map, so the
        // cloned node references a parent belonging to the source query, not the
        // clone. Looks suspicious -- verify whether query.getNodes() was intended.
        PathNode parent = (PathNode) nodes.get(node.getPrefix());
        if (parent == null) {
            newNode = new PathNode(node.getType());
        } else {
            newNode = new PathNode(parent, node.getFieldName());
            try {
                newNode.setModel(model);
            } catch (IllegalArgumentException err) {
                query.problems.add(err);
            }
            newNode.setType(node.getType());
        }
        for (Iterator i = node.getConstraints().iterator(); i.hasNext();) {
            Constraint constraint = (Constraint) i.next();
            newNode.getConstraints().add(new Constraint(constraint.getOp(), constraint.getValue(),
                    constraint.isEditable(), constraint.getDescription(), constraint.getCode(),
                    constraint.getIdentifier()));
        }
        return newNode;
    }

    /**
     * @see Object#equals(Object)
     */
    public boolean equals(Object o) {
        return (o instanceof PathQuery)
            && model.equals(((PathQuery) o).model)
            && nodes.equals(((PathQuery) o).nodes)
            && view.equals(((PathQuery) o).view)
            && alternativeViews.equals(((PathQuery) o).getAlternativeViews());
    }

    /**
     * @see Object#hashCode()
     */
    public int hashCode() {
        // consistent with equals: equal queries have equal model/nodes/view
        return 2 * model.hashCode()
            + 3 * nodes.hashCode()
            + 5 * view.hashCode();
    }

    /**
     * @see Object#toString()
     */
    public String toString() {
        return "{PathQuery: " + model + ", " + nodes + ", " + view + "}";
    }

    /**
     * Check validity of receiver by trying to create an objectstore Query. If
     * conversion fails, the exception is recorded and isValid will return false.
     */
    protected void checkValidity() {
        try {
            MainHelper.makeQuery(this, new HashMap());
        } catch (Exception err) {
            problems.add(err);
        }
    }

    /**
     * Get a constraint code that hasn't been used yet.
     * Codes are single characters starting at 'A'; note that with more than
     * 26 constraints this walks past 'Z' into subsequent character values.
     * @return a constraint code that hasn't been used yet
     */
    public String getUnusedConstraintCode() {
        char c = 'A';
        while (getConstraintByCode("" + c) != null) {
            c++;
        }
        return "" + c;
    }

    /**
     * Get a Constraint involved in this query by code. Returns null if no
     * constraint with the given code was found.
     * @param string the constraint code
     * @return the Constraint with matching code or null
     */
    private Constraint getConstraintByCode(String string) {
        Iterator iter = getAllConstraints().iterator();
        while (iter.hasNext()) {
            Constraint c = (Constraint) iter.next();
            if (string.equals(c.getCode())) {
                return c;
            }
        }
        return null;
    }

    /**
     * Add a set of codes to the logical expression using the given operator.
     * @param codes Set of codes (Strings)
     * @param operator operator to add with
     */
    protected void addCodesToLogic(Set codes, String operator) {
        String logic = getConstraintLogic();
        if (logic == null) {
            logic = "";
        } else {
            // parenthesise the existing expression so the new operator
            // does not rebind its terms
            logic = "(" + logic + ")";
        }
        for (Iterator iter = codes.iterator(); iter.hasNext(); ) {
            if (!StringUtils.isEmpty(logic)) {
                logic += " " + operator + " ";
            }
            logic += (String) iter.next();
        }
        setConstraintLogic(logic);
    }

    /**
     * Remove some constraint code from the logic expression.
     * Does nothing if no logic expression is present.
     * (Fix: previously dereferenced constraintLogic unconditionally and threw
     * a NullPointerException whenever the logic was null -- its initial state,
     * and the state syncLogicExpression() produces for queries with at most
     * one constraint.)
     * @param code the code to remove
     */
    public void removeCodeFromLogic(String code) {
        if (constraintLogic != null) {
            constraintLogic.removeVariable(code);
        }
    }

    /**
     * Get the LogicExpression. If there are one or zero constraints then
     * this method will return null.
     * @return the current LogicExpression or null
     */
    public LogicExpression getLogic() {
        return constraintLogic;
    }

    /**
     * Serialise this query in XML format.
     * @param name query name to put in xml
     * @return PathQuery in XML format
     */
    public String toXml(String name) {
        return PathQueryBinding.marshal(this, name, model.getName());
    }

    /**
     * Serialise to XML with no name.
     * @return the XML
     */
    public String toXml() {
        return PathQueryBinding.marshal(this, "", model.getName());
    }

    /**
     * Rematerialise single query from XML.
     * @param xml PathQuery XML
     * @return a PathQuery object
     */
    public static PathQuery fromXml(String xml) {
        Map queries = PathQueryBinding.unmarshal(new StringReader(xml));
        return (PathQuery) queries.values().iterator().next();
    }
}
package org.wyona.yanel.servlet; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.InputStream; import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; import java.net.URL; import java.util.Calendar; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.xml.transform.Source; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.stream.StreamSource; import org.wyona.commons.xml.XMLHelper; import org.wyona.neutron.XMLExceptionV1; import org.wyona.yanel.core.ResourceTypeIdentifier; import org.wyona.yanel.core.StateOfView; import org.wyona.yanel.core.Environment; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.ResourceTypeRegistry; import org.wyona.yanel.core.Yanel; import org.wyona.yanel.core.api.attributes.IntrospectableV1; import org.wyona.yanel.core.api.attributes.ModifiableV1; import org.wyona.yanel.core.api.attributes.ModifiableV2; import org.wyona.yanel.core.api.attributes.TranslatableV1; import org.wyona.yanel.core.api.attributes.VersionableV2; import org.wyona.yanel.core.api.attributes.ViewableV1; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.api.attributes.WorkflowableV1; import org.wyona.yanel.core.api.security.WebAuthenticator; import org.wyona.yanel.core.attributes.versionable.RevisionInformation; import 
org.wyona.yanel.core.attributes.viewable.View;
import org.wyona.yanel.core.attributes.viewable.ViewDescriptor;
import org.wyona.yanel.core.navigation.Node;
import org.wyona.yanel.core.navigation.Sitetree;
import org.wyona.yanel.core.serialization.SerializerFactory;
import org.wyona.yanel.core.source.SourceResolver;
import org.wyona.yanel.core.source.YanelStreamSource;
import org.wyona.yanel.core.transformation.I18nTransformer2;
import org.wyona.yanel.core.util.DateUtil;
import org.wyona.yanel.core.util.HttpServletRequestHelper;
import org.wyona.yanel.core.workflow.Workflow;
import org.wyona.yanel.core.workflow.WorkflowException;
import org.wyona.yanel.core.workflow.WorkflowHelper;
import org.wyona.yanel.core.map.Map;
import org.wyona.yanel.core.map.Realm;
import org.wyona.yanel.core.util.ResourceAttributeHelper;
import org.wyona.yanel.servlet.IdentityMap;
import org.wyona.yanel.servlet.communication.HttpRequest;
import org.wyona.yanel.servlet.communication.HttpResponse;
import org.wyona.security.core.api.Identity;
import org.wyona.security.core.api.Usecase;
import org.wyona.security.core.api.User;
import org.apache.log4j.Logger;
import org.apache.xalan.transformer.TransformerIdentityImpl;
import org.apache.xml.resolver.tools.CatalogResolver;
import org.apache.xml.serializer.Serializer;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SimpleTrigger;
import org.quartz.Trigger;
import org.quartz.impl.StdSchedulerFactory;

/**
 * Main entry of Yanel webapp
 */
public class YanelServlet extends HttpServlet {

    // Loggers: general servlet log, access log, and a dedicated 404 log.
    private static Logger log = Logger.getLogger(YanelServlet.class);
    private static Logger logAccess = Logger.getLogger("Access");
    private static Logger log404 = Logger.getLogger("404");

    private Map map;                       // realm/path mapping, resolved in init()
    private Yanel yanelInstance;
    private Sitetree sitetree;
    private File xsltInfoAndException;     // XSLT used for info/exception screens
    private String xsltLoginScreenDefault; // XSLT used for the default login screen
    private boolean displayMostRecentVersion = true;
    public static final String IDENTITY_MAP_KEY = "identity-map";
    private static final String TOOLBAR_USECASE = "toolbar"; //TODO is this the same as YanelAuthoringUI.TOOLBAR_KEY?
    // NOTE(review): the literal below appears truncated in this copy of the file
    // (extraction artifact) — verify the full namespace URI against upstream.
    public static final String NAMESPACE = "http:
    // HTTP method names dispatched by service().
    private static final String METHOD_PROPFIND = "PROPFIND";
    private static final String METHOD_OPTIONS = "OPTIONS";
    private static final String METHOD_GET = "GET";
    private static final String METHOD_POST = "POST";
    private static final String METHOD_PUT = "PUT";
    private static final String METHOD_DELETE = "DELETE";
    private static final String HTTP_REFERRER = "Referer";
    private String sslPort = null;
    private String toolbarMasterSwitch = "off";
    private String reservedPrefix;         // URL prefix for Yanel-internal/global resources
    private String servletContextRealPath;
    private int cacheExpires = 0;          // expires value (hours?) for static content; 0 = disabled — TODO confirm unit
    private YanelHTMLUI yanelUI;
    private boolean logAccessEnabled = false;
    public static final String DEFAULT_ENCODING = "UTF-8";
    // Well-known request parameter names understood by this servlet.
    public static final String YANEL_ACCESS_POLICY_USECASE = "yanel.policy";
    public static final String YANEL_USECASE = "yanel.usecase";
    public static final String YANEL_RESOURCE = "yanel.resource";
    public static final String YANEL_RESOURCE_USECASE = YANEL_RESOURCE + ".usecase";
    public static final String YANEL_RESOURCE_REVISION = YANEL_RESOURCE + ".revision";
    public static final String YANEL_RESOURCE_WORKFLOW_TRANSITION = YANEL_RESOURCE + ".workflow.transition";
    public static final String YANEL_RESOURCE_WORKFLOW_TRANSITION_OUTPUT = YANEL_RESOURCE_WORKFLOW_TRANSITION + ".output";
    public static final String VIEW_ID_PARAM_NAME = "yanel.resource.viewid";
    public static final String RESOURCE_META_ID_PARAM_NAME = "yanel.resource.meta";
    public static final String RELEASE_LOCK = "release-lock";
    private static final String CONTENT_TYPE_XHTML = "xhtml";
    private static String ANALYTICS_COOKIE_NAME = "_yanel-analytics";
    private Scheduler scheduler;           // Quartz scheduler, started in init() when enabled

    /**
     * Servlet initialisation: reads init parameters, bootstraps the Yanel core
     * and (optionally) starts the Quartz scheduler. (Body continues below.)
     */
    @Override
    public void init(ServletConfig config) throws ServletException {
servletContextRealPath = config.getServletContext().getRealPath("/");
        xsltInfoAndException = org.wyona.commons.io.FileUtil.file(servletContextRealPath, config.getInitParameter("exception-and-info-screen-xslt"));
        xsltLoginScreenDefault = config.getInitParameter("login-screen-xslt");
        // new Boolean(null) yields false, so a missing init parameter disables this flag.
        displayMostRecentVersion = new Boolean(config.getInitParameter("workflow.not-live.most-recent-version")).booleanValue();
        try {
            yanelInstance = Yanel.getInstance();
            yanelInstance.init();
            map = yanelInstance.getMapImpl("map");
            sitetree = yanelInstance.getSitetreeImpl("repo-navigation");
            sslPort = config.getInitParameter("ssl-port");
            toolbarMasterSwitch = config.getInitParameter("toolbar-master-switch");
            reservedPrefix = yanelInstance.getReservedPrefix();
            String expires = config.getInitParameter("static-content-cache-expires");
            if (expires != null) {
                // NOTE(review): throws NumberFormatException on a malformed value, which is
                // caught below and aborts servlet startup — presumably intentional.
                this.cacheExpires = Integer.parseInt(expires);
            }
            yanelUI = new YanelHTMLUI(map, reservedPrefix);
            // TODO: Make this value configurable also per realm or per individual user!
            logAccessEnabled = new Boolean(config.getInitParameter("log-access")).booleanValue();
            if (yanelInstance.isSchedulerEnabled()) {
                log.warn("Startup scheduler ...");
                scheduler = StdSchedulerFactory.getDefaultScheduler();
                Realm[] realms = yanelInstance.getRealmConfiguration().getRealms();
                for (int i = 0; i < realms.length; i++) {
                    if (realms[i] instanceof org.wyona.yanel.core.map.RealmWithConfigurationExceptionImpl) {
                        // Misconfigured realm: log and continue with the remaining realms.
                        String eMessage = ((org.wyona.yanel.core.map.RealmWithConfigurationExceptionImpl) realms[i]).getConfigurationException().getMessage();
                        log.error("Realm '" + realms[i].getID() + "' has thrown a configuration exception: " + eMessage);
                    } else {
                        // Schedule per-realm jobs when the realm repository provides a config file.
                        String schedulerJobsPath = "/scheduler-jobs.xml";
                        if (realms[i].getRepository().existsNode(schedulerJobsPath)) {
                            log.warn("DEBUG: Scheduler jobs config found for realm: " + realms[i].getRepository().getID());
                            org.wyona.yanel.impl.scheduler.QuartzSchedulerUtil.schedule(scheduler, XMLHelper.readDocument(realms[i].getRepository().getNode(schedulerJobsPath).getInputStream()), realms[i].getRepository().getID());
                        }
                    }
                }
                // Heartbeat job: repeats forever, once per minute (60L * 1000L ms).
                String groupName = "yanel";
                JobDetail jobDetail = new JobDetail("heartbeatJob", groupName, org.wyona.yanel.servlet.HeartbeatJob.class);
                Date startDate = new Date();
                Date endDate = null;
                Trigger trigger = new SimpleTrigger("heartbeatTrigger", groupName, startDate, endDate, SimpleTrigger.REPEAT_INDEFINITELY, 60L * 1000L);
                scheduler.scheduleJob(jobDetail, trigger);
                scheduler.start();
            }
        } catch (Exception e) {
            // Any bootstrap failure aborts servlet initialisation (cause preserved).
            log.error(e.getMessage(), e);
            throw new ServletException(e.getMessage(), e);
        }
    }

    /**
     * Dispatch requests
     */
    @Override
    protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // NOTE: Do not add code outside the try-catch block, because otherwise exceptions won't be logged
        try {
            String httpAcceptMediaTypes = request.getHeader("Accept");
            String httpAcceptLanguage = request.getHeader("Accept-Language");
            String yanelUsecase =
request.getParameter(YANEL_USECASE); if(yanelUsecase != null && yanelUsecase.equals("logout")) { // INFO: Logout from Yanel if(doLogout(request, response) != null) return; } else if(yanelUsecase != null && yanelUsecase.equals("create")) { // INFO: Create a new resource if(doCreate(request, response) != null) return; } // Check authorization and if authorization failed, then try to authenticate if(doAccessControl(request, response) != null) { // INFO: Either redirect (after successful authentication) or access denied (and response will send the login screen) return; } else { if (log.isDebugEnabled()) log.debug("Access granted: " + request.getServletPath()); } // Check for requests re policies String policyRequestPara = request.getParameter(YANEL_ACCESS_POLICY_USECASE); if (policyRequestPara != null) { doAccessPolicyRequest(request, response, policyRequestPara); return; } // Check for requests for global data Resource resource = getResource(request, response); String path = resource.getPath(); if (path.indexOf("/" + reservedPrefix + "/") == 0) { getGlobalData(request, response); return; } String value = request.getParameter(YANEL_RESOURCE_USECASE); // Delete node if (value != null && value.equals("delete")) { handleDeleteUsecase(request, response); return; } // Delegate ... 
String method = request.getMethod(); if (method.equals(METHOD_PROPFIND)) { doPropfind(request, response); } else if (method.equals(METHOD_GET)) { doGet(request, response); } else if (method.equals(METHOD_POST)) { doPost(request, response); } else if (method.equals(METHOD_PUT)) { doPut(request, response); } else if (method.equals(METHOD_DELETE)) { doDelete(request, response); } else if (method.equals(METHOD_OPTIONS)) { doOptions(request, response); } else { log.error("No such method implemented: " + method); response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED); } } catch (ServletException e) { log.error(e, e); throw new ServletException(e.getMessage(), e); } catch (IOException e) { log.error(e, e); throw new IOException(e.getMessage()); } // NOTE: This was our last chance to log an exception, hence do not add code outside the try-catch block } /** * HTTP GET implementation. */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { HttpSession session = request.getSession(true); Resource resource = getResource(request, response); // Enable or disable toolbar yanelUI.switchToolbar(request); String transition = request.getParameter(YANEL_RESOURCE_WORKFLOW_TRANSITION); if (transition != null) { executeWorkflowTransition(request, response, request.getParameter(YANEL_RESOURCE_REVISION), transition); return; } // Check for requests refered by WebDAV String yanelWebDAV = request.getParameter("yanel.webdav"); if(yanelWebDAV != null && yanelWebDAV.equals("propfind1")) { log.info("WebDAV client (" + request.getHeader("User-Agent") + ") requests to \"edit\" a resource: " + resource.getRealm() + ", " + resource.getPath()); //return; } String value = request.getParameter(YANEL_RESOURCE_USECASE); try { if (value != null && value.equals(RELEASE_LOCK)) { log.warn("Try to release lock ..."); if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { VersionableV2 versionable = 
(VersionableV2)resource; String checkoutUserID = versionable.getCheckoutUserID(); String userID = getEnvironment(request, response).getIdentity().getUsername(); if (checkoutUserID.equals(userID)) { try { versionable.cancelCheckout(); log.debug("Lock has been released."); response.setStatus(HttpServletResponse.SC_OK); response.setContentType("text/html" + "; charset=" + "UTF-8"); String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(resource.getPath()); StringBuilder sb = new StringBuilder("<html xmlns=\"http: PrintWriter w = response.getWriter(); w.print(sb); return; } catch (Exception e) { throw new ServletException("Releasing the lock of <" + resource.getPath() + "> failed because of: " + e.getMessage(), e); } } else { String eMessage = "Releasing the lock of '" + resource.getPath() + "' failed because checkout user '" + checkoutUserID + "' and session user '" + userID + "' are not the same!"; log.warn(eMessage); throw new ServletException(eMessage); } } return; } else if (value != null && value.equals("roll-back")) { log.debug("Roll back ..."); org.wyona.yanel.core.util.VersioningUtil.rollBack(resource, request.getParameter(YANEL_RESOURCE_REVISION), getIdentity(request, map).getUsername()); // TODO: Send confirmation screen getContent(request, response); return; } else { getContent(request, response); return; } } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } /** * Returns the mime-type according to the given file extension. * Default is application/octet-stream. 
* @param extension * @return */ private static String guessMimeType(String extension) { String ext = extension.toLowerCase(); if (ext.equals("html") || ext.equals("htm")) return "text/html"; if (ext.equals("css")) return "text/css"; if (ext.equals("txt")) return "text/plain"; if (ext.equals("js")) return "application/x-javascript"; if (ext.equals("jpg") || ext.equals("jpg")) return "image/jpeg"; if (ext.equals("gif")) return "image/gif"; if (ext.equals("pdf")) return "application/pdf"; if (ext.equals("zip")) return "application/zip"; if (ext.equals("htc")) return "text/x-component"; // TODO: add more mime types // TODO: and move to MimeTypeUtil return "application/octet-stream"; // default } /** * Get view of resource */ private void getContent(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { View view = null; org.w3c.dom.Document doc = null; try { doc = getDocument(NAMESPACE, "yanel"); } catch (Exception e) { throw new ServletException(e.getMessage(), e); } Element rootElement = doc.getDocumentElement(); rootElement.setAttribute("servlet-context-real-path", servletContextRealPath); Element requestElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "request")); requestElement.setAttributeNS(NAMESPACE, "uri", request.getRequestURI()); requestElement.setAttributeNS(NAMESPACE, "servlet-path", request.getServletPath()); HttpSession session = request.getSession(true); Element sessionElement = (Element) rootElement.appendChild(doc.createElement("session")); sessionElement.setAttribute("id", session.getId()); Enumeration<?> attrNames = session.getAttributeNames(); if (!attrNames.hasMoreElements()) { Element sessionNoAttributesElement = (Element) sessionElement.appendChild(doc.createElement("no-attributes")); } while (attrNames.hasMoreElements()) { String name = (String)attrNames.nextElement(); String value = session.getAttribute(name).toString(); Element sessionAttributeElement = (Element) 
sessionElement.appendChild(doc.createElement("attribute"));
            sessionAttributeElement.setAttribute("name", name);
            sessionAttributeElement.appendChild(doc.createTextNode(value));
        }

        String usecase = request.getParameter(YANEL_RESOURCE_USECASE);
        Resource res = null;
        long lastModified = -1;
        long size = -1;

        // START first try: resolve the resource and obtain a View via whichever
        // Viewable/Versionable/Workflowable interfaces it implements.
        try {
            Environment environment = getEnvironment(request, response);
            res = getResource(request, response);
            if (res != null) {
                Element resourceElement = getResourceMetaData(res, doc, rootElement);
                Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view"));
                if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "1")) {
                    // ViewableV1: view is built directly from the request.
                    if (log.isDebugEnabled()) log.debug("Resource is viewable V1");
                    viewElement.setAttributeNS(NAMESPACE, "version", "1");
                    appendViewDescriptors(doc, viewElement, ((ViewableV1) res).getViewDescriptors());
                    String viewId = request.getParameter(VIEW_ID_PARAM_NAME);
                    try {
                        view = ((ViewableV1) res).getView(request, viewId);
                    } catch (org.wyona.yarep.core.NoSuchNodeException e) {
                        String message = e.getMessage();
                        log.error(message, e);
                        do404(request, response, doc, message);
                        return;
                    } catch (Exception e) {
                        // Any other failure is reported as a 500 with an <exception> element.
                        String message = e.getMessage();
                        log.error(message, e);
                        Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
                        exceptionElement.appendChild(doc.createTextNode(message));
                        exceptionElement.setAttributeNS(NAMESPACE, "status", "500");
                        response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                        setYanelOutput(request, response, doc);
                        return;
                    }
                } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "2")) {
                    // ViewableV2: view selection depends on revision, state of view and workflow.
                    if (log.isDebugEnabled()) log.debug("Resource is viewable V2");
                    viewElement.setAttributeNS(NAMESPACE, "version", "2");
                    appendViewDescriptors(doc, viewElement, ((ViewableV2) res).getViewDescriptors());
                    if (!((ViewableV2) res).exists()) {
                        log.warn("No such ViewableV2 resource: " + res.getPath());
                        log.warn("TODO: It seems like many ViewableV2 resources are not implementing exists() properly!");
                        //do404(request, response, doc, res.getPath());
                        //return;
                    }
                    size = ((ViewableV2) res).getSize();
                    Element sizeElement = (Element) resourceElement.appendChild(doc.createElement("size"));
                    sizeElement.appendChild(doc.createTextNode(String.valueOf(size)));
                    String viewId = request.getParameter(VIEW_ID_PARAM_NAME);
                    try {
                        String revisionName = request.getParameter(YANEL_RESOURCE_REVISION);
                        // NOTE: Check also if usecase is not roll-back, because roll-back is also using the yanel.resource.revision
                        if (revisionName != null && ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2") && !isRollBack(request)) {
                            // Explicit revision requested.
                            view = ((VersionableV2) res).getView(viewId, revisionName);
                        } else if (environment.getStateOfView().equals(StateOfView.LIVE) && ResourceAttributeHelper.hasAttributeImplemented(res, "Workflowable", "1") && WorkflowHelper.getWorkflow(res) != null) {
                            // TODO: Check if resource actually exists (see the exist problem above), because even it doesn't exist, the workflowable interfaces can return something although it doesn't really make sense. For example if a resource type is workflowable, but it has no workflow associated with it, then WorkflowHelper.isLive will nevertheless return true, whereas WorkflowHelper.getLiveView will throw an exception!
                            if (!((ViewableV2) res).exists()) {
                                log.warn("No such ViewableV2 resource: " + res.getPath());
                                log.warn("TODO: It seems like many ViewableV2 resources are not implementing exists() properly!");
                                do404(request, response, doc, res.getPath());
                                return;
                            }
                            WorkflowableV1 workflowable = (WorkflowableV1)res;
                            if (workflowable.isLive()) {
                                view = workflowable.getLiveView(viewId);
                            } else {
                                String message = "The viewable (V2) resource '" + res.getPath() + "' is WorkflowableV1, but has not been published yet.";
                                log.warn(message);
                                // TODO: Make this configurable per resource (or rather workflowable interface) or per realm?!
                                if (displayMostRecentVersion) { // INFO: Because of backwards compatibility the default should display the most recent version
                                    log.warn("Instead the live version, the most recent version will be displayed!");
                                    view = ((ViewableV2) res).getView(viewId);
                                } else {
                                    log.warn("Instead the live version, a 404 will be displayed!");
                                    // TODO: Instead a 404 one might want to show a different kind of screen
                                    do404(request, response, doc, message);
                                    return;
                                }
                            }
                        } else {
                            // Default: most recent (authoring) view.
                            view = ((ViewableV2) res).getView(viewId);
                        }
                    } catch (org.wyona.yarep.core.NoSuchNodeException e) {
                        String message = e.getMessage();
                        log.warn(message, e);
                        do404(request, response, doc, message);
                        return;
                    } catch (org.wyona.yanel.core.ResourceNotFoundException e) {
                        String message = e.getMessage();
                        log.warn(message, e);
                        do404(request, response, doc, message);
                        return;
                    }
                } else {
                    // NO Viewable interface implemented!
                    String message = res.getClass().getName() + " is not viewable! (" + res.getPath() + ", " + res.getRealm() + ")";
                    log.error(message);
                    Element noViewElement = (Element) resourceElement.appendChild(doc.createElement("not-viewable"));
                    noViewElement.appendChild(doc.createTextNode(res.getClass().getName() + " is not viewable!"));
                    Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
                    exceptionElement.appendChild(doc.createTextNode(message));
                    exceptionElement.setAttributeNS(NAMESPACE, "status", "501");
                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_IMPLEMENTED);
                    setYanelOutput(request, response, doc);
                    return;
                }
                if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
                    lastModified = ((ModifiableV2) res).getLastModified();
                    Element lastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("last-modified"));
                    lastModifiedElement.appendChild(doc.createTextNode(new Date(lastModified).toString()));
                } else {
                    Element noLastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("no-last-modified"));
                }
                // Get the revisions, but only in the meta usecase (because of performance reasons)
                if (request.getParameter(RESOURCE_META_ID_PARAM_NAME) != null) {
                    appendRevisionsAndWorkflow(doc, resourceElement, res, request);
                }
                if (ResourceAttributeHelper.hasAttributeImplemented(res, "Translatable", "1")) {
                    // List all language translations of this resource.
                    TranslatableV1 translatable = ((TranslatableV1) res);
                    Element translationsElement = (Element) resourceElement.appendChild(doc.createElement("translations"));
                    String[] languages = translatable.getLanguages();
                    for (int i=0; i<languages.length; i++) {
                        Element translationElement = (Element) translationsElement.appendChild(doc.createElement("translation"));
                        translationElement.setAttribute("language", languages[i]);
                        String path = translatable.getTranslation(languages[i]).getPath();
                        translationElement.setAttribute("path", path);
                    }
                }
                if (usecase != null && usecase.equals("checkout")) {
                    if(log.isDebugEnabled()) log.debug("Checkout data ...");
                    if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) {
                        // NOTE: The code below will throw an exception if the document is checked out already by another user.
                        String userID = environment.getIdentity().getUsername();
                        VersionableV2 versionable = (VersionableV2)res;
                        if (versionable.isCheckedOut()) {
                            String checkoutUserID = versionable.getCheckoutUserID();
                            if (checkoutUserID.equals(userID)) {
                                log.warn("Resource " + res.getPath() + " is already checked out by this user: " + checkoutUserID);
                            } else {
                                if (isClientSupportingNeutron(request)) {
                                    // Neutron client: report the conflict as a structured XML exception.
                                    String eMessage = "Resource '" + res.getPath() + "' is already checked out by another user: " + checkoutUserID;
                                    response.setContentType("application/xml");
                                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                                    // TODO: Checkout date and break-lock (optional)
                                    response.getWriter().print(XMLExceptionV1.getCheckoutException(eMessage, res.getPath(), checkoutUserID, null));
                                    return;
                                } else {
                                    throw new Exception("Resource '" + res.getPath() + "' is already checked out by another user: " + checkoutUserID);
                                }
                            }
                        } else {
                            versionable.checkout(userID);
                        }
                    } else {
                        log.warn("Acquire lock has not been implemented yet ...!");
                        // acquireLock();
                    }
                }
            } else {
                Element resourceIsNullElement = (Element) rootElement.appendChild(doc.createElement("resource-is-null"));
            }
        } catch (org.wyona.yarep.core.NoSuchNodeException e) {
            String message = e.getMessage();
            log.warn(message, e);
            do404(request, response, doc, message);
            return;
        } catch (org.wyona.yanel.core.ResourceNotFoundException e) {
            String message = e.getMessage();
            log.warn(message, e);
            do404(request, response, doc, message);
            return;
        } catch (Exception e) {
            // Catch-all: embed message plus stack trace into the response document.
            log.error(e.getMessage(), e);
            String message = e.toString() + "\n\n" + getStackTrace(e);
            //String message = e.toString();
            Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
            exceptionElement.appendChild(doc.createTextNode(message));
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            setYanelOutput(request, response, doc);
            return;
        }
        // END first try

        // START introspection generation
        if
(usecase != null && usecase.equals("introspection")) { sendIntrospectionAsResponse(res, doc, rootElement, request, response); return; } // END introspection generation String meta = request.getParameter(RESOURCE_META_ID_PARAM_NAME); if (meta != null) { if (meta.length() > 0) { log.warn("TODO: meta: " + meta); } else { log.debug("Show all meta"); } response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); setYanelOutput(request, response, doc); return; } if (view != null) { if (generateResponse(view, res, request, response, doc, size, lastModified) != null) return; } else { String message = "View is null!"; Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); } response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } /** * HTTP POST implementation. */ @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String transition = request.getParameter(YANEL_RESOURCE_WORKFLOW_TRANSITION); if (transition != null) { executeWorkflowTransition(request, response, request.getParameter(YANEL_RESOURCE_REVISION), transition); return; } String value = request.getParameter(YANEL_RESOURCE_USECASE); if (value != null && value.equals("save")) { log.debug("Save data ..."); save(request, response, false); return; } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); save(request, response, true); log.warn("Release lock has not been implemented yet ..."); // releaseLock(); return; } else { log.info("No parameter " + YANEL_RESOURCE_USECASE + "!"); String contentType = request.getContentType(); // TODO: Check for type (see section 9.2 of APP spec (e.g. 
draft 16) if (contentType.indexOf("application/atom+xml") >= 0) { InputStream in = intercept(request.getInputStream()); // Create new Atom entry try { String atomEntryUniversalName = "<{http: Realm realm = yanelInstance.getMap().getRealm(request.getServletPath()); String newEntryPath = yanelInstance.getMap().getPath(realm, request.getServletPath() + "/" + new Date().getTime() + ".xml"); log.debug("Realm and Path of new Atom entry: " + realm + " " + newEntryPath); Resource atomEntryResource = yanelInstance.getResourceManager().getResource(getEnvironment(request, response), realm, newEntryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new ResourceTypeIdentifier(atomEntryUniversalName, null)); ((ModifiableV2)atomEntryResource).write(in); byte buffer[] = new byte[8192]; int bytesRead; InputStream resourceIn = ((ModifiableV2)atomEntryResource).getInputStream(); OutputStream responseOut = response.getOutputStream(); while ((bytesRead = resourceIn.read(buffer)) != -1) { responseOut.write(buffer, 0, bytesRead); } resourceIn.close(); //responseOut.close(); // TODO: Fix Location ... response.setHeader("Location", "http://ulysses.wyona.org" + newEntryPath); response.setStatus(javax.servlet.http.HttpServletResponse.SC_CREATED); return; } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } // Enable or disable toolbar yanelUI.switchToolbar(request); getContent(request, response); } } /** * Perform the given transition on the indicated revision. 
* @param request the request carrying the workflow parameters
     * @param response response the confirmation (XHTML or XML) is written to
     * @param transition workflow transition id to execute
     * @throws ServletException
     * @throws IOException
     */
    private void executeWorkflowTransition(HttpServletRequest request, HttpServletResponse response, String revision, String transition) throws ServletException, IOException {
        Resource resource = getResource(request, response);
        if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Workflowable", "1")) {
            WorkflowableV1 workflowable = (WorkflowableV1)resource;
            try {
                String outputFormat = request.getParameter(YANEL_RESOURCE_WORKFLOW_TRANSITION_OUTPUT);
                StringBuilder sb = null;
                workflowable.doTransition(transition, revision);
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
                if (outputFormat != null && CONTENT_TYPE_XHTML.equals(outputFormat.toLowerCase())) {
                    // XHTML confirmation page linking back to the referring page.
                    response.setContentType("text/html; charset=" + DEFAULT_ENCODING);
                    // NOTE(review): the HTML literal below is truncated in this copy (extraction artifact) — verify against upstream.
                    sb = new StringBuilder("<html xmlns=\"http: + " has been performed.</p><p>Return to <a href=\"" + request.getHeader(HTTP_REFERRER) + "\">the page</a>.</p></body></html>");
                } else {
                    // Default: return the workflow introspection as XML.
                    log.warn("No output format query string parameter '" + YANEL_RESOURCE_WORKFLOW_TRANSITION_OUTPUT + "' has been specified.");
                    response.setContentType("application/xml; charset=" + DEFAULT_ENCODING);
                    sb = new StringBuilder("<?xml version=\"1.0\"?>");
                    sb.append(workflowable.getWorkflowIntrospection());
                }
                PrintWriter w = response.getWriter();
                w.print(sb);
            } catch (WorkflowException e) {
                log.error(e, e);
                response.setContentType("application/xml; charset=" + DEFAULT_ENCODING);
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                PrintWriter w = response.getWriter();
                // TODO: XMLExceptionV1 is part of Neutron and hence not really appropriate for this kind of exception
                w.print(XMLExceptionV1.getDefaultException(XMLExceptionV1.AUTHORIZATION, e.getMessage()));
                return;
            }
        } else {
            log.warn("Resource not workflowable: " + resource.getPath());
        }
    }

    /**
     * HTTP PUT implementation.
*/
    @Override
    protected void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // TODO: Reuse code doPost resp. share code with doPut
        String value = request.getParameter(YANEL_RESOURCE_USECASE);
        if (value != null && value.equals("save")) {
            log.debug("Save data ...");
            save(request, response, false);
            return;
        } else if (value != null && value.equals("checkin")) {
            log.debug("Checkin data ...");
            save(request, response, true);
            log.warn("Release lock has not been implemented yet ...!");
            // releaseLock();
            return;
        } else {
            log.warn("No parameter " + YANEL_RESOURCE_USECASE + "!");
            String contentType = request.getContentType();
            if (contentType != null && contentType.indexOf("application/atom+xml") >= 0) {
                InputStream in = intercept(request.getInputStream());
                // Overwrite existing atom entry
                try {
                    // NOTE(review): the literal below is truncated in this copy (extraction artifact) — verify against upstream.
                    String atomEntryUniversalName = "<{http:
                    Realm realm = yanelInstance.getMap().getRealm(request.getServletPath());
                    String entryPath = yanelInstance.getMap().getPath(realm, request.getServletPath());
                    log.debug("Realm and Path of new Atom entry: " + realm + " " + entryPath);
                    Resource atomEntryResource = yanelInstance.getResourceManager().getResource(getEnvironment(request, response), realm, entryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new ResourceTypeIdentifier(atomEntryUniversalName, null));
                    // TODO: There seems to be a problem ...
                    ((ModifiableV2)atomEntryResource).write(in);
                    // NOTE: This method does not update updated date
                    /*
                    OutputStream out = ((ModifiableV2)atomEntry).getOutputStream(entryPath);
                    byte buffer[] = new byte[8192];
                    int bytesRead;
                    while ((bytesRead = in.read(buffer)) != -1) {
                        out.write(buffer, 0, bytesRead);
                    }
                    */
                    log.info("Atom entry has been saved: " + entryPath);
                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
                    return;
                } catch (Exception e) {
                    throw new ServletException(e.getMessage(), e);
                }
            } else {
                // Non-Atom PUT: treat the request body as data to be saved.
                Resource resource = getResource(request, response);
                log.warn("Client (" + request.getHeader("User-Agent") + ") requests to save a resource: " + resource.getRealm() + ", " + resource.getPath());
                save(request, response, false);
                return;
            }
        }
    }

    /**
     * HTTP DELETE implementation.
     */
    @Override
    protected void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            Resource res = getResource(request, response);
            if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
                if (((ModifiableV2) res).delete()) {
                    // TODO: Also delete resource config! What about access policies?!
                    log.debug("Resource has been deleted: " + res);
                    response.setStatus(HttpServletResponse.SC_OK);
                    response.setContentType("text/html" + "; charset=" + "UTF-8");
                    String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(res.getPath());
                    // NOTE(review): the HTML literal below is truncated in this copy (extraction artifact) — verify against upstream.
                    StringBuilder sb = new StringBuilder("<html xmlns=\"http: PrintWriter w = response.getWriter(); w.print(sb); return;
                } else {
                    log.warn("Resource could not be deleted: " + res);
                    response.setStatus(HttpServletResponse.SC_FORBIDDEN);
                    return;
                }
            } else {
                log.error("Resource '" + res + "' has interface ModifiableV2 not implemented."
); response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED); return; } } catch (Exception e) { throw new ServletException("Could not delete resource with URL <" + request.getRequestURL() + ">: " + e.getMessage(), e); } } /** * Resolve resource for a specific request */ private Resource getResource(HttpServletRequest request, HttpServletResponse response) throws ServletException { try { Realm realm = map.getRealm(request.getServletPath()); String path = map.getPath(realm, request.getServletPath()); HttpRequest httpRequest = (HttpRequest)request; HttpResponse httpResponse = new HttpResponse(response); Resource res = yanelInstance.getResourceManager().getResource(getEnvironment(httpRequest, httpResponse), realm, path); return res; } catch (Exception e) { throw new ServletException("Could not get resource for request <" + request.getServletPath() + ">: " + e.getMessage(), e); } } /** * Get environment containing identity , client request, etc. */ private Environment getEnvironment(HttpServletRequest request, HttpServletResponse response) throws ServletException { Identity identity; try { identity = getIdentity(request, map); Realm realm = map.getRealm(request.getServletPath()); String stateOfView = StateOfView.AUTHORING; if (yanelUI.isToolbarEnabled(request)) { stateOfView = StateOfView.AUTHORING; } else { stateOfView = StateOfView.LIVE; } //log.debug("State of view: " + stateOfView); Environment environment = new Environment(request, response, identity, stateOfView, null); return environment; } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } /** * Save data */ private void save(HttpServletRequest request, HttpServletResponse response, boolean doCheckin) throws ServletException, IOException { log.debug("Save data ..."); Resource resource = getResource(request, response); /* NOTE: Commented because the current default repo implementation does not support versioning yet. 
       if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { try { // check the resource state: Identity identity = getIdentity(request); String userID = identity.getUser().getID(); VersionableV2 versionable = (VersionableV2)resource; if (versionable.isCheckedOut()) { String checkoutUserID = versionable.getCheckoutUserID(); if (!checkoutUserID.equals(userID)) { throw new Exception("Resource is checked out by another user: " + checkoutUserID); } } else { throw new Exception("Resource is not checked out."); } } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } */
    InputStream in = request.getInputStream();

    // Check on well-formedness ...
    // Only XML-ish content types are validated; everything else is passed through.
    String contentType = request.getContentType();
    log.debug("Content-Type: " + contentType);
    if (contentType != null && (contentType.indexOf("application/xml") >= 0 || contentType.indexOf("application/xhtml+xml") >= 0)) {
        try {
            in = XMLHelper.isWellFormed(in);
        } catch(Exception e) {
            // Not well-formed: report a Neutron XML exception instead of saving.
            log.error(e, e);
            response.setContentType("application/xml; charset=" + DEFAULT_ENCODING);
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            PrintWriter w = response.getWriter();
            w.print(XMLExceptionV1.getDefaultException(XMLExceptionV1.DATA_NOT_WELL_FORMED, e.getMessage()));
            return;
        }
    } else {
        log.info("No well-formedness check required for content type: " + contentType);
    }

    // IMPORTANT TODO: Use ModifiableV2.write(InputStream in) such that resource can modify data during saving resp. check if getOutputStream is equals null and then use write ....
    OutputStream out = null;
    Resource res = getResource(request, response);
    if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "1")) {
        // Legacy ModifiableV1 path (takes an explicit Path argument).
        out = ((ModifiableV1) res).getOutputStream(new Path(request.getServletPath()));
        write(in, out, request, response);
    } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
        try {
            out = ((ModifiableV2) res).getOutputStream();
            if (out != null) {
                write(in, out, request, response);
            } else {
                // Some ModifiableV2 implementations only support write(InputStream).
                log.warn("getOutputStream() returned null, hence fallback to write()");
                ((ModifiableV2) res).write(in);
            }
        } catch (Exception e) {
            throw new ServletException(e.getMessage(), e);
        }
    } else {
        String message = res.getClass().getName() + " is not modifiable (neither V1 nor V2)!";
        log.warn(message);
        // TODO: Differentiate between Neutron based and other clients ... (Use method isClientSupportingNeutron())
        response.setContentType("application/xml; charset=" + DEFAULT_ENCODING);
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        PrintWriter w = response.getWriter();
        // TODO: This is not really a 'checkin' problem, but rather a general 'save-data' problem, but the Neutron spec does not support such a type: http://neutron.wyona.org/draft-neutron-protocol-v0.html#rfc.section.8
        w.print(XMLExceptionV1.getDefaultException(XMLExceptionV1.CHECKIN, message));
    }

    // Optionally check in (create a new revision) after a successful save.
    if (doCheckin) {
        if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) {
            VersionableV2 versionable = (VersionableV2)resource;
            try {
                versionable.checkin("updated");
            } catch (Exception e) {
                throw new ServletException("Could not check in resource <" + resource.getPath() + ">: " + e.getMessage(), e);
            }
        }
    }
}

/**
 * Check authorization and if not authorized then authenticate.
   Return null if authorization granted, otherwise return 401 and appropriate response such that client can provide credentials for authentication */
private HttpServletResponse doAccessControl(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    // Get usecase
    Usecase usecase = getUsecase(request);

    // Get identity, realm, path
    Identity identity;
    Realm realm;
    String path;
    try {
        identity = getIdentity(request, map);
        realm = map.getRealm(request.getServletPath());
        path = map.getPath(realm, request.getServletPath());
    } catch (Exception e) {
        throw new ServletException(e.getMessage(), e);
    }

    // Check Authorization
    boolean authorized = false;
    try {
        if (log.isDebugEnabled()) log.debug("Check authorization: realm: " + realm + ", path: " + path + ", identity: " + identity + ", Usecase: " + usecase.getName());
        authorized = realm.getPolicyManager().authorize(path, identity, usecase);
        if (log.isDebugEnabled()) log.debug("Check authorization result: " + authorized);
    } catch (Exception e) {
        throw new ServletException(e.getMessage(), e);
    }

    if(!authorized) {
        // TODO: Implement HTTP BASIC/DIGEST response (see above)
        log.info("Access denied: " + getRequestURLQS(request, null, false));

        // If not on SSL yet, redirect to the HTTPS equivalent before authenticating.
        if(!request.isSecure()) {
            if(sslPort != null) {
                log.info("Redirect to SSL ...");
                try {
                    URL url = new URL(getRequestURLQS(request, null, false).toString());
                    url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile());
                    if (realm.isProxySet()) {
                        if (realm.getProxySSLPort() >= 0) {
                            log.debug("Use configured port: " + realm.getProxySSLPort());
                            url = new URL(url.getProtocol(), url.getHost(), new Integer(realm.getProxySSLPort()).intValue(), url.getFile());
                        } else {
                            log.debug("Use default port: " + url.getDefaultPort());
                            // NOTE: getDefaultPort depends on the Protocol (e.g. https is 443)
                            url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile());
                        }
                    }
                    log.info("Redirect to SSL: " + url);
                    response.setHeader("Location", url.toString());
                    // TODO: Yulup has a bug re TEMPORARY_REDIRECT
                    //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT);
                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY);
                    return response;
                } catch (Exception e) {
                    // NOTE(review): redirect failure is only logged; flow falls through
                    // to authentication below.
                    log.error(e.getMessage(), e);
                }
            } else {
                log.warn("SSL does not seem to be configured!");
            }
        } else {
            log.info("This connection is via SSL.");
        }

        if(doAuthenticate(request, response) != null) {
            log.info("Return response of web authenticator.");
            /* NOTE: Such a response can have different reasons: - Either no credentials provided yet and web authenticator is generating a response to fetch credentials - Or authentication failed and web authenticator is resending response to fetch again credentials"); - Or authentication was successful and web authenticator sends a redirect */
            if(logAccessEnabled) {
                doLogAccess(request, response);
            }
            return response;
        } else {
            // Authentication succeeded: redirect the client back to the original URL.
            try {
                log.warn("Authentication was successful for user: " + getIdentity(request, map).getUsername());
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
            URL url = new URL(getRequestURLQS(request, null, false).toString());
            if (sslPort != null) {
                url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile());
            }
            log.warn("Redirect to original request: " + url);
            //response.sendRedirect(url.toString()); // 302
            // TODO: Yulup has a bug re TEMPORARY_REDIRECT (or is the problem that the load balancer is rewritting 302 reponses?!)
            response.setHeader("Location", url.toString());
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); // 301
            //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); // 302
            return response;
        }
    } else {
        // Authorization granted: caller continues normal processing.
        log.info("Access granted: " + getRequestURLQS(request, null, false));
        return null;
    }
}

/**
 * Patch request with proxy settings re realm configuration: rewrites host,
 * port and prefix of the request URL according to the realm's proxy settings
 * and re-appends (and optionally extends) the query string.
 */
private String getRequestURLQS(HttpServletRequest request, String addQS, boolean xml) {
    try {
        Realm realm = map.getRealm(request.getServletPath());
        // TODO: Handle this exception more gracefully!
        if (realm == null) log.error("No realm found for path " +request.getServletPath());
        String proxyHostName = realm.getProxyHostName();
        int proxyPort = realm.getProxyPort();
        String proxyPrefix = realm.getProxyPrefix();

        URL url = null;
        url = new URL(request.getRequestURL().toString());
        //if(proxyHostName != null || proxyPort >= null || proxyPrefix != null) {
        if(realm.isProxySet()) {
            if (proxyHostName != null) {
                url = new URL(url.getProtocol(), proxyHostName, url.getPort(), url.getFile());
            }
            if (proxyPort >= 0) {
                url = new URL(url.getProtocol(), url.getHost(), proxyPort, url.getFile());
            } else {
                // Negative configured port means "use the protocol's default port".
                url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile());
            }
            if (proxyPrefix != null) {
                // Strip the proxy prefix from the front of the path.
                url = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile().substring(proxyPrefix.length()));
            }
            //log.debug("Proxy enabled for this realm resp. request: " + realm + ", " + url);
        } else {
            //log.debug("No proxy set for this realm resp. request: " + realm + ", " + url);
        }

        String urlQS = url.toString();
        if (request.getQueryString() != null) {
            urlQS = urlQS + "?" + request.getQueryString();
            if (addQS != null) urlQS = urlQS + "&" + addQS;
        } else {
            if (addQS != null) urlQS = urlQS + "?"
                + addQS;
        }

        // Escape ampersands when the URL is to be embedded in XML.
        if (xml) urlQS = urlQS.replaceAll("&", "&amp;");

        if(log.isDebugEnabled()) log.debug("Request: " + urlQS);
        return urlQS;
    } catch (Exception e) {
        // NOTE(review): failures are swallowed and null returned — callers must cope.
        log.error(e.getMessage(), e);
        return null;
    }
}

/**
 * Minimal WebDAV PROPFIND implementation: answers a 207 Multi-Status document
 * for depth "0" (the node itself) and depth "1" (direct children); depth
 * "infinity" is not implemented.
 */
private void doPropfind(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    Resource resource = getResource(request, response);
    //Node node = resource.getRealm().getSitetree().getNode(resource.getPath());
    Node node = sitetree.getNode(resource.getRealm(),resource.getPath());

    String depth = request.getHeader("Depth");

    StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>");
    sb.append("<multistatus xmlns=\"DAV:\">");
    if (depth.equals("0")) {
        if (node.isCollection()) {
            sb.append("  <response>");
            sb.append("    <href>"+request.getRequestURI()+"</href>");
            sb.append("    <propstat>");
            sb.append("      <prop>");
            sb.append("        <resourcetype><collection/></resourcetype>");
            sb.append("        <getcontenttype>httpd/unix-directory</getcontenttype>");
            sb.append("      </prop>");
            sb.append("      <status>HTTP/1.1 200 OK</status>");
            sb.append("    </propstat>");
            sb.append("  </response>");
        } else if (node.isResource()) {
            sb.append("  <response>");
            sb.append("    <href>"+request.getRequestURI()+"</href>");
            sb.append("    <propstat>");
            sb.append("      <prop>");
            sb.append("        <resourcetype/>");
            // TODO: Set mime type of node!
            sb.append("        <getcontenttype>application/octet-stream</getcontenttype>");
            // TODO: Set content length and last modified!
            sb.append("        <getcontentlength>0</getcontentlength>");
            sb.append("        <getlastmodified>1969.02.16</getlastmodified>");
            // See http://www.webdav.org/specs/rfc2518.html
            sb.append("        <source>\n");
            sb.append("          <link>\n");
            sb.append("            <src>" + request.getRequestURI() + "</src>\n");
            sb.append("            <dst>" + request.getRequestURI() + "?yanel.resource.modifiable.source</dst>\n");
            sb.append("          </link>\n");
            sb.append("        </source>\n");
            sb.append("      </prop>");
            sb.append("      <status>HTTP/1.1 200 OK</status>");
            sb.append("    </propstat>");
            sb.append("  </response>");
        } else {
            log.error("Neither collection nor resource!");
        }
    } else if (depth.equals("1")) {
        // TODO: Shouldn't one check with isCollection() first?!
        Node[] children = node.getChildren();
        if (children != null) {
            for (int i = 0; i < children.length; i++) {
                if (children[i].isCollection()) {
                    sb.append("  <response>\n");
                    sb.append("    <href>" + request.getRequestURI() + "/" + children[i].getName() + "/</href>\n");
                    sb.append("    <propstat>\n");
                    sb.append("      <prop>\n");
                    sb.append("        <displayname>" + children[i].getName() + "</displayname>\n");
                    sb.append("        <resourcetype><collection/></resourcetype>\n");
                    sb.append("        <getcontenttype>httpd/unix-directory</getcontenttype>\n");
                    sb.append("      </prop>\n");
                    sb.append("      <status>HTTP/1.1 200 OK</status>\n");
                    sb.append("    </propstat>\n");
                    sb.append("  </response>\n");
                } else if(children[i].isResource()) {
                    sb.append("  <response>\n");
                    sb.append("    <href>" + request.getRequestURI() + "/" + children[i].getName() + "?yanel.webdav=propfind1</href>\n");
                    sb.append("    <propstat>\n");
                    sb.append("      <prop>\n");
                    sb.append("        <displayname>" + children[i].getName() + "</displayname>\n");
                    sb.append("        <resourcetype/>\n");
                    // TODO: Set mime type of node!
                    sb.append("        <getcontenttype>application/octet-stream</getcontenttype>\n");
                    // TODO: Set content length and last modified!
                    sb.append("        <getcontentlength>0</getcontentlength>");
                    sb.append("        <getlastmodified>1969.02.16</getlastmodified>");
                    // See http://www.webdav.org/specs/rfc2518.html
                    sb.append("        <source>\n");
                    sb.append("          <link>\n");
                    sb.append("            <src>" + request.getRequestURI() + "/" + children[i].getName() + "</src>\n");
                    sb.append("            <dst>" + request.getRequestURI() + "/" + children[i].getName() + "?yanel.resource.modifiable.source</dst>\n");
                    sb.append("          </link>\n");
                    sb.append("        </source>\n");
                    sb.append("      </prop>\n");
                    sb.append("      <status>HTTP/1.1 200 OK</status>\n");
                    sb.append("    </propstat>\n");
                    sb.append("  </response>\n");
                } else {
                    log.error("Neither collection nor resource: " + children[i].getPath());
                }
            }
        } else {
            log.warn("No children!");
        }
    } else if (depth.equals("infinity")) {
        log.warn("TODO: List children and their children and their children ...");
    } else {
        log.error("No such depth: " + depth);
    }
    sb.append("</multistatus>");

    //response.setStatus(javax.servlet.http.HttpServletResponse.SC_MULTI_STATUS);
    // NOTE(review): setStatus(int, String) is deprecated in the Servlet API —
    // consider setStatus(207) once SC_MULTI_STATUS is available.
    response.setStatus(207, "Multi-Status");
    PrintWriter w = response.getWriter();
    w.print(sb);
}

/**
 * HTTP OPTIONS implementation. Advertises WebDAV class 1 compliance.
 */
@Override
protected void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    response.setHeader("DAV", "1");
    // TODO: Is there anything else to do?!
}

/**
 * Authentication
 * @return null when authentication successful or has already been authenticated, otherwise return response generated by web authenticator
 */
private HttpServletResponse doAuthenticate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    try {
        // TODO/TBD: In the case of HTTP-BASIC/DIGEST one needs to check authentication with every request
        // TODO: enhance API with flag, e.g. session-based="true/false"
        // WARNING: One needs to separate doAuthenticate from the login screen generation!
        //if (getIdentity(request) != null) return null;
        // Delegate to the realm-configured web authenticator.
        WebAuthenticator wa = map.getRealm(request.getServletPath()).getWebAuthenticator();
        return wa.doAuthenticate(request, response, map, reservedPrefix, xsltLoginScreenDefault, servletContextRealPath, sslPort);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return response;
    }
}

/**
 * Escapes all reserved xml characters (&amp; &lt; &gt; &apos; &quot;) in a string.
 * @param s input string
 * @return string with escaped characters
 */
public static String encodeXML(String s) {
    // NOTE: '&' must be escaped first, otherwise the other replacements get re-escaped.
    s = s.replaceAll("&", "&amp;");
    s = s.replaceAll("<", "&lt;");
    s = s.replaceAll(">", "&gt;");
    s = s.replaceAll("'", "&apos;");
    s = s.replaceAll("\"", "&quot;");
    return s;
}

/**
 * Do logout
 * @return null for a regular logout and a Neutron response if auth scheme is Neutron
 */
private HttpServletResponse doLogout(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    try {
        if (yanelUI.isToolbarEnabled(request)) {
            // TODO: Check if WORLD has access to the toolbar
            //if (getRealm().getPolicyManager().authorize(path, new Identity(), new Usecase(TOOLBAR_USECASE))) {
            yanelUI.disableToolbar(request);
        }
        HttpSession session = request.getSession(true);
        // TODO: should we logout only from the current realm, or from all realms?
        // -> logout only from the current realm
        Realm realm = map.getRealm(request.getServletPath());
        IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY);
        if (identityMap != null && identityMap.containsKey(realm.getID())) {
            log.info("Logout from realm: " + realm.getID());
            identityMap.remove(realm.getID());
        }

        // Neutron-Auth clients get a plain-text acknowledgement instead of a redirect.
        String clientSupportedAuthScheme = getClientAuthenticationScheme(request);
        if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) {
            // NOTE(review): 'neutronVersions' is fetched but never used — dead local?
            String neutronVersions = getClientSupportedNeutronVersions(request);
            // TODO: Reply according to which neutron versions the client supports
            // TODO: send some XML content, e.g. <logout-successful/>
            response.setContentType("text/plain; charset=" + DEFAULT_ENCODING);
            response.setStatus(HttpServletResponse.SC_OK);
            PrintWriter writer = response.getWriter();
            writer.print("Neutron Logout Successful!");
            return response;
        }

        if (log.isDebugEnabled()) log.debug("Regular Logout Successful!");
        //return null;
        // Redirect back to the original URL with the logout query string stripped.
        URL url = new URL(getRequestURLQS(request, null, false).toString());
        String urlWithoutLogoutQS = url.toString().substring(0, url.toString().lastIndexOf("?"));
        log.warn("Redirect to original request: " + urlWithoutLogoutQS);
        //response.sendRedirect(url.toString()); // 302
        response.setHeader("Location", urlWithoutLogoutQS.toString());
        //response.setHeader("Location", url.toString());
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); // 301
        //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); // 302
        return response;
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new ServletException(e.getMessage(), e);
    }
}

/**
 * Do create a new resource
 */
private HttpServletResponse doCreate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    log.error("Not implemented yet!");
    return null;
}

/**
 * Downgrades application/xhtml+xml to text/html when the client's Accept
 * header indicates it does not understand XHTML; other mime types pass through.
 */
static public String patchMimeType(String mimeType, HttpServletRequest request) throws ServletException, IOException {
    if (mimeType != null) {
        String httpAcceptMediaTypes = request.getHeader("Accept");
        if (mimeType.equals("application/xhtml+xml") && httpAcceptMediaTypes != null && httpAcceptMediaTypes.indexOf("application/xhtml+xml") < 0) {
            log.info("Patch contentType with text/html because client (" + request.getHeader("User-Agent") + ") does not seem to understand application/xhtml+xml");
            return "text/html";
        } else if (mimeType.equals("text/html")) {
            log.info("Mime type was already set to text/html for request: " + request.getServletPath());
        }
    } else {
        log.warn("No mime type returned for request: " + request.getServletPath());
    }
    return mimeType;
}

/**
 * Intercept InputStream and log content ...
 * Fully buffers the stream in memory and returns a replayable copy.
 */
private InputStream intercept(InputStream in) throws IOException {
    java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
    byte[] buf = new byte[8192];
    int bytesR;
    while ((bytesR = in.read(buf)) != -1) {
        baos.write(buf, 0, bytesR);
    }

    // Buffer within memory (TODO: Maybe replace with File-buffering ...)
    byte[] memBuffer = baos.toByteArray();

    // NOTE(review): logs the whole request body at debug level — can be large.
    log.debug("InputStream: " + baos);

    return new java.io.ByteArrayInputStream(memBuffer);
}

/**
 * Generate a "Yanel" response (page information, 404, internal server error, ...)
 */
private void setYanelOutput(HttpServletRequest request, HttpServletResponse response, Document doc) throws ServletException {
    String path = getResource(request, response).getPath();
    String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(path);
    try {
        String yanelFormat = request.getParameter("yanel.format");
        if(yanelFormat != null && yanelFormat.equals("xml")) {
            // Raw XML requested: serialize the DOM directly.
            response.setContentType("application/xml; charset=" + DEFAULT_ENCODING);
            XMLHelper.writeDocument(doc, response.getOutputStream());
            /* OutputStream out = response.getOutputStream(); javax.xml.transform.TransformerFactory.newInstance().newTransformer().transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(out)); out.close(); */
        } else {
            String mimeType = patchMimeType("application/xhtml+xml", request);
            // TODO: doLogAccess
            response.setContentType(mimeType + "; charset=" + DEFAULT_ENCODING);

            // create identity transformer which serves as a dom-to-sax transformer
            TransformerIdentityImpl transformer = new TransformerIdentityImpl();

            // create xslt transformer:
            SAXTransformerFactory saxTransformerFactory = (SAXTransformerFactory)SAXTransformerFactory.newInstance();
            TransformerHandler xsltTransformer = saxTransformerFactory.newTransformerHandler(new StreamSource(xsltInfoAndException));
            xsltTransformer.getTransformer().setParameter("yanel.back2realm", backToRealm);
            xsltTransformer.getTransformer().setParameter("yanel.reservedPrefix", reservedPrefix);

            // create i18n transformer:
            I18nTransformer2 i18nTransformer = new I18nTransformer2("global", getLanguage(request), yanelInstance.getMap().getRealm(request.getServletPath()).getDefaultLanguage());
            // NOTE(review): 'catalogResolver' below is unused — a fresh CatalogResolver
            // is passed to setEntityResolver() instead. Dead local?
            CatalogResolver catalogResolver = new CatalogResolver();
            i18nTransformer.setEntityResolver(new CatalogResolver());

            // create serializer:
            Serializer serializer = SerializerFactory.getSerializer(SerializerFactory.XHTML_STRICT);

            // chain everything together (create a pipeline):
            xsltTransformer.setResult(new SAXResult(i18nTransformer));
            i18nTransformer.setResult(new SAXResult(serializer.asContentHandler()));
            serializer.setOutputStream(response.getOutputStream());

            // execute pipeline:
            transformer.transform(new DOMSource(doc), new SAXResult(xsltTransformer));
        }
    } catch (Exception e) {
        throw new ServletException(e.getMessage(), e);
    }
}

/**
 * Get language with the following priorization: 1) yanel.meta.language query string parameter, 2) Accept-Language header, 3) Default en
 */
private String getLanguage(HttpServletRequest request) throws Exception {
    // TODO: Shouldn't this be replaced by Resource.getRequestedLanguage() or Resource.getContentLanguage() ?!
    String language = request.getParameter("yanel.meta.language");
    if (language == null) {
        language = request.getHeader("Accept-Language");
        if (language != null) {
            // Take only the first entry ("en-US,en;q=0.9" -> "en-US") ...
            int commaIndex = language.indexOf(",");
            if (commaIndex > 0) {
                language = language.substring(0, commaIndex);
            }
            // ... and strip the region subtag ("en-US" -> "en").
            int dashIndex = language.indexOf("-");
            if (dashIndex > 0) {
                language = language.substring(0, dashIndex);
            }
        }
    }
    if(language != null && language.length() > 0) return language;
    // Fall back to the realm's default language.
    return yanelInstance.getMap().getRealm(request.getServletPath()).getDefaultLanguage();
}

/**
 * Write to output stream of modifiable resource: copies the request body to
 * the resource's output stream and reports success/failure as XHTML.
 */
private void write(InputStream in, OutputStream out, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    if (out != null) {
        log.debug("Content-Type: " + request.getContentType());
        // TODO: Compare mime-type from response with mime-type of resource
        //if (contentType.equals("text/xml")) { ... }
        byte[] buffer = new byte[8192];
        int bytesRead;
        while ((bytesRead = in.read(buffer)) != -1) {
            out.write(buffer, 0, bytesRead);
        }
        out.flush();
        out.close();

        StringBuffer sb = new StringBuffer();
        sb.append("<?xml version=\"1.0\"?>");
        sb.append("<html>");
        sb.append("<body>");
        sb.append("<p>Data has been saved ...</p>");
        sb.append("</body>");
        sb.append("</html>");

        response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
        response.setContentType("application/xhtml+xml; charset=" + DEFAULT_ENCODING);
        PrintWriter w = response.getWriter();
        w.print(sb);

        log.info("Data has been saved ...");
        return;
    } else {
        log.error("OutputStream is null!");

        StringBuffer sb = new StringBuffer();
        sb.append("<?xml version=\"1.0\"?>");
        sb.append("<html>");
        sb.append("<body>");
        sb.append("<p>Exception: OutputStream is null!</p>");
        sb.append("</body>");
        sb.append("</html>");

        PrintWriter w = response.getWriter();
        w.print(sb);
        response.setContentType("application/xhtml+xml; charset=" + DEFAULT_ENCODING);
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    }
}

/**
 * Gets the identity from the session associated with the given request or via the 'Authorization' HTTP header in the case of BASIC or DIGEST
 * @param request Client/Servlet request
 * @param map
 * @return Identity if one exist, or otherwise an empty identity
 */
static Identity getIdentity(HttpServletRequest request, Map map) throws Exception {
    Realm realm = map.getRealm(request.getServletPath());
    HttpSession session = request.getSession(false);
    if (session != null) {
        IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY);
        if (identityMap != null) {
            Identity identity = (Identity)identityMap.get(realm.getID());
            if (identity != null) {
                return identity;
            }
        }
    }

    // HTTP BASIC Authentication (For clients such as for instance Sunbird, OpenOffice or cadaver)
    // IMPORT NOTE: BASIC Authentication needs to be checked on every request, because clients often do not support session
    // handling
    // (NOTE(review): the word "handling" above is the tail of the previous line's
    // comment, split by extraction — it belongs to "... do not support session handling".)
    String authorizationHeader = request.getHeader("Authorization");
    if (log.isDebugEnabled()) log.debug("Checking for Authorization Header: " + authorizationHeader);
    if (authorizationHeader != null) {
        if (authorizationHeader.toUpperCase().startsWith("BASIC")) {
            log.warn("Using BASIC authorization ...");
            // Get encoded user and password, comes after "BASIC "
            String userpassEncoded = authorizationHeader.substring(6);
            // Decode it, using any base 64 decoder
            // NOTE(review): sun.misc.BASE64Decoder is an internal JDK API (removed in
            // newer JDKs) — java.util.Base64 would be the supported replacement.
            sun.misc.BASE64Decoder dec = new sun.misc.BASE64Decoder();
            String userpassDecoded = new String(dec.decodeBuffer(userpassEncoded));
            log.debug("Username and Password Decoded: " + userpassDecoded);
            String[] up = userpassDecoded.split(":");
            String username = up[0];
            String password = up[1];
            // NOTE(review): SECURITY — the cleartext password is written to the debug
            // log here; consider removing before enabling debug logging in production.
            log.debug("username: " + username + ", password: " + password);
            try {
                User user = realm.getIdentityManager().getUserManager().getUser(username);
                if (user != null && user.authenticate(password)) {
                    return new Identity(user);
                } else {
                    log.warn("HTTP BASIC Authentication failed for " + username + "!");
                    /* response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\""); response.sendError(response.SC_UNAUTHORIZED); PrintWriter writer = response.getWriter(); writer.print("BASIC Authentication Failed!"); return response; */
                }
            } catch (Exception e) {
                throw new ServletException(e.getMessage(), e);
            }
        } else if (authorizationHeader.toUpperCase().startsWith("DIGEST")) {
            log.error("DIGEST is not implemented");
            /* authorized = false; response.sendError(response.SC_UNAUTHORIZED); response.setHeader("WWW-Authenticate", "DIGEST realm=\"" + realm.getName() + "\""); PrintWriter writer = response.getWriter(); writer.print("DIGEST is not implemented!"); */
        } else {
            log.warn("No such authorization type implemented: " + authorizationHeader);
        }
    }
    if(log.isDebugEnabled()) log.debug("No identity yet (Neither session nor header based! Identity is set to WORLD!)");
    // TBD: Should add world identity to the session?
    // No session identity and no (valid) Authorization header: anonymous/WORLD.
    return new Identity();
}

/**
 * Create a DOM Document
 */
static public Document getDocument(String namespace, String localname) throws Exception {
    return XMLHelper.createDocument(namespace, localname);
}

/**
 * Resolve the realm for a request via the servlet path.
 */
private Realm getRealm(HttpServletRequest request) throws Exception {
    Realm realm = yanelInstance.getMap().getRealm(request.getServletPath());
    return realm;
}

/**
 * Generate a response from the view of the resource configured by 'rc'.
 * @return true when a response was generated, false otherwise
 */
private boolean generateResponseFromRTview(HttpServletRequest request, HttpServletResponse response, ResourceConfiguration rc, String path) throws ServletException {
    String viewId = request.getParameter(VIEW_ID_PARAM_NAME);
    if (request.getParameter("yanel.format") != null) { // backwards compatible
        viewId = request.getParameter("yanel.format");
    }
    try {
        Realm realm = getRealm(request);
        Resource resource = yanelInstance.getResourceManager().getResource(getEnvironment(request, response), realm, path, rc);
        View view = ((ViewableV2) resource).getView(viewId);
        if (view != null) {
            if (generateResponse(view, resource, request, response, getDocument(NAMESPACE, "yanel"), -1, -1) != null) return true;
        }
    } catch (Exception e) {
        throw new ServletException(e);
    }
    return false;
}

/**
 * Get global data located below reserved prefix (about pages, resource-type
 * htdocs, and global htdocs files).
 */
private void getGlobalData(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    Resource resource = getResource(request, response);
    String path = resource.getPath();
    java.util.Map<String, String> properties = new HashMap<String, String>();
    final String pathPrefix = "/" + reservedPrefix + "/";
    final String aboutPagePath = pathPrefix + "about.html"; // About Yanel
    final String aboutRealmPagePath = pathPrefix + "about-realm.html"; // About realm
    final String resourceTypesPathPrefix = pathPrefix + "resource-types/";

    //XXX REFACTORME: in the cases where we simply use a resource-type's view
    Realm realm;
    Environment environment = getEnvironment(request, response);
    ResourceConfiguration rc;
    YanelGlobalResourceTypeMatcher RTmatcher = new
            YanelGlobalResourceTypeMatcher(pathPrefix, servletContextRealPath);
    try {
        realm = getRealm(request);
        rc = RTmatcher.getResourceConfiguration(environment, realm, path);
    } catch (Exception e) {
        throw new ServletException(e.getMessage(), e);
    }

    if (rc != null) {
        // A matching global resource type exists: let its view answer, else 404.
        if (generateResponseFromRTview(request, response, rc, path)) return;
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
        return;
    } else if (path.equals(aboutPagePath)) {
        //XXX REFACTORME: we should define an "about" resource-type instead!
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
        response.setHeader("Content-Type", "text/html");
        PrintWriter w = response.getWriter();
        w.print(About.toHTML(yanelInstance.getVersion(), yanelInstance.getRevision()));
        return;
    } else if (path.equals(aboutRealmPagePath)) {
        //XXX REFACTORME: we should define an "about-realm" resource-type instead!
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
        response.setHeader("Content-Type", "text/html");
        PrintWriter w = response.getWriter();
        w.print(AboutRealm.toHTML(realm));
        return;
    } else if (path.startsWith(resourceTypesPathPrefix)) {
        // Path shape: <prefix>resource-types/<namespaceURI>::<name>/<rest>
        final String[] namespaceURI_and_rest = path.substring(resourceTypesPathPrefix.length()).split("::", 2);
        final String namespaceURI = namespaceURI_and_rest[0];
        final String[] name_and_rest = namespaceURI_and_rest[1].split("/", 2);
        final String name = name_and_rest[0];
        final String decoded_namespaceURI = HttpServletRequestHelper.decodeURIinURLpath('^', namespaceURI);
        if (log.isDebugEnabled()) log.debug("decoded_namespaceURI: "+decoded_namespaceURI);
        // NOTE(review): the string literal below is truncated by extraction — the
        // original was presumably namespaceURI.replaceAll("http:/", "http://");
        // TODO confirm against upstream source.
        final String namespace = ! decoded_namespaceURI.equals(namespaceURI) ? decoded_namespaceURI : namespaceURI.replaceAll("http:/", "http:
        rc = new ResourceConfiguration(name, namespace, properties);
        try {
            Resource resourceOfPrefix = yanelInstance.getResourceManager().getResource(environment, realm, path, rc);
            String htdocsPath;
            if (name_and_rest[1].startsWith(reservedPrefix + "/")) {
                htdocsPath = "rtyanelhtdocs:" + name_and_rest[1].substring(reservedPrefix.length()).replace('/', File.separatorChar);
            } else {
                htdocsPath = "rthtdocs:" + File.separatorChar + name_and_rest[1].replace('/', File.separatorChar);
            }
            SourceResolver resolver = new SourceResolver(resourceOfPrefix);
            Source source = resolver.resolve(htdocsPath, null);

            long sourceLastModified = -1;
            // Compare If-Modified-Since with lastModified and return 304 without content resp. check on ETag
            if (source instanceof YanelStreamSource) {
                sourceLastModified = ((YanelStreamSource) source).getLastModified();
                long ifModifiedSince = request.getDateHeader("If-Modified-Since");
                if (log.isDebugEnabled()) log.debug("sourceLastModified <= ifModifiedSince: " + sourceLastModified + " <= " + ifModifiedSince);
                if (ifModifiedSince != -1) {
                    if (sourceLastModified <= ifModifiedSince) {
                        response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_MODIFIED);
                        return;
                    }
                }
            }

            InputStream htdocIn = ((StreamSource) source).getInputStream();
            if (htdocIn != null) {
                log.debug("Resource-Type specific data: " + htdocsPath);
                // TODO: Set more HTTP headers (size, etc.)
                String mimeType = guessMimeType(FilenameUtils.getExtension(FilenameUtils.getName(htdocsPath)));
                if(sourceLastModified >= 0) response.setDateHeader("Last-Modified", sourceLastModified);
                response.setHeader("Content-Type", mimeType);

                byte buffer[] = new byte[8192];
                int bytesRead;
                OutputStream out = response.getOutputStream();
                while ((bytesRead = htdocIn.read(buffer)) != -1) {
                    out.write(buffer, 0, bytesRead);
                }
                htdocIn.close();
                // allow client-side caching:
                if (cacheExpires != 0) {
                    setExpiresHeader(response, cacheExpires);
                }
                return;
            } else {
                log.error("No such file or directory: " + htdocsPath);
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
                return;
            }
        } catch (Exception e) {
            throw new ServletException(e.getMessage(), e);
        }
    } else {
        // Plain global file below the servlet's htdocs directory.
        File globalFile = org.wyona.commons.io.FileUtil.file(servletContextRealPath, "htdocs" + File.separator + path.substring(pathPrefix.length()));
        if (globalFile.exists()) {
            log.debug("Global data: " + globalFile);
            // TODO: Set more HTTP headers (size, etc.)
            String mimeType = guessMimeType(FilenameUtils.getExtension(globalFile.getName()));
            response.setHeader("Content-Type", mimeType);

            byte buffer[] = new byte[8192];
            int bytesRead;
            InputStream in = new java.io.FileInputStream(globalFile);
            OutputStream out = response.getOutputStream();
            while ((bytesRead = in.read(buffer)) != -1) {
                out.write(buffer, 0, bytesRead);
            }
            in.close();
            // allow client-side caching:
            if (cacheExpires != 0) {
                setExpiresHeader(response, cacheExpires);
            }
            return;
        } else {
            log.error("No such file or directory: " + globalFile);
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
            return;
        }
    }
}

/**
 * Set an RFC 822 "Expires" header 'hours' hours in the future (client-side caching).
 */
private void setExpiresHeader(HttpServletResponse response, int hours) {
    Calendar calendar = Calendar.getInstance();
    calendar.add(Calendar.HOUR_OF_DAY, hours);
    String expires = DateUtil.formatRFC822GMT(calendar.getTime());
    response.setHeader("Expires", expires);
}

/**
 * Generate response from a resource view, whereas it will be checked first if the resource already wrote the response (if so, then just return)
 */
private HttpServletResponse generateResponse(View view, Resource res, HttpServletRequest request, HttpServletResponse response, Document doc, long size, long lastModified) throws ServletException, IOException {
    // TODO: There seem like no header fields are being set (e.g. Content-Length, ...). Please see below ...
// Check if viewable resource has already created a response if (!view.isResponse()) { if(logAccessEnabled) { if (view.getMimeType() != null) { // TODO: Add more mime types or rather make it configurable if (view.getMimeType().indexOf("html") > 0 || view.getMimeType().indexOf("pdf") > 0 || view.getMimeType().indexOf("video") >= 0) { doLogAccess(request, response); } } } return response; } // Set mime type and encoding String mimeType = view.getMimeType(); if (view.getEncoding() != null) { mimeType = patchMimeType(mimeType, request); response.setContentType(mimeType + "; charset=" + view.getEncoding()); } else if (res.getConfiguration() != null && res.getConfiguration().getEncoding() != null) { mimeType = patchMimeType(mimeType, request); response.setContentType(mimeType + "; charset=" + res.getConfiguration().getEncoding()); } else { // try to guess if we have to set the default encoding if (mimeType != null && mimeType.startsWith("text") || mimeType.equals("application/xml") || mimeType.equals("application/xhtml+xml") || mimeType.equals("application/atom+xml") || mimeType.equals("application/x-javascript")) { mimeType = patchMimeType(mimeType, request); response.setContentType(mimeType + "; charset=" + DEFAULT_ENCODING); } else { // probably binary mime-type, don't set encoding mimeType = patchMimeType(mimeType, request); response.setContentType(mimeType); } } if(logAccessEnabled) { if (mimeType != null) { if (mimeType.indexOf("html") > 0 || mimeType.indexOf("pdf") > 0) { // INFO: Only HTML pages and PDFs etc. should be logged, but no images, CSS, etc. 
Check the mime-type instead the suffix or use JavaScript or Pixel doLogAccess(request, response); } } } // Set HTTP headers: HashMap<?, ?> headers = view.getHttpHeaders(); Iterator<?> iter = headers.keySet().iterator(); while (iter.hasNext()) { String name = (String)iter.next(); String value = (String)headers.get(name); if (log.isDebugEnabled()) { log.debug("set http header: " + name + ": " + value); } response.setHeader(name, value); } // Possibly embed toolbar: // TODO: Check if user is authorized to actually see toolbar (Current flaw: Enabled Toolbar, Login, Toolbar is enabled, Logout, Toolbar is still visible!) if (yanelUI.isToolbarEnabled(request)) { if (mimeType != null && mimeType.indexOf("html") > 0) { // TODO: What about other query strings or frames or TinyMCE? if (request.getParameter(YANEL_RESOURCE_USECASE) == null) { if (toolbarMasterSwitch.equals("on")) { OutputStream os = response.getOutputStream(); try { Usecase usecase = new Usecase(TOOLBAR_USECASE); Identity identity = getIdentity(request, map); Realm realm = map.getRealm(request.getServletPath()); String path = map.getPath(realm, request.getServletPath()); // NOTE: This extra authorization check is necessary within a multi-realm environment, because after activating the toolbar with a query string, the toolbar flag attached to the session will be ignored by doAccessControl(). One could possibly do this check within doAccessControl(), but could be a peformance issue! Or as an alternative one could refactor the code, such that the toolbar session flag is realm aware. 
if(realm.getPolicyManager().authorize(path, identity, usecase)) { yanelUI.mergeToolbarWithContent(request, response, res, view); return response; } else { log.warn("Toolbar authorization denied (Realm: '" + realm.getName() + "', User: '" + identity.getUsername() + "', Path: '" + path + "')!"); } } catch (Exception e) { String message = "Error merging toolbar into content: " + e.getMessage(); log.error(message, e); Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return response; } } else { log.info("Toolbar has been disabled. Please check web.xml!"); } } else { log.warn("Yanel resource usecase is not null, but set to '" + request.getParameter(YANEL_RESOURCE_USECASE) + "' and hence Yanel toolbar is not displayed in order to avoid that users are leaving the usecase because they might click on some toolbar menu item."); } } else { log.info("No HTML related mime type: " + mimeType); } } else { log.debug("Toolbar is turned off."); } InputStream is = view.getInputStream(); if (is != null) { // Write actual content into response byte buffer[] = new byte[8192]; int bytesRead; bytesRead = is.read(buffer); try { // Compare If-Modified-Since with lastModified and return 304 without content resp. 
check on ETag long ifModifiedSince = request.getDateHeader("If-Modified-Since"); if (ifModifiedSince != -1) { if (res instanceof ModifiableV2) { long resourceLastMod = ((ModifiableV2)res).getLastModified(); //log.debug(resourceLastMod + " " + ifModifiedSince); if (resourceLastMod <= ifModifiedSince) { response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_MODIFIED); return response; } } else { // TODO: Many resources do not implement ModifiableV2 and hence never return a lastModified and hence the browser will never ask for ifModifiedSince! //log.warn("Resource of path '" + res.getPath() + "' is not ModifiableV2 and hence cannot be cached!"); if (log.isDebugEnabled()) log.debug("Resource of path '" + res.getPath() + "' is not ModifiableV2 and hence cannot be cached!"); } } } catch (Exception e) { log.error(e.getMessage(), e); } if(lastModified >= 0) response.setDateHeader("Last-Modified", lastModified); if(size > 0) { if (log.isDebugEnabled()) log.debug("Size of " + request.getRequestURI() + ": " + size); response.setContentLength((int) size); } else { if (log.isDebugEnabled()) log.debug("No size for " + request.getRequestURI() + ": " + size); } // Check if InputStream is empty if (bytesRead != -1) { java.io.OutputStream os = response.getOutputStream(); os.write(buffer, 0, bytesRead); while ((bytesRead = is.read(buffer)) != -1) { os.write(buffer, 0, bytesRead); } os.close(); } else { log.warn("Returned content size of request '" + request.getRequestURI() + "' is 0"); } is.close(); return response; } else { String message = "Returned InputStream of request '" + request.getRequestURI() + "' is null!"; Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); is.close(); return response; } } @Override public void 
destroy() { super.destroy(); yanelInstance.destroy(); if (scheduler != null) { try { log.warn("Shutdown scheduler ..."); scheduler.shutdown(); //scheduler.shutdown(true); // INFO: true means to wait until all jobs have completed } catch(Exception e) { log.error(e, e); } } log.warn("Yanel webapp has been shut down."); } /** * Get usecase. Maps query strings, etc. to usecases, which then can be used for example within access control policies */ private Usecase getUsecase(HttpServletRequest request) { // TODO: Replace hardcoded roles by mapping between roles amd query strings ... Usecase usecase = new Usecase("view"); String yanelResUsecaseValue = request.getParameter(YANEL_RESOURCE_USECASE); if (yanelResUsecaseValue != null) { if (yanelResUsecaseValue.equals("save")) { log.debug("Save data ..."); usecase = new Usecase("write"); } else if (yanelResUsecaseValue.equals("checkin")) { log.debug("Checkin data ..."); usecase = new Usecase("write"); } else if (yanelResUsecaseValue.equals("roll-back")) { log.debug("Roll back to previous revision ..."); usecase = new Usecase("write"); } else if (yanelResUsecaseValue.equals("introspection")) { if(log.isDebugEnabled()) log.debug("Dynamically generated introspection ..."); usecase = new Usecase("introspection"); } else if (yanelResUsecaseValue.equals("checkout")) { log.debug("Checkout data ..."); usecase = new Usecase("open"); } else if (yanelResUsecaseValue.equals("delete")) { log.info("Delete resource (yanel resource usecase delete)"); usecase = new Usecase("delete"); } else { log.warn("No such generic Yanel resource usecase: " + yanelResUsecaseValue + " (maybe some custom resource usecase)"); } } String yanelUsecaseValue = request.getParameter(YANEL_USECASE); if (yanelUsecaseValue != null) { if (yanelUsecaseValue.equals("create")) { log.debug("Create new resource ..."); usecase = new Usecase("resource.create"); } else { log.warn("No such usecase: " + yanelUsecaseValue); } } String contentType = request.getContentType(); String 
method = request.getMethod(); if (contentType != null && contentType.indexOf("application/atom+xml") >= 0 && (method.equals(METHOD_PUT) || method.equals(METHOD_POST))) { // TODO: Is posting atom entries different from a general post (see below)?! log.warn("Write/Checkin Atom entry ..."); usecase = new Usecase("write"); // TODO: METHOD_POST is not generally protected, but save, checkin, application/atom+xml are being protected. See doPost(.... } else if (method.equals(METHOD_PUT)) { log.warn("Upload data ..."); usecase = new Usecase("write"); } else if (method.equals(METHOD_DELETE)) { log.warn("Delete resource (HTTP method delete)"); usecase = new Usecase("delete"); } String workflowTransitionValue = request.getParameter(YANEL_RESOURCE_WORKFLOW_TRANSITION); if (workflowTransitionValue != null) { // TODO: At the moment the authorization of workflow transitions are checked within executeWorkflowTransition or rather workflowable.doTransition(transition, revision) log.warn("Workflow transition is currently handled as view usecase: " + workflowTransitionValue); usecase = new Usecase("view"); // TODO: Return workflow transition ID //usecase = new Usecase(transitionID); } String toolbarValue = request.getParameter("yanel.toolbar"); if (toolbarValue != null && toolbarValue.equals("on")) { log.debug("Turn on toolbar ..."); usecase = new Usecase(TOOLBAR_USECASE); } String yanelPolicyValue = request.getParameter(YANEL_ACCESS_POLICY_USECASE); if (yanelPolicyValue != null) { if (yanelPolicyValue.equals("create")) { usecase = new Usecase("policy.create"); } else if (yanelPolicyValue.equals("read")) { usecase = new Usecase("policy.read"); } else if (yanelPolicyValue.equals("update")) { usecase = new Usecase("policy.update"); } else if (yanelPolicyValue.equals("delete")) { usecase = new Usecase("policy.delete"); } else { log.warn("No such policy usecase: " + yanelPolicyValue); } } String showResourceMeta = request.getParameter(RESOURCE_META_ID_PARAM_NAME); if (showResourceMeta != 
null) { usecase = new Usecase(RESOURCE_META_ID_PARAM_NAME); } return usecase; } /** * Handle access policy requests (CRUD, whereas delete is not implemented yet!) */ private void doAccessPolicyRequest(HttpServletRequest request, HttpServletResponse response, String usecase) throws ServletException, IOException { try { String viewId = request.getParameter(VIEW_ID_PARAM_NAME); Realm realm = map.getRealm(request.getServletPath()); String path = map.getPath(realm, request.getServletPath()); ResourceConfiguration rc = getGlobalResourceConfiguration("policy-manager_yanel-rc.xml", realm); if (generateResponseFromRTview(request, response, rc, path)) return; log.error("Something went terribly wrong!"); response.getWriter().print("Something went terribly wrong!"); return; } catch(Exception e) { throw new ServletException(e.getMessage(), e); } } /** * Handle delete usecase */ private void handleDeleteUsecase(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String confirmed = request.getParameter("confirmed"); if (confirmed != null) { String path = getResource(request, response).getPath(); log.warn("Really delete " + path); doDelete(request, response); return; } else { log.warn("Delete has not been confirmed by client yet!"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); response.setContentType("text/html" + "; charset=" + "UTF-8"); StringBuffer sb = new StringBuffer("<html xmlns=\"http: PrintWriter w = response.getWriter(); w.print(sb); return; } } /** * Get resource configuration from global location of the realm or if not available there, then from global location of Yanel * * @param resConfigName Filename of resource configuration * @param realm Current realm */ private ResourceConfiguration getGlobalResourceConfiguration(String resConfigName, Realm realm) { return YanelGlobalResourceTypeMatcher.getGlobalResourceConfiguration(resConfigName, realm, servletContextRealPath); } private String 
getStackTrace(Exception e) { java.io.StringWriter sw = new java.io.StringWriter(); e.printStackTrace(new java.io.PrintWriter(sw)); return sw.toString(); } private void do404(HttpServletRequest request, HttpServletResponse response, Document doc, String exceptionMessage) throws ServletException { log404.info("Referer: " + request.getHeader("referer")); log404.warn(request.getRequestURL().toString()); //org.wyona.yarep.core.Node node = realm.getRepository().getNode("/yanel-logs/404.txt"); String message = "No such node/resource exception: " + exceptionMessage; log.warn(message); /* Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); exceptionElement.setAttributeNS(NAMESPACE, "status", "404"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); setYanelOutput(request, response, doc); return; */ // TODO: Finish the XML (as it used to be before)! 
response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND); try { Realm realm = yanelInstance.getMap().getRealm(request.getServletPath()); String path = getResource(request, response).getPath(); ResourceConfiguration rc = getGlobalResourceConfiguration("404_yanel-rc.xml", realm); if (generateResponseFromRTview(request, response, rc, path)) return; log.error("404 seems to be broken!"); return; } catch (Exception e) { log.error(e.getMessage(), e); return; } } /** * Check if yanel resource usecase is 'roll back" usecase */ private boolean isRollBack(HttpServletRequest request) { String yanelResUsecase = request.getParameter(YANEL_RESOURCE_USECASE); if (yanelResUsecase != null) { if (yanelResUsecase.equals("roll-back")) return true; } return false; } /** * Check if request comes from Neutron supporting client */ private boolean isClientSupportingNeutron(HttpServletRequest request) { String neutronVersions = request.getHeader("Neutron"); if (neutronVersions != null) { log.info("Neutron version(s) supported by client: " + neutronVersions); return true; } return false; } /** * Get Neutron versions which are supported by client */ private String getClientSupportedNeutronVersions(HttpServletRequest request) { return request.getHeader("Neutron"); } /** * Get client authentication scheme */ private String getClientAuthenticationScheme(HttpServletRequest request) { return request.getHeader("WWW-Authenticate"); } /** * Respond with introspection */ private void sendIntrospectionAsResponse(Resource res, Document doc, Element rootElement, HttpServletRequest request, HttpServletResponse response) throws ServletException { try { if (ResourceAttributeHelper.hasAttributeImplemented(res, "Introspectable", "1")) { String introspection = ((IntrospectableV1)res).getIntrospection(); response.setContentType("application/xml"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); response.getWriter().print(introspection); } else { String message = "Resource is not 
introspectable."; Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(message)); setYanelOutput(request, response, doc); } return; } catch(Exception e) { log.error(e.getMessage(), e); Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception")); exceptionElement.appendChild(doc.createTextNode(e.getMessage())); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); setYanelOutput(request, response, doc); return; } } /** * Set/get meta data re resource */ private Element getResourceMetaData(Resource res, Document doc, Element rootElement) { Element resourceElement = (Element) rootElement.appendChild(doc.createElement("resource")); ResourceConfiguration resConfig = res.getConfiguration(); if (resConfig != null) { Element resConfigElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "config")); resConfigElement.setAttributeNS(NAMESPACE, "rti-name", resConfig.getName()); resConfigElement.setAttributeNS(NAMESPACE, "rti-namespace", resConfig.getNamespace()); } else { Element noResConfigElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "no-config")); } Element realmElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "realm")); realmElement.setAttributeNS(NAMESPACE, "name", res.getRealm().getName()); realmElement.setAttributeNS(NAMESPACE, "rid", res.getRealm().getID()); realmElement.setAttributeNS(NAMESPACE, "prefix", res.getRealm().getMountPoint()); Element identityManagerElement = (Element) realmElement.appendChild(doc.createElementNS(NAMESPACE, "identity-manager")); Element userManagerElement = (Element) identityManagerElement.appendChild(doc.createElementNS(NAMESPACE, "user-manager")); return resourceElement; } /** * Append view descriptors to meta */ private void appendViewDescriptors(Document doc, Element 
viewElement, ViewDescriptor[] vd) { if (vd != null) { for (int i = 0; i < vd.length; i++) { Element descriptorElement = (Element) viewElement.appendChild(doc.createElement("descriptor")); if (vd[i].getMimeType() != null) { descriptorElement.appendChild(doc.createTextNode(vd[i].getMimeType())); } descriptorElement.setAttributeNS(NAMESPACE, "id", vd[i].getId()); } } else { viewElement.appendChild(doc.createTextNode("No View Descriptors!")); } } /** * Log browser history of each user */ private void doLogAccess(HttpServletRequest request, HttpServletResponse response) { Cookie cookie = getYanelAnalyticsCookie(request, response); // See apache-tomcat-5.5.20/logs/localhost_access_log.2009-11-07.txt // 127.0.0.1 - - [07/Nov/2009:01:24:09 +0100] "GET /yanel/from-scratch-realm/de/index.html HTTP/1.1" 200 4464 try { Realm realm = map.getRealm(request.getServletPath()); // TBD/TODO: What if user has logged out, but still has a persistent cookie?! //String userID = getEnvironment(request, response).getIdentity().getUsername(); Identity identity = getIdentity(request, map); if (identity != null && identity.getUsername() != null) { User user = realm.getIdentityManager().getUserManager().getUser(identity.getUsername()); // The log should be attached to the user, because realms can share a UserManager, but the UserManager API has no mean to save such data, so how should we do this? // What if realm ID is changing? 
String logPath = "/yanel-logs/browser-history/" + user.getID() + ".txt"; if (!realm.getRepository().existsNode(logPath)) { org.wyona.yarep.util.YarepUtil.addNodes(realm.getRepository(), logPath, org.wyona.yarep.core.NodeType.RESOURCE); } org.wyona.yarep.core.Node node = realm.getRepository().getNode(logPath); // Stream into node (append log entry, see for example log4j) // 127.0.0.1 - - [07/Nov/2009:01:24:09 +0100] "GET /yanel/from-scratch-realm/de/index.html HTTP/1.1" 200 4464 String requestURL = request.getRequestURL().toString(); logAccess.info(requestURL + " r:" + realm.getID() + " c:" + cookie.getValue() + " u:" + identity.getUsername() + " ref:" + request.getHeader("referer") + " ua:" + request.getHeader("User-Agent")); } else { // INFO: Log access of anonymous user String requestURL = request.getRequestURL().toString(); // TODO: Also log referer as entry point logAccess.info(requestURL + " r:" + realm.getID() + " c:" + cookie.getValue() + " ref:" + request.getHeader("referer") + " ua:" + request.getHeader("User-Agent")); } //log.warn("DEBUG: Referer: " + request.getHeader(HTTP_REFERRER)); } catch(Exception e) { // Catch all exceptions, because we do not want to throw exceptions because of logging browser history log.error(e, e); } } private void appendRevisionsAndWorkflow(Document doc, Element resourceElement, Resource res, HttpServletRequest request) throws Exception { if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) { WorkflowableV1 workflowableResource = null; Workflow workflow = null; String liveRevisionName = null; if (ResourceAttributeHelper.hasAttributeImplemented(res, "Workflowable", "1")) { workflowableResource = (WorkflowableV1)res; workflow = WorkflowHelper.getWorkflow(res); liveRevisionName = WorkflowHelper.getLiveRevision(res); } RevisionInformation[] revisionsInfo = ((VersionableV2)res).getRevisions(); Element revisionsElement = (Element) resourceElement.appendChild(doc.createElement("revisions")); if (revisionsInfo 
!= null && revisionsInfo.length > 0) { for (int i = revisionsInfo.length - 1; i >= 0; i Element revisionElement = (Element) revisionsElement.appendChild(doc.createElement("revision")); log.debug("Revision: " + revisionsInfo[i].getName()); revisionElement.appendChild(XMLHelper.createTextElement(doc, "name", revisionsInfo[i].getName(), null)); log.debug("Date: " + revisionsInfo[i].getDate()); revisionElement.appendChild(XMLHelper.createTextElement(doc, "date", "" + revisionsInfo[i].getDate(), null)); if (revisionsInfo[i].getUser() != null) { log.debug("User: " + revisionsInfo[i].getUser()); revisionElement.appendChild(XMLHelper.createTextElement(doc, "user", revisionsInfo[i].getUser(), null)); } else { revisionElement.appendChild(doc.createElement("no-user")); } if (revisionsInfo[i].getComment() != null) { log.debug("Comment: " + revisionsInfo[i].getComment()); revisionElement.appendChild(XMLHelper.createTextElement(doc, "comment", revisionsInfo[i].getComment(), null)); } else { revisionElement.appendChild(doc.createElement("no-comment")); } // Add workflow info if (workflowableResource != null && workflow != null) { Element revisionWorkflowElement = (Element) revisionElement.appendChild(doc.createElement("workflow-state")); String wfState = workflowableResource.getWorkflowState(revisionsInfo[i].getName()); if (wfState == null) { wfState = workflow.getInitialState(); } if (liveRevisionName != null && revisionsInfo[i].getName().equals(liveRevisionName)) { revisionWorkflowElement.appendChild(doc.createTextNode(wfState + " (LIVE)")); } else { revisionWorkflowElement.appendChild(doc.createTextNode(wfState)); } } } } else { Element noRevisionsYetElement = (Element) resourceElement.appendChild(doc.createElement("no-revisions-yet")); } } else { Element notVersionableElement = (Element) resourceElement.appendChild(doc.createElement("not-versionable")); } } /** * Set Yanel analytics cookie, which is persistent * @param request Client request */ private Cookie 
getYanelAnalyticsCookie(HttpServletRequest request, HttpServletResponse response) { Cookie[] cookies = request.getCookies(); if (cookies != null) { for (int i = 0; i < cookies.length; i++) { if (cookies[i].getName().equals(ANALYTICS_COOKIE_NAME)) { // TODO: This code is not sufficient to make sure that only one cookie is being set, because Tomcat processes the requests in parallel and until the first cookie is registered, some more cookies might already be set! //log.debug("Has already a Yanel analytics cookie: " + cookies[i].getValue()); return cookies[i]; } } } Cookie analyticsCookie = new Cookie(ANALYTICS_COOKIE_NAME, "YA-" + new Date().getTime()); // TODO: getTime() is not unique! analyticsCookie.setMaxAge(31536000); // 1 year //analyticsCookie.setMaxAge(86400); // 1 day analyticsCookie.setPath(request.getContextPath()); response.addCookie(analyticsCookie); return analyticsCookie; } }
package Main.Controllers.Retailers;

import Main.Helpers.Retailers.Sale;
import Main.Helpers.UserInfo;
import Main.JdbcConnection.JDBC;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.control.*;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.layout.HBox;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Calendar;

/**
 * Controller for the retailer "View Sale" screen: lists all sale bills in a table,
 * shows the totals of the last seven days, and lets the user inspect the sales of a
 * particular date via a DatePicker.
 */
public class ViewSaleController {
    @FXML
    private TableView<Sale> saleTableView;
    @FXML
    private TableColumn<Sale,String> dateColumn;
    @FXML
    private TableColumn<Sale,Long> billNumberColumn;
    @FXML
    private TableColumn<Sale,String> patientNameColumn;
    @FXML
    private TableColumn<Sale,String> doctorNameColumn;
    @FXML
    private TableColumn<Sale,String> companyNameColumn;
    @FXML
    private TableColumn<Sale,String> modeColumn;
    @FXML
    private TableColumn<Sale,Float> amountColumn;
    @FXML
    private Label todaySaleLabel,yesterdaySaleLabel,day3SaleLabel,day4SaleLabel,day5SaleLabel,day6SaleLabel,day7SaleLabel,dateSelectedLabel;
    @FXML
    private Button particularDateSaleButton;
    @FXML
    private DatePicker saleDatePicker;
    @FXML
    private HBox dateHBox,datePickedHBox;

    private static double drawableWidth;
    ObservableList<Sale> saleList,saleParticularDayList;
    Long user_id=(long) UserInfo.accessId;
    // NOTE(review): static, so the toggle state survives across controller instances while the
    // button text is re-created per instance -- verify this cross-instance sharing is intended.
    static boolean saleCheck=true;

    /** FXML entry point: wires up the table, tooltips and the seven-day totals. */
    public void initialize() {
        initializeDatePicker();
        initializeTable();
        addingToolTip();
        calculateTotalSale();
    }

    /** Hide the date-picker row and its result row until the user asks for them. */
    public void initializeDatePicker(){
        dateHBox.setVisible(false);
        datePickedHBox.setVisible(false);
    }

    /**
     * Compute and display the total sale amount for today and each of the previous six days.
     *
     * FIX(review): the previous-day calculation used a hand-rolled month-length/leap-year
     * cascade; replaced with Calendar.add(DAY_OF_MONTH, -1), which produces the identical
     * year/month/day values (month ends and leap years included) with far less code.
     */
    public void calculateTotalSale() {
        Calendar cal = Calendar.getInstance();
        long[] year = new long[7];
        long[] month = new long[7];
        long[] day = new long[7];
        double[] sum = new double[7];
        try {
            Connection dbConnection = JDBC.databaseConnect();
            Statement sqlStatement = dbConnection.createStatement();
            for (int i = 0; i < 7; i++) {
                year[i] = cal.get(Calendar.YEAR);
                month[i] = cal.get(Calendar.MONTH) + 1;
                day[i] = cal.get(Calendar.DAY_OF_MONTH);
                // Dates are stored unpadded (e.g. "2021-3-7"), matching how bills are saved.
                String date = year[i] + "-" + month[i] + "-" + day[i];
                // NOTE(review): string-concatenated SQL; both values are program-derived here,
                // but a PreparedStatement would be safer if this ever takes user input.
                String sqlQuery = "SELECT sum(total_amount) from retailer_sale_bill where user_access_id='" + user_id + "' and date='" + date + "'";
                ResultSet saleResultSet = sqlStatement.executeQuery(sqlQuery);
                if (saleResultSet.next()) {
                    sum[i] = saleResultSet.getFloat(1);
                }
                saleResultSet.close();
                cal.add(Calendar.DAY_OF_MONTH, -1); // step back one day
            }
            todaySaleLabel.setText("Today's Total Sale is Rs." + sum[0]);
            yesterdaySaleLabel.setText("Yesterday's Total Sale was Rs." + sum[1]);
            day3SaleLabel.setText("Total Sale on " + day[2] + "-" + month[2] + "-" + year[2] + " was Rs." + sum[2]);
            day4SaleLabel.setText("Total Sale on " + day[3] + "-" + month[3] + "-" + year[3] + " was Rs." + sum[3]);
            day5SaleLabel.setText("Total Sale on " + day[4] + "-" + month[4] + "-" + year[4] + " was Rs." + sum[4]);
            day6SaleLabel.setText("Total Sale on " + day[5] + "-" + month[5] + "-" + year[5] + " was Rs." + sum[5]);
            day7SaleLabel.setText("Total Sale on " + day[6] + "-" + month[6] + "-" + year[6] + " was Rs." + sum[6]);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Attach a per-row tooltip listing the bill's discount and line items, loaded lazily
     * from the database whenever a row is (re-)rendered.
     */
    public void addingToolTip() {
        saleTableView.setRowFactory(tv -> new TableRow<Sale>() {
            private Tooltip tooltip = new Tooltip();

            @Override
            public void updateItem(Sale sale, boolean empty) {
                super.updateItem(sale, empty);
                if (sale == null) {
                    setTooltip(null);
                } else {
                    String tooltipText = "";
                    long rsBillId = 0;
                    if (sale.getBillNumber() == 0) {
                        // Placeholder row ("no sales") -- nothing to show.
                        tooltip.setText("NA");
                        setTooltip(tooltip);
                    } else {
                        try {
                            Connection dbConnection = JDBC.databaseConnect();
                            Statement sqlStatement = dbConnection.createStatement();
                            ResultSet saleResultSet = sqlStatement.executeQuery("SELECT rs_bill_id,discount FROM retailer_sale_bill WHERE user_access_id='" + user_id + "' AND bill_no='" + sale.getBillNumber() + "'");
                            if (saleResultSet.next()) {
                                rsBillId = saleResultSet.getLong("rs_bill_id");
                                tooltipText = "Discount - " + saleResultSet.getFloat("discount") + "%\n";
                            }
                            saleResultSet.close();
                            // One tooltip line per bill item: name, quantity and line amount.
                            ResultSet itemResultSet = sqlStatement.executeQuery("SELECT * FROM retailer_sale_bill_info where rs_bill_id='" + rsBillId + "'");
                            while (itemResultSet.next()) {
                                int quantity = itemResultSet.getInt("quantity");
                                float amt = quantity * itemResultSet.getFloat("rate");
                                String item = itemResultSet.getString("item");
                                tooltipText = tooltipText + "Item- " + item + " Quantity- " + quantity + " Amount- " + amt + "\n";
                            }
                            itemResultSet.close();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                        tooltip.setText(tooltipText);
                        setTooltip(tooltip);
                    }
                }
            }
        });
    }

    /** Show the sales (and their total) for the date chosen in the DatePicker. */
    public void datePicked() {
        String date = saleDatePicker.getValue().toString();
        float sum = 0.0f;
        saleParticularDayList = getParticularSaleDate(date);
        if (saleParticularDayList.size() < 1) {
            // No sales on that day -- show a single placeholder row.
            saleParticularDayList.add(new Sale(date, (long) 0, "-", "-", "-", "-", 0.0f));
        }
        try {
            Connection dbConnection = JDBC.databaseConnect();
            Statement sqlStatement = dbConnection.createStatement();
            String sqlQuery = "SELECT sum(total_amount) from retailer_sale_bill where user_access_id='" + user_id + "' and date='" + date + "'";
            ResultSet saleResultSet = sqlStatement.executeQuery(sqlQuery);
            if (saleResultSet.next()) {
                sum = saleResultSet.getFloat(1);
            }
            saleResultSet.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Use "is" for today's date, "was" for any past date.
        // NOTE(review): DatePicker.getValue().toString() yields zero-padded ISO dates
        // ("2021-03-07") while dateChk is unpadded -- confirm they ever match; if the DB stores
        // unpadded dates this comparison may always pick "was".
        Calendar dateToday = Calendar.getInstance();
        int year = dateToday.get(Calendar.YEAR);
        int month = dateToday.get(Calendar.MONTH) + 1;
        int day = dateToday.get(Calendar.DAY_OF_MONTH);
        String dateChk = year + "-" + month + "-" + day;
        String tense;
        if (dateChk.equals(date)) tense = "is";
        else tense = "was";
        saleTableView.setItems(saleParticularDayList);
        dateSelectedLabel.setText("Total Sale on " + date + " " + tense + " Rs." + sum);
        datePickedHBox.setVisible(true);
    }

    /** Toggle the "sale of a particular day" panel; restores the full list when collapsed. */
    public void saleParticularDay() {
        if (saleCheck) {
            particularDateSaleButton.setText("See Sale of Particular Day v");
            dateHBox.setVisible(true);
            saleCheck = false;
        } else {
            particularDateSaleButton.setText("See Sale of Particular Day >");
            dateHBox.setVisible(false);
            datePickedHBox.setVisible(false);
            saleDatePicker.getEditor().clear();
            saleTableView.setItems(saleList);
            saleCheck = true;
        }
    }

    /** Bind table columns to Sale properties and load all bills (placeholder row if none). */
    public void initializeTable() {
        dateColumn.setCellValueFactory(new PropertyValueFactory<Sale, String>("date"));
        billNumberColumn.setCellValueFactory(new PropertyValueFactory<Sale, Long>("billNumber"));
        patientNameColumn.setCellValueFactory(new PropertyValueFactory<Sale, String>("patientName"));
        doctorNameColumn.setCellValueFactory(new PropertyValueFactory<Sale, String>("doctorName"));
        companyNameColumn.setCellValueFactory(new PropertyValueFactory<Sale, String>("companyName"));
        modeColumn.setCellValueFactory(new PropertyValueFactory<Sale, String>("mode"));
        amountColumn.setCellValueFactory(new PropertyValueFactory<Sale, Float>("amount"));
        saleList = getSale();
        if (saleList.size() < 1) {
            Calendar dateToday = Calendar.getInstance();
            int year = dateToday.get(Calendar.YEAR);
            int month = dateToday.get(Calendar.MONTH) + 1;
            int day = dateToday.get(Calendar.DAY_OF_MONTH);
            String date = year + "-" + month + "-" + day;
            saleList.add(new Sale(date, (long) 0, "-", "-", "-", "-", 0.0f));
        }
        saleTableView.setItems(saleList);
    }

    /** Load every sale bill of the current user. */
    public ObservableList<Sale> getSale() {
        String sqlQuery = "SELECT * from retailer_sale_bill where user_access_id='" + user_id + "'";
        saleList = FXCollections.observableArrayList();
        saleList = getList(sqlQuery, saleList);
        return saleList;
    }

    /** Load the sale bills of the current user for one specific date. */
    public ObservableList<Sale> getParticularSaleDate(String particularDate) {
        String sqlQuery = "SELECT * from retailer_sale_bill where user_access_id='" + user_id + "' and date='" + particularDate + "'";
        saleParticularDayList = FXCollections.observableArrayList();
        saleParticularDayList = getList(sqlQuery, saleParticularDayList);
        return saleParticularDayList;
    }

    /**
     * Execute the given query and append one Sale per result row to the supplied list.
     * @param sqlQuery SELECT over retailer_sale_bill
     * @param saleListDetails list to fill (also returned)
     */
    public ObservableList<Sale> getList(String sqlQuery, ObservableList<Sale> saleListDetails) {
        try {
            Connection dbConnection = JDBC.databaseConnect();
            Statement sqlStatement = dbConnection.createStatement();
            ResultSet saleResultSet = sqlStatement.executeQuery(sqlQuery);
            while (saleResultSet.next()) {
                String date = saleResultSet.getString("date");
                String patientName = saleResultSet.getString("patient_name");
                String companyName = saleResultSet.getString("company");
                String doctorName = saleResultSet.getString("doctor_name");
                String mode = saleResultSet.getString("mode");
                Long billNumber = saleResultSet.getLong("bill_no");
                Float amount = saleResultSet.getFloat("total_amount");
                saleListDetails.add(new Sale(date, billNumber, patientName, doctorName, companyName, mode, amount));
            }
            saleResultSet.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return saleListDetails;
    }

    public static void setDrawableWidth(double width) {
        drawableWidth = width;
    }
}
package org.wyona.yanel.servlet; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.InputStream; import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; import java.net.URL; import java.util.Calendar; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.xml.transform.Source; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.stream.StreamSource; import org.wyona.commons.xml.XMLHelper; import org.wyona.neutron.XMLExceptionV1; import org.wyona.yanel.core.Environment; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.ResourceNotFoundException; import org.wyona.yanel.core.ResourceTypeIdentifier; import org.wyona.yanel.core.ResourceTypeRegistry; import org.wyona.yanel.core.StateOfView; import org.wyona.yanel.core.ToolbarState; import org.wyona.yanel.core.Yanel; import org.wyona.yanel.core.api.attributes.AnnotatableV1; import org.wyona.yanel.core.api.attributes.IntrospectableV1; import org.wyona.yanel.core.api.attributes.ModifiableV1; import org.wyona.yanel.core.api.attributes.ModifiableV2; import org.wyona.yanel.core.api.attributes.TranslatableV1; import org.wyona.yanel.core.api.attributes.VersionableV2; import org.wyona.yanel.core.api.attributes.ViewableV1; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.api.attributes.WorkflowableV1; import 
org.wyona.yanel.core.api.security.WebAuthenticator; import org.wyona.yanel.core.attributes.versionable.RevisionInformation; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yanel.core.attributes.tracking.TrackingInformationV1; import org.wyona.yanel.core.navigation.Node; import org.wyona.yanel.core.navigation.Sitetree; import org.wyona.yanel.core.serialization.SerializerFactory; import org.wyona.yanel.core.source.SourceResolver; import org.wyona.yanel.core.source.YanelStreamSource; import org.wyona.yanel.core.transformation.I18nTransformer2; import org.wyona.yanel.core.util.DateUtil; import org.wyona.yanel.core.util.HttpServletRequestHelper; import org.wyona.yanel.core.workflow.Workflow; import org.wyona.yanel.core.workflow.WorkflowException; import org.wyona.yanel.core.workflow.WorkflowHelper; import org.wyona.yanel.core.map.Map; import org.wyona.yanel.core.map.Realm; import org.wyona.yanel.core.util.ResourceAttributeHelper; import org.wyona.yanel.impl.resources.BasicGenericExceptionHandlerResource; import org.wyona.yanel.servlet.IdentityMap; import org.wyona.yanel.servlet.communication.HttpRequest; import org.wyona.yanel.servlet.communication.HttpResponse; import org.wyona.yanel.servlet.security.impl.AutoLogin; import org.wyona.security.core.api.Identity; import org.wyona.security.core.api.Usecase; import org.wyona.security.core.api.User; import org.apache.log4j.Logger; import org.apache.xalan.transformer.TransformerIdentityImpl; import org.apache.xml.resolver.tools.CatalogResolver; import org.apache.xml.serializer.Serializer; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.quartz.JobDetail; import org.quartz.Scheduler; import org.quartz.SimpleTrigger; import org.quartz.Trigger; import org.quartz.impl.StdSchedulerFactory; /** * Main entry of Yanel webapp */ public class 
YanelServlet extends HttpServlet {

    private static Logger log = Logger.getLogger(YanelServlet.class);
    private static Logger logAccess = Logger.getLogger(AccessLog.CATEGORY);
    private static Logger log404 = Logger.getLogger("404");

    // Core Yanel collaborators, resolved once in init().
    private Map map;
    private Yanel yanelInstance;
    private Sitetree sitetree;

    // XSLT used for exception/info screens and the default login screen.
    private File xsltInfoAndException;
    private String xsltLoginScreenDefault;
    // Workflow fallback: when a resource has no live version, show the most recent one (used in getContent()).
    private boolean displayMostRecentVersion = true;

    public static final String MOBILE_KEY = "yanel.mobile";
    public static final String IDENTITY_MAP_KEY = "identity-map";
    private static final String TOOLBAR_USECASE = "toolbar"; //TODO is this the same as YanelAuthoringUI.TOOLBAR_KEY?
    // NOTE(review): the literal below appears truncated by text extraction (no closing quote) —
    // restore the full namespace URI from the upstream source before compiling.
    public static final String NAMESPACE = "http:

    // HTTP method names dispatched in service().
    private static final String METHOD_PROPFIND = "PROPFIND";
    private static final String METHOD_OPTIONS = "OPTIONS";
    private static final String METHOD_GET = "GET";
    private static final String METHOD_POST = "POST";
    private static final String METHOD_PUT = "PUT";
    private static final String METHOD_DELETE = "DELETE";
    private static final String HTTP_REFERRER = "Referer";

    // Values read from web.xml init parameters (see init()).
    private String sslPort = null;
    private String toolbarMasterSwitch = "off";
    private String reservedPrefix;
    private String servletContextRealPath;
    private int cacheExpires = 0;
    private YanelHTMLUI yanelUI;
    private boolean logAccessEnabled = false;
    private boolean detectMobilePerRequest = false;

    public static final String DEFAULT_ENCODING = "UTF-8";

    // Request parameter names understood by this servlet.
    public static final String YANEL_ACCESS_POLICY_USECASE = "yanel.policy";
    public static final String YANEL_USECASE = "yanel.usecase";
    public static final String YANEL_RESOURCE = "yanel.resource";
    public static final String YANEL_RESOURCE_USECASE = YANEL_RESOURCE + ".usecase";
    public static final String YANEL_RESOURCE_REVISION = YANEL_RESOURCE + ".revision";
    public static final String YANEL_RESOURCE_WORKFLOW_TRANSITION = YANEL_RESOURCE + ".workflow.transition";
    public static final String YANEL_RESOURCE_WORKFLOW_TRANSITION_OUTPUT = YANEL_RESOURCE_WORKFLOW_TRANSITION + ".output";
    public static final String VIEW_ID_PARAM_NAME = "yanel.resource.viewid";
    public static final String RESOURCE_META_ID_PARAM_NAME = "yanel.resource.meta";
    public static final String RELEASE_LOCK = "release-lock";
    private static final String CONTENT_TYPE_XHTML = "xhtml";
    private static String ANALYTICS_COOKIE_NAME = "_yanel-analytics";
    public static final String YANEL_LAST_ACCESS_ATTR = "_yanel-last-access";

    private Scheduler scheduler;
    private String[] mobileDevices;
    private static String ACCESS_LOG_TAG_SEPARATOR;

    /**
     * Reads servlet configuration (XSLT screens, SSL port, toolbar switch, cache
     * expiry, access-log and mobile-detection settings), bootstraps the Yanel
     * singleton and, further down, optionally the Quartz scheduler.
     * @see javax.servlet.GenericServlet#init(ServletConfig)
     */
    @Override
    public void init(ServletConfig config) throws ServletException {
        servletContextRealPath = config.getServletContext().getRealPath("/");
        xsltInfoAndException = org.wyona.commons.io.FileUtil.file(servletContextRealPath, config.getInitParameter("exception-and-info-screen-xslt"));
        xsltLoginScreenDefault = config.getInitParameter("login-screen-xslt");
        // A missing parameter yields Boolean(null) == false, i.e. the fallback is disabled.
        displayMostRecentVersion = new Boolean(config.getInitParameter("workflow.not-live.most-recent-version")).booleanValue();
        try {
            yanelInstance = Yanel.getInstance();
            yanelInstance.init();
            // TODO: Tell Yanel about alternative directory to look for configuration files, e.g. (File) getServletContext().getAttribute("javax.servlet.context.tempdir")
            map = yanelInstance.getMapImpl("map");
            sitetree = yanelInstance.getSitetreeImpl("repo-navigation");
            sslPort = config.getInitParameter("ssl-port");
            toolbarMasterSwitch = config.getInitParameter("toolbar-master-switch");
            reservedPrefix = yanelInstance.getReservedPrefix();
            String expires = config.getInitParameter("static-content-cache-expires");
            if (expires != null) {
                this.cacheExpires = Integer.parseInt(expires);
            }
            yanelUI = new YanelHTMLUI(map, reservedPrefix);
            // TODO: Make this value configurable also per realm or per individual user!
            logAccessEnabled = new Boolean(config.getInitParameter("log-access")).booleanValue();

            // INFO(review): choose the separator used between tags in the access log;
            // defaults to a comma when the parameter is absent.
            String TAG_SEP_PARAM_NAME = "access-log-tag-separator";
            if (config.getInitParameter(TAG_SEP_PARAM_NAME) != null) {
                if (config.getInitParameter(TAG_SEP_PARAM_NAME).equals("SPACE")) {
                    // Note that the leading and trailing space around the parameter value is trimmed, hence we denote the space sign by SPACE.
                    ACCESS_LOG_TAG_SEPARATOR = " ";
                } else {
                    ACCESS_LOG_TAG_SEPARATOR = config.getInitParameter(TAG_SEP_PARAM_NAME);
                }
            } else {
                ACCESS_LOG_TAG_SEPARATOR = ",";
                log.warn("No access log tag separator parameter '" + TAG_SEP_PARAM_NAME + "' configured, hence use default: " + ACCESS_LOG_TAG_SEPARATOR);
            }

            // TODO: Make this value configurable also per realm or per individual user!
            if (config.getInitParameter("detect-mobile-per-request") != null) {
                detectMobilePerRequest = new Boolean(config.getInitParameter("detect-mobile-per-request")).booleanValue();
            }
            // User-Agent substrings treated as mobile devices; falls back to a hard-coded pair.
            if (config.getInitParameter("mobile-devices") != null) {
                mobileDevices = org.springframework.util.StringUtils.tokenizeToStringArray(config.getInitParameter("mobile-devices"), ",", true, true);
            } else {
                mobileDevices = new String[]{"iPhone", "Android"};
                // NOTE(review): concatenating the array logs its identity hash, not its contents —
                // java.util.Arrays.toString(mobileDevices) would be readable.
                log.error("No mobile devices configured! Please make sure to update your web.xml configuration file accordingly. Fallback to hard-coded list: " + mobileDevices);
            }

            // INFO(review): start Quartz and schedule per-realm jobs from /scheduler-jobs.xml
            // when present; realms with broken configuration are logged and skipped.
            if (yanelInstance.isSchedulerEnabled()) {
                log.warn("Startup scheduler ...");
                scheduler = StdSchedulerFactory.getDefaultScheduler();
                Realm[] realms = yanelInstance.getRealmConfiguration().getRealms();
                for (int i = 0; i < realms.length; i++) {
                    if (realms[i] instanceof org.wyona.yanel.core.map.RealmWithConfigurationExceptionImpl) {
                        String eMessage = ((org.wyona.yanel.core.map.RealmWithConfigurationExceptionImpl) realms[i]).getConfigurationException().getMessage();
                        log.error("Realm '" + realms[i].getID() + "' has thrown a configuration exception: " + eMessage);
                    } else {
                        String schedulerJobsPath = "/scheduler-jobs.xml";
                        if (realms[i].getRepository().existsNode(schedulerJobsPath)) {
                            log.debug("Scheduler jobs config found for realm: " + realms[i].getRepository().getID());
                            try {
                                org.wyona.yanel.impl.scheduler.QuartzSchedulerUtil.schedule(scheduler, XMLHelper.readDocument(realms[i].getRepository().getNode(schedulerJobsPath).getInputStream()), realms[i]);
                            } catch(Exception e) {
                                log.error(e, e); // INFO: Log error, but otherwise ignore and keep going ...
                            }
                        }
                    }
                }
                /* TODO: Make global scheduler jobs configurable
                String groupName = "yanel";
                JobDetail jobDetail = new JobDetail("heartbeatJob", groupName, org.wyona.yanel.servlet.HeartbeatJob.class);
                Date startDate = new Date();
                Date endDate = null;
                Trigger trigger = new SimpleTrigger("heartbeatTrigger", groupName, startDate, endDate, SimpleTrigger.REPEAT_INDEFINITELY, 60L * 1000L);
                scheduler.scheduleJob(jobDetail, trigger);
                */
                scheduler.start();
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            throw new ServletException(e.getMessage(), e);
        }
    }

    /**
     * Central dispatcher: handles logout/create usecases, enforces access
     * control, serves policy and global-data requests, then delegates to the
     * per-HTTP-method handlers.
     * @see javax.servlet.http.HttpServlet#service(HttpServletRequest, HttpServletResponse)
     */
    @Override
    protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // NOTE: Do not add code outside the try-catch block, because otherwise exceptions won't be logged
        try {
            //String httpAcceptMediaTypes = request.getHeader("Accept");
            //String httpAcceptLanguage = request.getHeader("Accept-Language");
            String yanelUsecase = request.getParameter(YANEL_USECASE);
            if(yanelUsecase != null && yanelUsecase.equals("logout")) {
                try {
                    log.warn("DEBUG: Disable auto login...");
                    // TODO: The cookie is not always deleted!
                    AutoLogin.disableAutoLogin(request, response, getRealm(request).getRepository());
                } catch (Exception e) {
                    log.error("Exception while disabling auto login: " + e.getMessage(), e);
                }
                // INFO: Logout from Yanel
                if(doLogout(request, response)) {
                    return;
                } else {
                    log.error("Logout failed!");
                }
            } else if(yanelUsecase != null && yanelUsecase.equals("create")) {
                // TODO: Why does that not go through access control?
// INFO: Create a new resource if(doCreate(request, response) != null) return; } // Check authorization and if authorization failed, then try to authenticate if (doAccessControl(request, response) != null) { // INFO: Either redirect (after successful authentication) or access denied (and response will send the login screen) return; } else { if (log.isDebugEnabled()) log.debug("Access granted: " + request.getServletPath()); } // Check for requests re policies String policyRequestPara = request.getParameter(YANEL_ACCESS_POLICY_USECASE); if (policyRequestPara != null) { doAccessPolicyRequest(request, response, 1); return; } else if (yanelUsecase != null && yanelUsecase.equals("policy.read")) { doAccessPolicyRequest(request, response, 2); return; } // Check for requests for global data Resource resource = getResource(request, response); String path = resource.getPath(); if (path.indexOf("/" + reservedPrefix + "/") == 0) { getGlobalData(request, response); return; } String value = request.getParameter(YANEL_RESOURCE_USECASE); // Delete node if (value != null && value.equals("delete")) { handleDeleteUsecase(request, response); return; } // INFO: Check if user agent is mobile device doMobile(request); // Delegate ... 
String method = request.getMethod(); if (method.equals(METHOD_PROPFIND)) { doPropfind(request, response); } else if (method.equals(METHOD_GET)) { doGet(request, response); } else if (method.equals(METHOD_POST)) { doPost(request, response); } else if (method.equals(METHOD_PUT)) { doPut(request, response); } else if (method.equals(METHOD_DELETE)) { doDelete(request, response); } else if (method.equals(METHOD_OPTIONS)) { doOptions(request, response); } else { log.error("No such method implemented: " + method); response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED); } } catch (ServletException e) { log.error(e, e); throw new ServletException(e.getMessage(), e); } catch (IOException e) { log.error(e, e); throw new IOException(e.getMessage()); } // NOTE: This was our last chance to log an exception, hence do not add code outside the try-catch block } /** * @see javax.servlet.http.HttpServlet#doGet(HttpServletRequest, HttpServletResponse) */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // INFO: Init session in case it does not exist yet HttpSession session = request.getSession(true); // INFO: Enable or disable toolbar yanelUI.switchToolbar(request); // INFO: Handle workflow transitions String transition = request.getParameter(YANEL_RESOURCE_WORKFLOW_TRANSITION); if (transition != null) { executeWorkflowTransition(request, response, request.getParameter(YANEL_RESOURCE_REVISION), transition); return; } // INFO: Init resource Resource resource = getResource(request, response); // INFO: Check for requests refered by WebDAV String yanelWebDAV = request.getParameter("yanel.webdav"); if(yanelWebDAV != null && yanelWebDAV.equals("propfind1")) { log.info("WebDAV client (" + request.getHeader("User-Agent") + ") requests to \"edit\" a resource: " + resource.getRealm() + ", " + resource.getPath()); //return; } // INFO: Handle first specific Yanel usecase requests and then at the very end all other 
requests String value = request.getParameter(YANEL_RESOURCE_USECASE); try { if (value != null && value.equals(RELEASE_LOCK)) { log.warn("Try to release lock ..."); if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { VersionableV2 versionable = (VersionableV2)resource; String checkoutUserID = versionable.getCheckoutUserID(); Identity identity = getEnvironment(request, response).getIdentity(); String userID = identity.getUsername(); Usecase usecase = new Usecase(RELEASE_LOCK); String path = resource.getPath(); if (checkoutUserID.equals(userID) || resource.getRealm().getPolicyManager().authorize(path, identity, usecase)) { try { versionable.cancelCheckout(); log.debug("Lock has been released."); response.setStatus(HttpServletResponse.SC_OK); response.setContentType("text/html" + "; charset=" + "UTF-8"); String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(resource.getPath()); StringBuilder sb = new StringBuilder("<html xmlns=\"http: PrintWriter w = response.getWriter(); w.print(sb); return; } catch (Exception e) { throw new ServletException("Releasing the lock of <" + resource.getPath() + "> failed because of: " + e.getMessage(), e); } } else { String eMessage = "Releasing the lock of '" + resource.getPath() + "' failed because"; if (checkoutUserID.equals(userID)) { eMessage = " user '" + userID + "' has no right to release her/his own lock!"; } else { eMessage = " checkout user '" + checkoutUserID + "' and session user '" + userID + "' are not the same and session user '" + userID + "' has no right to release the lock of the checkout user '" + checkoutUserID + "'!"; } log.warn(eMessage); throw new ServletException(eMessage); } } else { throw new ServletException("Resource '" + resource.getPath() + "' is not VersionableV2!"); } } else if (value != null && value.equals("roll-back")) { log.debug("Roll back ..."); org.wyona.yanel.core.util.VersioningUtil.rollBack(resource, request.getParameter(YANEL_RESOURCE_REVISION), 
getIdentity(request, map).getUsername()); // TODO: Send confirmation screen getContent(request, response); return; } else { //log.debug("Handle all other GET requests..."); getContent(request, response); return; } } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } /** * Returns the mime-type according to the given file extension. * Default is application/octet-stream. * @param extension * @return */ private static String guessMimeType(String extension) { String ext = extension.toLowerCase(); if (ext.equals("html") || ext.equals("htm")) return "text/html"; if (ext.equals("css")) return "text/css"; if (ext.equals("txt")) return "text/plain"; if (ext.equals("js")) return "application/x-javascript"; if (ext.equals("jpg") || ext.equals("jpg")) return "image/jpeg"; if (ext.equals("gif")) return "image/gif"; if (ext.equals("pdf")) return "application/pdf"; if (ext.equals("zip")) return "application/zip"; if (ext.equals("htc")) return "text/x-component"; if (ext.equals("svg")) return "image/svg+xml"; // TODO: add more mime types // TODO: and move to MimeTypeUtil return "application/octet-stream"; // default } /** * Generate response from view of resource */ private void getContent(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { View view = null; org.w3c.dom.Document doc = null; try { doc = getDocument(NAMESPACE, "yanel"); } catch (Exception e) { throw new ServletException(e.getMessage(), e); } Element rootElement = doc.getDocumentElement(); rootElement.setAttribute("servlet-context-real-path", servletContextRealPath); Element requestElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "request")); requestElement.setAttributeNS(NAMESPACE, "uri", request.getRequestURI()); requestElement.setAttributeNS(NAMESPACE, "servlet-path", request.getServletPath()); HttpSession session = request.getSession(true); Element sessionElement = (Element) 
        rootElement.appendChild(doc.createElement("session"));
        sessionElement.setAttribute("id", session.getId());
        // Mirror every session attribute into the meta document (or an explicit no-attributes marker).
        Enumeration<?> attrNames = session.getAttributeNames();
        if (!attrNames.hasMoreElements()) {
            Element sessionNoAttributesElement = (Element) sessionElement.appendChild(doc.createElement("no-attributes"));
        }
        while (attrNames.hasMoreElements()) {
            String name = (String)attrNames.nextElement();
            String value = session.getAttribute(name).toString();
            Element sessionAttributeElement = (Element) sessionElement.appendChild(doc.createElement("attribute"));
            sessionAttributeElement.setAttribute("name", name);
            sessionAttributeElement.appendChild(doc.createTextNode(value));
        }

        String usecase = request.getParameter(YANEL_RESOURCE_USECASE);

        Resource res = null;
        TrackingInformationV1 trackInfo = null;
        long lastModified = -1;
        long size = -1;
        // START first try
        try {
            Environment environment = getEnvironment(request, response);
            res = getResource(request, response);
            if (res != null) {
                // Optional request tracking for resources implementing TrackableV1.
                if (ResourceAttributeHelper.hasAttributeImplemented(res, "Trackable", "1")) {
                    //log.debug("Do track: " + res.getPath());
                    trackInfo = new TrackingInformationV1();
                    ((org.wyona.yanel.core.api.attributes.TrackableV1) res).doTrack(trackInfo);
                //} else {
                //    log.debug("Resource '" + res.getPath() + "' is not trackable.");
                }

                // START introspection generation
                if (usecase != null && usecase.equals("introspection")) {
                    sendIntrospectionAsResponse(res, doc, rootElement, request, response);
                    return;
                }
                // END introspection generation

                Element resourceElement = getResourceMetaData(res, doc, rootElement);
                Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view"));
                // Resolve the view: ViewableV1 takes precedence, then ViewableV2, else error out below.
                if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "1")) {
                    if (log.isDebugEnabled()) log.debug("Resource is viewable V1");
                    viewElement.setAttributeNS(NAMESPACE, "version", "1");
                    appendViewDescriptors(doc, viewElement, ((ViewableV1) res).getViewDescriptors());
                    String viewId = getViewID(request);
                    try {
                        view = ((ViewableV1) res).getView(request, viewId);
                    } catch (org.wyona.yarep.core.NoSuchNodeException e) {
                        String message = e.getMessage();
                        log.error(message, e);
                        do404(request, response, doc, message);
                        return;
                    } catch (Exception e) {
                        // Report the failure inside the meta document with a 500 status.
                        String message = e.getMessage();
                        log.error(message, e);
                        Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
                        exceptionElement.appendChild(doc.createTextNode(message));
                        exceptionElement.setAttributeNS(NAMESPACE, "status", "500");
                        response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                        setYanelOutput(request, response, doc);
                        return;
                    }
                } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "2")) {
                    if (log.isDebugEnabled()) log.debug("Resource '" + res.getPath() + "' is viewable V2");
                    viewElement.setAttributeNS(NAMESPACE, "version", "2");
                    appendViewDescriptors(doc, viewElement, ((ViewableV2) res).getViewDescriptors());

                    // Deliberately no 404 here (backwards compatibility) — only warn.
                    if (!((ViewableV2) res).exists()) {
                        log.warn("No such ViewableV2 resource: " + res.getPath());
                        log.warn("TODO: Many ViewableV2 resources are not implementing exists() properly, hence do not generate a 404 for backwards compatibility! As a workaround use the exists() method within the getView(String) method and throw a ResourceNotFoundException accordingly.");
                        //do404(request, response, doc, res.getPath());
                        //return;
                    }

                    try {
                        size = ((ViewableV2) res).getSize();
                        Element sizeElement = (Element) resourceElement.appendChild(doc.createElement("size"));
                        sizeElement.appendChild(doc.createTextNode(String.valueOf(size)));
                    } catch(ResourceNotFoundException e) {
                        log.error(e, e); // INFO: Let's be fault tolerant such that a 404 can be handled more gracefully further down
                    }
                    String viewId = getViewID(request);
                    try {
                        String revisionName = request.getParameter(YANEL_RESOURCE_REVISION);
                        // NOTE: Check also if usecase is not roll-back, because roll-back is also using the yanel.resource.revision
                        if (revisionName != null && ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2") && !isRollBack(request)) {
                            // Explicit revision requested.
                            view = ((VersionableV2) res).getView(viewId, revisionName);
                        } else if (environment.getStateOfView().equals(StateOfView.LIVE) && ResourceAttributeHelper.hasAttributeImplemented(res, "Workflowable", "1") && WorkflowHelper.getWorkflow(res) != null) {
                            // TODO: Check if resource actually exists (see the exist problem above), because even it doesn't exist, the workflowable interfaces can return something although it doesn't really make sense. For example if a resource type is workflowable, but it has no workflow associated with it, then WorkflowHelper.isLive will nevertheless return true, whereas WorkflowHelper.getLiveView will throw an exception!
                            if (!((ViewableV2) res).exists()) {
                                log.warn("No such ViewableV2 resource: " + res.getPath());
                                log.warn("TODO: It seems like many ViewableV2 resources are not implementing exists() properly!");
                                do404(request, response, doc, res.getPath());
                                return;
                            }
                            WorkflowableV1 workflowable = (WorkflowableV1)res;
                            if (workflowable.isLive()) {
                                view = workflowable.getLiveView(viewId);
                            } else {
                                String message = "The viewable (V2) resource '" + res.getPath() + "' is WorkflowableV1, but has not been published yet.";
                                log.warn(message);
                                // TODO: Make this configurable per resource (or rather workflowable interface) or per realm?!
                                if (displayMostRecentVersion) { // INFO: Because of backwards compatibility the default should display the most recent version
                                    log.warn("Instead the live version, the most recent version will be displayed!");
                                    view = ((ViewableV2) res).getView(viewId);
                                } else {
                                    log.warn("Instead the live version, a 404 will be displayed!");
                                    // TODO: Instead a 404 one might want to show a different kind of screen
                                    do404(request, response, doc, message);
                                    return;
                                }
                            }
                        } else {
                            view = ((ViewableV2) res).getView(viewId);
                        }
                    } catch (org.wyona.yarep.core.NoSuchNodeException e) {
                        String message = e.getMessage();
                        log.warn(message, e);
                        do404(request, response, doc, message);
                        return;
                    } catch (ResourceNotFoundException e) {
                        String message = e.getMessage();
                        log.warn(message, e);
                        do404(request, response, doc, message);
                        return;
                    }
                } else { // NO Viewable interface implemented!
                    String message = res.getClass().getName() + " is not viewable! (" + res.getPath() + ", " + res.getRealm() + ")";
                    log.error(message);
                    Element noViewElement = (Element) resourceElement.appendChild(doc.createElement("not-viewable"));
                    noViewElement.appendChild(doc.createTextNode(res.getClass().getName() + " is not viewable!"));
                    Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
                    exceptionElement.appendChild(doc.createTextNode(message));
                    exceptionElement.setAttributeNS(NAMESPACE, "status", "501");
                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_IMPLEMENTED);
                    setYanelOutput(request, response, doc);
                    return;
                }

                // Attach last-modified metadata when available.
                if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
                    lastModified = ((ModifiableV2) res).getLastModified();
                    Element lastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("last-modified"));
                    lastModifiedElement.appendChild(doc.createTextNode(new Date(lastModified).toString()));
                } else {
                    Element noLastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("no-last-modified"));
                }

                // Get the revisions, but only in the meta usecase (because of performance reasons)
                if (request.getParameter(RESOURCE_META_ID_PARAM_NAME) != null) {
                    appendRevisionsAndWorkflow(doc, resourceElement, res, request);
                }

                // List available translations for TranslatableV1 resources.
                if (ResourceAttributeHelper.hasAttributeImplemented(res, "Translatable", "1")) {
                    TranslatableV1 translatable = ((TranslatableV1) res);
                    Element translationsElement = (Element) resourceElement.appendChild(doc.createElement("translations"));
                    String[] languages = translatable.getLanguages();
                    for (int i=0; i<languages.length; i++) {
                        Element translationElement = (Element) translationsElement.appendChild(doc.createElement("translation"));
                        translationElement.setAttribute("language", languages[i]);
                        String path = translatable.getTranslation(languages[i]).getPath();
                        translationElement.setAttribute("path", path);
                    }
                }

                if (usecase != null && usecase.equals("checkout")) {
                    if(log.isDebugEnabled()) log.debug("Checkout data ...");
                    if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) {
                        // NOTE: The code below will throw an exception if the document is checked out already by another user.
                        String userID = environment.getIdentity().getUsername();
                        VersionableV2 versionable = (VersionableV2)res;
                        if (versionable.isCheckedOut()) {
                            String checkoutUserID = versionable.getCheckoutUserID();
                            if (checkoutUserID.equals(userID)) {
                                log.warn("Resource " + res.getPath() + " is already checked out by this user: " + checkoutUserID);
                            } else {
                                // Conflict: Neutron-capable clients get a structured XML exception, others a plain one.
                                if (isClientSupportingNeutron(request)) {
                                    String eMessage = "Resource '" + res.getPath() + "' is already checked out by another user: " + checkoutUserID;
                                    response.setContentType("application/xml");
                                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                                    // TODO: Checkout date and break-lock (optional)
                                    response.getWriter().print(XMLExceptionV1.getCheckoutException(eMessage, res.getPath(), checkoutUserID, null));
                                    return;
                                } else {
                                    throw new Exception("Resource '" + res.getPath() + "' is already checked out by another user: " + checkoutUserID);
                                }
                            }
                        } else {
                            versionable.checkout(userID);
                        }
                    } else {
                        log.warn("Acquire lock has not been implemented yet ...!");
                        // acquireLock();
                    }
                }
            } else {
                Element resourceIsNullElement = (Element) rootElement.appendChild(doc.createElement("resource-is-null"));
            }
        } catch (org.wyona.yarep.core.NoSuchNodeException e) {
            String message = e.getMessage();
            log.warn(message, e);
            do404(request, response, doc, message);
            return;
        } catch (org.wyona.yanel.core.ResourceNotFoundException e) {
            String message = e.getMessage();
            log.warn(message, e);
            do404(request, response, doc, message);
            return;
        } catch (Exception e) {
            log.error(e, e);
            handleException(request, response, e);
            return;
        }
        // END first try

        // Meta request: return the meta document itself instead of the resource view.
        String meta = request.getParameter(RESOURCE_META_ID_PARAM_NAME);
        if (meta != null) {
            if (meta.length() > 0) {
                if (meta.equals("annotations")) {
                    log.warn("TODO: Remove everything from the page meta document
 except the annotations");
                } else {
                    log.warn("Stripping everything from page meta document but '" + meta + "' not supported!");
                }
            } else {
                log.debug("Show all meta");
                appendAnnotations(doc, res);
                appendTrackingInformation(doc, trackInfo);
            }
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
            setYanelOutput(request, response, doc);
            return;
        }

        // Normal case: stream the resolved view; fall back to a 500 meta document.
        if (view != null) {
            if (generateResponse(view, res, request, response, -1, doc, size, lastModified, trackInfo) != null) {
                //log.debug("Response has been generated :-)");
                return;
            } else {
                log.warn("No response has been generated!");
            }
        } else {
            String message = "View is null!";
            Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
            exceptionElement.appendChild(doc.createTextNode(message));
        }
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        setYanelOutput(request, response, doc);
        return;
    }

    /**
     * Handles POST requests: workflow transitions, save/checkin usecases,
     * Atom-entry creation, and otherwise delegates to getContent().
     * @see javax.servlet.http.HttpServlet#doPost(HttpServletRequest, HttpServletResponse)
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        String transition = request.getParameter(YANEL_RESOURCE_WORKFLOW_TRANSITION);
        if (transition != null) {
            executeWorkflowTransition(request, response, request.getParameter(YANEL_RESOURCE_REVISION), transition);
            return;
        }

        String value = request.getParameter(YANEL_RESOURCE_USECASE);
        if (value != null && value.equals("save")) {
            log.debug("Save data ...");
            save(request, response, false);
            return;
        } else if (value != null && value.equals("checkin")) {
            log.debug("Checkin data ...");
            save(request, response, true);
            log.warn("Release lock has not been implemented yet ...");
            // releaseLock();
            return;
        } else {
            log.info("No parameter " + YANEL_RESOURCE_USECASE + "!");
            String contentType = request.getContentType();
            // TODO: Check for type (see section 9.2 of APP spec (e.g. draft 16)
            if (contentType != null && contentType.indexOf("application/atom+xml") >= 0) {
                InputStream in = intercept(request.getInputStream());
                // Create new Atom entry
                try {
                    // NOTE(review): the literal below appears truncated by text extraction
                    // (no closing quote) — restore the universal name from upstream source.
                    String atomEntryUniversalName = "<{http:
                    Realm realm = yanelInstance.getMap().getRealm(request.getServletPath());
                    // New entry gets a timestamp-based filename under the request path.
                    String newEntryPath = yanelInstance.getMap().getPath(realm, request.getServletPath() + "/" + new Date().getTime() + ".xml");
                    log.debug("Realm and Path of new Atom entry: " + realm + " " + newEntryPath);
                    Resource atomEntryResource = yanelInstance.getResourceManager().getResource(getEnvironment(request, response), realm, newEntryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new ResourceTypeIdentifier(atomEntryUniversalName, null));
                    ((ModifiableV2)atomEntryResource).write(in);
                    // Echo the stored entry back to the client.
                    byte buffer[] = new byte[8192];
                    int bytesRead;
                    InputStream resourceIn = ((ModifiableV2)atomEntryResource).getInputStream();
                    OutputStream responseOut = response.getOutputStream();
                    while ((bytesRead = resourceIn.read(buffer)) != -1) {
                        responseOut.write(buffer, 0, bytesRead);
                    }
                    resourceIn.close();
                    //responseOut.close();
                    // TODO: Fix Location ...
                    response.setHeader("Location", "http://ulysses.wyona.org" + newEntryPath);
                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_CREATED);
                    return;
                } catch (Exception e) {
                    throw new ServletException(e.getMessage(), e);
                }
            }

            // Enable or disable toolbar
            yanelUI.switchToolbar(request);

            getContent(request, response);
        }
    }

    /** * Perform the given transition on the indicated revision.
* @param request * @param response * @param transition * @throws ServletException * @throws IOException */ private void executeWorkflowTransition(HttpServletRequest request, HttpServletResponse response, String revision, String transition) throws ServletException, IOException { Resource resource = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Workflowable", "1")) { WorkflowableV1 workflowable = (WorkflowableV1)resource; try { String outputFormat = request.getParameter(YANEL_RESOURCE_WORKFLOW_TRANSITION_OUTPUT); StringBuilder sb = null; workflowable.doTransition(transition, revision); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); if (outputFormat != null && CONTENT_TYPE_XHTML.equals(outputFormat.toLowerCase())) { response.setContentType("text/html; charset=" + DEFAULT_ENCODING); sb = new StringBuilder("<html xmlns=\"http: + " has been performed.</p><p>Return to <a href=\"" + request.getHeader(HTTP_REFERRER) + "\">the page</a>.</p></body></html>"); } else { log.warn("No output format query string parameter '" + YANEL_RESOURCE_WORKFLOW_TRANSITION_OUTPUT + "' has been specified."); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); sb = new StringBuilder("<?xml version=\"1.0\"?>"); sb.append(workflowable.getWorkflowIntrospection()); } PrintWriter w = response.getWriter(); w.print(sb); } catch (WorkflowException e) { log.error(e, e); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); w.print(getWorkflowException(e.getMessage())); return; } } else { log.warn("Resource not workflowable: " + resource.getPath()); } } /** * HTTP PUT implementation. */ @Override protected void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO: Reuse code doPost resp. 
share code with doPut String value = request.getParameter(YANEL_RESOURCE_USECASE); if (value != null && value.equals("save")) { log.debug("Save data ..."); save(request, response, false); return; } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); save(request, response, true); log.warn("Release lock has not been implemented yet ...!"); // releaseLock(); return; } else { log.warn("No parameter " + YANEL_RESOURCE_USECASE + "!"); String contentType = request.getContentType(); if (contentType != null && contentType.indexOf("application/atom+xml") >= 0) { InputStream in = intercept(request.getInputStream()); // Overwrite existing atom entry try { String atomEntryUniversalName = "<{http: Realm realm = yanelInstance.getMap().getRealm(request.getServletPath()); String entryPath = yanelInstance.getMap().getPath(realm, request.getServletPath()); log.debug("Realm and Path of new Atom entry: " + realm + " " + entryPath); Resource atomEntryResource = yanelInstance.getResourceManager().getResource(getEnvironment(request, response), realm, entryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new ResourceTypeIdentifier(atomEntryUniversalName, null)); // TODO: There seems to be a problem ... 
((ModifiableV2)atomEntryResource).write(in); // NOTE: This method does not update updated date /* OutputStream out = ((ModifiableV2)atomEntry).getOutputStream(entryPath); byte buffer[] = new byte[8192]; int bytesRead; while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } */ log.info("Atom entry has been saved: " + entryPath); response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK); return; } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } else { Resource resource = getResource(request, response); log.warn("Client (" + request.getHeader("User-Agent") + ") requests to save a resource: " + resource.getRealm() + ", " + resource.getPath()); save(request, response, false); return; } } } /** * @see javax.servlet.http.HttpServlet#doDelete(HttpServletRequest, HttpServletResponse); */ @Override protected void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { Resource res = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) { if (((ModifiableV2) res).delete()) { // TODO: Also delete resource config! What about access policies?! log.debug("Resource has been deleted: " + res); response.setStatus(HttpServletResponse.SC_OK); response.setContentType("text/html" + "; charset=" + "UTF-8"); String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(res.getPath()); StringBuilder sb = new StringBuilder("<html xmlns=\"http: PrintWriter w = response.getWriter(); w.print(sb); return; } else { log.warn("Deletable (or rather ModifiableV2) resource '" + res + "' could not be deleted!"); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return; } } else { log.error("Resource '" + res + "' has interface ModifiableV2 not implemented." 
); response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED); return; // QUESTION: According to the spec http://docs.oracle.com/javaee/1.4/api/javax/servlet/http/HttpServlet.html#doDelete%28javax.servlet.http.HttpServletRequest,%20javax.servlet.http.HttpServletResponse%29 one should rather throw a ServletException, right? } } catch (Exception e) { throw new ServletException("Could not delete resource with URL <" + request.getRequestURL() + ">: " + e.getMessage(), e); } } /** * Resolve resource for a specific request */ private Resource getResource(HttpServletRequest request, HttpServletResponse response) throws ServletException { try { Realm realm = map.getRealm(request.getServletPath()); String path = map.getPath(realm, request.getServletPath()); HttpRequest httpRequest = (HttpRequest)request; HttpResponse httpResponse = new HttpResponse(response); Resource res = yanelInstance.getResourceManager().getResource(getEnvironment(httpRequest, httpResponse), realm, path); return res; } catch (Exception e) { log.error(e, e); throw new ServletException("Could not get resource for request <" + request.getServletPath() + ">: " + e.getMessage(), e); } } /** * Get environment containing identity , client request, etc. */ private Environment getEnvironment(HttpServletRequest request, HttpServletResponse response) throws ServletException { Identity identity; try { identity = getIdentity(request, map); Realm realm = map.getRealm(request.getServletPath()); String stateOfView = StateOfView.AUTHORING; if (yanelUI.isToolbarEnabled(request)) { // TODO: Is this the only criteria? stateOfView = StateOfView.AUTHORING; } else { stateOfView = StateOfView.LIVE; } //log.debug("State of view: " + stateOfView); Environment environment = new Environment(request, response, identity, stateOfView, null); if (yanelUI.isToolbarEnabled(request)) { // INFO: Please note that isToolbarEnabled() also checks whether toolbar is suppressed... 
environment.setToolbarState(ToolbarState.ON); } else if (yanelUI.isToolbarSuppressed(request)) { environment.setToolbarState(ToolbarState.SUPPRESSED); } else { environment.setToolbarState(ToolbarState.OFF); } return environment; } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } /** * Save data */ private void save(HttpServletRequest request, HttpServletResponse response, boolean doCheckin) throws ServletException, IOException { log.debug("Save data ..."); Resource resource = getResource(request, response); /* NOTE: Commented because the current default repo implementation does not support versioning yet. if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { try { // check the resource state: Identity identity = getIdentity(request); String userID = identity.getUser().getID(); VersionableV2 versionable = (VersionableV2)resource; if (versionable.isCheckedOut()) { String checkoutUserID = versionable.getCheckoutUserID(); if (!checkoutUserID.equals(userID)) { throw new Exception("Resource is checked out by another user: " + checkoutUserID); } } else { throw new Exception("Resource is not checked out."); } } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } */ InputStream in = request.getInputStream(); // Check on well-formedness ... 
String contentType = request.getContentType(); log.debug("Content-Type: " + contentType); if (contentType != null && (contentType.indexOf("application/xml") >= 0 || contentType.indexOf("application/xhtml+xml") >= 0)) { try { in = XMLHelper.isWellFormed(in); } catch(Exception e) { log.error(e, e); response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); w.print(XMLExceptionV1.getDefaultException(XMLExceptionV1.DATA_NOT_WELL_FORMED, e.getMessage())); return; } } else { log.info("No well-formedness check required for content type: " + contentType); } // IMPORTANT TODO: Use ModifiableV2.write(InputStream in) such that resource can modify data during saving resp. check if getOutputStream is equals null and then use write .... OutputStream out = null; Resource res = getResource(request, response); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "1")) { out = ((ModifiableV1) res).getOutputStream(new Path(request.getServletPath())); write(in, out, request, response); } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) { try { out = ((ModifiableV2) res).getOutputStream(); if (out != null) { write(in, out, request, response); } else { log.warn("getOutputStream() returned null, hence fallback to write()"); ((ModifiableV2) res).write(in); } } catch (Exception e) { throw new ServletException(e.getMessage(), e); } } else { String message = res.getClass().getName() + " is not modifiable (neither V1 nor V2)!"; log.warn(message); // TODO: Differentiate between Neutron based and other clients ... 
(Use method isClientSupportingNeutron()) response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); PrintWriter w = response.getWriter(); // TODO: This is not really a 'checkin' problem, but rather a general 'save-data' problem, but the Neutron spec does not support such a type: http://neutron.wyona.org/draft-neutron-protocol-v0.html#rfc.section.8 w.print(XMLExceptionV1.getDefaultException(XMLExceptionV1.CHECKIN, message)); } if (doCheckin) { if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Versionable", "2")) { VersionableV2 versionable = (VersionableV2)resource; try { versionable.checkin("updated"); } catch (Exception e) { throw new ServletException("Could not check in resource <" + resource.getPath() + ">: " + e.getMessage(), e); } } } } /** * Check authorization and if not authorized then authenticate. Return null if authorization granted, otherwise return 401 and appropriate response such that client can provide credentials for authentication * * @return Null if access is granted and an authentication response if access is denied */ private HttpServletResponse doAccessControl(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // INFO: Get identity, realm, path Identity identity; Realm realm; String path; try { identity = getIdentity(request, map); realm = map.getRealm(request.getServletPath()); path = map.getPath(realm, request.getServletPath()); } catch (Exception e) { throw new ServletException(e.getMessage(), e); } // INFO: Try Auto-Login if (identity == null || (identity != null && identity.isWorld())) { //log.debug("Not logged in yet, hence try auto login..."); try { if (AutoLogin.tryAutoLogin(request, response, realm)) { log.debug("Auto login successful, hence set identity inside session..."); String username = AutoLogin.getUsername(request); if (username != null) { User user = 
realm.getIdentityManager().getUserManager().getUser(username); setIdentity(new Identity(user, user.getEmail()), request.getSession(), realm); } else { log.error("Auto login successful, but no username available!"); } } else { //log.debug("No auto login."); } } catch(Exception e) { log.error(e, e); } } // INFO: Check Authorization boolean authorized = false; Usecase usecase = getUsecase(request); try { if (log.isDebugEnabled()) log.debug("Check authorization: realm: " + realm + ", path: " + path + ", identity: " + identity + ", Usecase: " + usecase.getName()); authorized = realm.getPolicyManager().authorize(path, identity, usecase); if (log.isDebugEnabled()) log.debug("Check authorization result: " + authorized); } catch (Exception e) { throw new ServletException(e.getMessage(), e); } if (authorized) { if (identity != null && identity.getUsername() != null) { if (identity.getUsername() != null) { if(log.isDebugEnabled()) log.debug("Access for user '" + identity.getUsername() + "' granted: " + getRequestURLQS(request, null, false)); } else { if(log.isDebugEnabled()) log.debug("Access for anonymous user (aka WORLD) granted: " + getRequestURLQS(request, null, false)); } } else { if(log.isDebugEnabled()) log.debug("Access for anonymous user (aka WORLD) granted: " + getRequestURLQS(request, null, false)); } return null; // INFO: Return null in order to indicate that access is granted } else { log.warn("Access denied: " + getRequestURLQS(request, null, false) + " (Path of request: " + path + "; Identity: " + identity + "; Usecase: " + usecase + ")"); // TODO: Implement HTTP BASIC/DIGEST response (see above) // INFO: If request is not via SSL and SSL is configured, then redirect to SSL connection. 
if(!request.isSecure()) { if(sslPort != null) { log.info("Redirect to SSL ..."); try { URL url = new URL(getRequestURLQS(request, null, false).toString()); url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile()); if (realm.isProxySet()) { if (realm.getProxySSLPort() >= 0) { log.debug("Use configured port: " + realm.getProxySSLPort()); url = new URL(url.getProtocol(), url.getHost(), new Integer(realm.getProxySSLPort()).intValue(), url.getFile()); } else { log.debug("Use default port: " + url.getDefaultPort()); // NOTE: getDefaultPort depends on the Protocol (e.g. https is 443) url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile()); } } log.info("Redirect to SSL: " + url); response.setHeader("Location", url.toString()); // TODO: Yulup has a bug re TEMPORARY_REDIRECT //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); return response; } catch (Exception e) { log.error(e.getMessage(), e); } } else { log.warn("SSL does not seem to be configured!"); } } else { log.info("This connection is already via SSL."); } if (doAuthenticate(request, response) != null) { log.info("Access denied and not authenticated yet, hence return response of web authenticator."); /* NOTE: Such a response can have different reasons: - Either no credentials provided yet and web authenticator is generating a response to fetch credentials - Or authentication failed and web authenticator is resending response to fetch again credentials"); - Or authentication was successful and web authenticator sends a redirect */ // TODO: Check "would be mime type", etc.: if (logAccessIsApplicable(view.getMimeType())) { if(logAccessEnabled) { // INFO: Although authorization has been denied and user first needs to authenticate, let's log the request anyway if (usecase != null && usecase.getName().equals("introspection")) { log.debug("Ignore 
introspection request: " + getRequestURLQS(request, null, false)); } else { log.info("Access denied and authentication not completed yet, hence let's log request '" + getRequestURLQS(request, null, false) + "'"); doLogAccess(request, response, HttpServletResponse.SC_UNAUTHORIZED, null, null); } } return response; } else { try { log.warn("Authentication was successful for user: " + getIdentity(request, map).getUsername()); } catch (Exception e) { log.error(e.getMessage(), e); } URL url = new URL(getRequestURLQS(request, null, false).toString()); if (sslPort != null) { url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile()); } log.warn("Redirect to original request: " + url); //response.sendRedirect(url.toString()); // 302 // TODO: Yulup has a bug re TEMPORARY_REDIRECT (or is the problem that the load balancer is rewritting 302 reponses?!) response.setHeader("Location", url.toString()); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); // 301 //response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT); // 302 return response; } } } /** * Patch request with proxy settings re realm configuration */ private String getRequestURLQS(HttpServletRequest request, String addQS, boolean xml) { try { Realm realm = map.getRealm(request.getServletPath()); // TODO: Handle this exception more gracefully! 
if (realm == null) log.error("No realm found for path " +request.getServletPath()); String proxyHostName = realm.getProxyHostName(); int proxyPort = realm.getProxyPort(); String proxyPrefix = realm.getProxyPrefix(); URL url = null; url = new URL(request.getRequestURL().toString()); //if(proxyHostName != null || proxyPort >= null || proxyPrefix != null) { if(realm.isProxySet()) { if (proxyHostName != null) { url = new URL(url.getProtocol(), proxyHostName, url.getPort(), url.getFile()); } if (proxyPort >= 0) { url = new URL(url.getProtocol(), url.getHost(), proxyPort, url.getFile()); } else { url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile()); } if (proxyPrefix != null) { url = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile().substring(proxyPrefix.length())); } //log.debug("Proxy enabled for this realm resp. request: " + realm + ", " + url); } else { //log.debug("No proxy set for this realm resp. request: " + realm + ", " + url); } String urlQS = url.toString(); if (request.getQueryString() != null) { urlQS = urlQS + "?" + request.getQueryString(); if (addQS != null) urlQS = urlQS + "&" + addQS; } else { if (addQS != null) urlQS = urlQS + "?" 
+ addQS; } if (xml) urlQS = urlQS.replaceAll("&", "&amp;"); if(log.isDebugEnabled()) log.debug("Request: " + urlQS); return urlQS; } catch (Exception e) { log.error(e.getMessage(), e); return null; } } private void doPropfind(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Resource resource = getResource(request, response); //Node node = resource.getRealm().getSitetree().getNode(resource.getPath()); Node node = sitetree.getNode(resource.getRealm(),resource.getPath()); String depth = request.getHeader("Depth"); StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>"); sb.append("<multistatus xmlns=\"DAV:\">"); if (depth.equals("0")) { if (node.isCollection()) { sb.append(" <response>"); sb.append(" <href>"+request.getRequestURI()+"</href>"); sb.append(" <propstat>"); sb.append(" <prop>"); sb.append(" <resourcetype><collection/></resourcetype>"); sb.append(" <getcontenttype>httpd/unix-directory</getcontenttype>"); sb.append(" </prop>"); sb.append(" <status>HTTP/1.1 200 OK</status>"); sb.append(" </propstat>"); sb.append(" </response>"); } else if (node.isResource()) { sb.append(" <response>"); sb.append(" <href>"+request.getRequestURI()+"</href>"); sb.append(" <propstat>"); sb.append(" <prop>"); sb.append(" <resourcetype/>"); // TODO: Set mime type of node! sb.append(" <getcontenttype>application/octet-stream</getcontenttype>"); // TODO: Set content length and last modified! 
sb.append(" <getcontentlength>0</getcontentlength>"); sb.append(" <getlastmodified>1969.02.16</getlastmodified>"); // See http://www.webdav.org/specs/rfc2518.html sb.append(" <source>\n"); sb.append(" <link>\n"); sb.append(" <src>" + request.getRequestURI() + "</src>\n"); sb.append(" <dst>" + request.getRequestURI() + "?yanel.resource.modifiable.source</dst>\n"); sb.append(" </link>\n"); sb.append(" </source>\n"); sb.append(" </prop>"); sb.append(" <status>HTTP/1.1 200 OK</status>"); sb.append(" </propstat>"); sb.append(" </response>"); } else { log.error("Neither collection nor resource!"); } } else if (depth.equals("1")) { // TODO: Shouldn't one check with isCollection() first?! Node[] children = node.getChildren(); if (children != null) { for (int i = 0; i < children.length; i++) { if (children[i].isCollection()) { sb.append(" <response>\n"); sb.append(" <href>" + request.getRequestURI() + "/" + children[i].getName() + "/</href>\n"); sb.append(" <propstat>\n"); sb.append(" <prop>\n"); sb.append(" <displayname>" + children[i].getName() + "</displayname>\n"); sb.append(" <resourcetype><collection/></resourcetype>\n"); sb.append(" <getcontenttype>httpd/unix-directory</getcontenttype>\n"); sb.append(" </prop>\n"); sb.append(" <status>HTTP/1.1 200 OK</status>\n"); sb.append(" </propstat>\n"); sb.append(" </response>\n"); } else if(children[i].isResource()) { sb.append(" <response>\n"); sb.append(" <href>" + request.getRequestURI() + "/" + children[i].getName() + "?yanel.webdav=propfind1</href>\n"); sb.append(" <propstat>\n"); sb.append(" <prop>\n"); sb.append(" <displayname>" + children[i].getName() + "</displayname>\n"); sb.append(" <resourcetype/>\n"); // TODO: Set mime type of node! sb.append(" <getcontenttype>application/octet-stream</getcontenttype>\n"); // TODO: Set content length and last modified! 
sb.append(" <getcontentlength>0</getcontentlength>"); sb.append(" <getlastmodified>1969.02.16</getlastmodified>"); // See http://www.webdav.org/specs/rfc2518.html sb.append(" <source>\n"); sb.append(" <link>\n"); sb.append(" <src>" + request.getRequestURI() + "/" + children[i].getName() + "</src>\n"); sb.append(" <dst>" + request.getRequestURI() + "/" + children[i].getName() + "?yanel.resource.modifiable.source</dst>\n"); sb.append(" </link>\n"); sb.append(" </source>\n"); sb.append(" </prop>\n"); sb.append(" <status>HTTP/1.1 200 OK</status>\n"); sb.append(" </propstat>\n"); sb.append(" </response>\n"); } else { log.error("Neither collection nor resource: " + children[i].getPath()); } } } else { log.warn("No children!"); } } else if (depth.equals("infinity")) { log.warn("TODO: List children and their children and their children ..."); } else { log.error("No such depth: " + depth); } sb.append("</multistatus>"); //response.setStatus(javax.servlet.http.HttpServletResponse.SC_MULTI_STATUS); response.setStatus(207, "Multi-Status"); PrintWriter w = response.getWriter(); w.print(sb); } /** * HTTP OPTIONS implementation. */ @Override protected void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setHeader("DAV", "1"); // TODO: Is there anything else to do?! } /** * Authentication * @return null when authentication successful or has already been authenticated, otherwise return response generated by web authenticator */ private HttpServletResponse doAuthenticate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { // TODO/TBD: In the case of HTTP-BASIC/DIGEST one needs to check authentication with every request // TODO: enhance API with flag, e.g. session-based="true/false" // WARNING: One needs to separate doAuthenticate from the login screen generation! 
//if (getIdentity(request) != null) return null; WebAuthenticator wa = map.getRealm(request.getServletPath()).getWebAuthenticator(); return wa.doAuthenticate(request, response, map, reservedPrefix, xsltLoginScreenDefault, servletContextRealPath, sslPort); } catch (Exception e) { log.error(e.getMessage(), e); response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return response; } } /** * Escapes all reserved xml characters (&amp; &lt; &gt; &apos; &quot;) in a string. * @param s input string * @return string with escaped characters */ public static String encodeXML(String s) { s = s.replaceAll("&", "&amp;"); s = s.replaceAll("<", "&lt;"); s = s.replaceAll(">", "&gt;"); s = s.replaceAll("'", "&apos;"); s = s.replaceAll("\"", "&quot;"); return s; } /** * Do logout * @return true if logout was successful (and set a "Redirect response" for a regular logout and a "Neutron response" if auth scheme is Neutron) */ private boolean doLogout(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { if (yanelUI.isToolbarEnabled(request)) { // TODO: Check if WORLD has access to the toolbar //if (getRealm().getPolicyManager().authorize(path, new Identity(), new Usecase(TOOLBAR_USECASE))) { yanelUI.disableToolbar(request); } HttpSession session = request.getSession(true); // TODO: should we logout only from the current realm, or from all realms? 
// -> logout only from the current realm Realm realm = map.getRealm(request.getServletPath()); IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY); if (identityMap != null && identityMap.containsKey(realm.getID())) { log.info("Logout from realm: " + realm.getID()); identityMap.remove(realm.getID()); } String clientSupportedAuthScheme = getClientAuthenticationScheme(request); if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) { String neutronVersions = getClientSupportedNeutronVersions(request); // TODO: Reply according to which neutron versions the client supports // TODO: send some XML content, e.g. <logout-successful/> response.setContentType("text/plain; charset=" + DEFAULT_ENCODING); response.setStatus(HttpServletResponse.SC_OK); PrintWriter writer = response.getWriter(); writer.print("Neutron Logout Successful!"); return true; } if (log.isDebugEnabled()) log.debug("Regular Logout Successful!"); URL url = new URL(getRequestURLQS(request, null, false).toString()); String urlWithoutLogoutQS = url.toString().substring(0, url.toString().lastIndexOf("?")); // TODO: Check if url still has a query string (see above) urlWithoutLogoutQS = urlWithoutLogoutQS + "?yanel.refresh=" + new Date().getTime(); log.debug("Redirect to original request: " + urlWithoutLogoutQS); response.setHeader("Location", urlWithoutLogoutQS.toString()); response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY); // 301 return true; } catch (Exception e) { log.error(e, e); throw new ServletException(e.getMessage(), e); } } /** * Do create a new resource */ private HttpServletResponse doCreate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { log.error("Not implemented yet!"); return null; } static public String patchMimeType(String mimeType, HttpServletRequest request) throws ServletException, IOException { if (mimeType != null) { String httpAcceptMediaTypes = 
request.getHeader("Accept"); if (mimeType.equals("application/xhtml+xml") && httpAcceptMediaTypes != null && httpAcceptMediaTypes.indexOf("application/xhtml+xml") < 0) { log.info("Patch contentType with text/html because client (" + request.getHeader("User-Agent") + ") does not seem to understand application/xhtml+xml"); return "text/html"; } else if (mimeType.equals("text/html")) { log.info("Mime type was already set to text/html for request: " + request.getServletPath()); } } else { log.warn("No mime type returned for request: " + request.getServletPath()); } return mimeType; } /** * Intercept InputStream and log content ... */ private InputStream intercept(InputStream in) throws IOException { java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream(); byte[] buf = new byte[8192]; int bytesR; while ((bytesR = in.read(buf)) != -1) { baos.write(buf, 0, bytesR); } // Buffer within memory (TODO: Maybe replace with File-buffering ...) byte[] memBuffer = baos.toByteArray(); log.debug("InputStream: " + baos); return new java.io.ByteArrayInputStream(memBuffer); } /** * Generate a "Yanel" response (page information, 404, internal server error, ...) 
*/ private void setYanelOutput(HttpServletRequest request, HttpServletResponse response, Document doc) throws ServletException { String path = getResource(request, response).getPath(); String backToRealm = org.wyona.yanel.core.util.PathUtil.backToRealm(path); try { String yanelFormat = request.getParameter("yanel.format"); if(yanelFormat != null) { if (yanelFormat.equals("xml")) { response.setContentType("application/xml; charset=" + DEFAULT_ENCODING); XMLHelper.writeDocument(doc, response.getOutputStream()); /* OutputStream out = response.getOutputStream(); javax.xml.transform.TransformerFactory.newInstance().newTransformer().transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(out)); out.close(); */ } else if (yanelFormat.equals("json")) { log.error("TODO: JSON format not implemented yet!"); } else { log.error("No such format '" + yanelFormat + "' supported!"); } } else { String mimeType = patchMimeType("application/xhtml+xml", request); // TODO: doLogAccess response.setContentType(mimeType + "; charset=" + DEFAULT_ENCODING); // create identity transformer which serves as a dom-to-sax transformer TransformerIdentityImpl transformer = new TransformerIdentityImpl(); // create xslt transformer: SAXTransformerFactory saxTransformerFactory = (SAXTransformerFactory)SAXTransformerFactory.newInstance(); TransformerHandler xsltTransformer = saxTransformerFactory.newTransformerHandler(new StreamSource(xsltInfoAndException)); xsltTransformer.getTransformer().setParameter("yanel.back2realm", backToRealm); xsltTransformer.getTransformer().setParameter("yanel.reservedPrefix", reservedPrefix); // create i18n transformer: I18nTransformer2 i18nTransformer = new I18nTransformer2("global", getLanguage(request), yanelInstance.getMap().getRealm(request.getServletPath()).getDefaultLanguage()); CatalogResolver catalogResolver = new CatalogResolver(); i18nTransformer.setEntityResolver(new CatalogResolver()); // create serializer: Serializer 
serializer = SerializerFactory.getSerializer(SerializerFactory.XHTML_STRICT);

                // chain everything together (create a pipeline):
                xsltTransformer.setResult(new SAXResult(i18nTransformer));
                i18nTransformer.setResult(new SAXResult(serializer.asContentHandler()));
                serializer.setOutputStream(response.getOutputStream());

                // execute pipeline:
                transformer.transform(new DOMSource(doc), new SAXResult(xsltTransformer));
            }
        } catch (Exception e) {
            throw new ServletException(e.getMessage(), e);
        }
    }

    /**
     * Get language with the following prioritization: 1) yanel.meta.language query string parameter, 2) Accept-Language header, 3) default language of the realm
     */
    private String getLanguage(HttpServletRequest request) throws Exception {
        // TODO: Shouldn't this be replaced by Resource.getRequestedLanguage() or Resource.getContentLanguage() ?!
        String language = request.getParameter("yanel.meta.language");
        if (language == null) {
            language = request.getHeader("Accept-Language");
            if (language != null) {
                // NOTE(review): only the first tag of the Accept-Language list is used; quality values ("q=") are ignored.
                int commaIndex = language.indexOf(",");
                if (commaIndex > 0) {
                    language = language.substring(0, commaIndex);
                }
                // Strip the region subtag, e.g. "de-CH" -> "de"
                int dashIndex = language.indexOf("-");
                if (dashIndex > 0) {
                    language = language.substring(0, dashIndex);
                }
            }
        }
        if(language != null && language.length() > 0) return language;
        return yanelInstance.getMap().getRealm(request.getServletPath()).getDefaultLanguage();
    }

    /**
     * Write to output stream of modifiable resource
     */
    private void write(InputStream in, OutputStream out, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        if (out != null) {
            log.debug("Content-Type: " + request.getContentType());
            // TODO: Compare mime-type from response with mime-type of resource
            //if (contentType.equals("text/xml")) { ... }

            // Copy request body into the resource's output stream (8K chunks):
            byte[] buffer = new byte[8192];
            int bytesRead;
            while ((bytesRead = in.read(buffer)) != -1) {
                out.write(buffer, 0, bytesRead);
            }
            out.flush();
            out.close();

            // Confirm the save with a minimal XHTML page:
            StringBuffer sb = new StringBuffer();
            sb.append("<?xml version=\"1.0\"?>");
            sb.append("<html>");
            sb.append("<body>");
            sb.append("<p>Data has been saved ...</p>");
            sb.append("</body>");
            sb.append("</html>");
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
            response.setContentType("application/xhtml+xml; charset=" + DEFAULT_ENCODING);
            PrintWriter w = response.getWriter();
            w.print(sb);
            log.info("Data has been saved ...");
            return;
        } else {
            log.error("OutputStream is null!");
            StringBuffer sb = new StringBuffer();
            sb.append("<?xml version=\"1.0\"?>");
            sb.append("<html>");
            sb.append("<body>");
            sb.append("<p>Exception: OutputStream is null!</p>");
            sb.append("</body>");
            sb.append("</html>");
            // NOTE(review): Content-Type/status are set AFTER getWriter() here (unlike the success branch above);
            // some containers may ignore headers set after the writer is obtained — TODO confirm.
            PrintWriter w = response.getWriter();
            w.print(sb);
            response.setContentType("application/xhtml+xml; charset=" + DEFAULT_ENCODING);
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            return;
        }
    }

    /**
     * Get the identity from the given request (associated with a realm) or via the 'Authorization' HTTP header in the case of BASIC or DIGEST
     * @param request Client/Servlet request
     * @param map Map in order to determine realm
     * @return Identity if one exist, or otherwise an empty identity
     */
    private static Identity getIdentity(HttpServletRequest request, Map map) throws Exception {
        return getIdentity(request, map.getRealm(request.getServletPath()));
    }

    /**
     * @deprecated Use {@link #getIdentity(HttpSession, String)} instead
     * Get the identity from the HTTP session (associated with the given request) for a specific realm
     * @param session HTTP session of client
     * @param realm Realm
     * @return Identity if one exist, or otherwise null
     */
    public static Identity getIdentity(HttpSession session, Realm realm) throws Exception {
        // Delegates to the realm-ID based variant; the old implementation is kept below for reference.
        return getIdentity(session, realm.getID());
        /*
        if (session != null) {
            IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY);
            if (identityMap != null) {
                Identity identity = (Identity)identityMap.get(realm.getID());
                if (identity != null && !identity.isWorld()) {
                    return identity;
                }
            }
        }
        return null;
        */
    }

    /**
     * Get the identity from the HTTP session (associated with the given request) for a specific realm
     * @param session HTTP session of client
     * @param realmID Realm ID
     * @return Identity if one exist, or otherwise null
     */
    public static Identity getIdentity(HttpSession session, String realmID) throws Exception {
        if (session != null) {
            // One identity per realm is kept inside an IdentityMap session attribute.
            IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY);
            if (identityMap != null) {
                Identity identity = (Identity)identityMap.get(realmID);
                // The anonymous "world" identity is treated as "no identity".
                if (identity != null && !identity.isWorld()) {
                    return identity;
                }
            }
        }
        return null;
    }

    /**
     * Attach the identity to the HTTP session for a specific realm (associated with the given request)
     * @param session HTTP session of client
     * @param realm Realm
     */
    public static void setIdentity(Identity identity, HttpSession session, Realm realm) throws Exception {
        if (session != null) {
            IdentityMap identityMap = (IdentityMap)session.getAttribute(IDENTITY_MAP_KEY);
            if (identityMap == null) {
                // Lazily create the per-session map of realm-ID -> Identity.
                identityMap = new IdentityMap();
                session.setAttribute(YanelServlet.IDENTITY_MAP_KEY, identityMap);
            }
            //log.debug("Firstname: " + identity.getFirstname());
            identityMap.put(realm.getID(), identity); // INFO: Please note that the constructor Identity(User, String) is resolving group IDs (including parent group IDs) and hence these are "attached" to the session in order to improve performance during authorization checks
        } else {
            log.warn("Session is null!");
        }
    }

    /**
     * Get the identity from the given request/session (for a specific realm) or via the 'Authorization' HTTP header in the case of BASIC or DIGEST
     * @param request Client/Servlet request
     * @param realm Realm
     * @return Identity if one exist, or otherwise an empty identity
     */
    private static Identity
getIdentity(HttpServletRequest request, Realm realm) throws Exception {
        // First try the identity already attached to the session (if any).
        Identity identity = getIdentity(request.getSession(false), realm);
        if (identity != null) {
            return identity;
        }

        // HTTP BASIC Authentication (For clients such as for instance Sunbird, OpenOffice or cadaver)
        // IMPORTANT NOTE: BASIC Authentication needs to be checked on every request, because clients often do not support session handling
        String authorizationHeader = request.getHeader("Authorization");
        if (log.isDebugEnabled()) log.debug("No identity attached to session, hence check request authorization header: " + authorizationHeader);
        if (authorizationHeader != null) {
            if (authorizationHeader.toUpperCase().startsWith("BASIC")) {
                log.warn("Using BASIC authorization ..."); // TODO: Reformulate text ...

                // Get encoded user and password, comes after "BASIC "
                String userpassEncoded = authorizationHeader.substring(6);

                // Decode it, using any base 64 decoder
                // FIXME(review): sun.misc.BASE64Decoder is a non-public JDK API; replace with java.util.Base64.getDecoder().
                sun.misc.BASE64Decoder dec = new sun.misc.BASE64Decoder();
                String userpassDecoded = new String(dec.decodeBuffer(userpassEncoded));
                log.debug("Username and Password Decoded: " + userpassDecoded);
                // NOTE(review): split(":") breaks for passwords containing ':' and throws
                // ArrayIndexOutOfBoundsException for empty passwords — should split on the first ':' only.
                String[] up = userpassDecoded.split(":");
                String username = up[0];
                String password = up[1];
                // SECURITY(review): logs the plaintext password (debug level) — should be removed.
                log.debug("username: " + username + ", password: " + password);
                try {
                    String trueID = realm.getIdentityManager().getUserManager().getTrueId(username);
                    User user = realm.getIdentityManager().getUserManager().getUser(trueID);
                    if (user != null && user.authenticate(password)) {
                        return new Identity(user, username);
                    } else {
                        // NOTE(review): on failed BASIC auth no 401/WWW-Authenticate is sent here (see commented
                        // code below); the method falls through and returns the empty "world" identity.
                        log.warn("HTTP BASIC Authentication failed for " + username + " (True ID: '" + trueID + "')!");
                        /*
                        response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\"");
                        response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
                        PrintWriter writer = response.getWriter();
                        writer.print("BASIC Authentication Failed!");
                        return response;
                        */
                    }
                } catch (Exception e) {
                    throw new ServletException(e.getMessage(), e);
                }
            } else if (authorizationHeader.toUpperCase().startsWith("DIGEST")) {
                log.error("DIGEST is not implemented");
                /*
                authorized = false;
                response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
                response.setHeader("WWW-Authenticate", "DIGEST realm=\"" + realm.getName() + "\"");
                PrintWriter writer = response.getWriter();
                writer.print("DIGEST is not implemented!");
                */
            } else {
                log.warn("No such authorization type implemented: " + authorizationHeader);
            }
        }
        if(log.isDebugEnabled()) log.debug("No identity yet (Neither session nor header based! Identity is set to WORLD!)");
        // TBD: Should add world identity to the session?
        return new Identity();
    }

    /**
     * Create a DOM Document
     */
    static public Document getDocument(String namespace, String localname) throws Exception {
        return XMLHelper.createDocument(namespace, localname);
    }

    /**
     * Get the realm which is matching the servlet path of the given request.
     */
    private Realm getRealm(HttpServletRequest request) throws Exception {
        Realm realm = yanelInstance.getMap().getRealm(request.getServletPath());
        return realm;
    }

    /**
     * Generate response using a resource configuration
     * @param response Response which is being generated/completed
     * @param statusCode HTTP response status code (because one is not able to get status code from response)
     * @param rc Resource configuration
     * @return true if generation of response was successful or return false otherwise
     */
    private boolean generateResponseFromRTview(HttpServletRequest request, HttpServletResponse response, int statusCode, ResourceConfiguration rc, String path) throws ServletException {
        try {
            // Instantiate the resource for the given path/configuration, then render its view.
            Resource resource = yanelInstance.getResourceManager().getResource(getEnvironment(request, response), getRealm(request), path, rc);
            return generateResponseFromResourceView(request, response, statusCode, resource);
        } catch (Exception e) {
            throw new ServletException(e);
        }
    }

    /**
     * Generate response using a resource configuration
     * @param statusCode HTTP response status code (because one is not able to get status code from response)
     * @param resource Resource
     * @return true if generation of
response was successful or return false otherwise
     */
    private boolean generateResponseFromResourceView(HttpServletRequest request, HttpServletResponse response, int statusCode, Resource resource) throws Exception {
        String viewId = getViewID(request);
        // NOTE(review): resource is cast unchecked to ViewableV2 — callers must only pass viewable resources.
        View view = ((ViewableV2) resource).getView(viewId);
        if (view != null) {
            TrackingInformationV1 trackInfo = null;
            if (generateResponse(view, resource, request, response, statusCode, getDocument(NAMESPACE, "yanel"), -1, -1, trackInfo) != null) {
                return true;
            }
        }
        log.warn("No response has been generated: " + resource.getPath());
        return false;
    }

    /**
     * Get global data located below reserved prefix
     */
    private void getGlobalData(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        Resource resource = getResource(request, response);
        String path = resource.getPath();

        java.util.Map<String, String> properties = new HashMap<String, String>();
        final String pathPrefix = "/" + reservedPrefix + "/";
        final String ABOUT_PAGE_PATH = pathPrefix + "about.html"; // About Yanel
        final String ABOUT_REALM_PAGE_PATH = pathPrefix + "about-realm.html"; // About realm
        final String RESOURCE_TYPES_PATH_PREFIX = pathPrefix + "resource-types/";

        //XXX REFACTORME: in the cases where we simply use a resource-type's view
        Realm realm;
        Environment environment = getEnvironment(request, response);
        ResourceConfiguration rc;
        YanelGlobalResourceTypeMatcher RTmatcher = new YanelGlobalResourceTypeMatcher(pathPrefix, servletContextRealPath);
        try {
            realm = getRealm(request);
            rc = RTmatcher.getResourceConfiguration(environment, realm, path);
        } catch (Exception e) {
            throw new ServletException(e.getMessage(), e);
        }

        if (rc != null) {
            // A globally matched resource configuration handles the path.
            if (generateResponseFromRTview(request, response, -1, rc, path)) {
                return;
            }
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
            return;
        } else if (path.equals(ABOUT_PAGE_PATH)) { //XXX REFACTORME: we should define an "about" resource-type instead!
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
            response.setHeader("Content-Type", "text/html");
            PrintWriter w = response.getWriter();
            w.print(About.toHTML(yanelInstance.getVersion(), yanelInstance.getRevision(), yanelInstance.getTargetEnvironment()));
            return;
        } else if (path.equals(ABOUT_REALM_PAGE_PATH)) { //XXX REFACTORME: we should define an "about-realm" resource-type instead!
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
            response.setHeader("Content-Type", "text/html");
            PrintWriter w = response.getWriter();
            w.print(AboutRealm.toHTML(realm));
            return;
        } else if (path.startsWith(RESOURCE_TYPES_PATH_PREFIX)) {
            // Path shape: <prefix>/resource-types/<encoded-namespace>::<name>/<rest>
            final String[] namespaceURI_and_rest = path.substring(RESOURCE_TYPES_PATH_PREFIX.length()).split("::", 2);
            final String namespaceURI = namespaceURI_and_rest[0];
            final String[] name_and_rest = namespaceURI_and_rest[1].split("/", 2);
            final String name = name_and_rest[0];
            final String decoded_namespaceURI = HttpServletRequestHelper.decodeURIinURLpath('^', namespaceURI);
            // NOTE(review): duplicate debug statement below (unconditional + guarded) — one can be dropped.
            log.debug("decoded_namespaceURI: " + decoded_namespaceURI);
            if (log.isDebugEnabled()) log.debug("decoded_namespaceURI: "+decoded_namespaceURI);
            // FIXME(review): the replaceAll() literal below is truncated in this copy of the file
            // (probably originally repairing "http:/" to "http://") — restore from version control.
            final String namespace = ! decoded_namespaceURI.equals(namespaceURI) ? decoded_namespaceURI : namespaceURI.replaceAll("http:/", "http: rc = new ResourceConfiguration(name, namespace, properties);
            try {
                Resource resourceOfPrefix = yanelInstance.getResourceManager().getResource(environment, realm, path, rc);
                String htdocsPath;
                // Two htdocs scopes: yanel-reserved files vs. resource-type specific files.
                if (name_and_rest[1].startsWith(reservedPrefix + "/")) {
                    htdocsPath = "rtyanelhtdocs:" + name_and_rest[1].substring(reservedPrefix.length()).replace('/', File.separatorChar);
                } else {
                    htdocsPath = "rthtdocs:" + File.separatorChar + name_and_rest[1].replace('/', File.separatorChar);
                }
                SourceResolver resolver = new SourceResolver(resourceOfPrefix);
                Source source = resolver.resolve(htdocsPath, null);

                long sourceLastModified = -1;
                // Compare If-Modified-Since with lastModified and return 304 without content resp. check on ETag
                if (source instanceof YanelStreamSource) {
                    sourceLastModified = ((YanelStreamSource) source).getLastModified();
                    long ifModifiedSince = request.getDateHeader("If-Modified-Since");
                    if (log.isDebugEnabled()) log.debug("sourceLastModified <= ifModifiedSince: " + sourceLastModified + " <= " + ifModifiedSince);
                    if (ifModifiedSince != -1) {
                        if (sourceLastModified <= ifModifiedSince) {
                            response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_MODIFIED);
                            return;
                        }
                    }
                }

                InputStream htdocIn = ((StreamSource) source).getInputStream();
                if (htdocIn != null) {
                    log.debug("Resource-Type specific data: " + htdocsPath);
                    // TODO: Set more HTTP headers (size, etc.)
                    String mimeType = guessMimeType(FilenameUtils.getExtension(FilenameUtils.getName(htdocsPath)));
                    if(sourceLastModified >= 0) response.setDateHeader("Last-Modified", sourceLastModified);
                    response.setHeader("Content-Type", mimeType);

                    // Stream the htdocs file to the client in 8K chunks.
                    byte buffer[] = new byte[8192];
                    int bytesRead;
                    OutputStream out = response.getOutputStream();
                    while ((bytesRead = htdocIn.read(buffer)) != -1) {
                        out.write(buffer, 0, bytesRead);
                    }
                    htdocIn.close();
                    // allow client-side caching:
                    if (cacheExpires != 0) {
                        setExpiresHeader(response, cacheExpires);
                    }
                    return;
                } else {
                    log.error("No such file or directory: " + htdocsPath);
                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
                    return;
                }
            } catch (Exception e) {
                throw new ServletException(e.getMessage(), e);
            }
        } else {
            // Fallback: serve a static file from the servlet context's global htdocs directory.
            File globalFile = org.wyona.commons.io.FileUtil.file(servletContextRealPath, "htdocs" + File.separator + path.substring(pathPrefix.length()));
            if (globalFile.exists()) {
                log.debug("Global data: " + globalFile);
                // TODO: Set more HTTP headers (size, etc.)
String mimeType = guessMimeType(FilenameUtils.getExtension(globalFile.getName()));
                response.setHeader("Content-Type", mimeType);

                // Stream the global file to the client in 8K chunks.
                byte buffer[] = new byte[8192];
                int bytesRead;
                InputStream in = new java.io.FileInputStream(globalFile);
                OutputStream out = response.getOutputStream();
                while ((bytesRead = in.read(buffer)) != -1) {
                    out.write(buffer, 0, bytesRead);
                }
                in.close();
                // allow client-side caching:
                if (cacheExpires != 0) {
                    setExpiresHeader(response, cacheExpires);
                }
                return;
            } else {
                log.error("No such file or directory: " + globalFile);
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
                return;
            }
        }
    }

    /**
     * Set expire date within HTTP header
     */
    private void setExpiresHeader(HttpServletResponse response, int hours) {
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.HOUR_OF_DAY, hours);
        String expires = DateUtil.formatRFC822GMT(calendar.getTime());
        response.setHeader("Expires", expires);
    }

    /**
     * Generate response from a resource view, whereas it will be checked first if the resource already wrote the response (if so, then just return)
     * @param res Resource which handles the request in order to generate a response
     * @param statusCode HTTP response status code (because one is not able to get status code from response)
     * @param trackInfo Tracking information bean which might be updated by resource if resource is implementing trackable
     */
    private HttpServletResponse generateResponse(View view, Resource res, HttpServletRequest request, HttpServletResponse response, int statusCode, Document doc, long size, long lastModified, TrackingInformationV1 trackInfo) throws ServletException, IOException {
        //log.debug("Generate response: " + res.getPath());

        // TODO: It seems like no header fields are being set (e.g. Content-Length, ...). Please see below ...

        // INFO: Check if viewable resource has already created a response
        if (!view.isResponse()) {
            // NOTE(review): isResponse() == false apparently means the resource wrote the response itself — TODO confirm against View API.
            if(logAccessIsApplicable(view.getMimeType(), res)) {
                //log.debug("Mime type '" + view.getMimeType() + "' of request: " + request.getServletPath() + "?" + request.getQueryString());
                doLogAccess(request, response, statusCode, res, trackInfo);
            }
            log.debug("It seems that resource '" + res.getPath() + "' has directly created the response.");
            return response;
        }

        // Set mime type and encoding
        String mimeType = view.getMimeType();
        if (view.getEncoding() != null) {
            mimeType = patchMimeType(mimeType, request);
            response.setContentType(mimeType + "; charset=" + view.getEncoding());
        } else if (res.getConfiguration() != null && res.getConfiguration().getEncoding() != null) {
            mimeType = patchMimeType(mimeType, request);
            response.setContentType(mimeType + "; charset=" + res.getConfiguration().getEncoding());
        } else {
            // try to guess if we have to set the default encoding
            // NOTE(review): because && binds tighter than ||, a null mimeType falls through to
            // mimeType.equals(...) and throws NPE — the null check should parenthesize the whole disjunction.
            if (mimeType != null && mimeType.startsWith("text") || mimeType.equals("application/xml") || mimeType.equals("application/xhtml+xml") || mimeType.equals("application/atom+xml") || mimeType.equals("application/x-javascript")) {
                mimeType = patchMimeType(mimeType, request);
                response.setContentType(mimeType + "; charset=" + DEFAULT_ENCODING);
            } else {
                // probably binary mime-type, don't set encoding
                mimeType = patchMimeType(mimeType, request);
                response.setContentType(mimeType);
            }
        }

        if(logAccessIsApplicable(mimeType, res)) {
            //log.debug("Mime type '" + mimeType + "' of request: " + request.getServletPath() + "?" + request.getQueryString());
            doLogAccess(request, response, statusCode, res, trackInfo);
        }

        // Set HTTP headers:
        HashMap<?, ?> headers = view.getHttpHeaders();
        Iterator<?> iter = headers.keySet().iterator();
        while (iter.hasNext()) {
            String name = (String)iter.next();
            String value = (String)headers.get(name);
            if (log.isDebugEnabled()) {
                log.debug("set http header: " + name + ": " + value);
            }
            response.setHeader(name, value);
        }

        // INFO: Confirm DNT (do not track)
        String dntValue = request.getHeader("DNT");
        if (dntValue != null) {
            response.setHeader("DNT", dntValue);
        } else {
            //log.debug("No DNT (do not track) header set, hence do not echo.");
        }

        // Possibly embed toolbar:
        // TODO: Check if user is authorized to actually see toolbar (Current flaw: Enabled Toolbar, Login, Toolbar is enabled, Logout, Toolbar is still visible!)
        if (yanelUI.isToolbarEnabled(request)) {
            // TODO: Check whether resource configuration has toolbar configured as suppressed: if ("suppress".equals(res.getResConfiguration("yanel.toolbar"))) {
            if (mimeType != null && mimeType.indexOf("html") > 0) {
                // TODO: What about other query strings or frames or TinyMCE (e.g. link.htm)?
                if (request.getParameter(YANEL_RESOURCE_USECASE) == null) { // INFO: In the case of a yanel resource usecase do NOT add the toolbar
                    if (toolbarMasterSwitch.equals("on")) {
                        OutputStream os = response.getOutputStream();
                        try {
                            Usecase usecase = new Usecase(TOOLBAR_USECASE);
                            Identity identity = getIdentity(request, map);
                            Realm realm = map.getRealm(request.getServletPath());
                            String path = map.getPath(realm, request.getServletPath());
                            // NOTE: This extra authorization check is necessary within a multi-realm environment, because after activating the toolbar with a query string, the toolbar flag attached to the session will be ignored by doAccessControl(). One could possibly do this check within doAccessControl(), but could be a performance issue! Or as an alternative one could refactor the code, such that the toolbar session flag is realm aware.
                            if(realm.getPolicyManager().authorize(path, identity, usecase)) {
                                yanelUI.mergeToolbarWithContent(request, response, res, view);
                                return response;
                            } else {
                                log.warn("Toolbar authorization denied (Realm: '" + realm.getName() + "', User: '" + identity.getUsername() + "', Path: '" + path + "')!");
                            }
                        } catch (Exception e) {
                            String message = "Error merging toolbar into content: " + e.getMessage();
                            //log.error(message, e);
                            log.error(e, e);
                            Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception"));
                            exceptionElement.appendChild(doc.createTextNode(message));
                            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                            setYanelOutput(request, response, doc);
                            return response;
                        }
                    } else {
                        log.info("Toolbar has been disabled. Please check web.xml!");
                    }
                } else {
                    log.warn("Yanel resource usecase is not null, but set to '" + request.getParameter(YANEL_RESOURCE_USECASE) + "' and hence Yanel toolbar is suppressed/omitted in order to avoid that users are leaving the usecase because they might click on some toolbar menu item.");
                }
            } else {
                log.info("No HTML related mime type: " + mimeType);
            }
        } else {
            log.debug("Toolbar is turned off.");
        }

        InputStream is = view.getInputStream();
        if (is != null) {
            // Write actual content into response
            byte buffer[] = new byte[8192];
            int bytesRead;
            bytesRead = is.read(buffer);

            try {
                // Compare If-Modified-Since with lastModified and return 304 without content resp. check on ETag
                long ifModifiedSince = request.getDateHeader("If-Modified-Since");
                if (ifModifiedSince != -1) {
                    if (res instanceof ModifiableV2) {
                        long resourceLastMod = ((ModifiableV2)res).getLastModified();
                        //log.debug(resourceLastMod + " " + ifModifiedSince);
                        if (resourceLastMod <= ifModifiedSince) {
                            response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_MODIFIED);
                            return response;
                        }
                    } else {
                        // TODO: Many resources do not implement ModifiableV2 and hence never return a lastModified and hence the browser will never ask for ifModifiedSince!
                        //log.warn("Resource of path '" + res.getPath() + "' is not ModifiableV2 and hence cannot be cached!");
                        if (log.isDebugEnabled()) log.debug("Resource of path '" + res.getPath() + "' is not ModifiableV2 and hence cannot be cached!");
                    }
                }
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }

            if(lastModified >= 0) response.setDateHeader("Last-Modified", lastModified);

            if(size > 0) {
                if (log.isDebugEnabled()) log.debug("Size of " + request.getRequestURI() + ": " + size);
                response.setContentLength((int) size);
            } else {
                if (log.isDebugEnabled()) log.debug("No size for " + request.getRequestURI() + ": " + size);
            }

            // Check if InputStream is empty
            if (bytesRead != -1) {
                java.io.OutputStream os = response.getOutputStream();
                os.write(buffer, 0, bytesRead);
                while ((bytesRead = is.read(buffer)) != -1) {
                    os.write(buffer, 0, bytesRead);
                }
                os.close();
            } else {
                log.warn("Returned content size of request '" + request.getRequestURI() + "' is 0");
            }
            is.close();
            return response;
        } else {
            String message = "Returned InputStream of request '" + request.getRequestURI() + "' is null!";
            Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception"));
            exceptionElement.appendChild(doc.createTextNode(message));
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            setYanelOutput(request, response, doc);
            // FIXME(review): is == null in this branch, so is.close() below throws NullPointerException — remove it.
            is.close();
            return response;
        }
    }

    @Override
    public void
destroy() {
        super.destroy();
        // Shut down the Yanel core and the Quartz scheduler (if configured).
        yanelInstance.destroy();
        if (scheduler != null) {
            try {
                log.warn("Shutdown scheduler ...");
                scheduler.shutdown();
                //scheduler.shutdown(true); // INFO: true means to wait until all jobs have completed
            } catch(Exception e) {
                log.error(e, e);
            }
        }
        log.warn("Yanel webapp has been shut down.");
    }

    /**
     * Get usecase. Maps query strings, etc. to usecases, which then can be used for example within access control policies
     */
    private Usecase getUsecase(HttpServletRequest request) {
        // TODO: Replace hardcoded roles by mapping between roles and query strings ...
        // Default usecase is "view"; later checks may override it.
        Usecase usecase = new Usecase("view");

        String yanelResUsecaseValue = request.getParameter(YANEL_RESOURCE_USECASE);
        if (yanelResUsecaseValue != null) {
            if (yanelResUsecaseValue.equals("save")) {
                log.debug("Save data ...");
                usecase = new Usecase("write");
            } else if (yanelResUsecaseValue.equals("checkin")) {
                log.debug("Checkin data ...");
                usecase = new Usecase("write");
            } else if (yanelResUsecaseValue.equals("roll-back")) {
                log.debug("Roll back to previous revision ...");
                usecase = new Usecase("write");
            } else if (yanelResUsecaseValue.equals("introspection")) {
                if(log.isDebugEnabled()) log.debug("Dynamically generated introspection ...");
                usecase = new Usecase("introspection");
            } else if (yanelResUsecaseValue.equals("checkout")) {
                log.debug("Checkout data ...");
                usecase = new Usecase("open");
            } else if (yanelResUsecaseValue.equals("delete")) {
                log.info("Delete resource (yanel resource usecase delete)");
                usecase = new Usecase("delete");
            } else {
                log.warn("No such generic Yanel resource usecase: " + yanelResUsecaseValue + " (maybe some custom resource usecase)");
            }
        }

        String yanelUsecaseValue = request.getParameter(YANEL_USECASE);
        if (yanelUsecaseValue != null) {
            if (yanelUsecaseValue.equals("create")) {
                log.debug("Create new resource ...");
                usecase = new Usecase("resource.create");
            } else if (yanelUsecaseValue.equals("policy.read")) {
                usecase = new Usecase("policy.read");
            } else {
                log.warn("No such generic Yanel usecase: " + yanelUsecaseValue + " (maybe some custom usecase)");
            }
        }

        String contentType = request.getContentType();
        String method = request.getMethod();
        if (contentType != null && contentType.indexOf("application/atom+xml") >= 0 && (method.equals(METHOD_PUT) || method.equals(METHOD_POST))) {
            // TODO: Is posting atom entries different from a general post (see below)?!
            log.warn("Write/Checkin Atom entry ...");
            usecase = new Usecase("write");
            // TODO: METHOD_POST is not generally protected, but save, checkin, application/atom+xml are being protected. See doPost(....
        } else if (method.equals(METHOD_PUT)) {
            log.warn("Upload data ...");
            usecase = new Usecase("write");
        } else if (method.equals(METHOD_DELETE)) {
            log.warn("Delete resource (HTTP method delete)");
            usecase = new Usecase("delete");
        }

        String workflowTransitionValue = request.getParameter(YANEL_RESOURCE_WORKFLOW_TRANSITION);
        if (workflowTransitionValue != null) {
            // TODO: At the moment the authorization of workflow transitions are checked within executeWorkflowTransition or rather workflowable.doTransition(transition, revision)
            log.warn("Workflow transition is currently handled as view usecase: " + workflowTransitionValue);
            usecase = new Usecase("view");
            // TODO: Return workflow transition ID
            //usecase = new Usecase(transitionID);
        }

        String toolbarValue = request.getParameter("yanel.toolbar");
        if (toolbarValue != null && toolbarValue.equals("on")) {
            log.debug("Turn on toolbar ...");
            usecase = new Usecase(TOOLBAR_USECASE);
        }

        String yanelPolicyValue = request.getParameter(YANEL_ACCESS_POLICY_USECASE);
        if (yanelPolicyValue != null) {
            if (yanelPolicyValue.equals("create")) {
                usecase = new Usecase("policy.create");
            } else if (yanelPolicyValue.equals("read")) {
                usecase = new Usecase("policy.read");
            } else if (yanelPolicyValue.equals("update")) {
                usecase = new Usecase("policy.update");
            } else if (yanelPolicyValue.equals("delete")) {
                usecase = new Usecase("policy.delete");
            } else {
                log.warn("No such policy usecase: " + yanelPolicyValue);
            }
        }

        String showResourceMeta = request.getParameter(RESOURCE_META_ID_PARAM_NAME);
        if (showResourceMeta != null) {
            usecase = new Usecase(RESOURCE_META_ID_PARAM_NAME);
        }

        return usecase;
    }

    /**
     * Handle access policy requests (CRUD, whereas delete is not implemented yet!)
     * @param version Version of policy manager implementation
     */
    private void doAccessPolicyRequest(HttpServletRequest request, HttpServletResponse response, int version)  throws ServletException, IOException {
        try {
            // NOTE(review): viewId is never used in this method — candidate for removal.
            String viewId = getViewID(request);
            Realm realm = map.getRealm(request.getServletPath());
            ResourceConfiguration rc;
            // Pick the policy-manager resource configuration matching the requested implementation version.
            if (version == 2) {
                rc = getGlobalResourceConfiguration("policy-manager-v2_yanel-rc.xml", realm);
            } else {
                rc = getGlobalResourceConfiguration("policy-manager_yanel-rc.xml", realm);
            }
            String path = map.getPath(realm, request.getServletPath());
            if (generateResponseFromRTview(request, response, -1, rc, path)) {
                return;
            }
            log.error("Something went terribly wrong!");
            response.getWriter().print("Something went terribly wrong!");
            return;
        } catch(Exception e) {
            throw new ServletException(e.getMessage(), e);
        }
    }

    /**
     * Handle delete usecase
     */
    private void handleDeleteUsecase(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        String confirmed = request.getParameter("confirmed");
        if (confirmed != null) {
            // Client confirmed the deletion, actually delete now.
            String path = getResource(request, response).getPath();
            log.warn("Really delete resource at " + path);
            doDelete(request, response);
            return;
        } else {
            // Not confirmed yet: render a confirmation (or error) page.
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
            response.setContentType("text/html" + "; charset=" + "UTF-8");
            StringBuilder sb = new StringBuilder();
            Resource res = getResource(request, response);
            if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
                log.info("Delete has not been confirmed by client yet!");
                // FIXME(review): the confirmation-page markup literal below is truncated in this copy of the file — restore from version control.
                sb = new StringBuilder("<html xmlns=\"http:
            } else {
                String message = "Resource '" + res.getPath() + "' cannot
be deleted, because it does not implement interface ModifiableV2!";
                log.warn(message);
                // FIXME(review): the error-page markup literal below is truncated in this copy of the file — restore from version control.
                sb = new StringBuilder("<html xmlns=\"http:
            }
            PrintWriter w = response.getWriter();
            w.print(sb);
            return;
        }
    }

    /**
     * Get resource configuration from global location of the realm or if not available there, then from global location of Yanel
     *
     * @param resConfigName Filename of resource configuration
     * @param realm Current realm
     */
    private ResourceConfiguration getGlobalResourceConfiguration(String resConfigName, Realm realm) {
        return YanelGlobalResourceTypeMatcher.getGlobalResourceConfiguration(resConfigName, realm, servletContextRealPath);
    }

    /**
     * Handle a generic exception.
     * @param request The request object.
     * @param response The response object.
     * @param ex The exception to handle.
     */
    private void handleException(HttpServletRequest request, HttpServletResponse response, Exception ex) {
        try {
            Realm realm = yanelInstance.getMap().getRealm(request.getServletPath());
            String path = getResource(request, response).getPath();
            // Delegate the rendering of the exception to the generic exception handler resource type.
            ResourceConfiguration rc = getGlobalResourceConfiguration("generic-exception-handler_yanel-rc.xml", realm);
            BasicGenericExceptionHandlerResource resource = (BasicGenericExceptionHandlerResource) yanelInstance.getResourceManager().getResource(getEnvironment(request, response), getRealm(request), path, rc);
            resource.setException(ex);
            int statusCode = javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
            response.setStatus(statusCode);
            boolean hasBeenHandled = generateResponseFromResourceView(request, response, statusCode, resource);
            if(!hasBeenHandled) {
                log.error("Generic exception handler is broken!");
                log.error("Unable to output/handle the following exception:");
                log.error(ex, ex);
            }
        } catch (Exception e) {
            // Last resort: the handler itself failed, log both exceptions.
            log.error("Generic exception handler is broken!");
            log.error("Unable to handle the following exception:");
            log.error(ex, ex);
            log.error("Caught exception while handling the above exception:");
            log.error(e, e);
        }
    }

    /**
     * Generate a graceful 404 response
     * @param doc Debug/Meta document
     */
    private void do404(HttpServletRequest request, HttpServletResponse response, Document doc, String exceptionMessage) throws ServletException {
        log404.info("Referer: " + request.getHeader("referer"));
        log404.warn(request.getRequestURL().toString());
        // TODO: Log 404 per realm
        //org.wyona.yarep.core.Node node = realm.getRepository().getNode("/yanel-logs/404.txt");

        String message = "No such node/resource exception: " + exceptionMessage;
        log.warn(message);
        // Attach the exception to the debug/meta document (used as fallback output).
        Element exceptionElement = (Element) doc.getDocumentElement().appendChild(doc.createElementNS(NAMESPACE, "exception"));
        exceptionElement.appendChild(doc.createTextNode(message));
        exceptionElement.setAttributeNS(NAMESPACE, "status", "404");
        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
        try {
            Realm realm = yanelInstance.getMap().getRealm(request.getServletPath());
            String path = getResource(request, response).getPath();
            // Render the realm's (or Yanel's) configured 404 page.
            ResourceConfiguration rc = getGlobalResourceConfiguration("404_yanel-rc.xml", realm);
            if (generateResponseFromRTview(request, response, HttpServletResponse.SC_NOT_FOUND, rc, path)) {
                return;
            }
            log.error("404 response seems to be broken!");
            return;
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return;
        }
    }

    /**
     * Check if yanel resource usecase is 'roll back" usecase
     */
    private boolean isRollBack(HttpServletRequest request) {
        String yanelResUsecase = request.getParameter(YANEL_RESOURCE_USECASE);
        if (yanelResUsecase != null) {
            if (yanelResUsecase.equals("roll-back")) return true;
        }
        return false;
    }

    /**
     * Check if request comes from Neutron supporting client
     */
    private boolean isClientSupportingNeutron(HttpServletRequest request) {
        String neutronVersions = request.getHeader("Neutron");
        if (neutronVersions != null) {
            log.info("Neutron version(s) supported by client: " + neutronVersions);
            return true;
        }
        return false;
    }

    /**
     * Get Neutron versions which are supported by client
     */
    private String getClientSupportedNeutronVersions(HttpServletRequest request)
{
        return request.getHeader("Neutron");
    }

    /**
     * Get client authentication scheme
     */
    private String getClientAuthenticationScheme(HttpServletRequest request) {
        return request.getHeader("WWW-Authenticate");
    }

    /**
     * Respond with introspection
     */
    private void sendIntrospectionAsResponse(Resource res, Document doc, Element rootElement, HttpServletRequest request, HttpServletResponse response) throws ServletException {
        try {
            if (ResourceAttributeHelper.hasAttributeImplemented(res, "Introspectable", "1")) {
                // Resource supports introspection, return it as XML.
                response.setContentType("application/xml");
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
                response.getWriter().print(((IntrospectableV1)res).getIntrospection());
            } else {
                String message = "Resource '" + res.getPath() + "' is not introspectable!";
                log.warn(message);
                Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
                exceptionElement.appendChild(doc.createTextNode(message));
                setYanelOutput(request, response, doc);
            }
            return;
        } catch(Exception e) {
            log.error(e.getMessage(), e);
            Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
            exceptionElement.appendChild(doc.createTextNode(e.getMessage()));
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            setYanelOutput(request, response, doc);
            return;
        }
    }

    /**
     * Set/get meta data re resource
     */
    private Element getResourceMetaData(Resource res, Document doc, Element rootElement) {
        Element resourceElement = (Element) rootElement.appendChild(doc.createElement("resource"));
        ResourceConfiguration resConfig = res.getConfiguration();
        if (resConfig != null) {
            Element resConfigElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "config"));
            resConfigElement.setAttributeNS(NAMESPACE, "rti-name", resConfig.getName());
            resConfigElement.setAttributeNS(NAMESPACE, "rti-namespace", resConfig.getNamespace());
        } else {
            // NOTE(review): noResConfigElement is assigned but never read (element is still appended) — the local could be dropped.
            Element noResConfigElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "no-config"));
        }
        Element realmElement = (Element) resourceElement.appendChild(doc.createElementNS(NAMESPACE, "realm"));
        realmElement.setAttributeNS(NAMESPACE, "name", res.getRealm().getName());
        realmElement.setAttributeNS(NAMESPACE, "rid", res.getRealm().getID());
        realmElement.setAttributeNS(NAMESPACE, "prefix", res.getRealm().getMountPoint());
        Element identityManagerElement = (Element) realmElement.appendChild(doc.createElementNS(NAMESPACE, "identity-manager"));
        // NOTE(review): userManagerElement is likewise unused after being appended.
        Element userManagerElement = (Element) identityManagerElement.appendChild(doc.createElementNS(NAMESPACE, "user-manager"));
        return resourceElement;
    }

    /**
     * Append view descriptors to meta
     */
    private void appendViewDescriptors(Document doc, Element viewElement, ViewDescriptor[] vd) {
        if (vd != null) {
            for (int i = 0; i < vd.length; i++) {
                Element descriptorElement = (Element) viewElement.appendChild(doc.createElement("descriptor"));
                if (vd[i].getMimeType() != null) {
                    descriptorElement.appendChild(doc.createTextNode(vd[i].getMimeType()));
                }
                descriptorElement.setAttributeNS(NAMESPACE, "id", vd[i].getId());
            }
        } else {
            viewElement.appendChild(doc.createTextNode("No View Descriptors!"));
        }
    }

    /**
     * Log browser history of each user
     * @param resource Resource which handles the request
     * @param statusCode HTTP response status code (because one is not able to get status code from response)
     * @param trackInfo Tracking information bean
     */
    private void doLogAccess(HttpServletRequest request, HttpServletResponse response, int statusCode, Resource resource, TrackingInformationV1 trackInfo) {
        // See apache-tomcat-5.5.33/logs/localhost_access_log.2009-11-07.txt
        // 127.0.0.1 - - [07/Nov/2009:01:24:09 +0100] "GET /yanel/from-scratch-realm/de/index.html HTTP/1.1" 200 4464

        // Honor the DNT (Do Not Track) request header.
        if ("1".equals(request.getHeader("DNT"))) {
            if (log.isDebugEnabled()) {
                log.debug("Do not track: " + request.getRemoteAddr());
            }
            return;
        }

        try {
            Realm realm = map.getRealm(request.getServletPath());
            String[] tags = null;
            if (resource != null) {
                // Collect annotation tags from the resource (if it supports annotations).
                if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Annotatable", "1")) {
                    AnnotatableV1 anno = (AnnotatableV1) resource;
                    try {
                        tags = anno.getAnnotations();
                        //log.debug("Resource has tags: " + tags);
                    } catch (Exception ex) {
                        log.error(ex, ex);
                    }
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("Resource has no tags yet: " + resource.getPath());
                    }
                }
            } else {
                log.debug("Resource is null because access was probably denied: " + request.getServletPath());
            }

            // Build the access log message; tracking info (if present) takes precedence over resource tags.
            String accessLogMessage;
            if (trackInfo != null) {
                String[] trackingTags = trackInfo.getTags();
                if (trackingTags != null && trackingTags.length > 0) {
                    accessLogMessage = AccessLog.getLogMessage(request, response, realm.getUserTrackingDomain(), trackingTags, ACCESS_LOG_TAG_SEPARATOR);
                } else {
                    accessLogMessage = AccessLog.getLogMessage(request, response, realm.getUserTrackingDomain(), tags, ACCESS_LOG_TAG_SEPARATOR);
                }
                String pageType = trackInfo.getPageType();
                if (pageType != null) {
                    accessLogMessage = accessLogMessage + AccessLog.encodeLogField("pt", pageType);
                }
                String requestAction = trackInfo.getRequestAction();
                if (requestAction != null) {
                    accessLogMessage = accessLogMessage + AccessLog.encodeLogField("ra", requestAction);
                }
                HashMap<String, String> customFields = trackInfo.getCustomFields();
                if (customFields != null) {
                    for (java.util.Map.Entry field : customFields.entrySet()) {
                        accessLogMessage = accessLogMessage + AccessLog.encodeLogField((String) field.getKey(), (String) field.getValue());
                    }
                }
            } else {
                accessLogMessage = AccessLog.getLogMessage(request, response, realm.getUserTrackingDomain(), tags, ACCESS_LOG_TAG_SEPARATOR);
            }

            // TBD/TODO: What if user has logged out, but still has a persistent cookie?!
            Identity identity = getIdentity(request, map);
            if (identity != null && identity.getUsername() != null) {
                accessLogMessage = accessLogMessage + AccessLog.encodeLogField("u", identity.getUsername());
                /* TODO: This does not scale re many users ...
User user = realm.getIdentityManager().getUserManager().getUser(identity.getUsername()); // The log should be attached to the user, because realms can share a UserManager, but the UserManager API has no mean to save such data, so how should we do this? // What if realm ID is changing? String logPath = "/yanel-logs/browser-history/" + user.getID() + ".txt"; if (!realm.getRepository().existsNode(logPath)) { org.wyona.yarep.util.YarepUtil.addNodes(realm.getRepository(), logPath, org.wyona.yarep.core.NodeType.RESOURCE); } org.wyona.yarep.core.Node node = realm.getRepository().getNode(logPath); // Stream into node (append log entry, see for example log4j) // 127.0.0.1 - - [07/Nov/2009:01:24:09 +0100] "GET /yanel/from-scratch-realm/de/index.html HTTP/1.1" 200 4464 */ } String httpAcceptLanguage = request.getHeader("Accept-Language"); if (httpAcceptLanguage != null) { accessLogMessage = accessLogMessage + AccessLog.encodeLogField("a-lang", httpAcceptLanguage); } HttpSession session = request.getSession(true); if (session != null) { accessLogMessage = accessLogMessage + AccessLog.encodeLogField("sid", session.getId()); } if (statusCode >= 0) { accessLogMessage = accessLogMessage + AccessLog.encodeLogField("http-status", "" + statusCode); } else { accessLogMessage = accessLogMessage + AccessLog.encodeLogField("http-status", "" + HttpServletResponse.SC_OK); } String remoteIPAddr = request.getHeader("X-FORWARDED-FOR"); if (remoteIPAddr != null) { // INFO: We do not need to check realm.isProxySet() additionally, because some deployments are using a proxy without having set the Yanel proxy configuration, hence it is sufficient to just check whether an X-FORWARDED-FOR header is set accessLogMessage = accessLogMessage + AccessLog.encodeLogField("ip", remoteIPAddr); } else { if (log.isDebugEnabled()) { log.debug("No such request header: X-FORWARDED-FOR (hence fallback to request.getRemoteAddr())"); // INFO: For example in the case of AJP or if no proxy is used } accessLogMessage = 
accessLogMessage + AccessLog.encodeLogField("ip", request.getRemoteAddr()); // INFO: For performance reasons we do not use getRemoteHost(), but rather just log the IP address. } logAccess.info(accessLogMessage); //log.debug("Referer: " + request.getHeader(HTTP_REFERRER)); // INFO: Store last accessed page in session such that session manager can show user activity. if(session != null) { session.setAttribute(YANEL_LAST_ACCESS_ATTR, request.getServletPath()); //log.debug("Last access: " + request.getServletPath()); } } catch(Exception e) { // Catch all exceptions, because we do not want to throw exceptions because of possible logging browser history errors log.error(e, e); } } private void appendRevisionsAndWorkflow(Document doc, Element resourceElement, Resource res, HttpServletRequest request) throws Exception { if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) { WorkflowableV1 workflowableResource = null; Workflow workflow = null; String liveRevisionName = null; if (ResourceAttributeHelper.hasAttributeImplemented(res, "Workflowable", "1")) { workflowableResource = (WorkflowableV1)res; workflow = WorkflowHelper.getWorkflow(res); liveRevisionName = WorkflowHelper.getLiveRevision(res); } RevisionInformation[] revisionsInfo = ((VersionableV2)res).getRevisions(); Element revisionsElement = (Element) resourceElement.appendChild(doc.createElement("revisions")); if (revisionsInfo != null && revisionsInfo.length > 0) { for (int i = revisionsInfo.length - 1; i >= 0; i Element revisionElement = (Element) revisionsElement.appendChild(doc.createElement("revision")); log.debug("Revision: " + revisionsInfo[i].getName()); revisionElement.appendChild(XMLHelper.createTextElement(doc, "name", revisionsInfo[i].getName(), null)); log.debug("Date: " + revisionsInfo[i].getDate()); revisionElement.appendChild(XMLHelper.createTextElement(doc, "date", "" + revisionsInfo[i].getDate(), null)); if (revisionsInfo[i].getUser() != null) { log.debug("User: " + 
revisionsInfo[i].getUser()); revisionElement.appendChild(XMLHelper.createTextElement(doc, "user", revisionsInfo[i].getUser(), null)); } else { revisionElement.appendChild(doc.createElement("no-user")); } if (revisionsInfo[i].getComment() != null) { log.debug("Comment: " + revisionsInfo[i].getComment()); revisionElement.appendChild(XMLHelper.createTextElement(doc, "comment", revisionsInfo[i].getComment(), null)); } else { revisionElement.appendChild(doc.createElement("no-comment")); } // Add workflow info if (workflowableResource != null && workflow != null) { Element revisionWorkflowElement = (Element) revisionElement.appendChild(doc.createElement("workflow-state")); String wfState = workflowableResource.getWorkflowState(revisionsInfo[i].getName()); if (wfState == null) { wfState = workflow.getInitialState(); } if (liveRevisionName != null && revisionsInfo[i].getName().equals(liveRevisionName)) { revisionWorkflowElement.appendChild(doc.createTextNode(wfState + " (LIVE)")); } else { revisionWorkflowElement.appendChild(doc.createTextNode(wfState)); } } } } else { Element noRevisionsYetElement = (Element) resourceElement.appendChild(doc.createElement("no-revisions-yet")); } } else { Element notVersionableElement = (Element) resourceElement.appendChild(doc.createElement("not-versionable")); } } /** * Check whether mime type is html, pdf or video * @param mt Mime type */ private boolean isMimeTypeOk(String mt) { // TODO: Add more mime types or rather make it configurable // INFO: Only HTML pages and PDFs etc. should be logged, but no images, CSS, etc. 
Check the mime-type instead the suffix or use JavaScript or Pixel if (mt.indexOf("html") > 0 || mt.indexOf("pdf") > 0 || mt.indexOf("video") >= 0) { return true; } return false; } /** * Get workflow exception */ private static String getWorkflowException(String message) { StringBuilder sb = new StringBuilder(); sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"" + org.wyona.yanel.core.workflow.Workflow.NAMESPACE + "\" type=\"" + "workflow" + "\">"); sb.append("<message>" + message + "</message>"); sb.append("</exception>"); return sb.toString(); } /** * Check whether user agent is mobile device and if so, then set mobile flag inside session */ private void doMobile(HttpServletRequest request) { HttpSession session = request.getSession(true); String mobileDevice = (String) session.getAttribute(MOBILE_KEY); if (detectMobilePerRequest || mobileDevice == null) { String userAgent = request.getHeader("User-Agent"); //log.debug("User agent: " + userAgent); //log.debug("User agent screen: " + request.getHeader("UA-Pixels")); // INFO: UA-Pixels, UA-Color, UA-OS, UA-CPU // TODO: Lower case! session.setAttribute(YanelServlet.MOBILE_KEY, "false"); // INFO: First assume user agent is not a mobile device... 
for (int i = 0; i < mobileDevices.length; i++) { if (userAgent != null && userAgent.indexOf(mobileDevices[i]) > 0) { session.setAttribute(YanelServlet.MOBILE_KEY, mobileDevices[i]); //log.debug("This seems to be a mobile device: " + mobileDevices[i]); break; } } /* if (((String)session.getAttribute(YanelServlet.MOBILE_KEY)).equals("false")) { log.debug("This does not seem to be a mobile device: " + userAgent); } */ } else { //log.debug("Mobile device detection already done."); } } /** * Append annotations of resource to page meta document * @param doc Page meta document * @param resource Resource which might has some annotations */ private void appendAnnotations(Document doc, Resource resource) { if (ResourceAttributeHelper.hasAttributeImplemented(resource, "Annotatable", "1")) { AnnotatableV1 anno = (AnnotatableV1) resource; try { String[] tags = anno.getAnnotations(); if (tags != null && tags.length > 0) { //log.debug("Resource has tags: " + tags); Element annotationsElem = doc.createElementNS(NAMESPACE, "annotations"); doc.getDocumentElement().appendChild(annotationsElem); for (int i = 0; i < tags.length; i++) { Element annotationElem = doc.createElementNS(NAMESPACE, "annotation"); annotationElem.appendChild(doc.createTextNode(tags[i])); annotationsElem.appendChild(annotationElem); } } else { Element noAnnotationsYetElem = doc.createElementNS(NAMESPACE, "no-annotations-yet"); noAnnotationsYetElem.setAttribute("annotatable-v1", "true"); doc.getDocumentElement().appendChild(noAnnotationsYetElem); } } catch (Exception ex) { log.error(ex, ex); } } else { if (log.isDebugEnabled()) { log.debug("Resource has no tags yet: " + resource.getPath()); } Element noAnnotationsYetElem = doc.createElementNS(NAMESPACE, "no-annotations-yet"); noAnnotationsYetElem.setAttribute("annotatable-v1", "false"); doc.getDocumentElement().appendChild(noAnnotationsYetElem); } } /** * Append tracking information of resource to page meta document * @param doc Page meta document */ private void 
appendTrackingInformation(Document doc, TrackingInformationV1 trackInfo) { if (trackInfo != null) { Element trackInfoElem = doc.createElementNS(NAMESPACE, "tracking-info"); doc.getDocumentElement().appendChild(trackInfoElem); String[] trackingTags = trackInfo.getTags(); if (trackingTags != null && trackingTags.length > 0) { Element interestsElem = doc.createElementNS(NAMESPACE, "interests"); trackInfoElem.appendChild(interestsElem); for (int i = 0; i < trackingTags.length; i++) { Element interestElem = doc.createElementNS(NAMESPACE, "interest"); interestElem.appendChild(doc.createTextNode(trackingTags[i])); interestsElem.appendChild(interestElem); } } else { Element noInterestsElem = doc.createElementNS(NAMESPACE, "no-interests-yet"); trackInfoElem.appendChild(noInterestsElem); } String pageType = trackInfo.getPageType(); if (pageType != null) { Element pageTypeElem = doc.createElementNS(NAMESPACE, "page-type"); pageTypeElem.appendChild(doc.createTextNode(pageType)); trackInfoElem.appendChild(pageTypeElem); } String requestAction = trackInfo.getRequestAction(); if (requestAction != null) { Element requestActionElem = doc.createElementNS(NAMESPACE, "request-action"); requestActionElem.appendChild(doc.createTextNode(requestAction)); trackInfoElem.appendChild(requestActionElem); } HashMap<String, String> customFields = trackInfo.getCustomFields(); if (customFields != null) { Element customFieldsElem = doc.createElementNS(NAMESPACE, "custom-fields"); trackInfoElem.appendChild(customFieldsElem); for (java.util.Map.Entry field : customFields.entrySet()) { Element fieldElem = doc.createElementNS(NAMESPACE, "field"); fieldElem.setAttribute("name", (String) field.getKey()); fieldElem.setAttribute("value", (String) field.getValue()); customFieldsElem.appendChild(fieldElem); } } } else { log.debug("No tracking information."); Element noTrackInfoElem = doc.createElementNS(NAMESPACE, "no-tracking-information"); doc.getDocumentElement().appendChild(noTrackInfoElem); } } /** * 
Determine requested view ID (try to get it from session or query string) */ private String getViewID(HttpServletRequest request) { String viewId = null; String viewIdFromSession = (String) request.getSession(true).getAttribute(VIEW_ID_PARAM_NAME); if (viewIdFromSession != null) { //log.debug("It seems like the view id is set inside session: " + viewIdFromSession); viewId = viewIdFromSession; } if (request.getParameter(VIEW_ID_PARAM_NAME) != null) { viewId = request.getParameter(VIEW_ID_PARAM_NAME); } if (request.getParameter("yanel.format") != null) { // backwards compatible viewId = request.getParameter("yanel.format"); log.warn("For backwards compatibility reasons also consider parameter 'yanel.format', but which is deprecated. Please use '" + VIEW_ID_PARAM_NAME + "' instead."); } //log.debug("Tried to get view id from query string or session attribute: " + viewId); return viewId; } /** * Check whether access logging makes sense * @param mimeType Content type of requested resource * @param resource Resource/controller handling request */ private boolean logAccessIsApplicable(String mimeType, Resource resource) { if(logAccessEnabled) { // TODO: Check whether resource is trackable if (isTrackable(resource) || (mimeType != null && isMimeTypeOk(mimeType))) { return true; } else { //log.debug("Neither trackable nor a mime type which makes sense, hence do not track."); } } else { //log.debug("Tracking disabled globally."); } return false; } /** * Check whether a resource/controller is trackable * @param resource Resource/controller which might has the trackable interface implemented */ private boolean isTrackable(Resource resource) { return ResourceAttributeHelper.hasAttributeImplemented(resource, "Trackable", "1"); } }
package com.db.util;

import java.util.Comparator;
import java.util.Iterator;
import java.util.Vector;

/**
 * A tree with nodes that can have multiple ordered children. The
 * children of a node are ordered relative to their siblings,
 * not relative to the entire tree.
 *
 * @param <T> the type of object to store in this SortedSiblingTree.
 *
 * @author Dave Longley
 */
public class SortedSiblingTree<T>
{
   /**
    * The base sibling node. This node has no parents and its object is
    * always null.
    */
   protected SiblingNode mRoot;

   /**
    * Creates a new sorted sibling tree.
    */
   public SortedSiblingTree()
   {
      mRoot = new SiblingNode(null);
   }

   /**
    * Finds a sibling node based on its object (depth-first search).
    *
    * @param node the node to start searching at (null starts at the root).
    * @param object the object to search with.
    *
    * @return the sibling node or null if no match was found.
    */
   protected SiblingNode findNode(SiblingNode node, T object)
   {
      // if the starting node is null, set it to the root node
      if(node == null)
      {
         node = mRoot;
      }

      // check this node (null-safe equality: root stores null)
      T nodeObject = node.getObject();
      if(object == nodeObject || (object != null && object.equals(nodeObject)))
      {
         return node;
      }

      // check this node's children recursively
      SiblingNode rval = null;
      for(SiblingNode child: node.getChildren())
      {
         rval = findNode(child, object);
         if(rval != null)
         {
            break;
         }
      }

      return rval;
   }

   /**
    * Finds a sibling node based on its object.
    *
    * @param object the object to search with.
    *
    * @return the sibling node or null if no match was found.
    */
   protected SiblingNode findNode(T object)
   {
      return findNode(null, object);
   }

   /**
    * Adds a child to a parent.
    *
    * @param parent the parent object to add the child to.
    * @param child the child object to add.
    *
    * @return true if the child was added, false if not (e.g. the parent
    *         is not in this tree).
    */
   public boolean add(T parent, T child)
   {
      boolean rval = false;

      SiblingNode node = findNode(parent);
      if(node != null)
      {
         rval = node.addChild(new SiblingNode(child));
      }

      return rval;
   }

   /**
    * Adds a child as a root sibling.
    *
    * @param child the child object to add.
    *
    * @return true if the child was added, false if not.
    */
   public boolean add(T child)
   {
      return add(null, child);
   }

   /**
    * Removes a child from a parent.
    *
    * @param parent the parent object to remove the child from.
    * @param child the child object to remove.
    *
    * @return true if the child was removed, false if not.
    */
   public boolean remove(T parent, T child)
   {
      boolean rval = false;

      SiblingNode node = findNode(parent);
      if(node != null)
      {
         SiblingNode sn = findNode(node, child);
         if(sn != null)
         {
            rval = node.removeChild(sn);
         }
      }

      return rval;
   }

   /**
    * Gets all of the root siblings of this tree.
    *
    * @return a vector containing the root siblings. Calling "remove" or
    *         "add" on the returned vector will not affect the tree structure.
    */
   public Vector<T> getRootSiblings()
   {
      return getChildren(null);
   }

   /**
    * Gets the children of a parent.
    *
    * @param parent the parent to get the children of.
    *
    * @return a vector of the parent's children (empty if the parent is not
    *         in this tree). Calling "remove" or "add" on the returned vector
    *         will not affect the tree structure.
    */
   public Vector<T> getChildren(T parent)
   {
      Vector<T> children = new Vector<T>();

      SiblingNode node = findNode(parent);
      if(node != null)
      {
         for(SiblingNode sn: node.getChildren())
         {
            children.add(sn.getObject());
         }
      }

      return children;
   }

   /**
    * Returns the total number of entries in this tree.
    *
    * @return the total number of entries in this tree.
    */
   public int size()
   {
      return mRoot.getDescendantCount();
   }

   /**
    * A node in a sorted sibling tree.
    *
    * @author Dave Longley
    */
   public class SiblingNode
   {
      /**
       * The object stored in this node.
       */
      protected T mObject;

      /**
       * The parent of this node.
       */
      protected SiblingNode mParent;

      /**
       * The children of this sibling node.
       */
      protected Vector<SiblingNode> mChildren;

      /**
       * Creates a new sibling node.
       *
       * @param obj the object to store in the node.
       */
      public SiblingNode(T obj)
      {
         mObject = obj;
         mParent = null;
         mChildren = new Vector<SiblingNode>();
      }

      /**
       * Gets the object inside this node.
       *
       * @return the object inside of this node.
       */
      public T getObject()
      {
         return mObject;
      }

      /**
       * Sets the parent of this node.
       *
       * @param parent the parent node of this node.
       */
      public void setParent(SiblingNode parent)
      {
         mParent = parent;
      }

      /**
       * Gets the parent of this node.
       *
       * @return the parent of this node.
       */
      public SiblingNode getParent()
      {
         return mParent;
      }

      /**
       * Gets the children of this node.
       *
       * @return the children of node.
       */
      public Vector<SiblingNode> getChildren()
      {
         return mChildren;
      }

      /**
       * Adds a child to this node's children. If the child's object
       * implements Comparator, it is inserted before the first sibling
       * it compares less than; otherwise it is appended at the end.
       *
       * @param child the children node to add.
       *
       * @return true if the child was added, false if not.
       */
      @SuppressWarnings("unchecked")
      public boolean addChild(SiblingNode child)
      {
         boolean rval = false;

         T object = child.getObject();
         if(object instanceof Comparator)
         {
            Comparator<T> c = (Comparator<T>)object;

            // add the child according to its order
            Vector<SiblingNode> children = getChildren();
            int size = children.size();
            // BUG FIX: condition previously read "i < size && rval" with rval
            // starting false, so the ordered-insert loop never executed and
            // every comparator child was appended at the end.
            for(int i = 0; i < size && !rval; i++)
            {
               T siblingObject = children.get(i).getObject();
               if(c.compare(object, siblingObject) < 0)
               {
                  children.insertElementAt(child, i);
                  child.setParent(this);
                  rval = true;
               }
            }

            // if the child is the largest, add it to the end
            // (size unchanged means the loop above did not insert)
            if(size == children.size())
            {
               if(children.add(child))
               {
                  child.setParent(this);
                  rval = true;
               }
            }
         }
         else
         {
            // cannot compare objects, so arbitrarily add
            if(getChildren().add(child))
            {
               child.setParent(this);
               rval = true;
            }
         }

         return rval;
      }

      /**
       * Removes a child from this node's children.
       *
       * @param child the node to remove.
       *
       * @return true if the child was removed, false if not.
       */
      public boolean removeChild(SiblingNode child)
      {
         return getChildren().remove(child);
      }

      /**
       * Gets the siblings of this node (includes this node).
       *
       * @return the siblings of this node (includes this node).
       */
      public Vector<SiblingNode> getSiblings()
      {
         Vector<SiblingNode> siblings = null;

         if(mParent != null)
         {
            siblings = mParent.getChildren();
         }
         else
         {
            siblings = new Vector<SiblingNode>();
            siblings.add(this);
         }

         return siblings;
      }

      /**
       * Gets the total number of descendants this node has.
       *
       * @return the total number of descendants this node has.
       */
      public int getDescendantCount()
      {
         int rval = getChildren().size();

         for(SiblingNode child: getChildren())
         {
            rval += child.getDescendantCount();
         }

         return rval;
      }
   }
}
package fortran.ofp; import java.util.ArrayList; import java.util.Arrays; import java.util.logging.Level; import java.util.logging.Logger; import org.antlr.runtime.Token; import org.w3c.dom.Attr; import org.w3c.dom.Element; import fortran.ofp.parser.java.IActionEnums; import fortran.ofp.parser.java.IFortranParser; public class XMLPrinter extends XMLPrinterBase { private static final Logger LOG = Logger.getLogger(XMLPrinter.class.getName()); public XMLPrinter(String[] args, IFortranParser parser, String filename) { super(args, parser, filename); } protected void genericOperationForceOpen(int nodesCount) { ArrayList<Element> nodes = contextNodes(-nodesCount, nodesCount); contextOpen("operation"); if (nodesCount == 2) setAttribute("type", "unary"); else if (nodesCount > 2) setAttribute("type", "multiary"); else cleanUpAfterError("didn't expect nodesCount=" + nodesCount); for (Element node : nodes) { boolean needsTransform = !node.getTagName().equals("operand") && !node.getTagName().equals("operator"); if (needsTransform) contextOpen("operand"); moveHere(node); if (needsTransform) contextClose(); } } protected void genericOperationOpen(int numberOfOperators) { if (numberOfOperators > 0) { int nodesCount = 2 * numberOfOperators + 1; genericOperationForceOpen(nodesCount); } } protected void genericOperationClose(int numberOfOperators) { if (numberOfOperators > 0) contextClose(); } protected void genericLoopControl(boolean hasStep) { String[] contexts = { "lower-bound", "upper-bound", "step" }; int takenNodesCount = hasStep ? 
3 : 2; ArrayList<Element> takenNodes = contextNodes(-takenNodesCount, takenNodesCount); context = contextNode(-takenNodesCount - 1); for (int i = 0; i < takenNodes.size(); ++i) { contextOpen(contexts[i]); moveHere(takenNodes.get(i)); contextClose(); } contextClose(); } public void generic_name_list_part(Token id) { contextOpen("name"); setAttribute("id", id); if (verbosity >= 100) super.generic_name_list_part(id); contextClose(); } public void generic_name_list__begin() { if (context.getTagName().equals("specification") || context.getTagName().equals("file")) contextOpen("declaration"); super.generic_name_list__begin(); } public void specification_part(int numUseStmts, int numImportStmts, int numImplStmts, int numDeclConstructs) { if (context.getTagName().equals("header")) { contextClose("header"); contextOpen("body"); } if (context.getTagName().equals("declaration")) { LOG.log(Level.FINER, "closing unclosed declaration at specification_part"); contextClose("declaration"); } if (!context.getTagName().equals("specification")) contextOpen("specification"); contextCloseAllInner("specification"); if (verbosity >= 80) super.specification_part(numUseStmts, numImportStmts, numImplStmts, numDeclConstructs); setAttribute("uses", numUseStmts); setAttribute("imports", numImportStmts); setAttribute("implicits", numImplStmts); setAttribute("declarations", numDeclConstructs); contextClose(); contextOpen("statement"); } public void declaration_construct() { contextClose("declaration"); if (verbosity >= 100) super.declaration_construct(); contextOpen("declaration"); } public void execution_part_construct() { if (verbosity >= 100) super.execution_part_construct(); } public void specification_stmt() { if (verbosity >= 100) super.specification_stmt(); } public void executable_construct() { if (verbosity >= 100) super.executable_construct(); } public void action_stmt() { if (contextTryFind("statement") == null) { // TODO this ugly workaround should be removed contextClose(); Element 
element = contextNode(-1); contextOpen("statement"); moveHere(element); } if (verbosity >= 100) super.action_stmt(); contextClose("statement"); contextOpen("statement"); } public void keyword() { if (verbosity >= 100) super.keyword(); } public void name(Token id) { super.name(id); } public void constant(Token id) { super.constant(id); } public void scalar_constant() { if (verbosity >= 100) super.scalar_constant(); } public void literal_constant() { if (verbosity >= 100) super.literal_constant(); contextClose("literal"); } public void label(Token lbl) { boolean closedLoop = false; Element outerContext = context; while (outerContext != root) { if (outerContext.getTagName().equals("loop") && outerContext.getAttribute("label").equals(lbl.getText())) { context = outerContext; closedLoop = true; break; } outerContext = (Element) outerContext.getParentNode(); } super.label(lbl); if (closedLoop) contextOpen("statement"); } public void type_param_value(boolean hasExpr, boolean hasAsterisk, boolean hasColon) { Element value = hasExpr ? 
contextNode(-1): null; contextOpen("type-attribute"); if (hasExpr) moveHere(value); super.type_param_value(hasExpr, hasAsterisk, hasColon); contextClose(); } public void intrinsic_type_spec(Token keyword1, Token keyword2, int type, boolean hasKindSelector) { if (!context.getTagName().equals("declaration")) { // TODO: ensure being in body contextOpen("declaration"); } setAttribute("type", "variable"); super.intrinsic_type_spec(keyword1, keyword2, type, hasKindSelector); } public void kind_selector(Token token1, Token token2, boolean hasExpression) { if (hasExpression) { Element value = contextNode(-1); contextOpen("kind"); moveHere(value); } else { contextOpen("kind"); setAttribute("value", token2); } super.kind_selector(token1, token2, hasExpression); contextClose(); } public void int_literal_constant(Token digitString, Token kindParam) { if (kindParam != null) { Element kind = contextNode(-1); assert kind.getTagName().equals("kind-param"); contextOpen("literal"); moveHere(kind); } else { contextOpen("literal"); } setAttribute("type", "int"); setAttribute("value", digitString); super.int_literal_constant(digitString, kindParam); } public void boz_literal_constant(Token constant) { contextOpen("literal"); setAttribute("type", "int"); setAttribute("value", constant); super.boz_literal_constant(constant); } public void real_literal_constant(Token realConstant, Token kindParam) { if (kindParam != null) { Element kind = contextNode(-1); assert kind.getTagName().equals("kind-param"); contextOpen("literal"); moveHere(kind); } else { contextOpen("literal"); } setAttribute("type", "real"); setAttribute("value", realConstant); super.real_literal_constant(realConstant, kindParam); } public void char_selector(Token tk1, Token tk2, int kindOrLen1, int kindOrLen2, boolean hasAsterisk) { int[] attribute_types = new int[]{kindOrLen2, kindOrLen1}; contextOpen("type-attributes"); Element localContext = context; contextClose(); Element value = null; for(int attribute_type: 
attribute_types) { switch (attribute_type) { case IActionEnums.KindLenParam_none: break; case IActionEnums.KindLenParam_len: value = contextNode(-2); moveTo(localContext, value); contextRename(value, "type-attribute", "length"); break; case IActionEnums.KindLenParam_kind: value = contextNode(-2); Element prevContext = context; context = localContext; contextOpen("kind"); moveHere(value); contextClose(); context = prevContext; break; default: throw new IllegalArgumentException(Integer.toString(attribute_type)); } } context = localContext; if (value == null) { contextClose(); context.removeChild(localContext); } super.char_selector(tk1, tk2, kindOrLen1, kindOrLen2, hasAsterisk); if (value != null) contextClose(); } public void char_length(boolean hasTypeParamValue) { Element value = contextNode(-1); contextOpen("length"); moveHere(value); if (hasTypeParamValue) { moveHere(contextNodes(value)); context.removeChild(value); } super.char_length(hasTypeParamValue); contextClose(); } public void scalar_int_literal_constant() { if (verbosity >= 100) super.scalar_int_literal_constant(); contextClose("literal"); } public void char_literal_constant(Token digitString, Token id, Token str) { contextOpen("literal"); setAttribute("type", "char"); setAttribute("value", str); super.char_literal_constant(digitString, id, str); } public void logical_literal_constant(Token logicalValue, boolean isTrue, Token kindParam) { if (kindParam != null) { Element kind = contextNode(-1); assert kind.getTagName().equals("kind-param"); contextOpen("literal"); moveHere(kind); } else { contextOpen("literal"); } setAttribute("type", "bool"); setAttribute("value", isTrue); super.logical_literal_constant(logicalValue, isTrue, kindParam); } public void derived_type_stmt(Token label, Token keyword, Token id, Token eos, boolean hasTypeAttrSpecList, boolean hasGenericNameList) { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); setAttribute("type", "type"); 
super.derived_type_stmt(label, keyword, id, eos, hasTypeAttrSpecList, hasGenericNameList); } public void derived_type_spec(Token typeName, boolean hasTypeParamSpecList) { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); setAttribute("type", "variable"); super.derived_type_spec(typeName, hasTypeParamSpecList); } public void array_constructor() { context = contextNode(-1); // temporarily reopen previously-closed context if (verbosity >= 100) super.array_constructor(); contextClose(); // re-close previously closed context } public void ac_spec() { context = contextNode(-1); // temporarily reopen previously-closed context if (verbosity >= 100) super.ac_spec(); contextClose(); // re-close previously closed context } public void ac_value() { contextClose("value"); if (verbosity >= 100) super.ac_value(); contextOpen("value"); } public void ac_value_list__begin() { contextOpen("array-constructor-values"); if (verbosity >= 100) super.ac_value_list__begin(); contextOpen("value"); } public void ac_value_list(int count) { contextClose("value"); contextCloseAllInner("array-constructor-values", "array-constructor"); setAttribute("count", count); if (verbosity >= 100) super.ac_value_list(count); contextClose(); } public void ac_implied_do() { super.ac_implied_do(); contextRename("array-constructor-values", "array-constructor"); contextOpen("value"); } public void ac_implied_do_control(boolean hasStride) { genericLoopControl(hasStride); Element element = contextNode(-1); contextClose("value"); contextOpen("header"); moveHere(element); // contextClose("index-variable"); super.ac_implied_do_control(hasStride); contextClose(); } public void type_declaration_stmt(Token label, int numAttributes, Token eos) { super.type_declaration_stmt(label, numAttributes, eos); } public void declaration_type_spec(Token udtKeyword, int type) { ArrayList<Element> typeDeclarations = contextNodes(); contextOpen("type"); setAttribute("hasLength", false); setAttribute("hasKind", 
false); setAttribute("hasAttributes", false); Attr n; for (Element declaration : typeDeclarations) { switch (declaration.getTagName()) { case "intrinsic-type-spec": n = getAttribute("name"); if (n != null) new IllegalArgumentException(declaration.getTagName()); setAttribute("name", declaration.getAttribute("keyword1")); setAttribute("type", "intrinsic"); break; case "derived-type-spec": n = getAttribute("name"); if (n != null) new IllegalArgumentException(declaration.getTagName()); setAttribute("name", declaration.getAttribute("typeName")); setAttribute("type", "derived"); break; case "length": setAttribute("hasLength", true); break; case "kind": setAttribute("hasKind", true); break; case "type-attributes": setAttribute("hasAttributes", true); break; default: break; } moveHere(declaration); } super.declaration_type_spec(udtKeyword, type); contextClose(); } public void attr_spec(Token attrKeyword, int attr) { String nestIn = ""; switch (attr) { case IActionEnums.AttrSpec_access: // private break; case IActionEnums.AttrSpec_language_binding: // bind break; case IActionEnums.AttrSpec_ALLOCATABLE: nestIn = "allocatable"; break; case IActionEnums.AttrSpec_ASYNCHRONOUS: nestIn = "asynchronous"; break; case IActionEnums.AttrSpec_CODIMENSION: nestIn = "codimension"; break; case IActionEnums.AttrSpec_DIMENSION: // dimension break; case IActionEnums.AttrSpec_EXTERNAL: nestIn = "external"; break; case IActionEnums.AttrSpec_INTENT: // intent break; case IActionEnums.AttrSpec_INTRINSIC: nestIn = "intrinsic"; break; case IActionEnums.AttrSpec_OPTIONAL: nestIn = "optional"; break; case IActionEnums.AttrSpec_PARAMETER: nestIn = "parameter"; break; case IActionEnums.AttrSpec_POINTER: nestIn = "pointer"; break; case IActionEnums.AttrSpec_PROTECTED: nestIn = "protected"; break; case IActionEnums.AttrSpec_SAVE: nestIn = "save"; break; case IActionEnums.AttrSpec_TARGET: nestIn = "target"; break; case IActionEnums.AttrSpec_VALUE: nestIn = "value"; break; case 
    IActionEnums.AttrSpec_VOLATILE:
        nestIn = "volatile";
        break;
    default:
        throw new IllegalArgumentException(Integer.toString(attr) + " - " + attrKeyword);
    }
    if (nestIn.length() > 0)
        contextOpen("attribute-" + nestIn);
    super.attr_spec(attrKeyword, attr);
    if (nestIn.length() > 0)
        contextClose();
}

// Finishes the current "variable" element and opens a fresh one for the next
// declared entity.
public void entity_decl(Token id, boolean hasArraySpec, boolean hasCoarraySpec,
        boolean hasCharLength, boolean hasInitialization) {
    contextCloseAllInner("variable");
    super.entity_decl(id, hasArraySpec, hasCoarraySpec, hasCharLength, hasInitialization);
    setAttribute("name", id);
    setAttribute("hasInitialValue", hasInitialization);
    contextClose();
    contextOpen("variable");
}

public void entity_decl_list__begin() {
    super.entity_decl_list__begin();
    contextOpen("variable");
}

public void entity_decl_list(int count) {
    contextClose("variable");
    super.entity_decl_list(count);
}

// Wraps the most recent node (the initializer expression) in "initial-value".
public void initialization(boolean hasExpr, boolean hasNullInit) {
    Element initialValue = contextNode(-1);
    contextOpen("initial-value");
    moveHere(initialValue);
    super.initialization(hasExpr, hasNullInit);
    contextClose();
}

public void access_spec(Token keyword, int type) {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    super.access_spec(keyword, type);
}

public void language_binding_spec(Token keyword, Token id, boolean hasName) {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    super.language_binding_spec(keyword, id, hasName);
}

public void array_spec(int count) {
    contextCloseAllInner("dimensions");
    if (verbosity >= 100)
        super.array_spec(count);
    setAttribute("count", count);
    contextClose();
}

// Emits one "dimension" element describing an array-spec entry.
// The first switch deliberately falls through: the expr_colon_expr case grabs
// the second-to-last node, then all value-carrying cases grab the last node.
public void array_spec_element(int type) {
    Element value = null;
    Element value2 = null;
    switch (type) {
    case IActionEnums.ArraySpecElement_expr_colon_expr:
        value2 = contextNode(-2);
    case IActionEnums.ArraySpecElement_expr:
    case IActionEnums.ArraySpecElement_expr_colon:
    case IActionEnums.ArraySpecElement_expr_colon_asterisk:
        value = contextNode(-1);
        break;
    case IActionEnums.ArraySpecElement_asterisk:
    case IActionEnums.ArraySpecElement_colon:
        break;
    default:
        throw new IllegalArgumentException(Integer.toString(type));
    }
    if (!context.getTagName().equals("dimensions"))
        contextOpen("dimensions");
    contextOpen("dimension");
    switch (type) {
    case IActionEnums.ArraySpecElement_expr:
        setAttribute("type", "simple");
        moveHere(value);
        break;
    case IActionEnums.ArraySpecElement_expr_colon:
        setAttribute("type", "upper-bound-assumed-shape");
        moveHere(value);
        break;
    case IActionEnums.ArraySpecElement_expr_colon_expr:
        setAttribute("type", "range");
        contextOpen("range");
        contextOpen("lower-bound");
        moveHere(value2);
        contextClose();
        contextOpen("upper-bound");
        moveHere(value);
        contextClose();
        contextClose();
        break;
    case IActionEnums.ArraySpecElement_expr_colon_asterisk:
        setAttribute("type", "upper-bound-assumed-size");
        moveHere(value);
        break;
    case IActionEnums.ArraySpecElement_asterisk:
        setAttribute("type", "assumed-size");
        break;
    case IActionEnums.ArraySpecElement_colon:
        setAttribute("type", "assumed-shape");
        break;
    default:
        throw new IllegalArgumentException(Integer.toString(type));
    }
    super.array_spec_element(type);
    contextClose();
}

public void intent_spec(Token intentKeyword1, Token intentKeyword2, int intent) {
    contextOpen("intent");
    switch (intent) {
    case IActionEnums.IntentSpec_IN:
        setAttribute("type", "in");
        break;
    case IActionEnums.IntentSpec_OUT:
        setAttribute("type", "out");
        break;
    case IActionEnums.IntentSpec_INOUT:
        setAttribute("type", "inout");
        break;
    default:
        throw new IllegalArgumentException(Integer.toString(intent));
    }
    if (verbosity >= 100)
        super.intent_spec(intentKeyword1, intentKeyword2, intent);
    contextClose();
}

public void access_id_list__begin() {
    // contextOpen("access-list");
    if (verbosity >= 100)
        super.access_id_list__begin();
}

public void access_id_list(int count) {
    super.access_id_list(count);
    // contextClose("access-list");
}

// NOTE: this method continues on the next source line of the chunk.
public void allocatable_decl_list__begin() {
    if
(!context.getTagName().equals("declaration")) contextOpen("declaration"); setAttribute("type", "allocatables"); super.allocatable_decl_list__begin(); } public void asynchronous_stmt(Token label, Token keyword, Token eos) { if (!context.getTagName().equals("declaration")) { Element value = contextNode(-1); if (value.getTagName() != "names") cleanUpAfterError("tag name is not 'names' but '" + value.getTagName() + "'"); contextOpen("declaration"); moveHere(value); } super.asynchronous_stmt(label, keyword, eos); } public void codimension_decl_list__begin() { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); setAttribute("type", "codimensions"); super.codimension_decl_list__begin(); } public void data_stmt_object() { if (verbosity >= 100) super.data_stmt_object(); } public void data_stmt_object_list__begin() { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); setAttribute("type", "data"); super.data_stmt_object_list__begin(); } public void data_stmt_value(Token asterisk) { contextCloseAllInner("values"); if (verbosity >= 100) super.data_stmt_value(asterisk); } public void hollerith_literal_constant(Token hollerithConstant) { contextOpen("literal"); setAttribute("type", "hollerith"); setAttribute("value", hollerithConstant); super.hollerith_literal_constant(hollerithConstant); } public void dimension_stmt(Token label, Token keyword, Token eos, int count) { contextCloseAllInner("variables"); setAttribute("count", count); super.dimension_stmt(label, keyword, eos, count); contextClose(); setAttribute("type", "variable-dimensions"); } public void dimension_decl(Token id) { Element value = contextNode(-1); if (!context.getTagName().equals("variables")) { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); contextOpen("variables"); } contextOpen("variable"); setAttribute("name", id); moveHere(value); /* if (contextTryFind("declaration") == null) { contextOpen("declaration"); setAttribute("type", 
        "dimension");
    }
    */
    super.dimension_decl(id);
    contextClose("variable");
}

public void named_constant_def_list__begin() {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    setAttribute("type", "parameter");
    super.named_constant_def_list__begin();
}

// Wraps one PARAMETER definition (name + value expression) in "constant".
public void named_constant_def(Token id) {
    Element value = contextNode(-1);
    contextOpen("constant");
    setAttribute("name", id);
    moveHere(value);
    if (verbosity >= 100)
        super.named_constant_def(id);
    contextClose();
}

public void pointer_stmt(Token label, Token keyword, Token eos) {
    super.pointer_stmt(label, keyword, eos);
    if (!context.getTagName().equals("declaration"))
        LOG.warning("pointer_stmt in unexpected context");
    setAttribute("type", "pointer");
}

public void pointer_decl_list__begin() {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    super.pointer_decl_list__begin();
}

public void pointer_decl(Token id, boolean hasSpecList) {
    contextOpen("name");
    super.pointer_decl(id, hasSpecList);
    setAttribute("id", id);
    contextClose("name");
}

public void save_stmt(Token label, Token keyword, Token eos, boolean hasSavedEntityList) {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    setAttribute("type", "save");
    super.save_stmt(label, keyword, eos, hasSavedEntityList);
}

public void saved_entity(Token id, boolean isCommonBlockName) {
    contextOpen("name");
    super.saved_entity(id, isCommonBlockName);
    setAttribute("id", id);
    contextClose();
}

public void target_decl_list__begin() {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    setAttribute("type", "targets");
    if (verbosity >= 100)
        super.target_decl_list__begin();
}

public void target_decl_list(int count) {
    // TODO Auto-generated method stub
    super.target_decl_list(count);
}

public void value_stmt(Token label, Token keyword, Token eos) {
    // TODO: get also label node if there is one
    Element value = contextNode(-1);
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    setAttribute("type", "value");
    moveHere(value);
    super.value_stmt(label, keyword, eos);
}

public void volatile_stmt(Token label, Token keyword, Token eos) {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    setAttribute("type", "volatile");
    super.volatile_stmt(label, keyword, eos);
}

// Emits an "implicit" declaration and immediately opens a fresh declaration
// context, since the implicit statement ends the current one.
public void implicit_stmt(Token label, Token implicitKeyword, Token noneKeyword,
        Token eos, boolean hasImplicitSpecList) {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    if (verbosity >= 20)
        super.implicit_stmt(label, implicitKeyword, noneKeyword, eos, hasImplicitSpecList);
    setAttribute("type", "implicit");
    setAttribute("subtype", noneKeyword == null ? "some" : "none");
    contextClose("declaration");
    contextOpen("declaration");
}

public void letter_spec(Token id1, Token id2) {
    contextOpen("letter-range");
    setAttribute("begin", id1);
    setAttribute("end", id2);
    if (verbosity >= 100)
        super.letter_spec(id1, id2);
    contextClose();
}

public void namelist_stmt(Token label, Token keyword, Token eos, int count) {
    contextCloseAllInner("namelists");
    super.namelist_stmt(label, keyword, eos, count);
    setAttribute("count", count);
}

public void namelist_group_name(Token id) {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    setAttribute("type", "namelists");
    contextOpen("namelists");
    contextOpen("names");
    if (verbosity >= 100)
        super.namelist_group_name(id);
    setAttribute("id", id);
}

public void namelist_group_object_list(int count) {
    contextCloseAllInner("names");
    setAttribute("count", count);
    if (verbosity >= 100)
        super.namelist_group_object_list(count);
    contextClose();
}

public void equivalence_set_list__begin() {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    setAttribute("type", "equivalence");
    super.equivalence_set_list__begin();
    contextOpen("equivalent");
}

public void equivalence_set_list(int count) {
    contextClose("equivalent");
    super.equivalence_set_list(count);
}

public void equivalence_object() {
    contextClose("equivalent");
    if (verbosity >= 100)
        super.equivalence_object();
    contextOpen("equivalent");
}

public void equivalence_object_list__begin() {
    // TODO Auto-generated method stub
    super.equivalence_object_list__begin();
}

public void equivalence_object_list(int count) {
    // TODO Auto-generated method stub
    super.equivalence_object_list(count);
}

public void common_block_name(Token id) {
    if (!context.getTagName().equals("declaration"))
        contextOpen("declaration");
    setAttribute("type", "common");
    super.common_block_name(id);
}

public void variable() {
    if (verbosity >= 100)
        super.variable();
    setAttribute("type", "variable");
    contextClose("name");
}

public void designator_or_func_ref() {
    if (verbosity >= 100)
        super.designator_or_func_ref();
    setAttribute("type", "ambiguous");
    contextClose("name");
}

// Builds a "range" element for a substring reference.
// NOTE(review): when hasUpperBound && !hasLowerBound, upperBound stays null
// (it copies lowerBound, which was never read from the context), so the upper
// bound node appears to be dropped — confirm against parser invariants.
public void substring_range(boolean hasLowerBound, boolean hasUpperBound) {
    Element lowerBound = null;
    Element upperBound = null;
    if (hasLowerBound)
        lowerBound = contextNode(-1);
    if (hasUpperBound) {
        upperBound = lowerBound;
        if (hasLowerBound)
            lowerBound = contextNode(-2);
        else
            lowerBound = null;
    }
    contextOpen("name");
    contextOpen("range");
    if (lowerBound != null) {
        contextOpen("lower-bound");
        moveHere(lowerBound);
        contextClose();
    }
    if (upperBound != null) {
        contextOpen("upper-bound");
        moveHere(upperBound);
        contextClose();
    }
    if (verbosity >= 100)
        super.substring_range(hasLowerBound, hasUpperBound);
    contextClose();
}

// Collapses numPartRef nested "name" elements into one, joining their ids
// with "%" (Fortran component access).
public void data_ref(int numPartRef) {
    for (int i = 1; i < numPartRef; ++i) {
        assert context.getTagName().equals("name");
        Element innerName = context;
        ArrayList<Element> elements = contextNodes();
        Attr innerNameId = getAttribute("id");
        contextClose();
        assert context.getTagName().equals("name");
        moveHere(elements);
        setAttribute("id", getAttribute("id").getValue() + "%" + innerNameId.getValue());
        context.removeChild(innerName);
    }
    super.data_ref(numPartRef);
}

// Opens a "name" element for a part reference, pulling in its subscripts node.
public void part_ref(Token id, boolean hasSectionSubscriptList, boolean hasImageSelector) {
    Element e = null;
    if (hasSectionSubscriptList) {
        e = contextNode(-1);
        if (!e.getTagName().equals("subscripts"))
            cleanUpAfterError("tag name is not 'subscripts' but '" + e.getTagName() + "'");
    }
    contextOpen("name");
    setAttribute("id", id);
    setAttribute("hasSubscripts", hasSectionSubscriptList);
    if (hasSectionSubscriptList)
        moveHere(e);
    if (verbosity >= 60)
        super.part_ref(id, hasSectionSubscriptList, hasImageSelector);
}

// Emits one "subscript" element: empty, simple, or a lower/upper/step range.
public void section_subscript(boolean hasLowerBound, boolean hasUpperBound,
        boolean hasStride, boolean isAmbiguous) {
    // contextCloseAllInner("subscript");
    Element outerContext = context;
    contextOpen("subscript");
    if (!hasLowerBound && !hasUpperBound && !hasStride)
        setAttribute("type", "empty");
    else if (hasLowerBound && !hasUpperBound && !hasStride) {
        setAttribute("type", "simple");
        moveHere(contextNode(outerContext, -2));
    } else {
        setAttribute("type", "range");
        Element lowerBound = null;
        Element upperBound = null;
        Element step = null;
        contextOpen("range");
        if (hasLowerBound) {
            lowerBound = contextOpen("lower-bound");
            contextClose();
        }
        if (hasUpperBound) {
            upperBound = contextOpen("upper-bound");
            contextClose();
        }
        if (hasStride) {
            step = contextOpen("step");
            contextClose();
        }
        contextClose();
        // Bounds are filled back-to-front: the stride is the most recent node.
        if (hasStride)
            moveTo(step, contextNode(outerContext, -2));
        if (hasUpperBound)
            moveTo(upperBound, contextNode(outerContext, -2));
        if (hasLowerBound)
            moveTo(lowerBound, contextNode(outerContext, -2));
    }
    if (verbosity >= 80)
        super.section_subscript(hasLowerBound, hasUpperBound, hasStride, isAmbiguous);
    contextClose();
}

public void section_subscript_list__begin() {
    super.section_subscript_list__begin();
    // contextOpen("subscript");
}

public void section_subscript_list(int count) {
    // contextClose("subscript");
    super.section_subscript_list(count);
}

// Wraps the allocation list (and optional alloc-opt list) in "allocate".
public void allocate_stmt(Token label, Token allocateKeyword, Token eos,
        boolean hasTypeSpec, boolean hasAllocOptList) {
    /* if (hasAllocOptList)
    cleanUpAfterError("didn't expect hasAllocOptList=" + hasAllocOptList); */
    int movedCount = 1 + (hasAllocOptList ? 1 : 0);
    ArrayList<Element> elements = contextNodes(-movedCount, movedCount);
    contextOpen("allocate");
    moveHere(elements);
    super.allocate_stmt(label, allocateKeyword, eos, hasTypeSpec, hasAllocOptList);
    contextClose();
}

public void alloc_opt(Token allocOpt) {
    contextCloseAllInner("keyword-arguments");
    Element element = contextNode(-1);
    contextOpen("keyword-argument");
    setAttribute("name", allocOpt);
    moveHere(element);
    if (verbosity >= 100)
        super.alloc_opt(allocOpt);
    contextClose();
}

// Ensures each allocation is wrapped in an "expression" element.
public void allocation(boolean hasAllocateShapeSpecList, boolean hasAllocateCoarraySpec) {
    if (hasAllocateShapeSpecList || hasAllocateCoarraySpec)
        cleanUpAfterError("didn't expect hasAllocateShapeSpecList=" + hasAllocateShapeSpecList
                + " hasAllocateCoarraySpec=" + hasAllocateCoarraySpec);
    Element element = contextNode(-1);
    if (element.getTagName().equals("expression"))
        context = element;
    else {
        contextOpen("expression");
        moveHere(element);
    }
    super.allocation(hasAllocateShapeSpecList, hasAllocateCoarraySpec);
    contextClose();
}

public void allocate_object() {
    setAttribute("type", "variable");
    contextClose("name");
    Element element = contextNode(-1);
    contextOpen("expression");
    moveHere(element);
    if (verbosity >= 100)
        super.allocate_object();
    contextClose();
}

public void nullify_stmt(Token label, Token nullifyKeyword, Token eos) {
    ArrayList<Element> elements = contextNodes();
    contextOpen("nullify");
    moveHere(elements);
    super.nullify_stmt(label, nullifyKeyword, eos);
    contextClose();
}

public void pointer_object() {
    contextCloseAllInner("pointers");
    Element pointer = contextNode(-1);
    contextOpen("pointer");
    moveHere(pointer);
    if (verbosity >= 100)
        super.pointer_object();
    contextClose();
}

public void deallocate_stmt(Token label, Token deallocateKeyword, Token eos,
        boolean hasDeallocOptList) {
    Element element2 = hasDeallocOptList ? contextNode(-2) : null;
    Element element = contextNode(-1);
    contextOpen("deallocate");
    if (hasDeallocOptList)
        moveHere(element2);
    moveHere(element);
    super.deallocate_stmt(label, deallocateKeyword, eos, hasDeallocOptList);
    contextClose();
}

public void dealloc_opt(Token id) {
    contextCloseAllInner("keyword-arguments");
    Element element = contextNode(-1);
    contextOpen("keyword-argument");
    setAttribute("name", id);
    moveHere(element);
    if (verbosity >= 100)
        super.dealloc_opt(id);
    contextClose();
}

public void primary() {
    context = contextNode(-1); // temporarily reopen previously-closed context
    if (verbosity >= 100)
        super.primary();
    contextClose(); // re-close previously closed context
}

public void parenthesized_expr() {
    context = contextNode(-1); // temporarily reopen previously-closed context
    if (verbosity >= 100)
        super.parenthesized_expr();
    contextClose(); // re-close previously closed context
}

public void power_operand(boolean hasPowerOperand) {
    /* if (!hasPowerOperand)
    cleanUpAfterError("didn't expect hasPowerOperand=" + hasPowerOperand); */
    int numPowerOp = hasPowerOperand ? 1 : 0;
    genericOperationOpen(numPowerOp);
    if (verbosity >= 100)
        super.power_operand(hasPowerOperand);
    genericOperationClose(numPowerOp);
}

public void power_operand__power_op(Token powerOp) {
    if (verbosity >= 100)
        super.power_operand__power_op(powerOp);
    cleanUpAfterError();
}

public void mult_operand(int numMultOps) {
    genericOperationOpen(numMultOps);
    if (verbosity >= 100)
        super.mult_operand(numMultOps);
    genericOperationClose(numMultOps);
}

// Wraps the most recent node in an "operand" element.
public void mult_operand__mult_op(Token multOp) {
    Element element = contextNode(-1);
    contextOpen("operand");
    moveHere(element);
    if (verbosity >= 100)
        super.mult_operand__mult_op(multOp);
    contextClose();
}

public void signed_operand(Token addOp) {
    if (addOp != null)
        genericOperationForceOpen(2);
    if (verbosity >= 100)
        super.signed_operand(addOp);
    if (addOp != null)
        genericOperationClose(1);
}

public void add_operand(int numAddOps) {
    genericOperationOpen(numAddOps);
    if (verbosity >= 100)
        super.add_operand(numAddOps);
    genericOperationClose(numAddOps);
}

public void add_operand__add_op(Token addOp) {
    // same as mult_operand__mult_op()
    Element element = contextNode(-1);
    contextOpen("operand");
    moveHere(element);
    if (verbosity >= 100)
        super.add_operand__add_op(addOp);
    contextClose();
}

public void level_2_expr(int numConcatOps) {
    genericOperationOpen(numConcatOps);
    if (verbosity >= 100)
        super.level_2_expr(numConcatOps);
    genericOperationClose(numConcatOps);
}

public void power_op(Token powerKeyword) {
    contextOpen("operator");
    setAttribute("operator", powerKeyword);
    if (verbosity >= 100)
        super.power_op(powerKeyword);
    contextClose();
}

public void mult_op(Token multKeyword) {
    contextOpen("operator");
    setAttribute("operator", multKeyword);
    if (verbosity >= 100)
        super.mult_op(multKeyword);
    contextClose();
}

public void add_op(Token addKeyword) {
    contextOpen("operator");
    setAttribute("operator", addKeyword);
    if (verbosity >= 100)
        super.add_op(addKeyword);
    contextClose();
}

// NOTE: this method continues on the next source line of the chunk.
public void level_3_expr(Token relOp) {
    int numRelOp = relOp == null ?
0 : 1; genericOperationOpen(numRelOp); if (verbosity >= 80) super.level_3_expr(relOp); genericOperationClose(numRelOp); } public void concat_op(Token concatKeyword) { contextOpen("operator"); if (verbosity >= 100) super.concat_op(concatKeyword); setAttribute("operator", " contextClose(); } public void rel_op(Token relOp) { contextOpen("operator"); setAttribute("operator", relOp); if (verbosity >= 100) super.rel_op(relOp); contextClose(); } public void and_operand(boolean hasNotOp, int numAndOps) { if (hasNotOp) if (numAndOps == 0) genericOperationForceOpen(2); else { int nodesCount = 2 * numAndOps + 2; ArrayList<Element> nodes = contextNodes(-nodesCount, 2); Element reference = contextNode(-nodesCount + 2); Element operation = contextOpen("operation"); setAttribute("type", "unary"); for (Element node : nodes) { boolean needsTransform = !node.getTagName().equals("operand") && !node.getTagName().equals("operator"); if (needsTransform) contextOpen("operand"); moveHere(node); if (needsTransform) contextClose(); } contextClose(); context.removeChild(operation); context.insertBefore(operation, reference); genericOperationOpen(numAndOps); // cleanUpAfterError("didn't expect hasNotOp=" + hasNotOp + " numAndOps=" + numAndOps); } else genericOperationOpen(numAndOps); if (verbosity >= 100) super.and_operand(hasNotOp, numAndOps); if (hasNotOp) genericOperationClose(numAndOps > 0 ? 
            numAndOps : 1);
    else
        genericOperationClose(numAndOps);
}

public void and_operand__not_op(boolean hasNotOp) {
    if (hasNotOp) {
        genericOperationForceOpen(2);
        genericOperationClose(1);
        // cleanUpAfterError("didn't expect hasNotOp=" + hasNotOp);
    }
    // same as mult_operand__mult_op()
    Element element = contextNode(-1);
    contextOpen("operand");
    moveHere(element);
    if (verbosity >= 100)
        super.and_operand__not_op(hasNotOp);
    contextClose();
}

public void or_operand(int numOrOps) {
    genericOperationOpen(numOrOps);
    if (verbosity >= 100)
        super.or_operand(numOrOps);
    genericOperationClose(numOrOps);
}

public void equiv_operand(int numEquivOps) {
    genericOperationOpen(numEquivOps);
    if (verbosity >= 100)
        super.equiv_operand(numEquivOps);
    genericOperationClose(numEquivOps);
}

public void equiv_operand__equiv_op(Token equivOp) {
    // same as mult_operand__mult_op()
    Element element = contextNode(-1);
    contextOpen("operand");
    moveHere(element);
    if (verbosity >= 100)
        super.equiv_operand__equiv_op(equivOp);
    contextClose();
}

public void not_op(Token notOp) {
    contextOpen("operator");
    setAttribute("operator", notOp);
    if (verbosity >= 100)
        super.not_op(notOp);
    contextClose();
}

public void and_op(Token andOp) {
    contextOpen("operator");
    setAttribute("operator", andOp);
    if (verbosity >= 100)
        super.and_op(andOp);
    contextClose();
}

public void or_op(Token orOp) {
    contextOpen("operator");
    setAttribute("operator", orOp);
    if (verbosity >= 100)
        super.or_op(orOp);
    contextClose();
}

public void equiv_op(Token equivOp) {
    contextOpen("operator");
    setAttribute("operator", equivOp);
    if (verbosity >= 100)
        super.equiv_op(equivOp);
    contextClose();
}

// Wraps the two most recent nodes as target/value of an "assignment" element.
public void assignment_stmt(Token label, Token eos) {
    ArrayList<Element> nodes = contextNodes();
    if (nodes.size() < 2)
        cleanUpAfterError("there should be at least 2 nodes for 'assignment' but "
                + nodes.size() + " found");
    Element target = contextNode(-2);
    Element value = contextNode(-1);
    contextOpen("assignment");
    contextOpen("target");
    moveHere(target);
    contextClose();
    contextOpen("value");
    moveHere(value);
    contextClose();
    if (verbosity >= 100)
        super.assignment_stmt(label, eos);
    contextClose();
}

public void pointer_assignment_stmt(Token label, Token eos, boolean hasBoundsSpecList,
        boolean hasBoundsRemappingList) {
    Element value = contextNode(-1);
    contextClose();
    Element target = contextNode(-1);
    contextOpen("pointer-assignment");
    contextOpen("target");
    moveHere(target);
    contextClose();
    contextOpen("value");
    moveHere(value);
    contextClose();
    super.pointer_assignment_stmt(label, eos, hasBoundsSpecList, hasBoundsRemappingList);
    contextClose();
}

public void forall_construct() {
    if (verbosity >= 100)
        super.forall_construct();
    contextClose("loop");
    contextOpen("statement");
}

// Turns the current "statement" into a forall "loop" with a header and body.
public void forall_construct_stmt(Token label, Token id, Token forallKeyword, Token eos) {
    contextRename("statement", "loop");
    setAttribute("type", "forall");
    ArrayList<Element> elements = contextNodes();
    contextOpen("header");
    moveHere(elements);
    contextClose();
    super.forall_construct_stmt(label, id, forallKeyword, eos);
    contextOpen("body");
    contextOpen("statement");
}

public void forall_header() {
    if (verbosity >= 100)
        super.forall_header();
}

// Emits the forall index variable and reorders it before its bound nodes.
public void forall_triplet_spec(Token id, boolean hasStride) {
    contextOpen("index-variable");
    setAttribute("name", id);
    contextClose();
    Element element = contextNode(-1);
    context.removeChild(element);
    context.insertBefore(element, contextNode(hasStride ? -3 : -2));
    genericLoopControl(hasStride);
    context = contextNode(-1);
    super.forall_triplet_spec(id, hasStride);
    contextClose();
}

// Moves the assignment that was emitted into the header down into the body.
public void forall_assignment_stmt(boolean isPointerAssignment) {
    Element assignment = contextNode(-1);
    if (!context.getTagName().equals("header"))
        cleanUpAfterError("didn't expect <" + context.getTagName() + ">");
    contextClose();
    contextOpen("body");
    contextOpen("statement");
    moveHere(assignment);
    context = assignment; // temporarily reopen assignment context
    if (!context.getTagName().equals("assignment"))
        cleanUpAfterError("didn't expect <" + context.getTagName() + ">");
    if (verbosity >= 100)
        super.forall_assignment_stmt(isPointerAssignment);
    contextClose(); // re-close assignment context
    contextClose();
    contextClose();
}

public void end_forall_stmt(Token label, Token endKeyword, Token forallKeyword,
        Token id, Token eos) {
    contextCloseAllInner("loop");
    super.end_forall_stmt(label, endKeyword, forallKeyword, id, eos);
}

public void forall_stmt__begin() {
    contextRename("statement", "loop");
    setAttribute("type", "forall");
    if (verbosity >= 100)
        super.forall_stmt__begin();
    contextOpen("header");
}

public void forall_stmt(Token label, Token forallKeyword) {
    contextCloseAllInner("loop");
    super.forall_stmt(label, forallKeyword);
    contextClose();
    contextOpen("statement"); // TODO: temporary workaround
}

public void block() {
    contextCloseAllInner("body");
    if (verbosity >= 100)
        super.block();
}

public void if_construct() {
    contextCloseAllInner("if");
    if (verbosity >= 100)
        super.if_construct();
    contextClose();
    contextOpen("statement");
}

// Converts the current "statement" into an "if" element with header and body.
public void if_then_stmt(Token label, Token id, Token ifKeyword, Token thenKeyword, Token eos) {
    contextRename("statement", "if");
    ArrayList<Element> nodes = contextNodes();
    contextOpen("header");
    moveHere(nodes);
    contextClose();
    if (verbosity >= 80)
        super.if_then_stmt(label, id, ifKeyword, thenKeyword, eos);
    contextOpen("body");
    contextOpen("statement");
}

public void else_if_stmt(Token label, Token elseKeyword, Token ifKeyword,
        Token thenKeyword, Token id, Token eos) {
    Element condition = contextNode(-1);
    contextClose("body");
    contextOpen("header");
    setAttribute("type", "else-if");
    moveHere(condition);
    contextClose();
    if (verbosity >= 80)
        super.else_if_stmt(label, elseKeyword, ifKeyword, thenKeyword, id, eos);
    contextOpen("body");
    setAttribute("type", "else-if");
    contextOpen("statement");
}

public void else_stmt(Token label, Token elseKeyword, Token id, Token eos) {
    contextClose("body");
    if (verbosity >= 80)
        super.else_stmt(label, elseKeyword, id, eos);
    contextOpen("body");
    setAttribute("type", "else");
    contextOpen("statement");
}

public void end_if_stmt(Token label, Token endKeyword, Token ifKeyword, Token id, Token eos) {
    contextCloseAllInner("if");
    if (verbosity >= 80)
        super.end_if_stmt(label, endKeyword, ifKeyword, id, eos);
}

public void if_stmt__begin() {
    contextRename("statement", "if");
    if (verbosity >= 100)
        super.if_stmt__begin();
    contextOpen("header"); // will be filled by if_stmt()
    contextClose();
    contextOpen("body");
    contextOpen("statement");
}

// For a one-line IF: moves the condition out of the body into the header.
public void if_stmt(Token label, Token ifKeyword) {
    contextClose("body");
    Element ifHeader = contextNode(-2);
    Element ifBody = contextNode(-1);
    Element statementToBeFixed = contextNode(ifBody, 0);
    Element ifCondition = contextNode(statementToBeFixed, 0);
    if (!ifBody.getTagName().equals("body"))
        cleanUpAfterError("if body node must be named body");
    moveTo(ifHeader, ifCondition);
    contextCloseAllInner("if");
    super.if_stmt(label, ifKeyword);
    contextClose();
    contextOpen("statement");
}

public void block_construct() {
    if (verbosity >= 100)
        super.block_construct();
}

public void case_construct() {
    contextCloseAllInner("select");
    if (verbosity >= 100)
        super.case_construct();
    contextClose();
    contextOpen("statement");
}

public void select_case_stmt(Token label, Token id, Token selectKeyword,
        Token caseKeyword, Token eos) {
    contextRename("statement", "select");
    ArrayList<Element> nodes = contextNodes();
    contextOpen("header");
    moveHere(nodes);
    contextClose();
    super.select_case_stmt(label, id, selectKeyword, caseKeyword, eos);
    contextOpen("body");
}

public void case_stmt(Token label, Token caseKeyword, Token id, Token eos) {
    super.case_stmt(label, caseKeyword, id, eos);
    contextOpen("body");
    contextOpen("statement");
}

public void end_select_stmt(Token label, Token endKeyword, Token selectKeyword,
        Token id, Token eos) {
    contextCloseAllInner("select");
    super.end_select_stmt(label, endKeyword, selectKeyword, id, eos);
}

// A DEFAULT selector gets its own "case" element with an empty header.
public void case_selector(Token defaultToken) {
    if (!context.getTagName().equals("case") && contextTryFind("case") != null) {
        contextClose("case");
        contextOpen("case");
        setAttribute("type", "default");
        contextOpen("header");
        contextClose();
    }
    super.case_selector(defaultToken);
}

public void case_value_range() {
    contextClose("value-range");
    if (verbosity >= 100)
        super.case_value_range();
    contextOpen("value-range");
    contextOpen("value");
}

public void case_value_range_list__begin() {
    if (context.getTagName().equals("body")
            && ((Element) context.getParentNode()).getTagName().equals("case")) {
        contextClose("body");
        contextClose("case");
    }
    contextOpen("case");
    setAttribute("type", "specific");
    contextOpen("header");
    super.case_value_range_list__begin();
    contextOpen("value-range");
    contextOpen("value");
}

public void case_value_range_list(int count) {
    super.case_value_range_list(count);
    contextClose("header");
}

public void case_value_range_suffix() {
    contextCloseAllInner("value-range");
    if (verbosity >= 100)
        super.case_value_range_suffix();
}

public void case_value() {
    contextClose("value");
    if (verbosity >= 100)
        super.case_value();
    contextOpen("value");
}

public void associate_construct() {
    super.associate_construct();
    contextClose("associate");
    contextOpen("statement");
}

public void associate_stmt(Token label, Token id, Token associateKeyword, Token eos) {
    Element element = contextNode(-1);
    contextRename("statement", "associate");
    contextOpen("header");
    moveHere(element);
    contextClose();
    super.associate_stmt(label, id, associateKeyword, eos);
    contextOpen("body");
    contextOpen("statement");
}

public void association(Token id) {
    context = contextNode(-1);
    assert context.getNodeName().equals("keyword-argument");
    setAttribute("argument-name", id);
    if (verbosity >= 100)
        super.association(id);
    contextClose();
}

public void selector() {
    Element element = contextNode(-1);
    contextOpen("keyword-argument");
    moveHere(element);
    if (verbosity >= 100)
        super.selector();
    contextClose();
}

public void end_associate_stmt(Token label, Token endKeyword, Token associateKeyword,
        Token id, Token eos) {
    contextClose("body");
    super.end_associate_stmt(label, endKeyword, associateKeyword, id, eos);
}

public void type_guard_stmt(Token label, Token typeKeyword, Token isOrDefaultKeyword,
        Token selectConstructName, Token eos) {
    // TODO Auto-generated method stub
    contextOpen("statement");
    super.type_guard_stmt(label, typeKeyword, isOrDefaultKeyword, selectConstructName, eos);
}

public void do_construct() {
    contextCloseAllInner("loop");
    if (verbosity >= 100)
        super.do_construct();
    contextClose();
    contextOpen("statement");
}

public void block_do_construct() {
    if (verbosity >= 100)
        super.block_do_construct();
}

public void do_stmt(Token label, Token id, Token doKeyword, Token digitString,
        Token eos, boolean hasLoopControl) {
    if (!hasLoopControl) {
        contextRename("statement", "loop");
        setAttribute("type", "do-label");
    }
    /* if (digitString != null) // TODO: is this needed?
    setAttribute("label", digitString); */
    super.do_stmt(label, id, doKeyword, digitString, eos, hasLoopControl);
    contextOpen("body");
    contextOpen("statement");
}

public void label_do_stmt(Token label, Token id, Token doKeyword, Token digitString,
        Token eos, boolean hasLoopControl) {
    super.label_do_stmt(label, id, doKeyword, digitString, eos, hasLoopControl);
    cleanUpAfterError("didn't expect label-do-stmt");
}

// Classifies the loop (do / do-while / do-concurrent) and wraps the control
// expression in a "header" element.
// NOTE(review): magic number 1701 — presumably corresponds to
// IActionEnums.DoConstruct_variable; confirm against IActionEnums.
public void loop_control(Token whileKeyword, int doConstructType, boolean hasOptExpr) {
    /* if(hasOptExpr)
    cleanUpAfterError("didn't expect hasOptExpr=" + hasOptExpr); */
    if (doConstructType == 1701)
        genericLoopControl(hasOptExpr);
    contextRename("statement", "loop");
    String loopType = "";
    switch (doConstructType) {
    case IActionEnums.DoConstruct_concurrent:
        loopType = "do-concurrent";
        break;
    case IActionEnums.DoConstruct_variable:
        loopType = "do";
        break;
    case IActionEnums.DoConstruct_while:
        loopType = "do-while";
        break;
    default:
        throw new IllegalArgumentException(Integer.toString(doConstructType));
    }
    setAttribute("type", loopType);
    Element element = contextNode(-1);
    contextOpen("header");
    moveHere(element);
    super.loop_control(whileKeyword, doConstructType, hasOptExpr);
    contextClose();
}

public void do_variable(Token id) {
    contextOpen("index-variable");
    setAttribute("name", id);
    super.do_variable(id);
    contextClose();
}

public void end_do() {
    if (verbosity >= 100)
        super.end_do();
}

public void end_do_stmt(Token label, Token endKeyword, Token doKeyword, Token id, Token eos) {
    contextCloseAllInner("loop");
    if (verbosity >= 80)
        super.end_do_stmt(label, endKeyword, doKeyword, id, eos);
}

public void do_term_action_stmt(Token label, Token endKeyword, Token doKeyword,
        Token id, Token eos, boolean inserted) {
    contextCloseAllInner("loop");
    if (verbosity >= 80)
        super.do_term_action_stmt(label, endKeyword, doKeyword, id, eos, inserted);
}

// NOTE: this method continues on the next source line of the chunk.
public void cycle_stmt(Token label, Token cycleKeyword, Token id, Token eos) {
    contextOpen("cycle");
    if (verbosity >= 80)
super.cycle_stmt(label, cycleKeyword, id, eos); contextClose(); } public void exit_stmt(Token label, Token exitKeyword, Token id, Token eos) { contextOpen("exit"); if (verbosity >= 80) super.exit_stmt(label, exitKeyword, id, eos); contextClose(); } public void goto_stmt(Token label, Token goKeyword, Token toKeyword, Token target_label, Token eos) { // TODO Auto-generated method stub super.goto_stmt(label, goKeyword, toKeyword, target_label, eos); } public void continue_stmt(Token label, Token continueKeyword, Token eos) { Element labelNode = contextNodesCount() > 0 ? contextNode(-1) : null; labelNode = labelNode != null && labelNode.getTagName() == "label" ? labelNode : null; contextOpen("statement"); contextOpen("continue"); if (labelNode != null) moveHere(labelNode); super.continue_stmt(label, continueKeyword, eos); contextClose(); } public void stop_stmt(Token label, Token stopKeyword, Token eos, boolean hasStopCode) { if (hasStopCode) { Element value = contextNode(-1); contextOpen("stop"); moveHere(value); Attr stopCode = getAttribute("digitString", value); setAttribute("code", stopCode.getValue()); } else { contextOpen("stop"); setAttribute("code", ""); } if (verbosity >= 60) super.stop_stmt(label, stopKeyword, eos, hasStopCode); contextClose(); } public void open_stmt(Token label, Token openKeyword, Token eos) { Element args = contextNode(-1); contextOpen("open"); moveHere(args); super.open_stmt(label, openKeyword, eos); contextClose(); } public void connect_spec(Token id) { contextCloseAllInner("keyword-argument"); setAttribute("argument-name", id); contextClose(); if (verbosity >= 100) super.connect_spec(id); contextOpen("keyword-argument"); } public void connect_spec_list__begin() { super.connect_spec_list__begin(); contextOpen("keyword-argument"); } public void connect_spec_list(int count) { contextClose("keyword-argument"); super.connect_spec_list(count); } public void close_stmt(Token label, Token closeKeyword, Token eos) { Element args = 
contextNode(-1); contextOpen("close"); moveHere(args); super.close_stmt(label, closeKeyword, eos); contextClose(); } public void close_spec(Token closeSpec) { contextCloseAllInner("keyword-argument"); setAttribute("argument-name", closeSpec); contextClose(); if (verbosity >= 100) super.close_spec(closeSpec); contextOpen("keyword-argument"); } public void close_spec_list__begin() { super.close_spec_list__begin(); contextOpen("keyword-argument"); } public void close_spec_list(int count) { contextClose("keyword-argument"); super.close_spec_list(count); } public void read_stmt(Token label, Token readKeyword, Token eos, boolean hasInputItemList) { Element outerContext = context; contextOpen("read"); if (hasInputItemList) moveHere(contextNode(outerContext, -3)); moveHere(contextNode(outerContext, -2)); super.read_stmt(label, readKeyword, eos, hasInputItemList); contextClose(); } public void write_stmt(Token label, Token writeKeyword, Token eos, boolean hasOutputItemList) { Element args = contextNode(-1); Element outputs = null; if (hasOutputItemList) { outputs = args; args = contextNode(-2); } contextOpen("write"); moveHere(args); if (hasOutputItemList) moveHere(outputs); super.write_stmt(label, writeKeyword, eos, hasOutputItemList); contextClose(); } public void print_stmt(Token label, Token printKeyword, Token eos, boolean hasOutputItemList) { Element outputs = hasOutputItemList ? contextNode(-1) : null; Element format = contextNode(hasOutputItemList ? -2 : -1); contextOpen("print"); moveHere(format); if (hasOutputItemList) moveHere(outputs); super.print_stmt(label, printKeyword, eos, hasOutputItemList); contextClose(); } public void io_control_spec(boolean hasExpression, Token keyword, boolean hasAsterisk) { if (hasExpression) { Element element = contextNode(-1); contextOpen("io-control"); moveHere(element); } else contextOpen("io-control"); setAttribute("argument-name", keyword == null ? 
"" : keyword); super.io_control_spec(hasExpression, keyword, hasAsterisk); contextClose("io-control"); } public void format() { Element label = null; if (contextNodesCount() > 0) { Element node = contextNode(-1); if (node.getNodeName().equals("literal")) label = node; } contextOpen("print-format"); setAttribute("type", label == null ? "*" : "label"); if (label != null) moveHere(label); if (verbosity >= 100) super.format(); contextClose(); } public void input_item() { Element element = contextNode(-1); contextOpen("input"); moveHere(element); if (verbosity >= 100) super.input_item(); contextClose("input"); } public void output_item() { Element element = contextNode(-1); contextOpen("output"); moveHere(element); if (verbosity >= 100) super.output_item(); contextClose(); } public void io_implied_do() { ArrayList<Element> elements = contextNodes(); Element header = contextNode(-1); contextOpen("loop"); setAttribute("type", "implied-do"); contextOpen("body"); for (Element node : elements) if (node.getTagName().equals("expression")) moveHere(node); contextClose(); moveHere(header); super.io_implied_do(); contextClose(); } public void io_implied_do_object() { context = contextNode(-1); contextRename("expression"); if (verbosity >= 100) super.io_implied_do_object(); contextClose(); } public void io_implied_do_control(boolean hasStride) { genericLoopControl(hasStride); Element element = contextNode(-1); contextOpen("header"); moveHere(element); super.io_implied_do_control(hasStride); contextClose(); } public void format_stmt(Token label, Token formatKeyword, Token eos) { Element labelNode = (label != null) ? contextNode(-2) : null; context = contextNode(-1); if (label != null) moveHere(0, labelNode); if (verbosity >= 60) super.format_stmt(label, formatKeyword, eos); contextClose(); } public void format_specification(boolean hasFormatItemList) { Element items = hasFormatItemList ? 
contextNode(-1) : null; contextOpen("format"); if (hasFormatItemList) moveHere(items); if (verbosity >= 60) super.format_specification(hasFormatItemList); contextClose(); } public void main_program__begin() { contextOpen("program"); if (verbosity >= 100) super.main_program__begin(); contextOpen("header"); } public void ext_function_subprogram(boolean hasPrefix) { context = contextNode(-1); // temporarily reopen previously-closed context if (verbosity >= 100) super.ext_function_subprogram(hasPrefix); contextClose(); // re-close previously closed context } public void main_program(boolean hasProgramStmt, boolean hasExecutionPart, boolean hasInternalSubprogramPart) { super.main_program(hasProgramStmt, hasExecutionPart, hasInternalSubprogramPart); contextClose("program"); } public void program_stmt(Token label, Token programKeyword, Token id, Token eos) { contextClose("header"); if (verbosity >= 20) super.program_stmt(label, programKeyword, id, eos); setAttribute("name", id); contextOpen("body"); contextOpen("specification"); contextOpen("declaration"); } public void end_program_stmt(Token label, Token endKeyword, Token programKeyword, Token id, Token eos) { if (contextTryFind("program") == null) { // TODO: this workaround should not be needed ArrayList<Element> nodes = contextNodes(); contextOpen("program"); moveHere(nodes); } contextCloseAllInner("program"); super.end_program_stmt(label, endKeyword, programKeyword, id, eos); } public void module() { contextCloseAllInner("module"); if (verbosity >= 100) super.module(); contextClose(); } public void module_stmt__begin() { contextOpen("module"); if (verbosity >= 100) super.module_stmt__begin(); contextOpen("header"); } public void module_stmt(Token label, Token moduleKeyword, Token id, Token eos) { contextClose("header"); setAttribute("name", id); super.module_stmt(label, moduleKeyword, id, eos); contextOpen("body"); contextOpen("specification"); contextOpen("declaration"); } public void end_module_stmt(Token label, 
Token endKeyword, Token moduleKeyword, Token id, Token eos) { if (!context.getTagName().equals("members")) { ArrayList<String> hierarchy = contextNameHierarchy(); String[] expected = { "body", "module" }; if (hierarchy.size() >= 2 && (Arrays.equals(hierarchy.subList(0, 2).toArray(), expected) || (hierarchy.size() >= 3 && Arrays.equals(hierarchy.subList(1, 3).toArray(), expected)))) { contextClose("body"); contextOpen("members"); } /* else System.err.println("Context hierarchy for 'end module' statement: " + hierarchy); */ } contextClose("members"); super.end_module_stmt(label, endKeyword, moduleKeyword, id, eos); } public void module_subprogram(boolean hasPrefix) { super.module_subprogram(hasPrefix); } public void use_stmt(Token label, Token useKeyword, Token id, Token onlyKeyword, Token eos, boolean hasModuleNature, boolean hasRenameList, boolean hasOnly) { if (context.getTagName().equals("declaration")) { LOG.log(Level.FINE, "closing unclosed declaration at use_stmt id={0}", id.getText()); contextClose("declaration"); } if (!context.getTagName().equals("use")) contextOpen("use"); setAttribute("name", id); super.use_stmt(label, useKeyword, id, onlyKeyword, eos, hasModuleNature, hasRenameList, hasOnly); contextClose("use"); contextOpen("declaration"); } public void module_nature(Token nature) { if (context.getTagName().equals("declaration")) { LOG.log(Level.FINE, "closing unclosed declaration at module_nature nature={0}", nature.getText()); contextClose("declaration"); } if (!context.getTagName().equals("use")) contextOpen("use"); contextOpen("nature"); setAttribute("name", nature); if (verbosity >= 80) super.module_nature(nature); contextClose("nature"); } public void rename_list__begin() { if (context.getTagName().equals("declaration")) { LOG.log(Level.FINE, "closing unclosed declaration at rename_list__begin"); contextClose("declaration"); } if (!context.getTagName().equals("use")) contextOpen("use"); super.rename_list__begin(); } public void only_list__begin() 
{ if (context.getTagName().equals("declaration")) { LOG.log(Level.FINE, "closing unclosed declaration at only_list__begin"); contextClose("declaration"); } if (!context.getTagName().equals("use")) contextOpen("use"); super.only_list__begin(); } public void block_data() { if (verbosity >= 100) super.block_data(); contextClose("block-data"); } public void block_data_stmt__begin() { contextOpen("block-data"); if (verbosity >= 100) super.block_data_stmt__begin(); contextOpen("specification"); contextOpen("declaration"); } public void interface_block() { // TODO Auto-generated method stub super.interface_block(); } public void interface_specification() { // TODO Auto-generated method stub super.interface_specification(); } public void interface_stmt__begin() { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); contextOpen("interface"); if (verbosity >= 100) super.interface_stmt__begin(); contextOpen("header"); } public void interface_stmt(Token label, Token abstractToken, Token keyword, Token eos, boolean hasGenericSpec) { if (contextTryFind("declaration") == null) // interface_stmt__begin is not always emitted contextOpen("declaration"); if (contextTryFind("interface") == null) { contextOpen("interface"); contextOpen("header"); } contextClose("header"); super.interface_stmt(label, abstractToken, keyword, eos, hasGenericSpec); if (abstractToken != null) setAttribute("type", abstractToken); contextOpen("body"); contextOpen("specification"); contextOpen("declaration"); } public void end_interface_stmt(Token label, Token kw1, Token kw2, Token eos, boolean hasGenericSpec) { contextCloseAllInner("interface"); super.end_interface_stmt(label, kw1, kw2, eos, hasGenericSpec); contextClose(); if (!context.getTagName().equals("declaration")) cleanUpAfterError("expected interface to be within declaration context, but its in " + context.getTagName()); setAttribute("type", "interface"); } public void interface_body(boolean hasPrefix) { // TODO Auto-generated 
method stub super.interface_body(hasPrefix); } public void generic_spec(Token keyword, Token name, int type) { contextOpen("name"); setAttribute("id", name); super.generic_spec(keyword, name, type); contextClose(); } public void import_stmt(Token label, Token importKeyword, Token eos, boolean hasGenericNameList) { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); setAttribute("type", "import"); super.import_stmt(label, importKeyword, eos, hasGenericNameList); contextClose("declaration"); } public void external_stmt(Token label, Token externalKeyword, Token eos) { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); if (verbosity >= 80) super.external_stmt(label, externalKeyword, eos); setAttribute("type", "external"); } public void procedure_declaration_stmt(Token label, Token procedureKeyword, Token eos, boolean hasProcInterface, int count) { // TODO Auto-generated method stub super.procedure_declaration_stmt(label, procedureKeyword, eos, hasProcInterface, count); } public void proc_decl(Token id, boolean hasNullInit) { contextOpen("procedure"); setAttribute("name", id); if (verbosity >= 80) super.proc_decl(id, hasNullInit); contextClose(); } public void proc_decl_list__begin() { if (!context.getTagName().equals("declaration")) contextOpen("declaration"); setAttribute("type", "procedures"); super.proc_decl_list__begin(); } public void intrinsic_stmt(Token label, Token intrinsicKeyword, Token eos) { Element condition = contextNode(-1); if (!context.getTagName().equals("declaration")) contextOpen("declaration"); setAttribute("type", "intrinsic"); moveHere(condition); super.intrinsic_stmt(label, intrinsicKeyword, eos); } public void call_stmt(Token label, Token callKeyword, Token eos, boolean hasActualArgSpecList) { Element name = contextNode(-1); Element arguments = null; if (name.getTagName() == "arguments") { arguments = name; name = contextNode(-2); } else if (name.getTagName() != "name") cleanUpAfterError("tag 
name is not 'name' but '" + name.getTagName() + "'"); contextOpen("call"); moveHere(name); if (arguments != null) moveHere(arguments); super.call_stmt(label, callKeyword, eos, hasActualArgSpecList); contextClose(); } public void procedure_designator() { if (verbosity >= 100) super.procedure_designator(); setAttribute("type", "procedure"); contextClose("name"); } public void actual_arg_spec(Token keyword) { boolean inArgumentContext = contextTryFind("argument") != null; if (!inArgumentContext) contextOpen("argument"); setAttribute("name", keyword); if (verbosity >= 100) super.actual_arg_spec(keyword); if (inArgumentContext) contextClose("argument"); } public void actual_arg_spec_list__begin() { super.actual_arg_spec_list__begin(); contextOpen("argument"); } public void actual_arg_spec_list(int count) { contextClose("argument"); super.actual_arg_spec_list(count); } public void actual_arg(boolean hasExpr, Token label) { boolean inArgumentContext = contextTryFind("argument") != null; if (!inArgumentContext) { if (hasExpr) { Element element = contextNode(-1); contextOpen("argument"); moveHere(element); } else contextOpen("argument"); } if (verbosity >= 60) super.actual_arg(hasExpr, label); if (inArgumentContext) contextClose("argument"); } public void function_subprogram(boolean hasExePart, boolean hasIntSubProg) { super.function_subprogram(hasExePart, hasIntSubProg); if (context.getTagName().equals("function")) contextClose("function"); } public void function_stmt__begin() { contextOpen("function"); contextOpen("header"); if (verbosity >= 100) super.function_stmt__begin(); } public void function_stmt(Token label, Token keyword, Token name, Token eos, boolean hasGenericNameList, boolean hasSuffix) { contextClose("header"); super.function_stmt(label, keyword, name, eos, hasGenericNameList, hasSuffix); setAttribute("name", name); contextOpen("body"); contextOpen("specification"); contextOpen("declaration"); } public void prefix_spec(boolean isDecTypeSpec) { 
super.prefix_spec(isDecTypeSpec); if (isDecTypeSpec) contextClose("declaration"); } public void end_function_stmt(Token label, Token keyword1, Token keyword2, Token name, Token eos) { contextCloseAllInner("function"); super.end_function_stmt(label, keyword1, keyword2, name, eos); } public void subroutine_stmt__begin() { contextOpen("subroutine"); contextOpen("header"); if (verbosity >= 100) super.subroutine_stmt__begin(); } public void subroutine_stmt(Token label, Token keyword, Token name, Token eos, boolean hasPrefix, boolean hasDummyArgList, boolean hasBindingSpec, boolean hasArgSpecifier) { super.subroutine_stmt(label, keyword, name, eos, hasPrefix, hasDummyArgList, hasBindingSpec, hasArgSpecifier); contextClose("header"); setAttribute("name", name); contextOpen("body"); contextOpen("specification"); contextOpen("declaration"); } public void dummy_arg(Token dummy) { contextOpen("argument"); setAttribute("name", dummy); if (verbosity >= 100) super.dummy_arg(dummy); contextClose(); } public void end_subroutine_stmt(Token label, Token keyword1, Token keyword2, Token name, Token eos) { contextCloseAllInner("subroutine"); super.end_subroutine_stmt(label, keyword1, keyword2, name, eos); contextClose(); } public void return_stmt(Token label, Token keyword, Token eos, boolean hasScalarIntExpr) { if (hasScalarIntExpr) { Element element = contextNode(-1); contextOpen("return"); contextOpen("value"); moveHere(element); contextClose(); } else contextOpen("return"); setAttribute("hasValue", hasScalarIntExpr); super.return_stmt(label, keyword, eos, hasScalarIntExpr); contextClose(); } public void contains_stmt(Token label, Token keyword, Token eos) { ArrayList<String> hierarchy = contextNameHierarchy(); boolean acceptedContext = false; if (hierarchy.size() >= 3) { Object[] hierarchyArray = hierarchy.subList(0, 3).toArray(); for (String enclosingGroup : new String[] { "subroutine", "program", "module" }) { acceptedContext = Arrays.equals(hierarchyArray, new String[] { 
"statement", "body", enclosingGroup }); if (acceptedContext) break; } } /* if (!acceptedContext) cleanUpAfterError("Context hierarchy for 'contains' statement is invalid: " + hierarchy); */ if (acceptedContext) contextClose("body"); super.contains_stmt(label, keyword, eos); if (acceptedContext) contextOpen("members"); } public void separate_module_subprogram(boolean hasExecutionPart, boolean hasInternalSubprogramPart) { super.separate_module_subprogram(hasExecutionPart, hasInternalSubprogramPart); contextClose("subroutine"); } public void separate_module_subprogram__begin() { contextOpen("subroutine"); super.separate_module_subprogram__begin(); contextOpen("header"); } public void mp_subprogram_stmt(Token label, Token moduleKeyword, Token procedureKeyword, Token name, Token eos) { contextClose("header"); setAttribute("name", name); super.mp_subprogram_stmt(label, moduleKeyword, procedureKeyword, name, eos); contextOpen("body"); } public void end_mp_subprogram_stmt(Token label, Token keyword1, Token keyword2, Token name, Token eos) { contextCloseAllInner("subroutine"); super.end_mp_subprogram_stmt(label, keyword1, keyword2, name, eos); } public void start_of_file(String filename, String path) { if (contextTryFind("file") != null) { if (context.getTagName().equals("declaration")) { LOG.log(Level.FINER, "closing unclosed declaration at start_of_file"); contextClose("declaration"); } contextOpen("declaration"); setAttribute("type", "include"); } contextOpen("file"); if (verbosity >= 100) super.start_of_file(filename, path); if (path.equals("ERROR_FILE_NOT_FOUND")) setAttribute("path", filename); else setAttribute("path", path); } public void end_of_file(String filename, String path) { contextCloseAllInner("file"); if (verbosity >= 100) super.end_of_file(filename, path); contextClose(); } public void next_token(Token tk) { System.err.println("next_token"); System.err.println(tk); } }
package com.intellij.util; import com.intellij.openapi.util.Comparing; import gnu.trove.Equality; import junit.framework.Assert; import junit.framework.AssertionFailedError; import java.io.BufferedReader; import java.io.IOException; import java.io.StringReader; import java.util.*; public class Assertion extends Assert { private StringConvertion myStringConvertion; private Equality myEquality = Equality.CANONICAL; public Assertion() { this(StringConvertion.DEFAULT); } public Assertion(StringConvertion stringConvertion) { myStringConvertion = stringConvertion; } public void setStringConvertion(StringConvertion stringConvertion) { myStringConvertion = stringConvertion; } public StringConvertion getStringConvertion() { return myStringConvertion; } public Equality getEquality() { return myEquality; } public void compareAll(Object[] expected, Object[] actual) { checkNotNulls(expected, actual); String expectedLines = converToLines(expected); String actualLines = converToLines(actual); Assert.assertEquals(expectedLines, actualLines); Assert.assertEquals(expected.length, actual.length); for (int i = 0; i < expected.length; i++) { checkEquals("Index=" + i, expected[i], actual[i]); } } private void checkNotNulls(Object[] expected, Object[] actual) { Assert.assertNotNull("Expected is null", expected); Assert.assertNotNull("Actual is null", actual); } public void compareAll(Object[][] expected, Object[][] actual) { checkNotNulls(expected, actual); Assert.assertEquals(convertToLines(expected), convertToLines(actual)); Assert.assertEquals(expected.length, actual.length); for (int i = 0; i < expected.length; i++) { compareAll(expected[i], actual[i]); } } private String convertToLines(Object[][] expected) { StringBuffer expectedLines = new StringBuffer(); for (int i = 0; i < expected.length; i++) { Object[] objects = expected[i]; expectedLines.append(concatenateAsStrings(objects, " ")); expectedLines.append("\n"); } return expectedLines.toString(); } private void checkEquals(String 
message, Object expected, Object actual) { Assert.assertTrue(message + " expected:<" + convertToString(expected) + "> actual:" + convertToString(actual) + ">", myEquality.equals(expected, actual)); } public String converToLines(Object[] objects) { return concatenateAsStrings(objects, "\n"); } private String concatenateAsStrings(Object[] objects, String separator) { StringBuffer buffer = new StringBuffer(); String lineEnd = ""; for (int i = 0; i < objects.length; i++) { Object object = objects[i]; buffer.append(lineEnd); buffer.append(convertToString(object)); lineEnd = separator; } String reference = buffer.toString(); return reference; } public void enumerate(Object[] objects) { for (int i = 0; i < objects.length; i++) { Object object = objects[i]; System.out.println("[" + i + "] = " + convertToString(object)); } } public void enumerate(Collection objects) { enumerate(objects.toArray()); } private String convertToString(Object object) { if (object == null) return "null"; return myStringConvertion.convert(object); } public void compareAll(Object[] expected, List actual) { compareAll(expected, actual.toArray()); } public void compareAll(List expected, Object[] actual) { compareAll(expected.toArray(), actual); } public void compareUnordered(Object[] expected, Collection actual) { assertEquals(expected.length, actual.size()); for (Object exp : expected) { assertTrue(actual.contains(exp)); } //ArrayList expectedList = new ArrayList(Arrays.asList(new Object[Math.max(actual.size(), expected.length)])); //ArrayList actualList = new ArrayList(actual); //for (int i = 0; i < expected.length; i++) { // Object object = expected[i]; // int index = actualList.indexOf(object); // if (index == -1) index = i; // expectedList.set(index, object); //compareAll(expectedList, actualList); } public void compareUnordered(Collection expected, Collection actual) { compareUnordered(expected.toArray(), actual); } public void compareUnordered(Collection expected, Object[] actual) { 
compareUnordered(expected, new ArrayList(Arrays.asList(actual))); } public void compareAll(List expected, List actual) { compareAll(expected, actual.toArray()); } public static void compareLines(String text, String[] lines) throws IOException { BufferedReader reader = new BufferedReader(new StringReader(text)); for (int i = 0; i < lines.length - 1; i++) Assert.assertEquals(lines[i], reader.readLine()); String lastLine = lines[lines.length - 1]; char[] buffer = new char[lastLine.length()]; reader.read(buffer, 0, buffer.length); Assert.assertEquals(lastLine, new String(buffer)); Assert.assertEquals(-1, reader.read()); } public void contains(Collection collection, Object object) { if (collection.contains(object)) return; compareAll(new Object[]{object}, collection.toArray()); Assert.assertTrue(collection.contains(object)); } public void contains(Object[] array, Object object) { contains(Arrays.asList(array), object); } public void singleElement(Collection collection, Object object) { compareAll(new Object[]{object}, collection.toArray()); Assert.assertEquals(1, collection.size()); checkEquals("", object, collection.iterator().next()); } public void empty(Object[] array) { try { compareAll(ArrayUtil.EMPTY_OBJECT_ARRAY, array); } catch(AssertionFailedError e) { System.err.println("Size: " + array.length); throw e; } } public void empty(Collection objects) { empty(objects.toArray()); } public void count(int count, Collection objects) { if (count != objects.size()) { empty(objects); } Assert.assertEquals(count, objects.size()); } public void empty(int[] ints) { Object[] objects = new Object[ints.length]; for (int i = 0; i < ints.length; i++) { objects[i] = new Integer(ints[i]); } } public void singleElement(Object[] objects, Object element) { singleElement(Arrays.asList(objects), element); } public void count(int number, Object[] objects) { count(number, Arrays.asList(objects)); } public void compareUnordered(Object[] expected, Object[] actual) { 
compareUnordered(expected, new HashSet(Arrays.asList(actual))); } public void compareAll(int[] expected, int[] actual) { compareAll(asObjectArray(expected), asObjectArray(actual)); } private static Object[] asObjectArray(int[] ints) { Object[] result = new Object[ints.length]; for (int i = 0; i < ints.length; i++) { int anInt = ints[i]; result[i] = new Integer(anInt); } return result; } public void setEquality(Equality equality) { myEquality = equality; } public void singleElement(int[] actual, int element) { compareAll(new int[]{element}, actual); } public void size(int size, Collection collection) { if (collection.size() != size) { System.err.println("Expected: " + size + " actual: " + collection.size()); compareUnordered(ArrayUtil.EMPTY_OBJECT_ARRAY, collection); } Assert.assertEquals(size, collection.size()); } public void containsAll(Object[] array, Collection subCollection) { containsAll(Arrays.asList(array), subCollection); } public void containsAll(Collection list, Collection subCollection) { if (list.containsAll(subCollection)) return; for (Iterator iterator = subCollection.iterator(); iterator.hasNext();) { Object item = iterator.next(); boolean isContained = false; for (Iterator iterator1 = list.iterator(); iterator1.hasNext();) { Object superSetItem = iterator1.next(); if (myEquality.equals(superSetItem, item)) { isContained = true; break; } } Assert.assertTrue(myStringConvertion.convert(item), isContained); } } public <T> void singleOccurence(Collection<T> collection, T item) { int number = countOccurences(collection, item); if (number != 1) { enumerate(collection); Assert.fail(myStringConvertion.convert(item) + "\n occured " + number + " times"); } } public <T> int countOccurences(Collection<T> collection, T item) { int counter = 0; for (Iterator<T> iterator = collection.iterator(); iterator.hasNext();) { T obj = iterator.next(); if (Comparing.equal(item, obj)) counter++; } return counter; } public void containsAll(Collection collection, Object[] 
subArray) { containsAll(collection, Arrays.asList(subArray)); } public void size(int size, Object[] objects) { size(size, Arrays.asList(objects)); } public void containsAll(Object[] array, Object[] subArray) { containsAll(array, Arrays.asList(subArray)); } public void compareAll(char[] expected, char[] actual) { compareAll(asObjectArray(expected), asObjectArray(actual)); } private Object[] asObjectArray(char[] chars) { Object[] array = new Object[chars.length]; for (int i = 0; i < chars.length; i++) { char c = chars[i]; array[i] = new Character(c); } return array; } }
package gameEngine;

import gameEngine.actors.BaseTower;
import java.util.ArrayList;
import java.util.List;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.scene.image.ImageView;
import javafx.scene.layout.Pane;
import javafx.util.Duration;
import utilities.GSON.objectWrappers.DataWrapper;
import utilities.chatroom.Chatroom;
import utilities.networking.HTTPConnection;

/**
 * Engine manager for two-player co-operative play.  Tower placement is
 * synchronised through a central HTTP server: during a build phase (tracked by
 * {@link #myTimer}) each client pushes its tower list as JSON and polls the
 * server's master copy once per second, reconciling local towers against it.
 */
public class CoOpManager extends SingleThreadedEngineManager {
    // Server endpoints and POST parameter prefixes.
    private static final String GET_PLAYERS = "get_num_players";
    private static final String GET_MASTER_JSON = "get_master_json";
    private static final String UPDATE_MASTER_JSON = "update_master_json";
    private static final String MASTER_JSON = "master_json=";
    private static final String GAME_DIRECTORY = "game_directory=";
    private static final String MAKE_GAME = "make_game";
    private static final String SERVER_URL = "https://voogasalad.herokuapp.com/";
    private static final String JOIN_GAME = "join_game";
    private static final int REQUIRED_NUM_PLAYERS = 2;
    private static final HTTPConnection HTTP_CONNECTOR = new HTTPConnection(SERVER_URL);
    /** Length of the co-op build phase, in seconds. */
    private static final int TIMER_END = 30;
    /** Interval between polls of the server, in seconds. */
    private static final double QUERY_SERVER_TIME = 1.0;

    /** Seconds left in the current build phase; 0 while a level is running. */
    private DoubleProperty myTimer;
    /** Game directory identifying this session on the server. */
    private String myDirectory;

    public CoOpManager () {
        super();
        myDirectory = "";
        myTimer = new SimpleDoubleProperty();
    }

    /** Registers a new co-op session for the given game directory on the server. */
    public void startNewGame (String directory) {
        myDirectory = directory;
        HTTP_CONNECTOR.sendPost(MAKE_GAME, GAME_DIRECTORY + directory);
    }

    /** @return true once the server reports enough joined players to start */
    public boolean isReady () {
        return Integer.parseInt(HTTP_CONNECTOR.sendGet(GET_PLAYERS)) >= REQUIRED_NUM_PLAYERS;
    }

    /** @return the build-phase countdown property, suitable for UI binding */
    public DoubleProperty getTimer(){
        return myTimer;
    }

    /**
     * Attempts to join an existing session; the server answers with the game
     * directory, or the literal string "None" when there is nothing to join.
     *
     * @return true if a session was joined
     */
    public boolean joinGame () {
        myDirectory = HTTP_CONNECTOR.sendPost(JOIN_GAME, "");
        return !myDirectory.equals("None");
    }

    /**
     * Loads the joined game into the given pane, opens the chatroom and starts
     * the first build phase.
     *
     * @param engineGroup pane the engine renders into
     * @return the game directory of the running session
     */
    public String initializeGame (Pane engineGroup) {
        addGroups(engineGroup);
        super.initializeGame(myDirectory);
        new Chatroom(); // side-effectful constructor: opens the chat window
        allowInteraction();
        return myDirectory;
    }

    /**
     * Starts a build phase: resets the countdown to {@link #TIMER_END} and
     * polls the server once per second for TIMER_END cycles, then starts the
     * level.
     */
    private void allowInteraction () {
        // BUGFIX: use the TIMER_END constant instead of a duplicated literal 30
        // so the countdown start value and the timeline cycle count cannot
        // drift apart if the phase length is ever changed.
        myTimer.set(TIMER_END);
        Timeline timeline = new Timeline();
        timeline.setCycleCount(TIMER_END);
        timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(QUERY_SERVER_TIME),
                                                 event -> getTowersFromServer()));
        timeline.setOnFinished(event -> startLevel());
        timeline.play();
    }

    /** Run speed is fixed in co-op so both clients stay in lock-step. */
    @Override
    public void changeRunSpeed(double d){
        // nothing
    }

    /** Final sync with the server, then resume gameplay. */
    private void startLevel () {
        getTowersFromServer();
        myTimer.set(0);
        super.resume();
    }

    @Override
    protected void onLevelEnd () {
        super.onLevelEnd();
        allowInteraction(); // open the next build phase between levels
    }

    /** Pushes the local tower list to the server's master JSON. */
    private void writeTowersToServer () {
        HTTP_CONNECTOR.sendPost(UPDATE_MASTER_JSON, MASTER_JSON + convertTowersToString());
    }

    /** Serializes all local towers to the JSON wire format. */
    private String convertTowersToString () {
        List<DataWrapper> wrapper = new ArrayList<>();
        for (BaseTower tower : myTowerGroup) {
            wrapper.add(new DataWrapper(tower));
        }
        return myFileWriter.convertWrappersToJson(wrapper);
    }

    /**
     * Polls the master tower list and reconciles local state with it: towers
     * missing from the server are queued for removal, towers present only on
     * the server are added.  Also ticks the build-phase countdown down by one
     * poll interval.
     */
    private void getTowersFromServer () {
        myTimer.set(myTimer.get() - QUERY_SERVER_TIME);
        String response = HTTP_CONNECTOR.sendGet(GET_MASTER_JSON);
        if (response.trim().equals("None")) {
            return; // server has no master copy yet
        }
        List<DataWrapper> listFromServer = myFileReader.readWrappers(response);
        for (BaseTower tower : myTowerGroup) {
            if (!listFromServer.contains(new DataWrapper(tower))) {
                myTowerGroup.addActorToRemoveBuffer(tower);
            }
            else {
                listFromServer.remove(new DataWrapper(tower));
            }
        }
        for (DataWrapper wrapper : listFromServer) {
            super.addTower(wrapper.getName(), wrapper.getX(), wrapper.getY());
        }
    }

    /** Tower removal is only allowed during the build phase (timer &gt; 0). */
    @Override
    public void removeTower (ImageView node) {
        if (myTimer.get() > 0) {
            getTowersFromServer();
            super.removeTower(node);
            writeTowersToServer();
        }
    }

    /**
     * Tower placement is only allowed during the build phase (timer &gt; 0).
     *
     * @return the placed tower's node, or null when placement is not allowed
     */
    @Override
    public ImageView addTower (String name, double x, double y) {
        if (myTimer.get() > 0) {
            getTowersFromServer();
            ImageView ans = super.addTower(name, x, y);
            writeTowersToServer();
            return ans;
        }
        else {
            return null;
        }
    }
}
package railo.runtime.tag.util; import railo.commons.lang.StringUtil; import railo.runtime.exp.ApplicationException; public class FileUtil { public static final int NAMECONFLICT_UNDEFINED = 1; // can't start at 0 because we need to be able to do a bitmask test public static final int NAMECONFLICT_ERROR = 2; public static final int NAMECONFLICT_SKIP = 4; // same as IGNORE public static final int NAMECONFLICT_OVERWRITE = 8; // same as MERGE public static final int NAMECONFLICT_MAKEUNIQUE = 16; // public static final int NAMECONFLICT_CLOSURE = 32; // FUTURE public static int toNameConflict( String nameConflict ) throws ApplicationException { if(StringUtil.isEmpty(nameConflict,true)) return NAMECONFLICT_UNDEFINED; nameConflict = nameConflict.trim().toLowerCase(); if("error".equals( nameConflict) ) return NAMECONFLICT_ERROR; if("skip".equals(nameConflict) || "ignore".equals(nameConflict)) return NAMECONFLICT_SKIP; if("merge".equals(nameConflict) || "overwrite".equals(nameConflict)) return NAMECONFLICT_OVERWRITE; if("makeunique".equals(nameConflict) || "unique".equals(nameConflict)) return NAMECONFLICT_MAKEUNIQUE; throw new ApplicationException("Invalid value for attribute nameConflict ["+nameConflict+"]", "valid values are [" + fromNameConflictBitMask( Integer.MAX_VALUE ) + "]"); } /** * * @param nameConflict * @param allowedValuesMask * @return * @throws ApplicationException */ public static int toNameConflict( String nameConflict, int allowedValuesMask ) throws ApplicationException { int result = toNameConflict( nameConflict ); if ( ( allowedValuesMask & result ) == 0 ) { throw new ApplicationException("Invalid value for attribute nameConflict ["+nameConflict+"]", "valid values are [" + fromNameConflictBitMask( allowedValuesMask ) + "]"); } return result; } /** * * @param nameConflict * @param allowedValuesMask * @param defaultValue * @return * @throws ApplicationException */ public static int toNameConflict( String nameConflict, int allowedValuesMask, int 
defaultValue ) throws ApplicationException { int result = toNameConflict( nameConflict, allowedValuesMask ); if ( result == NAMECONFLICT_UNDEFINED ) return defaultValue; return result; } public static String fromNameConflictBitMask( int bitmask ) { StringBuilder sb = new StringBuilder(); if ( (bitmask & NAMECONFLICT_ERROR) > 0 ) sb.append( "error" ).append(','); if ( (bitmask & NAMECONFLICT_MAKEUNIQUE) > 0 ) sb.append( "makeunique (unique)" ).append(','); if ( (bitmask & NAMECONFLICT_OVERWRITE) > 0 ) sb.append( "overwrite (merge)" ).append(','); if ( (bitmask & NAMECONFLICT_SKIP) > 0 ) sb.append( "skip (ignore)" ).append(','); if ( sb.length() > 0 ) sb.setLength( sb.length() - 1 ); // remove last , return sb.toString(); } }
package com.intellij.openapi.util;

import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.IntObjectMap;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Provides type-safe access to data.
 *
 * @author max
 * @author Konstantin Bulenkov
 */
public class Key<T> {
  private static final AtomicInteger ourKeysCounter = new AtomicInteger();
  private static final IntObjectMap<Key<?>> allKeys = ContainerUtil.createConcurrentIntObjectWeakValueMap();

  /** Unique, stable index of this key instance; doubles as the hash code. */
  private final int myIndex = ourKeysCounter.getAndIncrement();
  private final String myName; // for debug purposes only

  public Key(@NotNull String name) {
    myName = name;
    allKeys.put(myIndex, this);
  }

  // Final because some clients depend on one-to-one key index/key instance relationship (e.g. UserDataHolderBase).
  @Override
  public final int hashCode() {
    return myIndex;
  }

  @Override
  public final boolean equals(Object obj) {
    // Identity semantics: two keys are equal only when they are the same instance.
    return obj == this;
  }

  @Override
  public String toString() {
    return myName;
  }

  /** Factory method; equivalent to calling the constructor directly. */
  @NotNull
  public static <T> Key<T> create(@NotNull String name) {
    return new Key<>(name);
  }

  /** Reads this key's value from the holder; null when the holder is null or has no value. */
  @Contract("null -> null")
  public T get(@Nullable UserDataHolder holder) {
    if (holder == null) {
      return null;
    }
    return holder.getUserData(this);
  }

  /** Reads this key's value from a plain map keyed by {@code Key}; null when the map is null. */
  @Contract("null -> null")
  public T get(@Nullable Map<Key, ?> holder) {
    if (holder == null) {
      return null;
    }
    //noinspection unchecked
    return (T)holder.get(this);
  }

  /** Like {@link #get(UserDataHolder)} but falls back to {@code defaultValue} when absent. */
  @Contract("_, !null -> !null")
  public T get(@Nullable UserDataHolder holder, T defaultValue) {
    final T stored = get(holder);
    if (stored != null) {
      return stored;
    }
    return defaultValue;
  }

  /** Reads the value, failing (via {@link ObjectUtils#notNull}) when it is absent. */
  @NotNull
  public T getRequired(@NotNull UserDataHolder holder) {
    return ObjectUtils.notNull(holder.getUserData(this));
  }

  /**
   * Returns {@code true} if and only if the {@code holder} has not null value for the key.
   */
  public boolean isIn(@Nullable UserDataHolder holder) {
    return get(holder) != null;
  }

  /** Stores {@code value} under this key; a no-op when the holder is null. */
  public void set(@Nullable UserDataHolder holder, @Nullable T value) {
    if (holder == null) {
      return;
    }
    holder.putUserData(this, value);
  }

  /** Stores {@code value} under this key in a plain map; a no-op when the map is null. */
  public void set(@Nullable Map<Key, Object> holder, T value) {
    if (holder == null) {
      return;
    }
    holder.put(this, value);
  }

  /** Looks up a live key instance by its index; weak values mean it may have been collected. */
  @Nullable("can become null if the key has been gc-ed")
  public static <T> Key<T> getKeyByIndex(int index) {
    //noinspection unchecked
    return (Key<T>)allKeys.get(index);
  }

  /** @deprecated access to a key via its name is a dirty hack; use Key instance directly instead */
  @Deprecated
  @Nullable
  public static Key<?> findKeyByName(String name) {
    // Linear scan over all live keys; names are not unique nor indexed.
    for (IntObjectMap.Entry<Key<?>> entry : allKeys.entrySet()) {
      Key<?> candidate = entry.getValue();
      if (name.equals(candidate.myName)) {
        return candidate;
      }
    }
    return null;
  }
}
package pl.shockah.godwit.asset;

import com.badlogic.gdx.assets.AssetDescriptor;
import com.badlogic.gdx.assets.AssetLoaderParameters;
import com.badlogic.gdx.assets.AssetManager;
import com.badlogic.gdx.utils.Array;

import java.lang.reflect.Field;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import lombok.Getter;
import pl.shockah.godwit.Godwit;

/**
 * Handle to a single libGDX asset, identified by file name + type, managed
 * through the global {@link Godwit} {@link AssetManager}.
 */
public class SingleAsset<T> extends Asset<T> {
	// Lombok's @Getter(lazy = true) generates a lazily-initialized
	// getLoadQueueField() accessor; getLoadQueueFieldLazy() runs on first use.
	@Getter(lazy = true)
	private static final Field loadQueueField = getLoadQueueFieldLazy();

	@Nonnull
	public final String fileName;

	@Nonnull
	public final Class<T> clazz;

	// Optional loader parameters forwarded into the AssetDescriptor.
	@Nullable
	public final AssetLoaderParameters<T> parameters;

	@Nonnull
	public final AssetDescriptor<T> descriptor;

	public SingleAsset(@Nonnull String fileName, @Nonnull Class<T> clazz) {
		this(fileName, clazz, null);
	}

	public SingleAsset(@Nonnull String fileName, @Nonnull Class<T> clazz, @Nullable AssetLoaderParameters<T> parameters) {
		this.fileName = fileName;
		this.clazz = clazz;
		this.parameters = parameters;
		descriptor = new AssetDescriptor<T>(fileName, clazz, parameters);
	}

	// Reflectively opens AssetManager's private "loadQueue" field; libGDX does
	// not expose the pending-load queue through any public API.
	private static Field getLoadQueueFieldLazy() {
		try {
			Field field = AssetManager.class.getDeclaredField("loadQueue");
			field.setAccessible(true);
			return field;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	// The app-wide manager all SingleAsset operations go through.
	protected static AssetManager getAssetManager() {
		return Godwit.getInstance().getAssetManager();
	}

	// Snapshot of the manager's internal pending-load queue (via reflection).
	@SuppressWarnings("unchecked")
	private static Array<AssetDescriptor> getLoadQueue() {
		try {
			return (Array<AssetDescriptor>)getLoadQueueField().get(getAssetManager());
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	// Queues this asset for asynchronous loading.
	public void load() {
		getAssetManager().load(descriptor);
	}

	// Unloads the asset only if it has actually finished loading.
	public void unload() {
		AssetManager manager = getAssetManager();
		if (manager.isLoaded(fileName, clazz))
			manager.unload(fileName);
	}

	/**
	 * Blocks until this asset is loaded.  No-op when already loaded; throws
	 * when the asset was never queued via {@link #load()}, since the manager
	 * would otherwise have nothing to finish.
	 */
	public void finishLoading() {
		AssetManager manager = getAssetManager();
		if (manager.isLoaded(fileName, clazz))
			return;
		// Note: the loop variable deliberately shadows this.descriptor; each
		// queued entry is compared against this asset's type and file name.
		for (AssetDescriptor<?> descriptor : getLoadQueue()) {
			if (descriptor.type == this.descriptor.type && descriptor.fileName.equals(this.descriptor.fileName)) {
				manager.finishLoadingAsset(fileName);
				return;
			}
		}
		throw new IllegalStateException(String.format("SingleAsset %s is not queued for loading.", descriptor));
	}

	// Returns the loaded asset, blocking until loading completes if needed.
	@Override
	public T get() {
		finishLoading();
		return getAssetManager().get(descriptor);
	}
}
package com.rexsl.core;

import com.ymock.util.Logger;
import java.io.File;
import java.lang.annotation.Annotation;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.validation.SchemaFactory;
import org.apache.commons.lang.StringEscapeUtils;

/**
 * Provider of JAXB {@link Marshaller} for JAX-RS framework.
 *
 * <p>You don't need to use this class directly. It is made public only because
 * JAX-RS implementation should be able to discover it in classpath.
 *
 * @author Yegor Bugayenko (yegor@rexsl.com)
 * @author Krzysztof Krason (Krzysztof.Krason@gmail.com)
 * @version $Id$
 * @since 0.2
 */
@Provider
@Produces({ MediaType.APPLICATION_XML, MediaType.TEXT_XML })
public final class XslResolver implements ContextResolver<Marshaller> {

    /**
     * Folder with XSD files; stays null when the servlet init parameter
     * "com.rexsl.core.XSD_FOLDER" is not configured (validation disabled).
     * @see #setServletContext(ServletContext)
     */
    private transient File xsdFolder;

    /**
     * Classes to process (accumulated into a single JAXBContext).
     */
    private final transient Set<Class> classes = new HashSet<Class>();

    /**
     * JAXB context, rebuilt each time a new class is added.
     */
    private transient JAXBContext context;

    /**
     * Servlet request, used to build absolute stylesheet URLs.
     */
    private transient HttpServletRequest request;

    /**
     * Set servlet context from container, to be called by JAX-RS framework
     * because of {@link Context} annotation.
     * @param ctx The context
     */
    @Context
    public void setServletContext(final ServletContext ctx) {
        if (ctx == null) {
            throw new IllegalArgumentException("ServletContext can't be NULL");
        }
        final String name = ctx.getInitParameter("com.rexsl.core.XSD_FOLDER");
        if (name != null) {
            this.xsdFolder = new File(name);
            Logger.debug(
                this,
                "#setServletContext(%s): XSD folder set to '%s'",
                ctx.getClass().getName(),
                this.xsdFolder
            );
        }
        Logger.debug(
            this,
            "#setServletContext(%s): context injected by JAX-RS",
            ctx.getClass().getName()
        );
    }

    /**
     * Set request to provide information about resource context.
     * @param req The request
     */
    @Context
    public void setHttpServletRequest(final HttpServletRequest req) {
        this.request = req;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Produces a formatted marshaller whose output is prefixed with an
     * xml-stylesheet processing instruction pointing at this type's XSL;
     * when an XSD folder is configured, schema validation is attached too.
     */
    @Override
    public Marshaller getContext(final Class<?> type) {
        Marshaller mrsh;
        try {
            mrsh = this.buildContext(type).createMarshaller();
            mrsh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
            final String header = String.format(
                "\n<?xml-stylesheet type='text/xsl' href='%s'?>",
                StringEscapeUtils.escapeXml(this.stylesheet(type))
            );
            // "com.sun.xml.bind.xmlHeaders" is a JAXB-RI-specific property for
            // injecting raw text after the XML declaration.
            mrsh.setProperty("com.sun.xml.bind.xmlHeaders", header);
        } catch (javax.xml.bind.JAXBException ex) {
            throw new IllegalStateException(ex);
        }
        if (this.xsdFolder == null) {
            Logger.debug(
                this,
                "#getContext(%s): marshaller created (no XSD validator)",
                type.getName()
            );
        } else {
            mrsh = this.addXsdValidator(mrsh, type);
        }
        return mrsh;
    }

    /**
     * Add new class to context.
     *
     * <p>Synchronized because the classes set and the shared JAXBContext are
     * rebuilt together; already-known classes are a no-op.
     * @param cls The class we should add
     */
    public void add(final Class cls) {
        synchronized (this) {
            if (!this.classes.contains(cls)) {
                try {
                    this.classes.add(cls);
                    this.context = JAXBContext.newInstance(
                        this.classes.toArray(new Class[this.classes.size()])
                    );
                    Logger.info(
                        this,
                        // @checkstyle LineLength (1 line)
                        "#add(%s): added to JAXBContext (%d total), stylesheet: '%s'",
                        cls.getName(),
                        this.classes.size(),
                        this.stylesheet(cls)
                    );
                } catch (javax.xml.bind.JAXBException ex) {
                    throw new IllegalStateException(ex);
                }
            }
        }
    }

    /**
     * Create and return a context.
     * @param cls The class we should process
     * @return The context
     */
    private JAXBContext buildContext(final Class cls) {
        this.add(cls);
        return this.context;
    }

    /**
     * Returns the name of XSL stylesheet for this type.
     *
     * <p>Defaults to "/xsl/&lt;SimpleName&gt;.xsl" (made absolute using the
     * current request, when available) unless the type carries a
     * {@link Stylesheet} annotation, which wins outright.
     * @param type The class
     * @return The name of stylesheet
     * @see #getContext(Class)
     */
    private String stylesheet(final Class<?> type) {
        final Annotation antn = type.getAnnotation(Stylesheet.class);
        String stylesheet;
        if (antn == null) {
            stylesheet = String.format(
                "/xsl/%s.xsl",
                type.getSimpleName()
            );
            if (this.request != null) {
                try {
                    stylesheet = new URL(
                        this.request.getScheme(),
                        this.request.getServerName(),
                        this.request.getServerPort(),
                        String.format(
                            "%s%s",
                            this.request.getContextPath(),
                            stylesheet
                        )
                    ).toString();
                } catch (MalformedURLException ex) {
                    throw new IllegalStateException(ex);
                }
            }
        } else {
            stylesheet = ((Stylesheet) antn).value();
        }
        Logger.debug(
            XslResolver.class,
            "#stylesheet(%s): '%s' stylesheet discovered",
            type.getName(),
            stylesheet
        );
        return stylesheet;
    }

    /**
     * Configure marshaller and return a new one (or the same).
     *
     * <p>Missing XSD files are tolerated with a warning; a present but broken
     * schema is a hard failure.
     * @param mrsh The marshaller, already created and ready to marshal
     * @param type The class to be marshalled
     * @return New marshaller to be used instead
     * @see #getContext(Class)
     */
    private Marshaller addXsdValidator(final Marshaller mrsh,
        final Class<?> type) {
        final String name = this.schema(type);
        if (name.isEmpty()) {
            Logger.debug(
                this,
                "Schema validation turned off for class '%s'",
                type.getName()
            );
        } else {
            final File xsd = new File(this.xsdFolder, name);
            if (xsd.exists()) {
                final SchemaFactory factory = SchemaFactory.newInstance(
                    XMLConstants.W3C_XML_SCHEMA_NS_URI
                );
                try {
                    mrsh.setSchema(factory.newSchema(xsd));
                } catch (org.xml.sax.SAXException ex) {
                    throw new IllegalStateException(
                        String.format(
                            "Failed to use XSD schema from '%s' for class '%s'",
                            xsd,
                            type.getName()
                        ),
                        ex
                    );
                }
                try {
                    mrsh.setEventHandler(new XsdEventHandler());
                } catch (javax.xml.bind.JAXBException ex) {
                    throw new IllegalStateException(ex);
                }
                Logger.debug(
                    this,
                    "Class '%s' will be validated with '%s' schema",
                    type.getName(),
                    xsd
                );
            } else {
                Logger.warn(
                    this,
                    "No XSD schema for class '%s' in '%s' file",
                    type.getName(),
                    xsd
                );
            }
        }
        return mrsh;
    }

    /**
     * Returns the name of XSD schema for this type.
     *
     * <p>Defaults to "&lt;FullyQualifiedName&gt;.xsd" unless the type carries
     * a {@link Schema} annotation; {@code ignore()} disables validation by
     * returning an empty name.
     * @param type The class
     * @return The name of XSD file
     */
    private static String schema(final Class<?> type) {
        final Annotation antn = type.getAnnotation(Schema.class);
        String schema;
        if (antn == null) {
            schema = String.format("%s.xsd", type.getName());
        } else {
            if (((Schema) antn).ignore()) {
                schema = "";
            } else {
                schema = ((Schema) antn).value();
            }
        }
        Logger.debug(
            XslResolver.class,
            "#schema(%s): '%s' schema discovered",
            type.getName(),
            schema
        );
        return schema;
    }
}
package arez.test;

import arez.AbstractArezTest;
import arez.Arez;
import arez.ArezContext;
import arez.ArezObserverTestUtil;
import arez.ArezTestUtil;
import arez.ComputedValue;
import arez.Disposable;
import arez.Observable;
import arez.Observer;
import arez.ObserverErrorHandler;
import arez.Procedure;
import arez.SpyEventHandler;
import arez.Zone;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nonnull;
import org.realityforge.guiceyloops.shared.ValueUtil;
import org.testng.annotations.Test;
import static org.testng.Assert.*;

/**
 * This class tests all the public API of Arez and identifies all
 * the elements that should be visible outside package.
 */
@SuppressWarnings( "Duplicates" )
public class ExternalApiTest
  extends AbstractArezTest
{
  // An autorun created with runImmediately=false only fires once the
  // scheduler is explicitly triggered.
  @Test
  public void triggerScheduler()
  {
    final ArezContext context = Arez.context();
    final AtomicInteger callCount = new AtomicInteger();
    context.autorun( ValueUtil.randomString(), false, callCount::incrementAndGet, false );
    assertEquals( callCount.get(), 0 );
    context.triggerScheduler();
    assertEquals( callCount.get(), 1 );
  }

  // The following compile-time-flag tests flip a feature off via ArezTestUtil
  // and verify the corresponding query reflects the change.
  @Test
  public void areNamesEnabled()
    throws Exception
  {
    assertTrue( Arez.areNamesEnabled() );
    ArezTestUtil.disableNames();
    assertFalse( Arez.areNamesEnabled() );
  }

  @Test
  public void arePropertyIntrospectorsEnabled()
  {
    assertTrue( Arez.arePropertyIntrospectorsEnabled() );
    ArezTestUtil.disablePropertyIntrospectors();
    assertFalse( Arez.arePropertyIntrospectorsEnabled() );
  }

  @Test
  public void areRepositoryResultsUnmodifiable()
  {
    assertTrue( Arez.areCollectionsPropertiesUnmodifiable() );
    ArezTestUtil.makeCollectionPropertiesModifiable();
    assertFalse( Arez.areCollectionsPropertiesUnmodifiable() );
  }

  @Test
  public void areSpiesEnabled()
  {
    assertTrue( Arez.areSpiesEnabled() );
    ArezTestUtil.disableSpies();
    assertFalse( Arez.areSpiesEnabled() );
  }

  @Test
  public void areZonesEnabled()
  {
    ArezTestUtil.disableZones();
    assertFalse( Arez.areZonesEnabled() );
    ArezTestUtil.enableZones();
    assertTrue( Arez.areZonesEnabled() );
  }

  @Test
  public void areNativeComponentsEnabled()
  {
    assertTrue( Arez.areNativeComponentsEnabled() );
    ArezTestUtil.disableNativeComponents();
    assertFalse( Arez.areNativeComponentsEnabled() );
  }

  @Test
  public void areRegistriesEnabled()
  {
    assertTrue( Arez.areRegistriesEnabled() );
    ArezTestUtil.disableRegistries();
    assertFalse( Arez.areRegistriesEnabled() );
  }

  // ComputedValue is readable inside a transaction and unusable after dispose.
  @Test
  public void createComputedValue()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    final String name = ValueUtil.randomString();
    final ComputedValue<String> computedValue =
      context.createComputedValue( name, () -> "", Objects::equals );

    context.action( ValueUtil.randomString(), true, () -> {
      assertEquals( computedValue.getName(), name );
      assertEquals( computedValue.get(), "" );
      assertEquals( context.isTransactionActive(), true );
      computedValue.dispose();
      assertThrows( computedValue::get );
    } );
  }

  // An autorun with runImmediately=true reacts once on creation and becomes
  // inactive once disposed.
  @Test
  public void createReactionObserver()
    throws Exception
  {
    final ArezContext context = Arez.context();
    final AtomicInteger callCount = new AtomicInteger();
    final String name = ValueUtil.randomString();
    final Observer observer =
      context.autorun( name, false, callCount::incrementAndGet, true );
    assertEquals( observer.getName(), name );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
    assertEquals( callCount.get(), 1 );
    observer.dispose();
    assertEquals( ArezObserverTestUtil.isActive( observer ), false );
  }

  // Handlers registered via addObserverErrorHandler see reaction errors;
  // removed handlers do not.
  @Test
  public void observerErrorHandler()
    throws Exception
  {
    setIgnoreObserverErrors( true );
    setPrintObserverErrors( false );
    final ArezContext context = Arez.context();
    final AtomicInteger callCount = new AtomicInteger();
    final ObserverErrorHandler handler =
      ( observer, error, throwable ) -> callCount.incrementAndGet();
    context.addObserverErrorHandler( handler );
    final Procedure reaction = () -> {
      throw new RuntimeException();
    };
    // This will run immediately and generate an exception
    context.autorun( ValueUtil.randomString(), false, reaction, true );
    assertEquals( callCount.get(), 1 );
    context.removeObserverErrorHandler( handler );
    // This will run immediately and generate an exception
    context.autorun( ValueUtil.randomString(), false, reaction, true );
    assertEquals( callCount.get(), 1 );
  }

  // Spy handlers receive events only while registered.
  @Test
  public void spyEventHandler()
    throws Exception
  {
    final ArezContext context = Arez.context();
    final AtomicInteger callCount = new AtomicInteger();
    final SpyEventHandler handler = e -> callCount.incrementAndGet();
    context.getSpy().addSpyEventHandler( handler );
    // Generate an event
    context.createObservable();
    assertEquals( callCount.get(), 1 );
    context.getSpy().removeSpyEventHandler( handler );
    // Generate an event
    context.createObservable();
    assertEquals( callCount.get(), 1 );
  }

  // safeAction variant of the single-observable change/react round trip.
  @Test
  public void safeProcedure_interactionWithSingleObservable()
    throws Exception
  {
    final ArezContext context = Arez.context();
    final Observable observable = context.createObservable();
    final AtomicInteger reactionCount = new AtomicInteger();
    final Observer observer =
      context.autorun( ValueUtil.randomString(), false, () -> {
        observable.reportObserved();
        reactionCount.incrementAndGet();
      }, true );
    assertEquals( reactionCount.get(), 1 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
    context.safeAction( ValueUtil.randomString(), true, observable::reportChanged );
    assertEquals( reactionCount.get(), 2 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
  }

  @Test
  public void interactionWithSingleObservable()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    final Observable observable = context.createObservable();
    final AtomicInteger reactionCount = new AtomicInteger();
    final Observer observer =
      context.autorun( ValueUtil.randomString(), false, () -> {
        observable.reportObserved();
        reactionCount.incrementAndGet();
      }, true );
    assertEquals( reactionCount.get(), 1 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
    // Run an "action"
    context.action( ValueUtil.randomString(), true, observable::reportChanged );
    assertEquals( reactionCount.get(), 2 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
  }

  // Reactions fire once per transaction regardless of how many observed
  // observables changed, and not at all for unobserved observables.
  @Test
  public void interactionWithMultipleObservable()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    final Observable observable1 = context.createObservable();
    final Observable observable2 = context.createObservable();
    final Observable observable3 = context.createObservable();
    final Observable observable4 = context.createObservable();
    final AtomicInteger reactionCount = new AtomicInteger();
    final Observer observer =
      context.autorun( ValueUtil.randomString(), false, () -> {
        observable1.reportObserved();
        observable2.reportObserved();
        observable3.reportObserved();
        reactionCount.incrementAndGet();
      }, true );
    assertEquals( reactionCount.get(), 1 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
    // Run an "action"
    context.action( ValueUtil.randomString(), true, observable1::reportChanged );
    assertEquals( reactionCount.get(), 2 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
    // Update observer1+observer2 in transaction
    context.action( ValueUtil.randomString(), true, () -> {
      observable1.reportChanged();
      observable2.reportChanged();
    } );
    assertEquals( reactionCount.get(), 3 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
    context.action( ValueUtil.randomString(), true, () -> {
      observable3.reportChanged();
      observable4.reportChanged();
    } );
    assertEquals( reactionCount.get(), 4 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
    // observable4 should not cause a reaction as not observed
    context.action( ValueUtil.randomString(), true, observable4::reportChanged );
    assertEquals( reactionCount.get(), 4 );
    assertEquals( ArezObserverTestUtil.isActive( observer ), true );
  }

  // Function-returning action: the body runs inside a transaction and its
  // return value is passed through.
  @Test
  public void action_function()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    final Observable observable = context.createObservable();
    assertNotInTransaction( context, observable );
    final String expectedValue = ValueUtil.randomString();
    final String v0 =
      context.action( ValueUtil.randomString(), false, () -> {
        assertInTransaction( context, observable );
        return expectedValue;
      } );
    assertNotInTransaction( context, observable );
    assertEquals( v0, expectedValue );
  }

  @Test
  public void action_safeFunction()
    throws Exception
  {
    final ArezContext context = Arez.context();
    final Observable observable = context.createObservable();
    assertNotInTransaction( context, observable );
    final String expectedValue = ValueUtil.randomString();
    final String v0 =
      context.safeAction( ValueUtil.randomString(), false, () -> {
        assertInTransaction( context, observable );
        return expectedValue;
      } );
    assertNotInTransaction( context, observable );
    assertEquals( v0, expectedValue );
  }

  // Actions nest: a transaction remains active through inner actions and
  // closes only when the outermost action completes.
  @Test
  public void proceduresCanBeNested()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    final Observable observable = context.createObservable();
    assertNotInTransaction( context, observable );
    context.action( ValueUtil.randomString(), false, () -> {
      assertInTransaction( context, observable );
      //First nested exception
      context.action( ValueUtil.randomString(), false, () -> {
        assertInTransaction( context, observable );
        //Second nested exception
        context.action( ValueUtil.randomString(),
                        false,
                        () -> assertInTransaction( context, observable ) );
        assertInTransaction( context, observable );
      } );
      assertInTransaction( context, observable );
    } );
    assertNotInTransaction( context, observable );
  }

  // Same nesting guarantee for function-returning actions; return values
  // propagate out through every level.
  @Test
  public void action_nestedFunctions()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    final Observable observable = context.createObservable();
    assertNotInTransaction( context, observable );
    final String expectedValue = ValueUtil.randomString();
    final String v0 =
      context.action( ValueUtil.randomString(), false, () -> {
        assertInTransaction( context, observable );
        //First nested exception
        final String v1 =
          context.action( ValueUtil.randomString(), false, () -> {
            assertInTransaction( context, observable );
            //Second nested exception
            final String v2 =
              context.action( ValueUtil.randomString(), false, () -> {
                assertInTransaction( context, observable );
                return expectedValue;
              } );
            assertInTransaction( context, observable );
            return v2;
          } );
        assertInTransaction( context, observable );
        return v1;
      } );
    assertNotInTransaction( context, observable );
    assertEquals( v0, expectedValue );
  }

  // Transactions are per-zone: entering another zone's action suspends the
  // first zone's transaction, and reactions run when each zone's own
  // outermost transaction completes.
  @Test
  public void supportsMultipleContexts()
    throws Throwable
  {
    final Zone zone1 = Arez.createZone();
    final Zone zone2 = Arez.createZone();
    final ArezContext context1 = zone1.getContext();
    final ArezContext context2 = zone2.getContext();
    final Observable observable1 = context1.createObservable();
    final Observable observable2 = context2.createObservable();
    final AtomicInteger autorunCallCount1 = new AtomicInteger();
    final AtomicInteger autorunCallCount2 = new AtomicInteger();
    context1.autorun( () -> {
      observable1.reportObserved();
      autorunCallCount1.incrementAndGet();
    } );
    context2.autorun( () -> {
      observable2.reportObserved();
      autorunCallCount2.incrementAndGet();
    } );
    assertEquals( autorunCallCount1.get(), 1 );
    assertEquals( autorunCallCount2.get(), 1 );
    assertNotInTransaction( context1, observable1 );
    assertNotInTransaction( context2, observable2 );
    context1.action( () -> {
      assertInTransaction( context1, observable1 );
      //First nested exception
      context1.action( () -> {
        assertInTransaction( context1, observable1 );
        observable1.reportChanged();
        //Second nested exception
        context1.action( () -> assertInTransaction( context1, observable1 ) );
        context2.action( () -> {
          assertNotInTransaction( context1, observable1 );
          assertInTransaction( context2, observable2 );
          observable2.reportChanged();
          context2.action( () -> {
            assertNotInTransaction( context1, observable1 );
            assertInTransaction( context2, observable2 );
            observable2.reportChanged();
          } );
          assertEquals( autorunCallCount1.get(), 1 );
          context1.action( () -> assertInTransaction( context1, observable1 ) );
          // Still no autorun reaction as it has transaction up the stack
          assertEquals( autorunCallCount1.get(), 1 );
          assertEquals( autorunCallCount2.get(), 1 );
        } );
        // Second context runs now as it got to its top level transaction
        assertEquals( autorunCallCount2.get(), 2 );
        assertInTransaction( context1, observable1 );
        assertNotInTransaction( context2, observable2 );
      } );
      assertInTransaction( context1, observable1 );
      assertNotInTransaction( context2, observable2 );
    } );
    assertEquals( autorunCallCount1.get(), 2 );
    assertEquals( autorunCallCount2.get(), 2 );
    assertNotInTransaction( context1, observable1 );
    assertNotInTransaction( context2, observable2 );
  }

  // Pause locks nest and are idempotent on dispose; the scheduler resumes
  // (and pending reactions run) only when the outermost lock is disposed.
  @Test
  public void pauseScheduler()
    throws Exception
  {
    final ArezContext context = Arez.context();
    final Disposable lock1 = context.pauseScheduler();
    assertEquals( context.isSchedulerPaused(), true );
    final AtomicInteger callCount = new AtomicInteger();
    // This would normally be scheduled and run now but scheduler should be paused
    context.autorun( ValueUtil.randomString(), false, callCount::incrementAndGet, false );
    context.triggerScheduler();
    assertEquals( callCount.get(), 0 );
    final Disposable lock2 = context.pauseScheduler();
    lock2.dispose();
    assertEquals( context.isSchedulerPaused(), true );
    // Already disposed so this is a noop
    lock2.dispose();
    assertEquals( callCount.get(), 0 );
    lock1.dispose();
    assertEquals( callCount.get(), 1 );
    assertEquals( context.isSchedulerPaused(), false );
  }

  /**
   * Test we are in a transaction by trying to observe an observable.
   */
  private void assertInTransaction( @Nonnull final ArezContext context,
                                    @Nonnull final Observable observable )
  {
    assertEquals( context.isTransactionActive(), true );
    observable.reportObserved();
  }

  /**
   * Test we are not in a transaction by trying to observe an observable.
   */
  private void assertNotInTransaction( @Nonnull final ArezContext context,
                                       @Nonnull final Observable observable )
  {
    assertEquals( context.isTransactionActive(), false );
    assertThrows( observable::reportObserved );
  }
}
package git4idea.actions;

import com.intellij.dvcs.DvcsUtil;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import git4idea.branch.GitRebaseParams;
import git4idea.i18n.GitBundle;
import git4idea.rebase.GitRebaseDialog;
import git4idea.rebase.GitRebaseUtils;
import git4idea.repo.GitRepository;
import git4idea.repo.GitRepositoryManager;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import static com.intellij.dvcs.DvcsUtil.sortRepositories;
import static git4idea.GitUtil.*;
import static git4idea.rebase.GitRebaseUtils.getRebasingRepositories;
import static java.util.Collections.singletonList;

/**
 * Action that opens the rebase dialog for a chosen git root and runs the
 * rebase in a background task.
 */
public class GitRebase extends DumbAwareAction {
  @Override
  public void update(@NotNull AnActionEvent e) {
    super.update(e);
    Project project = e.getProject();
    // Enabled only when a project with git repositories is open and no
    // rebase is already in progress.
    boolean canRebase = project != null
                        && hasGitRepositories(project)
                        && getRebasingRepositories(project).isEmpty();
    e.getPresentation().setEnabledAndVisible(canRebase);
  }

  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    final Project project = e.getRequiredData(CommonDataKeys.PROJECT);
    // Offer only repositories that are not currently mid-rebase.
    List<GitRepository> candidates = new ArrayList<>(getRepositories(project));
    candidates.removeAll(getRebasingRepositories(project));
    List<VirtualFile> roots = new ArrayList<>(getRootsFromRepositories(sortRepositories(candidates)));
    VirtualFile defaultRoot = DvcsUtil.guessVcsRoot(project, e.getData(CommonDataKeys.VIRTUAL_FILE));
    final GitRebaseDialog dialog = new GitRebaseDialog(project, roots, defaultRoot);
    if (!dialog.showAndGet()) {
      return; // user cancelled
    }
    final VirtualFile root = dialog.gitRoot();
    final GitRebaseParams selectedParams = dialog.getSelectedParams();
    ProgressManager.getInstance().run(
      new Task.Backgroundable(project, GitBundle.getString("rebase.progress.indicator.title")) {
        @Override
        public void run(@NotNull ProgressIndicator indicator) {
          GitRepository selectedRepository =
            Objects.requireNonNull(GitRepositoryManager.getInstance(project).getRepositoryForRoot(root));
          GitRebaseUtils.rebase(project, singletonList(selectedRepository), selectedParams, indicator);
        }
      });
  }
}
package sofia.graphics.collision; import sofia.graphics.Shape; import static sofia.graphics.ShapeAccessUtilities.*; import java.util.*; /** * A collision checker using a Binary Space Partition tree. * * <p>Each node of the tree represents a rectangular area, and potentially has * two non-overlapping child nodes which together cover the same area as their * parent. * * @author Davin McCall */ public class IBSPColChecker implements CollisionChecker { public static final int X_AXIS = 0; public static final int Y_AXIS = 1; public static final int PARENT_LEFT = 0; public static final int PARENT_RIGHT = 1; public static final int PARENT_NONE = 3; // no particular side public static final int REBALANCE_THRESHOLD = 20; private GOCollisionQuery shapeQuery = new GOCollisionQuery(); private NeighbourCollisionQuery neighbourQuery = new NeighbourCollisionQuery(); private PointCollisionQuery pointQuery = new PointCollisionQuery(); private InRangeQuery inRangeQuery = new InRangeQuery(); private BSPNode bspTree; /* * @see greenfoot.collision.CollisionChecker#addObject(greenfoot.Actor) */ public synchronized void addObject(Shape shape) { // checkConsistency(); Rect bounds = getShapeBounds(shape); // FIXME Hack hack hack hack hack hack hack // (The while loop below does NOT like shapes with zero width or zero // height) if (bounds.getWidth() == 0) { bounds.setWidth(0.001f); } if (bounds.getHeight() == 0) { bounds.setHeight(0.001f); } if (bspTree == null) { // The tree is currently empty; just create a new node containing only the one actor int splitAxis; float splitPos; if (bounds.getWidth() > bounds.getHeight()) { splitAxis = X_AXIS; splitPos = bounds.getMiddleX(); } else { splitAxis = Y_AXIS; splitPos = bounds.getMiddleY(); } bspTree = BSPNodeCache.getBSPNode(); bspTree.getArea().copyFrom(bounds); bspTree.setSplitAxis(splitAxis); bspTree.setSplitPos(splitPos); bspTree.addShape(shape); } else { Rect treeArea = bspTree.getArea(); while (! 
treeArea.contains(bounds)) { // We increase the tree area in up to four directions: System.out.println("bounds = " + bounds + ", treeArea = " + treeArea); if (bounds.getX() < treeArea.getX()) { // double the width out to the left float bx = treeArea.getX() - treeArea.getWidth(); Rect newArea = new Rect(bx, treeArea.getY(), treeArea.getRight() - bx, treeArea.getHeight()); BSPNode newTop = BSPNodeCache.getBSPNode(); newTop.getArea().copyFrom(newArea); newTop.setSplitAxis(X_AXIS); newTop.setSplitPos(treeArea.getX()); newTop.setChild(PARENT_RIGHT, bspTree); bspTree = newTop; treeArea = newArea; System.out.println("left: newArea = " + newArea); } if (bounds.getRight() > treeArea.getRight()) { // double the width out to the right float bx = treeArea.getRight() + treeArea.getWidth(); Rect newArea = new Rect(treeArea.getX(), treeArea.getY(), bx - treeArea.getX(), treeArea.getHeight()); BSPNode newTop = BSPNodeCache.getBSPNode(); newTop.getArea().copyFrom(newArea); newTop.setSplitAxis(X_AXIS); newTop.setSplitPos(treeArea.getRight()); newTop.setChild(PARENT_LEFT, bspTree); bspTree = newTop; treeArea = newArea; System.out.println("right: newArea = " + newArea); } if (bounds.getY() < treeArea.getY()) { // double the height out the top float by = treeArea.getY() - treeArea.getHeight(); Rect newArea = new Rect(treeArea.getX(), by, treeArea.getWidth(), treeArea.getTop() - by); BSPNode newTop = BSPNodeCache.getBSPNode(); newTop.getArea().copyFrom(newArea); newTop.setSplitAxis(Y_AXIS); newTop.setSplitPos(treeArea.getY()); newTop.setChild(PARENT_RIGHT, bspTree); bspTree = newTop; treeArea = newArea; System.out.println("top: newArea = " + newArea); } if (bounds.getTop() > treeArea.getTop()) { // double the height out the bottom float by = treeArea.getTop() + treeArea.getHeight(); Rect newArea = new Rect(treeArea.getX(), treeArea.getY(), treeArea.getWidth(), by - treeArea.getY()); BSPNode newTop = BSPNodeCache.getBSPNode(); newTop.getArea().copyFrom(newArea); 
newTop.setSplitAxis(Y_AXIS); newTop.setSplitPos(treeArea.getTop()); newTop.setChild(PARENT_LEFT, bspTree); bspTree = newTop; treeArea = newArea; System.out.println("bottom: newArea = " + newArea); } } insertObject(shape, bounds, bounds, treeArea, bspTree); } // checkConsistency(); } /** * Check the consistency of the tree, useful for debugging. */ /** * Insert a shape into the tree at the given position * * @param shape The shape to insert * @param shapeBounds The total bounds of the shape * @param bounds The bounds of the shape (limited to the present area) * @param area The total area represented by the current search node * @param node The current search node (null, if the search has reached its end!) */ private void insertObject(Shape shape, Rect shapeBounds, Rect bounds, Rect area, BSPNode node) { // the current search node might already contain the // actor... if (node.containsShape(shape)) { return; } // If there's no actor at all in the node yet, then we can stop here. // Also, if the area is sufficiently small, there's no point subdividing it. if (node.isEmpty() || (area.getWidth() <= shapeBounds.getWidth() && area.getHeight() <= shapeBounds.getHeight())) { node.addShape(shape); return; } // The search continues... 
        // Recurse into whichever child areas the shape's bounds overlap,
        // creating child nodes on demand.
        Rect leftArea = node.getLeftArea();
        Rect rightArea = node.getRightArea();
        Rect leftIntersects = Rect.getIntersection(leftArea, bounds);
        Rect rightIntersects = Rect.getIntersection(rightArea, bounds);

        if (leftIntersects != null) {
            if (node.getLeft() == null) {
                BSPNode newLeft = createNewNode(leftArea);
                newLeft.addShape(shape);
                node.setChild(PARENT_LEFT, newLeft);
            }
            else {
                insertObject(shape, shapeBounds, leftIntersects, leftArea, node.getLeft());
            }
        }

        if (rightIntersects != null) {
            if (node.getRight() == null) {
                BSPNode newRight = createNewNode(rightArea);
                newRight.addShape(shape);
                node.setChild(PARENT_RIGHT, newRight);
            }
            else {
                insertObject(shape, shapeBounds, rightIntersects, rightArea, node.getRight());
            }
        }
    }

    /**
     * Create a new node for the given area.
     */
    private BSPNode createNewNode(Rect area)
    {
        int splitAxis;
        float splitPos;
        // Split along the longer dimension of the area.
        if (area.getWidth() > area.getHeight()) {
            splitAxis = X_AXIS;
            splitPos = area.getMiddleX();
        }
        else {
            splitAxis = Y_AXIS;
            splitPos = area.getMiddleY();
        }

        BSPNode newNode = BSPNodeCache.getBSPNode();
        newNode.setArea(area);
        newNode.setSplitAxis(splitAxis);
        newNode.setSplitPos(splitPos);
        return newNode;
    }

    /**
     * Get the bounding rectangle of a shape as a fresh Rect instance.
     */
    public final Rect getShapeBounds(Shape shape)
    {
        Rect r = new Rect(shape.getBounds());
        return r;
    }

    // (A commented-out printTree/println debugging helper was removed here;
    // see version-control history if it is ever needed again.)

    /**
     * Remove a shape from the tree, releasing every tree node it occupied and
     * pruning any BSP nodes left empty.
     */
    public synchronized void removeObject(Shape object)
    {
        // checkConsistency();
        ShapeNode node = getNodeForShape(object);

        // A shape may occupy several BSP nodes; keep removing until none remain.
        while (node != null) {
            BSPNode bspNode = node.getBSPNode();
            node.remove();
            checkRemoveNode(bspNode);
            node = getNodeForShape(object);
        }
        // checkConsistency();
    }

    /**
     * Check whether a node can be removed, and remove it if so, traversing up the
     * tree and so on. Returns the highest node which wasn't removed.
     */
    private BSPNode checkRemoveNode(BSPNode node)
    {
        // An empty node with at most one child can be spliced out: the
        // surviving child (if any) takes over the node's area and position.
        while (node != null && node.isEmpty()) {
            BSPNode parent = node.getParent();
            int side = (parent != null) ? parent.getChildSide(node) : PARENT_NONE;
            BSPNode left = node.getLeft();
            BSPNode right = node.getRight();
            if (left == null) {
                if (parent != null) {
                    if (right != null) {
                        right.setArea(node.getArea());
                    }
                    parent.setChild(side, right);
                }
                else {
                    bspTree = right;
                    if (right != null) {
                        right.setParent(null);
                    }
                }
                // Detach the child before recycling so the cache doesn't free it too.
                node.setChild(PARENT_RIGHT, null);
                BSPNodeCache.returnNode(node);
                node = parent;
            }
            else if (right == null) {
                if (parent != null) {
                    if (left != null) {
                        left.setArea(node.getArea());
                    }
                    parent.setChild(side, left);
                }
                else {
                    bspTree = left;
                    if (left != null) {
                        left.setParent(null);
                    }
                }
                node.setChild(PARENT_LEFT, null);
                BSPNodeCache.returnNode(node);
                node = parent;
            }
            else {
                // Two children: node must stay.
                break;
            }
        }

        return node;
    }

    // (Commented-out dbgCounter/println debugging helpers removed here.)

    /** Look up the head of the shape's chain of tree nodes. */
    public static ShapeNode getNodeForShape(Shape object)
    {
        return getShapeNode(object);
    }

    /** Store the head of the shape's chain of tree nodes. */
    public static void setNodeForShape(Shape object, ShapeNode node)
    {
        setShapeNode(object, node);
    }

    /**
     * An actors position or size has changed - update the tree.
     */
    private synchronized void updateObject(Shape object)
    {
        //checkConsistency();
        ShapeNode node = getNodeForShape(object);
        if (node == null) {
            // It seems that this can get called before the actor is added to the
            // checker...
            return;
        }

        Rect newBounds = getShapeBounds(object);
        if (!
bspTree.getArea().contains(newBounds)) {
            // The actor has moved out of the existing tree area
            // Remove all its tree nodes, then re-add (which grows the tree).
            while (node != null) {
                BSPNode rNode = node.getBSPNode();
                node.remove();
                checkRemoveNode(rNode);
                // NOTE(review): getNext() is called after remove(); this relies
                // on remove() leaving the next-link intact — TODO confirm in
                // ShapeNode.
                node = node.getNext();
            }
            addObject(object);
            return;
        }

        // First process all existing actor nodes. We cull nodes which
        // no longer contain any part of the actor; also, if we find a
        // BSPNode which completely contains the actor, we just throw
        // all the other actor nodes away.
        while (node != null) {
            //updateNodeForMovedObject(object, newBounds, bspNode);
            BSPNode bspNode = node.getBSPNode();
            Rect bspArea = bspNode.getArea();
            if (bspArea.contains(newBounds)) {
                // Ok, we found a BSPNode which completely contains the
                // actor - we can throw all other actor nodes away
                ShapeNode iter = getNodeForShape(object);
                while (iter != null) {
                    if (iter != node) {
                        BSPNode rNode = iter.getBSPNode();
                        iter.remove();
                        checkRemoveNode(rNode);
                    }
                    iter = iter.getNext();
                }
                return;
            }
            else if (! bspArea.intersects(newBounds)) {
                // This actor node is no longer needed
                BSPNode rNode = node.getBSPNode();
                node.remove();
                checkRemoveNode(rNode);
            }
            // Clear the mark so a later re-insertion pass can detect stale nodes.
            node.clearMark();
            node = node.getNext();
        }

        // If we got here, there was no single node which contained the whole
        // actor (and we have culled any nodes which no longer contain any
        // part of the actor). We now need to find a suitable BSPNode
        // and do a re-insertion.
        node = getNodeForShape(object);
        BSPNode bspNode;
        Rect bspArea;
        if (node != null) {
            bspNode = node.getBSPNode();
            // Walk up until we find an ancestor that covers the new bounds.
            while (bspNode != null && ! bspNode.getArea().contains(newBounds)) {
                bspNode = bspNode.getParent();
            }
            if (bspNode == null) {
                // No node contains the whole actor; we need to expand the tree size
                // First: remove old actor nodes
                while (node != null) {
                    bspNode = node.getBSPNode();
                    node.remove();
                    checkRemoveNode(bspNode);
                    node = node.getNext();
                }
                // Now: expand the tree
                addObject(object);
                return;
            }
        }
        else {
            bspNode = bspTree;
        }

        // Note, we can pass null as the parent because bspNode is guaranteed not to be null.
        bspArea = bspNode.getArea();
        insertObject(object, newBounds, newBounds, bspArea, bspNode);

        // Finally, it's possible the object changed size and therefore has been stored
        // in higher nodes than previously. This means there are duplicate actor nodes.
        // The insertObject call will mark all the nodes it touches, so we need remove
        // any unmarked nodes.
        node = getNodeForShape(object);
        while (node != null) {
            if (! node.checkMark()) {
                bspNode = node.getBSPNode();
                node.remove();
                checkRemoveNode(bspNode);
            }
            node = node.getNext();
        }

        // checkConsistency();
    }

    /** Called when a tracked shape moves. */
    public void updateObjectLocation(Shape object)
    {
        updateObject(object);
    }

    /** Called when a tracked shape is resized. */
    public void updateObjectSize(Shape object)
    {
        updateObject(object);
    }

    /**
     * Collect all shapes in the tree that intersect rectangle r and satisfy
     * the given query.
     */
    private Set<Shape> getIntersectingObjects(Rect r, CollisionQuery query)
    {
        Set<Shape> set = new HashSet<Shape>();
        getIntersectingObjects(r, query, set, bspTree);
        return set;
    }

    /**
     * Iterative depth-first traversal (explicit stack) gathering matching
     * shapes from every node whose area intersects r.
     */
    private void getIntersectingObjects(Rect r, CollisionQuery query,
            Set<Shape> resultSet, BSPNode startNode)
    {
        LinkedList<BSPNode> nodeStack = new LinkedList<BSPNode>();
        if (startNode != null) {
            nodeStack.add(startNode);
        }

        while (! nodeStack.isEmpty()) {
            BSPNode node = nodeStack.removeLast();
            if (node.getArea().intersects(r)) {
                for (Shape shape : node) {
                    if (query.checkCollision(shape)) {
                        if (!
resultSet.contains(shape)) {
                            resultSet.add(shape);
                        }
                    }
                }

                BSPNode left = node.getLeft();
                BSPNode right = node.getRight();
                if (left != null) {
                    nodeStack.add(left);
                }
                if (right != null) {
                    nodeStack.add(right);
                }
            }
        }
    }

    /**
     * Check if there is at least one actor in the given BSPNode which matches
     * the given collision query, and return it if so.
     */
    private Shape checkForOneCollision(Shape ignore, BSPNode node, CollisionQuery query)
    {
        for (Shape candidate : node) {
            if (ignore != candidate && query.checkCollision(candidate)) {
                return candidate;
            }
        }

        return null;
    }

    /**
     * Search for a single object which matches the given collision
     * query, starting from the given tree node and searching only
     * down the tree.
     *
     * @param ignore - do not return this actor
     * @param r Bounds - do not search nodes which don't intersect this
     * @param query The query to check objects against
     * @param startNode The node to begin the search from
     * @return The actor found, or null
     */
    private Shape getOneObjectDownTree(Shape ignore, Rect r, CollisionQuery query, BSPNode startNode)
    {
        if (startNode == null) {
            return null;
        }

        LinkedList<BSPNode> nodeStack = new LinkedList<BSPNode>();
        nodeStack.add(startNode);

        while (! nodeStack.isEmpty()) {
            BSPNode node = nodeStack.removeLast();
            if (node.getArea().intersects(r)) {
                Shape res = checkForOneCollision(ignore, node, query);
                if (res != null) {
                    return res;
                }

                BSPNode left = node.getLeft();
                BSPNode right = node.getRight();
                if (left != null) {
                    nodeStack.add(left);
                }
                if (right != null) {
                    nodeStack.add(right);
                }
            }
        }

        return null;
    }

    /**
     * Search down the tree, but only so far as the last node which fully contains the area.
     * @param r
     * @param query
     * @param shape
     * @return
     */
    private Shape getOneIntersectingDown(Rect r, CollisionQuery query, Shape shape)
    {
        if (bspTree == null) {
            return null;
        }

        LinkedList<BSPNode> nodeStack = new LinkedList<BSPNode>();
        nodeStack.add(bspTree);

        while (! nodeStack.isEmpty()) {
            BSPNode node = nodeStack.removeLast();
            // Only descend through nodes that fully contain r.
            if (node.getArea().contains(r)) {
                Shape res = checkForOneCollision(shape, node, query);
                if (res != null) {
                    return res;
                }

                BSPNode left = node.getLeft();
                BSPNode right = node.getRight();
                if (left != null) {
                    nodeStack.add(left);
                }
                if (right != null) {
                    nodeStack.add(right);
                }
            }
        }

        return null;
    }

    /**
     * Search up the tree, up to (not including) the node which fully contains the area.
     * @param r
     * @param query
     * @param shape
     * @param start
     */
    public Shape getOneIntersectingUp(Rect r, CollisionQuery query, Shape shape, BSPNode start)
    {
        while (start != null && ! start.getArea().contains(r)) {
            Shape res = checkForOneCollision(shape, start, query);
            if (res != null) {
                return res;
            }
            start = start.getParent();
        }
        return null;
    }

    /** Find all shapes of class cls at the point (x, y). */
    @SuppressWarnings("unchecked")
    public <T extends Shape> Set<T> getObjectsAt(float x, float y, Class<T> cls)
    {
        synchronized (pointQuery) {
            pointQuery.init(x, y, cls);
            return (Set<T>) getIntersectingObjects(new Rect(x, y, 1, 1), pointQuery);
        }
    }

    /** Find all shapes of class cls intersecting the given shape. */
    @SuppressWarnings("unchecked")
    public <T extends Shape> Set<T> getIntersectingObjects(Shape shape, Class<T> cls)
    {
        Rect r = getShapeBounds(shape);

        synchronized (shapeQuery) {
            shapeQuery.init(cls, shape);
            return (Set<T>) getIntersectingObjects(r, shapeQuery);
        }
    }

    /**
     * Find all shapes of class cls within distance r of (x, y): first a
     * coarse bounding-box pass, then an exact range filter.
     */
    @SuppressWarnings("unchecked")
    public <T extends Shape> Set<T> getObjectsInRange(float x, float y, float r, Class<T> cls)
    {
        float size = 2 * r;
        Rect rect = new Rect((x - r), (y - r), size, size);

        Set<T> result;
        synchronized (shapeQuery) {
            shapeQuery.init(cls, null);
            result = (Set<T>) getIntersectingObjects(rect, shapeQuery);
        }

        Iterator<T> i = result.iterator();
        synchronized (inRangeQuery) {
            inRangeQuery.init(x, y, r);
            while (i.hasNext()) {
                if (! inRangeQuery.checkCollision(i.next())) {
                    i.remove();
                }
            }
        }

        return result;
    }

    /** Find all shapes of class cls neighbouring the given shape. */
    @SuppressWarnings("unchecked")
    public <T extends Shape> Set<T> getNeighbors(Shape shape, float distance,
            boolean diag, Class<T> cls)
    {
        float x = shape.getX();
        float y = shape.getY();
        Rect r = new Rect(x - distance, y - distance, distance * 2 + 1, distance * 2 + 1);

        synchronized (neighbourQuery) {
            neighbourQuery.init(x, y, distance, diag, cls);
            return (Set<T>) getIntersectingObjects(r, neighbourQuery);
        }
    }

    public <T extends Shape> Set<T> getObjectsInDirection(float x, float y,
            float angle, float length, Class<T> cls)
    {
        // non-functional
        // return new ArrayList<T>();
        throw new UnsupportedOperationException("not implemented!");
    }

    /** Collect every shape of class cls in the whole tree (cls == null: all). */
    @SuppressWarnings("unchecked")
    public <T extends Shape> Set<T> getObjects(Class<T> cls)
    {
        Set<T> set = new HashSet<T>();
        LinkedList<BSPNode> nodeStack = new LinkedList<BSPNode>();

        if (bspTree != null) {
            nodeStack.add(bspTree);
        }

        while (! nodeStack.isEmpty()) {
            BSPNode node = nodeStack.removeLast();
            for (Shape shape : node) {
                if (cls == null || cls.isInstance(shape)) {
                    set.add((T) shape);
                }
            }
            BSPNode left = node.getLeft();
            BSPNode right = node.getRight();
            if (left != null) {
                nodeStack.add(left);
            }
            if (right != null) {
                nodeStack.add(right);
            }
        }

        return set;
    }

    public Set<Shape> getObjects()
    {
        return getObjects(null);
    }

    public final void startSequence()
    {
        // Nothing necessary.
    }

    /** Find one shape of class cls at the point (dx, dy), ignoring 'object'. */
    @SuppressWarnings("unchecked")
    public <T extends Shape> T getOneObjectAt(Shape object, float dx, float dy, Class<T> cls)
    {
        synchronized (pointQuery) {
            float px = dx;
            float py = dy;
            pointQuery.init(px, py, cls);
            CollisionQuery query = pointQuery;
            if (cls != null) {
                query = new ClassQuery(cls, pointQuery);
            }
            // Use of getOneIntersectingDown is ok, because the area is only 1x1 pixel
            // in size - it will be contained by all nodes.
            return (T) getOneIntersectingDown(new Rect(px, py, 1, 1), query, object);
        }
    }

    /** Find one shape of class cls intersecting the given shape. */
    @SuppressWarnings("unchecked")
    public <T extends Shape> T getOneIntersectingObject(Shape shape, Class<T> cls)
    {
        Rect r = getShapeBounds(shape);
        synchronized (shapeQuery) {
            shapeQuery.init(cls, shape);

            // Search around each tree node the shape occupies: first down from
            // the node, then up towards (but excluding) the covering ancestor.
            ShapeNode node = getNodeForShape(shape);
            do {
                BSPNode bspNode = node.getBSPNode();
                T ret = (T) getOneObjectDownTree(shape, r, shapeQuery, bspNode);
                if (ret != null) {
                    return ret;
                }
                ret = (T) getOneIntersectingUp(r, shapeQuery, shape, bspNode.getParent());
                if (ret != null) {
                    return ret;
                }
                node = node.getNext();
            }
            while (node != null);
            // Fall back to a full containment-bounded search from the root.
            return (T) getOneIntersectingDown(r, shapeQuery, shape);
        }
    }
}
package net.i2p.router.transport.ntcp;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ClosedSelectorException;
import java.nio.channels.NoConnectionPendingException;
import java.nio.channels.NotYetConnectedException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.nio.channels.UnresolvedAddressException;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;

import net.i2p.data.RouterIdentity;
import net.i2p.data.RouterInfo;
import net.i2p.router.RouterContext;
import net.i2p.router.transport.FIFOBandwidthLimiter;
import net.i2p.util.I2PThread;
import net.i2p.util.Log;

/**
 * Single-threaded NIO event loop for the NTCP transport: runs the selector,
 * dispatches accept/connect/read/write readiness, and manages a small cache
 * of read buffers.
 */
public class EventPumper implements Runnable {
    private RouterContext _context;
    private Log _log;
    private volatile boolean _alive;
    private Selector _selector;
    // Pool of reusable read buffers (see acquireBuf/releaseBuf).
    private final LinkedBlockingQueue<ByteBuffer> _bufCache;
    // Work queues drained by the pumper thread in runDelayedEvents().
    private final LinkedBlockingQueue<NTCPConnection> _wantsRead = new LinkedBlockingQueue<NTCPConnection>();
    private final LinkedBlockingQueue<NTCPConnection> _wantsWrite = new LinkedBlockingQueue<NTCPConnection>();
    private final LinkedBlockingQueue<ServerSocketChannel> _wantsRegister = new LinkedBlockingQueue<ServerSocketChannel>();
    private final LinkedBlockingQueue<NTCPConnection> _wantsConRegister = new LinkedBlockingQueue<NTCPConnection>();
    private NTCPTransport _transport;
    // Adaptive idle-connection expiry threshold, adjusted in the failsafe loop.
    private long _expireIdleWriteTime;

    private static final int BUF_SIZE = 8*1024;
    private static final int MAX_CACHE_SIZE = 64;

    /**
     * every few seconds, iterate across all ntcp connections just to make sure
     * we have their interestOps set properly (and to expire any looong idle cons).
     * as the number of connections grows, we should try to make this happen
     * less frequently (or not at all), but while the connection count is small,
     * the time to iterate across them to check a few flags shouldn't be a problem.
     */
    private static final long FAILSAFE_ITERATION_FREQ = 2*1000l;
    /** tunnel test is every 30-60s, so this should be longer than, say, 3*45s to allow for drops */
    private static final long MIN_EXPIRE_IDLE_TIME = 3*60*1000l;
    private static final long MAX_EXPIRE_IDLE_TIME = 15*60*1000l;

    public EventPumper(RouterContext ctx, NTCPTransport transport) {
        _context = ctx;
        _log = ctx.logManager().getLog(getClass());
        _transport = transport;
        _alive = false;
        _bufCache = new LinkedBlockingQueue<ByteBuffer>(MAX_CACHE_SIZE);
        _expireIdleWriteTime = MAX_EXPIRE_IDLE_TIME;
    }

    /** Open the selector and start the pumper thread. */
    public synchronized void startPumping() {
        if (_log.shouldLog(Log.INFO))
            _log.info("Starting pumper");
        try {
            _selector = Selector.open();
            _alive = true;
            new I2PThread(this, "NTCP Pumper", true).start();
        } catch (IOException ioe) {
            _log.log(Log.CRIT, "Error opening the NTCP selector", ioe);
        } catch (java.lang.InternalError jlie) {
            // "unable to get address of epoll functions, pre-2.6 kernel?"
            _log.log(Log.CRIT, "Error opening the NTCP selector", jlie);
        }
    }

    /** Signal the loop to exit; the selector closes on the pumper thread. */
    public synchronized void stopPumping() {
        _alive = false;
        if (_selector != null && _selector.isOpen())
            _selector.wakeup();
    }

    /**
     * Selector can take quite a while to close after calling stopPumping()
     */
    public boolean isAlive() {
        return _alive || (_selector != null && _selector.isOpen());
    }

    /** Queue a server socket channel for registration by the pumper thread. */
    public void register(ServerSocketChannel chan) {
        if (_log.shouldLog(Log.DEBUG)) _log.debug("Registering server socket channel");
        _wantsRegister.offer(chan);
        _selector.wakeup();
    }

    /** Queue an outbound connection for registration by the pumper thread. */
    public void registerConnect(NTCPConnection con) {
        if (_log.shouldLog(Log.DEBUG)) _log.debug("Registering outbound connection");
        _context.statManager().addRateData("ntcp.registerConnect", 1, 0);
        _wantsConRegister.offer(con);
        _selector.wakeup();
    }

    /** Main selector loop: select, dispatch ready keys, periodic failsafe pass. */
    public void run() {
        long lastFailsafeIteration = System.currentTimeMillis();
        while (_alive && _selector.isOpen()) {
            try {
                runDelayedEvents();
                int count = 0;
                try {
                    //if (_log.shouldLog(Log.DEBUG))
                    //    _log.debug("before select...");
                    count = _selector.select(200);
                } catch (IOException ioe) {
                    if (_log.shouldLog(Log.WARN))
                        _log.warn("Error selecting", ioe);
                }
                if (count <= 0)
                    continue;

                if (_log.shouldLog(Log.DEBUG))
                    _log.debug("select returned " + count);

                Set<SelectionKey> selected = null;
                try {
                    selected = _selector.selectedKeys();
                } catch (ClosedSelectorException cse) {
                    continue;
                }

                processKeys(selected);
                selected.clear();

                if (lastFailsafeIteration + FAILSAFE_ITERATION_FREQ < System.currentTimeMillis()) {
                    // in the *cough* unthinkable possibility that there are bugs in
                    // the code, lets periodically pass over all NTCP connections and
                    // make sure that anything which should be able to write has been
                    // properly marked as such, etc
                    lastFailsafeIteration = System.currentTimeMillis();
                    try {
                        Set<SelectionKey> all = _selector.keys();

                        int failsafeWrites = 0;
                        int failsafeCloses = 0;
                        int failsafeInvalid = 0;

                        // Increase allowed idle time if we are well under allowed connections, otherwise decrease
                        if
(_transport.haveCapacity())
                            _expireIdleWriteTime = Math.min(_expireIdleWriteTime + 1000, MAX_EXPIRE_IDLE_TIME);
                        else
                            _expireIdleWriteTime = Math.max(_expireIdleWriteTime - 3000, MIN_EXPIRE_IDLE_TIME);
                        for (SelectionKey key : all) {
                            try {
                                Object att = key.attachment();
                                if (!(att instanceof NTCPConnection))
                                    continue; // to the next con
                                NTCPConnection con = (NTCPConnection)att;

                                // Drop connections whose key went invalid without ever
                                // finishing the connect, once well past the establish timeout.
                                if ((!key.isValid()) &&
                                    (!((SocketChannel)key.channel()).isConnectionPending()) &&
                                    con.getTimeSinceCreated() > 2 * NTCPTransport.ESTABLISH_TIMEOUT) {
                                    if (_log.shouldLog(Log.WARN))
                                        _log.warn("Invalid key " + con);
                                    // this will cancel the key, and it will then be removed from the keyset
                                    con.close();
                                    failsafeInvalid++;
                                    continue;
                                }

                                if ( (con.getWriteBufCount() > 0) &&
                                     ((key.interestOps() & SelectionKey.OP_WRITE) == 0) ) {
                                    // the data queued to be sent has already passed through
                                    // the bw limiter and really just wants to get shoved
                                    // out the door asap.
                                    key.interestOps(SelectionKey.OP_WRITE | key.interestOps());
                                    failsafeWrites++;
                                }

                                if ( con.getTimeSinceSend() > _expireIdleWriteTime &&
                                     con.getTimeSinceReceive() > _expireIdleWriteTime) {
                                    // we haven't sent or received anything in a really long time, so lets just close 'er up
                                    con.close();
                                    failsafeCloses++;
                                }
                            } catch (CancelledKeyException cke) {
                                // cancelled while updating the interest ops.  ah well
                            }
                        }
                        if (failsafeWrites > 0)
                            _context.statManager().addRateData("ntcp.failsafeWrites", failsafeWrites, 0);
                        if (failsafeCloses > 0)
                            _context.statManager().addRateData("ntcp.failsafeCloses", failsafeCloses, 0);
                        if (failsafeInvalid > 0)
                            _context.statManager().addRateData("ntcp.failsafeInvalid", failsafeInvalid, 0);
                    } catch (ClosedSelectorException cse) {
                        continue;
                    }
                }
            } catch (RuntimeException re) {
                _log.log(Log.CRIT, "Error in the event pumper", re);
            }
        }

        // Loop exited: close any remaining channels/connections and the selector.
        try {
            if (_selector.isOpen()) {
                if (_log.shouldLog(Log.DEBUG))
                    _log.debug("Closing down the event pumper with selection keys remaining");
                Set<SelectionKey> keys = _selector.keys();
                for (SelectionKey key : keys) {
                    try {
                        Object att = key.attachment();
                        if (att instanceof ServerSocketChannel) {
                            ServerSocketChannel chan = (ServerSocketChannel)att;
                            chan.close();
                            key.cancel();
                        } else if (att instanceof NTCPConnection) {
                            NTCPConnection con = (NTCPConnection)att;
                            con.close();
                            key.cancel();
                        }
                    } catch (Exception ke) {
                        _log.error("Error closing key " + key + " on pumper shutdown", ke);
                    }
                }
                _selector.close();
            } else {
                if (_log.shouldLog(Log.DEBUG))
                    _log.debug("Closing down the event pumper with no selection keys remaining");
            }
        } catch (Exception e) {
            _log.error("Error closing keys on pumper shutdown", e);
        }
        _wantsConRegister.clear();
        _wantsRead.clear();
        _wantsRegister.clear();
        _wantsWrite.clear();
    }

    /** Dispatch each selected key's ready operations, clearing interest bits first. */
    private void processKeys(Set<SelectionKey> selected) {
        for (SelectionKey key : selected) {
            try {
                int ops = key.readyOps();
                boolean accept = (ops & SelectionKey.OP_ACCEPT) != 0;
                boolean connect = (ops & SelectionKey.OP_CONNECT) != 0;
                boolean read = (ops & SelectionKey.OP_READ) != 0;
                boolean write = (ops & SelectionKey.OP_WRITE) != 0;
                if (_log.shouldLog(Log.DEBUG))
                    _log.debug("ready ops for : " + key
                               + " accept? " + accept + " connect? " + connect
                               + " read? " + read + "/" + ((key.interestOps()&SelectionKey.OP_READ)!= 0)
                               + " write? " + write + "/" + ((key.interestOps()&SelectionKey.OP_WRITE)!= 0)
                               + " on " + key.attachment()
                               );
                if (accept) {
                    _context.statManager().addRateData("ntcp.accept", 1, 0);
                    processAccept(key);
                }
                if (connect) {
                    key.interestOps(key.interestOps() & ~SelectionKey.OP_CONNECT);
                    processConnect(key);
                }
                if (read) {
                    _context.statManager().addRateData("ntcp.read", 1, 0);
                    key.interestOps(key.interestOps() & ~SelectionKey.OP_READ);
                    processRead(key);
                }
                if (write) {
                    _context.statManager().addRateData("ntcp.write", 1, 0);
                    key.interestOps(key.interestOps() & ~SelectionKey.OP_WRITE);
                    processWrite(key);
                }
            } catch (CancelledKeyException cke) {
                if (_log.shouldLog(Log.DEBUG))
                    _log.debug("key cancelled");
            }
        }
    }

    /**
     * Request bandwidth for an outbound write; either queue it on the
     * connection (throttled) or write immediately (fully allocated).
     */
    public void wantsWrite(NTCPConnection con, byte data[]) {
        ByteBuffer buf = ByteBuffer.wrap(data);
        FIFOBandwidthLimiter.Request req = _context.bandwidthLimiter().requestOutbound(data.length, "NTCP write");//con, buf);
        if (req.getPendingOutboundRequested() > 0) {
            if (_log.shouldLog(Log.INFO))
                _log.info("queued write on " + con + " for " + data.length);
            _context.statManager().addRateData("ntcp.wantsQueuedWrite", 1, 0);
            con.queuedWrite(buf, req);
        } else {
            // fully allocated
            if (_log.shouldLog(Log.INFO))
                _log.info("fully allocated write on " + con + " for " + data.length);
            con.write(buf);
        }
    }

    /** called by the connection when it has data ready to write (after bw allocation) */
    public void wantsWrite(NTCPConnection con) {
        if (_log.shouldLog(Log.INFO))
            _log.info("Before adding wants to write on " + con);
        if (!_wantsWrite.contains(con))
            _wantsWrite.offer(con);
        if (_log.shouldLog(Log.INFO))
            _log.info("Wants to write on " + con);
        _selector.wakeup();
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("selector awoken for write");
    }

    /** Queue the connection for re-enabling read interest. */
    public void wantsRead(NTCPConnection con) {
        if (!_wantsRead.contains(con))
            _wantsRead.offer(con);
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("wants to read on " + con);
        _selector.wakeup();
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("selector awoken for read");
    }

    private static
final int MIN_BUFS = 5;

    /**
     * There's only one pumper, so static is fine, unless multi router
     * Is there a better way to do this?
     */
    private static int NUM_BUFS = 5;
    // count of read buffers currently handed out (live), shared across the static cache
    private static int __liveBufs = 0;
    // consecutive releaseBuf() calls that saw more than 5 cached buffers; used to shrink NUM_BUFS
    private static int __consecutiveExtra;

    /**
     * Get a read buffer from the cache, allocating a new one (and growing the
     * cache target NUM_BUFS) when the cache is empty.
     */
    ByteBuffer acquireBuf() {
        ByteBuffer rv = _bufCache.poll();
        if (rv == null) {
            rv = ByteBuffer.allocate(BUF_SIZE);
            NUM_BUFS = ++__liveBufs;
            if (_log.shouldLog(Log.DEBUG))
                _log.debug("creating a new read buffer " + System.identityHashCode(rv) + " with " + __liveBufs + " live: " + rv);
            _context.statManager().addRateData("ntcp.liveReadBufs", NUM_BUFS, 0);
        } else {
            if (_log.shouldLog(Log.DEBUG))
                _log.debug("acquiring existing read buffer " + System.identityHashCode(rv) + " with " + __liveBufs + " live: " + rv);
        }
        return rv;
    }

    /**
     * Return a read buffer to the cache, or drop it (decrementing the live
     * count) when the cache is already at its target size.  Shrinks the target
     * toward MIN_BUFS after 20 consecutive releases with more than 5 extras cached.
     */
    void releaseBuf(ByteBuffer buf) {
        //if (false) return;
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("releasing read buffer " + System.identityHashCode(buf) + " with " + __liveBufs + " live: " + buf);
        buf.clear();
        int extra = _bufCache.size();
        boolean cached = extra < NUM_BUFS;
        if (cached) {
            _bufCache.offer(buf);
            if (extra > 5) {
                __consecutiveExtra++;
                if (__consecutiveExtra >= 20) {
                    NUM_BUFS = Math.max(NUM_BUFS - 1, MIN_BUFS);
                    __consecutiveExtra = 0;
                }
            }
        } else {
            // FIX: was a bare "__liveBufs" (not a statement); the dropped buffer
            // must decrement the live count or the counter leaks upward forever.
            __liveBufs--;
        }
        if (cached && _log.shouldLog(Log.DEBUG))
            _log.debug("read buffer " + System.identityHashCode(buf) + " cached with " + __liveBufs + " live");
    }

    /** Accept an inbound connection, unless we are at the limit or the peer is blocklisted. */
    private void processAccept(SelectionKey key) {
        if (_log.shouldLog(Log.DEBUG)) _log.debug("processing accept");
        ServerSocketChannel servChan = (ServerSocketChannel)key.attachment();
        try {
            SocketChannel chan = servChan.accept();
            chan.configureBlocking(false);
            if (!_transport.allowConnection()) {
                if (_log.shouldLog(Log.WARN))
                    _log.warn("Receive session request but at connection limit: " + chan.socket().getInetAddress());
                try { chan.close(); } catch (IOException ioe) { }
                return;
            }
            if (_context.blocklist().isBlocklisted(chan.socket().getInetAddress().getAddress())) {
                if (_log.shouldLog(Log.WARN))
                    _log.warn("Receive session request from blocklisted IP: " + chan.socket().getInetAddress());
                // need to add this stat first
                // _context.statManager().addRateData("ntcp.connectBlocklisted", 1, 0);
                try { chan.close(); } catch (IOException ioe) { }
                return;
            }
            // BUGFIX for firewalls. --Sponge
            chan.socket().setKeepAlive(true);
            SelectionKey ckey = chan.register(_selector, SelectionKey.OP_READ);
            NTCPConnection con = new NTCPConnection(_context, _transport, chan, ckey);
            if (_log.shouldLog(Log.DEBUG))
                _log.debug("new NTCP connection established: " + con);
        } catch (IOException ioe) {
            if (_log.shouldLog(Log.ERROR)) _log.error("Error accepting", ioe);
        }
    }

    /** Finish an outbound connect; on failure mark the peer unreachable and close. */
    private void processConnect(SelectionKey key) {
        NTCPConnection con = (NTCPConnection)key.attachment();
        try {
            SocketChannel chan = con.getChannel();
            boolean connected = chan.finishConnect();
            if (_log.shouldLog(Log.DEBUG))
                _log.debug("processing connect for " + key + " / " + con + ": connected? " + connected);
            if (connected) {
                // FIX: "--Sponge" had been wrapped out of this comment onto the
                // next code line, which does not parse.  Restored as a comment.
                // BUGFIX for firewalls. --Sponge
                chan.socket().setKeepAlive(true);
                con.setKey(key);
                con.outboundConnected();
                _context.statManager().addRateData("ntcp.connectSuccessful", 1, 0);
            } else {
                con.close();
                _transport.markUnreachable(con.getRemotePeer().calculateHash());
                _context.statManager().addRateData("ntcp.connectFailedTimeout", 1, 0);
            }
        } catch (IOException ioe) {
            // this is the usual failure path for a timeout or connect refused
            if (_log.shouldLog(Log.WARN))
                _log.warn("Failed outbound connection to " + con.getRemotePeer().calculateHash(), ioe);
            con.close();
            //_context.shitlist().shitlistRouter(con.getRemotePeer().calculateHash(), "Error connecting", NTCPTransport.STYLE);
            _transport.markUnreachable(con.getRemotePeer().calculateHash());
            _context.statManager().addRateData("ntcp.connectFailedTimeoutIOE", 1, 0);
        } catch (NoConnectionPendingException ncpe) {
            // ignore
        }
    }

    /**
     * Read from the channel into a cached buffer; hand the data to the
     * connection directly, or queue it when bandwidth-throttled.
     */
    private void processRead(SelectionKey key) {
        NTCPConnection con = (NTCPConnection)key.attachment();
        ByteBuffer buf = acquireBuf();
        try {
            int read = con.getChannel().read(buf);
            if (read == -1) {
                if (_log.shouldLog(Log.DEBUG)) _log.debug("EOF on " + con);
                _context.statManager().addRateData("ntcp.readEOF", 1, 0);
                con.close();
                releaseBuf(buf);
            } else if (read == 0) {
                if (_log.shouldLog(Log.DEBUG))
                    _log.debug("nothing to read for " + con + ", but stay interested");
                key.interestOps(key.interestOps() | SelectionKey.OP_READ);
                releaseBuf(buf);
            } else if (read > 0) {
                // copy out so the pooled buffer can be released immediately
                byte data[] = new byte[read];
                buf.flip();
                buf.get(data);
                releaseBuf(buf);
                buf = null;
                ByteBuffer rbuf = ByteBuffer.wrap(data);
                FIFOBandwidthLimiter.Request req = _context.bandwidthLimiter().requestInbound(read, "NTCP read"); //con, buf);
                if (req.getPendingInboundRequested() > 0) {
                    // bandwidth not yet allocated: stop reading until it is
                    key.interestOps(key.interestOps() & ~SelectionKey.OP_READ);
                    if (_log.shouldLog(Log.DEBUG))
                        _log.debug("bw throttled reading for " + con + ", so we don't want to read anymore");
                    _context.statManager().addRateData("ntcp.queuedRecv", read, 0);
                    con.queuedRecv(rbuf, req);
                } else {
                    // fully allocated
                    if (_log.shouldLog(Log.DEBUG))
                        _log.debug("not bw throttled reading for " + con);
                    key.interestOps(key.interestOps() | SelectionKey.OP_READ);
                    con.recv(rbuf);
                }
            }
        } catch (CancelledKeyException cke) {
            if (_log.shouldLog(Log.WARN)) _log.warn("error reading", cke);
            con.close();
            _context.statManager().addRateData("ntcp.readError", 1, 0);
            if (buf != null) releaseBuf(buf);
        } catch (IOException ioe) {
            if (_log.shouldLog(Log.WARN)) _log.warn("error reading", ioe);
            con.close();
            _context.statManager().addRateData("ntcp.readError", 1, 0);
            if (buf != null) releaseBuf(buf);
        } catch (NotYetConnectedException nyce) { }
    }

    /**
     * Drain the connection's pending write buffers into the channel; when a
     * write stalls, re-register OP_WRITE interest and return.
     */
    private void processWrite(SelectionKey key) {
        int totalWritten = 0;
        int buffers = 0;
        long before = System.currentTimeMillis();
        NTCPConnection con = (NTCPConnection)key.attachment();
        try {
            while (true) {
                ByteBuffer buf = con.getNextWriteBuf();
                if (buf != null) {
                    if (_log.shouldLog(Log.DEBUG)) _log.debug("writing " + buf.remaining() + "...");
                    if (buf.remaining() <= 0) {
                        long beforeRem = System.currentTimeMillis();
                        con.removeWriteBuf(buf);
                        long afterRem = System.currentTimeMillis();
                        if (_log.shouldLog(Log.DEBUG))
                            _log.debug("buffer was already fully written and removed after " + (afterRem-beforeRem) + "...");
                        buf = null;
                        buffers++;
                        continue;
                    }
                    int written = con.getChannel().write(buf);
                    totalWritten += written;
                    if (written == 0) {
                        if ( (buf.remaining() > 0) || (con.getWriteBufCount() >= 1) ) {
                            if (_log.shouldLog(Log.DEBUG)) _log.debug("done writing, but data remains...");
                            key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
                        } else {
                            if (_log.shouldLog(Log.DEBUG)) _log.debug("done writing, no data remains...");
                        }
                        break;
                    } else if (buf.remaining() > 0) {
                        if (_log.shouldLog(Log.DEBUG)) _log.debug("buffer data remaining...");
                        key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
                        break;
                    } else {
                        long beforeRem = System.currentTimeMillis();
                        con.removeWriteBuf(buf);
                        long afterRem = System.currentTimeMillis();
                        if (_log.shouldLog(Log.DEBUG))
                            _log.debug("buffer " + buffers + "/" + written + "/" + totalWritten + " fully written after " + (beforeRem-before) + ", then removed after " + (afterRem-beforeRem) + "...");
                        //releaseBuf(buf);
                        buf = null;
                        buffers++;
                        //if (buffer time is too much, add OP_WRITe to the interest ops and break?)
                    }
                } else {
                    break;
                }
            }
        } catch (CancelledKeyException cke) {
            if (_log.shouldLog(Log.WARN)) _log.warn("error writing", cke);
            _context.statManager().addRateData("ntcp.writeError", 1, 0);
            con.close();
        } catch (IOException ioe) {
            if (_log.shouldLog(Log.WARN)) _log.warn("error writing", ioe);
            _context.statManager().addRateData("ntcp.writeError", 1, 0);
            con.close();
        }
        long after = System.currentTimeMillis();
        if (_log.shouldLog(Log.INFO))
            _log.info("Wrote " + totalWritten + " in " + buffers + " buffers on " + con + " after " + (after-before));
    }

    /**
     * Apply the queued read/write interest changes and channel registrations
     * requested from other threads, then run the periodic expiration (at most
     * once per second).
     */
    private void runDelayedEvents() {
        NTCPConnection con;
        while ((con = _wantsRead.poll()) != null) {
            SelectionKey key = con.getKey();
            try {
                key.interestOps(key.interestOps() | SelectionKey.OP_READ);
            } catch (CancelledKeyException cke) {
                // ignore, we remove/etc elsewhere
            }
        }
        while ((con = _wantsWrite.poll()) != null) {
            SelectionKey key = con.getKey();
            try {
                key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
            } catch (CancelledKeyException cke) {
                // ignore
            }
        }
        ServerSocketChannel chan;
        while ((chan = _wantsRegister.poll()) != null) {
            try {
                SelectionKey key = chan.register(_selector, SelectionKey.OP_ACCEPT);
                key.attach(chan);
            } catch (ClosedChannelException cce) {
                if (_log.shouldLog(Log.WARN)) _log.warn("Error registering", cce);
            }
        }
        while ((con = _wantsConRegister.poll()) != null) {
            try {
                SelectionKey key = con.getChannel().register(_selector, SelectionKey.OP_CONNECT);
                key.attach(con);
                con.setKey(key);
                try {
                    NTCPAddress naddr = con.getRemoteAddress();
                    if (naddr.getPort() <= 0)
                        throw new IOException("Invalid NTCP address: " + naddr);
                    InetSocketAddress saddr = new InetSocketAddress(naddr.getHost(), naddr.getPort());
                    boolean connected = con.getChannel().connect(saddr);
                    if (connected) {
                        _context.statManager().addRateData("ntcp.connectImmediate", 1, 0);
                        key.interestOps(SelectionKey.OP_READ);
                        processConnect(key);
                    }
                } catch (IOException ioe) {
                    if (_log.shouldLog(Log.WARN)) _log.warn("error connecting", ioe);
                    _context.statManager().addRateData("ntcp.connectFailedIOE", 1, 0);
                    _transport.markUnreachable(con.getRemotePeer().calculateHash());
                    //if (ntcpOnly(con)) {
                    //    _context.shitlist().shitlistRouter(con.getRemotePeer().calculateHash(), "unable to connect: " + ioe.getMessage());
                    //    con.close(false);
                    //} else {
                    //    _context.shitlist().shitlistRouter(con.getRemotePeer().calculateHash(), "unable to connect: " + ioe.getMessage(), NTCPTransport.STYLE);
                    con.close(true);
                } catch (UnresolvedAddressException uae) {
                    if (_log.shouldLog(Log.WARN)) _log.warn("unresolved address connecting", uae);
                    _context.statManager().addRateData("ntcp.connectFailedUnresolved", 1, 0);
                    _transport.markUnreachable(con.getRemotePeer().calculateHash());
                    //if (ntcpOnly(con)) {
                    //    _context.shitlist().shitlistRouter(con.getRemotePeer().calculateHash(), "unable to connect/resolve: " + uae.getMessage());
                    //    con.close(false);
                    //} else {
                    //    _context.shitlist().shitlistRouter(con.getRemotePeer().calculateHash(), "unable to connect/resolve: " + uae.getMessage(), NTCPTransport.STYLE);
                    con.close(true);
                } catch (CancelledKeyException cke) {
                    con.close(false);
                }
            } catch (ClosedChannelException cce) {
                if (_log.shouldLog(Log.WARN)) _log.warn("Error registering", cce);
            }
        }
        long now = System.currentTimeMillis();
        if (_lastExpired + 1000 <= now) {
            expireTimedOut();
            _lastExpired = now;
        }
    }

    /**
     * If the other peer only supports ntcp, we should shitlist them when we can't reach 'em,
     * but if they support other transports (eg ssu) we should allow those transports to be
     * tried as well.
     */
    private boolean ntcpOnly(NTCPConnection con) {
        RouterIdentity ident = con.getRemotePeer();
        if (ident == null) return true;
        RouterInfo info = _context.netDb().lookupRouterInfoLocally(ident.calculateHash());
        if (info == null) return true;
        return info.getAddresses().size() == 1;
    }

    // timestamp of the last expireTimedOut() run; see runDelayedEvents()
    private long _lastExpired;

    private void expireTimedOut() {
        _transport.expireTimedOut();
    }

    public long getIdleTimeout() { return _expireIdleWriteTime; }
}
package hu.sch.ejb;

import hu.sch.domain.enums.ValuationStatus;
import hu.sch.domain.user.User;
import hu.sch.domain.*;
import hu.sch.domain.config.Configuration;
import hu.sch.domain.user.ProfileImage;
import hu.sch.domain.user.UserAttribute;
import hu.sch.domain.user.UserAttributeName;
import hu.sch.domain.user.UserStatus;
import hu.sch.ejb.image.ImageProcessor;
import hu.sch.ejb.image.ImageSaver;
import hu.sch.services.*;
import hu.sch.services.exceptions.DuplicatedUserException;
import hu.sch.services.exceptions.PekEJBException;
import hu.sch.services.exceptions.PekErrorCode;
import hu.sch.util.hash.Hashing;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.SecureRandom;
import java.util.*;
import javax.annotation.Resource;
import javax.ejb.EJB;
import javax.ejb.SessionContext;
import javax.ejb.Stateless;
import javax.persistence.*;
import org.apache.commons.codec.binary.Base64;
import org.hibernate.Hibernate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * User lookup, registration, password and profile management.
 *
 * @author hege
 * @author messo
 * @author tomi
 */
@Stateless
public class UserManagerBean implements UserManagerLocal {

    // FIX: both were mutable "static" fields; constants and loggers should be final.
    private static final int PASSWORD_SALT_LENGTH = 8;
    private static final Logger logger = LoggerFactory.getLogger(UserManagerBean.class);
    @PersistenceContext
    EntityManager em;
    @EJB(name = "LogManagerBean")
    LogManagerLocal logManager;
    @EJB
    MailManagerBean mailManager;
    @EJB(name = "PostManagerBean")
    PostManagerLocal postManager;
    @EJB(name = "SystemManagerBean")
    private SystemManagerLocal systemManager;
    @Resource
    private SessionContext sessionContext;

    public UserManagerBean() {
    }

    // for testing
    public UserManagerBean(EntityManager em) {
        this.em = em;
    }

    @Override
    public User findUserById(Long userId) {
        return findUserById(userId, false);
    }

    /**
     * Finds a user by id, optionally fetching group memberships.
     * Returns null for id 0 or when no user is found.
     */
    @Override
    public User findUserById(Long userId, boolean includeMemberships) {
        if (userId.equals(0L)) {
            return null;
        }
        if (!includeMemberships) {
            return em.find(User.class, userId);
        }
        TypedQuery<User> q = em.createNamedQuery(User.findWithMemberships, User.class);
        q.setParameter("id", userId);
        try {
            return q.getSingleResult();
        } catch (Exception ex) {
            logger.warn("Can't find user with memberships for this id: " + userId);
            return null;
        }
    }

    @Override
    public User findUserByScreenName(String screenName) {
        try {
            return em.createNamedQuery(User.findByScreenName, User.class)
                    .setParameter("screenName", screenName)
                    .getSingleResult();
        } catch (NoResultException ex) {
            logger.info("User with {} screenname was not found.", screenName);
        }
        return null;
    }

    @Override
    public User findUserByNeptun(final String neptun) {
        return findUserByNeptun(neptun, false);
    }

    @Override
    public User findUserByNeptun(final String neptun, boolean includeMemberships) {
        try {
            final User user = em.createNamedQuery(User.findUserByNeptunCode, User.class)
                    .setParameter("neptun", neptun)
                    .getSingleResult();
            if (includeMemberships) {
                // force lazy collection initialization while the session is open
                Hibernate.initialize(user.getMemberships());
            }
            return user;
        } catch (NoResultException ex) {
            logger.info("User not found with {} neptun.", neptun);
        }
        return null;
    }

    /**
     * Finds a user by email address.
     *
     * @return the user, or null when none found
     * @throws DuplicatedUserException when more than one user shares the email
     */
    @Override
    public User findUserByEmail(final String email) throws DuplicatedUserException {
        try {
            return em.createQuery("SELECT u FROM User u WHERE u.emailAddress = :email", User.class)
                    .setParameter("email", email)
                    .getSingleResult();
        } catch (NoResultException ex) {
            logger.info("Could not find user with email: {}", email);
        } catch (NonUniqueResultException ex) {
            throw new DuplicatedUserException(String.format("Duplicate user with %s email", email), ex);
        }
        return null;
    }

    @Override
    public User findUserByConfirmationCode(final String code) {
        TypedQuery<User> q = em.createQuery("SELECT u FROM User u WHERE u.confirmationCode = :code", User.class);
        q.setParameter("code", code);
        User result = null;
        try {
            result = q.getSingleResult();
        } catch (NoResultException ex) {
            logger.info("No user was found with {} confirmation code.", code);
        } catch (NonUniqueResultException ex) {
            logger.error("Multiple users were found for the same {} confirmation code.", code);
        }
        return result;
    }

    /**
     * Activates a confirmed user: optionally sets a fresh salted password,
     * clears the confirmation code and switches status to ACTIVE.
     */
    @Override
    public void confirm(final User user, final String password) throws PekEJBException {
        if (password != null) {
            byte[] salt = generateSalt();
            String passwordDigest = hashPassword(password, salt);
            user.setSalt(Base64.encodeBase64String(salt));
            user.setPasswordDigest(passwordDigest);
        }
        user.setConfirmationCode(null);
        user.setUserStatus(UserStatus.ACTIVE);
        em.merge(user);
    }

    @Override
    public List<EntrantRequest> getEntrantRequestsForUser(User felhasznalo) {
        Query q = em.createQuery("SELECT e FROM EntrantRequest e "
                + "WHERE e.user=:user "
                + "ORDER BY e.valuation.semester DESC, e.entrantType ASC");
        q.setParameter("user", felhasznalo);
        return q.getResultList();
    }

    @Override
    public List<PointRequest> getPointRequestsForUser(User felhasznalo) {
        Query q = em.createQuery("SELECT p FROM PointRequest p "
                + "WHERE p.user=:user "
                + "ORDER BY p.valuation.semester DESC, p.valuation.group.name ASC");
        q.setParameter("user", felhasznalo);
        return q.getResultList();
    }

    /**
     * Registers a new user and emails a confirmation link.  When the caller is
     * an admin, no password is stored (the user sets one on confirmation).
     */
    @Override
    public void createUser(User user, String password) throws PekEJBException {
        byte[] salt = generateSalt();
        String passwordDigest = hashPassword(password, salt);
        boolean isAdmin = sessionContext.isCallerInRole(Roles.ADMIN);
        if (!isAdmin) {
            user.setSalt(Base64.encodeBase64String(salt));
            user.setPasswordDigest(passwordDigest);
        }
        user.setConfirmationCode(generateConfirmationCode());
        sendConfirmationEmail(user, isAdmin);
        em.persist(user);
    }

    @Override
    public void updateUser(User user) throws PekEJBException {
        updateUser(user, null);
    }

    @Override
    public void updateUser(User user, ProfileImage image) throws PekEJBException {
        // process image
        if (image != null) {
            ImageProcessor proc = new ImageProcessor(user, image, Configuration.getImageUploadConfig());
            String imagePath = proc.process();
            user.setPhotoPath(imagePath);
        }
        // save user
        em.merge(user);
    }

    @Override
    public List<User> findUsersByName(String name) {
        Query q = em.createQuery("SELECT u FROM User u WHERE UPPER(concat(concat(u.lastName, ' '), "
                + "u.firstName)) LIKE UPPER(:name) "
                + "ORDER BY u.lastName ASC, u.firstName ASC");
        q.setParameter("name", "%" + name + "%");
        return q.getResultList();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<Semester> getAllValuatedSemesterForUser(User user) {
        return em.createNamedQuery(User.getAllValuatedSemesterForUser).setParameter("user", user).getResultList();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getSemesterPointForUser(User user, Semester semester) {
        List<PointRequest> pontigenyek = getPointRequestsForUser(user);
        // sum accepted, non-obsolete points per group for this and the previous semester
        Map<Group, Integer> points = new HashMap<Group, Integer>();
        for (PointRequest pr : pontigenyek) {
            Valuation v = pr.getValuation();
            if (!v.isObsolete() && v.getPointStatus().equals(ValuationStatus.ELFOGADVA)) {
                if (v.getSemester().equals(semester) || v.getSemester().equals(semester.getPrevious())) {
                    if (points.containsKey(v.getGroup()) == false) {
                        points.put(v.getGroup(), 0);
                    }
                    points.put(v.getGroup(), points.get(v.getGroup()) + pr.getPoint());
                }
            }
        }
        // root-mean-square-like aggregation across groups, capped at 100
        int sum = 0;
        for (Integer pointFromGroup : points.values()) {
            sum += pointFromGroup * pointFromGroup;
        }
        return (int) Math.min(Math.sqrt(sum), 100);
    }

    @Override
    public SpotImage getSpotImage(User user) {
        TypedQuery<SpotImage> q = em.createNamedQuery(SpotImage.findByNeptun, SpotImage.class);
        q.setParameter("neptunCode", user.getNeptunCode());
        try {
            return q.getSingleResult();
        } catch (NoResultException ex) {
            return null;
        }
    }

    /**
     * Copies the recommended (spot) photo into the user's profile and removes
     * the recommendation.
     *
     * @return true on success, false when no image exists or copying failed
     */
    @Override
    public boolean acceptRecommendedPhoto(String screenName) {
        User user = findUserByScreenName(screenName);
        TypedQuery<SpotImage> q = em.createNamedQuery(SpotImage.findByNeptun, SpotImage.class);
        q.setParameter("neptunCode", user.getNeptunCode());
        try {
            SpotImage si = q.getSingleResult();
            ImageSaver imageSaver = new ImageSaver(user);
            String imgPath = imageSaver.copy(si.getImageFullPath()).getRelativePath();
            user.setPhotoPath(imgPath);
            removeSpotImage(si);
            return true;
        } catch (NoResultException ex) {
            logger.error("No user with {} screen name.", screenName);
        } catch (PekEJBException ex) {
            logger.error("Could not copy image. Error code: {}", ex.getErrorCode());
        }
        return false;
    }

    @Override
    public void declineRecommendedPhoto(User user) {
        TypedQuery<SpotImage> q = em.createNamedQuery(SpotImage.findByNeptun, SpotImage.class);
        q.setParameter("neptunCode", user.getNeptunCode());
        SpotImage img = q.getSingleResult();
        removeSpotImage(img);
    }

    @Override
    public void invertAttributeVisibility(User user, UserAttributeName attr) {
        boolean done = false;
        for (UserAttribute a : user.getPrivateAttributes()) {
            if (a.getAttributeName() == attr) {
                a.setVisible(!a.isVisible());
                done = true;
                break;
            }
        }
        // user's attribute list does not contain the given attribute
        // which means it is NOT visible, so we'll make it visible
        if (!done) {
            user.getPrivateAttributes().add(new UserAttribute(attr, true));
        }
    }

    /**
     * Changes the user's password after verifying the old one against the
     * stored digest.  The existing salt is reused for the new password.
     *
     * @throws PekEJBException USER_PASSWORD_INVALID when the old password does not match
     */
    @Override
    public void changePassword(String screenName, String oldPwd, String newPwd) throws PekEJBException {
        User user = findUserByScreenName(screenName);
        byte[] salt = Base64.decodeBase64(user.getSalt());
        String passwordHash = hashPassword(oldPwd, salt);
        if (!passwordHash.equals(user.getPasswordDigest())) {
            logger.info("Password change requested with invalid password for user {}", user.getId());
            throw new PekEJBException(PekErrorCode.USER_PASSWORD_INVALID);
        }
        user.setPasswordDigest(hashPassword(newPwd, salt));
        em.merge(user);
    }

    /**
     * Deletes the spot image from the file system and db.
     *
     * @param img the image to delete
     */
    private void removeSpotImage(SpotImage img) {
        try {
            Files.deleteIfExists(Paths.get(img.getImageFullPath()));
        } catch (IOException ex) {
            logger.warn("IO Error while deleting file.", ex);
            // nothing to do.
        }
        // this updates the user record via a trigger.
        // usr_show_recommended will be false after the update.
        em.remove(img);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean sendUserNameReminder(final String email) throws PekEJBException {
        if (email == null || email.isEmpty()) {
            throw new IllegalArgumentException("email argument can't be null when sending user name reminder");
        }
        try {
            final User result = findUserByEmail(email);
            if (result == null) {
                throw new PekEJBException(PekErrorCode.USER_NOTFOUND);
            } else {
                final String subject = MailManagerBean.getMailString(MailManagerBean.MAIL_USERNAME_REMINDER_SUBJECT);
                final String messageBody;
                if (systemManager.getNewbieTime()) {
                    messageBody = String.format(
                            MailManagerBean.getMailString(MailManagerBean.MAIL_USERNAME_REMINDER_BODY_NEWBIE),
                            result.getFirstName(), result.getScreenName());
                } else {
                    messageBody = String.format(
                            MailManagerBean.getMailString(MailManagerBean.MAIL_USERNAME_REMINDER_BODY),
                            result.getFirstName(), result.getScreenName());
                }
                return mailManager.sendEmail(email, subject, messageBody);
            }
        } catch (DuplicatedUserException ex) {
            logger.error("sendUserNameReminder: Duplicated user with email={}", email);
        }
        return false;
    }

    /**
     * Hashes password + salt with SHA-1 and returns the Base64 digest.
     * NOTE(review): a single unsalted-iteration SHA-1 is weak for password
     * storage; a KDF (bcrypt/scrypt/argon2) would be preferable, but changing
     * it would invalidate existing digests — see the password-policy TODO below.
     */
    private String hashPassword(String password, byte[] salt) throws PekEJBException {
        byte[] passwordBytes;
        try {
            passwordBytes = password.getBytes("UTF-8");
        } catch (UnsupportedEncodingException ex) {
            logger.error("UTF-8 is not supported.", ex);
            throw new PekEJBException(PekErrorCode.SYSTEM_ENCODING_NOTSUPPORTED);
        }
        byte[] hashInput = new byte[passwordBytes.length + salt.length];
        System.arraycopy(passwordBytes, 0, hashInput, 0, passwordBytes.length);
        System.arraycopy(salt, 0, hashInput, passwordBytes.length, salt.length);
        return Hashing.sha1(hashInput).toBase64();
    }

    /** Generates a cryptographically random password salt. */
    private byte[] generateSalt() {
        byte[] salt = new byte[PASSWORD_SALT_LENGTH];
        new SecureRandom().nextBytes(salt);
        return salt;
    }

    /**
     * Generates and sets a random confirmation code for the user.
     */
    private String generateConfirmationCode() {
        Random rnd = new SecureRandom();
        byte[] bytes = new byte[48];
        String confirm = null;
        TypedQuery<Long> q = em.createQuery("SELECT COUNT(u) FROM User u WHERE u.confirmationCode = :confirm", Long.class);
        // check for uniqueness!
        do {
            rnd.nextBytes(bytes);
            confirm = Base64.encodeBase64URLSafeString(bytes);
            q.setParameter("confirm", confirm);
        } while (!q.getSingleResult().equals(0L));
        // 48 byte of randomness encoded into 64 characters
        return confirm;
    }

    private boolean sendConfirmationEmail(User user, boolean isCreatedByAdmin) {
        String subject, body;
        subject = MailManagerBean.getMailString(MailManagerBean.MAIL_CONFIRMATION_SUBJECT);
        if (isCreatedByAdmin) {
            body = String.format(
                    MailManagerBean.getMailString(MailManagerBean.MAIL_CONFIRMATION_ADMIN_BODY),
                    user.getFullName(), generateConfirmationLink(user));
        } else {
            body = String.format(
                    MailManagerBean.getMailString(MailManagerBean.MAIL_CONFIRMATION_BODY),
                    user.getFullName(), generateConfirmationLink(user));
        }
        return mailManager.sendEmail(user.getEmailAddress(), subject, body);
    }

    private String generateConfirmationLink(User user) {
        String domain = Configuration.getProfileDomain();
        return String.format("https://%s/profile/confirm/code/%s", domain, user.getConfirmationCode());
    }
    // TODO: password policy
}
package hu.sch.ejb;

import com.unboundid.ldap.sdk.LDAPException;
import hu.sch.domain.user.User;
import hu.sch.domain.*;
import hu.sch.domain.user.UserAttribute;
import hu.sch.domain.user.UserAttributeName;
import hu.sch.domain.user.UserStatus;
import hu.sch.ejb.ldap.LdapSynchronizer;
import hu.sch.services.*;
import hu.sch.services.exceptions.CreateFailedException;
import hu.sch.services.exceptions.DuplicatedUserException;
import hu.sch.services.exceptions.InvalidPasswordException;
import hu.sch.services.exceptions.UpdateFailedException;
import java.util.*;
import java.util.logging.Level;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.persistence.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * User lookup and profile management; account data is kept in sync with the
 * LDAP directory through {@link LdapSynchronizer}.
 *
 * @author hege
 * @author messo
 * @author tomi
 */
@Stateless
public class UserManagerBean implements UserManagerLocal {

    // FIX: logger should be a final constant
    private static final Logger logger = LoggerFactory.getLogger(UserManagerBean.class);
    @PersistenceContext
    EntityManager em;
    @EJB(name = "LogManagerBean")
    LogManagerLocal logManager;
    @EJB(name = "MailManagerBean")
    MailManagerLocal mailManager;
    @EJB(name = "PostManagerBean")
    PostManagerLocal postManager;

    @Override
    public User findUserById(Long userId) {
        return findUserById(userId, false);
    }

    /**
     * Finds a user by id, optionally fetching group memberships.
     * Returns null for id 0 or when no user is found.
     */
    @Override
    public User findUserById(Long userId, boolean includeMemberships) {
        if (userId.equals(0L)) {
            return null;
        }
        if (!includeMemberships) {
            return em.find(User.class, userId);
        }
        TypedQuery<User> q = em.createNamedQuery(User.findWithMemberships, User.class);
        q.setParameter("id", userId);
        try {
            return q.getSingleResult();
        } catch (Exception ex) {
            logger.warn("Can't find user with memberships for this id: " + userId);
            return null;
        }
    }

    @Override
    public User findUserByScreenName(String screenName) {
        try {
            return em.createNamedQuery(User.findByScreenName, User.class)
                    .setParameter("screenName", screenName)
                    .getSingleResult();
        } catch (NoResultException ex) {
            logger.info("User with {} screenname was not found.", screenName);
        }
        return null;
    }

    @Override
    public User findUserByNeptun(final String neptun) {
        try {
            return em.createQuery("SELECT u FROM User u WHERE u.neptunCode = :neptun", User.class)
                    .setParameter("neptun", neptun)
                    .getSingleResult();
        } catch (NoResultException ex) {
            logger.info("User not found with {} neptun.", neptun);
        }
        return null;
    }

    /**
     * Finds a user by email address.
     *
     * @return the user, or null when none found
     * @throws DuplicatedUserException when more than one user shares the email
     */
    @Override
    public User findUserByEmail(final String email) throws DuplicatedUserException {
        try {
            return em.createQuery("SELECT u FROM User u WHERE u.emailAddress = :email", User.class)
                    .setParameter("email", email)
                    .getSingleResult();
        } catch (NoResultException ex) {
            logger.info("Could not find user with email: {}", email);
        } catch (NonUniqueResultException ex) {
            throw new DuplicatedUserException(String.format("Duplicate user with %s email", email), ex);
        }
        return null;
    }

    @Override
    public List<EntrantRequest> getEntrantRequestsForUser(User felhasznalo) {
        Query q = em.createQuery("SELECT e FROM EntrantRequest e "
                + "WHERE e.user=:user "
                + "ORDER BY e.valuation.semester DESC, e.entrantType ASC");
        q.setParameter("user", felhasznalo);
        return q.getResultList();
    }

    @Override
    public List<PointRequest> getPointRequestsForUser(User felhasznalo) {
        Query q = em.createQuery("SELECT p FROM PointRequest p "
                + "WHERE p.user=:user "
                + "ORDER BY p.valuation.semester DESC, p.valuation.group.name ASC");
        q.setParameter("user", felhasznalo);
        return q.getResultList();
    }

    /**
     * Creates the directory entry first, then persists the user; the DB insert
     * is skipped when the LDAP step fails.
     */
    @Override
    public void createUser(User user, String password, UserStatus status) throws CreateFailedException {
        try (LdapSynchronizer sync = new LdapSynchronizer()) {
            sync.createEntry(user, password, status);
            em.persist(user);
        } catch (InvalidPasswordException ex) {
            throw new CreateFailedException("Password is not valid. It must be at least 6 chars long.", ex);
        } catch (LDAPException ex) {
            throw new CreateFailedException("Could not create entry in DS", ex);
        } catch (Exception ex) {
            throw new CreateFailedException("Unknown error.", ex);
        }
    }

    /** Syncs the directory entry, then merges the user into the DB. */
    @Override
    public void updateUser(User user) throws UpdateFailedException {
        try (LdapSynchronizer sync = new LdapSynchronizer()) {
            sync.syncEntry(user);
            em.merge(user);
        } catch (Exception ex) {
            throw new UpdateFailedException("Failed updating the DS entry for the user.", ex);
        }
    }

    @Override
    public List<User> findUsersByName(String name) {
        Query q = em.createQuery("SELECT u FROM User u WHERE UPPER(concat(concat(u.lastName, ' '), "
                + "u.firstName)) LIKE UPPER(:name) "
                + "ORDER BY u.lastName ASC, u.firstName ASC");
        q.setParameter("name", "%" + name + "%");
        return q.getResultList();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<Semester> getAllValuatedSemesterForUser(User user) {
        return em.createNamedQuery(User.getAllValuatedSemesterForUser).setParameter("user", user).getResultList();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getSemesterPointForUser(User user, Semester semester) {
        List<PointRequest> pontigenyek = getPointRequestsForUser(user);
        // sum accepted, non-obsolete points per group for this and the previous semester
        Map<Group, Integer> points = new HashMap<Group, Integer>();
        for (PointRequest pr : pontigenyek) {
            Valuation v = pr.getValuation();
            if (!v.isObsolete() && v.getPointStatus().equals(ValuationStatus.ELFOGADVA)) {
                if (v.getSemester().equals(semester) || v.getSemester().equals(semester.getPrevious())) {
                    if (points.containsKey(v.getGroup()) == false) {
                        points.put(v.getGroup(), 0);
                    }
                    points.put(v.getGroup(), points.get(v.getGroup()) + pr.getPoint());
                }
            }
        }
        // root-mean-square-like aggregation across groups, capped at 100
        int sum = 0;
        for (Integer pointFromGroup : points.values()) {
            sum += pointFromGroup * pointFromGroup;
        }
        return (int) Math.min(Math.sqrt(sum), 100);
    }

    @Override
    public SpotImage getSpotImage(User user) {
        TypedQuery<SpotImage> q = em.createNamedQuery(SpotImage.findByNeptun, SpotImage.class);
        q.setParameter("neptunCode", user.getNeptunCode());
        try {
            return q.getSingleResult();
        } catch (NoResultException ex) {
            return null;
        }
    }

    /**
     * Sets the recommended (spot) photo as the user's profile photo and removes
     * the recommendation record.
     *
     * @return true on success, false when no spot image exists
     */
    @Override
    public boolean acceptRecommendedPhoto(String screenName) {
        User user = findUserByScreenName(screenName);
        TypedQuery<SpotImage> q = em.createNamedQuery(SpotImage.findByNeptun, SpotImage.class);
        q.setParameter("neptunCode", user.getNeptunCode());
        try {
            SpotImage si = q.getSingleResult();
            user.setPhotoPath(si.getImagePath());
            em.remove(si);
            return true;
        } catch (NoResultException ex) {
            return false;
        }
    }

    @Override
    public void declineRecommendedPhoto(User user) {
        Query q = em.createNamedQuery(SpotImage.deleteByNeptun);
        q.setParameter("neptunCode", user.getNeptunCode());
        q.executeUpdate();
    }

    @Override
    public void invertAttributeVisibility(User user, UserAttributeName attr) {
        User managedUser = em.merge(user);
        boolean done = false;
        for (UserAttribute a : managedUser.getPrivateAttributes()) {
            if (a.getAttributeName() == attr) {
                a.setVisible(!a.isVisible());
                done = true;
                break;
            }
        }
        // user's attribute list does not contain the given attribute
        // which means it is NOT visible, so we'll make it visible
        if (!done) {
            managedUser.getPrivateAttributes().add(new UserAttribute(attr, true));
        }
    }

    /** Updates the user's status in the directory entry only (no DB write here). */
    @Override
    public void updateUserStatus(User user, UserStatus userStatus) throws UpdateFailedException {
        try (LdapSynchronizer sync = new LdapSynchronizer()) {
            sync.updateStatus(user, userStatus);
        } catch (LDAPException ex) {
            throw new UpdateFailedException("Could not update the user's status in the directory entry.", ex);
        } catch (Exception ex) {
            throw new UpdateFailedException("Unknown error.", ex);
        }
    }

    /** Changes the password via the directory; old password is verified there. */
    @Override
    public void changePassword(String screenName, String oldPwd, String newPwd) throws InvalidPasswordException, UpdateFailedException {
        try (LdapSynchronizer sync = new LdapSynchronizer()) {
            sync.changePassword(screenName, oldPwd, newPwd);
        } catch (Exception ex) {
            throw new UpdateFailedException("Could not update password.", ex);
        }
    }
}
package org.sejda.impl.sambox;

import static java.util.Comparator.reverseOrder;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.sejda.common.ComponentsUtility.nullSafeCloseQuietly;
import static org.sejda.core.notification.dsl.ApplicationEventsNotifier.notifyEvent;
import static org.sejda.core.support.io.IOUtils.createTemporaryPdfBuffer;
import static org.sejda.core.support.io.model.FileOutput.file;
import static org.sejda.core.support.prefix.NameGenerator.nameGenerator;
import static org.sejda.core.support.prefix.model.NameGenerationRequest.nameRequest;
import static org.sejda.impl.sambox.util.FontUtils.getStandardType1Font;

import java.io.File;
import java.util.SortedSet;
import java.util.TreeSet;

import org.sejda.core.support.io.MultipleOutputWriter;
import org.sejda.core.support.io.OutputWriters;
import org.sejda.impl.sambox.component.DefaultPdfSourceOpener;
import org.sejda.impl.sambox.component.PDDocumentHandler;
import org.sejda.impl.sambox.component.PageGeometricalShapeWriter;
import org.sejda.impl.sambox.component.PageImageWriter;
import org.sejda.impl.sambox.component.PageTextReplacer;
import org.sejda.impl.sambox.component.PageTextWriter;
import org.sejda.impl.sambox.util.FontUtils;
import org.sejda.model.RectangularBox;
import org.sejda.model.exception.TaskException;
import org.sejda.model.input.PdfSource;
import org.sejda.model.input.PdfSourceOpener;
import org.sejda.model.parameter.EditParameters;
import org.sejda.model.parameter.edit.AddImageOperation;
import org.sejda.model.parameter.edit.AddShapeOperation;
import org.sejda.model.parameter.edit.AppendTextOperation;
import org.sejda.model.parameter.edit.DeletePageOperation;
import org.sejda.model.parameter.edit.EditTextOperation;
import org.sejda.model.parameter.edit.HighlightTextOperation;
import org.sejda.model.parameter.edit.InsertPageOperation;
import org.sejda.model.pdf.encryption.PdfAccessPermission;
import org.sejda.model.task.BaseTask;
import org.sejda.model.task.TaskExecutionContext;
import org.sejda.sambox.pdmodel.PDPage;
import org.sejda.sambox.pdmodel.common.PDRectangle;
import org.sejda.sambox.pdmodel.font.PDFont;
import org.sejda.sambox.pdmodel.font.PDType1Font;
import org.sejda.sambox.pdmodel.graphics.color.PDColor;
import org.sejda.sambox.pdmodel.graphics.color.PDDeviceRGB;
import org.sejda.sambox.pdmodel.graphics.image.PDImageXObject;
import org.sejda.sambox.pdmodel.interactive.annotation.PDAnnotationTextMarkup;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Task applying a batch of edit operations ({@link EditParameters}) to one or more input PDFs.
 * <p>
 * For each source document the operations are applied in a fixed order: page deletions, page
 * insertions, text edits, appended text, images, text highlights and finally geometrical shapes.
 * The edited document is written to a temporary buffer and registered with the output writer
 * under a generated name.
 */
public class EditTask extends BaseTask<EditParameters> {

    private static final Logger LOG = LoggerFactory.getLogger(EditTask.class);

    // Number of source documents to process; used for progress notification.
    private int totalSteps;
    // Handler for the document currently being edited; closed in after().
    private PDDocumentHandler documentHandler = null;
    private MultipleOutputWriter outputWriter;
    private PdfSourceOpener<PDDocumentHandler> documentLoader;

    /**
     * Prepares per-execution state: the step counter, the source opener and the output writer
     * configured with the task's existing-output policy.
     */
    @Override
    public void before(EditParameters parameters, TaskExecutionContext executionContext) throws TaskException {
        super.before(parameters, executionContext);
        totalSteps = parameters.getSourceList().size();
        documentLoader = new DefaultPdfSourceOpener();
        outputWriter = OutputWriters.newMultipleOutputWriter(parameters.getExistingOutputPolicy(), executionContext);
    }

    /**
     * Applies every configured edit operation to each source document in turn.
     * <p>
     * The per-document order of operations (deletions before insertions before content edits)
     * matters: page numbers referenced by later operations are interpreted against the document
     * as it stands after the earlier structural changes.
     *
     * @throws TaskException if a source cannot be opened, lacks MODIFY permission, or an
     *         operation fails.
     */
    @Override
    public void execute(EditParameters parameters) throws TaskException {
        int currentStep = 0;

        for (PdfSource<?> source : parameters.getSourceList()) {
            executionContext().assertTaskNotCancelled();
            currentStep++;

            LOG.debug("Opening {}", source);
            documentHandler = source.open(documentLoader);
            // Editing requires the MODIFY permission on encrypted documents.
            documentHandler.getPermissions().ensurePermission(PdfAccessPermission.MODIFY);
            documentHandler.setCreatorOnPDDocument();

            File tmpFile = createTemporaryPdfBuffer();
            LOG.debug("Created output on temporary buffer {}", tmpFile);

            documentHandler.setVersionOnPDDocument(parameters.getVersion());
            documentHandler.setCompress(parameters.isCompress());

            // before we start removing pages, keep track of page size so newly inserted
            // pages can match the original geometry even if page 1 gets deleted
            PDRectangle firstPageMediaBox = documentHandler.getPage(1).getMediaBox();
            int firstPageRotation = documentHandler.getPage(1).getRotation();

            // to be able to delete multiple pages without having issues due to index shift
            // remove them in descending order, one by one
            TreeSet<Integer> pagesToDeleteSorted = new TreeSet<>(reverseOrder());
            for(DeletePageOperation deleteOperation: parameters.getDeletePageOperations()) {
                pagesToDeleteSorted.add(deleteOperation.getPageNumber());
            }
            for(Integer pageNumber : pagesToDeleteSorted) {
                LOG.debug("Deleting page {}", pageNumber);
                documentHandler.removePage(pageNumber);
            }

            for(InsertPageOperation insertPageOperation : parameters.getInsertPageOperations()) {
                int pageNumber = insertPageOperation.getPageNumber();
                if(documentHandler.getNumberOfPages() == 0) {
                    // strange place to be, right? Eg: document had 1 page, user removed it and inserted a new one in place
                    PDPage added = documentHandler.addBlankPage(firstPageMediaBox);
                    added.setRotation(firstPageRotation);
                } else {
                    if (pageNumber > 1) {
                        LOG.debug("Adding new page after page {}", pageNumber - 1);
                        PDPage added = documentHandler.addBlankPageAfter(pageNumber - 1);
                        added.setRotation(firstPageRotation);
                    } else {
                        // pageNumber <= 1: insert at the front of the document
                        LOG.debug("Adding new page before page {}", pageNumber);
                        PDPage added = documentHandler.addBlankPageBefore(pageNumber);
                        added.setRotation(firstPageRotation);
                    }
                }
            }

            // page count after structural changes; page ranges below resolve against this
            int totalPages = documentHandler.getNumberOfPages();

            PageTextReplacer textReplacer = new PageTextReplacer(documentHandler.getUnderlyingPDDocument());
            for(EditTextOperation editTextOperation: parameters.getEditTextOperations()) {
                SortedSet<Integer> pageNumbers = editTextOperation.getPageRange().getPages(totalPages);
                for (int pageNumber : pageNumbers) {
                    PDPage page = documentHandler.getPageCached(pageNumber);
                    textReplacer.replaceText(page, pageNumber, editTextOperation.getText(), editTextOperation.getBoundingBox());
                }
            }

            for (AppendTextOperation textOperation : parameters.getAppendTextOperations()) {
                PageTextWriter textWriter = new PageTextWriter(documentHandler.getUnderlyingPDDocument());
                SortedSet<Integer> pageNumbers = textOperation.getPageRange().getPages(totalPages);
                for (int pageNumber : pageNumbers) {
                    PDPage page = documentHandler.getPageCached(pageNumber);
                    // fall back to Helvetica when the requested font is not a standard Type1 font
                    PDFont font = defaultIfNull(getStandardType1Font(textOperation.getFont()), PDType1Font.HELVETICA);
                    textWriter.write(page, textOperation.getPosition(), textOperation.getText(), font,
                            textOperation.getFontSize(), textOperation.getColor());
                }
            }

            for(AddImageOperation imageOperation: parameters.getImageOperations()) {
                PageImageWriter imageWriter = new PageImageWriter(documentHandler.getUnderlyingPDDocument());
                // convert the image source once, then stamp it on every page in the range
                PDImageXObject image = PageImageWriter.toPDXImageObject(imageOperation.getImageSource());
                SortedSet<Integer> pageNumbers = imageOperation.getPageRange().getPages(totalPages);
                for (int pageNumber : pageNumbers) {
                    PDPage page = documentHandler.getPageCached(pageNumber);
                    imageWriter.append(page, image, imageOperation.getPosition(),
                            imageOperation.getWidth(), imageOperation.getHeight(), null);
                }
            }

            for(HighlightTextOperation highlightTextOperation: parameters.getHighlightTextOperations()) {
                // one highlight annotation per bounding box, all on the operation's page
                for(RectangularBox boundingBox: highlightTextOperation.getBoundingBoxes()){
                    PDAnnotationTextMarkup markup = new PDAnnotationTextMarkup(PDAnnotationTextMarkup.SUB_TYPE_HIGHLIGHT);
                    PDRectangle rect = new PDRectangle(boundingBox.getLeft(), boundingBox.getBottom(),
                            boundingBox.getRight() - boundingBox.getLeft(),
                            boundingBox.getTop() - boundingBox.getBottom());
                    markup.setRectangle(rect);
                    markup.setQuadPoints(quadsOf(rect));
                    // semi-transparent so the highlighted text stays readable
                    markup.setConstantOpacity((float) 0.4);
                    markup.setColor(new PDColor(new float[]{
                            highlightTextOperation.getColor().getRed(),
                            highlightTextOperation.getColor().getGreen(),
                            highlightTextOperation.getColor().getBlue()
                    }, PDDeviceRGB.INSTANCE));
                    documentHandler.getPageCached(highlightTextOperation.getPageNumber()).getAnnotations().add(markup);
                }
            }

            PageGeometricalShapeWriter shapeWriter = new PageGeometricalShapeWriter(documentHandler.getUnderlyingPDDocument());
            for(AddShapeOperation shapeOperation: parameters.getShapeOperations()) {
                SortedSet<Integer> pageNumbers = shapeOperation.getPageRange().getPages(totalPages);
                for (int pageNumber : pageNumbers) {
                    PDPage page = documentHandler.getPageCached(pageNumber);
                    shapeWriter.drawShape(
                            shapeOperation.getShape(), page,
                            shapeOperation.getPosition(),
                            shapeOperation.getWidth(), shapeOperation.getHeight(),
                            shapeOperation.getBorderColor(), shapeOperation.getBackgroundColor(),
                            shapeOperation.getBorderWidth()
                    );
                }
            }

            documentHandler.savePDDocument(tmpFile);

            String outName = nameGenerator(parameters.getOutputPrefix()).generate(
                    nameRequest().originalName(source.getName()).fileNumber(currentStep));
            outputWriter.addOutput(file(tmpFile).name(outName));

            // release fonts loaded for this document before moving to the next one
            FontUtils.clearLoadedFontCache(documentHandler.getUnderlyingPDDocument());

            notifyEvent(executionContext().notifiableTaskMetadata()).stepsCompleted(currentStep).outOf(totalSteps);
        }

        parameters.getOutput().accept(outputWriter);
    }

    /**
     * Releases the handler for the last processed document, if any.
     */
    @Override
    public void after() {
        nullSafeCloseQuietly(documentHandler);
    }

    /**
     * Builds the quad-points array describing the highlighted area of {@code position}.
     *
     * @param position the annotation rectangle
     * @return the eight quad-point coordinates expected by text markup annotations
     */
    private float[] quadsOf(PDRectangle position) {
        // work out the points forming the four corners of the annotations
        // set out in anti clockwise form (Completely wraps the text)
        // OK, the below doesn't match that description.
        // It's what acrobat 7 does and displays properly!
        float[] quads = new float[8];
        quads[0] = position.getLowerLeftX();
        quads[1] = position.getUpperRightY();
        quads[2] = position.getUpperRightX();
        quads[3] = quads[1];
        quads[4] = quads[0];
        quads[5] = position.getLowerLeftY();
        quads[6] = quads[2];
        quads[7] = quads[5];
        return quads;
    }
}
package org.jboss.as.server.deployment; /** * An enumeration of the phases of a deployment unit's processing cycle. * * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a> */ public enum Phase { /* == TEMPLATE == * Upon entry, this phase performs the following actions: * <ul> * <li></li> * </ul> * <p> * Processors in this phase have access to the following phase attachments: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> * Processors in this phase have access to the following deployment unit attachments, in addition to those defined * for the previous phase: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> * In this phase, these phase attachments may be modified: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> */ /** * This phase creates the initial root structure. Depending on the service for this phase will ensure that the * deployment unit's initial root structure is available and accessible. * <p> * Upon entry, this phase performs the following actions: * <ul> * <li>The primary deployment root is mounted (during {@link #STRUCTURE_MOUNT})</li> * <li>Other internal deployment roots are mounted (during {@link #STRUCTURE_NESTED_JAR})</li> * </ul> * <p> * Processors in this phase have access to the following phase attachments: * <ul> * <li><i>N/A</i></li> * </ul> * <p> * Processors in this phase have access to the following deployment unit attachments: * <ul> * <li>{@link Attachments#DEPLOYMENT_ROOT} - the mounted deployment root for this deployment unit</li> * </ul> * <p> * In this phase, these phase attachments may be modified: * <ul> * </ul> * <p> */ STRUCTURE(null), /** * This phase assembles information from the root structure to prepare for adding and processing additional external * structure, such as from class path entries and other similar mechanisms. 
* <p> * Upon entry, this phase performs the following actions: * <ul> * <li>The root content's MANIFEST is read and made available during {@link #PARSE_MANIFEST}.</li> * <li>The annotation index for the root structure is calculated during {@link #STRUCTURE_ANNOTATION_INDEX}.</li> * </ul> * <p> * Processors in this phase have access to the following phase attachments: * <ul> * <li>{@link Attachments#MANIFEST} - the parsed manifest of the root structure</li> * </ul> * <p> * Processors in this phase have access to the following deployment unit attachments, in addition to those defined * for the previous phase: * <ul> * <li><i>N/A</i></li> * </ul> * <p> * In this phase, these phase attachments may be modified: * <ul> * <li>{@link Attachments#CLASS_PATH_ENTRIES} - class path entries found in the manifest and elsewhere.</li> * <li>{@link Attachments#EXTENSION_LIST_ENTRIES} - extension-list entries found in the manifest and elsewhere.</li> * </ul> * <p> */ PARSE(null), /** * In this phase, the full structure of the deployment unit is made available and module dependencies may be assembled. * <p> * Upon entry, this phase performs the following actions: * <ul> * <li>Any additional external structure is mounted during {@link #XXX}</li> * <li></li> * </ul> * <p> * Processors in this phase have access to the following phase attachments: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> * Processors in this phase have access to the following deployment unit attachments, in addition to those defined * for the previous phase: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> * In this phase, these phase attachments may be modified: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> */ DEPENDENCIES(null), CONFIGURE_MODULE(null), POST_MODULE(null), INSTALL(null), CLEANUP(null), ; /** * This is the key for the attachment to use as the phase's "value". The attachment is taken from * the deployment unit. 
If a phase doesn't have a single defining "value", {@code null} is specified. */ private final AttachmentKey<?> phaseKey; private Phase(final AttachmentKey<?> key) { phaseKey = key; } /** * Get the next phase, or {@code null} if none. * * @return the next phase, or {@code null} if there is none */ public Phase next() { final int ord = ordinal() + 1; final Phase[] phases = Phase.values(); return ord == phases.length ? null : phases[ord]; } /** * Get the attachment key of the {@code DeploymentUnit} attachment that represents the result value * of this phase. * * @return the key */ public AttachmentKey<?> getPhaseKey() { return phaseKey; } // STRUCTURE public static final int STRUCTURE_MOUNT = 0x0000; public static final int STRUCTURE_RAR = 0x0100; public static final int STRUCTURE_WAR_DEPLOYMENT_INIT = 0x0200; public static final int STRUCTURE_WAR = 0x0300; public static final int STRUCTURE_EAR_DEPLOYMENT_INIT = 0x0400; public static final int STRUCTURE_EAR_APP_XML_PARSE = 0x0500; public static final int STRUCTURE_EAR_JBOSS_APP_XML_PARSE = 0x0600; public static final int STRUCTURE_EAR = 0x0700; public static final int STRUCTURE_MANIFEST = 0x0800; public static final int STRUCTURE_OSGI_MANIFEST = 0x0900; public static final int STRUCTURE_SERVICE_MODULE_LOADER = 0x0A00; public static final int STRUCTURE_ANNOTATION_INDEX = 0x0B00; public static final int STRUCTURE_EJB_JAR_IN_EAR = 0x0C00; public static final int STRUCTURE_SAR_SUB_DEPLOY_CHECK = 0x0D00; public static final int STRUCTURE_SUB_DEPLOYMENT = 0x0E00; public static final int STRUCTURE_MODULE_IDENTIFIERS = 0x0F00; public static final int STRUCTURE_EE_MODULE_INIT = 0x1000; // PARSE public static final int PARSE_EE_MODULE_NAME = 0x0100; public static final int PARSE_STRUCTURE_DESCRIPTOR = 0x0200; public static final int PARSE_COMPOSITE_ANNOTATION_INDEX = 0x0300; public static final int PARSE_EAR_LIB_CLASS_PATH = 0x0400; public static final int PARSE_ADDITIONAL_MODULES = 0x0500; public static final int 
PARSE_CLASS_PATH = 0x0600; public static final int PARSE_EXTENSION_LIST = 0x0700; public static final int PARSE_EXTENSION_NAME = 0x0800; public static final int PARSE_OSGI_BUNDLE_INFO = 0x0900; public static final int PARSE_OSGI_PROPERTIES = 0x0A00; // create and attach EJB metadata for EJB deployments public static final int PARSE_EJB_DEPLOYMENT = 0x0B00; // create and attach the component description out of EJB annotations public static final int PARSE_EJB_ANNOTATION = 0x0C00; public static final int PARSE_WEB_DEPLOYMENT = 0x0D00; public static final int PARSE_WEB_DEPLOYMENT_FRAGMENT = 0x0E00; public static final int PARSE_JBOSS_WEB_DEPLOYMENT = 0x0F00; public static final int PARSE_TLD_DEPLOYMENT = 0x1000; public static final int PARSE_RA_DEPLOYMENT = 0x1100; public static final int PARSE_SERVICE_LOADER_DEPLOYMENT = 0x1200; public static final int PARSE_SERVICE_DEPLOYMENT = 0x1300; public static final int PARSE_MC_BEAN_DEPLOYMENT = 0x1400; public static final int PARSE_IRON_JACAMAR_DEPLOYMENT = 0x1500; public static final int PARSE_RESOURCE_ADAPTERS = 0x1600; public static final int PARSE_DATA_SOURCES = 0x1700; public static final int PARSE_ARQUILLIAN_RUNWITH = 0x1800; public static final int PARSE_MANAGED_BEAN_ANNOTATION = 0x1900; public static final int PARSE_JAXRS_ANNOTATIONS = 0x1A00; public static final int PARSE_WELD_DEPLOYMENT = 0x1B00; public static final int PARSE_WEBSERVICES_XML = 0x1C00; public static final int PARSE_DATA_SOURCE_DEFINITION = 0x1D00; public static final int PARSE_EJB_CONTEXT_BINDING = 0x1E00; public static final int PARSE_PERSISTENCE_UNIT = 0x1F00; // DEPENDENCIES public static final int DEPENDENCIES_MODULE = 0x100; public static final int DEPENDENCIES_DS = 0x200; public static final int DEPENDENCIES_RAR_CONFIG = 0x300; public static final int DEPENDENCIES_MANAGED_BEAN = 0x400; public static final int DEPENDENCIES_SAR_MODULE = 0x500; public static final int DEPENDENCIES_WAR_MODULE = 0x600; public static final int 
DEPENDENCIES_ARQUILLIAN = 0x700; public static final int DEPENDENCIES_CLASS_PATH = 0x800; public static final int DEPENDENCIES_EXTENSION_LIST = 0x900; public static final int DEPENDENCIES_WELD = 0xA00; public static final int DEPENDENCIES_NAMING = 0xB00; public static final int DEPENDENCIES_WS = 0xC00; public static final int DEPENDENCIES_JAXRS = 0xD00; public static final int DEPENDENCIES_SUB_DEPLOYMENTS = 0xE00; // Sets up appropriate module dependencies for EJB deployments public static final int DEPENDENCIES_EJB = 0xF00; public static final int DEPENDENCIES_JPA = 0x1000; // CONFIGURE_MODULE public static final int CONFIGURE_MODULE_SPEC = 0x100; // POST_MODULE public static final int POST_MODULE_ANNOTATION_WAR = 0x0100; public static final int POST_MODULE_EJB_TRANSACTION_MANAGEMENT = 0x0200; public static final int POST_MODULE_EJB_LOCAL_VIEW_ANNOTATION = 0x0300; public static final int POST_MODULE_EJB_NO_INTERFACE_VIEW_ANNOTATION = 0x0400; public static final int POST_MODULE_EJB_STARTUP_ANNOTATION = 0x0500; public static final int POST_MODULE_EJB_CONCURRENCY_MANAGEMENT_ANNOTATION = 0x0501; // should be after ConcurrencyManagement annotation processor public static final int POST_MODULE_EJB_LOCK_ANNOTATION = 0x0502; // should be after ConcurrencyManagement annotation processor public static final int POST_MODULE_EJB_ACCESS_TIMEOUT_ANNOTATION = 0x0503; public static final int POST_MODULE_ANNOTATION_ARQUILLIAN_JUNIT = 0x0600; public static final int POST_MODULE_WELD_WEB_INTEGRATION = 0x0700; public static final int POST_MODULE_INSTALL_EXTENSION = 0x0800; public static final int POST_MODULE_WEB_COMPONENTS = 0x0900; public static final int POST_MODULE_BEAN_INTERCEPTOR_ANNOTATION = 0x0A00; public static final int POST_MODULE_LIEFCYCLE_ANNOTATION = 0x0B00; public static final int POST_MODULE_AROUNDINVOKE_ANNOTATION = 0x0C00; public static final int POST_MODULE_RESOURCE_INJECTION_ANNOTATION = 0x0D00; public static final int POST_MODULE_EJB_INJECTION_ANNOTATION = 0x0E00; 
public static final int POST_MODULE_WEB_SERVICE_INJECTION_ANNOTATION = 0x0E10; public static final int POST_MODULE_VALIDATOR_FACTORY = 0x0F00; public static final int POST_MODULE_EAR_DEPENDENCY = 0x1000; public static final int POST_MODULE_WELD_BEAN_ARCHIVE = 0x1100; public static final int POST_MODULE_WELD_PORTABLE_EXTENSIONS = 0x1200; public static final int POST_PERSISTENCE_ANNOTATION = 0x1F00; // TODO: move back to PARSE when // Stuart moves the other POST entries. // INSTALL public static final int INSTALL_EAR_AGGREGATE_COMPONENT_INDEX = 0x0000; public static final int INSTALL_REFLECTION_INDEX = 0x0100; public static final int INSTALL_APP_CONTEXT = 0x0200; public static final int INSTALL_MODULE_CONTEXT = 0x0300; public static final int INSTALL_SERVICE_ACTIVATOR = 0x0400; public static final int INSTALL_OSGI_DEPLOYMENT = 0x0500; public static final int INSTALL_WAR_METADATA = 0x0600; public static final int INSTALL_RA_DEPLOYMENT = 0x0700; public static final int INSTALL_SERVICE_DEPLOYMENT = 0x0800; public static final int INSTALL_MC_BEAN_DEPLOYMENT = 0x0900; public static final int INSTALL_RA_XML_DEPLOYMENT = 0x0A00; public static final int INSTALL_WELD_DEPLOYMENT = 0x0B00; public static final int INSTALL_WELD_BEAN_MANAGER = 0x0C00; public static final int INSTALL_EE_COMP_LAZY_BINDING_SOURCE_HANDLER = 0x0D00; public static final int INSTALL_WS_LAZY_BINDING_SOURCE_HANDLER = 0x0E00; public static final int INSTALL_EE_COMPONENT = 0x0F00; public static final int INSTALL_SERVLET_INIT_DEPLOYMENT = 0x1000; public static final int INSTALL_JAXRS_SCANNING = 0x1100; public static final int INSTALL_JAXRS_DEPLOYMENT = 0x1200; public static final int INSTALL_WAR_DEPLOYMENT = 0x1300; public static final int INSTALL_ARQUILLIAN_DEPLOYMENT = 0x1400; public static final int INSTALL_JDBC_DRIVER = 0x1500; public static final int INSTALL_TRANSACTION_BINDINGS = 0x1600; public static final int INSTALL_PERSISTENTUNIT = 0x1700; // CLEANUP public static final int CLEANUP_REFLECTION_INDEX 
= 0x100; }
package org.jboss.as.server.deployment; /** * An enumeration of the phases of a deployment unit's processing cycle. * * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a> */ public enum Phase { /* == TEMPLATE == * Upon entry, this phase performs the following actions: * <ul> * <li></li> * </ul> * <p> * Processors in this phase have access to the following phase attachments: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> * Processors in this phase have access to the following deployment unit attachments, in addition to those defined * for the previous phase: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> * In this phase, these phase attachments may be modified: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> */ /** * This phase creates the initial root structure. Depending on the service for this phase will ensure that the * deployment unit's initial root structure is available and accessible. * <p> * Upon entry, this phase performs the following actions: * <ul> * <li>The primary deployment root is mounted (during {@link #STRUCTURE_MOUNT})</li> * <li>Other internal deployment roots are mounted (during {@link #STRUCTURE_NESTED_JAR})</li> * </ul> * <p> * Processors in this phase have access to the following phase attachments: * <ul> * <li><i>N/A</i></li> * </ul> * <p> * Processors in this phase have access to the following deployment unit attachments: * <ul> * <li>{@link Attachments#DEPLOYMENT_ROOT} - the mounted deployment root for this deployment unit</li> * </ul> * <p> * In this phase, these phase attachments may be modified: * <ul> * </ul> * <p> */ STRUCTURE(null), /** * This phase assembles information from the root structure to prepare for adding and processing additional external * structure, such as from class path entries and other similar mechanisms. 
* <p> * Upon entry, this phase performs the following actions: * <ul> * <li>The root content's MANIFEST is read and made available during {@link #PARSE_MANIFEST}.</li> * <li>The annotation index for the root structure is calculated during {@link #STRUCTURE_ANNOTATION_INDEX}.</li> * </ul> * <p> * Processors in this phase have access to the following phase attachments: * <ul> * <li>{@link Attachments#MANIFEST} - the parsed manifest of the root structure</li> * </ul> * <p> * Processors in this phase have access to the following deployment unit attachments, in addition to those defined * for the previous phase: * <ul> * <li><i>N/A</i></li> * </ul> * <p> * In this phase, these phase attachments may be modified: * <ul> * <li>{@link Attachments#CLASS_PATH_ENTRIES} - class path entries found in the manifest and elsewhere.</li> * <li>{@link Attachments#EXTENSION_LIST_ENTRIES} - extension-list entries found in the manifest and elsewhere.</li> * </ul> * <p> */ PARSE(null), /** * In this phase, the full structure of the deployment unit is made available and module dependencies may be assembled. * <p> * Upon entry, this phase performs the following actions: * <ul> * <li>Any additional external structure is mounted during {@link #XXX}</li> * <li></li> * </ul> * <p> * Processors in this phase have access to the following phase attachments: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> * Processors in this phase have access to the following deployment unit attachments, in addition to those defined * for the previous phase: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> * In this phase, these phase attachments may be modified: * <ul> * <li>{@link Attachments#BLAH} - description here</li> * </ul> * <p> */ DEPENDENCIES(null), CONFIGURE_MODULE(null), POST_MODULE(null), INSTALL(null), CLEANUP(null), ; /** * This is the key for the attachment to use as the phase's "value". The attachment is taken from * the deployment unit. 
If a phase doesn't have a single defining "value", {@code null} is specified. */ private final AttachmentKey<?> phaseKey; private Phase(final AttachmentKey<?> key) { phaseKey = key; } /** * Get the next phase, or {@code null} if none. * * @return the next phase, or {@code null} if there is none */ public Phase next() { final int ord = ordinal() + 1; final Phase[] phases = Phase.values(); return ord == phases.length ? null : phases[ord]; } /** * Get the attachment key of the {@code DeploymentUnit} attachment that represents the result value * of this phase. * * @return the key */ public AttachmentKey<?> getPhaseKey() { return phaseKey; } // STRUCTURE public static final int STRUCTURE_WAR_DEPLOYMENT_INIT = 0x0000; public static final int STRUCTURE_MOUNT = 0x0001; public static final int STRUCTURE_MANIFEST = 0x0100; // must be before osgi public static final int STRUCTURE_JDBC_DRIVER = 0x0150; public static final int STRUCTURE_OSGI_MANIFEST = 0x0200; public static final int STRUCTURE_RAR = 0x0300; public static final int STRUCTURE_WAR = 0x0500; public static final int STRUCTURE_EAR_DEPLOYMENT_INIT = 0x0600; public static final int STRUCTURE_EAR_APP_XML_PARSE = 0x0700; public static final int STRUCTURE_EAR_JBOSS_APP_XML_PARSE = 0x0800; public static final int STRUCTURE_EAR = 0x0900; public static final int STRUCTURE_SERVICE_MODULE_LOADER = 0x0A00; public static final int STRUCTURE_ANNOTATION_INDEX = 0x0B00; public static final int STRUCTURE_EJB_JAR_IN_EAR = 0x0C00; public static final int STRUCTURE_MANAGED_BEAN_JAR_IN_EAR = 0x0C01; public static final int STRUCTURE_SAR_SUB_DEPLOY_CHECK = 0x0D00; public static final int STRUCTURE_ADDITIONAL_MANIFEST = 0x0E00; public static final int STRUCTURE_SUB_DEPLOYMENT = 0x0F00; public static final int STRUCTURE_MODULE_IDENTIFIERS = 0x1000; public static final int STRUCTURE_EE_MODULE_INIT = 0x1100; // PARSE public static final int PARSE_EE_MODULE_NAME = 0x0100; public static final int PARSE_EAR_SUBDEPLOYMENTS_ISOLATION_DEFAULT = 
0x0200; public static final int PARSE_STRUCTURE_DESCRIPTOR = 0x0201; public static final int PARSE_DEPENDENCIES_MANIFEST = 0x0300; public static final int PARSE_COMPOSITE_ANNOTATION_INDEX = 0x0301; public static final int PARSE_EAR_LIB_CLASS_PATH = 0x0400; public static final int PARSE_ADDITIONAL_MODULES = 0x0500; public static final int PARSE_CLASS_PATH = 0x0600; public static final int PARSE_EXTENSION_LIST = 0x0700; public static final int PARSE_EXTENSION_NAME = 0x0800; public static final int PARSE_OSGI_BUNDLE_INFO = 0x0900; public static final int PARSE_OSGI_XSERVICE_PROPERTIES = 0x0A00; public static final int PARSE_OSGI_DEPLOYMENT = 0x0A80; public static final int PARSE_WEB_DEPLOYMENT = 0x0B00; public static final int PARSE_WEB_DEPLOYMENT_FRAGMENT = 0x0C00; public static final int PARSE_JSF_VERSION = 0x0C50; public static final int PARSE_ANNOTATION_WAR = 0x0D00; public static final int PARSE_ANNOTATION_EJB = 0x0D10; public static final int PARSE_JBOSS_WEB_DEPLOYMENT = 0x0E00; public static final int PARSE_TLD_DEPLOYMENT = 0x0F00; public static final int PARSE_EAR_CONTEXT_ROOT = 0x1000; // create and attach EJB metadata for EJB deployments public static final int PARSE_EJB_DEPLOYMENT = 0x1100; public static final int PARSE_SESSION_BEAN_CREATE_COMPONENT_DESCRIPTIONS = 0x1150; public static final int PARSE_MDB_CREATE_COMPONENT_DESCRIPTIONS = 0x1151; public static final int PARSE_ENTITY_BEAN_CREATE_COMPONENT_DESCRIPTIONS = 0x1152; public static final int PARSE_EJB_SESSION_BEAN_DD = 0x1200; // create and attach the component description out of EJB annotations public static final int PARSE_JAXWS_HANDLER_CHAIN_ANNOTATION = 0x1700; public static final int PARSE_JAXWS_ENDPOINT_CREATE_COMPONENT_DESCRIPTIONS = 0x1701; public static final int PARSE_JAXWS_HANDLER_CREATE_COMPONENT_DESCRIPTIONS = 0x1702; public static final int PARSE_WS_EJB_INTEGRATION = 0x1703; public static final int PARSE_EJB_APPLICATION_EXCEPTION_ANNOTATION = 0x1901; public static final int 
PARSE_WEB_COMPONENTS = 0x1F00; public static final int PARSE_WEB_MERGE_METADATA = 0x2000; public static final int PARSE_RA_DEPLOYMENT = 0x2100; public static final int PARSE_SERVICE_LOADER_DEPLOYMENT = 0x2200; public static final int PARSE_SERVICE_DEPLOYMENT = 0x2300; public static final int PARSE_POJO_DEPLOYMENT = 0x2400; public static final int PARSE_IRON_JACAMAR_DEPLOYMENT = 0x2500; public static final int PARSE_MANAGED_BEAN_ANNOTATION = 0x2900; public static final int PARSE_JAXRS_ANNOTATIONS = 0x2A00; public static final int PARSE_WELD_DEPLOYMENT = 0x2B00; public static final int PARSE_WELD_WEB_INTEGRATION = 0x2B10; public static final int PARSE_WEBSERVICES_XML = 0x2C00; public static final int PARSE_DATA_SOURCE_DEFINITION_ANNOTATION = 0x2D00; public static final int PARSE_EJB_CONTEXT_BINDING = 0x2E00; public static final int PARSE_EJB_TIMERSERVICE_BINDING = 0x2E01; public static final int PARSE_PERSISTENCE_UNIT = 0x2F00; public static final int PARSE_PERSISTENCE_ANNOTATION = 0x3000; public static final int PARSE_INTERCEPTORS_ANNOTATION = 0x3100; public static final int PARSE_LIEFCYCLE_ANNOTATION = 0x3200; public static final int PARSE_AROUNDINVOKE_ANNOTATION = 0x3300; public static final int PARSE_AROUNDTIMEOUT_ANNOTATION = 0x3400; public static final int PARSE_TIMEOUT_ANNOTATION = 0x3401; public static final int PARSE_EJB_DD_INTERCEPTORS = 0x3500; public static final int PARSE_EJB_SECURITY_ROLE_REF_DD = 0x3501; public static final int PARSE_EJB_ASSEMBLY_DESC_DD = 0x3600; public static final int PARSE_DISTINCT_NAME = 0x3601; // should be after all components are known public static final int PARSE_EJB_INJECTION_ANNOTATION = 0x3700; public static final int PARSE_EJB_REMOTE_CLIENT_CONTEXT = 0x3900; // DEPENDENCIES public static final int DEPENDENCIES_EJB = 0x0000; public static final int DEPENDENCIES_MODULE = 0x0100; public static final int DEPENDENCIES_RAR_CONFIG = 0x0300; public static final int DEPENDENCIES_MANAGED_BEAN = 0x0400; public static final int 
DEPENDENCIES_SAR_MODULE = 0x0500; public static final int DEPENDENCIES_WAR_MODULE = 0x0600; public static final int DEPENDENCIES_CLASS_PATH = 0x0800; public static final int DEPENDENCIES_EXTENSION_LIST = 0x0900; public static final int DEPENDENCIES_WELD = 0x0A00; public static final int DEPENDENCIES_SEAM = 0x0A01; public static final int DEPENDENCIES_WS = 0x0C00; public static final int DEPENDENCIES_JAXRS = 0x0D00; public static final int DEPENDENCIES_SUB_DEPLOYMENTS = 0x0E00; public static final int DEPENDENCIES_JPA = 0x1000; public static final int DEPENDENCIES_GLOBAL_MODULES = 0x1100; public static final int DEPENDENCIES_JDK = 0x1200; //must be last public static final int DEPENDENCIES_MODULE_INFO_SERVICE = 0x1300; // CONFIGURE_MODULE public static final int CONFIGURE_MODULE_SPEC = 0x0100; // POST_MODULE public static final int POST_MODULE_INJECTION_ANNOTATION = 0x0100; public static final int POST_MODULE_REFLECTION_INDEX = 0x0200; public static final int POST_MODULE_TRANSFORMER = 0x0201; public static final int POST_MODULE_JSF_MANAGED_BEANS = 0x0300; public static final int POST_MODULE_EJB_BUSINESS_VIEW_ANNOTATION = 0x0400; public static final int POST_MODULE_EJB_HOME_MERGE = 0x0401; public static final int POST_MODULE_EJB_DD_METHOD_RESOLUTION = 0x0402; public static final int POST_MODULE_EJB_TIMER_METADATA_MERGE = 0x0506; public static final int POST_MODULE_EJB_DD_INTERCEPTORS = 0x0600; public static final int POST_MODULE_EJB_TIMER_SERVICE = 0x0601; public static final int POST_MODULE_EJB_TRANSACTION_MANAGEMENT = 0x0602; public static final int POST_MODULE_EJB_TX_ATTR_MERGE = 0x0603; public static final int POST_MODULE_EJB_CONCURRENCY_MANAGEMENT_MERGE= 0x0604; public static final int POST_MODULE_EJB_CONCURRENCY_MERGE = 0x0605; public static final int POST_MODULE_EJB_RUN_AS_MERGE = 0x0606; public static final int POST_MODULE_EJB_RESOURCE_ADAPTER_MERGE = 0x0607; public static final int POST_MODULE_EJB_REMOVE_METHOD = 0x0608; public static final int 
POST_MODULE_EJB_STARTUP_MERGE = 0x0609; public static final int POST_MODULE_EJB_SECURITY_DOMAIN = 0x060A; public static final int POST_MODULE_EJB_ROLES = 0x060B; public static final int POST_MODULE_METHOD_PERMISSIONS = 0x060C; public static final int POST_MODULE_EJB_STATEFUL_TIMEOUT = 0x060D; public static final int POST_MODULE_EJB_ASYNCHRONOUS_MERGE = 0x060E; public static final int POST_MODULE_EJB_SESSION_SYNCHRONIZATION = 0x060F; public static final int POST_MODULE_EJB_INIT_METHOD = 0x0610; public static final int POST_MODULE_WELD_COMPONENT_INTEGRATION = 0x0800; public static final int POST_MODULE_INSTALL_EXTENSION = 0x0A00; public static final int POST_MODULE_VALIDATOR_FACTORY = 0x0B00; public static final int POST_MODULE_EAR_DEPENDENCY = 0x0C00; public static final int POST_MODULE_WELD_BEAN_ARCHIVE = 0x0D00; public static final int POST_MODULE_WELD_PORTABLE_EXTENSIONS = 0x0E00; // should come before ejb jndi bindings processor public static final int POST_MODULE_EJB_IMPLICIT_NO_INTERFACE_VIEW = 0x1000; public static final int POST_MODULE_EJB_JNDI_BINDINGS = 0x1100; public static final int POST_MODULE_EJB_MODULE_CONFIGURATION = 0x1200; public static final int POST_INITIALIZE_IN_ORDER = 0x1300; public static final int POST_MODULE_ENV_ENTRY = 0x1400; public static final int POST_MODULE_EJB_REF = 0x1500; public static final int POST_MODULE_PERSISTENCE_REF = 0x1600; public static final int POST_MODULE_DATASOURCE_REF = 0x1700; public static final int POST_MODULE_WS_REF_DESCRIPTOR = 0x1800; public static final int POST_MODULE_WS_REF_ANNOTATION = 0x1801; public static final int POST_MODULE_WS_JMS_INTEGRATION = 0x1802; public static final int POST_MODULE_JAXRS_SCANNING = 0x1A00; public static final int POST_MODULE_JAXRS_COMPONENT = 0x1B00; public static final int POST_MODULE_JAXRS_CDI_INTEGRATION = 0x1C00; public static final int POST_MODULE_EJB_CLIENT_CONTEXT_SETUP = 0x1D00; public static final int POST_MODULE_LOCAL_HOME = 0x1E00; // INSTALL public static final int 
INSTALL_JNDI_DEPENDENCY_SETUP = 0x0100; public static final int INSTALL_JPA_INTERCEPTORS = 0x0200; public static final int INSTALL_APP_CONTEXT = 0x0300; public static final int INSTALL_COMPONENT_AGGREGATION = 0x0400; public static final int INSTALL_MODULE_CONTEXT = 0x0401; public static final int INSTALL_RESOLVE_EJB_INJECTIONS = 0x0402; public static final int INSTALL_SERVICE_ACTIVATOR = 0x0500; public static final int INSTALL_OSGI_DEPLOYMENT = 0x0600; public static final int INSTALL_OSGI_MODULE = 0x0650; public static final int INSTALL_WS_DEPLOYMENT_TYPE_DETECTOR = 0x0700; public static final int INSTALL_WS_UNIVERSAL_META_DATA_MODEL = 0x0701; public static final int INSTALL_WS_DEPLOYMENT_ASPECTS = 0x0710; // IMPORTANT: WS integration installs deployment aspects dynamically // so consider INSTALL 0x0710 - 0x07FF reserved for WS subsystem! public static final int INSTALL_RA_DEPLOYMENT = 0x0800; public static final int INSTALL_SERVICE_DEPLOYMENT = 0x0900; public static final int INSTALL_POJO_DEPLOYMENT = 0x0A00; public static final int INSTALL_RA_XML_DEPLOYMENT = 0x0B00; public static final int INSTALL_EE_CLASS_CONFIG = 0x1100; public static final int INSTALL_EE_MODULE_CONFIG = 0x1101; public static final int INSTALL_MODULE_JNDI_BINDINGS = 0x1200; public static final int INSTALL_DEPENDS_ON_ANNOTATION = 0x1210; public static final int INSTALL_PERSISTENCE_PROVIDER = 0x1215; // before INSTALL_PERSISTENTUNIT public static final int INSTALL_PERSISTENTUNIT = 0x1220; public static final int INSTALL_EE_COMPONENT = 0x1230; public static final int INSTALL_SERVLET_INIT_DEPLOYMENT = 0x1300; public static final int INSTALL_JAXRS_DEPLOYMENT = 0x1500; public static final int INSTALL_JSF_ANNOTATIONS = 0x1600; public static final int INSTALL_JDBC_DRIVER = 0x1800; public static final int INSTALL_TRANSACTION_BINDINGS = 0x1900; public static final int INSTALL_BUNDLE_CONTEXT_BINDING = 0x1A00; public static final int INSTALL_WELD_DEPLOYMENT = 0x1B00; public static final int 
INSTALL_WELD_BEAN_MANAGER = 0x1C00; public static final int INSTALL_JNDI_DEPENDENCIES = 0x1C01; public static final int INSTALL_WAR_DEPLOYMENT = 0x1D00; public static final int INSTALL_DEPLOYMENT_REPOSITORY = 0x1F00; public static final int INSTALL_EJB_MANAGEMENT_RESOURCES = 0x2000; // CLEANUP public static final int CLEANUP_REFLECTION_INDEX = 0x0100; public static final int CLEANUP_EE = 0x0200; public static final int CLEANUP_EJB = 0x0300; }
package io.spine.server.delivery;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.truth.Truth8;
import com.google.protobuf.Timestamp;
import com.google.protobuf.util.Durations;
import com.google.protobuf.util.Timestamps;
import io.spine.base.Time;
import io.spine.core.Event;
import io.spine.core.EventContext;
import io.spine.server.DefaultRepository;
import io.spine.server.delivery.given.ConsecutiveNumberProcess;
import io.spine.server.delivery.given.ConsecutiveProjection;
import io.spine.server.delivery.given.CounterView;
import io.spine.server.entity.Repository;
import io.spine.server.event.EventStore;
import io.spine.server.projection.Projection;
import io.spine.server.projection.ProjectionRepository;
import io.spine.test.delivery.ConsecutiveNumberView;
import io.spine.test.delivery.EmitNextNumber;
import io.spine.test.delivery.NumberAdded;
import io.spine.testing.SlowTest;
import io.spine.testing.server.TestEventFactory;
import io.spine.testing.server.blackbox.BlackBoxBoundedContext;
import io.spine.testing.server.blackbox.SingleTenantBlackBoxContext;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.IntStream;

import static com.google.common.truth.Truth.assertThat;
import static io.spine.testing.Tests.nullRef;
import static java.util.stream.Collectors.toList;

/**
 * Tests of catching up projection instances from historical events.
 *
 * <p>Each scenario dispatches events/commands from many threads concurrently with
 * the catch-up jobs, so the assertions below depend on the exact setup performed
 * by the helpers in this class. The tests run once with the default (nano-emulating)
 * time provider and once with a milliseconds-only provider, since catch-up ordering
 * is sensitive to timestamp resolution.
 */
@SlowTest
@DisplayName("Catch-up of projection instances should")
public class CatchUpTest extends AbstractDeliveryTest {

    /** Resets the custom time provider installed by some of the tests. */
    @Override
    @AfterEach
    public void tearDown() {
        super.tearDown();
        Time.resetProvider();
    }

    @Test
    @DisplayName("catch up only particular instances by their IDs, " +
            "given the time is provided with ms resolution")
    void byIdWithMillisResolution() throws InterruptedException {
        Time.setProvider(new WithMillisOnlyResolution());
        testCatchUpByIds();
    }

    @Test
    @DisplayName("catch up only particular instances by their IDs, " +
            "given the time is provided with nanosecond resolution")
    void byIdWithNanosResolution() throws InterruptedException {
        testCatchUpByIds();
    }

    @Test
    @DisplayName("catch up all of projection instances, " +
            "given the time is provided with millisecond resolution")
    void allInOrderWithMillisResolution() throws InterruptedException {
        Time.setProvider(new WithMillisOnlyResolution());
        testCatchUpAll();
    }

    @Test
    @DisplayName("catch up all of projection instances, " +
            "given the time is provided with nanos resolution")
    void allInOrderWithNanosResolution() throws InterruptedException {
        testCatchUpAll();
    }

    /**
     * Dispatches events to four {@code CounterView} instances, then re-dispatches
     * them in parallel with catch-up jobs for the first two IDs only, and checks
     * that only those two instances were rebuilt with the new event weight.
     */
    @SuppressWarnings("OverlyLongMethod") // Complex environment setup.
    private static void testCatchUpByIds() throws InterruptedException {
        // Historical events are stamped an hour in the past so the catch-up
        // window covers them.
        Timestamp aWhileAgo = Timestamps.subtract(Time.currentTime(), Durations.fromHours(1));
        String[] ids = {"first", "second", "third", "fourth"};
        List<NumberAdded> events = generateEvents(200, ids);

        changeShardCountTo(2);
        CounterView.Repository repo = new CounterView.Repository();
        SingleTenantBlackBoxContext ctx = BlackBoxBoundedContext.singleTenant()
                                                                .with(repo);
        addHistory(aWhileAgo, events, ctx);

        // Round 1. Fight!
        int initialWeight = 1;
        CounterView.changeWeightTo(initialWeight);
        dispatchInParallel(ctx, events, 20);
        List<Integer> initialTotals = readTotals(repo, ids);
        // Events cycle through the IDs, so each instance gets an equal share.
        int sumInRound = events.size() / ids.length * initialWeight;
        IntStream sums = IntStream.iterate(sumInRound, i -> i)
                                  .limit(ids.length);
        assertThat(initialTotals).isEqualTo(sums.boxed()
                                                .collect(toList()));

        // Round 2. Catch up the first and the second and fight!
        int newWeight = 100;
        CounterView.changeWeightTo(newWeight);
        List<Callable<Object>> jobs = new ArrayList<>();
        // Do the same, but add the catch-up for ID #0 as the first job.
        String firstId = ids[0];
        Callable<Object> firstCatchUp = () -> {
            repo.catchUp(aWhileAgo, ImmutableSet.of(firstId));
            return nullRef();
        };
        // And add the catch-up for ID #1 as the second job.
        String secondId = ids[1];
        Callable<Object> secondCatchUp = () -> {
            repo.catchUp(aMinuteAgo(), ImmutableSet.of(secondId));
            return nullRef();
        };
        jobs.add(firstCatchUp);
        jobs.add(secondCatchUp);
        jobs.addAll(asPostEventJobs(ctx, events));
        post(jobs);

        List<Integer> totalsAfterCatchUp = readTotals(repo, ids);
        // "first" is caught up from an hour ago: history + round 1 + round 2,
        // all re-counted at the new weight.
        int firstSumExpected = sumInRound * newWeight / initialWeight * 3;
        // "second" is caught up from a minute ago: round 1 + round 2 at the new weight.
        int secondSumExpected = sumInRound * newWeight / initialWeight * 2;
        // Untouched instances keep their round-1 totals and add round 2 at the new weight.
        int untouchedSum = sumInRound + sumInRound * newWeight / initialWeight;
        List<Integer> expectedTotals =
                ImmutableList.of(firstSumExpected, secondSumExpected, untouchedSum, untouchedSum);
        assertThat(totalsAfterCatchUp).isEqualTo(expectedTotals);
    }

    /**
     * Dispatches commands to four {@code ConsecutiveProjection} instances, then
     * repeats the dispatching in parallel with a {@code catchUpAll} job while one
     * target is excluded from routing, and checks the rebuilt state of the rest.
     */
    @SuppressWarnings("OverlyLongMethod") // Complex environment setup.
    private static void testCatchUpAll() throws InterruptedException {
        ConsecutiveProjection.usePositives();
        String[] ids = {"erste", "zweite", "dritte", "vierte"};
        int totalCommands = 300;
        List<EmitNextNumber> commands = generateEmissionCommands(totalCommands, ids);
        changeShardCountTo(3);
        ConsecutiveProjection.Repo projectionRepo = new ConsecutiveProjection.Repo();
        Repository<String, ConsecutiveNumberProcess> pmRepo =
                DefaultRepository.of(ConsecutiveNumberProcess.class);
        SingleTenantBlackBoxContext ctx = BlackBoxBoundedContext.singleTenant()
                                                                .with(projectionRepo)
                                                                .with(pmRepo);
        List<Callable<Object>> jobs = asPostCommandJobs(ctx, commands);
        post(jobs);

        // Commands cycle through the IDs, so each projection sees an equal share.
        int positiveExpected = totalCommands / ids.length;
        List<Integer> positiveValues = ImmutableList.of(positiveExpected, positiveExpected,
                                                        positiveExpected, positiveExpected);
        List<Integer> actualLastValues = readLastValues(projectionRepo, ids);
        assertThat(actualLastValues).isEqualTo(positiveValues);

        // Switch to negative numbering and exclude the first target from routing,
        // so its instance is removed during the catch-up.
        ConsecutiveProjection.useNegatives();
        String excludedTarget = ids[0];
        projectionRepo.excludeFromRouting(excludedTarget);

        List<Callable<Object>> sameWithCatchUp =
                ImmutableList.<Callable<Object>>builder()
                        .addAll(jobs)
                        .add(() -> {
                            projectionRepo.catchUpAll(aMinuteAgo());
                            return nullRef();
                        })
                        .build();
        post(sameWithCatchUp);

        // Each remaining projection was rebuilt from two rounds of commands,
        // now counted negatively.
        int negativeExpected = -1 * positiveExpected * 2;
        Truth8.assertThat(projectionRepo.find(excludedTarget))
              .isEmpty();
        for (int idIndex = 1; idIndex < ids.length; idIndex++) {
            String identifier = ids[idIndex];
            Optional<ConsecutiveProjection> maybeState = projectionRepo.find(identifier);
            Truth8.assertThat(maybeState)
                  .isPresent();
            ConsecutiveNumberView state = maybeState.get()
                                                    .state();
            assertThat(state.getLastValue()).isEqualTo(negativeExpected);
        }
    }

    /** Runs the jobs on a fresh 20-thread pool and waits for completion. */
    private static void post(List<Callable<Object>> jobs) throws InterruptedException {
        ExecutorService service = Executors.newFixedThreadPool(20);
        invokeAll(jobs, service);
    }

    /** Runs all jobs, shuts the service down, and verifies nothing was left queued. */
    private static void invokeAll(List<Callable<Object>> jobs, ExecutorService service)
            throws InterruptedException {
        service.invokeAll(jobs);
        List<Runnable> leftovers = service.shutdownNow();
        assertThat(leftovers).isEmpty();
    }

    /** Returns the moment one minute before the current (possibly emulated) time. */
    private static Timestamp aMinuteAgo() {
        return Timestamps.subtract(Time.currentTime(), Durations.fromMinutes(1));
    }

    /** Reads {@code lastValue} of each projection, in the order of {@code ids}. */
    private static List<Integer> readLastValues(ConsecutiveProjection.Repo repo, String[] ids) {
        return Arrays.stream(ids)
                     .map((id) -> readLastValue(repo, id))
                     .collect(toList());
    }

    private static int readLastValue(ConsecutiveProjection.Repo repo, String id) {
        return findView(repo, id).state()
                                 .getLastValue();
    }

    /** Generates {@code howMany} commands, cycling the target IDs round-robin. */
    private static List<EmitNextNumber> generateEmissionCommands(int howMany, String[] ids) {
        Iterator<String> idIterator = Iterators.cycle(ids);
        List<EmitNextNumber> commands = new ArrayList<>(howMany);
        for (int i = 0; i < howMany; i++) {
            commands.add(EmitNextNumber.newBuilder()
                                       .setId(idIterator.next())
                                       .vBuild());
        }
        return commands;
    }

    /**
     * Appends the events directly to the event store, rewriting each event's
     * context timestamp to {@code when} so they read as historical.
     */
    private static void addHistory(Timestamp when, List<NumberAdded> events,
                                   SingleTenantBlackBoxContext ctx) {
        EventStore eventStore = ctx.eventBus()
                                   .eventStore();
        TestEventFactory factory = TestEventFactory.newInstance(DeliveryTest.class);
        for (NumberAdded message : events) {
            Event event = factory.createEvent(message, null);
            EventContext context = event.getContext();
            EventContext modifiedContext = context.toBuilder()
                                                  .setTimestamp(when)
                                                  .vBuild();
            Event eventAtTime = event.toBuilder()
                                     .setContext(modifiedContext)
                                     .vBuild();
            eventStore.append(eventAtTime);
        }
    }

    /** Wraps each command into a job posting it to the context. */
    private static List<Callable<Object>> asPostCommandJobs(SingleTenantBlackBoxContext ctx,
                                                            List<EmitNextNumber> commands) {
        return commands.stream()
                       .map(cmd -> (Callable<Object>) () -> ctx.receivesCommand(cmd))
                       .collect(toList());
    }

    /** Reads {@code total} of each counter view, in the order of {@code ids}. */
    private static List<Integer> readTotals(CounterView.Repository repo, String[] ids) {
        return Arrays.stream(ids)
                     .map((id) -> findView(repo, id).state()
                                                    .getTotal())
                     .collect(toList());
    }

    /** Generates {@code howMany} events, cycling the target IDs round-robin. */
    private static List<NumberAdded> generateEvents(int howMany, String[] targets) {
        Iterator<String> idIterator = Iterators.cycle(targets);
        List<NumberAdded> events = new ArrayList<>(howMany);
        for (int i = 0; i < howMany; i++) {
            events.add(NumberAdded.newBuilder()
                                  .setCalculatorId(idIterator.next())
                                  .setValue(0)
                                  .vBuild());
        }
        return events;
    }

    /** Finds the projection by ID, asserting it exists. */
    private static <P extends Projection<String, ?, ?>> P
    findView(ProjectionRepository<String, P, ?> repo, String id) {
        Optional<P> view = repo.find(id);
        Truth8.assertThat(view)
              .isPresent();
        return view.get();
    }

    /** Wraps each event into a job posting it to the context. */
    private static List<Callable<Object>> asPostEventJobs(SingleTenantBlackBoxContext ctx,
                                                          List<NumberAdded> events) {
        return events.stream()
                     .map(e -> (Callable<Object>) () -> ctx.receivesEvent(e))
                     .collect(toList());
    }

    /** Posts all the events from {@code threads} parallel threads and waits for completion. */
    private static void dispatchInParallel(SingleTenantBlackBoxContext ctx,
                                           List<NumberAdded> events, int threads)
            throws InterruptedException {
        ExecutorService service = Executors.newFixedThreadPool(threads);
        invokeAll(asPostEventJobs(ctx, events), service);
    }

    /**
     * A time provider which provides the current time based upon JDK's wall clock, i.e. without
     * the emulated nanoseconds.
     */
    private static class WithMillisOnlyResolution implements Time.Provider {

        @Override
        public Timestamp currentTime() {
            Instant now = Instant.now();
            Timestamp result = Timestamp.newBuilder()
                                        .setSeconds(now.getEpochSecond())
                                        .setNanos(now.getNano())
                                        .build();
            return result;
        }
    }
}
package org.webrtc; import static java.lang.Math.abs; import static java.lang.Math.ceil; import android.content.Context; import android.graphics.ImageFormat; import android.graphics.SurfaceTexture; import android.hardware.Camera; import android.hardware.Camera.PreviewCallback; import android.opengl.GLES11Ext; import android.opengl.GLES20; import android.os.Handler; import android.os.Looper; import android.os.SystemClock; import android.util.Log; import android.view.Surface; import android.view.WindowManager; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Exchanger; import java.util.concurrent.TimeUnit; // Android specific implementation of VideoCapturer. // VideoCapturerAndroid.create(); // This class extends VideoCapturer with a method to easily switch between the // front and back camera. It also provides methods for enumerating valid device // names. // Threading notes: this class is called from C++ code, and from Camera // Java callbacks. Since these calls happen on different threads, // the entry points to this class are all synchronized. This shouldn't present // a performance bottleneck because only onPreviewFrame() is called more than // once (and is called serially on a single thread), so the lock should be // uncontended. Note that each of these synchronized methods must check // |camera| for null to account for having possibly waited for stopCapture() to // complete. 
@SuppressWarnings("deprecation")
public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback {
  private final static String TAG = "VideoCapturerAndroid";
  // How often the camera observer checks that frames are still arriving.
  private final static int CAMERA_OBSERVER_PERIOD_MS = 5000;

  private Camera camera;  // Only non-null while capturing.
  private CameraThread cameraThread;
  private Handler cameraThreadHandler;
  private Context applicationContext;
  private int id;  // Index of the currently opened camera.
  private Camera.CameraInfo info;
  private SurfaceTexture cameraSurfaceTexture;
  private int[] cameraGlTextures = null;
  private final FramePool videoBuffers = new FramePool();
  // Remember the requested format in case we want to switch cameras.
  private int requestedWidth;
  private int requestedHeight;
  private int requestedFramerate;
  // The capture format will be the closest supported format to the requested format.
  private CaptureFormat captureFormat;
  // Counters reset every observer period; used to compute fps and average buffers.
  private int cameraFramesCount;
  private int captureBuffersCount;
  private volatile boolean pendingCameraSwitch;
  private CapturerObserver frameObserver = null;
  private CameraErrorHandler errorHandler = null;

  // List of formats supported by all cameras. This list is filled once in order
  // to be able to switch cameras.
  private static List<List<CaptureFormat>> supportedFormats;

  // Camera error callback - forwards camera-reported errors to |errorHandler|.
  private final Camera.ErrorCallback cameraErrorCallback =
      new Camera.ErrorCallback() {
    @Override
    public void onError(int error, Camera camera) {
      String errorMessage;
      if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
        errorMessage = "Camera server died!";
      } else {
        errorMessage = "Camera error: " + error;
      }
      Log.e(TAG, errorMessage);
      if (errorHandler != null) {
        errorHandler.onCameraError(errorMessage);
      }
    }
  };

  // Camera observer - monitors camera framerate and amount of available
  // camera buffers. Observer is executed on camera thread.
  private final Runnable cameraObserver = new Runnable() {
    @Override
    public void run() {
      // Rounded frames-per-second over the last observer period.
      int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
          / CAMERA_OBSERVER_PERIOD_MS;
      double averageCaptureBuffersCount = 0;
      if (cameraFramesCount > 0) {
        averageCaptureBuffersCount =
            (double)captureBuffersCount / cameraFramesCount;
      }
      Log.d(TAG, "Camera fps: " + cameraFps + ". CaptureBuffers: "
          + String.format("%.1f", averageCaptureBuffersCount)
          + ". Pending buffers: " + videoBuffers.pendingFramesTimeStamps());
      if (cameraFramesCount == 0) {
        // No frames in a whole period - treat the camera as frozen and stop observing.
        Log.e(TAG, "Camera freezed.");
        if (errorHandler != null) {
          errorHandler.onCameraError("Camera failure.");
        }
      } else {
        cameraFramesCount = 0;
        captureBuffersCount = 0;
        if (cameraThreadHandler != null) {
          cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
        }
      }
    }
  };

  // Camera error handler - invoked when camera stops receiving frames
  // or any camera exception happens on camera thread.
  public static interface CameraErrorHandler {
    public void onCameraError(String errorDescription);
  }

  // Returns device names that can be used to create a new VideoCapturerAndroid.
  public static String[] getDeviceNames() {
    String[] names = new String[Camera.getNumberOfCameras()];
    for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
      names[i] = getDeviceName(i);
    }
    return names;
  }

  // Returns number of cameras on device.
  public static int getDeviceCount() {
    return Camera.getNumberOfCameras();
  }

  // Returns the name of the camera with camera index. Returns null if the
  // camera can not be used.
  public static String getDeviceName(int index) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    try {
      Camera.getCameraInfo(index, info);
    } catch (Exception e) {
      Log.e(TAG, "getCameraInfo failed on index " + index, e);
      return null;
    }
    String facing =
        (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
    return "Camera " + index + ", Facing " + facing
        + ", Orientation " + info.orientation;
  }

  // Returns the name of the front facing camera. Returns null if the
  // camera can not be used or does not exist.
  public static String getNameOfFrontFacingDevice() {
    for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
      Camera.CameraInfo info = new Camera.CameraInfo();
      try {
        Camera.getCameraInfo(i, info);
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
          return getDeviceName(i);
      } catch (Exception e) {
        Log.e(TAG, "getCameraInfo failed on index " + i, e);
      }
    }
    return null;
  }

  // Returns the name of the back facing camera. Returns null if the
  // camera can not be used or does not exist.
  public static String getNameOfBackFacingDevice() {
    for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
      Camera.CameraInfo info = new Camera.CameraInfo();
      try {
        Camera.getCameraInfo(i, info);
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK)
          return getDeviceName(i);
      } catch (Exception e) {
        Log.e(TAG, "getCameraInfo failed on index " + i, e);
      }
    }
    return null;
  }

  // Factory: creates the capturer through the generic VideoCapturer.create()
  // and attaches the given error handler to it.
  public static VideoCapturerAndroid create(String name,
      CameraErrorHandler errorHandler) {
    VideoCapturer capturer = VideoCapturer.create(name);
    if (capturer != null) {
      VideoCapturerAndroid capturerAndroid = (VideoCapturerAndroid) capturer;
      capturerAndroid.errorHandler = errorHandler;
      return capturerAndroid;
    }
    return null;
  }

  // Switch camera to the next valid camera id. This can only be called while
  // the camera is running.
  // Returns true on success. False if the next camera does not support the
  // current resolution.
  public synchronized boolean switchCamera(final Runnable switchDoneEvent) {
    if (Camera.getNumberOfCameras() < 2 )
      return false;
    if (cameraThread == null) {
      Log.e(TAG, "Camera has not been started");
      return false;
    }
    if (pendingCameraSwitch) {
      // Do not handle multiple camera switch request to avoid blocking
      // camera thread by handling too many switch request from a queue.
      Log.w(TAG, "Ignoring camera switch request.");
      return false;
    }
    pendingCameraSwitch = true;
    id = (id + 1) % Camera.getNumberOfCameras();
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        switchCameraOnCameraThread(switchDoneEvent);
      }
    });
    return true;
  }

  // Requests a new output format from the video capturer. Captured frames
  // by the camera will be scaled/or dropped by the video capturer.
  public synchronized void onOutputFormatRequest(
      final int width, final int height, final int fps) {
    if (cameraThreadHandler == null) {
      Log.e(TAG, "Calling onOutputFormatRequest() for already stopped camera.");
      return;
    }
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        onOutputFormatRequestOnCameraThread(width, height, fps);
      }
    });
  }

  // Reconfigure the camera to capture in a new format. This should only be called while the camera
  // is running.
  public synchronized void changeCaptureFormat(
      final int width, final int height, final int framerate) {
    if (cameraThreadHandler == null) {
      Log.e(TAG, "Calling changeCaptureFormat() for already stopped camera.");
      return;
    }
    cameraThreadHandler.post(new Runnable() {
      @Override public void run() {
        startPreviewOnCameraThread(width, height, framerate);
      }
    });
  }

  // Returns the formats supported by the currently selected camera.
  public synchronized List<CaptureFormat> getSupportedFormats() {
    return supportedFormats.get(id);
  }

  private VideoCapturerAndroid() {
    Log.d(TAG, "VideoCapturerAndroid");
  }

  // Called by native code.
  // Enumerates resolution and frame rates for all cameras to be able to switch
  // cameras. Initializes local variables for the camera named |deviceName| and
  // starts a thread to be used for capturing.
  // If deviceName is empty, the first available device is used in order to be
  // compatible with the generic VideoCapturer class.
synchronized boolean init(String deviceName) { Log.d(TAG, "init: " + deviceName); if (deviceName == null || !initStatics()) return false; boolean foundDevice = false; if (deviceName.isEmpty()) { this.id = 0; foundDevice = true; } else { for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { String existing_device = getDeviceName(i); if (existing_device != null && deviceName.equals(existing_device)) { this.id = i; foundDevice = true; } } } return foundDevice; } private static boolean initStatics() { if (supportedFormats != null) return true; try { Log.d(TAG, "Get supported formats."); supportedFormats = new ArrayList<List<CaptureFormat>>(Camera.getNumberOfCameras()); // Start requesting supported formats from camera with the highest index // (back camera) first. If it fails then likely camera is in bad state. for (int i = Camera.getNumberOfCameras() - 1; i >= 0; i ArrayList<CaptureFormat> supportedFormat = getSupportedFormats(i); if (supportedFormat.size() == 0) { Log.e(TAG, "Fail to get supported formats for camera " + i); supportedFormats = null; return false; } supportedFormats.add(supportedFormat); } // Reverse the list since it is filled in reverse order. Collections.reverse(supportedFormats); Log.d(TAG, "Get supported formats done."); return true; } catch (Exception e) { supportedFormats = null; Log.e(TAG, "InitStatics failed",e); } return false; } String getSupportedFormatsAsJson() throws JSONException { return getSupportedFormatsAsJson(id); } public static class CaptureFormat { public final int width; public final int height; public final int maxFramerate; public final int minFramerate; // TODO(hbos): If VideoCapturerAndroid.startCapture is updated to support // other image formats then this needs to be updated and // VideoCapturerAndroid.getSupportedFormats need to return CaptureFormats of // all imageFormats. 
public final int imageFormat = ImageFormat.YV12; public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) { this.width = width; this.height = height; this.minFramerate = minFramerate; this.maxFramerate = maxFramerate; } // Calculates the frame size of this capture format. public int frameSize() { return frameSize(width, height, imageFormat); } // Calculates the frame size of the specified image format. Currently only // supporting ImageFormat.YV12. The YV12's stride is the closest rounded up // multiple of 16 of the width and width and height are always even. // Android guarantees this: // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29 public static int frameSize(int width, int height, int imageFormat) { if (imageFormat != ImageFormat.YV12) { throw new UnsupportedOperationException("Don't know how to calculate " + "the frame size of non-YV12 image formats."); } int yStride = roundUp(width, 16); int uvStride = roundUp(yStride / 2, 16); int ySize = yStride * height; int uvSize = uvStride * height / 2; return ySize + uvSize * 2; } // Rounds up |x| to the closest value that is a multiple of |alignment|. 
private static int roundUp(int x, int alignment) { return (int)ceil(x / (double)alignment) * alignment; } @Override public String toString() { return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]"; } @Override public boolean equals(Object that) { if (!(that instanceof CaptureFormat)) { return false; } final CaptureFormat c = (CaptureFormat) that; return width == c.width && height == c.height && maxFramerate == c.maxFramerate && minFramerate == c.minFramerate; } } private static String getSupportedFormatsAsJson(int id) throws JSONException { List<CaptureFormat> formats = supportedFormats.get(id); JSONArray json_formats = new JSONArray(); for (CaptureFormat format : formats) { JSONObject json_format = new JSONObject(); json_format.put("width", format.width); json_format.put("height", format.height); json_format.put("framerate", (format.maxFramerate + 999) / 1000); json_formats.put(json_format); } Log.d(TAG, "Supported formats for camera " + id + ": " + json_formats.toString(2)); return json_formats.toString(); } // Returns a list of CaptureFormat for the camera with index id. static ArrayList<CaptureFormat> getSupportedFormats(int id) { ArrayList<CaptureFormat> formatList = new ArrayList<CaptureFormat>(); Camera camera; try { Log.d(TAG, "Opening camera " + id); camera = Camera.open(id); } catch (Exception e) { Log.e(TAG, "Open camera failed on id " + id, e); return formatList; } try { Camera.Parameters parameters; parameters = camera.getParameters(); // getSupportedPreviewFpsRange returns a sorted list. 
List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange(); int[] range = {0, 0}; if (listFpsRange != null) range = listFpsRange.get(listFpsRange.size() -1); List<Camera.Size> supportedSizes = parameters.getSupportedPreviewSizes(); for (Camera.Size size : supportedSizes) { formatList.add(new CaptureFormat(size.width, size.height, range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX])); } } catch (Exception e) { Log.e(TAG, "getSupportedFormats failed on id " + id, e); } camera.release(); camera = null; return formatList; } private class CameraThread extends Thread { private Exchanger<Handler> handlerExchanger; public CameraThread(Exchanger<Handler> handlerExchanger) { this.handlerExchanger = handlerExchanger; } @Override public void run() { Looper.prepare(); exchange(handlerExchanger, new Handler()); Looper.loop(); } } // Called by native code. Returns true if capturer is started. // Note that this actually opens the camera, and Camera callbacks run on the // thread that calls open(), so this is done on the CameraThread. Since the // API needs a synchronous success return value we wait for the result. 
synchronized void startCapture( final int width, final int height, final int framerate, final Context applicationContext, final CapturerObserver frameObserver) { Log.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate); if (applicationContext == null) { throw new RuntimeException("applicationContext not set."); } if (frameObserver == null) { throw new RuntimeException("frameObserver not set."); } if (cameraThreadHandler != null) { throw new RuntimeException("Camera has already been started."); } Exchanger<Handler> handlerExchanger = new Exchanger<Handler>(); cameraThread = new CameraThread(handlerExchanger); cameraThread.start(); cameraThreadHandler = exchange(handlerExchanger, null); cameraThreadHandler.post(new Runnable() { @Override public void run() { startCaptureOnCameraThread(width, height, framerate, frameObserver, applicationContext); } }); } private void startCaptureOnCameraThread( int width, int height, int framerate, CapturerObserver frameObserver, Context applicationContext) { Throwable error = null; this.applicationContext = applicationContext; this.frameObserver = frameObserver; try { Log.d(TAG, "Opening camera " + id); camera = Camera.open(id); info = new Camera.CameraInfo(); Camera.getCameraInfo(id, info); // No local renderer (we only care about onPreviewFrame() buffers, not a // directly-displayed UI element). Camera won't capture without // setPreview{Texture,Display}, so we create a SurfaceTexture and hand // it over to Camera, but never listen for frame-ready callbacks, // and never call updateTexImage on it. try { cameraSurfaceTexture = null; cameraGlTextures = new int[1]; // Generate one texture pointer and bind it as an external texture. 
GLES20.glGenTextures(1, cameraGlTextures, 0); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraGlTextures[0]); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]); cameraSurfaceTexture.setOnFrameAvailableListener(null); camera.setPreviewTexture(cameraSurfaceTexture); } catch (IOException e) { Log.e(TAG, "setPreviewTexture failed", error); throw new RuntimeException(e); } Log.d(TAG, "Camera orientation: " + info.orientation + " .Device orientation: " + getDeviceOrientation()); camera.setErrorCallback(cameraErrorCallback); startPreviewOnCameraThread(width, height, framerate); frameObserver.OnCapturerStarted(true); // Start camera observer. cameraFramesCount = 0; captureBuffersCount = 0; cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS); return; } catch (RuntimeException e) { error = e; } Log.e(TAG, "startCapture failed", error); stopCaptureOnCameraThread(); cameraThreadHandler = null; frameObserver.OnCapturerStarted(false); if (errorHandler != null) { errorHandler.onCameraError("Camera can not be started."); } return; } // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|. 
// (Re)configures and (re)starts camera preview for the requested format.
// Runs on the camera thread; no-op if the camera has already been stopped.
private void startPreviewOnCameraThread(int width, int height, int framerate) {
  Log.d(TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
  if (camera == null) {
    Log.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
    return;
  }
  // Remember the requested format so a camera switch can re-apply it.
  requestedWidth = width;
  requestedHeight = height;
  requestedFramerate = framerate;

  // Find closest supported format for |width| x |height| @ |framerate|.
  final Camera.Parameters parameters = camera.getParameters();
  // Camera API expresses fps ranges in units of frames per 1000 seconds.
  final int[] range = getFramerateRange(parameters, framerate * 1000);
  final Camera.Size previewSize = getClosestSupportedSize(
      parameters.getSupportedPreviewSizes(), width, height);
  final CaptureFormat captureFormat = new CaptureFormat(
      previewSize.width, previewSize.height,
      range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
      range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);

  // Check if we are already using this capture format, then we don't need to do anything.
  if (captureFormat.equals(this.captureFormat)) {
    return;
  }

  // Update camera parameters.
  Log.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
  if (parameters.isVideoStabilizationSupported()) {
    parameters.setVideoStabilization(true);
  }
  // Note: setRecordingHint(true) actually decrease frame rate on N5.
  // parameters.setRecordingHint(true);
  if (captureFormat.maxFramerate > 0) {
    parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
  }
  parameters.setPreviewSize(captureFormat.width, captureFormat.height);
  parameters.setPreviewFormat(captureFormat.imageFormat);
  // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
  // as a workaround for an aspect ratio problem on Nexus 7.
  final Camera.Size pictureSize = getClosestSupportedSize(
      parameters.getSupportedPictureSizes(), width, height);
  parameters.setPictureSize(pictureSize.width, pictureSize.height);

  // Temporarily stop preview if it's already running.
  if (this.captureFormat != null) {
    camera.stopPreview();
    // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
    // queue, but sometimes we receive a frame with the old resolution after this call anyway.
    camera.setPreviewCallbackWithBuffer(null);
  }

  // (Re)start preview.
  Log.d(TAG, "Start capturing: " + captureFormat);
  this.captureFormat = captureFormat;
  camera.setParameters(parameters);
  // Allocate callback buffers sized for the new format before restarting.
  videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
  camera.setPreviewCallbackWithBuffer(this);
  camera.startPreview();
}

// Called by native code. Returns true when camera is known to be stopped.
// Blocks the caller until the camera thread has fully shut down (join).
synchronized void stopCapture() throws InterruptedException {
  if (cameraThreadHandler == null) {
    Log.e(TAG, "Calling stopCapture() for already stopped camera.");
    return;
  }
  Log.d(TAG, "stopCapture");
  cameraThreadHandler.post(new Runnable() {
    @Override public void run() {
      stopCaptureOnCameraThread();
    }
  });
  cameraThread.join();
  cameraThreadHandler = null;
  Log.d(TAG, "stopCapture done");
}

// Stops capture and terminates the camera thread's Looper, ending the thread.
private void stopCaptureOnCameraThread() {
  doStopCaptureOnCameraThread();
  Looper.myLooper().quit();
  return;
}

// Stops preview, detaches callbacks/buffers, frees GL textures and releases
// the camera. Runs on the camera thread; safe to call when already stopped.
private void doStopCaptureOnCameraThread() {
  Log.d(TAG, "stopCaptureOnCameraThread");
  if (camera == null) {
    return;
  }
  try {
    cameraThreadHandler.removeCallbacks(cameraObserver);
    Log.d(TAG, "Stop preview.");
    camera.stopPreview();
    camera.setPreviewCallbackWithBuffer(null);
    videoBuffers.stopReturnBuffersToCamera();

    captureFormat = null;
    camera.setPreviewTexture(null);
    cameraSurfaceTexture = null;
    if (cameraGlTextures != null) {
      GLES20.glDeleteTextures(1, cameraGlTextures, 0);
      cameraGlTextures = null;
    }

    Log.d(TAG, "Release camera.");
    camera.release();
    camera = null;
  } catch (IOException e) {
    Log.e(TAG, "Failed to stop camera", e);
  }
}

// Stops the current camera and restarts capture with the previously requested
// format (the camera |id| is presumably toggled by the caller before posting
// this — TODO confirm against the public switchCamera entry point).
private void switchCameraOnCameraThread(Runnable switchDoneEvent) {
  Log.d(TAG, "switchCameraOnCameraThread");

  doStopCaptureOnCameraThread();
  startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate,
      frameObserver, applicationContext);
  pendingCameraSwitch = false;
  Log.d(TAG, "switchCameraOnCameraThread done");
  if (switchDoneEvent != null) {
    switchDoneEvent.run();
  }
}

// Forwards an output-format request to the frame observer. Runs on the camera
// thread; ignored if the camera has been stopped.
private void onOutputFormatRequestOnCameraThread(
    int width, int height, int fps) {
  if (camera == null) {
    return;
  }
  Log.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height + "@" + fps);
  frameObserver.OnOutputFormatRequest(width, height, fps);
}

// Returns a frame buffer to the pool. If the camera thread is gone the buffer
// is returned directly on the calling thread; otherwise the return is posted
// to the camera thread.
synchronized void returnBuffer(final long timeStamp) {
  if (cameraThreadHandler == null) {
    // The camera has been stopped.
    videoBuffers.returnBuffer(timeStamp);
    return;
  }
  cameraThreadHandler.post(new Runnable() {
    @Override public void run() {
      videoBuffers.returnBuffer(timeStamp);
    }
  });
}

// Maps the default display's rotation to degrees (0/90/180/270).
private int getDeviceOrientation() {
  int orientation = 0;

  WindowManager wm = (WindowManager) applicationContext.getSystemService(
      Context.WINDOW_SERVICE);
  switch(wm.getDefaultDisplay().getRotation()) {
    case Surface.ROTATION_90:
      orientation = 90;
      break;
    case Surface.ROTATION_180:
      orientation = 180;
      break;
    case Surface.ROTATION_270:
      orientation = 270;
      break;
    case Surface.ROTATION_0:
    default:
      orientation = 0;
      break;
  }
  return orientation;
}

// Helper class for finding the closest supported format for the two functions below.
private static abstract class ClosestComparator<T> implements Comparator<T> {
  // Difference between supported and requested parameter.
  // Difference (distance metric) between supported and requested parameter;
  // smaller means a better match.
  abstract int diff(T supportedParameter);

  @Override
  public int compare(T t1, T t2) {
    return diff(t1) - diff(t2);
  }
}

// Picks the supported preview fps range closest to |framerate| (units: frames
// per 1000 seconds). Returns {0, 0} if the device reports no ranges.
private static int[] getFramerateRange(Camera.Parameters parameters, final int framerate) {
  List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
  if (listFpsRange.isEmpty()) {
    Log.w(TAG, "No supported preview fps range");
    return new int[]{0, 0};
  }
  return Collections.min(listFpsRange,
      new ClosestComparator<int[]>() {
        @Override int diff(int[] range) {
          return abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX])
              + abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        }
     });
}

// Picks the supported size with the smallest width+height distance to the
// requested dimensions.
private static Camera.Size getClosestSupportedSize(
    List<Camera.Size> supportedSizes, final int requestedWidth,
    final int requestedHeight) {
  return Collections.min(supportedSizes,
      new ClosestComparator<Camera.Size>() {
        @Override int diff(Camera.Size size) {
          return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
        }
     });
}

// Called on cameraThread so must not "synchronized".
// Preview-frame callback: computes the frame rotation from device/camera
// orientation, reserves the buffer in the pool and forwards it to the
// observer; drops the frame if the buffer cannot be reserved.
@Override
public void onPreviewFrame(byte[] data, Camera callbackCamera) {
  if (Thread.currentThread() != cameraThread) {
    throw new RuntimeException("Camera callback not on camera thread?!?");
  }
  if (camera == null) {
    return;
  }
  if (camera != callbackCamera) {
    throw new RuntimeException("Unexpected camera in callback!");
  }

  final long captureTimeNs =
      TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

  captureBuffersCount += videoBuffers.numCaptureBuffersAvailable();
  int rotation = getDeviceOrientation();
  // Back-facing sensors are mirrored relative to the display rotation.
  if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
    rotation = 360 - rotation;
  }
  rotation = (info.orientation + rotation) % 360;
  // Mark the frame owning |data| as used.
  // Note that since data is directBuffer,
  // data.length >= videoBuffers.frameSize.
  if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
    cameraFramesCount++;
    frameObserver.OnFrameCaptured(data, videoBuffers.frameSize, captureFormat.width,
        captureFormat.height, rotation, captureTimeNs);
  } else {
    Log.w(TAG, "reserveByteBuffer failed - dropping frame.");
  }
}

// runCameraThreadUntilIdle make sure all posted messages to the cameraThread
// is processed before returning. It does that by itself posting a message to
// to the message queue and waits until is has been processed.
// It is used in tests.
void runCameraThreadUntilIdle() {
  if (cameraThreadHandler == null)
    return;
  final Exchanger<Boolean> result = new Exchanger<Boolean>();
  cameraThreadHandler.post(new Runnable() {
    @Override public void run() {
      exchange(result, true); // |true| is a dummy here.
    }
  });
  exchange(result, false); // |false| is a dummy value here.
  return;
}

// Exchanges |value| with |exchanger|, converting InterruptedExceptions to
// RuntimeExceptions (since we expect never to see these).
private static <T> T exchange(Exchanger<T> exchanger, T value) {
  try {
    return exchanger.exchange(value);
  } catch (InterruptedException e) {
    throw new RuntimeException(e);
  }
}

// Class used for allocating and bookkeeping video frames. All buffers are
// direct allocated so that they can be directly used from native code.
private static class FramePool {
  // Arbitrary queue depth.  Higher number means more memory allocated & held,
  // lower number means more sensitivity to processing time in the client (and
  // potentially stalling the capturer if it runs out of buffers to write to).
  private static final int numCaptureBuffers = 3;
  // This container tracks the buffers added as camera callback buffers. It is needed for finding
  // the corresponding ByteBuffer given a byte[].
  private final Map<byte[], ByteBuffer> queuedBuffers =
      new IdentityHashMap<byte[], ByteBuffer>();
  // This container tracks the frames that have been sent but not returned.
  // It is needed for keeping the buffers alive and for finding the
  // corresponding ByteBuffer given a timestamp.
  private final Map<Long, ByteBuffer> pendingBuffers = new HashMap<Long, ByteBuffer>();
  // Size in bytes of every buffer in the pool for the current capture format.
  private int frameSize = 0;
  // Camera to return buffers to; null once capture has stopped.
  private Camera camera;

  int numCaptureBuffersAvailable() {
    return queuedBuffers.size();
  }

  // Discards previous queued buffers and adds new callback buffers to camera.
  void queueCameraBuffers(int frameSize, Camera camera) {
    this.camera = camera;
    this.frameSize = frameSize;

    queuedBuffers.clear();
    for (int i = 0; i < numCaptureBuffers; ++i) {
      final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
      camera.addCallbackBuffer(buffer.array());
      // Keyed by the backing array identity so onPreviewFrame's byte[] can be
      // mapped back to its ByteBuffer.
      queuedBuffers.put(buffer.array(), buffer);
    }
    Log.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers
        + " buffers of size " + frameSize + ".");
  }

  // Renders the pending-frame timestamps (in ms) as a printable list, for logs.
  String pendingFramesTimeStamps() {
    List<Long> timeStampsMs = new ArrayList<Long>();
    for (Long timeStampNs : pendingBuffers.keySet()) {
      timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(timeStampNs));
    }
    return timeStampsMs.toString();
  }

  // Stops handing returned buffers back to the camera (called on stop).
  void stopReturnBuffersToCamera() {
    this.camera = null;
    Log.d(TAG, "stopReturnBuffersToCamera called."
          + (pendingBuffers.isEmpty() ?
                 " All buffers have been returned."
                 : " Pending buffers: " + pendingFramesTimeStamps() + "."));
  }

  // Moves the buffer backing |data| from the queued set to the pending set,
  // keyed by |timeStamp|. Returns false (drop frame) for stale buffers from a
  // previous format or duplicate timestamps.
  boolean reserveByteBuffer(byte[] data, long timeStamp) {
    final ByteBuffer buffer = queuedBuffers.remove(data);
    if (buffer == null) {
      // Frames might be posted to |onPreviewFrame| with the previous format while changing
      // capture format in |startPreviewOnCameraThread|. Drop these old frames.
      Log.w(TAG, "Received callback buffer from previous configuration with length: "
          + data.length);
      return false;
    }
    if (buffer.capacity() != frameSize) {
      throw new IllegalStateException("Callback buffer has unexpected frame size");
    }
    if (pendingBuffers.containsKey(timeStamp)) {
      Log.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
      return false;
    }
    pendingBuffers.put(timeStamp, buffer);
    if (queuedBuffers.isEmpty()) {
      Log.v(TAG, "Camera is running out of capture buffers."
          + " Pending buffers: " + pendingFramesTimeStamps());
    }
    return true;
  }

  // Returns a previously reserved buffer to the camera (if still running and
  // the buffer matches the current frame size); otherwise the buffer is simply
  // dropped. Throws for unknown timestamps.
  void returnBuffer(long timeStamp) {
    final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
    if (returnedFrame == null) {
      throw new RuntimeException("unknown data buffer with time stamp "
          + timeStamp + "returned?!?");
    }

    if (camera != null && returnedFrame.capacity() == frameSize) {
      camera.addCallbackBuffer(returnedFrame.array());
      if (queuedBuffers.isEmpty()) {
        Log.v(TAG, "Frame returned when camera is running out of capture"
            + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp));
      }
      queuedBuffers.put(returnedFrame.array(), returnedFrame);
      return;
    }

    if (returnedFrame.capacity() != frameSize) {
      Log.d(TAG, "returnBuffer with time stamp "
          + TimeUnit.NANOSECONDS.toMillis(timeStamp)
          + " called with old frame size, " + returnedFrame.capacity() + ".");
      // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
      // created in queueCameraBuffers so this must be an old buffer.
      return;
    }

    Log.d(TAG, "returnBuffer with time stamp "
        + TimeUnit.NANOSECONDS.toMillis(timeStamp)
        + " called after camera has been stopped.");
  }
}

// Interface used for providing callbacks to an observer.
interface CapturerObserver {
  // Notify if the camera have been started successfully or not.
  // Called on a Java thread owned by VideoCapturerAndroid.
  abstract void OnCapturerStarted(boolean success);

  // Delivers a captured frame. Called on a Java thread owned by
  // VideoCapturerAndroid.
  abstract void OnFrameCaptured(byte[] data, int length, int width, int height,
      int rotation, long timeStamp);

  // Requests an output format from the video capturer. Captured frames
  // by the camera will be scaled/or dropped by the video capturer.
  // Called on a Java thread owned by VideoCapturerAndroid.
  abstract void OnOutputFormatRequest(int width, int height, int fps);
}

// An implementation of CapturerObserver that forwards all calls from
// Java to the C layer.
static class NativeObserver implements CapturerObserver {
  private final long nativeCapturer;

  public NativeObserver(long nativeCapturer) {
    this.nativeCapturer = nativeCapturer;
  }

  @Override
  public void OnCapturerStarted(boolean success) {
    nativeCapturerStarted(nativeCapturer, success);
  }

  @Override
  public void OnFrameCaptured(byte[] data, int length, int width, int height,
      int rotation, long timeStamp) {
    nativeOnFrameCaptured(nativeCapturer, data, length, width, height,
        rotation, timeStamp);
  }

  @Override
  public void OnOutputFormatRequest(int width, int height, int fps) {
    nativeOnOutputFormatRequest(nativeCapturer, width, height, fps);
  }

  private native void nativeCapturerStarted(long nativeCapturer,
      boolean success);
  private native void nativeOnFrameCaptured(long nativeCapturer, byte[] data,
      int length, int width, int height, int rotation, long timeStamp);
  private native void nativeOnOutputFormatRequest(long nativeCapturer,
      int width, int height, int fps);
}
}
package sophena.rcp.editors.results.compare; import org.eclipse.swt.widgets.Composite; import org.eclipse.ui.forms.widgets.FormToolkit; import sophena.calc.Comparison; import sophena.calc.CostResult; import sophena.calc.CostResult.FieldSet; import sophena.rcp.utils.UI; import sophena.utils.Num; class CostTable { private final Comparison result; private CostTable(Comparison result) { this.result = result; } static CostTable of(Comparison result) { return new CostTable(result); } void render(Composite body, FormToolkit tk) { Composite comp = UI.formSection(body, tk, "Wirtschaftlichkeit"); Table table = new Table(result); createItems(table); table.render(comp); } private void createItems(Table t) { // investment costs t.row("Investitionskosten", idx -> Num.intStr(costs(idx).investments) + " EUR"); t.row("Investitionsförderung", idx -> Num.intStr(costs(idx).funding) + " EUR"); t.row("Anschlusskostenbeiträge", idx -> { double c = result.projects[idx].costSettings.connectionFees; return Num.intStr(c) + " EUR"; }); t.boldRow("Finanzierungsbedarf", idx -> { FieldSet costs = costs(idx); double cf = result.projects[idx].costSettings.connectionFees; double s = costs.investments - costs.funding - cf; return Num.intStr(s) + " EUR"; }); t.emptyRow(); // annual costs t.row("Kapitalgebundene Kosten", idx -> Num.intStr(costs(idx).capitalCosts) + " EUR/a"); t.row("Bedarfsgebundene Kosten", idx -> Num.intStr(costs(idx).consumptionCosts) + " EUR/a"); t.row("Betriebsgebundene Kosten", idx -> Num.intStr(costs(idx).operationCosts) + " EUR/a"); t.row("Sonstige Kosten", idx -> Num.intStr(costs(idx).otherAnnualCosts) + " EUR/a"); t.boldRow("Gesamtkosten", idx -> Num.intStr(costs(idx).totalAnnualCosts) + " EUR/a"); t.emptyRow(); // revenues t.row("Wärmeerlöse", idx -> Num.intStr(costs(idx).revenuesHeat) + " EUR/a"); t.row("Stromerlöse", idx -> Num.intStr(costs(idx).revenuesElectricity) + " EUR/a"); t.boldRow("Gesamterlöse", idx -> { FieldSet costs = costs(idx); double revs = 
costs.revenuesElectricity + costs.revenuesHeat; return Num.intStr(revs) + " EUR/a"; }); t.emptyRow(); t.boldRow("Jahresüberschuss", idx -> Num.intStr(costs(idx).annualSurplus) + " EUR/a"); t.boldRow("Wärmegestehungskosten", idx -> Num.intStr(costs(idx).heatGenerationCosts) + " EUR/MWh"); } private CostResult.FieldSet costs(int idx) { if (idx >= result.results.length) return new CostResult.FieldSet(); return result.results[idx].costResultFunding.dynamicTotal; } }
package gnu.expr; import gnu.bytecode.*; import gnu.mapping.Values; public class StackTarget extends Target { Type type; public StackTarget(Type type) { this.type = type; } public Type getType() { return type; } public void compileFromStack(Compilation comp, Type stackType) { if (type == stackType) return; CodeAttr code = comp.getCode(); if (stackType == Type.void_type) { comp.compileConstant (Values.empty); stackType = Type.pointer_type; } else if (stackType instanceof PrimType && type instanceof PrimType) { code.emitConvert(stackType, type); return; } if (stackType instanceof ClassType && type instanceof ClassType) { // If stackType inherits from target type, no coercion is needed. ClassType baseClass = (ClassType) stackType; while (baseClass != null) { if (baseClass == type) return; baseClass = baseClass.getSuperclass(); } } stackType.emitCoerceToObject(code); type.emitCoerceFromObject(code); } }
package com.gildedrose; import static org.junit.Assert.*; import org.junit.Test; public class GildedRoseTest { private static final String JUNK = "junk"; private static final String AGED_BRIE = "Aged Brie"; @Test public void itemsWhosQualityIsNegativeAreReportedAsQualityZero() { Item[] items = new Item[] { new Item(JUNK, 0, -1)}; GildedRose app = new GildedRose(items); assertEquals(0, app.items[0].quality); } @Test public void ctItemsWhosQualityBecomesNegativeAreReportedAsQualityZero() { Item[] items = new Item[] { new Item(JUNK, 0, 0)}; GildedRose app = new GildedRose(items); app.updateQuality(); assertEquals(0, app.items[0].quality); } @Test public void ctSpecialItemsWhosQualityIs50CannotGetHigherQuality() { Item[] items = new Item[] { new Item(AGED_BRIE, 0, 50)}; GildedRose app = new GildedRose(items); app.updateQuality(); assertEquals(50, app.items[0].quality); } @Test public void ctSpecialItemsIncreaseInQualityWithAge() { Item[] items = new Item[] { new Item(AGED_BRIE, 11, 40)}; GildedRose app = new GildedRose(items); app.updateQuality(); assertEquals(41, app.items[0].quality); } @Test public void ctAgedBrieIncreasesInQualityByTwoOnItsExpirationDay() { Item[] items = new Item[] { new Item(AGED_BRIE, 0, 40)}; GildedRose app = new GildedRose(items); app.updateQuality(); assertEquals(42, app.items[0].quality); } }
package com.github.nsnjson; import com.fasterxml.jackson.databind.*; import com.fasterxml.jackson.databind.node.ObjectNode; import org.junit.*; public class DriverTest extends AbstractFormatTest { @Test public void shouldBeConsistencyWhenGivenNull() { assertConsistency(getNull()); } @Test public void shouldBeConsistencyWhenGivenNumberIsInt() { assertConsistency(getNumberInt()); } @Test public void shouldBeConsistencyWhenGivenNumberIsLong() { assertConsistency(getNumberLong()); } @Test public void shouldBeConsistencyWhenGivenNumberIsDouble() { assertConsistency(getNumberDouble()); } @Test public void shouldBeConsistencyWhenGivenStringIsEmpty() { assertConsistency(getEmptyString()); } @Test public void shouldBeConsistencyWhenGivenString() { assertConsistency(getString()); } @Test public void shouldBeConsistencyWhenGivenBooleanIsTrue() { assertConsistency(getBooleanTrue()); } @Test public void shouldBeConsistencyWhenGivenBooleanIsFalse() { assertConsistency(getBooleanFalse()); } @Test public void shouldBeConsistencyWhenGivenArrayIsEmpty() { assertConsistency(getEmptyArray()); } @Test public void shouldBeConsistencyWhenGivenArray() { assertConsistency(getArray()); } @Test public void shouldBeConsistencyWhenGivenObjectIsEmpty() { assertConsistency(getEmptyObject()); } @Test public void shouldBeConsistencyWhenGivenObject() { assertConsistency(getObject()); } private static void assertConsistency(JsonNode value) { JsonNode restoredValue = Driver.decode(Driver.encode(value)); assertEquals(value, restoredValue); } private static void assertEquals(JsonNode value1, JsonNode value2) { Assert.assertEquals(value1.toString(), value2.toString()); } }
package com.firefly.net.tcp.ssl;

import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLEngineResult.HandshakeStatus;

import com.firefly.net.Session;
import com.firefly.net.buffer.FileRegion;
import com.firefly.utils.log.Log;
import com.firefly.utils.log.LogFactory;

/**
 * Server-side TLS layer on top of a {@link Session}: drives the SSLEngine
 * handshake, decrypts inbound records ({@link #read}) and encrypts outbound
 * application data ({@link #write}).
 */
public class SSLSession implements Closeable {

	private static Log log = LogFactory.getInstance().getLog("firefly-system");

	private Session session;
	private SSLEngine sslEngine;

	// Accumulates encrypted network bytes that have not been fully unwrapped yet.
	private ByteBuffer inNetBuffer;
	// Accumulates decrypted application data produced by unwrap().
	protected ByteBuffer requestBuffer;

	private static final int requestBufferSize = 1024 * 8;
	private static final int writeBufferSize = 1024 * 8;

	/*
	 * An empty ByteBuffer for use when one isn't available, say
	 * as a source buffer during initial handshake wraps or for close
	 * operations.
	 */
	private static final ByteBuffer hsBuffer = ByteBuffer.allocate(0);

	/*
	 * During our initial handshake, keep track of the next
	 * SSLEngine operation that needs to occur:
	 *
	 *     NEED_WRAP/NEED_UNWRAP
	 *
	 * Once the initial handshake has completed, we can short circuit
	 * handshake checks with initialHSComplete.
	 */
	private HandshakeStatus initialHSStatus;
	private boolean initialHSComplete;

	/*
	 * We have received the shutdown request by our caller, and have
	 * closed our outbound side.
	 */
	private boolean closed = false;

	// Creates a server-mode engine; the handshake begins by expecting the
	// client's first record, hence NEED_UNWRAP.
	public SSLSession(SSLContext sslContext, Session session) throws Throwable {
		this.session = session;
		requestBuffer = ByteBuffer.allocate(requestBufferSize);
		sslEngine = sslContext.createSSLEngine();
		sslEngine.setUseClientMode(false);
		initialHSStatus = HandshakeStatus.NEED_UNWRAP;
		initialHSComplete = false;
	}

	/**
	 * The initial handshake is a procedure by which the two peers exchange communication parameters until an SSLSession is established.
	 * Application data can not be sent during this phase.
	 * @param receiveBuffer encrypted message
	 * @return It return true means handshake success
	 * @throws Throwable
	 */
	public boolean doHandshake(ByteBuffer receiveBuffer) throws Throwable {
		// If the transport is gone, abort the handshake cleanly.
		if(!session.isOpen()) {
			sslEngine.closeInbound();
			return (initialHSComplete = false);
		}

		if (initialHSComplete)
			return initialHSComplete;

		if(initialHSStatus == HandshakeStatus.FINISHED) {
			log.info("session {} handshake end", session.getSessionId());
			return (initialHSComplete = true);
		}

		switch (initialHSStatus) {
		case NEED_UNWRAP:
			doHandshakeReceive(receiveBuffer);
			// Intentional fall-through: unwrap may flip the status to
			// NEED_WRAP, in which case we respond immediately.
			if (initialHSStatus != HandshakeStatus.NEED_WRAP)
				break;
		case NEED_WRAP:
			doHandshakeResponse();
			break;
		default: // NOT_HANDSHAKING/NEED_TASK/FINISHED
			throw new RuntimeException("Invalid Handshaking State" + initialHSStatus);
		}
		return initialHSComplete;
	}

	// Appends newly received network bytes to any leftover undecrypted bytes.
	// NOTE(review): the flip() here assumes inNetBuffer is left in write mode
	// between calls; unwrap() consumes it in read mode — confirm the
	// position/limit discipline across calls is consistent.
	private void copyNetBuffer(ByteBuffer now) {
		if(inNetBuffer == null) {
			inNetBuffer = now;
			return;
		}

		inNetBuffer.flip();
		ByteBuffer bb = ByteBuffer.allocate(inNetBuffer.remaining() + now.remaining());
		bb.put(inNetBuffer).put(now).flip();
		inNetBuffer = bb;
	}

	// Runs the NEED_UNWRAP half of the handshake: unwraps buffered network
	// bytes until the engine asks for something else or needs more data.
	private void doHandshakeReceive(ByteBuffer receiveBuffer) throws Throwable {
		SSLEngineResult result;

		if(receiveBuffer != null)
			copyNetBuffer(receiveBuffer);

		needIO:
		while (initialHSStatus == HandshakeStatus.NEED_UNWRAP) {
			unwrap:
			while(true) {
				result = sslEngine.unwrap(inNetBuffer, requestBuffer);
				// Fully consumed: drop the buffer so the next receive starts fresh.
				if(!inNetBuffer.hasRemaining())
					inNetBuffer = null;

				initialHSStatus = result.getHandshakeStatus();

				switch (result.getStatus()) {
				case OK:
					switch (initialHSStatus) {
					case NOT_HANDSHAKING:
						throw new IOException("Not handshaking during initial handshake");
					case NEED_TASK:
						// Delegated tasks run synchronously on this thread.
						initialHSStatus = doTasks();
						break;
					case FINISHED:
						initialHSComplete = true;
						log.info("session {} handshake end", session.getSessionId());
						break needIO;
					default:
						break;
					}
					break unwrap;

				case BUFFER_UNDERFLOW:
					// Not enough bytes for a full TLS record yet; wait for more.
					break needIO;

				case BUFFER_OVERFLOW:
					// Reset the application buffer size.
					// Grow requestBuffer to the engine-recommended size while
					// preserving already-decrypted data, then retry.
					int appSize = sslEngine.getSession().getApplicationBufferSize();
					ByteBuffer b = ByteBuffer.allocate(appSize + requestBuffer.position());
					requestBuffer.flip();
					b.put(requestBuffer);
					requestBuffer = b;
					// retry the operation.
					break;

				default: //CLOSED:
					// NOTE(review): message lacks spaces around the status
					// ("Received" + status + "during ..."); fix the literal in a
					// behavior-change commit, not here.
					throw new IOException("Received" + result.getStatus() + "during initial handshaking");
				}
			}
		} // "needIO" block.
	}

	// Runs the NEED_WRAP half of the handshake: wraps handshake records from
	// the empty source buffer and writes them to the session until the engine
	// no longer needs to send.
	private void doHandshakeResponse() throws Throwable {
		while(initialHSStatus == HandshakeStatus.NEED_WRAP) {
			SSLEngineResult result;
			ByteBuffer writeBuf = ByteBuffer.allocate(writeBufferSize);

			wrap:
			while(true) {
				result = sslEngine.wrap(hsBuffer, writeBuf);
				initialHSStatus = result.getHandshakeStatus();
				switch (result.getStatus()) {
				case OK:
					if (initialHSStatus == HandshakeStatus.NEED_TASK)
						initialHSStatus = doTasks();

					writeBuf.flip();
					session.write(writeBuf);
					break wrap;
				case BUFFER_OVERFLOW:
					// Grow the network buffer to the engine's packet size and retry.
					int netSize = sslEngine.getSession().getPacketBufferSize();
					ByteBuffer b = ByteBuffer.allocate(writeBuf.position() + netSize);
					writeBuf.flip();
					b.put(writeBuf);
					writeBuf = b;
					// retry the operation.
					break;
				default: // BUFFER_OVERFLOW/BUFFER_UNDERFLOW/CLOSED:
					throw new IOException("Received" + result.getStatus() + "during initial handshaking");
				}
			}
		}
	}

	/**
	 * This method is used to decrypt, it implied do handshake
	 * @param receiveBuffer encrypted message
	 * @return plaintext
	 * @throws Throwable sslEngine error during data read
	 */
	public ByteBuffer read(ByteBuffer receiveBuffer) throws Throwable {
		if(!doHandshake(receiveBuffer))
			return null;

		// NOTE(review): if doHandshake() just completed using this same
		// receiveBuffer, copying it again here may re-queue already-consumed
		// bytes — verify buffer positions on the handshake-completion path.
		copyNetBuffer(receiveBuffer);

		SSLEngineResult result;
		while(true) {
			result = sslEngine.unwrap(inNetBuffer, requestBuffer);
			if(!inNetBuffer.hasRemaining())
				inNetBuffer = null;
			/*
			 * Could check here for a renegotation, but we're only
			 * doing a simple read/write, and won't have enough state
			 * transitions to do a complete handshake, so ignore that
			 * possibility.
			 */
			switch (result.getStatus()) {
			case BUFFER_OVERFLOW:
				// Reset the application buffer size.
				// Grow the application buffer to the engine-recommended size,
				// preserving already-decrypted bytes, then retry the unwrap.
				int appSize = sslEngine.getSession().getApplicationBufferSize();
				ByteBuffer b = ByteBuffer.allocate(appSize + requestBuffer.position());
				requestBuffer.flip();
				b.put(requestBuffer);
				requestBuffer = b;
				// retry the operation.
				break;
			case BUFFER_UNDERFLOW:
				// Need more network bytes before a record can be decrypted.
				return null;
			case OK:
				if (result.getHandshakeStatus() == HandshakeStatus.NEED_TASK) {
					doTasks();
				}
				return getRequestBuffer();
			default:
				throw new IOException("sslEngine error during data read: " + result.getStatus());
			}
		}
	}

	/**
	 * This method is used to encrypt and flush to socket channel
	 * @param outputBuffer plaintext message
	 * @return writen length
	 * @throws Throwable sslEngine error during data write
	 */
	public int write(ByteBuffer outputBuffer) throws Throwable {
		// Application data may only flow after the initial handshake.
		if (!initialHSComplete)
			throw new IllegalStateException();

		int ret = 0;
		if(!outputBuffer.hasRemaining())
			return ret;

		final int remain = outputBuffer.remaining();
		// log.info("src remain {}", remain);
		// Wrap-and-flush until every plaintext byte has been consumed.
		while(ret < remain) {
			ByteBuffer writeBuf = ByteBuffer.allocate(writeBufferSize);

			wrap:
			while(true) {
				SSLEngineResult result = sslEngine.wrap(outputBuffer, writeBuf);
				ret += result.bytesConsumed();
				// log.info("consumed data: {} | {}", ret, outputBuffer.remaining());
				switch (result.getStatus()) {
				case OK:
					if (result.getHandshakeStatus() == HandshakeStatus.NEED_TASK)
						doTasks();

					writeBuf.flip();
					session.write(writeBuf);
					break wrap;
				case BUFFER_OVERFLOW:
					// Grow the network buffer to the engine's packet size and retry.
					int netSize = sslEngine.getSession().getPacketBufferSize();
					// log.info("sssss: {}", (writeBuf.position() + netSize));
					ByteBuffer b = ByteBuffer.allocate(writeBuf.position() + netSize);
					writeBuf.flip();
					b.put(writeBuf);
					writeBuf = b;
					// retry the operation.
					break;
				default:
					throw new IOException("sslEngine error during data write: " + result.getStatus());
				}
			}
		}

		return ret;
	}

	/**
	 * Perform a FileChannel.TransferTo on the socket channel.
	 * <P>
	 * We have to copy the data into an intermediary app ByteBuffer
	 * first, then send it through the SSLEngine.
	 * <P>
	 * We return the number of bytes actually read out of the
	 * filechannel.  However, the data may actually be stuck
	 * in the fileChannelBB or the outNetBB.  The caller
	 * is responsible for making sure to call dataFlush()
	 * before shutting down.
	 *
	 * @param fc to transfer FileChannel
	 * @param pos start position
	 * @param len length
	 * @return writen length
	 * @throws Throwable
	 */
	public long transferTo(FileChannel fc, long pos, long len) throws Throwable {
		if (!initialHSComplete)
			throw new IllegalStateException();

		long ret = 0;
		try {
			// Read the file in 4 KiB chunks and push each through write().
			ByteBuffer buf = ByteBuffer.allocate(1024 * 4);
			int i = 0;
			while((i = fc.read(buf, pos)) != -1) {
				if(i > 0) {
					ret += i;
					pos += i;
					buf.flip();
					write(buf);
					buf = ByteBuffer.allocate(1024 * 4);
				}
				// NOTE(review): this treats |len| as an end offset, not a
				// byte count — confirm intended semantics against callers.
				if(pos >= len)
					break;
			}
		} finally {
			fc.close();
		}
		return ret;
	}

	// Transfers a FileRegion through the TLS layer, always releasing its
	// resources afterwards.
	public long transferFileRegion(FileRegion file) throws Throwable {
		long ret = 0;
		try {
			ret = transferTo(file.getFile(), file.getPosition(), file.getCount());
		} finally {
			file.releaseExternalResources();
		}
		return ret;
	}

	// Copies the decrypted bytes out of requestBuffer into a right-sized
	// buffer for the caller, then resets requestBuffer for reuse.
	protected ByteBuffer getRequestBuffer() {
		requestBuffer.flip();
		ByteBuffer buf = ByteBuffer.allocate(requestBuffer.remaining());
		buf.put(requestBuffer).flip();
		requestBuffer.flip();
		log.info("current request buffer size: {}, {}", requestBuffer.remaining(), requestBuffer.capacity());
		return buf;
	}

	/**
	 * Do all the outstanding handshake tasks in the current Thread.
	 */
	protected SSLEngineResult.HandshakeStatus doTasks() {

		Runnable runnable;

		// We could run this in a separate thread, but do in the current for now.
		while ((runnable = sslEngine.getDelegatedTask()) != null) {
			runnable.run();
		}
		return sslEngine.getHandshakeStatus();
	}

	// Closes the outbound TLS side once; subsequent calls are no-ops.
	@Override
	public void close() throws IOException {
		if (!closed) {
			sslEngine.closeOutbound();
			closed = true;
		}
	}
}
package guitests;

import javafx.scene.control.ComboBox;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;
import org.junit.After;
import org.junit.Test;
import org.loadui.testfx.utils.FXTestUtils;
import prefs.Preferences;

import java.io.File;
import java.util.List;
import java.util.Map;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * End-to-end GUI test verifying that login credentials, last open filters,
 * last viewed repositories and saved boards survive an application quit and
 * are written to the global (test) configuration file.
 */
public class UseGlobalConfigsTest extends UITest {

    // Directory and name of the config file the app writes in test mode.
    String configFileDirectory = Preferences.DIRECTORY;
    String testConfigFileName = Preferences.TEST_CONFIG_FILE;

    @Override
    public void launchApp() {
        // isTestMode in UI checks for testconfig too so we don't need to specify --test=true here.
        FXTestUtils.launchApp(TestUI.class, "--testconfig=true");
    }

    @Test
    public void globalConfigTest() {
        // Cleaning up with @Before creates race condition.
        // Neither can we ensure test config file does not exist before starting test,
        // as the program automatically generates an empty config file if it does not
        // exist.

        // Log in as test/test against the dummy/dummy repository.
        TextField repoOwnerField = find("#repoOwnerField");
        doubleClick(repoOwnerField);
        doubleClick(repoOwnerField);
        type("dummy").push(KeyCode.TAB);
        type("dummy").push(KeyCode.TAB);
        type("test").push(KeyCode.TAB);
        type("test");
        click("Sign in");
        sleep(2000);

        // NOTE(review): argument order is (actual, expected) here, reversed from
        // the JUnit convention -- failure messages will be misleading.
        ComboBox<String> repositorySelector = find("#repositorySelector");
        assertEquals(repositorySelector.getValue(), "dummy/dummy");

        // Make a new board
        click("Boards");
        click("Save");
        // Somehow the text field cannot be populated by typing on the CI, use setText instead.
        // TODO find out why
        ((TextField) find("#boardnameinput")).setText("Empty Board");
        click("OK");

        // Load dummy2/dummy2 too (Ctrl+P opens a panel; "repo:" is typed via
        // SHIFT+SEMICOLON for the colon).
        press(KeyCode.CONTROL).press(KeyCode.P).release(KeyCode.P).release(KeyCode.CONTROL);
        click("#dummy/dummy_col1_filterTextField");
        type("repo");
        press(KeyCode.SHIFT).press(KeyCode.SEMICOLON).release(KeyCode.SEMICOLON).release(KeyCode.SHIFT);
        type("dummy2/dummy2");
        press(KeyCode.ENTER).release(KeyCode.ENTER);
        sleep(2000);

        // Make a new board
        click("Boards");
        click("Save");
        ((TextField) find("#boardnameinput")).setText("Dummy Board");
        click("OK");

        // Then exit program...
        click("Preferences");
        click("Quit");

        // ...and check if the test JSON is still there...
        File testConfig = new File(configFileDirectory, testConfigFileName);
        if (!(testConfig.exists() && testConfig.isFile())) fail();

        // ...then check that the JSON file contents are correct.
        Preferences testPref = new Preferences(true);

        // Credentials
        assertEquals("test", testPref.getLastLoginUsername());
        assertEquals("test", testPref.getLastLoginPassword());

        // Last open filters: first panel had no filter, second had the repo filter.
        List<String> lastOpenFilters = testPref.getLastOpenFilters();
        assertEquals(2, lastOpenFilters.size());
        assertEquals("", lastOpenFilters.get(0));
        assertEquals("repo:dummy2/dummy2", lastOpenFilters.get(1));

        // Last viewed repositories
        List<String> lastViewedRepositories = testPref.getLastViewedRepositories();
        assertEquals("dummy/dummy", lastViewedRepositories.get(0));
        assertEquals("dummy2/dummy2", lastViewedRepositories.get(1));

        // Boards: "Empty Board" captured one panel, "Dummy Board" captured two.
        Map<String, List<String>> boards = testPref.getAllBoards();
        List<String> emptyBoard = boards.get("Empty Board");
        assertEquals(1, emptyBoard.size());
        assertEquals("", emptyBoard.get(0));
        List<String> dummyBoard = boards.get("Dummy Board");
        assertEquals(2, dummyBoard.size());
        assertEquals("", dummyBoard.get(0));
        assertEquals("repo:dummy2/dummy2", dummyBoard.get(1));
    }

    @After
    public void teardown() {
        // Delete the generated test config so subsequent runs start clean.
        File testConfig = new File(configFileDirectory, testConfigFileName);
        if (testConfig.exists() && testConfig.isFile())
            testConfig.delete();
    }
}
package net.sf.flatpack.pzparser;

import java.io.StringReader;
import java.util.Iterator;
import java.util.NoSuchElementException;

import junit.framework.TestCase;
import net.sf.flatpack.DataError;
import net.sf.flatpack.DataSet;
import net.sf.flatpack.DefaultParserFactory;
import net.sf.flatpack.Parser;
import net.sf.flatpack.brparse.BuffReaderParseFactory;
import net.sf.flatpack.ordering.OrderBy;
import net.sf.flatpack.ordering.OrderColumn;
import net.sf.flatpack.util.FPConstants;
import net.sf.flatpack.util.FPInvalidUsageException;

/**
 * Test the different options that can be
 * set on the parser
 *
 * @author Paul Zepernick
 */
public class PZParserOptsTest extends TestCase {

    /** A row with fewer columns than the header still parses when short-line handling is on. */
    public void testHandleShortLines() {
        DataSet ds;
        final String cols = "COLUMN1,column2,Column3\r\n value1";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setHandlingShortLines(true);
        // p.setIgnoreParseWarnings(true);
        ds = p.parse();
        assertEquals("Should have a row of data", true, ds.next());

        //re-test the buffered reader
        p = BuffReaderParseFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setHandlingShortLines(true);
        // p.setIgnoreParseWarnings(true);
        ds = p.parse();
        assertEquals("Should have a row of data", true, ds.next());
    }

    /** A row with more columns than the header still parses when extra columns are ignored. */
    public void testIgnoreExtraColumns() {
        DataSet ds;
        final String cols = "COLUMN1,column2,Column3\r\n \"value1\",value2,value3,value4";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', '"');
        p.setIgnoreExtraColumns(true);
        // p.setIgnoreParseWarnings(true);
        ds = p.parse();
        assertEquals("Should have a row of data", true, ds.next());

        //re-test the buffered reader
        p = BuffReaderParseFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', '"');
        p.setIgnoreExtraColumns(true);
        // p.setIgnoreParseWarnings(true);
        ds = p.parse();
        assertEquals("Should have a row of data", true, ds.next());
    }

    /** Empty column values come back as null or "" depending on setNullEmptyStrings. */
    public void testEmptyToNull() {
        DataSet ds;
        final String cols = "COLUMN1,column2,Column3\r\n value1,,value3";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setNullEmptyStrings(true);
        ds = p.parse();
        ds.next();
        assertEquals("String should be null...", null, ds.getString("column2"));

        p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setNullEmptyStrings(false);
        ds = p.parse();
        ds.next();
        assertEquals("String should be empty...", "", ds.getString("column2"));
    }

    /** Short-line warnings are suppressed from the error collection when ignored. */
    public void testIgnoreWarnings() {
        DataSet ds;
        final String cols = "COLUMN1,column2,Column3\r\n value1,value2";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setHandlingShortLines(true);
        p.setIgnoreParseWarnings(true);
        ds = p.parse();
        assertEquals("Error collection should be empty...", 0, ds.getErrors().size());

        p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setHandlingShortLines(true);
        p.setIgnoreParseWarnings(false);
        ds = p.parse();
        ds.next();
        assertEquals("Error collection should contain warning...", 1, ds.getErrors().size());
    }

    /** Column lookups honor (or ignore) case depending on setColumnNamesCaseSensitive. */
    public void testCaseSensitiveMetaData() {
        DataSet ds;
        final String cols = "COLUMN1,column2,Column3\r\n value1,value2,value3";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        //check that column names are case sensitive
        p.setColumnNamesCaseSensitive(true);
        ds = p.parse();
        ds.next();
        try {
            ds.getString("COLUMN2");
            fail("Column was mapped as 'column2' and lookup was 'COLUMN2'...should fail with case sensitivity turned on");
        } catch (final NoSuchElementException e) {
            //this should happen since we are matching case
        }

        //check that column names are NOT case sensitive
        p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setColumnNamesCaseSensitive(false);
        ds = p.parse();
        ds.next();
        try {
            ds.getString("COLUMN2");
        } catch (final NoSuchElementException e) {
            fail("Column was mapped as 'column2' and lookup was 'COLUMN2'...should NOT fail with case sensitivity turned OFF");
        }
    }

    /** isRowEmpty() only works when empty-row flagging was enabled before parsing. */
    public void testEmptyRowCheck() {
        DataSet ds;
        final String cols = "column1,column2,column3\r\n,,";

        //check to see if the flag empty rows works
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setFlagEmptyRows(true);
        ds = p.parse();
        ds.next();
        assertEquals("Row should return empty...", ds.isRowEmpty(), true);

        //do not set to flag empty rows, but make the check anyhow to make sure we get an exception
        p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        ds = p.parse();
        ds.next();
        try {
            ds.isRowEmpty();
            fail("should have got FPInvalidUsageException...");
        } catch(FPInvalidUsageException e){}
    }

    /** DataError carries the raw offending line only when requested. */
    public void testStoreRawDataToDataError() {
        DataSet ds;
        final String cols = "column1,column2,column3\r\nVAL1,VAL2,VAL3,VAL4";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setStoreRawDataToDataError(true);
        ds = p.parse();
        Iterator errors = ds.getErrors().iterator();
        DataError de = (DataError)errors.next();
        assertNotNull("DataError should contain line data...", de.getRawData());

        p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setStoreRawDataToDataError(false);
        ds = p.parse();
        errors = ds.getErrors().iterator();
        de = (DataError)errors.next();
        assertNull("DataError should have <null> line data...", de.getRawData());
    }

    /** getRawData() on the DataSet requires setStoreRawDataToDataSet(true). */
    public void testStoreRawData() {
        DataSet ds;
        final String cols = "column1,column2,column3\r\nVAL1,VAL2,VAL3";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        p.setStoreRawDataToDataSet(true);
        ds = p.parse();
        ds.next();
        assertEquals("VAL1,VAL2,VAL3", ds.getRawData());

        p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        ds = p.parse();
        ds.next();
        try {
            ds.getRawData();
            fail("Should have received an FPExcpetion...");
        }catch(FPInvalidUsageException e) {
        }
    }

    /** Regression check: a trailing empty column (quoted or not) must not drop the row. */
    public void testEmptyLastColumn() {
        //this was reported as a bug in the forums check to see
        //if we actually have a problem
        DataSet ds;
        String cols = "column1,column2,column3\r\nVAL1,VAL2,";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        ds = p.parse();
        assertEquals(true, ds.next());

        cols = "column1,column2,column3\r\n\"VAL1\",\"VAL2\",\"\"";
        p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', '"');
        ds = p.parse();
        assertEquals(true, ds.next());
    }

    /** Exercises string, date and numeric sorting, including malformed-value fallbacks. */
    public void testSorting() {
        DataSet ds;
        String cols = "fname,lname,dob,anumber\r\npaul,zepernick,06/21/1981,2\r\nbenoit,xhenseval,05/01/1970,12";
        Parser p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        ds = p.parse();

        OrderBy order = new OrderBy();
        order.addOrderColumn(new OrderColumn("fname",OrderColumn.ASC));
        ds.orderRows(order);
        ds.next();
        assertEquals("benoit", ds.getString("fname"));

        order = new OrderBy();
        order.addOrderColumn(new OrderColumn("lname",OrderColumn.DESC));
        ds.orderRows(order);
        ds.next();
        assertEquals("zepernick", ds.getString("lname"));

        //test date sorting
        order = new OrderBy();
        OrderColumn column = new OrderColumn("dob",OrderColumn.ASC, OrderColumn.COLTYPE_DATE);
        column.setDateFormatPattern("MM/dd/yyyy");
        order.addOrderColumn(column);
        ds.orderRows(order);
        ds.next();
        assertEquals("xhenseval", ds.getString("lname"));

        //test numeric sorting
        order = new OrderBy();
        order.addOrderColumn(new OrderColumn("anumber",OrderColumn.DESC, OrderColumn.COLTYPE_NUMERIC));
        ds.orderRows(order);
        ds.next();
        assertEquals("xhenseval", ds.getString("lname"));

        //test bad date format & bad numeric data
        //06.21.1981 should default to 01/01/1900 since it does not match our date format
        cols = "fname,lname,dob,anumber\r\npaul,zepernick,06.21.1981,not a number\r\nbenoit,xhenseval,05/01/1970,12";
        p = DefaultParserFactory.getInstance().newDelimitedParser(new StringReader(cols), ',', FPConstants.NO_QUALIFIER);
        ds = p.parse();
        order = new OrderBy();
        column = new OrderColumn("dob",OrderColumn.ASC, OrderColumn.COLTYPE_DATE);
        column.setDateFormatPattern("MM/dd/yyyy");
        order.addOrderColumn(column);
        ds.orderRows(order);
        ds.next();
        assertEquals("zepernick", ds.getString("lname"));

        //not a number should get treated as a 0
        order = new OrderBy();
        order.addOrderColumn(new OrderColumn("anumber",OrderColumn.ASC, OrderColumn.COLTYPE_NUMERIC));
        ds.orderRows(order);
        ds.next();
        assertEquals("zepernick", ds.getString("lname"));
    }

    public static void main(final String[] args) {
        junit.textui.TestRunner.run(PZParserOptsTest.class);
    }
}
package i5.las2peer.p2p;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import i5.las2peer.api.p2p.ServiceNameVersion;
import i5.las2peer.api.security.AgentAccessDeniedException;
import i5.las2peer.api.security.AgentException;
import i5.las2peer.api.security.AgentNotFoundException;
import i5.las2peer.communication.Message;
import i5.las2peer.communication.PingPongContent;
import i5.las2peer.p2p.Node.SendMode;
import i5.las2peer.persistency.EncodingFailedException;
import i5.las2peer.security.L2pSecurityException;
import i5.las2peer.security.Mediator;
import i5.las2peer.security.ServiceAgentImpl;
import i5.las2peer.security.UserAgentImpl;
import i5.las2peer.serialization.SerializationException;
import i5.las2peer.testing.MockAgentFactory;

import java.io.Serializable;
import java.util.Date;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * Integration tests for {@link LocalNode}: messaging (direct, broadcast,
 * topics), pending delivery, agent registration/storage and service
 * invocation, all against in-process nodes.
 *
 * NOTE(review): several tests rely on static mutable state (counter,
 * testVariable) and real sleeps, so they must not run in parallel.
 */
public class LocalNodeTest {

    // Mock agents recreated before every test.
    private UserAgentImpl eve;
    private UserAgentImpl adam;
    private UserAgentImpl abel;

    // Shared across listener callbacks; reset in setUp().
    private static int counter;

    @Before
    public void setUp() {
        try {
            // Reset the in-process P2P network and re-create fresh mock agents.
            LocalNode.reset();
            eve = MockAgentFactory.getEve();
            adam = MockAgentFactory.getAdam();
            abel = MockAgentFactory.getAbel();

            counter = 0;
            testVariable = false;
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    // Flipped by listener callbacks to observe asynchronous notifications.
    private static boolean testVariable;

    /** Basic ping-pong between two agents on a single node. */
    @Test
    public void test() throws EncodingFailedException, L2pSecurityException, SerializationException,
            InterruptedException, AgentException, AgentAccessDeniedException {
        System.out.println("start: " + new Date());

        LocalNode testee = LocalNode.launchNode();

        // Registering a locked agent must be rejected.
        try {
            testee.registerReceiver(eve);
            fail("L2pSecurityException expected");
        } catch (L2pSecurityException e) {
        }

        eve.unlock("evespass");
        adam.unlock("adamspass");

        testee.registerReceiver(eve);
        testee.registerReceiver(adam);

        assertFalse(eve.isLocked());
        assertFalse(adam.isLocked());

        System.out.println("check1: " + new Date());

        testVariable = false;
        MessageResultListener listener = new MessageResultListener(10000) {
            @Override
            public void notifySuccess() {
                LocalNodeTest.testVariable = true;
            }
        };

        PingPongContent c = new PingPongContent();
        Message m = new Message(adam, eve, c);
        testee.sendMessage(m, listener);

        listener.waitForAllAnswers();

        assertFalse(listener.isTimedOut());
        assertFalse(listener.hasException());
        assertTrue(listener.isSuccess());
        assertTrue(listener.isFinished());

        Message answer = listener.getResults()[0];
        answer.open(adam, testee);

        // The pong must have been produced after the ping.
        assertTrue(c.getTimestamp() < ((PingPongContent) answer.getContent()).getTimestamp());
        assertTrue(testVariable);
    }

    /** Message delivery between agents hosted on two different nodes. */
    @Test
    public void testTwoNodes() throws L2pSecurityException, EncodingFailedException, SerializationException,
            InterruptedException, AgentException, AgentAccessDeniedException {
        adam.unlock("adamspass");
        eve.unlock("evespass");

        // launch to nodes with one agent each
        LocalNode testee1 = LocalNode.launchAgent(adam);
        LocalNode.launchAgent(eve);

        assertTrue(LocalNode.findAllNodesWithAgent(adam.getIdentifier()).length > 0);
        assertTrue(LocalNode.findAllNodesWithAgent(eve.getIdentifier()).length > 0);

        MessageResultListener l = new MessageResultListener(10000);
        Message m = new Message(adam, eve, new PingPongContent());

        testee1.sendMessage(m, l);

        l.waitForAllAnswers();

        assertEquals(1, l.getNumberOfExpectedResults());
        assertTrue(l.isFinished());
        assertTrue(l.isSuccess());
    }

    /** A message to an unreachable agent must time out and notify the listener. */
    @Test
    public void testTimeout() throws EncodingFailedException, L2pSecurityException, SerializationException,
            InterruptedException, AgentException, AgentAccessDeniedException {
        adam.unlock("adamspass");
        LocalNode testee1 = LocalNode.launchAgent(adam);

        MessageResultListener l = new MessageResultListener(2000) {
            @Override
            public void notifyTimeout() {
                LocalNodeTest.testVariable = true;
            }
        };

        // eve is not hosted anywhere, so this message can never be delivered.
        Message m = new Message(adam, eve, new PingPongContent(), 1000);
        LocalNode.setPendingTimeOut(1000);

        testee1.sendMessage(m, l);

        Thread.sleep(30000);

        assertFalse(l.isSuccess());
        assertTrue(l.isTimedOut());
        assertEquals(0, l.getResults().length);
        assertTrue(testVariable);
    }

    /** Broadcast to an agent hosted on two nodes must yield two results. */
    @Test
    public void testBroadcast() throws EncodingFailedException, L2pSecurityException, SerializationException,
            InterruptedException, AgentException, AgentAccessDeniedException {
        adam.unlock("adamspass");
        eve.unlock("evespass");

        // launch three nodes with one agent each
        LocalNode testee1 = LocalNode.launchAgent(adam);
        LocalNode hosting1 = LocalNode.launchAgent(eve);
        assertEquals(1, LocalNode.findAllNodesWithAgent(eve.getIdentifier()).length);

        LocalNode hosting2 = LocalNode.launchAgent(eve);

        assertTrue(hosting1.hasLocalAgent(eve));
        assertTrue(hosting2.hasLocalAgent(eve));

        // Each node must hold its own copy of the agent.
        assertNotSame(hosting1.getAgent(eve.getIdentifier()), hosting2.getAgent(eve.getIdentifier()));

        assertEquals(2, LocalNode.findAllNodesWithAgent(eve.getIdentifier()).length);

        MessageResultListener l = new MessageResultListener(10000) {
            @Override
            public void notifySuccess() {
                synchronized (this) {
                    System.out.println("result retrieved");
                    LocalNodeTest.counter++;
                }
            }
        };
        // l.addRecipient();
        assertEquals(1, l.getNumberOfExpectedResults());

        Message m = new Message(adam, eve, new PingPongContent());
        testee1.sendMessage(m, l, Node.SendMode.BROADCAST);
        assertEquals(2, l.getNumberOfExpectedResults());

        l.waitForAllAnswers();

        assertEquals(2, l.getNumberOfResults());
        // NOTE(review): argument order is (actual, expected), reversed from the
        // JUnit convention -- failure messages will be misleading.
        assertEquals(counter, 2);
        assertTrue(l.isSuccess());
        assertTrue(l.isFinished());
    }

    /** A pending message is delivered once a node hosting the recipient appears. */
    @Test
    public void testPending() {
        try {
            adam.unlock("adamspass");
            eve.unlock("evespass");

            LocalNode testee = LocalNode.launchAgent(adam);
            MessageResultListener l = new MessageResultListener(8000) {
                @Override
                public void notifySuccess() {
                    LocalNodeTest.testVariable = true;
                }
            };
            Message m = new Message(adam, eve, new PingPongContent());
            testee.sendMessage(m, l);

            // eve is not hosted yet -- the message must stay pending.
            Thread.sleep(5000);

            assertFalse(testVariable);
            assertFalse(l.isSuccess());
            assertFalse(l.isFinished());

            // launch another node hosting eve
            LocalNode.launchAgent(eve);

            Thread.sleep(LocalNode.getMaxMessageWait() + 6000);

            assertTrue(l.isSuccess());
            assertTrue(l.isFinished());
            assertTrue(testVariable);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    /** Storing agents requires them to be unlocked; stored agents come back locked. */
    @Test
    public void testRegisteringAgents() {
        try {
            adam.unlock("adamspass");
            LocalNode testee = LocalNode.launchAgent(adam);

            try {
                testee.storeAgent(abel);
                fail("L2sSecurityAxception expected");
            } catch (L2pSecurityException e) {
            }

            /* no problem anymore since its the same agent try { testee.storeAgent(adam);
            fail("AgentAlreadyRegistered exception expected"); } catch (AgentAlreadyRegisteredException e) { } */

            abel.unlock("abelspass");
            testee.storeAgent(abel);

            LocalNode testee2 = LocalNode.launchNode();
            UserAgentImpl retrieve = (UserAgentImpl) testee2.getAgent(abel.getIdentifier());
            assertTrue(retrieve.isLocked());

            try {
                testee2.storeAgent(retrieve);
                fail("SecurtityException expected");
            } catch (L2pSecurityException e) {
            }

            retrieve.unlock("abelspass");
            testee2.storeAgent(retrieve);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    /** Topic registration/unregistration bookkeeping, including topic removal when empty. */
    @Test
    public void testRegisteringTopics() {
        try {
            // start node
            adam.unlock("adamspass");
            abel.unlock("abelspass");
            eve.unlock("evespass");
            LocalNode testee = LocalNode.launchNode();
            testee.storeAgent(adam);
            testee.storeAgent(abel);
            testee.storeAgent(eve);

            // test registering to topic without being registered to the node
            try {
                testee.registerReceiverToTopic(adam, 1);
                fail("AgentNotRegisteredException expected");
            } catch (AgentNotRegisteredException e) {
            }

            // test unregsiter without being registered
            testee.unregisterReceiverFromTopic(adam, 1);

            // register agents
            testee.registerReceiver(adam);
            testee.registerReceiver(abel);
            testee.registerReceiver(eve);

            // test register
            assertFalse(testee.hasTopic(1));
            testee.registerReceiverToTopic(adam, 1);
            assertTrue(testee.hasTopic(1));

            // test register to another topic
            testee.registerReceiverToTopic(adam, 2);
            testee.registerReceiverToTopic(adam, 3);
            assertTrue(testee.hasTopic(2));
            assertTrue(testee.hasTopic(3));

            // test register another agent to same topic
            testee.registerReceiverToTopic(abel, 1);
            testee.registerReceiverToTopic(eve, 1);
            testee.registerReceiverToTopic(eve, 2);

            // unregister from topic - topic should be removed
            testee.unregisterReceiverFromTopic(adam, 3);
            assertFalse(testee.hasTopic(3));

            // unregister from topic - should not be removed
            testee.unregisterReceiverFromTopic(eve, 2);
            assertTrue(testee.hasTopic(2));

            // unregister agent - one topic should be removed
            testee.unregisterReceiver(adam);
            assertTrue(testee.hasTopic(1));
            assertFalse(testee.hasTopic(2));

            // test unregsiter without being registered - again
            testee.unregisterReceiverFromTopic(adam, 1);
            assertTrue(testee.hasTopic(1));

            // unregister agent - nothing should happen
            testee.unregisterReceiver(eve);
            assertTrue(testee.hasTopic(1));

            // unregister agent - remove topic
            testee.unregisterReceiver(abel);
            assertFalse(testee.hasTopic(1));

            // test unregsiter without being registered - again
            testee.unregisterReceiverFromTopic(adam, 1);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    /** Topic messages are cloned per receiver and addressed individually. */
    @Test
    public void testSendAndReceiveTopics() {
        try {
            // start node
            adam.unlock("adamspass");
            abel.unlock("abelspass");
            eve.unlock("evespass");
            LocalNode node1 = LocalNode.launchNode();
            LocalNode node2 = LocalNode.launchNode();
            node1.storeAgent(adam);
            node1.storeAgent(abel);
            node1.storeAgent(eve);

            // register receiver to topics
            Mediator mAdam = node1.createMediatorForAgent(adam);
            Mediator mAbel = node1.createMediatorForAgent(abel);
            Mediator mEve = node2.createMediatorForAgent(eve);
            node1.registerReceiver(mAdam);
            node1.registerReceiver(mAbel);
            node2.registerReceiver(mEve);
            node1.registerReceiverToTopic(mAdam, 1);
            node1.registerReceiverToTopic(mAbel, 1);
            node2.registerReceiverToTopic(mEve, 1);

            // send msg to unknown topic
            Message noreceiver = new Message(adam, 2, "some content");
            MessageResultListener lst1 = new MessageResultListener(1000);
            node1.sendMessage(noreceiver, lst1, SendMode.BROADCAST);

            // send message
            Message sent = new Message(adam, 1, "some content");
            MessageResultListener lst = new MessageResultListener(1000);
            node1.sendMessage(sent, lst, SendMode.BROADCAST);

            // wait until messages are sent
            Thread.sleep(4000);

            // receive
            Message received1 = mAdam.getNextMessage();
            Message received2 = mAbel.getNextMessage();
            Message received3 = mEve.getNextMessage();

            // messages should be cloned
            assertTrue(received1 != null && received2 != null && received3 != null);
            assertTrue(received1 != received2);

            // check if receiver is set correctly
            assertTrue(received1.getRecipientId().equalsIgnoreCase(adam.getIdentifier()));
            assertTrue(received2.getRecipientId().equalsIgnoreCase(abel.getIdentifier()));
            assertTrue(received3.getRecipientId().equalsIgnoreCase(eve.getIdentifier()));
            assertTrue(received3.getSenderId().equalsIgnoreCase(adam.getIdentifier()));
            assertTrue(received3.getTopicId() == 1);

            // check if open
            assertTrue(received1.isOpen());
            assertTrue(received2.isOpen());
            assertTrue(received3.isOpen());

            // open
            // received2.open(abel, node1);
            // assertEquals(received2.getContent(), "some content");

            // answer
            // a mediator always sends an answer...
            Message msg = new Message(adam, 1, "some content");
            Message answer = node1.sendMessageAndWaitForAnswer(msg);
            assertTrue(answer.getResponseToId() == msg.getId());
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    /** Collecting answers from all three topic subscribers. (Name typo kept for compatibility.) */
    @Test
    public void testCollectMessags() {
        try {
            // start node
            adam.unlock("adamspass");
            abel.unlock("abelspass");
            eve.unlock("evespass");
            LocalNode node1 = LocalNode.launchNode();
            LocalNode node2 = LocalNode.launchNode();
            node1.storeAgent(adam);
            node1.storeAgent(abel);
            node1.storeAgent(eve);

            // register receiver to topics
            Mediator mAdam = node1.createMediatorForAgent(adam);
            Mediator mAbel = node1.createMediatorForAgent(abel);
            Mediator mEve = node2.createMediatorForAgent(eve);
            node1.registerReceiver(mAdam);
            node1.registerReceiver(mAbel);
            node2.registerReceiver(mEve);
            node1.registerReceiverToTopic(mAdam, 1);
            node1.registerReceiverToTopic(mAbel, 1);
            node2.registerReceiverToTopic(mEve, 1);

            // collect answers
            Message msg1 = new Message(adam, 1, "collect...", 20000);
            Message[] answers = node1.sendMessageAndCollectAnswers(msg1, 5);
            assertTrue(answers.length == 3);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    /** Agents can be stored before launch; locked agents are still rejected afterwards. */
    @Test
    public void testStartupAgents() {
        try {
            LocalNode testee = LocalNode.newNode();
            adam.unlock("adamspass");
            testee.storeAgent(adam);
            testee.launch();

            try {
                testee.storeAgent(abel);
                fail("L2pSecurityException expected");
            } catch (L2pSecurityException e) {
            }

            abel.unlock("abelspass");
            testee.storeAgent(abel);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    /** Local service invocation through a registered ServiceAgent. */
    @Test
    public void testSimpleInvocation() {
        try {
            String serviceClass = "i5.las2peer.api.TestService";
            ServiceAgentImpl testService = ServiceAgentImpl.createServiceAgent(
                    ServiceNameVersion.fromString(serviceClass + "@1.0"), "a passphrase");
            testService.unlock("a passphrase");

            LocalNode testee = LocalNode.launchNode();

            eve.unlock("evespass");
            testee.storeAgent(eve);

            testee.storeAgent(testService);
            testee.registerReceiver(testService);

            // NOTE(review): "inc"(10) is expected to return 12 -- presumably the
            // TestService increments by 2; confirm against the service source.
            Serializable result = testee.invokeLocally(eve, testService, "inc", new Serializable[] { new Integer(10) });

            assertEquals(12, result);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    /** Login-name to agent-id resolution via the user registry. */
    @Test
    public void testUserRegistry() {
        try {
            UserAgentImpl a = UserAgentImpl.createUserAgent("a");
            UserAgentImpl b = UserAgentImpl.createUserAgent("b");
            a.unlock("a");
            b.unlock("b");
            a.setLoginName("alpha");
            b.setLoginName("beta");

            LocalNode testee = LocalNode.launchNode();
            testee.storeAgent(a);
            testee.storeAgent(b);

            assertEquals(a.getIdentifier(), testee.getUserManager().getAgentIdByLogin("alpha"));
            assertEquals(b.getIdentifier(), testee.getUserManager().getAgentIdByLogin("beta"));

            try {
                testee.getUserManager().getAgentIdByLogin("bla");
                fail("AgentNotFoundException expected");
            } catch (AgentNotFoundException e) {
                // corrects
            }
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }

    /** User registry entries stored on one node are visible from another. */
    @Test
    public void testUserRegDistribution() {
        try {
            LocalNode testee1 = LocalNode.launchNode();

            for (int i = 0; i < 11; i++) {
                UserAgentImpl a = UserAgentImpl.createUserAgent("pass" + i);
                a.unlock("pass" + i);
                a.setLoginName("login_" + i);
                testee1.storeAgent(a);
            }

            LocalNode testee2 = LocalNode.launchNode();

            testee2.getUserManager().getAgentIdByLogin("login_2");
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.toString());
        }
    }
}
package innovimax.mixthem;

import innovimax.mixthem.MixThem;
import innovimax.mixthem.Rule;
import innovimax.mixthem.exceptions.MixException;

import java.io.*;
import java.net.URL;

import org.junit.Assert;
import org.junit.Test;

/**
 * Integration tests for {@link MixThem}: each test mixes two fixture files
 * under a given {@link Rule} and compares the produced bytes against an
 * expected fixture (check* tests) or simply dumps the output (dump* tests).
 */
public class GenericTest {

    /** Rule._1 / Rule._2 must echo the first / second input file verbatim. */
    @Test
    public final void checkRule1() throws MixException, FileNotFoundException, IOException {
        URL url1 = getClass().getResource("test001_file1.txt");
        URL url2 = getClass().getResource("test001_file2.txt");
        File file1 = new File(url1.getFile());
        File file2 = new File(url2.getFile());

        ByteArrayOutputStream baos_rule_1 = new ByteArrayOutputStream();
        MixThem mixThem = new MixThem(file1, file2, baos_rule_1);
        mixThem.process(Rule._1);
        Assert.assertTrue(checkFileEquals(file1, baos_rule_1.toByteArray()));

        ByteArrayOutputStream baos_rule_2 = new ByteArrayOutputStream();
        mixThem = new MixThem(file1, file2, baos_rule_2);
        mixThem.process(Rule._2);
        Assert.assertTrue(checkFileEquals(file2, baos_rule_2.toByteArray()));
    }

    /** Rule._ADD must produce the concatenation fixture. */
    @Test
    public final void checkRuleAdd() throws MixException, FileNotFoundException, IOException {
        URL url1 = getClass().getResource("test001_file1.txt");
        URL url2 = getClass().getResource("test001_file2.txt");
        URL url12 = getClass().getResource("test001_output-add.txt");
        File file1 = new File(url1.getFile());
        File file2 = new File(url2.getFile());
        File file12 = new File(url12.getFile());

        ByteArrayOutputStream baos_rule_12 = new ByteArrayOutputStream();
        MixThem mixThem = new MixThem(file1, file2, baos_rule_12);
        mixThem.process(Rule._ADD);
        Assert.assertTrue(checkFileEquals(file12, baos_rule_12.toByteArray()));
    }

    /** Rule._ALT_LINE must interleave lines as in the alt-line fixture. */
    @Test
    public final void checkRuleAltLine() throws MixException, FileNotFoundException, IOException {
        URL url1 = getClass().getResource("test001_file1.txt");
        URL url2 = getClass().getResource("test001_file2.txt");
        URL urlComp = getClass().getResource("test001_output-altline.txt");
        File file1 = new File(url1.getFile());
        File file2 = new File(url2.getFile());
        File fileComp = new File(urlComp.getFile());

        ByteArrayOutputStream baos_rule = new ByteArrayOutputStream();
        MixThem mixThem = new MixThem(file1, file2, baos_rule);
        mixThem.process(Rule._ALT_LINE);
        Assert.assertTrue(checkFileEquals(fileComp, baos_rule.toByteArray()));
    }

    /** Rule._ALT_CHAR must interleave characters as in the alt-char fixture. */
    @Test
    public final void checkRuleAltChar() throws MixException, FileNotFoundException, IOException {
        URL url1 = getClass().getResource("test002_file1.txt");
        URL url2 = getClass().getResource("test002_file2.txt");
        URL urlComp = getClass().getResource("test002_output-altchar.txt");
        File file1 = new File(url1.getFile());
        File file2 = new File(url2.getFile());
        File fileComp = new File(urlComp.getFile());

        ByteArrayOutputStream baos_rule = new ByteArrayOutputStream();
        MixThem mixThem = new MixThem(file1, file2, baos_rule);
        mixThem.process(Rule._ALT_CHAR);
        Assert.assertTrue(checkFileEquals(fileComp, baos_rule.toByteArray()));
    }

    /** Visual smoke test: dumps both inputs under Rule._1 / Rule._2 to stdout. */
    @Test
    public final void dumpRule1() throws MixException, FileNotFoundException, IOException {
        URL url1 = getClass().getResource("test001_file1.txt");
        URL url2 = getClass().getResource("test001_file2.txt");
        File file1 = new File(url1.getFile());
        File file2 = new File(url2.getFile());

        System.out.println("test001/File1:");
        MixThem mixThem = new MixThem(file1, file2, System.out);
        mixThem.process(Rule._1);

        System.out.println("test001/File2:");
        mixThem = new MixThem(file1, file2, System.out);
        mixThem.process(Rule._2);
    }

    /** Visual smoke test for the alt-line rule. */
    @Test
    public final void dumpRuleAltLine() throws MixException, FileNotFoundException, IOException {
        URL url1 = getClass().getResource("test001_file1.txt");
        URL url2 = getClass().getResource("test001_file2.txt");
        URL urlComp = getClass().getResource("test001_output-altline.txt");
        File file1 = new File(url1.getFile());
        File file2 = new File(url2.getFile());
        File fileComp = new File(urlComp.getFile());

        System.out.println("test001/Mixed/alt-line:");
        MixThem mixThem = new MixThem(file1, file2, System.out);
        mixThem.process(Rule._ALT_LINE);

        /* System.out.println("test001/Expected/alt-line:");
        String line;
        BufferedReader br = new BufferedReader(new FileReader(fileComp));
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        br.close(); */
    }

    /** Visual smoke test for the alt-char rule. */
    @Test
    public final void dumpRuleAltChar() throws MixException, FileNotFoundException, IOException {
        URL url1 = getClass().getResource("test002_file1.txt");
        URL url2 = getClass().getResource("test002_file2.txt");
        URL urlComp = getClass().getResource("test002_output-altchar.txt");
        File file1 = new File(url1.getFile());
        File file2 = new File(url2.getFile());
        File fileComp = new File(urlComp.getFile());

        System.out.println("test002/File1:");
        MixThem mixThem = new MixThem(file1, file2, System.out);
        mixThem.process(Rule._1);

        System.out.println("test002/File2:");
        mixThem = new MixThem(file1, file2, System.out);
        mixThem.process(Rule._2);

        System.out.println("test002/Mixed/alt-char:");
        mixThem = new MixThem(file1, file2, System.out);
        mixThem.process(Rule._ALT_CHAR);

        /* System.out.println("test002/Expected/alt-char:");
        String line;
        BufferedReader br = new BufferedReader(new FileReader(fileComp));
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        br.close(); */
    }

    /**
     * Byte-for-byte comparison of an expected fixture file against an actual
     * byte array.
     *
     * @param fileExpected file holding the expected bytes
     * @param result       actual bytes produced by the mixer
     * @return true iff both contain exactly the same byte sequence
     * @throws FileNotFoundException if the fixture is missing
     * @throws IOException           on read failure
     */
    private static boolean checkFileEquals(File fileExpected, byte[] result) throws FileNotFoundException, IOException {
        // try-with-resources: the previous version leaked the FileInputStream.
        try (FileInputStream fisExpected = new FileInputStream(fileExpected)) {
            int c;
            int offset = 0;
            while ((c = fisExpected.read()) != -1) {
                if (offset >= result.length) {
                    return false; // expected file is longer than the result
                }
                if (c != result[offset++]) {
                    return false; // byte mismatch
                }
            }
            // Bug fix: the old check was `offset > result.length`, which can never
            // be true after the loop, so trailing extra bytes in 'result' were
            // silently accepted. Require an exact length match instead.
            return offset == result.length;
        }
    }
}
package io.sniffy.util;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Unit tests for {@code StringUtil.escapeJsonString}.
 */
public class StringUtilTest {

    /**
     * Escaping a string containing backslash, quote, control characters and a
     * closing script tag must yield a fully escaped, quoted JSON string; a
     * {@code null} input must yield an empty quoted string.
     */
    @Test
    public void testEscapeJsonString() throws Exception {
        String input = "\\\"\t\n\r\f/_</script>";
        String expected = "\"\\\\\\\"\\t\\n\\r\\f/_<\\/script>\"";
        assertEquals(expected, StringUtil.escapeJsonString(input));

        // null maps to the empty JSON string literal
        assertEquals("\"\"", StringUtil.escapeJsonString(null));
    }
}
package org.influxdb;

import org.influxdb.InfluxDB.ConsistencyLevel;
import org.influxdb.InfluxDBException.DatabaseNotFoundError;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;

import static org.mockito.Mockito.*;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;

/**
 * Integration tests for {@link BatchOptions}. Each test talks to a live
 * InfluxDB instance obtained from {@code TestUtils.connectToInfluxDB()} and
 * relies on real wall-clock time ({@code Thread.sleep}) to observe batching
 * and flush behaviour.
 * <p>
 * NOTE(review): most methods have their {@code @Test} annotation commented
 * out — presumably because they require a running server, but the reason is
 * not visible in this file; confirm before re-enabling.
 */
@RunWith(JUnitPlatform.class)
public class BatchOptionsTest {

    // Live connection, re-created before every test.
    private InfluxDB influxDB;

    @BeforeEach
    public void setUp() throws InterruptedException, IOException {
        this.influxDB = TestUtils.connectToInfluxDB();
    }

    /**
     * Test the implementation of {@link InfluxDB#enableBatch(int, int, TimeUnit, ThreadFactory)}.
     */
    //@Test
    public void testBatchEnabledWithDefaultSettings() {
        try {
            this.influxDB.enableBatch();
        } finally {
            this.influxDB.disableBatch();
        }
    }

    /** Verifies that every {@link BatchOptions} builder method stores its value. */
    //@Test
    public void testParametersSet() {
        BatchOptions options = BatchOptions.DEFAULTS.actions(3);
        Assertions.assertEquals(3, options.getActions());
        options = options.consistency(InfluxDB.ConsistencyLevel.ANY);
        Assertions.assertEquals(InfluxDB.ConsistencyLevel.ANY, options.getConsistency());
        options = options.flushDuration(1001);
        Assertions.assertEquals(1001, options.getFlushDuration());
        options = options.bufferLimit(7070);
        Assertions.assertEquals(7070, options.getBufferLimit());
        options = options.jitterDuration(104);
        Assertions.assertEquals(104, options.getJitterDuration());
        BiConsumer<Iterable<Point>, Throwable> handler = new BiConsumer<Iterable<Point>, Throwable>() {
            @Override
            public void accept(Iterable<Point> points, Throwable throwable) {
            }
        };
        options = options.exceptionHandler(handler);
        Assertions.assertEquals(handler, options.getExceptionHandler());
        ThreadFactory tf = Executors.defaultThreadFactory();
        options = options.threadFactory(tf);
        Assertions.assertEquals(tf, options.getThreadFactory());
    }

    /**
     * Test the implementation of {@link BatchOptions#actions(int)}.
     */
    //@Test
    public void testActionsSetting() throws InterruptedException {
        String dbName = "write_unittest_" + System.currentTimeMillis();
        try {
            BatchOptions options = BatchOptions.DEFAULTS.actions(3);
            this.influxDB.enableBatch(options);
            this.influxDB.createDatabase(dbName);
            this.influxDB.setDatabase(dbName);
            // Write 5 points; with actions=3 only the first full batch of 3
            // should have been flushed by the time we query below.
            for (int j = 0; j < 5; j++) {
                Point point = Point.measurement("cpu")
                        .time(j, TimeUnit.MILLISECONDS)
                        .addField("idle", (double) j)
                        .addField("user", 2.0 * j)
                        .addField("system", 3.0 * j).build();
                this.influxDB.write(point);
            }
            Thread.sleep(500);
            QueryResult result = influxDB.query(new Query("select * from cpu", dbName));
            Assertions.assertEquals(3, result.getResults().get(0).getSeries().get(0).getValues().size());
        } finally {
            this.influxDB.disableBatch();
            this.influxDB.deleteDatabase(dbName);
        }
    }

    /**
     * Test the implementation of {@link BatchOptions#flushDuration(int)}.
     * @throws InterruptedException
     */
    @Test
    public void testFlushDuration() throws InterruptedException {
        String dbName = "write_unittest_" + System.currentTimeMillis();
        try {
            BatchOptions options = BatchOptions.DEFAULTS.flushDuration(500);
            influxDB.createDatabase(dbName);
            influxDB.setDatabase(dbName);
            influxDB.enableBatch(options);
            write20Points(influxDB);
            // Before the 500 ms flush interval elapses nothing is visible yet.
            QueryResult result = influxDB.query(new Query("select * from weather", dbName));
            Assertions.assertNull(result.getResults().get(0).getSeries());
            Assertions.assertNull(result.getResults().get(0).getError());
            // After two flush intervals all 20 points must be queryable.
            Thread.sleep(1000);
            result = influxDB.query(new Query("select * from weather", dbName));
            Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size());
        } finally {
            this.influxDB.disableBatch();
            this.influxDB.deleteDatabase(dbName);
        }
    }

    /**
     * Test the implementation of {@link BatchOptions#jitterDuration(int)}.
     * @throws InterruptedException
     */
    //@Test
    public void testJitterDuration() throws InterruptedException {
        String dbName = "write_unittest_" + System.currentTimeMillis();
        try {
            BatchOptions options = BatchOptions.DEFAULTS.flushDuration(1000).jitterDuration(125);
            influxDB.createDatabase(dbName);
            influxDB.setDatabase(dbName);
            influxDB.enableBatch(options);
            write20Points(influxDB);
            QueryResult result = influxDB.query(new Query("select * from weather", dbName));
            Assertions.assertNull(result.getResults().get(0).getSeries());
            Assertions.assertNull(result.getResults().get(0).getError());
            // Wait for flushDuration + maximum jitter before expecting the data.
            Thread.sleep(1125);
            result = influxDB.query(new Query("select * from weather", dbName));
            Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size());
        } finally {
            influxDB.disableBatch();
            influxDB.deleteDatabase(dbName);
        }
    }

    /**
     * Test the implementation of {@link BatchOptions#jitterDuration(int)}.
     */
    //@Test
    public void testNegativeJitterDuration() {
        Assertions.assertThrows(IllegalArgumentException.class, () -> {
            BatchOptions options = BatchOptions.DEFAULTS.jitterDuration(-10);
            influxDB.enableBatch(options);
            influxDB.disableBatch();
            // NOTE(review): the enableBatch() call below ignores 'options' —
            // looks unintentional, but left untouched (doc-only change).
            options = BatchOptions.DEFAULTS.jitterDuration(0);
            influxDB.enableBatch();
            influxDB.disableBatch();
        });
    }

    // Shared driver: writes 20 points under the given bufferLimit/actions
    // combination and expects all of them to be queryable after a flush.
    private void doTestBufferLimit(int bufferLimit, int actions) throws InterruptedException {
        String dbName = "write_unittest_" + System.currentTimeMillis();
        try {
            BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(bufferLimit).actions(actions);
            influxDB.createDatabase(dbName);
            influxDB.setDatabase(dbName);
            influxDB.enableBatch(options);
            write20Points(influxDB);
            // NOTE(review): this first query result is immediately overwritten
            // below and never asserted; kept as-is (doc-only change).
            QueryResult result = influxDB.query(new Query("select * from weather", dbName));
            Thread.sleep(1000);
            result = influxDB.query(new Query("select * from weather", dbName));
            Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size());
        } finally {
            influxDB.disableBatch();
            influxDB.deleteDatabase(dbName);
        }
    }

    /**
     * Test the implementation of {@link BatchOptions#bufferLimit(int)}.
     */
    //@Test
    public void testBufferLimit1() throws InterruptedException {
        doTestBufferLimit(3, 4);
    }

    /**
     * Test the implementation of {@link BatchOptions#bufferLimit(int)}.
     */
    //@Test
    public void testBufferLimit2() throws InterruptedException {
        doTestBufferLimit(10, 4);
    }

    /**
     * Test the implementation of {@link BatchOptions#bufferLimit(int)}.
     */
    //@Test
    public void testNegativeBufferLimit() {
        Assertions.assertThrows(IllegalArgumentException.class, () -> {
            BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(-10);
            influxDB.enableBatch(options);
            influxDB.disableBatch();
            options = BatchOptions.DEFAULTS.bufferLimit(0);
            influxDB.enableBatch();
            influxDB.disableBatch();
        });
    }

    /**
     * Test the implementation of {@link BatchOptions#threadFactory(ThreadFactory)}.
     * @throws InterruptedException
     */
    //@Test
    public void testThreadFactory() throws InterruptedException {
        String dbName = "write_unittest_" + System.currentTimeMillis();
        try {
            BatchOptions options = BatchOptions.DEFAULTS.threadFactory((r) -> {
                return new Thread(r);
            });
            influxDB.createDatabase(dbName);
            influxDB.setDatabase(dbName);
            influxDB.enableBatch(options);
            write20Points(influxDB);
            Thread.sleep(3000);
            QueryResult result = influxDB.query(new Query("select * from weather", dbName));
            Assertions.assertEquals(20, result.getResults().get(0).getSeries().get(0).getValues().size());
        } finally {
            this.influxDB.disableBatch();
            this.influxDB.deleteDatabase(dbName);
        }
    }

    /**
     * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)}.
     * @throws InterruptedException
     */
    //@Test
    public void testHandlerOnRetryImpossible() throws InterruptedException {
        String dbName = "write_unittest_" + System.currentTimeMillis();
        // The spy always throws DatabaseNotFoundError, which cannot be
        // retried, so the exception handler must be invoked exactly once
        // and nothing may reach the database.
        InfluxDB spy = spy(influxDB);
        doThrow(DatabaseNotFoundError.class).when(spy).write(any(BatchPoints.class));
        try {
            BiConsumer<Iterable<Point>, Throwable> mockHandler = mock(BiConsumer.class);
            BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler);
            spy.createDatabase(dbName);
            spy.setDatabase(dbName);
            spy.enableBatch(options);
            writeSomePoints(spy, 1);
            Thread.sleep(1000);
            verify(mockHandler, times(1)).accept(any(), any());
            QueryResult result = influxDB.query(new Query("select * from weather", dbName));
            Assertions.assertNull(result.getResults().get(0).getSeries());
            Assertions.assertNull(result.getResults().get(0).getError());
        } finally {
            spy.disableBatch();
            spy.deleteDatabase(dbName);
        }
    }

    /**
     * Test the implementation of {@link BatchOptions#exceptionHandler(BiConsumer)}.
     * @throws InterruptedException
     */
    //@Test
    public void testHandlerOnRetryPossible() throws InterruptedException {
        String dbName = "write_unittest_" + System.currentTimeMillis();
        // Fail only the first write with a retryable error; after the retry
        // succeeds the handler must never have been called and the point
        // must be present in the database.
        InfluxDB spy = spy(influxDB);
        final Map<String, Boolean> map = new HashMap<>(1);
        map.put("firstCall", true);
        doAnswer((invocation) -> {
            if (map.get("firstCall")) {
                map.put("firstCall", false);
                throw new InfluxDBException("error");
            } else {
                return invocation.callRealMethod();
            }
        }).when(spy).write(any(BatchPoints.class));
        try {
            BiConsumer<Iterable<Point>, Throwable> mockHandler = mock(BiConsumer.class);
            BatchOptions options = BatchOptions.DEFAULTS.exceptionHandler(mockHandler);
            spy.createDatabase(dbName);
            spy.setDatabase(dbName);
            spy.enableBatch(options);
            writeSomePoints(spy, 1);
            Thread.sleep(5000);
            verify(mockHandler, never()).accept(any(), any());
            QueryResult result = influxDB.query(new Query("select * from weather", dbName));
            Assertions.assertNotNull(result.getResults().get(0).getSeries());
            Assertions.assertEquals(1, result.getResults().get(0).getSeries().get(0).getValues().size());
        } finally {
            spy.disableBatch();
            spy.deleteDatabase(dbName);
        }
    }

    /**
     * Test the implementation of {@link BatchOptions#consistency(InfluxDB.ConsistencyLevel)}.
     * @throws InterruptedException
     */
    //@Test
    public void testConsistency() throws InterruptedException {
        String dbName = "write_unittest_" + System.currentTimeMillis();
        influxDB.createDatabase(dbName);
        influxDB.setDatabase(dbName);
        try {
            // Each iteration rewrites points 0..n-1 (same timestamps), so the
            // expected row count grows by 5 per consistency level.
            int n = 5;
            for (ConsistencyLevel consistencyLevel : ConsistencyLevel.values()) {
                BatchOptions options = BatchOptions.DEFAULTS.consistency(consistencyLevel);
                influxDB.enableBatch(options);
                writeSomePoints(influxDB, n);
                Thread.sleep(2000);
                QueryResult result = influxDB.query(new Query("select * from weather", dbName));
                Assertions.assertEquals(n, result.getResults().get(0).getSeries().get(0).getValues().size());
                n += 5;
                this.influxDB.disableBatch();
            }
        } finally {
            this.influxDB.deleteDatabase(dbName);
        }
    }

    // Writes one "weather" point per hour index in [firstIndex, lastIndex].
    private void writeSomePoints(InfluxDB influxDB, int firstIndex, int lastIndex) {
        for (int i = firstIndex; i <= lastIndex; i++) {
            Point point = Point.measurement("weather")
                    .time(i, TimeUnit.HOURS)
                    .addField("temperature", (double) i)
                    .addField("humidity", (double) (i) * 1.1)
                    .addField("uv_index", "moderate").build();
            influxDB.write(point);
        }
    }

    // Convenience: 20 points with indices 0..19.
    private void write20Points(InfluxDB influxDB) {
        writeSomePoints(influxDB, 0, 19);
    }

    // Convenience: n points with indices 0..n-1.
    private void writeSomePoints(InfluxDB influxDB, int n) {
        writeSomePoints(influxDB, 0, n - 1);
    }
}
package org.jdesktop.swingx;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.net.URL;

import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;

/**
 * Test to expose known issues of <code>JXHeader</code>.
 * <p>
 *
 * Ideally, there would be at least one failing test method per open issue in
 * the issue tracker. Plus additional failing test methods for not fully
 * specified or not yet decided upon features/behaviour.
 * <p>
 *
 * If an issue is fixed and the corresponding methods are passing, they
 * should be moved over to the XXTest.
 *
 * @author Jeanette Winzenburg
 */
public class JXHeaderIssues extends InteractiveTestCase {

    /**
     * This issue has been fixed, but remains here (otherwise I get a warning
     * when running this test. Not sure if this JXHeaderIssues should just be
     * removed, or what).
     *
     * Issue #403-swingx: JXHeader doesn't show custom values.
     * <p>
     *
     * Breaking if values are passed in the constructor.
     */
    public void testTitleInContructor() {
        String title = "customTitle";
        JXHeader header = new JXHeader(title, null);
        // sanity: the property is set
        assertEquals(title, header.getTitle());
        // fishing in the internals ... not really safe, there are 2 labels:
        // we want the plain JLabel (the title), not the JXLabel (description)
        JLabel label = null;
        for (int i = 0; i < header.getComponentCount(); i++) {
            if (header.getComponent(i) instanceof JLabel
                    && !(header.getComponent(i) instanceof JXLabel)) {
                label = (JLabel) header.getComponent(i);
                break;
            }
        }
        // Guard added: fail with a clear message instead of an NPE when the
        // header's internal layout changes and no plain JLabel is found.
        assertNotNull("header must contain a plain JLabel showing the title", label);
        assertEquals("the label's text must be equal to the headers title",
                header.getTitle(), label.getText());
    }

    /**
     * Issue #403-swingx: JXHeader doesn't show custom values.<p>
     *
     * All values are passed in the constructor.
     */
    public void interactiveCustomProperties() {
        URL url = getClass().getResource("resources/images/wellTop.gif");
        assertNotNull(url);
        JPanel p = new JPanel(new BorderLayout());
        JXHeader header = new JXHeader("MyTitle", "MyDescription", new ImageIcon(url));
        p.add(header);
        // added just to better visualize bkg gradient in the JXHeader.
        p.add(new JLabel("Reference component"), BorderLayout.SOUTH);
        showInFrame(p, "JXHeader with custom properties");
    }

    /**
     * Issue #469-swingx: JXHeader doesn't wrap words in description.<p>
     *
     * All values are passed in the constructor.
     */
    public void interactiveWordWrapping() {
        URL url = getClass().getResource("resources/images/wellTop.gif");
        assertNotNull(url);
        JPanel p = new JPanel(new BorderLayout());
        JXHeader header = new JXHeader("MyTitle",
                "this is a long test with veeeeeeeeeeeeeery looooooong wooooooooooooords",
                new ImageIcon(url));
        p.add(header);
        p.setPreferredSize(new Dimension(200, 150));
        showInFrame(p, "word wrapping JXHeader");
    }

    /**
     * Entry point for running the interactive (visual) tests directly.
     */
    public static void main(String[] args) {
        JXHeaderIssues test = new JXHeaderIssues();
        try {
            test.runInteractiveTests();
        } catch (Exception e) {
            System.err.println("exception when executing interactive tests:");
            e.printStackTrace();
        }
    }
}
package controllers;

import java.util.*;

import play.*;
import play.mvc.*;
import play.libs.Json;

import com.fasterxml.jackson.databind.node.ObjectNode;

import views.html.*;
import models.*;
import service.YahooFinanceService;

/**
 * Controller for handling buy, sell, and short market orders.
 */
public class Trader extends Controller {

    /** The service that provides our stock data. */
    private static final YahooFinanceService YAHOO = YahooFinanceService.getInstance();

    /**
     * Method for returning a badRequest for an invalid stock ticker.
     * @param message is the message to return in the Result
     * @return a JSON Result with {"status": "KO", "message": ...}
     */
    private static Result invalidRequest( final String message ) {
        return badRequest( Json.newObject()
            .put("status", "KO")
            .put("message", message) );
    }

    /**
     * Method for purchasing stock in a portfolio.
     * @param portfolioId is the unique id of the portfolio to purchase stock in
     * @param ticker is the stock ticker of the stock that is to be purchased
     * @param qty is the amount of stock to purchase
     * @return returns a JSON result ({"status": "OK", "order": ...} on success).
     */
    public static Result buyStock ( final long portfolioId, final String ticker, final long qty ) {
        Stock stock = YAHOO.getStock( ticker );
        if ( stock == null ) {
            return invalidRequest("Invalid Ticker Symbol");
        }
        // Position.addOwnPosition returns null when the portfolio cannot
        // cover the purchase.
        Position pos = Position.addOwnPosition(portfolioId, qty, stock);
        if ( pos == null ) {
            return invalidRequest("Insufficient Funds");
        }
        ObjectNode result = Json.newObject();
        result.put("status", "OK");
        result.put("order", pos.getJson());
        return ok(result);
    }

    /**
     * Method for selling stock in a portfolio.
     * Sells up to {@code qty} shares across the portfolio's open positions for
     * {@code ticker}, crediting the proceeds to the portfolio's cash position.
     *
     * NOTE(review): the statement order below matters — positions are mutated,
     * deleted/updated, and the cash position credited as the loop progresses.
     * Left byte-identical; doc-only change.
     *
     * @param portfolioId is the unique id of the portfolio to sell stock in
     * @param ticker is the stock ticker of the stock that is to be sold
     * @param qty is the amount of stock to sell
     * @return returns a JSON result.
     */
    public static Result sellStock ( final long portfolioId, final String ticker, final long qty ) {
        Stock stock = YAHOO.getStock( ticker );
        if ( stock == null ) {
            return invalidRequest("Invalid Ticker Symbol");
        }
        List<Position> ownPositions = Position.getAllOwnPositionsForTicker( portfolioId, ticker );
        if ( ownPositions == null || ownPositions.isEmpty() ) {
            return invalidRequest("No positions in " + ticker + " found");
        }
        // Total shares held across all positions; must cover the request.
        long ownQty = 0;
        for ( final Position position : ownPositions ) {
            ownQty += position.qty;
        }
        if ( ownQty < qty ) {
            return invalidRequest("Insufficient quantity of " + ticker + " owned");
        }
        Position cashPosition = Position.getCashPosition( portfolioId );
        long qtyToSell = qty;
        // All shares are sold at the current quoted price.
        final double price = stock.getPrice();
        for ( Position position : ownPositions ) {
            if ( position.qty <= qtyToSell ) {
                // Position fully consumed: credit cash and delete it.
                cashPosition.price += position.qty * price;
                qtyToSell -= position.qty;
                position.qty = 0;
                position.delete();
            } else {
                // Position partially consumed: credit cash and shrink it.
                cashPosition.price += qtyToSell * price;
                position.qty -= qtyToSell;
                position.update();
                qtyToSell = 0;
            }
            if ( qtyToSell == 0 ) {
                break;
            }
        }
        cashPosition.update();
        ObjectNode result = Json.newObject();
        result.put("status", "OK");
        result.put("sold", ticker);
        result.put("qty", qty);
        result.put("price", price);
        result.put("total", price * qty);
        result.put("cashPosition", Position.getCashPosition( portfolioId ).getJson());
        return ok(result);
    }
}
package org.flymine.metadata;

import java.io.File;
import java.util.List;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
import java.util.Collection;
import java.util.Properties;

import org.flymine.util.PropertiesUtil;
import org.flymine.modelproduction.xml.ModelParser;

/**
 * Represents a named business model, makes availble metadata for each class
 * within model.
 *
 * @author Richard Smith
 */
public class Model
{
    // Lazily-initialised singleton, see getInstance().
    private static Model model;
    private final String name;
    // class name -> ClassDescriptor
    private final Map cldMap = new HashMap();
    // ClassDescriptor -> List of direct subclass ClassDescriptors
    private final Map subclassMap = new HashMap();
    // interface ClassDescriptor -> List of implementing ClassDescriptors
    private final Map implementorsMap = new HashMap();

    /**
     * Return a Model for properties-specified model name (loading Model if necessary)
     * @return the relevant metadata
     * @throws MetaDataException if there is problem parsing the model xml
     */
    public static Model getInstance() throws MetaDataException {
        if (model != null) {
            return model;
        }
        Properties props = PropertiesUtil.getPropertiesStartingWith("model");
        if (props.size() == 0) {
            throw new MetaDataException("No 'model' properties were found"
                                        + " (check properties file)");
        }
        String modelName = props.getProperty("name");
        if (modelName == null) {
            throw new MetaDataException("'model' does not have a name specified"
                                        + " (check properties file)");
        }
        String filename = modelName + "_model.xml";
        // Fix: the original passed URL.toString() ("file:/...") to the File
        // constructor, producing a path that does not exist on disk; use
        // getFile() to obtain the actual filesystem path. Also guard against
        // a missing resource, which previously caused a NullPointerException.
        java.net.URL resource = Model.class.getClassLoader().getResource(filename);
        if (resource == null) {
            throw new MetaDataException("Model file '" + filename
                                        + "' was not found on the classpath");
        }
        File f = new File(resource.getFile());
        try {
            ModelParser parser = new ModelParser();
            parser.parse(f);
            model = new Model(parser.getModelName(), parser.getClasses());
        } catch (Exception e) {
            throw new MetaDataException("Error parsing metadata: " + e);
        }
        return model;
    }

    /**
     * Construct a Model with a name and list of ClassDescriptors. The model will be
     * set to this in each of the ClassDescriptors. NB This method should only be called
     * by members of the modelproduction package, eventually it may be replaced with
     * a static addModel method linked to getInstanceByName (or similar)
     * @param name name of model
     * @param clds a List of ClassDescriptors in the model
     * @throws MetaDataException if inconsistencies found in model
     */
    public Model(String name, List clds) throws MetaDataException {
        // Fix: the original compared (name == ""), which tests reference
        // identity and only catches the interned empty literal; compare
        // length instead so any empty string is rejected.
        if (name == null || name.length() == 0) {
            throw new IllegalArgumentException("A name must be supplied for the Model");
        }
        this.name = name;    // check for valid package name??

        Iterator cldIter = clds.iterator();

        // 1. Put all ClassDescriptors in model.
        while (cldIter.hasNext()) {
            ClassDescriptor cld = (ClassDescriptor) cldIter.next();
            cldMap.put(cld.getClassName(), cld);
            // create maps of ClassDescriptor to empty lists for subclasses and implementors
            subclassMap.put(cld, new ArrayList());
            implementorsMap.put(cld, new ArrayList());
        }

        // 2. Now set model in each ClassDescriptor, this sets up superclass, interface,
        // etc descriptors.  Set ClassDescriptors and reverse refs in ReferenceDescriptors.
        cldIter = clds.iterator();
        while (cldIter.hasNext()) {
            ClassDescriptor cld = (ClassDescriptor) cldIter.next();
            cld.setModel(this);

            // add this to list of subclasses if a superclass exists
            ClassDescriptor superCld = cld.getSuperclassDescriptor();
            if (superCld != null) {
                List sub = (List) subclassMap.get(superCld);
                sub.add(cld);
            }

            // add this class to implementors lists for any interfaces
            List interfaces = cld.getInterfaceDescriptors();
            if (interfaces.size() > 0) {
                Iterator iter = interfaces.iterator();
                while (iter.hasNext()) {
                    ClassDescriptor iCld = (ClassDescriptor) iter.next();
                    List implementors = (List) implementorsMap.get(iCld);
                    implementors.add(cld);
                }
            }
        }

        // 3. Finally, set completed lists of subclasses and implementors in
        // each ClassDescriptor.
        cldIter = clds.iterator();
        while (cldIter.hasNext()) {
            ClassDescriptor cld = (ClassDescriptor) cldIter.next();
            List sub = (List) subclassMap.get(cld);
            if (sub.size() > 0) {
                cld.setSubclassDescriptors(sub);
            }
            if (cld.isInterface()) {
                List implementors = (List) implementorsMap.get(cld);
                if (implementors.size() > 0) {
                    cld.setImplementorDescriptors(implementors);
                }
            }
        }
    }

    /**
     * Get a ClassDescriptor by name, null if no ClassDescriptor of given name in Model.
     * @param name name of ClassDescriptor requested
     * @return the requested ClassDescriptor
     */
    public ClassDescriptor getClassDescriptorByName(String name) {
        if (cldMap.containsKey(name)) {
            return (ClassDescriptor) cldMap.get(name);
        } else {
            return null;
        }
    }

    /**
     * Get all ClassDescriptors in this model.
     * @return a list of all ClassDescriptors in the model
     */
    public List getClassDescriptors() {
        return new ArrayList(cldMap.values());
    }

    /**
     * Return true if named ClassDescriptor is found in the model
     * @param name named of ClassDescriptor search for
     * @return true if named descriptor found
     */
    public boolean hasClassDescriptor(String name) {
        return cldMap.containsKey(name);
    }

    /**
     * Get the name of this model - i.e. package name.
     * @return name of the model
     */
    public String getName() {
        return this.name;
    }

    /**
     * Get a Collection of fully qualified class names in this model (i.e. including
     * package name).
     * NOTE(review): this returns a live view of the internal key set; callers
     * mutating it would corrupt the model. Left as-is because existing callers
     * may rely on the live view — confirm before wrapping unmodifiable.
     * @return Collection of fully qualified class names
     */
    public Collection getClassNames() {
        return (Collection) cldMap.keySet();
    }

    /**
     * @see Object#toString
     */
    public String toString() {
        StringBuffer sb = new StringBuffer();
        sb.append("<model name=\"" + name + "\">");
        for (Iterator iter = getClassDescriptors().iterator(); iter.hasNext();) {
            sb.append(iter.next().toString());
        }
        sb.append("</model>");
        return sb.toString();
    }
}
package org.mskcc.portal.servlet;

import org.rosuda.REngine.REXP;
import org.rosuda.REngine.Rserve.RConnection;
import org.owasp.validator.html.PolicyException;
import org.apache.log4j.Logger;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.regex.Pattern;
import java.util.ArrayList;

/**
 * Generates Plots via RServe.
 *
 * @author Anders Jacobsen, Ethan Cerami.
 */
public class PlotServlet extends HttpServlet {
    private static Logger logger = Logger.getLogger(PlotServlet.class);
    public static final String SKIN = "skin";
    public static final String SKIN_COL_GROUP = "skin_col_gp";
    public static final String LEGEND_POS = "legendPos";
    public static final String SKIN_NORMALS = "skin_normals";
    public static final int PLOT_WIDTH = 600;
    public static final int PLOT_HEIGHT = 600;
    private static final String UNDEFINED = "undefined";
    // Error message rendered into the plot image by R on failure.
    private static final String R_RETURN_MESG =
            ("An error occurred processing your request.\\n"
            + "It may be that your gene/case set combination has no data\\n"
            + "for this data type. If you believe this is an error,\\n"
            + "please contact us at cbioportal@googlegroups.com.");
    private static ServletXssUtil servletXssUtil;

    /**
     * Initializes the servlet.
     *
     * @throws ServletException Serlvet Init Error.
     */
    public void init() throws ServletException {
        super.init();
        try {
            servletXssUtil = ServletXssUtil.getInstance();
        } catch (PolicyException e) {
            throw new ServletException(e);
        }
    }

    /**
     * Processes GET Request: builds an R script that renders the requested
     * plot to a temp file via Cairo, executes it over Rserve, and streams the
     * resulting PNG/PDF bytes back to the client. On any failure, redirects
     * to a static "not available" image.
     *
     * Fix: the RConnection is now closed in a finally block; the original
     * only closed it on the success path, leaking a connection on every error.
     *
     * @param req Http Servlet Request.
     * @param res http Servlet Response.
     * @throws ServletException Servlet Error.
     * @throws IOException IO Error.
     */
    public void doGet(HttpServletRequest req, HttpServletResponse res)
            throws ServletException, IOException {
        RConnection c = null;
        try {
            // Get All Parameters Safely
            Pattern p = Pattern.compile(",");
            // TODO: Later: ACCESS CONTROL: change to cancer study, etc.
            String cancerTypeId = servletXssUtil.getCleanInput(req, QueryBuilder.CANCER_STUDY_ID);
            String[] genesList = p.split(servletXssUtil.getCleanInput(req, QueryBuilder.GENE_LIST));
            String[] geneticProfilesList = p.split
                    (servletXssUtil.getCleanInput(req, QueryBuilder.GENETIC_PROFILE_IDS));
            String skin = servletXssUtil.getCleanInput(req, SKIN);
            String caseSetId = servletXssUtil.getCleanInput(req, QueryBuilder.CASE_SET_ID);
            String caseIds = servletXssUtil.getCleanInput(req, QueryBuilder.CASE_IDS);
            String format = servletXssUtil.getCleanInput(req, QueryBuilder.FORMAT);
            String skinColGroup = servletXssUtil.getCleanInput(req, SKIN_COL_GROUP);
            String skinNormals = servletXssUtil.getCleanInput(req, SKIN_NORMALS);
            String legendPos = servletXssUtil.getCleanInput(req, LEGEND_POS);

            if (format == null || !format.equals("pdf")) {
                format = "png";  // default is png
            }

            // Build comma-separated, single-quoted R vectors for genes and profiles.
            String genes = "";
            for (String s : genesList) {
                genes += "'" + s + "',";
            }
            genes = genes.substring(0, genes.length() - 1);
            String geneticProfiles = "";
            for (String s : geneticProfilesList) {
                geneticProfiles += "'" + s + "',";
            }
            geneticProfiles = geneticProfiles.substring(0, geneticProfiles.length() - 1);

            c = new RConnection();

            if (format.equals("pdf")) {
                res.setContentType("application/pdf");
            } else {
                res.setContentType("image/png");
            }

            // Unique temp file on the Rserve host; deleted after read-back below.
            String tmpfile = "tmp" + System.currentTimeMillis() + "." + format;

            // Must use Cairo Library, so that we can generate Images without GUI
            StringBuffer plot = new StringBuffer("library(cgdsr);\n");
            plot.append("library(Cairo);\n");
            if (format.equals("png")) {
                plot.append("Cairo(width=" + PLOT_WIDTH + ", height=" + PLOT_HEIGHT
                        + ", file='" + tmpfile + "', type='" + format + "', units=\"px\")\n");
            } else {
                plot.append("pdf(width=6, height=6, file='" + tmpfile + "')\n");
            }

            String currentUrl = req.getRequestURL().toString();
            String localHost = "127.0.0.1";
            logger.debug("Current URL is:  " + currentUrl);
            // locate host name to replace
            // NOTE(review): this line was corrupted in the source (unterminated
            // string literal); reconstructed as the start of the authority part
            // of an http(s) URL — confirm against upstream history.
            int startOfHostname = currentUrl.indexOf("//") + 2;
            int endOfHostname = currentUrl.indexOf(":", startOfHostname);
            // port not included in url
            if (endOfHostname == -1) {
                endOfHostname = currentUrl.indexOf("/", startOfHostname);
                // we need to append port number
                localHost += ":38080";
            }
            String hostname = currentUrl.substring(startOfHostname, endOfHostname);
            // Point the cgdsr client at the local web API instead of the public host.
            String cgdsUrl = currentUrl.replaceAll("plot.(do|pdf)", "");
            cgdsUrl = cgdsUrl.replace(hostname, localHost);
            logger.debug("Web API URL is:  " + cgdsUrl);
            plot.append("c = CGDS('" + cgdsUrl + "',TRUE);\n");

            // add our own return mesg - must come before call to plot
            plot.append("setPlotErrorMsg(c, \"" + R_RETURN_MESG + "\");\n");

            if (caseSetId != null && !caseSetId.equals("-1")) {
                plot.append(String.format("plot(c, '%s', c(%s), c(%s), '%s', skin='%s' ",
                        cancerTypeId, genes, geneticProfiles, caseSetId, skin));
            } else {
                // Explicit case IDs: normalise whitespace/comma separated list.
                ArrayList<String> caseList = new ArrayList<String>();
                for (String currentCase : caseIds.split("[\\s,]+")) {
                    currentCase = currentCase.trim();
                    if (currentCase.length() > 0) {
                        caseList.add(currentCase);
                    }
                }
                StringBuffer caseBuffer = new StringBuffer();
                for (int i = 0; i < caseList.size(); i++) {
                    caseBuffer.append("\"" + caseList.get(i) + "\"");
                    if (i < caseList.size() - 1) {
                        caseBuffer.append(",");
                    }
                }
                plot.append(String.format("plot(c, '%s', c(%s), c(%s), cases=c(%s), skin='%s' ",
                        cancerTypeId, genes, geneticProfiles, caseBuffer.toString(), skin));
            }

            if (skinColGroup != null && !skinColGroup.equals(UNDEFINED)) {
                plot.append(", skin.col.gp=c(");
                if (skinColGroup.contains(",")) {
                    String colGroups[] = skinColGroup.split(",");
                    for (int i = 0; i < colGroups.length; i++) {
                        plot.append("'" + colGroups[i] + "'");
                        if (i < colGroups.length - 1) {
                            plot.append(",");
                        }
                    }
                } else {
                    plot.append("'" + skinColGroup + "'");
                }
                plot.append(")");
            }
            if (skinNormals != null) {
                plot.append(", skin.normals='" + skinNormals + "'");
            }
            if (legendPos != null) {
                plot.append(", legend.pos='" + legendPos + "'");
            }
            plot.append(");\n");
            plot.append("dev.off();\n");

            logger.debug("Call to R Follows:");
            logger.debug(plot.toString());

            // open device
            c.parseAndEval(plot.toString());

            // There is no I/O API in REngine because it's actually more efficient to use R for this
            // we limit the file size to 1MB which should be sufficient and we delete the file as well
            REXP xp = c.parseAndEval("r=readBin('" + tmpfile
                    + "','raw',1024*1024); unlink('" + tmpfile + "'); r;");

            // Stream the raw image/PDF bytes back to the client.
            byte[] imageBytes = xp.asBytes();
            res.setContentLength(imageBytes.length);
            res.getOutputStream().write(imageBytes);
        } catch (Exception e) {
            // In the event of an exception, redirect to the Plot NA Image.
            logger.error(e);
            res.sendRedirect("images/plots_na.png");
        } finally {
            // Always release the Rserve connection, success or failure.
            if (c != null) {
                c.close();
            }
        }
    }
}
package org.tltv.gantt.client; import static org.tltv.gantt.client.SvgUtil.createSVGElementNS; import static org.tltv.gantt.client.SvgUtil.setAttributeNS; import com.google.gwt.dom.client.Element; /** * Background Grid SVG implementation. */ public class BgGridSvgElement extends BgGridCssElement implements BgGridElement { public static final String STYLE_BG_GRID = "bg-grid"; private Element svgElement; private Element content; private Element pattern; private Element path; private double gridBlockWidthPx; private double gridBlockHeightPx; @Override public void init(Element container, Element content) { this.content = content; svgElement = createSVGElementNS("svg"); setAttributeNS(svgElement, "width", "110%"); setAttributeNS(svgElement, "height", "110%"); setAttributeNS(svgElement, "preserveAspectRatio", "none"); Element defs = createSVGElementNS("defs"); svgElement.appendChild(defs); pattern = createSVGElementNS("pattern"); setAttributeNS(pattern, "id", "bggrid-pattern"); setAttributeNS(pattern, "patternUnits", "userSpaceOnUse"); setAttributeNS(pattern, "x", "0"); setAttributeNS(pattern, "y", "0"); defs.appendChild(pattern); Element rect = createSVGElementNS("rect"); setAttributeNS(rect, "width", "100%"); setAttributeNS(rect, "height", "100%"); setAttributeNS(rect, "fill", "#ffffff"); pattern.appendChild(rect); path = createSVGElementNS("path"); setAttributeNS(path, "shape-rendering", "crispEdges"); setAttributeNS(path, "fill-opacity", "0"); setAttributeNS(path, "stroke-width", "1"); setAttributeNS(path, "stroke", "#cccccc"); pattern.appendChild(path); Element rect2 = createSVGElementNS("rect"); setAttributeNS(rect2, "width", "100%"); setAttributeNS(rect2, "height", "100%"); setAttributeNS(rect2, "fill", "url(#bggrid-pattern)"); svgElement.appendChild(rect2); setAttributeNS(svgElement, "class", STYLE_BG_GRID); appendToContainer(svgElement); } private void appendToContainer(Element svgElement) { content.insertFirst(svgElement); } @Override public void hide() { 
svgElement.removeFromParent(); } @Override public void setBackgroundSize(String gridBlockWidth, double gridBlockWidthPx, int gridBlockHeightPx) { this.gridBlockWidthPx = gridBlockWidthPx; this.gridBlockHeightPx = gridBlockHeightPx; // set background to cover the whole content area. setAttributeNS(svgElement, "width", content.getClientWidth() + gridBlockWidthPx + "px"); setAttributeNS(svgElement, "height", content.getClientHeight() + gridBlockHeightPx + "px"); setAttributeNS(pattern, "width", "" + gridBlockWidthPx); setAttributeNS(pattern, "height", "" + gridBlockHeightPx); double y = gridBlockHeightPx - 1; if (y < 0) { y = 0; } double x = gridBlockWidthPx - 1; if (x < 0) { x = 0; } setAttributeNS(path, "d", "M0," + y + " h" + x + " v0 h0 v-" + gridBlockHeightPx); } @Override public void setBackgroundPosition(String offsetX, String offsetY, double offsetXPx, double offsetYPx) { double offX = 0; if (offsetXPx == 0 || offsetXPx == gridBlockWidthPx) { offX = 0; } else { offX = -(gridBlockWidthPx - offsetXPx - 1); } setAttributeNS(svgElement, "style", "margin-left: " + offX + "px;" + "margin-top: " + -(gridBlockHeightPx - offsetYPx) + "px;"); } @Override public boolean isAttached() { return svgElement.hasParentElement(); } @Override public boolean equals(Element element) { if (element == null) { return false; } return svgElement.equals(element); } @Override public Element getElement() { return svgElement; } }
package org.pentaho.ui.xul.gwt.tags;

import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.FileUpload;
import com.google.gwt.user.client.ui.FormHandler;
import com.google.gwt.user.client.ui.FormPanel;
import com.google.gwt.user.client.ui.FormSubmitCompleteEvent;
import com.google.gwt.user.client.ui.FormSubmitEvent;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.google.gwt.user.client.ui.Hidden;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
import org.pentaho.gwt.widgets.client.utils.StringUtils;
import org.pentaho.mantle.client.csrf.CsrfUtil;
import org.pentaho.mantle.client.csrf.JsCsrfToken;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulException;
import org.pentaho.ui.xul.components.XulFileUpload;
import org.pentaho.ui.xul.dom.Element;
import org.pentaho.ui.xul.gwt.AbstractGwtXulContainer;
import org.pentaho.ui.xul.gwt.GwtXulHandler;
import org.pentaho.ui.xul.gwt.GwtXulParser;
import org.pentaho.ui.xul.gwt.tags.util.LabelWidget;
import org.pentaho.ui.xul.stereotype.Bindable;

import java.util.HashMap;
import java.util.Map;

/**
 * GWT implementation of the XUL {@code pen:fileupload} element: a styled
 * file-chooser backed by a multipart {@link FormPanel} POST, with optional
 * CSRF token support and success/failure callbacks dispatched through the
 * XUL container.
 */
public class GwtFileUpload extends AbstractGwtXulContainer implements XulFileUpload {

  /**
   * The name of the CSRF token field to use when CSRF protection is disabled.
   * <p>
   * An arbitrary name, yet different from the name it can have when CSRF protection is enabled.
   * This avoids having to dynamically add and remove the field from the form depending
   * on whether CSRF protection is enabled or not.
   * <p>
   * When CSRF protection is enabled,
   * the actual name of the field is set before each submit.
   */
  private static final String DISABLED_CSRF_TOKEN_PARAMETER = "csrf_token_disabled";

  // XUL method names invoked on upload success/failure.
  private String uploadSuccessMethod, uploadFailureMethod;
  // Marker searched for in the server response to detect a failed upload.
  public static final String ERROR = ".ERROR_";
  private FormPanel uploadForm = null;
  private FileUpload upload = null;
  private VerticalPanel uploadPanel;
  private VerticalPanel mainPanel;
  private HTMLPanel hiddenPanel;
  private GwtLabel uploadTextBox;
  private String action;
  // Extra query-string parameters appended to the action URL on submit.
  private Map<String, String> parameters = null;
  private static final String ELEMENT_NAME = "pen:fileupload";

  /**
   * The CSRF token field/parameter.
   * Its name and value are set to the expected values before each submit,
   * to match the obtained {@link JsCsrfToken}.
   * <p>
   * The Tomcat's context must have the `allowCasualMultipartParsing` attribute set
   * so that the `CsrfGateFilter` is able to transparently read this parameter
   * in a multi-part encoding form, as is the case of `form`.
   */
  private Hidden csrfTokenParameter;

  /** Registers this widget with the XUL parser under {@link #ELEMENT_NAME}. */
  public static void register() {
    GwtXulParser.registerHandler( ELEMENT_NAME, new GwtXulHandler() {
      public Element newInstance() {
        return new GwtFileUpload();
      }
    } );
  }

  public GwtFileUpload() {
    super( ELEMENT_NAME );
    setManagedObject( new VerticalPanel() );
    this.parameters = new HashMap<>();
  }

  /**
   * Resolves the {@code action} attribute against the module base URL,
   * consuming each leading {@code ../} segment by dropping one trailing path
   * segment from the base URL.
   */
  private String buildActionUrl( String moduleBaseUrl, String anAction ) {
    String url = moduleBaseUrl;
    while ( anAction.contains( "../" ) && url.lastIndexOf( "/" ) > -1 ) {
      url = url.substring( 0, url.lastIndexOf( "/" ) );
      anAction = anAction.substring( 3 );
    }
    url += "/" + anAction;
    return url;
  }

  /**
   * Builds the upload form from the XUL element attributes: a multipart POST
   * form containing the (hidden) native file input, a styled label/button,
   * and the CSRF token field, plus form handlers that dispatch the
   * configured success/failure XUL callbacks.
   */
  @SuppressWarnings( "deprecation" )
  public void init( com.google.gwt.xml.client.Element srcEle, XulDomContainer container ) {
    mainPanel = new VerticalPanel();
    setManagedObject( mainPanel );
    super.init( srcEle, container );
    if ( !StringUtils.isEmpty( srcEle.getAttribute( "action" ) ) ) {
      setAction( buildActionUrl( GWT.getModuleBaseURL(), srcEle.getAttribute( "action" ) ) );
    }
    if ( !StringUtils.isEmpty( srcEle.getAttribute( "onuploadsuccess" ) ) ) {
      setOnUploadSuccess( srcEle.getAttribute( "onuploadsuccess" ) );
    }
    if ( !StringUtils.isEmpty( srcEle.getAttribute( "onuploadfailure" ) ) ) {
      setOnUploadFailure( srcEle.getAttribute( "onuploadfailure" ) );
    }
    uploadForm = new FormPanel();
    // Multipart POST is required for file content.
    uploadForm.setEncoding( FormPanel.ENCODING_MULTIPART );
    uploadForm.setMethod( FormPanel.METHOD_POST );
    uploadForm.setHeight( getHeight() + "px" );
    uploadForm.setWidth( getWidth() + "px" );

    // Create a panel to hold all of the form widgets.
    HorizontalPanel panel = new HorizontalPanel();
    uploadForm.setWidget( panel );
    uploadForm.setVisible( true );

    // Create a FileUpload widget.
    upload = createFileUpload();
    uploadPanel = new VerticalPanel();

    // -- upload styling -- //
    String uploadButtonImage = srcEle.getAttribute( "image" );
    String uploadButtonDisabledImage = srcEle.getAttribute( "disabledimage" );
    hiddenPanel = new HTMLPanel( "<div id='hidden_div' class='gwt_file_upload_hidden_div'></div>" );
    csrfTokenParameter = new Hidden( DISABLED_CSRF_TOKEN_PARAMETER );
    uploadTextBox = new GwtLabel();
    uploadTextBox.setId( "gwt_FileUpload_uploadTextBox" );
    GwtButton uploadButton = new GwtButton();
    uploadButton.setId( "gwt_FileUpload_uploadButton" );
    uploadButton.setHeight( 22 );
    final LabelWidget label = new LabelWidget( "uploadFormElement" );
    label.setStyleName( "gwt_file_upload_label" );
    // If "image" attribute has been defined in the fileupload control do not display the file textfield AND do not
    // set the button label.
    if ( StringUtils.isEmpty( uploadButtonImage ) ) {
      uploadButton.setLabel( "..." );
      final Widget labelWidget = (Widget) uploadTextBox.getManagedObject();
      label.add( labelWidget );
      uploadTextBox.layout();
      labelWidget.setHeight( getHeight() + "px" );
      // Leave room for the "..." button next to the filename text.
      labelWidget.setWidth( ( getWidth() - 55 ) + "px" );
      DOM.setStyleAttribute( labelWidget.getElement(), "lineHeight", getHeight() + "px" );
    } else {
      uploadButton.setImage( uploadButtonImage );
      uploadButton.setDisabledImage( uploadButtonDisabledImage );
    }
    label.add( (Widget) uploadButton.getManagedObject() );
    uploadButton.layout();
    hiddenPanel.add( upload, "hidden_div" );
    hiddenPanel.add( label, "hidden_div" );
    hiddenPanel.add( csrfTokenParameter );
    // -- upload styling -- //

    uploadPanel.add( hiddenPanel );
    panel.add( uploadPanel );
    mainPanel.add( uploadForm );
    if ( getHeight() >= 0 ) {
      mainPanel.setHeight( getHeight() + "px" );
    }
    if ( getWidth() >= 0 ) {
      mainPanel.setWidth( getWidth() + "px" );
    }
    uploadForm.addFormHandler( new FormHandler() {
      public void onSubmit( FormSubmitEvent event ) {
        // NOTE(review): GWT's FileUpload.getFilename() typically returns ""
        // rather than null when nothing is selected — confirm this guard
        // actually fires.
        if ( upload.getFilename() == null ) {
          try {
            GwtFileUpload.this.getXulDomContainer().invoke( getOnUploadFailure(),
                new Object[] { new Throwable( "No file has been selected. Please select the file to upload" ) } );
          } catch ( XulException e ) {
            // TODO Auto-generated catch block
            e.printStackTrace();
          }
        }
      }

      public void onSubmitComplete( FormSubmitCompleteEvent event ) {
        String results = event.getResults();
        try {
          // The server signals failure by embedding the ERROR marker in the
          // response body; strip any markup before forwarding messages.
          if ( results != null && results.indexOf( ERROR ) >= 0 ) {
            if ( results.indexOf( ERROR ) + ERROR.length() < results.length() ) {
              String result = results.replaceAll( "\\<.*?>", "" );
              GwtFileUpload.this.getXulDomContainer().invoke( getOnUploadFailure(),
                  new Object[] { new Throwable( result ) } );
            }
          } else {
            if ( results != null ) {
              String result = results.replaceAll( "\\<.*?>", "" );
              GwtFileUpload.this.getXulDomContainer().invoke( getOnUploadSuccess(), new Object[] { result } );
            } else {
              GwtFileUpload.this.getXulDomContainer().invoke( getOnUploadFailure(),
                  new Object[] { new Throwable( "Unable to find upload service or "
                      + "Upload service returned nothing" ) } );
            }
          }
        } catch ( XulException xule ) {
          xule.printStackTrace();
        }
      }
    } );
    uploadForm.setWidth( "100%" );
  }

  /**
   * Creates the native file input; selecting a file updates the displayed
   * filename via {@link #setSelectedFile(String)}.
   */
  private FileUpload createFileUpload() {
    final FileUpload upload = new FileUpload();
    upload.setStylePrimaryName( "gwt-StyledFileUpload" );
    upload.setName( "uploadFormElement" );
    upload.getElement().setId( "uploadFormElement" );
    upload.setVisible( true );
    upload.setHeight( getHeight() + "px" );
    upload.setWidth( getWidth() + "px" );
    upload.addChangeHandler( new ChangeHandler() {
      public void onChange( ChangeEvent event ) {
        setSelectedFile( upload.getFilename() );
      }
    } );
    return upload;
  }

  public String getAction() {
    return action;
  }

  /**
   * Returns the action URL with the configured parameters appended as a
   * query string, or the plain action URL when there are no parameters.
   * <p>
   * Fix: the previous implementation always appended {@code "?"} and then
   * stripped the string at the last {@code "&"}; with an empty parameter map
   * that was {@code substring(0, -1)}, which threw and turned a valid
   * no-parameter submit into a failure callback (and truncated any action
   * URL that itself contained {@code "&"}).
   *
   * @return the full URL, or {@code null} when building it failed (the
   *         failure callback is invoked in that case).
   */
  private String processParameters() {
    // TODO the URL being returned should be encoded to UTF-8
    String fullUrl = null;
    try {
      try {
        StringBuilder buffer = new StringBuilder( this.action );
        String separator = "?";
        for ( Map.Entry<String, String> entry : this.parameters.entrySet() ) {
          buffer.append( separator );
          buffer.append( entry.getKey() );
          buffer.append( "=" );
          buffer.append( entry.getValue() );
          separator = "&";
        }
        fullUrl = buffer.toString();
      } catch ( Exception e ) {
        GwtFileUpload.this.getXulDomContainer().invoke( getOnUploadFailure(), new Object[] { new Throwable( e ) } );
      }
    } catch ( XulException xule ) {
      xule.printStackTrace();
    }
    return fullUrl;
  }

  /**
   * Obtains a CSRF token for the form's current URL and
   * fills it in the form's token parameter hidden field.
   */
  private void setupCsrfToken() {
    JsCsrfToken token = CsrfUtil.getCsrfTokenSync( uploadForm.getAction() );
    if ( token != null ) {
      csrfTokenParameter.setName( token.getParameter() );
      csrfTokenParameter.setValue( token.getToken() );
    } else {
      // Reset the field.
      csrfTokenParameter.setName( DISABLED_CSRF_TOKEN_PARAMETER );
      csrfTokenParameter.setValue( "" );
    }
  }

  public String getOnUploadFailure() {
    return uploadFailureMethod;
  }

  public String getOnUploadSuccess() {
    return uploadSuccessMethod;
  }

  public void setAction( String action ) {
    this.action = action;
  }

  public void setOnUploadFailure( String method ) {
    this.uploadFailureMethod = method;
  }

  public void setOnUploadSuccess( String method ) {
    this.uploadSuccessMethod = method;
  }

  // Note: the method name's typo ("Seleted") is part of the XulFileUpload
  // interface contract and must not be corrected here.
  @Bindable
  public String getSeletedFile() {
    return upload.getFilename();
  }

  /**
   * Updates the displayed filename. Clearing the selection recreates the
   * native file input, since its value cannot be reset programmatically.
   */
  @Bindable
  public void setSelectedFile( String name ) {
    if ( name == null || name.length() <= 0 ) {
      hiddenPanel.remove( upload );
      upload = createFileUpload();
      hiddenPanel.add( upload, "hidden_div" );
    }
    uploadTextBox.setValue( name );
    firePropertyChange( "selectedFile", null, name );
  }

  /** Submits the form to the action URL with parameters and CSRF token set. */
  public void submit() {
    uploadForm.setAction( processParameters() );
    setupCsrfToken();
    uploadForm.submit();
  }

  public void addParameter( String name, String value ) {
    this.parameters.put( name, value );
  }
}
// Created : 2006 Jun 14 (Wed) 18:29:38 by Harold Carr. // Last Modified : 2006 Aug 14 (Mon) 22:21:15 by Harold Carr. package com.differentity.client; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.ClickListener; import com.google.gwt.user.client.ui.HorizontalPanel; import com.google.gwt.user.client.ui.TextBox; import com.google.gwt.user.client.ui.Widget; public class QueryPanel { private final HorizontalPanel horizontalPanel; private final TextBox subjectTextBox; private final TextBox verbTextBox; private final TextBox objectTextBox; private final Button subjectResetButton; private final Button verbResetButton; private final Button objectResetButton; QueryPanel() { subjectTextBox = new TextBox(); verbTextBox = new TextBox(); objectTextBox = new TextBox(); subjectResetButton = new Button("*"); verbResetButton = new Button("*"); objectResetButton = new Button("*"); subjectResetButton.addClickListener(new ClickListener() { public void onClick(Widget sender) { subjectTextBox.setText(""); Main.getMainPanel().doQuery(); } }); verbResetButton.addClickListener(new ClickListener() { public void onClick(Widget sender) { verbTextBox.setText(""); Main.getMainPanel().doQuery(); } }); objectResetButton.addClickListener(new ClickListener() { public void onClick(Widget sender) { objectTextBox.setText(""); Main.getMainPanel().doQuery(); } }); horizontalPanel = new HorizontalPanel(); horizontalPanel.add(subjectResetButton); horizontalPanel.add(subjectTextBox); horizontalPanel.add(verbResetButton); horizontalPanel.add(verbTextBox); horizontalPanel.add(objectResetButton); horizontalPanel.add(objectTextBox); } TextBox getSubjectTextBox() { return subjectTextBox; } TextBox getVerbTextBox () { return verbTextBox; } TextBox getObjectTextBox () { return objectTextBox; } HorizontalPanel getHorizontalPanel() { return horizontalPanel; } } // End of file.
package algorithms.convexhullvg;

import java.util.Arrays;

import grid.GridGraph;

import algorithms.PathFindingAlgorithm;
import algorithms.datatypes.Memory;
import algorithms.rotationalplanesweep.ConvexHullRPSScanner;
import algorithms.priorityqueue.ReusableIndirectHeap;

/**
 * A* search over a convex-hull visibility graph.
 * <p>
 * Builds convex hulls of the obstacles, then runs A* from a virtual start
 * node to a virtual goal node, expanding only successors that pass a
 * line-of-sight check. Distances/parents/visited flags are kept in the
 * shared {@link Memory} arrays, indexed by hull-vertex index, with the two
 * extra indices {@code start} and {@code finish} appended at the end.
 */
public class ConvexHullVGAlgorithm extends PathFindingAlgorithm {

    private ConvexHullVG convexHullGraph;
    private ReusableIndirectHeap pq;
    // Virtual node indices for the start and goal positions (they are not
    // hull vertices), placed just past the hull-vertex index range.
    private int start;
    private int finish;

    public ConvexHullVGAlgorithm(GridGraph graph, int sx, int sy, int ex, int ey) {
        super(graph, graph.sizeX, graph.sizeY, sx, sy, ex, ey);
    }

    @Override
    public void computePath() {
        // 1. Generate convex hulls
        convexHullGraph = new ConvexHullVG(graph);
        if (isRecording()) convexHullGraph.setSnapshotAction(() -> generateConvexHullSnapshot());
        convexHullGraph.initialise(sx, sy, ex, ey);

        int size = convexHullGraph.size();
        int memorySize = size + 2;
        start = size;
        finish = size + 1;

        pq = new ReusableIndirectHeap(size, memorySize);
        this.initialiseMemory(memorySize, Float.POSITIVE_INFINITY, -1, false);

        // 2. Search
        if (graph.lineOfSight(sx, sy, ex, ey)) {
            // There is a direct path from (sx, sy) to (ex, ey).
            if (sx != ex || sy != ey) {
                setParent(finish, start);
            }
            return;
        }

        // Initialise Start
        setVisited(start, true);
        setDistance(start, 0);
        expand(start, sx, sy);

        while (!pq.isEmpty()) {
            int current = pq.popMinIndex();
            setVisited(current, true);

            if (current == finish) {
                return;
            }

            int currX = convexHullGraph.getX(current);
            int currY = convexHullGraph.getY(current);

            expand(current, currX, currY);
        }
    }

    /**
     * Relaxes all visible hull-vertex successors of the current node, and the
     * goal node when it is directly visible.
     */
    private final void expand(int currIndex, int currX, int currY) {
        // find neighbours
        ConvexHullRPSScanner scanner = convexHullGraph.computeAllVisibleSuccessors(currX, currY);

        if (isRecording()) addSnapshot(scanner.snapshotLines());

        int nNeighbours = scanner.nSuccessors;
        for (int i = 0; i < nNeighbours; ++i) {
            int succX = scanner.successorsX[i];
            int succY = scanner.successorsY[i];
            // The scanner may report candidates that are not actually
            // reachable; re-check line of sight before relaxing.
            if (!graph.lineOfSight(currX, currY, succX, succY)) continue;

            int succ = convexHullGraph.indexOf(succX, succY); // index of successor

            float newWeight = distance(currIndex) + graph.distance(currX, currY, succX, succY);
            if (newWeight < distance(succ)) {
                setDistance(succ, newWeight);
                setParent(succ, currIndex);
                pq.decreaseKey(succ, newWeight + heuristic(succX, succY));
                maybeSaveSearchSnapshot();
            }
        }

        // Check if the goal node is a successor
        if (graph.lineOfSight(currX, currY, ex, ey)) {
            float newWeight = distance(currIndex) + graph.distance(currX, currY, ex, ey);
            if (newWeight < distance(finish)) {
                setDistance(finish, newWeight);
                setParent(finish, currIndex);
                pq.decreaseKey(finish, newWeight + heuristic(ex, ey));
                maybeSaveSearchSnapshot();
            }
        }
    }

    // Straight-line-distance (SLD) heuristic to the goal.
    private final float heuristic(int nx, int ny) {
        return graph.distance(nx, ny, ex, ey);
    }

    // Number of nodes on the found path (counted by following parent
    // pointers from the goal back to the start).
    private int pathLength() {
        int length = 0;
        int current = finish;
        while (current != -1) {
            current = parent(current);
            length++;
        }
        return length;
    }

    /**
     * Reconstructs the path as an array of {x, y} pairs from start to goal by
     * walking parent pointers backwards and filling the array from the end.
     * <p>
     * Fix: the backward fill was missing the {@code index--} decrement
     * (garbled as {@code index current = parent(current);}), which would have
     * overwritten the last slot on every iteration and never compiled.
     */
    @Override
    public int[][] getPath() {
        int length = pathLength();
        int[][] path = new int[length][];
        int current = finish;
        int index = length - 1;
        while (current != -1) {
            int x, y;
            if (current == start) {
                x = sx;
                y = sy;
            } else if (current == finish) {
                x = ex;
                y = ey;
            } else {
                x = convexHullGraph.getX(current);
                y = convexHullGraph.getY(current);
            }

            path[index] = new int[2];
            path[index][0] = x;
            path[index][1] = y;

            index--;
            current = parent(current);
        }

        return path;
    }

    // NOTE(review): not implemented; always reports 0 — confirm callers do
    // not rely on the actual length.
    @Override
    public float getPathLength() {
        return 0;
    }

    // Thin accessors over the shared search Memory arrays.
    private final int parent(int index) {
        return Memory.parent(index);
    }

    private final void setParent(int index, int value) {
        Memory.setParent(index, value);
    }

    private final float distance(int index) {
        return Memory.distance(index);
    }

    private final void setDistance(int index, float value) {
        Memory.setDistance(index, value);
    }

    private final boolean visited(int index) {
        return Memory.visited(index);
    }

    private final void setVisited(int index, boolean value) {
        Memory.setVisited(index, value);
    }

    private final void generateConvexHullSnapshot() {
        addSnapshot(convexHullGraph.generateConvexHullSnapshot());
    }

    @Override
    protected int goalParentIndex() {
        return finish;
    }

    @Override
    protected Integer[] snapshotEdge(int endIndex) {
        Integer[] edge = new Integer[4];
        int startIndex = parent(endIndex);
        edge[0] = getX(startIndex);
        edge[1] = getY(startIndex);
        edge[2] = getX(endIndex);
        edge[3] = getY(endIndex);
        return edge;
    }

    @Override
    protected Integer[] snapshotVertex(int index) {
        if (selected(index)) {
            Integer[] edge = new Integer[2];
            edge[0] = getX(index);
            edge[1] = getY(index);
            return edge;
        }
        return null;
    }

    // Maps a node index to grid coordinates, handling the two virtual nodes.
    private final int getX(int index) {
        if (index == start) return sx;
        else if (index == finish) return ex;
        return convexHullGraph.getX(index);
    }

    private final int getY(int index) {
        if (index == start) return sy;
        else if (index == finish) return ey;
        return convexHullGraph.getY(index);
    }
}
package org.hamcrest.core; import static org.hamcrest.core.StringEndsWith.endsWith; import org.hamcrest.AbstractMatcherTest; import org.hamcrest.Matcher; public class StringEndsWithTest extends AbstractMatcherTest { static final String EXCERPT = "EXCERPT"; final Matcher<String> stringEndsWith = endsWith(EXCERPT); @Override protected Matcher<?> createMatcher() { return stringEndsWith; } public void testMatchesSubstringAtEnd() { assertDoesNotMatch(stringEndsWith, EXCERPT + "END"); assertMatches(stringEndsWith, "START" + EXCERPT); assertMatches(stringEndsWith, EXCERPT); assertDoesNotMatch(stringEndsWith, "START" + EXCERPT + "END"); assertMatches(stringEndsWith, EXCERPT + EXCERPT); assertDoesNotMatch(stringEndsWith, "EXCER"); assertMismatchDescription("was \"Something else\"", stringEndsWith, "Something else"); } public void testHasAReadableDescription() { assertDescription("a string ending with \"EXCERPT\"", stringEndsWith); } }
package com.networknt.handler;

import io.undertow.server.HttpServerExchange;

/**
 * Entry-point handler that starts the configured handler chain for the
 * incoming exchange, or reports ERR10048 when no chain matches the
 * request's method and path.
 *
 * @author Nicholas Azar
 */
public class OrchestrationHandler implements LightHttpHandler {

    @Override
    public void handleRequest(HttpServerExchange exchange) throws Exception {
        // No chain configured for this method/path: report the error status.
        if (!Handler.start(exchange)) {
            setExchangeStatus(exchange, "ERR10048",
                    String.format("%s %s", exchange.getRequestMethod(), exchange.getRequestPath()));
            return;
        }
        // A chain matched; hand the exchange to its first handler.
        Handler.next(exchange);
    }
}
package io.jxcore.node;

import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothSocket;
import android.content.Context;
import android.os.Handler;
import android.util.Log;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.thaliproject.p2p.btconnectorlib.BTConnector;
import org.thaliproject.p2p.btconnectorlib.BTConnectorSettings;
import org.thaliproject.p2p.btconnectorlib.ServiceItem;

import java.io.IOException;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Glue between the Thali Bluetooth connector library and the JXcore
 * JavaScript layer: manages discovery, incoming/outgoing Bluetooth socket
 * threads, and forwards peer-availability and connection events to JS.
 */
public class BtConnectorHelper implements BTConnector.Callback, BTConnector.ConnectSelector {

    private final Context context;

    private final String serviceTypeIdentifier = "Cordovap2p._tcp";
    private final String BtUUID = "fa87c0d0-afac-11de-8a39-0800200c9a66";
    private final String Bt_NAME = "Thaili_Bluetooth";

    // Peers seen so far; copy-on-write so callback threads can iterate safely.
    private final CopyOnWriteArrayList<ServiceItem> lastAvailableList = new CopyOnWriteArrayList<ServiceItem>();

    private final BTConnectorSettings conSettings;
    private BTConnector mBTConnector = null;

    // Live incoming (server) and outgoing (request) socket threads.
    private final CopyOnWriteArrayList<BtToServerSocket> mServerSocketList = new CopyOnWriteArrayList<BtToServerSocket>();
    private final CopyOnWriteArrayList<BtToRequestSocket> mRequestSocketList = new CopyOnWriteArrayList<BtToRequestSocket>();

    // Local node.js server port that incoming connections are bridged to.
    private int mServerPort = 0;

    // implementation which forwards any uncaught exception from threads to the Jxcore
    final Thread.UncaughtExceptionHandler mThreadUncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(Thread thread, Throwable ex) {
            final Throwable tmpException = ex;
            // Re-throw on the main looper so the exception surfaces to JXcore.
            new Handler(jxcore.activity.getMainLooper()).post(new Runnable() {
                @Override
                public void run() {
                    throw new RuntimeException(tmpException);
                }
            });
        }
    };

    public BtConnectorHelper() {
        conSettings = new BTConnectorSettings();
        conSettings.SERVICE_TYPE = serviceTypeIdentifier;
        conSettings.MY_UUID = UUID.fromString(BtUUID);
        conSettings.MY_NAME = Bt_NAME;
        this.context = jxcore.activity.getBaseContext();
    }

    /**
     * (Re)starts broadcasting and discovery. Any peers cached from a previous
     * session are immediately re-announced to JS as available.
     *
     * @param peerName name advertised to other peers
     * @param port     local server port incoming connections bridge to
     * @return the Wifi/Bluetooth hardware status reported by the connector
     */
    public BTConnector.WifiBtStatus Start(String peerName, int port) {
        this.mServerPort = port;
        // this.lastAvailableList.clear();
        Stop();
        BTConnector tmpCon = new BTConnector(context, this, this, conSettings);
        BTConnector.WifiBtStatus ret = tmpCon.Start(GetBluetoothAddress(), peerName);
        mBTConnector = tmpCon;

        if (lastAvailableList.size() > 0) {
            JSONArray jsonArray = new JSONArray();
            for (ServiceItem item : lastAvailableList) {
                jsonArray.put(getAvailabilityStatus(item, true));
            }
            jxcore.CallJSMethod(JXcoreExtension.EVENTSTRING_PEERAVAILABILITY, jsonArray.toString());
        }
        return ret;
    }

    public boolean isRunning() {
        return mBTConnector != null;
    }

    /** Stops discovery/broadcasting and tears down all socket threads. */
    public void Stop() {
        BTConnector tmpCon = mBTConnector;
        mBTConnector = null;
        if (tmpCon != null) {
            tmpCon.Stop();
        }

        //disconnect all incoming connections
        DisconnectIncomingConnections();

        // disconnect outgoing connection
        DisconnectAll();
    }

    // we only cut off our outgoing connections, incoming ones are cut off from the other end.
    // if we want to cut off whole communications, we'll do Stop
    public boolean Disconnect(String peerId) {
        for (BtToRequestSocket rSocket : mRequestSocketList) {
            if (rSocket != null) {
                String currentPeerId = rSocket.GetPeerId();
                if (peerId.equalsIgnoreCase(currentPeerId)) {
                    mRequestSocketList.remove(rSocket);
                    Log.i("BtConnectorHelper", "Disconnect outgoing peer: " + currentPeerId);
                    rSocket.Stop();
                    return true;
                }
            }
        }
        return false;
    }

    // Stops and clears every outgoing (request) socket thread.
    private void DisconnectAll() {
        for (BtToRequestSocket rSocket : mRequestSocketList) {
            if (rSocket != null) {
                mRequestSocketList.remove(rSocket);
                Log.i("BtConnectorHelper", "Disconnect:::Stop : BtToRequestSocket :" + rSocket.getName());
                rSocket.Stop();
            }
        }
        mRequestSocketList.clear();
    }

    //function to disconnect all incoming connections
    // should only be used internally, i.e. should be private
    // but for testing time, this is marked as public, so we can simulate 'peer disappearing'
    // by cutting off the connection from the remote party
    public boolean DisconnectIncomingConnections() {
        boolean ret = false;
        for (BtToServerSocket rSocket : mServerSocketList) {
            if (rSocket != null) {
                Log.i("BtConnectorHelper", "Disconnect:::Stop : mBtToServerSocket :" + rSocket.getName());
                rSocket.Stop();
                ret = true;
            }
        }
        mServerSocketList.clear();
        return ret;
    }

    // Returns the local Bluetooth MAC address, or "" when no adapter exists.
    private String GetBluetoothAddress() {
        BluetoothAdapter bluetooth = BluetoothAdapter.getDefaultAdapter();
        return bluetooth == null ? "" : bluetooth.getAddress();
    }

    // Callback for the currently-progressing outgoing connection attempt.
    private ConnectStatusCallback mConnectStatusCallback = null;

    public interface ConnectStatusCallback {
        void ConnectionStatusUpdate(String Error, int port);
    }

    /**
     * Starts an outgoing connection attempt to a previously discovered peer.
     * Progress is reported exclusively through the given callback: a
     * non-null error string and port -1 on failure, or a null error and the
     * local bridge port on success.
     */
    public void BeginConnectPeer(final String toPeerId, ConnectStatusCallback connectStatusCallback) {

        if (connectStatusCallback == null) {
            //nothing we should do, since we can not update progress
            throw new RuntimeException("BeginConnectPeer callback is NULL !!!!!!");
        }

        //todo what should we have here for the actual value ?
        if (mRequestSocketList.size() > 100) {
            connectStatusCallback.ConnectionStatusUpdate("Maximum peer connections reached, please try again after disconnecting a peer. Connected to " + mRequestSocketList.size() + " peers.", -1);
            return;
        }

        ServiceItem selectedDevice = null;
        for (ServiceItem item : lastAvailableList) {
            if (item != null && item.peerId.contentEquals(toPeerId)) {
                selectedDevice = item;
                break;
            }
        }

        if (selectedDevice == null) {
            connectStatusCallback.ConnectionStatusUpdate("Device Address for " + toPeerId + " not found from Discovered device list.", -1);
            return;
        }

        BTConnector tmpConn = mBTConnector;
        if (tmpConn == null) {
            connectStatusCallback.ConnectionStatusUpdate("Device connectivity not started, please call StartBroadcasting before attempting to connect", -1);
            return;
        }

        switch (tmpConn.TryConnect(selectedDevice)) {
            case Connecting: {
                //all is ok, lets wait callbacks, and for them lets copy the callback here
                mConnectStatusCallback = connectStatusCallback;
            }
            break;
            case NoSelectedDevice: {
                // we do check this already, thus we should not get this ever.
                connectStatusCallback.ConnectionStatusUpdate("Device Address for " + toPeerId + " not found from Discovered device list.", -1);
            }
            break;
            case AlreadyAttemptingToConnect: {
                connectStatusCallback.ConnectionStatusUpdate("There is already one connection attempt progressing.", -1);
            }
            break;
            case BTDeviceFetchFailed: {
                connectStatusCallback.ConnectionStatusUpdate("Bluetooth API failed to get Bluetooth device for the address : " + selectedDevice.peerAddress, -1);
            }
            break;
            default:
                throw new RuntimeException("Invalid value returned for BTConnector.TryConnectReturnValues with TryConnect");
        }
    }

    //this is always called in context of thread that created instance of the library
    @Override
    public void Connected(BluetoothSocket bluetoothSocket, boolean incoming, String peerId, String peerName, String peerAddress) {

        if (bluetoothSocket == null) {
            return;
        }

        // this is here, so if we have not found the incoming peer via Discovery, we'll get it
        // added to the discovery list, and we can connect back to it.
        AddPeerIfNotDiscovered(bluetoothSocket, peerId, peerName, peerAddress);

        Log.i("BtConnectorHelper", "Starting the connected thread incoming : " + incoming + ", " + peerName);

        if (incoming) {
            BtToServerSocket tmpBtToServerSocket = null;
            try {
                tmpBtToServerSocket = new BtToServerSocket(bluetoothSocket, new BtSocketDisconnectedCallBack() {
                    //Called when disconnect event happens, so we can stop & clean everything now.
                    @Override
                    public void Disconnected(Thread who, String Error) {
                        Log.i("BtConnectorHelper", "BT Disconnected with error : " + Error);
                        for (BtToServerSocket rSocket : mServerSocketList) {
                            if (rSocket != null && (rSocket.getId() == who.getId())) {
                                Log.i("BtConnectorHelper", "Disconnect:::Stop : mBtToServerSocket :" + rSocket.GetPeerName());
                                rSocket.Stop();
                                mServerSocketList.remove(rSocket);
                                break;
                            }
                        }
                    }
                });
            } catch (IOException e) {
                Log.i("BtConnectorHelper", "Creating BtToServerSocket failed : " + e.toString());
                return;
            }

            // NOTE(review): setDefaultUncaughtExceptionHandler is a static
            // Thread method; calling it via the instance sets the JVM-wide
            // default handler, not a per-thread one. Possibly
            // setUncaughtExceptionHandler was intended — confirm before
            // changing, since the JS layer may rely on the current behavior.
            tmpBtToServerSocket.setDefaultUncaughtExceptionHandler(mThreadUncaughtExceptionHandler);
            mServerSocketList.add(tmpBtToServerSocket);

            tmpBtToServerSocket.SetIdAddressAndName(peerId, peerName, peerAddress);
            tmpBtToServerSocket.setPort(this.mServerPort);
            tmpBtToServerSocket.start();

            int port = tmpBtToServerSocket.GetLocalHostPort();
            Log.i("BtConnectorHelper", "Server socket is using : " + port + ", and is now connected.");
            return;
        }

        //not incoming, thus its outgoing
        BtToRequestSocket tmpRequestSocket = null;
        try {
            tmpRequestSocket = new BtToRequestSocket(bluetoothSocket, new BtSocketDisconnectedCallBack() {
                //Called when disconnect event happens, so we can stop & clean everything now.
                @Override
                public void Disconnected(Thread who, String Error) {
                    for (BtToRequestSocket rSocket : mRequestSocketList) {
                        if (rSocket != null && (rSocket.getId() == who.getId())) {
                            Log.i("BtConnectorHelper", "Disconnect outgoing peer: " + rSocket.GetPeerName());
                            mRequestSocketList.remove(rSocket);
                            // fire the event in here !!!
                            rSocket.Stop();
                            JSONObject returnJsonObj = new JSONObject();
                            try {
                                returnJsonObj.put(JXcoreExtension.EVENTVALUESTRING_PEERID, rSocket.GetPeerId());
                            } catch (JSONException e) {
                                Log.i("BtConnectorHelper", "JSONException : " + e.toString());
                            }
                            jxcore.CallJSMethod(JXcoreExtension.EVENTSTRING_CONNECTIONERROR, returnJsonObj.toString());
                            break;
                        }
                    }
                }
            }, new BtToRequestSocket.ReadyForIncoming() {
                // there is a good chance on race condition where the node.js gets to do their client socket
                // before we got into the accept line executed, thus this callback takes care that we are ready before node.js is
                @Override
                public void listeningAndAcceptingNow(int port) {
                    final int portTmp = port;
                    Log.i("BtConnectorHelper", "Request socket is using : " + portTmp);
                    // Small delay gives node.js time to create its client socket.
                    new Handler(jxcore.activity.getMainLooper()).postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            ConnectStatusCallback tmpCallBack = mConnectStatusCallback;
                            if (tmpCallBack != null) {
                                Log.i("BtConnectorHelper", "Calling ConnectionStatusUpdate with port :" + portTmp);
                                tmpCallBack.ConnectionStatusUpdate(null, portTmp);
                            }
                        }
                    }, 300);
                }
            });
        } catch (IOException e) {
            Log.i("BtConnectorHelper", "Creating BtToRequestSocket failed : " + e.toString());
            ConnectStatusCallback tmpCallBack = mConnectStatusCallback;
            if (tmpCallBack != null) {
                tmpCallBack.ConnectionStatusUpdate("Creating BtToRequestSocket failed : " + e.toString(), -1);
            }
            return;
        }

        mRequestSocketList.add(tmpRequestSocket);
        // Fix: SetIdAddressAndName was called twice with identical arguments;
        // the redundant duplicate call has been removed.
        tmpRequestSocket.SetIdAddressAndName(peerId, peerName, peerAddress);
        // NOTE(review): see the note above on setDefaultUncaughtExceptionHandler.
        tmpRequestSocket.setDefaultUncaughtExceptionHandler(mThreadUncaughtExceptionHandler);
        tmpRequestSocket.start();
    }

    // if the peer that just made incoming connection has not been discovered yet, we'll ad it here
    // thus allowing us to make connection back to it
    private void AddPeerIfNotDiscovered(BluetoothSocket bluetoothSocket, String peerId, String peerName, String peerAddress) {
        boolean isDiscovered = false;
        for (ServiceItem item : lastAvailableList) {
            if (item != null && item.peerId.contentEquals(peerId)) {
                isDiscovered = true;
                break;
            }
        }
        if (!isDiscovered) {
            // Prefer the address reported by the live socket over the one
            // passed in, when available.
            String BtAddress = peerAddress;
            if (bluetoothSocket != null) {
                if (bluetoothSocket.getRemoteDevice() != null) {
                    BtAddress = bluetoothSocket.getRemoteDevice().getAddress();
                }
            }
            ServiceItem tmpSrv = new ServiceItem(peerId, peerName, BtAddress, "", "", "");
            lastAvailableList.add(tmpSrv);
            JSONArray jsonArray = new JSONArray();
            jsonArray.put(getAvailabilityStatus(tmpSrv, true));
            jxcore.CallJSMethod(JXcoreExtension.EVENTSTRING_PEERAVAILABILITY, jsonArray.toString());
        }
    }

    //this is always called in context of thread that created instance of the library
    @Override
    public void ConnectionFailed(String peerId, String peerName, String peerAddress) {
        ConnectStatusCallback tmpStatBack = mConnectStatusCallback;
        if (tmpStatBack != null) {
            tmpStatBack.ConnectionStatusUpdate("Connection to " + peerId + " failed", -1);
        }
    }

    //this is always called in context of thread that created instance of the library
    @Override
    public void StateChanged(BTConnector.State state) {
        // with this version, we don't use this state information for anything
        switch (state) {
            case Idle:
                break;
            case NotInitialized:
                break;
            case WaitingStateChange:
                break;
            case FindingPeers:
                break;
            case FindingServices:
                break;
            case Connecting:
                break;
            case Connected:
                break;
            default:
                throw new RuntimeException("Invalid value set for BTConnector.State in StateChanged");
        }
    }

    // this is called with a full list of peer-services we see, its takes time to get,
    // since there is time spend between each peer we discover
    // anyway, this list can be used for determining whether the peer we saw earlier has now disappeared
    // will be called null or empty list, if no services are found during some time period.
    //this is always called in context of thread that created instance of the library
    @Override
    public ServiceItem CurrentPeersList(final List<ServiceItem> serviceItems) {
        // Diffing logic intentionally disabled; per-peer availability is
        // reported from PeerDiscovered instead.
/*        Boolean wasPreviouslyAvailable = false;
        JSONArray jsonArray = new JSONArray();
        if (serviceItems != null) {
            for (ServiceItem item : serviceItems) {
                if (item != null) {
                    wasPreviouslyAvailable = false;
                    for (ServiceItem lastItem : lastAvailableList) {
                        if (lastItem != null && item.deviceAddress.equalsIgnoreCase(lastItem.deviceAddress)) {
                            wasPreviouslyAvailable = true;
                            lastAvailableList.remove(lastItem);
                        }
                    }
                    if (!wasPreviouslyAvailable) {
                        jsonArray.put(getAvailabilityStatus(item, true));
                    }
                }
            }
        }
        for (ServiceItem lastItem2 : lastAvailableList) {
            jsonArray.put(getAvailabilityStatus(lastItem2, false));
            lastAvailableList.remove(lastItem2);
        }
        if (serviceItems != null) {
            for (ServiceItem item : serviceItems) {
                if (item != null) {
                    lastAvailableList.add(item);
                }
            }
        }
        // lets not sent any empty arrays up.
        if (jsonArray.toString().length() > 5) {
            jxcore.CallJSMethod(JXcoreExtension.EVENTSTRING_PEERAVAILABILITY, jsonArray.toString());
        }*/
        return null;
    }

    // this is called when we see a peer, so we can inform the app of its availability right when we see it
    //this is always called in context of thread that created instance of the library
    @Override
    public void PeerDiscovered(ServiceItem serviceItem) {
        boolean wasPrevouslyAvailable = false;
        for (ServiceItem lastItem : lastAvailableList) {
            if (lastItem != null && serviceItem.deviceAddress.equalsIgnoreCase(lastItem.deviceAddress)) {
                wasPrevouslyAvailable = true;
            }
        }
        if (!wasPrevouslyAvailable) {
            lastAvailableList.add(serviceItem);
            JSONArray jsonArray = new JSONArray();
            jsonArray.put(getAvailabilityStatus(serviceItem, true));
            jxcore.CallJSMethod(JXcoreExtension.EVENTSTRING_PEERAVAILABILITY, jsonArray.toString());
        }
    }

    // Serializes a peer's id/name/availability into the JSON shape the JS
    // layer expects for peer-availability events.
    private JSONObject getAvailabilityStatus(ServiceItem item, boolean available) {
        JSONObject returnJsonObj = new JSONObject();
        try {
            returnJsonObj.put(JXcoreExtension.EVENTVALUESTRING_PEERID, item.peerId);
            returnJsonObj.put(JXcoreExtension.EVENTVALUESTRING_PEERNAME, item.peerName);
            returnJsonObj.put(JXcoreExtension.EVENTVALUESTRING_PEERAVAILABLE, available);
        } catch (JSONException e) {
            Log.i("BtConnectorHelper", "JSONException : " + e.toString());
        }
        return returnJsonObj;
    }
}
package git4idea.actions; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.project.Project; import git4idea.GitVcs; import git4idea.fetch.GitFetchResult; import git4idea.i18n.GitBundle; import org.jetbrains.annotations.CalledInAwt; import org.jetbrains.annotations.NotNull; import static git4idea.GitUtil.getRepositories; import static git4idea.fetch.GitFetchSupport.fetchSupport; import static git4idea.ui.branch.GitBranchActionsUtilKt.hasRemotes; public class GitFetch extends DumbAwareAction { @Override public void update(@NotNull AnActionEvent e) { super.update(e); Project project = e.getProject(); if (project == null) { e.getPresentation().setEnabledAndVisible(false); } else { e.getPresentation().setEnabled(hasRemotes(project)); } } @Override public void actionPerformed(@NotNull AnActionEvent e) { Project project = e.getRequiredData(CommonDataKeys.PROJECT); GitVcs.runInBackground(new Task.Backgroundable(project, GitBundle.message("fetching"), true) { GitFetchResult result; @Override public void run(@NotNull ProgressIndicator indicator) { result = fetchSupport(project).fetchAllRemotes(getRepositories(project)); } @Override public void onFinished() { if (result != null) { onFetchFinished(result); } } }); } @CalledInAwt protected void onFetchFinished(@NotNull GitFetchResult result) { result.showNotification(); } }
package io.github.ihongs.action.serv;

import io.github.ihongs.Cnst;
import io.github.ihongs.Core;
import io.github.ihongs.HongsExemption;
import io.github.ihongs.action.ActionDriver;
import io.github.ihongs.action.ActionHelper;
import io.github.ihongs.action.ActionRunner;
import io.github.ihongs.action.PasserHelper;
import io.github.ihongs.action.anno.Action;
import io.github.ihongs.action.anno.CustomReplies;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.HashSet;
import java.util.TreeSet;
import java.util.Set;
import java.util.regex.Pattern;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Auto-dispatch filter: routes missing .act actions and missing layout pages
 * to fallback handlers/JSPs discovered under a configured base path.
 *
 * <h3>init-param:</h3>
 * <pre>
 *   action-path  base path for fallback actions (default "/common/auto")
 *   layout-path  base path for fallback layouts (defaults to action-path)
 *   ignore-urls  comma-separated URL patterns to skip, "*" wildcards allowed
 * </pre>
 * <p>
 * NOTE(review): action-path and layout-path are presumably expected to fall
 * inside this filter's url-pattern mapping — confirm against deployment docs.
 * </p>
 *
 * @author Hongs
 */
public class AutoFilter extends ActionDriver {

    private String action;                      // fallback action base path
    private String layout;                      // fallback layout base path
    private PasserHelper ignore = null;         // URL include/exclude matcher
    private Set<String> layset = null;          // lazily-built layout URI cache
    private Set<String> actset = null;          // lazily-built action URI cache, deepest paths first
    private Set<String> cstset = null;          // subset of actset carrying @CustomReplies (forwarded, not included)

    // Direct requests to partial/hidden JSPs ("/_xxx.jsp", "xxx.yyy.jsp") are refused.
    private static final Pattern DENY_JSPS = Pattern.compile("(/_|\\.)[^/]*\\.jsp$");

    @Override
    public void init(FilterConfig cnf) throws ServletException {
        super.init(cnf);

        action = cnf.getInitParameter("action-path");
        layout = cnf.getInitParameter("layout-path");
        if (action == null) {
            action = "/common/auto";
        }
        if (layout == null) {
            layout = action;
        }

        // URLs to skip or force-attend.
        this.ignore = new PasserHelper(
            cnf.getInitParameter("ignore-urls"),
            cnf.getInitParameter("attend-urls")
        );
    }

    @Override
    public void destroy() {
        super.destroy();
        actset = null;
        layset = null;
        cstset = null; // rebuilt together with actset in getacts(); clear it too
    }

    @Override
    public void doFilter(Core core, ActionHelper hlpr, FilterChain chain)
            throws IOException, ServletException {
        HttpServletResponse rsp = hlpr.getResponse();
        HttpServletRequest  req = hlpr.getRequest( );
        String url = ActionDriver.getRecentPath(req);
        String ref = ActionDriver.getOriginPath(req);

        // Explicitly ignored URLs pass straight through.
        if (ignore != null && ignore.ignore(url)) {
            chain.doFilter(req, rsp);
            return;
        }

        // Never serve partial/hidden JSPs directly.
        if (DENY_JSPS.matcher(ref).find()) {
            rsp.sendError(HttpServletResponse.SC_NOT_FOUND, "What's your problem?");
            return;
        }

        if (url.endsWith(Cnst.API_EXT)) {
            // API requests need no rewriting here; the JSP guard above already applied.
        } else
        if (url.endsWith(Cnst.ACT_EXT)) {
            String act;
            String src;
            String uri; // FIX: declaration was commented out but the variable is assigned below
            int    pos;

            try {
                pos = url.lastIndexOf('.');
                act = url.substring(1, pos);
                pos = act.lastIndexOf('/');
                src = act.substring(0, pos);
            } catch (IndexOutOfBoundsException ex) {
                // Malformed action path: let the rest of the chain deal with it.
                chain.doFilter(req, rsp);
                return;
            }

            // A per-directory "__main__.jsp" overrides everything for that directory.
            uri = "/" + src + "/__main__.jsp";
            if (new File(Core.BASE_PATH + uri).exists()) {
                include(req, rsp, url, uri);
                return;
            }

            // Unknown action: map its suffix onto one of the auto actions.
            if (!ActionRunner.getActions().containsKey(act)) {
                for (String axt : getacts()) {
                    if (act.endsWith(axt)) {
                        if (cstset.contains(axt)) {
                            forward(req, rsp, url, action + axt + Cnst.ACT_EXT);
                        } else {
                            include(req, rsp, url, action + axt + Cnst.ACT_EXT);
                        }
                        return;
                    }
                }
            }
        } else {
            // Directory request: try its default.html.
            if (url.endsWith("/")) {
                url = url + "default.html";
            }

            File urf = new File(Core.BASE_PATH + url);
            if (!urf.exists()) {
                boolean jsp = url.endsWith(".jsp" );
                boolean htm = url.endsWith(".htm" )
                           || url.endsWith(".html");
                String  uxl = null;
                if (htm) {
                    int pos = url.lastIndexOf( "." );
                    uxl = url.substring(0 , pos);
                }

                for (String uri : getlays()) {
                    // Exact suffix match against a known layout.
                    if (url.endsWith(uri)) {
                        forward(req, rsp, url, layout + uri);
                        return;
                    }
                    if (jsp) {
                        continue;
                    }
                    if (htm) {
                        // xxx.htm(l) may be served by xxx.jsp
                        if ((uxl + ".jsp").endsWith(uri)) {
                            forward(req, rsp, url, layout + uri);
                            return;
                        }
                    } else {
                        // xxx.yyy may be served by xxx.yyy.jsp
                        if ((url + ".jsp").endsWith(uri)) {
                            forward(req, rsp, url, layout + uri);
                            return;
                        }
                    }
                }
            }
        }

        chain.doFilter(req, rsp);
    }

    /** Dispatches via include, recording the original action name/path as request attributes. */
    private void include(ServletRequest req, ServletResponse rsp, String url, String uri)
            throws ServletException, IOException {
        req.setAttribute(Cnst.ORIGIN_ATTR, Core.ACTION_NAME.get());
        req.setAttribute(Cnst.ACTION_ATTR, url.substring(1));
        req.getRequestDispatcher(uri).include(req, rsp);
    }

    /** Dispatches via forward, recording the original action name/path as request attributes. */
    private void forward(ServletRequest req, ServletResponse rsp, String url, String uri)
            throws ServletException, IOException {
        req.setAttribute(Cnst.ORIGIN_ATTR, Core.ACTION_NAME.get());
        req.setAttribute(Cnst.ACTION_ATTR, url.substring(1));
        req.getRequestDispatcher(uri).forward(req, rsp);
    }

    /**
     * Lazily collects the @Action method URIs of the configured auto "search"
     * action class; also fills {@link #cstset} with the @CustomReplies subset.
     *
     * @throws HongsExemption 0x1130 when the auto search action is missing
     */
    private Set<String> getacts() {
        if (null != actset) {
            return actset;
        }

        Class<?> cls;
        try {
            cls = ActionRunner.getActions()
                              .get(action.substring(1) + "/search")
                              .getMclass();
        } catch (NullPointerException ex) {
            throw new HongsExemption(0x1130, "Auto action '"
                + action.substring(1) + "/search' is not exists", ex);
        }

        cstset = new HashSet<>();
        // Deeper paths (more '/') sort first so the most specific suffix wins.
        // NOTE: ties deliberately return 1 (never 0) so entries with the same
        // depth are all retained; this comparator is inconsistent with equals
        // and TreeSet.contains() must not be relied on for this set.
        actset = new TreeSet<>(new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                int i, c1 = 0, c2 = 0;
                i = 0;
                while ((i = o1.indexOf('/', i)) != -1) {
                    i++;
                    c1++;
                }
                i = 0;
                while ((i = o2.indexOf('/', i)) != -1) {
                    i++;
                    c2++;
                }
                i = Integer.compare(c2, c1);
                return i != 0 ? i : 1;
            }
        });

        for (Method mtd : cls.getMethods()) {
            Action ann = mtd.getAnnotation(Action.class);
            if (null != ann) {
                String uri;
                if (!"".equals(ann.value())) {
                    uri = "/" + ann.value( );
                } else {
                    uri = "/" + mtd.getName();
                }
                if (mtd.isAnnotationPresent(CustomReplies.class)) {
                    cstset.add(uri);
                }
                actset.add(uri);
            }
        }

        return actset;
    }

    /**
     * Lazily collects the layout file URIs under the configured layout path.
     *
     * @throws HongsExemption 0x1131 when the layout path is missing or not a directory
     */
    private Set<String> getlays() {
        if (null != layset) {
            return layset;
        }

        File dir = new File(Core.BASE_PATH + layout);
        if (!dir.exists()) {
            throw new HongsExemption(0x1131, "Auto layout '"
                + layout.substring(1) + "' is not exists");
        }
        if (!dir.isDirectory()) {
            throw new HongsExemption(0x1131, "Auto layout '"
                + layout.substring(1) + "' is not a directory");
        }

        layset = new LinkedHashSet<>();
        getlays(layset, dir, "/");
        return layset;
    }

    /**
     * Recursively gathers layout files. Names starting with '.' or '_' are
     * skipped; files of a directory are appended after its subdirectories,
     * preserving discovery order.
     */
    private void getlays(Set<String> layset, File dx, String dn) {
        File[] fs = dx.listFiles();
        if (null == fs) {
            return;
        }

        Set<String> tmpset = new LinkedHashSet<>();
        for (File fx : fs) {
            String fn = fx.getName();
            if (fn.startsWith(".")
            ||  fn.startsWith("_")) {
                continue;
            }
            if (fx.isFile()) {
                tmpset.add(dn + fn);
            } else
            if (fx.isDirectory()) {
                getlays(layset, fx, dn + fn + "/");
            }
        }
        layset.addAll(tmpset);
    }

}
package ui.issuepanel.comments;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.regex.Pattern;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.w3c.dom.events.Event;
import org.w3c.dom.events.EventListener;
import org.w3c.dom.events.EventTarget;

import service.ServiceManager;
import util.Browse;
import model.TurboComment;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.beans.value.WeakChangeListener;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.text.Text;
import javafx.scene.web.WebView;
import javafx.concurrent.Worker;
import javafx.concurrent.Worker.State;

/**
 * Card that renders a single TurboComment: a top bar with author and creation
 * date, plus the comment's HTML body in a WebView whose height is adjusted to
 * fit the rendered content. The card re-renders itself when the comment's
 * body-HTML property changes (listeners are held weakly via the strong
 * listener fields below, so the fields must not be removed).
 */
public class IssueDetailsCard extends VBox{
	// Layout constants shared with subclasses. NOTE(review): mutable statics;
	// presumably never written after class load — confirm before relying on it.
	protected static int PREF_WIDTH = 300;
	protected static int ELEMENTS_HORIZONTAL_SPACING = 10;
	protected static int ELEMENTS_VERTICAL_SPACING = 5;
	protected static int PADDING = 3;
	// Extra pixels added to the measured document height to avoid scrollbars.
	protected static int WEB_TEXT_PADDING = 30;

	public static final String EVENT_TYPE_CLICK = "click";
	// Wrapper div gives adjustWebEngineHeight() an element whose offsetHeight
	// equals the rendered content height.
	protected static final String HTML_CONTENT_WRAPPER = "<html><body>"
			+ "<div id=\"wrapper\">%1s</div>"
			+ "</body></html>";

	protected HBox topBar;                  // author + date row
	protected WebView commentsText;         // renders the comment body HTML
	protected VBox commentsTextDisplay;     // container for commentsText

	protected TurboComment originalComment; // the comment currently displayed

	// Strong references backing the WeakChangeListeners registered below;
	// without them the listeners would be garbage-collected immediately.
	protected ChangeListener<String> bodyChangeListener;
	protected ChangeListener<Document> webViewHeightListener;
	protected ChangeListener<State> weblinkClickListeners;

	// Guards adjustWebEngineHeight() so the height is only measured once per
	// body change; reset when the body text changes.
	private boolean heightAdjusted = false;

	public IssueDetailsCard(){
		this.setSpacing(ELEMENTS_VERTICAL_SPACING);
		this.setPrefWidth(PREF_WIDTH);
		this.setPadding(new Insets(PADDING));
		this.getStyleClass().add("comments-list-cell");
		initialiseUIComponents();
	}

	/** Sets the comment to show and rebuilds the card's children. */
	public void setDisplayedItem(TurboComment comment){
		this.originalComment = comment;
		reload();
	}

	/** Clears and repopulates all child nodes from originalComment. */
	protected void reload(){
		resetComponents();
		loadComponents();
	}

	protected void resetComponents(){
		topBar.getChildren().clear();
		commentsTextDisplay.getChildren().clear();
		this.getChildren().clear();
	}

	protected void loadComponents(){
		setupCommentBodyChangeListener();
		setupWeblinkClickListeners();
		loadCardComponents();
	}

	protected void initialiseUIComponents(){
		initialiseTopBar();
		initialiseCommentsText();
		initialiseCommentsTextDisplay();
	}

	protected void initialiseTopBar(){
		topBar = new HBox();
		topBar.setPrefWidth(PREF_WIDTH);
		topBar.setSpacing(ELEMENTS_HORIZONTAL_SPACING);
	}

	protected void initialiseCommentsText(){
		commentsText = new WebView();
		commentsText.setPrefWidth(PREF_WIDTH);
		setupWebEngineHeightListener();
	}

	/**
	 * Re-renders the body and re-arms the height adjustment whenever the
	 * comment's body HTML changes. Registered weakly; bodyChangeListener
	 * keeps the listener alive.
	 */
	protected void setupCommentBodyChangeListener(){
		bodyChangeListener = new ChangeListener<String>(){
			@Override
			public void changed(ObservableValue<? extends String> arg0,
					String original, String change) {
				setDisplayedCommentText();
				heightAdjusted = false;
			}
		};
		originalComment.getBodyHtmlProperty().addListener(new WeakChangeListener<String>(bodyChangeListener));
	}

	protected void initialiseCommentsTextDisplay(){
		commentsTextDisplay = new VBox();
	}

	/** Formats a creation date like "5 Mar 15, 2:30 PM" for the top bar. */
	protected String formatDisplayedDate(Date date){
		SimpleDateFormat format = new SimpleDateFormat("d MMM yy, h:mm a");
		return format.format(date);
	}

	/** Builds the author-alias + creation-date row shown above the comment. */
	protected HBox createCommentsDetailsDisplay(){
		HBox details = new HBox();
		Text creator = new Text(originalComment.getCreator().getAlias());
		creator.getStyleClass().add("issue-comment-details");
		Text creationDate = new Text(formatDisplayedDate(originalComment.getCreatedAt()));
		creationDate.getStyleClass().add("issue-comment-details");
		details.setAlignment(Pos.BOTTOM_LEFT);
		details.setSpacing(ELEMENTS_HORIZONTAL_SPACING);
		details.getChildren().addAll(creator, creationDate);
		return details;
	}

	protected void loadTopBar(){
		topBar.getChildren().add(createCommentsDetailsDisplay());
	}

	protected void loadCardComponents(){
		loadTopBar();
		loadCommentsDisplay();
		getChildren().addAll(topBar, commentsTextDisplay);
	}

	protected void loadCommentsDisplay(){
		setDisplayedCommentText();
		commentsTextDisplay.getChildren().add(commentsText);
	}

	/** Loads the (changelog-stripped) comment body into the WebView. */
	private void setDisplayedCommentText(){
		String text = originalComment.getBodyHtml();
		String displayedText = String.format(HTML_CONTENT_WRAPPER, stripChangeLogHeader(text));
		commentsText.getEngine().loadContent(displayedText);
	}

	/**
	 * Adjusts the WebView height each time a new document finishes loading.
	 * Registered weakly; webViewHeightListener keeps the listener alive.
	 */
	private void setupWebEngineHeightListener(){
		webViewHeightListener = new ChangeListener<Document>() {
			@Override
			public void changed(ObservableValue<? extends Document> prop,
					Document oldDoc, Document newDoc) {
				adjustWebEngineHeight();
			}
		};
		commentsText.getEngine().documentProperty().addListener(new WeakChangeListener<Document>(webViewHeightListener));
	}

	// NOTE(review): intentionally a no-op — the implementation below is
	// disabled work-in-progress for opening comment hyperlinks in an external
	// browser. Kept for reference; the commented code is not compilable as-is.
	private void setupWeblinkClickListeners(){
//		weblinkClickListeners = new ChangeListener<State>() {
//			@Override
//			public void changed(ObservableValue ov, State oldState, State newState) {
//				if (newState == Worker.State.SUCCEEDED) {
//					System.out.println("listener created");
//					EventListener listener = new EventListener() {
//						@Override
//						public void handleEvent(Event evt) {
//							String domEventType = evt.getType();
//							System.out.println(evt.getType());
//							if (domEventType.equals(EVENT_TYPE_CLICK)) {
//								String href = ((Element)evt.getTarget()).getAttribute("href");
//								System.out.println(href);
////								Browse.browse(href);
//					Document doc = commentsText.getEngine().getDocument();
//					NodeList nodeList = doc.getElementsByTagName("a");
//					for (int i = 0; i < nodeList.getLength(); i++) {
//						System.out.println("node");
//						((EventTarget) nodeList.item(i)).addEventListener(EVENT_TYPE_CLICK, listener, false);
//		commentsText.getEngine().getLoadWorker().stateProperty()
//			.addListener(weblinkClickListeners);
	}

	/**
	 * Sets the WebView's preferred height to the rendered content height plus
	 * WEB_TEXT_PADDING. Runs at most once per body change (heightAdjusted).
	 */
	private void adjustWebEngineHeight(){
		if(heightAdjusted == true){
			return;
		}
		// offsetHeight of the wrapper div = height of the rendered content.
		Object res = commentsText.getEngine().executeScript("document.getElementById('wrapper').offsetHeight");
		if(res!= null && res instanceof Integer) {
			Integer height = (Integer)res + WEB_TEXT_PADDING;
			commentsText.setPrefHeight(height);
		}
		heightAdjusted = true;
	}

	/**
	 * For issue-log comments, removes the leading changelog tag from the
	 * body; other comments (and null bodies) are returned unchanged.
	 */
	private String stripChangeLogHeader(String text){
		if(text == null || !originalComment.isIssueLog()){
			return text;
		}
		String regex = Pattern.quote(ServiceManager.CHANGELOG_TAG);
		return text.replaceFirst(regex, "").trim();
	}
}
package heufybot.core;

import java.util.AbstractMap.SimpleEntry;

import heufybot.core.events.LoggingInterface;
import heufybot.modules.Module;
import heufybot.modules.ModuleInterface;
import heufybot.modules.ModuleInterface.ModuleLoaderResponse;
import heufybot.utils.FileUtils;

/**
 * Application core singleton: wires the configuration, the IRC connection and
 * the module/logging interfaces together, and drives start/stop/restart.
 */
public class HeufyBot
{
	public final static String VERSION = "0.4.1";

	private Config config;
	private IRC irc;
	private ModuleInterface moduleInterface;
	private LoggingInterface loggingInterface;

	// Eagerly-created singleton instance.
	private static final HeufyBot instance = new HeufyBot();

	private HeufyBot()
	{
		// Make sure the working directories exist before anything touches them.
		FileUtils.touchDir("data");
		FileUtils.touchDir("modules");

		this.config = Config.getInstance();
		this.irc = IRC.getInstance();
		irc.setConfig(config);
	}

	/** Creates the interfaces, loads modules, connects and logs in. */
	public void start()
	{
		moduleInterface = new ModuleInterface(this);
		loggingInterface = new LoggingInterface(this);
		irc.getEventListenerManager().addListener(moduleInterface);
		irc.getEventListenerManager().addListener(loggingInterface);

		this.loadModules();

		if(irc.connect(config.getServer(), config.getPort()))
		{
			irc.login();
		}
	}

	/** Quits with the given message, disconnects and unloads all modules. */
	public void stop(String message)
	{
		irc.cmdQUIT(message);
		irc.disconnect(false);

		//Unload modules
		this.unloadModules();

		Logger.log("*** Stopping...");
	}

	/** Full restart: stop, reload modules, re-read the config and reconnect. */
	public void restart()
	{
		//Disconnect from the server
		this.stop("Restarting...");

		//Reload modules
		// NOTE(review): modules are reloaded before the config file is re-read,
		// so a changed module list only takes effect on the next restart — confirm intended.
		this.loadModules();

		//Reload config and reconnect
		if(config.loadConfigFromFile("settings.yml"))
		{
			if(irc.connect(config.getServer(), config.getPort()))
			{
				irc.login();
			}
		}
	}

	/** Loads every module named in the config, logging each outcome. */
	public void loadModules()
	{
		Logger.log("*** Loading modules...");

		for(String module : config.getModulesToLoad())
		{
			SimpleEntry<ModuleLoaderResponse, String> outcome = moduleInterface.loadModule(module);
			switch(outcome.getKey())
			{
			case Success:
				Logger.log(" - Module " + outcome.getValue() + " was loaded");
				break;
			case AlreadyLoaded:
				Logger.error("Module Loader", "Module " + module + " is already loaded");
				break;
			case DoesNotExist:
				Logger.error("Module Loader", "Module " + module + " does not exist");
				break;
			default:
				break;
			}
		}
	}

	/** Unloads every currently loaded module, logging each outcome. */
	public void unloadModules()
	{
		Logger.log("*** Unloading modules...");

		// Snapshot into an array first: unloading mutates the live module list.
		Module[] loadedModules = moduleInterface.getModuleList().toArray(new Module[0]);

		for(Module module : loadedModules)
		{
			SimpleEntry<ModuleLoaderResponse, String> outcome = moduleInterface.unloadModule(module.toString());
			switch (outcome.getKey())
			{
			case Success:
				Logger.log(" - Module " + outcome.getValue() + " was unloaded");
				break;
			case DoesNotExist:
				//If for whatever reason a loaded module doesn't exist
				Logger.error("Module Loader", "Module " + module + " is already unloaded or does not exist");
				break;
			default:
				break;
			}
		}
	}

	public IRC getIRC()
	{
		return irc;
	}

	public ModuleInterface getModuleInterface()
	{
		return moduleInterface;
	}

	public static HeufyBot getInstance()
	{
		return instance;
	}

	public Config getConfig()
	{
		return config;
	}
}
package be.ibridge.kettle.job.entry;
import java.util.ArrayList;

import org.eclipse.swt.widgets.Shell;
import org.w3c.dom.Node;

import be.ibridge.kettle.core.Result;
import be.ibridge.kettle.core.database.DatabaseMeta;
import be.ibridge.kettle.core.exception.KettleException;
import be.ibridge.kettle.core.exception.KettleXMLException;
import be.ibridge.kettle.job.Job;
import be.ibridge.kettle.job.JobMeta;
import be.ibridge.kettle.job.entry.createfile.JobEntryCreateFile;
import be.ibridge.kettle.job.entry.deletefile.JobEntryDeleteFile;
import be.ibridge.kettle.job.entry.eval.JobEntryEval;
import be.ibridge.kettle.job.entry.filecompare.JobEntryFileCompare;
import be.ibridge.kettle.job.entry.fileexists.JobEntryFileExists;
import be.ibridge.kettle.job.entry.ftp.JobEntryFTP;
import be.ibridge.kettle.job.entry.http.JobEntryHTTP;
import be.ibridge.kettle.job.entry.job.JobEntryJob;
import be.ibridge.kettle.job.entry.mail.JobEntryMail;
import be.ibridge.kettle.job.entry.mysqlbulkload.JobEntryMysqlBulkLoad;
import be.ibridge.kettle.job.entry.mysqlbulkfile.JobEntryMysqlBulkFile;
import be.ibridge.kettle.job.entry.msgboxinfo.JobEntryMsgBoxInfo;
import be.ibridge.kettle.job.entry.delay.JobEntryDelay;
import be.ibridge.kettle.job.entry.zipfile.JobEntryZipFile;
import be.ibridge.kettle.job.entry.xslt.JobEntryXSLT;
import be.ibridge.kettle.job.entry.sftp.JobEntrySFTP;
import be.ibridge.kettle.job.entry.sftpput.JobEntrySFTPPUT;
import be.ibridge.kettle.job.entry.shell.JobEntryShell;
import be.ibridge.kettle.job.entry.special.JobEntrySpecial;
import be.ibridge.kettle.job.entry.sql.JobEntrySQL;
import be.ibridge.kettle.job.entry.tableexists.JobEntryTableExists;
import be.ibridge.kettle.job.entry.trans.JobEntryTrans;
import be.ibridge.kettle.job.entry.waitforfile.JobEntryWaitForFile;
import be.ibridge.kettle.job.entry.abort.JobEntryAbort;
import be.ibridge.kettle.repository.Repository;

/**
 * Interface for the different JobEntry classes.
 * <p>
 * Defines the built-in entry type ids (TYPE_JOBENTRY_*) together with five
 * PARALLEL arrays indexed by those ids: code, description, icon file name,
 * tooltip and implementation class. When a new entry type is added, every one
 * of these arrays must grow in lock-step at the same index.
 *
 * @author Matt
 * @since 18-06-04
 *
 */
public interface JobEntryInterface
{
	// Entry type ids; each doubles as the index into the parallel arrays below.
	public final static int TYPE_JOBENTRY_NONE            = 0;
	public final static int TYPE_JOBENTRY_TRANSFORMATION  = 1;
	public final static int TYPE_JOBENTRY_JOB             = 2;
	public final static int TYPE_JOBENTRY_SHELL           = 3;
	public final static int TYPE_JOBENTRY_MAIL            = 4;
	public final static int TYPE_JOBENTRY_SQL             = 5;
	public final static int TYPE_JOBENTRY_FTP             = 6;
	public final static int TYPE_JOBENTRY_TABLE_EXISTS    = 7;
	public final static int TYPE_JOBENTRY_FILE_EXISTS     = 8;
	public final static int TYPE_JOBENTRY_EVALUATION      = 9;
	public final static int TYPE_JOBENTRY_SPECIAL         = 10;
	public static final int TYPE_JOBENTRY_SFTP            = 11;
	public static final int TYPE_JOBENTRY_HTTP            = 12;
	public static final int TYPE_JOBENTRY_CREATE_FILE     = 13;
	public static final int TYPE_JOBENTRY_DELETE_FILE     = 14;
	public static final int TYPE_JOBENTRY_WAIT_FOR_FILE   = 15;
	public static final int TYPE_JOBENTRY_SFTPPUT         = 16;
	public static final int TYPE_JOBENTRY_FILE_COMPARE    = 17;
	public static final int TYPE_JOBENTRY_MYSQL_BULK_LOAD = 18;
	public static final int TYPE_JOBENTRY_MSGBOX_INFO     = 19;
	public static final int TYPE_JOBENTRY_DELAY           = 20;
	public static final int TYPE_JOBENTRY_ZIP_FILE        = 21;
	public static final int TYPE_JOBENTRY_XSLT            = 22;
	public static final int TYPE_JOBENTRY_MYSQL_BULK_FILE = 23;
	public static final int TYPE_JOBENTRY_ABORT           = 24;

	// XML/repository code of each entry type, indexed by TYPE_JOBENTRY_*.
	public final static String typeCode[] =
	{
		"-", "TRANS", "JOB", "SHELL", "MAIL", "SQL", "FTP", "TABLE_EXISTS", "FILE_EXISTS",
		"EVAL", "SPECIAL", "SFTP", "HTTP", "CREATE_FILE", "DELETE_FILE", "WAIT_FOR_FILE",
		"SFTPPUT", "FILE_COMPARE", "MYSQL_BULK_LOAD", "MSGBOX_INFO", "DELAY", "ZIP_FILE",
		"XSLT", "MYSQL_BULK_FILE", "ABORT",
	};

	// Localized short description of each entry type, indexed by TYPE_JOBENTRY_*.
	public final static String typeDesc[] =
	{
		"-",
		Messages.getString("JobEntry.Trans.TypeDesc"),
		Messages.getString("JobEntry.Job.TypeDesc"),
		Messages.getString("JobEntry.Shell.TypeDesc"),
		Messages.getString("JobEntry.Mail.TypeDesc"),
		Messages.getString("JobEntry.SQL.TypeDesc"),
		Messages.getString("JobEntry.FTP.TypeDesc"),
		Messages.getString("JobEntry.TableExists.TypeDesc"),
		Messages.getString("JobEntry.FileExists.TypeDesc"),
		Messages.getString("JobEntry.Evaluation.TypeDesc"),
		Messages.getString("JobEntry.Special.TypeDesc"),
		Messages.getString("JobEntry.SFTP.TypeDesc"),
		Messages.getString("JobEntry.HTTP.TypeDesc"),
		Messages.getString("JobEntry.CreateFile.TypeDesc"),
		Messages.getString("JobEntry.DeleteFile.TypeDesc"),
		Messages.getString("JobEntry.WaitForFile.TypeDesc"),
		Messages.getString("JobEntry.SFTPPut.TypeDesc"),
		Messages.getString("JobEntry.FileCompare.TypeDesc"),
		Messages.getString("JobEntry.MysqlBulkLoad.TypeDesc"),
		Messages.getString("JobEntry.MsgBoxInfo.TypeDesc"),
		Messages.getString("JobEntry.Delay.TypeDesc"),
		Messages.getString("JobEntry.ZipFile.TypeDesc"),
		Messages.getString("JobEntry.XSLT.TypeDesc"),
		Messages.getString("JobEntry.MysqlBulkFile.TypeDesc"),
		Messages.getString("JobEntry.Abort.TypeDesc"),
	};

	// Icon file name of each entry type, indexed by TYPE_JOBENTRY_*
	// (empty string = no icon for NONE and SPECIAL).
	public final static String icon_filename[] =
	{
		"", "TRN.png", "JOB.png", "SHL.png", "MAIL.png", "SQL.png", "FTP.png", "TEX.png",
		"FEX.png", "RES.png", "", "SFT.png", "WEB.png", "CFJ.png", "DFJ.png", "WFF.png",
		"SFP.png", "BFC.png", "MBL.png", "INF.png", "DLT.png", "ZIP.png", "XSLT.png",
		"MBF.png", "ABR.png",
	};

	// Localized tooltip of each entry type, indexed by TYPE_JOBENTRY_*.
	public final static String type_tooltip_desc[] =
	{
		"",
		Messages.getString("JobEntry.Trans.Tooltip"),
		Messages.getString("JobEntry.Job.Tooltip"),
		Messages.getString("JobEntry.Shell.Tooltip"),
		Messages.getString("JobEntry.Mail.Tooltip"),
		Messages.getString("JobEntry.SQL.Tooltip"),
		Messages.getString("JobEntry.FTP.Tooltip"),
		Messages.getString("JobEntry.TableExists.Tooltip"),
		Messages.getString("JobEntry.FileExists.Tooltip"),
		Messages.getString("JobEntry.Evaluation.Tooltip"),
		Messages.getString("JobEntry.Special.Tooltip"),
		Messages.getString("JobEntry.SFTP.Tooltip"),
		Messages.getString("JobEntry.HTTP.Tooltip"),
		Messages.getString("JobEntry.CreateFile.Tooltip"),
		Messages.getString("JobEntry.DeleteFile.Tooltip"),
		Messages.getString("JobEntry.WaitForFile.Tooltip"),
		Messages.getString("JobEntry.SFTPPut.Tooltip"),
		Messages.getString("JobEntry.FileCompare.Tooltip"),
		Messages.getString("JobEntry.MysqlBulkLoad.Tooltip"),
		Messages.getString("JobEntry.MsgBoxInfo.Tooltip"),
		Messages.getString("JobEntry.Delay.Tooltip"),
		Messages.getString("JobEntry.ZipFile.Tooltip"),
		Messages.getString("JobEntry.XSLT.Tooltip"),
		Messages.getString("JobEntry.MysqlBulkFile.Tooltip"),
		Messages.getString("JobEntry.Abort.Tooltip"),
	};

	// Implementation class of each entry type, indexed by TYPE_JOBENTRY_*
	// (null for NONE).
	public final static Class type_classname[] =
	{
		null,
		JobEntryTrans.class,
		JobEntryJob.class,
		JobEntryShell.class,
		JobEntryMail.class,
		JobEntrySQL.class,
		JobEntryFTP.class,
		JobEntryTableExists.class,
		JobEntryFileExists.class,
		JobEntryEval.class,
		JobEntrySpecial.class,
		JobEntrySFTP.class,
		JobEntryHTTP.class,
		JobEntryCreateFile.class,
		JobEntryDeleteFile.class,
		JobEntryWaitForFile.class,
		JobEntrySFTPPUT.class,
		JobEntryFileCompare.class,
		JobEntryMysqlBulkLoad.class,
		JobEntryMsgBoxInfo.class,
		JobEntryDelay.class,
		JobEntryZipFile.class,
		JobEntryXSLT.class,
		JobEntryMysqlBulkFile.class,
		JobEntryAbort.class,
	};

	/** Executes this entry; receives the previous result and returns the new one. */
	public Result execute(Result prev_result, int nr, Repository rep, Job parentJob) throws KettleException;

	/** Resets this entry to its default state. */
	public void    clear();

	// Repository identity.
	public long    getID();
	public void    setID(long id);

	// Name/description shown in the job graph.
	public String  getName();
	public void    setName(String name);
	public String  getDescription();
	public void    setDescription(String description);

	// Dirty-flag handling.
	public void    setChanged();
	public void    setChanged(boolean ch);
	public boolean hasChanged();

	/** Reads this entry's settings from a job XML node. */
	public void    loadXML(Node entrynode, ArrayList databases, Repository rep) throws KettleXMLException;
	/** Serializes this entry's settings to XML. */
	public String  getXML();
	/** Reads this entry's settings from the repository. */
	public void    loadRep(Repository rep, long id_jobentry, ArrayList databases) throws KettleException;
	/** Writes this entry's settings to the repository. */
	public void    saveRep(Repository rep, long id_job) throws KettleException;

	/** @return one of the TYPE_JOBENTRY_* ids. */
	public int     getType();
	public String  getTypeCode();
	public String  getPluginID();

	public boolean isStart();
	public boolean isDummy();
	public Object  clone();

	public boolean resetErrorsBeforeExecution();
	public boolean evaluates();
	public boolean isUnconditional();

	// Convenience type checks, equivalent to comparing getType().
	public boolean isEvaluation();
	public boolean isTransformation();
	public boolean isJob();
	public boolean isShell();
	public boolean isMail();
	public boolean isSpecial();

	/** @return the SQL statements this entry needs executed before it can run. */
	public ArrayList getSQLStatements(Repository repository) throws KettleException;

	/** Creates the settings dialog for this entry. */
	public JobEntryDialogInterface getDialog(Shell shell,JobEntryInterface jei,JobMeta jobMeta,String jobName,Repository rep);

	public String  getFilename();
	public String  getRealFilename();

	/**
	 * This method returns all the database connections that are used by the job entry.
	 * @return an array of database connections meta-data.
	 *         Return an empty array if no connections are used.
	 */
	public DatabaseMeta[] getUsedDatabaseConnections();

	public void setPluginID(String id);
}
package beast.evolution.tree.coalescent;

import java.util.ArrayList;
import java.util.List;

import beast.core.CalculationNode;
import beast.core.Description;
import beast.core.Input;
import beast.core.Input.Validate;
import beast.evolution.tree.Node;
import beast.evolution.tree.Tree;
import beast.util.HeapSort;

/**
 * Extracts the intervals from a beast.tree.
 *
 * @author Andrew Rambaut
 * @author Alexei Drummond
 * @version $Id: TreeIntervals.java,v 1.9 2005/05/24 20:25:56 rambaut Exp $
 */
@Description("Extracts the intervals from a tree. Points in the intervals " +
        "are defined by the heights of nodes in the tree.")
public class TreeIntervals extends CalculationNode implements IntervalList {
    final public Input<Tree> treeInput = new Input<>("tree", "tree for which to calculate the intervals", Validate.REQUIRED);

    public TreeIntervals() {
        super();
    }

    public TreeIntervals(Tree tree) {
        init(tree);
    }

    @Override
    public void initAndValidate() {
        // this initialises data structures that store/restore might need
        calculateIntervals();
        // mark stale so the first real query recomputes against the current tree
        intervalsKnown = false;
    }

    /**
     * CalculationNode methods *
     */
    @Override
    protected boolean requiresRecalculation() {
        // we only get here if the tree is dirty, which is a StateNode
        // since the StateNode can only become dirty through an operation,
        // we need to recalculate tree intervals
        intervalsKnown = false;
        return true;
    }

    @Override
    protected void restore() {
        //intervalsKnown = false;
        // O(1) restore: swap the working arrays with their stored copies
        double[] tmp = storedIntervals;
        storedIntervals = intervals;
        intervals = tmp;

        int[] tmp2 = storedLineageCounts;
        storedLineageCounts = lineageCounts;
        lineageCounts = tmp2;

        int tmp3 = storedIntervalCount;
        storedIntervalCount = intervalCount;
        intervalCount = tmp3;

        super.restore();
    }

    @Override
    protected void store() {
        // deep-copy the working state so restore() can swap back to it
        System.arraycopy(lineageCounts, 0, storedLineageCounts, 0, lineageCounts.length);
        System.arraycopy(intervals, 0, storedIntervals, 0, intervals.length);
        storedIntervalCount = intervalCount;
        super.store();
    }

    /**
     * Specifies that the intervals are unknown (i.e., the beast.tree has changed).
     */
    public void setIntervalsUnknown() {
        intervalsKnown = false;
    }

    /**
     * Sets the limit for which adjacent events are merged.
     *
     * @param multifurcationLimit A value of 0 means merge addition of leafs (terminal nodes) when possible but
     *                            return each coalescense as a separate event.
     */
    public void setMultifurcationLimit(double multifurcationLimit) {
        // invalidate only if changing anything
        if (this.multifurcationLimit != multifurcationLimit) {
            this.multifurcationLimit = multifurcationLimit;
            intervalsKnown = false;
        }
    }

    @Override
    public int getSampleCount() {
        // Assumes a binary tree!
        return treeInput.get().getInternalNodeCount();
    }

    /**
     * get number of intervals
     */
    @Override
    public int getIntervalCount() {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        return intervalCount;
    }

    /**
     * Gets an interval.
     */
    @Override
    public double getInterval(int i) {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        if (i < 0 || i >= intervalCount) throw new IllegalArgumentException();
        return intervals[i];
    }

    /**
     * Defensive implementation creates copy
     *
     * @return a copy of the interval widths; reuses {@code inters} if non-null
     */
    public double[] getIntervals(double[] inters) {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        if (inters == null) inters = new double[intervals.length];
        System.arraycopy(intervals, 0, inters, 0, intervals.length);
        return inters;
    }

    // Accumulates interval widths into absolute coalescent times,
    // one entry per coalescent event (getSampleCount() entries total).
    public double[] getCoalescentTimes(double[] coalescentTimes) {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        if (coalescentTimes == null) coalescentTimes = new double[getSampleCount()];
        double time = 0;
        int coalescentIndex = 0;
        for (int i = 0; i < intervals.length; i++) {
            time += intervals[i];
            for (int j = 0; j < getCoalescentEvents(i); j++) {
                coalescentTimes[coalescentIndex] = time;
                coalescentIndex += 1;
            }
        }
        return coalescentTimes;
    }

    /**
     * Returns the number of uncoalesced lineages within this interval.
     * Required for s-coalescents, where new lineages are added as
     * earlier samples are come across.
     */
    @Override
    public int getLineageCount(int i) {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        if (i >= intervalCount) throw new IllegalArgumentException();
        return lineageCounts[i];
    }

    /**
     * @param interval the index of the interval
     * @return a list of the nodes representing the lineages in the ith interval.
     */
//    public final List<Node> getLineages(int interval) {
//        if (lineages[interval] == null) {
//            List<Node> lines = new ArrayList<>();
//            for (int i = 0; i <= interval; i++) {
//                if (lineagesAdded[i] != null) lines.addAll(lineagesAdded[i]);
//                if (lineagesRemoved[i] != null) lines.removeAll(lineagesRemoved[i]);
//            }
//            lineages[interval] = Collections.unmodifiableList(lines);
//        }
//        return lineages[interval];

    /**
     * Returns the number of coalescent events in an interval
     */
    @Override
    public int getCoalescentEvents(int i) {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        if (i >= intervalCount) throw new IllegalArgumentException();
        if (i < intervalCount - 1) {
            // lineage count drop between consecutive intervals = coalescences
            return lineageCounts[i] - lineageCounts[i + 1];
        } else {
            // last interval: everything coalesces down to the single root lineage
            return lineageCounts[i] - 1;
        }
    }

    /**
     * Returns the type of interval observed.
     */
    @Override
    public IntervalType getIntervalType(int i) {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        if (i >= intervalCount) throw new IllegalArgumentException();
        int numEvents = getCoalescentEvents(i);

        if (numEvents > 0) return IntervalType.COALESCENT;
        else if (numEvents < 0) return IntervalType.SAMPLE;
        else return IntervalType.NOTHING;
    }

//    public Node getCoalescentNode(int interval) {
//        if (getIntervalType(interval) == IntervalType.COALESCENT) {
//            if (lineagesRemoved[interval] != null) {
//                if (lineagesRemoved[interval].size() == 1) {
//                    return lineagesRemoved[interval].get(0);

    /**
     * get the total height of the genealogy represented by these
     * intervals.
     */
    @Override
    public double getTotalDuration() {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        double height = 0.0;
        for (int j = 0; j < intervalCount; j++) {
            height += intervals[j];
        }
        return height;
    }

    /**
     * Checks whether this set of coalescent intervals is fully resolved
     * (i.e. whether is has exactly one coalescent event in each
     * subsequent interval)
     */
    @Override
    public boolean isBinaryCoalescent() {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        for (int i = 0; i < intervalCount; i++) {
            if (getCoalescentEvents(i) > 0) {
                if (getCoalescentEvents(i) != 1) return false;
            }
        }
        return true;
    }

    /**
     * Checks whether this set of coalescent intervals coalescent only
     * (i.e. whether is has exactly one or more coalescent event in each
     * subsequent interval)
     */
    @Override
    public boolean isCoalescentOnly() {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        for (int i = 0; i < intervalCount; i++) {
            if (getCoalescentEvents(i) < 1) return false;
        }
        return true;
    }

    /**
     * Recalculates all the intervals for the given beast.tree.
     */
    @SuppressWarnings("unchecked")
    protected void calculateIntervals() {
        Tree tree = treeInput.get();

        final int nodeCount = tree.getNodeCount();

        times = new double[nodeCount];
        int[] childCounts = new int[nodeCount];

        collectTimes(tree, times, childCounts);

        // indices gives the ordering of node heights without moving `times` itself
        indices = new int[nodeCount];
        HeapSort.sort(times, indices);

        if (intervals == null || intervals.length != nodeCount) {
            // (re)allocate when the tree size changed
            intervals = new double[nodeCount];
            lineageCounts = new int[nodeCount];
            lineagesAdded = new List[nodeCount];
            lineagesRemoved = new List[nodeCount];
//            lineages = new List[nodeCount];

            storedIntervals = new double[nodeCount];
            storedLineageCounts = new int[nodeCount];

        } else {
            // same size: just clear the per-interval lineage bookkeeping
            for (List<Node> l : lineagesAdded) {
                if (l != null) {
                    l.clear();
                }
            }
            for (List<Node> l : lineagesRemoved) {
                if (l != null) {
                    l.clear();
                }
            }
        }

        // start is the time of the first tip
        double start = times[indices[0]];
        int numLines = 0;
        int nodeNo = 0;
        intervalCount = 0;
        while (nodeNo < nodeCount) {

            // NOTE: these locals intentionally shadow the same-named fields;
            // here they are per-event counts, not the per-interval lists.
            int lineagesRemoved = 0;
            int lineagesAdded = 0;

            double finish = times[indices[nodeNo]];
            double next;

            // inner loop merges events closer together than multifurcationLimit
            do {
                final int childIndex = indices[nodeNo];
                final int childCount = childCounts[childIndex];
                // don't use nodeNo from here on in do loop
                nodeNo += 1;
                if (childCount == 0) {
                    // leaf: a lineage is sampled (added)
                    addLineage(intervalCount, tree.getNode(childIndex));
                    lineagesAdded += 1;
                } else {
                    // internal node: childCount lineages merge into one
                    lineagesRemoved += (childCount - 1);

                    // record removed lineages
                    final Node parent = tree.getNode(childIndex);
                    //assert childCounts[indices[nodeNo]] == beast.tree.getChildCount(parent);
                    //for (int j = 0; j < lineagesRemoved + 1; j++) {
                    for (int j = 0; j < childCount; j++) {
                        Node child = j == 0 ? parent.getLeft() : parent.getRight();
                        removeLineage(intervalCount, child);
                    }

                    // record added lineages
                    addLineage(intervalCount, parent);
                    // no mix of removed lineages when 0 th
                    if (multifurcationLimit == 0.0) {
                        break;
                    }
                }

                if (nodeNo < nodeCount) {
                    next = times[indices[nodeNo]];
                } else break;
            } while (Math.abs(next - finish) <= multifurcationLimit);

            if (lineagesAdded > 0) {

                // suppress a zero-width leading interval at the origin
                if (intervalCount > 0 || ((finish - start) > multifurcationLimit)) {
                    intervals[intervalCount] = finish - start;
                    lineageCounts[intervalCount] = numLines;
                    intervalCount += 1;
                }

                start = finish;
            }

            // add sample event
            numLines += lineagesAdded;

            if (lineagesRemoved > 0) {

                intervals[intervalCount] = finish - start;
                lineageCounts[intervalCount] = numLines;
                intervalCount += 1;
                start = finish;
            }
            // coalescent event
            numLines -= lineagesRemoved;
        }

        intervalsKnown = true;
    }

    /**
     * Returns the time of the start of an interval
     *
     * @param i which interval
     * @return start time
     */
    public double getIntervalTime(int i) {
        if (!intervalsKnown) {
            calculateIntervals();
        }
        return times[indices[i]];
    }

    // Lazily allocates the per-interval list; records a lineage entering the interval.
    protected void addLineage(int interval, Node node) {
        if (lineagesAdded[interval] == null) lineagesAdded[interval] = new ArrayList<>();
        lineagesAdded[interval].add(node);
    }

    // Lazily allocates the per-interval list; records a lineage leaving the interval.
    protected void removeLineage(int interval, Node node) {
        if (lineagesRemoved[interval] == null) lineagesRemoved[interval] = new ArrayList<>();
        lineagesRemoved[interval].add(node);
    }

    /**
     * @return the delta parameter of Pybus et al (Node spread statistic)
     */
    public double getDelta() {
        return IntervalList.Utils.getDelta(this);
    }

    /**
     * extract coalescent times and tip information into array times from beast.tree.
     *
     * @param tree        the beast.tree
     * @param times       the times of the nodes in the beast.tree
     * @param childCounts the number of children of each node
     */
    protected static void collectTimes(Tree tree, double[] times, int[] childCounts) {
        Node[] nodes = tree.getNodesAsArray();
        for (int i = 0; i < nodes.length; i++) {
            Node node = nodes[i];
            times[i] = node.getHeight();
            // assumes a binary tree: internal nodes always have two children
            childCounts[i] = node.isLeaf() ? 0 : 2;
        }
    }

    /**
     * The beast.tree. RRB: not a good idea to keep a copy around, since it changes all the time.
     */
//    private Tree tree = null;

    /**
     * The widths of the intervals.
     */
    protected double[] intervals;
    protected double[] storedIntervals;

    /** interval times **/
    double[] times;
    int[] indices;

    /**
     * The number of uncoalesced lineages within a particular interval.
     */
    protected int[] lineageCounts;
    protected int[] storedLineageCounts;

    /**
     * The lineages in each interval (stored by node ref).
     */
    protected List<Node>[] lineagesAdded;
    protected List<Node>[] lineagesRemoved;
//    private List<Node>[] lineages;

    protected int intervalCount = 0;
    protected int storedIntervalCount = 0;

    /**
     * are the intervals known?
     */
    protected boolean intervalsKnown = false;

    protected double multifurcationLimit = -1.0;
}
package org.opencms.ade.publish;

import org.opencms.file.CmsResource;
import org.opencms.main.CmsLog;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.commons.logging.Log;

/**
 * Helper class for splitting a publish list into publish groups.<p>
 *
 * @author Georg Westenberger
 *
 * @version $Revision: 1.2 $
 *
 * @since 8.0.0
 */
public class CmsPublishGroupHelper {

    /** An enum representing the age of a publish list resource. */
    public enum GroupAge {
        /** group age constant. */
        medium,
        /** group age constant. */
        old,
        /** group age constant. */
        young
    }

    /** The log instance for this class. */
    private static final Log LOG = CmsLog.getLog(CmsPublishGroupHelper.class);

    /** The gap between session groups. */
    protected static final int GROUP_SESSIONS_GAP = 8 * 60 * 60 * 1000;

    /** The current locale. */
    private Locale m_locale;

    /**
     * Creates a new publish group helper for a given locale.<p>
     *
     * @param locale the locale to use
     */
    public CmsPublishGroupHelper(Locale locale) {

        m_locale = locale;
    }

    /**
     * Given a descending list of dates represented as longs, this method computes a map from the dates
     * to their age in (local) days.<p>
     *
     * @param sortedDates a descending list of dates represented as longs
     *
     * @return a map from dates to ages (measured in days)
     */
    public Map<Long, Integer> computeDays(List<Long> sortedDates) {

        if (sortedDates.isEmpty()) {
            return Collections.<Long, Integer> emptyMap();
        }
        Map<Long, Integer> days = new HashMap<Long, Integer>();
        long lastDate = System.currentTimeMillis();
        int dayCounter = 0;
        for (Long dateObj : sortedDates) {
            long date = dateObj.longValue();
            // the list is descending, so each step only adds day differences
            long dayDifference = getDayDifference(lastDate, date);
            dayCounter += dayDifference;
            lastDate = date;
            // Integer.valueOf instead of the deprecated new Integer(...) constructor
            days.put(dateObj, Integer.valueOf(dayCounter));
        }
        return days;
    }

    /**
     * Computes a map from modification date to number of (local) days since the modification date.<p>
     *
     * @param resources a list of resources
     *
     * @return a map from modification dates to the number of days since the modification date
     */
    public Map<Long, Integer> computeDaysForResources(List<CmsResource> resources) {

        Map<Long, Integer> result = computeDays(getModificationDates(resources));
        if (LOG.isDebugEnabled()) {
            for (CmsResource res : resources) {
                LOG.debug("Resource "
                    + res.getRootPath()
                    + " is "
                    + result.get(Long.valueOf(res.getDateLastModified()))
                    + " days old.");
            }
        }
        return result;
    }

    /**
     * Gets the difference in days between to dates given as longs.<p>
     *
     * The first date must be later than the second date.
     *
     * @param first the first date
     * @param second the second date
     *
     * @return the difference between the two dates in days
     */
    public int getDayDifference(long first, long second) {

        if (first < second) {
            throw new IllegalArgumentException();
        }
        Calendar firstDay = getStartOfDay(first);
        Calendar secondDay = getStartOfDay(second);
        int result = 0;
        // walk backwards one calendar day at a time; this correctly handles
        // DST transitions, which a millisecond division would not
        while (firstDay.after(secondDay)) {
            firstDay.add(Calendar.DAY_OF_MONTH, -1);
            result += 1;
        }
        return result;
    }

    /**
     * Given a list of resources, this method returns a list of their modification dates.<p>
     *
     * @param resources a list of resources
     *
     * @return the modification dates of the resources, in the same order as the resources
     */
    public List<Long> getModificationDates(List<CmsResource> resources) {

        List<Long> result = new ArrayList<Long>();
        for (CmsResource res : resources) {
            // Long.valueOf instead of the deprecated new Long(...) constructor
            result.add(Long.valueOf(res.getDateLastModified()));
        }
        return result;
    }

    /**
     * Returns the localized name for a given publish group based on its age.<p>
     *
     * @param resources the resources of the publish group
     * @param age the age of the publish group
     *
     * @return the localized name of the publish group
     */
    public String getPublishGroupName(List<CmsResource> resources, GroupAge age) {

        long groupDate = resources.get(0).getDateLastModified();
        String groupName;
        switch (age) {
            case young:
                groupName = Messages.get().getBundle(m_locale).key(
                    Messages.GUI_GROUPNAME_SESSION_1,
                    new Date(groupDate));
                break;
            case medium:
                groupName = Messages.get().getBundle(m_locale).key(Messages.GUI_GROUPNAME_DAY_1, new Date(groupDate));
                break;
            case old:
            default:
                groupName = Messages.get().getBundle(m_locale).key(Messages.GUI_GROUPNAME_EVERYTHING_ELSE_0);
                break;
        }
        return groupName;
    }

    /**
     * Returns a calendar object representing the start of the day in which a given time lies.<p>
     *
     * @param time a long representing a time
     *
     * @return a calendar object which represents the day in which the time lies
     */
    public Calendar getStartOfDay(long time) {

        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(time);
        int year = cal.get(Calendar.YEAR);
        int month = cal.get(Calendar.MONTH);
        int day = cal.get(Calendar.DAY_OF_MONTH);
        Calendar result = Calendar.getInstance();
        result.set(Calendar.YEAR, year);
        result.set(Calendar.MONTH, month);
        result.set(Calendar.DAY_OF_MONTH, day);
        return result;
    }

    /**
     * Computes publish groups for a list of resources with age "medium".<p>
     *
     * @param resources the list of resources
     * @param days a map from modification dates to the number of days since the modification
     *
     * @return a list of publish groups
     */
    public List<List<CmsResource>> partitionMediumResources(List<CmsResource> resources, Map<Long, Integer> days) {

        if (resources.isEmpty()) {
            return Collections.<List<CmsResource>> emptyList();
        }
        CmsResource firstRes = resources.get(0);
        int lastDay = days.get(Long.valueOf(firstRes.getDateLastModified())).intValue();
        List<List<CmsResource>> result = new ArrayList<List<CmsResource>>();
        List<CmsResource> currentGroup = new ArrayList<CmsResource>();
        result.add(currentGroup);
        for (CmsResource res : resources) {
            LOG.debug("Processing medium-aged resource " + res.getRootPath());
            int day = days.get(Long.valueOf(res.getDateLastModified())).intValue();
            // a change of day starts a new group
            if (day != lastDay) {
                LOG.debug("=== new group ===");
                currentGroup = new ArrayList<CmsResource>();
                result.add(currentGroup);
            }
            lastDay = day;
            currentGroup.add(res);
        }
        return result;
    }

    /**
     * Partitions a list of resources by their age in (local) days since the last modification.<p>
     *
     * @param resources the list of resources to partition
     * @param days the map from modification dates to the number of (local) days since the modification
     *
     * @return a map from age enum values to the list of resources which fall into the corresponding age group
     */
    public Map<GroupAge, List<CmsResource>> partitionPublishResourcesByAge(
        List<CmsResource> resources,
        Map<Long, Integer> days) {

        List<CmsResource> youngRes = new ArrayList<CmsResource>();
        List<CmsResource> mediumRes = new ArrayList<CmsResource>();
        List<CmsResource> oldRes = new ArrayList<CmsResource>();
        for (CmsResource res : resources) {
            int day = days.get(Long.valueOf(res.getDateLastModified())).intValue();
            List<CmsResource> listToAddTo = null;
            // thresholds: < 7 days young, < 28 days medium, otherwise old
            if (day < 7) {
                listToAddTo = youngRes;
                LOG.debug("Classifying publish resource " + res.getRootPath() + " as young");
            } else if (day < 28) {
                listToAddTo = mediumRes;
                LOG.debug("Classifying publish resource " + res.getRootPath() + " as medium-aged");
            } else {
                listToAddTo = oldRes;
                LOG.debug("Classifying publish resource " + res.getRootPath() + " as old");
            }
            listToAddTo.add(res);
        }
        Map<GroupAge, List<CmsResource>> result = new HashMap<GroupAge, List<CmsResource>>();
        result.put(GroupAge.young, youngRes);
        result.put(GroupAge.medium, mediumRes);
        result.put(GroupAge.old, oldRes);
        return result;
    }

    /**
     * Partitions the list of young resources into publish groups.<p>
     *
     * @param resources the list of resources to partition
     *
     * @return a partition of the resources into publish groups
     */
    public List<List<CmsResource>> partitionYoungResources(List<CmsResource> resources) {

        if (resources.isEmpty()) {
            return Collections.<List<CmsResource>> emptyList();
        }
        List<List<CmsResource>> result = new ArrayList<List<CmsResource>>();
        List<CmsResource> currentGroup = new ArrayList<CmsResource>();
        result.add(currentGroup);
        long lastDate = resources.get(0).getDateLastModified();
        for (CmsResource res : resources) {
            LOG.debug("Processing young resource " + res.getRootPath());
            long resDate = res.getDateLastModified();
            // a gap of more than GROUP_SESSIONS_GAP starts a new "session" group
            if (lastDate - resDate > GROUP_SESSIONS_GAP) {
                LOG.debug("=== new group ===");
                currentGroup = new ArrayList<CmsResource>();
                result.add(currentGroup);
            }
            currentGroup.add(res);
        }
        return result;
    }
}
package ch.idsia.benchmark.mario.engine.sprites; import ch.idsia.benchmark.mario.engine.Art; import ch.idsia.benchmark.mario.engine.GlobalOptions; import ch.idsia.benchmark.mario.engine.LevelScene; import ch.idsia.benchmark.mario.engine.level.Level; public final class Mario extends Sprite { private final int FractionalPowerUpTime = 0; public static final String[] MODES = new String[]{"small", "Large", "FIRE"}; // fire = (mode == MODE.MODE_FIRE); public static final int KEY_LEFT = 0; public static final int KEY_RIGHT = 1; public static final int KEY_DOWN = 2; public static final int KEY_JUMP = 3; public static final int KEY_SPEED = 4; public static final int KEY_UP = 5; public static final int KEY_PAUSE = 6; public static final int KEY_DUMP_CURRENT_WORLD = 7; public static final int KEY_LIFE_UP = 8; public static final int KEY_WIN = 9; public static final int STATUS_RUNNING = 2; public static final int STATUS_WIN = 1; public static final int STATUS_DEAD = 0; public static boolean large = false; public static boolean fire = false; public static int coins = 0; public static int hiddenBlocksFound = 0; public static int collisionsWithCreatures = 0; public static int mushroomsDevoured; public static int flowersDevoured; private static boolean isMarioInvulnerable; private int status = STATUS_RUNNING; // for racoon when carrying the shell private int prevWPic; private int prevxPicO; private int prevyPicO; private int prevHPic; private boolean isRacoon; public static void resetStatic(int marioMode) { large = marioMode > 0; fire = marioMode == 2; coins = 0; hiddenBlocksFound = 0; mushroomsDevoured = 0; flowersDevoured = 0; } public int getMode() { return ((large) ? 1 : 0) + ((fire) ? 
1 : 0); } // private static float GROUND_INERTIA = 0.89f; // private static float AIR_INERTIA = 0.89f; public boolean[] keys; public boolean[] cheatKeys; private float runTime; boolean wasOnGround = false; boolean onGround = false; private boolean mayJump = false; private boolean ducking = false; private boolean sliding = false; private int jumpTime = 0; private float xJumpSpeed; private float yJumpSpeed; private boolean canShoot = false; int width = 4; int height = 24; public LevelScene world; public int facing; private int powerUpTime = 0; // exclude pause for rendering changes public int xDeathPos, yDeathPos; public int deathTime = 0; public int winTime = 0; private int invulnerableTime = 0; public Sprite carried = null; // private static Mario instance; public Mario(LevelScene world) { kind = KIND_MARIO; // Mario.instance = this; // TODO: refactor: rename to levelScene this.world = world; x = 32; y = 0; facing = 1; setMode(Mario.large, Mario.fire); } private boolean lastLarge; private boolean lastFire; private boolean newLarge; private boolean newFire; private void blink(boolean on) { Mario.large = on ? newLarge : lastLarge; Mario.fire = on ? 
newFire : lastFire; // System.out.println("on = " + on); if (large) { sheet = Art.mario; if (fire) sheet = Art.fireMario; xPicO = 16; yPicO = 31; wPic = hPic = 32; } else { sheet = Art.smallMario; xPicO = 8; yPicO = 15; wPic = hPic = 16; } savePrevState(); calcPic(); } void setMode(boolean large, boolean fire) { // System.out.println("large = " + large); if (fire) large = true; if (!large) fire = false; lastLarge = Mario.large; lastFire = Mario.fire; Mario.large = large; Mario.fire = fire; newLarge = Mario.large; newFire = Mario.fire; blink(true); } public void setRacoon(boolean isRacoon) { // if (true) // return; this.isRacoon = isRacoon; // this.setMode(isRacoon, false); // System.out.println("isRacoon = " + isRacoon); // System.out.println("Art.racoonmario.length = " + Art.racoonmario.length); // System.out.println("Art.racoonmario[0].length = " + Art.racoonmario[0].length); if (isRacoon) { savePrevState(); xPicO = 16; yPicO = 31; wPic = hPic = 32; this.sheet = Art.racoonmario; } else { this.sheet = prevSheet; this.xPicO = this.prevxPicO; this.yPicO = this.prevyPicO; wPic = prevWPic; hPic = prevHPic; // blink(false); } } private void savePrevState() { this.prevSheet = this.sheet; prevWPic = wPic; prevHPic = hPic; this.prevxPicO = xPicO; this.prevyPicO = yPicO; } public void move() { if (GlobalOptions.isFly) { if (keys[KEY_DOWN]) ya = 15; else ya = 0; if (keys[KEY_UP]) ya = -10; else if(!keys[KEY_DOWN]) ya = 0; if (keys[KEY_RIGHT]) xa = 15; else xa = 0; if (keys[KEY_LEFT]) xa = -10; else if (!keys[KEY_RIGHT]) xa = 0; } ++world.level.marioTrace[this.mapX][this.mapY]; if (winTime > 0) { winTime++; xa = 0; ya = 0; return; } if (deathTime > 0) { deathTime++; if (deathTime < 11) { xa = 0; ya = 0; } else if (deathTime == 11) { ya = -15; } else { ya += 2; } x += xa; y += ya; return; } if (powerUpTime != 0) { if (powerUpTime > 0) { powerUpTime blink(((powerUpTime / 3) & 1) == 0); } else { powerUpTime++; blink(((-powerUpTime / 3) & 1) == 0); } if (powerUpTime == 0) 
world.paused = false; calcPic(); return; } if (invulnerableTime > 0) invulnerableTime visible = ((invulnerableTime / 2) & 1) == 0; wasOnGround = onGround; float sideWaysSpeed = keys[KEY_SPEED] ? 1.2f : 0.6f; // float sideWaysSpeed = onGround ? 2.5f : 1.2f; if (onGround) { ducking = keys[KEY_DOWN] && large; } if (xa > 2) { facing = 1; } if (xa < -2) { facing = -1; } if (keys[KEY_JUMP] || (jumpTime < 0 && !onGround && !sliding)) { if (jumpTime < 0) { xa = xJumpSpeed; ya = -jumpTime * yJumpSpeed; jumpTime++; } else if (onGround && mayJump) { xJumpSpeed = 0; yJumpSpeed = -1.9f; jumpTime = 7; ya = jumpTime * yJumpSpeed; onGround = false; sliding = false; } else if (sliding && mayJump) { xJumpSpeed = -facing * 6.0f; yJumpSpeed = -2.0f; jumpTime = -6; xa = xJumpSpeed; ya = -jumpTime * yJumpSpeed; onGround = false; sliding = false; facing = -facing; } else if (jumpTime > 0) { xa += xJumpSpeed; ya = jumpTime * yJumpSpeed; jumpTime } } else { jumpTime = 0; } if (keys[KEY_LEFT] && !ducking) { if (facing == 1) sliding = false; xa -= sideWaysSpeed; if (jumpTime >= 0) facing = -1; } if (keys[KEY_RIGHT] && !ducking) { if (facing == -1) sliding = false; xa += sideWaysSpeed; if (jumpTime >= 0) facing = 1; } if ((!keys[KEY_LEFT] && !keys[KEY_RIGHT]) || ducking || ya < 0 || onGround) { sliding = false; } if (keys[KEY_SPEED] && canShoot && Mario.fire && world.fireballsOnScreen < 2) { world.addSprite(new Fireball(world, x + facing * 6, y - 20, facing)); } // Cheats: if (GlobalOptions.isPowerRestoration && keys[KEY_SPEED] && (!Mario.large || !Mario.fire)) setMode(true, true); // if (cheatKeys[KEY_LIFE_UP]) // this.lives++; // TODO: remove this and clean up "easter eggs", redundant due to "SPACE" for the whole world and "-le 0" to disable creatures world.paused = GlobalOptions.isPauseWorld; if (cheatKeys[KEY_WIN]) win(); // if (keys[KEY_DUMP_CURRENT_WORLD]) // try { // System.out.println("DUMP:"); //// world.getTextObservationAroundMario(System.out); // //world.level.save(System.out); // 
System.out.println("DUMPED:"); // } catch (IOException e) { // e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. canShoot = !keys[KEY_SPEED]; mayJump = (onGround || sliding) && !keys[KEY_JUMP]; xFlipPic = facing == -1; runTime += (Math.abs(xa)) + 5; if (Math.abs(xa) < 0.5f) { runTime = 0; xa = 0; } calcPic(); if (sliding) { for (int i = 0; i < 1; i++) { world.addSprite(new Sparkle((int) (x + Math.random() * 4 - 2) + facing * 8, (int) (y + Math.random() * 4) - 24, (float) (Math.random() * 2 - 1), (float) Math.random() * 1, 0, 1, 5)); } ya *= 0.5f; } onGround = false; move(xa, 0); move(0, ya); if (y > world.level.height * 16 + 16) die("Reason: Gap"); if (x < 0) { x = 0; xa = 0; } if (x > world.level.xExit * 16 - 8 && x < world.level.xExit * 16 + 2 * 16 && y < world.level.yExit * 16) { x = world.level.xExit * 16; win(); } if (x > world.level.length * 16) { x = world.level.length * 16; xa = 0; } // TODO: move to variable (gravity?) ya *= 0.85f; if (onGround) { xa *= GROUND_INERTIA; } else { xa *= AIR_INERTIA; } if (!onGround) { ya += 3; } if (carried != null) { carried.x = x + facing * 8; //TODO : move to cellSize_2 = cellSize/2; carried.y = y - 2; if (!keys[KEY_SPEED]) { carried.release(this); carried = null; setRacoon(false); // System.out.println("carried = " + carried); } // System.out.println("sideWaysSpeed = " + sideWaysSpeed); } } private void calcPic() { int runFrame; if (large || isRacoon) { runFrame = ((int) (runTime / 20)) % 4; if (runFrame == 3) runFrame = 1; if (carried == null && Math.abs(xa) > 10) runFrame += 3; if (carried != null) runFrame += 10; if (!onGround) { if (carried != null) runFrame = 12; else if (Math.abs(xa) > 10) runFrame = 7; else runFrame = 6; } } else { runFrame = ((int) (runTime / 20)) % 2; if (carried == null && Math.abs(xa) > 10) runFrame += 2; if (carried != null) runFrame += 8; if (!onGround) { if (carried != null) runFrame = 9; else if (Math.abs(xa) > 10) runFrame = 5; else runFrame = 
4; } } if (onGround && ((facing == -1 && xa > 0) || (facing == 1 && xa < 0))) { if (xa > 1 || xa < -1) runFrame = large ? 9 : 7; if (xa > 3 || xa < -3) { for (int i = 0; i < 3; i++) { world.addSprite(new Sparkle((int) (x + Math.random() * 8 - 4), (int) (y + Math.random() * 4), (float) (Math.random() * 2 - 1), (float) Math.random() * -1, 0, 1, 5)); } } } if (large) { if (ducking) runFrame = 14; height = ducking ? 12 : 24; } else { height = 12; } xPic = runFrame; } private boolean move(float xa, float ya) { while (xa > 8) { if (!move(8, 0)) return false; xa -= 8; } while (xa < -8) { if (!move(-8, 0)) return false; xa += 8; } while (ya > 8) { if (!move(0, 8)) return false; ya -= 8; } while (ya < -8) { if (!move(0, -8)) return false; ya += 8; } boolean collide = false; if (ya > 0) { if (isBlocking(x + xa - width, y + ya, xa, 0)) collide = true; else if (isBlocking(x + xa + width, y + ya, xa, 0)) collide = true; else if (isBlocking(x + xa - width, y + ya + 1, xa, ya)) collide = true; else if (isBlocking(x + xa + width, y + ya + 1, xa, ya)) collide = true; } if (ya < 0) { if (isBlocking(x + xa, y + ya - height, xa, ya)) collide = true; else if (collide || isBlocking(x + xa - width, y + ya - height, xa, ya)) collide = true; else if (collide || isBlocking(x + xa + width, y + ya - height, xa, ya)) collide = true; } if (xa > 0) { sliding = true; if (isBlocking(x + xa + width, y + ya - height, xa, ya)) collide = true; else sliding = false; if (isBlocking(x + xa + width, y + ya - height / 2, xa, ya)) collide = true; else sliding = false; if (isBlocking(x + xa + width, y + ya, xa, ya)) collide = true; else sliding = false; } if (xa < 0) { sliding = true; if (isBlocking(x + xa - width, y + ya - height, xa, ya)) collide = true; else sliding = false; if (isBlocking(x + xa - width, y + ya - height / 2, xa, ya)) collide = true; else sliding = false; if (isBlocking(x + xa - width, y + ya, xa, ya)) collide = true; else sliding = false; } if (collide) { if (xa < 0) { x = (int) ((x - 
width) / 16) * 16 + width; this.xa = 0; } if (xa > 0) { x = (int) ((x + width) / 16 + 1) * 16 - width - 1; this.xa = 0; } if (ya < 0) { y = (int) ((y - height) / 16) * 16 + height; jumpTime = 0; this.ya = 0; } if (ya > 0) { y = (int) ((y - 1) / 16 + 1) * 16 - 1; onGround = true; } return false; } else { x += xa; y += ya; return true; } } private boolean isBlocking(float _x, float _y, float xa, float ya) { int x = (int) (_x / 16); int y = (int) (_y / 16); if (x == (int) (this.x / 16) && y == (int) (this.y / 16)) return false; boolean blocking = world.level.isBlocking(x, y, xa, ya); byte block = world.level.getBlock(x, y); if (((Level.TILE_BEHAVIORS[block & 0xff]) & Level.BIT_PICKUPABLE) > 0) { Mario.gainCoin(); world.level.setBlock(x, y, (byte) 0); for (int xx = 0; xx < 2; xx++) for (int yy = 0; yy < 2; yy++) world.addSprite(new Sparkle(x * 16 + xx * 8 + (int) (Math.random() * 8), y * 16 + yy * 8 + (int) (Math.random() * 8), 0, 0, 0, 2, 5)); } if (blocking && ya < 0) { world.bump(x, y, large); } return blocking; } public void stomp(Enemy enemy) { if (deathTime > 0 || world.paused) return; float targetY = enemy.y - enemy.height / 2; move(0, targetY - y); xJumpSpeed = 0; yJumpSpeed = -1.9f; jumpTime = 8; ya = jumpTime * yJumpSpeed; onGround = false; sliding = false; invulnerableTime = 1; } public void stomp(Shell shell) { if (deathTime > 0 || world.paused) return; if (keys[KEY_SPEED] && shell.facing == 0) { carried = shell; shell.carried = true; setRacoon(true); } else { float targetY = shell.y - shell.height / 2; move(0, targetY - y); xJumpSpeed = 0; yJumpSpeed = -1.9f; jumpTime = 8; ya = jumpTime * yJumpSpeed; onGround = false; sliding = false; invulnerableTime = 1; } } public void getHurt(final int spriteKind) { if (deathTime > 0 || world.paused || isMarioInvulnerable) return; if (invulnerableTime > 0) return; ++collisionsWithCreatures; if (large) { world.paused = true; powerUpTime = -3 * FractionalPowerUpTime; if (fire) { world.mario.setMode(true, false); } else { 
world.mario.setMode(false, false); } invulnerableTime = 32; } else { die("Collision with a creature " + spriteKind); // TODO: substitue by named creature kind } } public void win() { xDeathPos = (int) x; yDeathPos = (int) y; world.paused = true; winTime = 1; status = Mario.STATUS_WIN; } public void die(String reasonOfDeath) { xDeathPos = (int) x; yDeathPos = (int) y; world.paused = true; deathTime = 25; status = Mario.STATUS_DEAD; // TODO: [M] refactor reasons of death to enum {COLLISION, GAP, TIMEOUT} world.addMemoMessage("Reason of death: " + reasonOfDeath); } public void getFlower() { if (deathTime > 0 || world.paused) return; if (!fire) { world.paused = true; powerUpTime = 3 * FractionalPowerUpTime; world.mario.setMode(true, true); } else { Mario.gainCoin(); } ++flowersDevoured; } public void getMushroom() { if (deathTime > 0 || world.paused) return; if (!large) { world.paused = true; powerUpTime = 3 * FractionalPowerUpTime; world.mario.setMode(true, false); } else { Mario.gainCoin(); } ++mushroomsDevoured; } public void kick(Shell shell) { // if (deathTime > 0 || world.paused) return; if (keys[KEY_SPEED]) { carried = shell; shell.carried = true; setRacoon(true); System.out.println("shell = " + shell); } else { invulnerableTime = 1; } } public void stomp(BulletBill bill) { if (deathTime > 0 || world.paused) return; float targetY = bill.y - bill.height / 2; move(0, targetY - y); xJumpSpeed = 0; yJumpSpeed = -1.9f; jumpTime = 8; ya = jumpTime * yJumpSpeed; onGround = false; sliding = false; invulnerableTime = 1; } public static void gainCoin() { coins++; // if (coins % 100 == 0) // get1Up(); } public static void gainHiddenBlock() { ++hiddenBlocksFound; } public int getStatus() { return status; } public boolean isOnGround() { return onGround; } public boolean mayJump() { return mayJump; } public boolean isCanShoot() { return canShoot; } public static void setMarioInvulnerable(boolean marioInvulnerable) { isMarioInvulnerable = marioInvulnerable; } } // public byte 
getKeyMask() // int mask = 0; // for (int i = 0; i < 7; i++) // if (keys[i]) mask |= (1 << i); // return (byte) mask; // public void setKeys(byte mask) // for (int i = 0; i < 7; i++) // keys[i] = (mask & (1 << i)) > 0; // public static void get1Up() // lives++;
import java.time.format.*;
import java.time.*;
import java.io.*;
import java.nio.file.*;
import java.net.*;
import java.util.*;

/**
 * Small diagnostic client: reads the target port from {@code port.txt}
 * and prints the body of {@code http://localhost:<port>/dump} to stdout.
 */
public class DumpOpenFiles {
    public static void main(String[] args) throws Exception {
        String url = "http://localhost:" + parsePort() + "/dump";
        System.out.println("Sending request to " + url);
        // Decode the response with an explicit charset; the no-charset
        // InputStreamReader constructor falls back to the platform default.
        try (BufferedReader response = new BufferedReader(new InputStreamReader(
                new URL(url).openStream(), java.nio.charset.StandardCharsets.UTF_8))) {
            String line;
            while ((line = response.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

    /**
     * Returns the first line of {@code port.txt}, trimmed.
     * If the file is missing or empty, prints a notice and exits with
     * status 0 (treated as "skip", not failure). The previous version
     * threw IndexOutOfBoundsException on an empty file.
     */
    private static String parsePort() throws Exception {
        Path portFile = Paths.get("port.txt");
        if (Files.isRegularFile(portFile)) {
            List<String> lines = Files.readAllLines(portFile);
            if (!lines.isEmpty() && !lines.get(0).trim().isEmpty()) {
                return lines.get(0).trim();
            }
        }
        System.err.println("Port not found, skip");
        System.exit(0);
        return ""; // unreachable; satisfies the compiler
    }
}
package client.command.commands.gm3; import client.command.Command; import client.MapleClient; import client.MapleCharacter; import net.server.Server; import server.events.gm.MapleEvent; import tools.MaplePacketCreator; public class StartEventCommand extends Command { { setDescription(""); } @Override public void execute(MapleClient c, String[] params) { MapleCharacter player = c.getPlayer(); int players = 50; if (params.length > 1) players = Integer.parseInt(params[0]); c.getChannelServer().setEvent(new MapleEvent(player.getMapId(), players)); Server.getInstance().broadcastMessage(c.getWorld(), MaplePacketCreator.earnTitleMessage( "[Event] An event has started on " + player.getMap().getMapName() + " and will allow " + players + " players to join. Type @joinevent to participate.")); Server.getInstance().broadcastMessage(c.getWorld(), MaplePacketCreator.serverNotice(6, "[Event] An event has started on " + player.getMap().getMapName() + " and will allow " + players + " players to join. Type @joinevent to participate.")); } }