answer
stringlengths
17
10.2M
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:20-03-29"); this.setApiVersion("15.19.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platforms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:22-03-20"); this.setApiVersion("18.1.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef.Property;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.TimeZone;

import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;

/**
 * Wire-serialization and extraction tests for {@link CompositeKeyExtractor}.
 */
public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase<CompositeKeyExtractor> {

    public static CompositeKeyExtractor randomCompositeKeyExtractor() {
        return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomSafeTimeZone());
    }

    @Override
    protected CompositeKeyExtractor createTestInstance() {
        return randomCompositeKeyExtractor();
    }

    @Override
    protected Reader<CompositeKeyExtractor> instanceReader() {
        return CompositeKeyExtractor::new;
    }

    @Override
    protected CompositeKeyExtractor mutateInstance(CompositeKeyExtractor instance) throws IOException {
        return new CompositeKeyExtractor(instance.key() + "mutated", instance.property(), instance.timeZone());
    }

    public void testExtractBucketCount() {
        Bucket bucket = new TestBucket(emptyMap(), randomLong(), new Aggregations(emptyList()));
        CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.COUNT, randomTimeZone());
        assertEquals(bucket.getDocCount(), extractor.extract(bucket));
    }

    public void testExtractKey() {
        CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, null);
        Object value = new Object();
        Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new Aggregations(emptyList()));
        assertEquals(value, extractor.extract(bucket));
    }

    public void testExtractDate() {
        CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomSafeTimeZone());
        long millis = System.currentTimeMillis();
        Bucket bucket = new TestBucket(singletonMap(extractor.key(), millis), randomLong(), new Aggregations(emptyList()));
        assertEquals(new DateTime(millis, DateTimeZone.forTimeZone(extractor.timeZone())), extractor.extract(bucket));
    }

    public void testExtractIncorrectDateKey() {
        CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomTimeZone());
        Object value = new Object();
        Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new Aggregations(emptyList()));
        SqlIllegalArgumentException exception = expectThrows(SqlIllegalArgumentException.class, () -> extractor.extract(bucket));
        assertEquals("Invalid date key returned: " + value, exception.getMessage());
    }

    /**
     * We need to exclude SystemV/* time zones because they cannot be converted
     * back to DateTimeZone which we currently still need to do internally,
     * e.g. in bwc serialization and in the extract() method
     */
    private static TimeZone randomSafeTimeZone() {
        return randomValueOtherThanMany(tz -> tz.getID().startsWith("SystemV"), () -> randomTimeZone());
    }
}
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:20-06-06"); this.setApiVersion("16.3.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
package edu.umd.cs.findbugs.classfile.impl;

import java.io.DataInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.jar.Attributes;
import java.util.jar.Manifest;

import edu.umd.cs.findbugs.SystemProperties;
import edu.umd.cs.findbugs.classfile.CheckedAnalysisException;
import edu.umd.cs.findbugs.classfile.ClassDescriptor;
import edu.umd.cs.findbugs.classfile.IClassFactory;
import edu.umd.cs.findbugs.classfile.IClassPath;
import edu.umd.cs.findbugs.classfile.IClassPathBuilder;
import edu.umd.cs.findbugs.classfile.IClassPathBuilderProgress;
import edu.umd.cs.findbugs.classfile.ICodeBase;
import edu.umd.cs.findbugs.classfile.ICodeBaseEntry;
import edu.umd.cs.findbugs.classfile.ICodeBaseIterator;
import edu.umd.cs.findbugs.classfile.ICodeBaseLocator;
import edu.umd.cs.findbugs.classfile.IErrorLogger;
import edu.umd.cs.findbugs.classfile.IScannableCodeBase;
import edu.umd.cs.findbugs.classfile.InvalidClassFileFormatException;
import edu.umd.cs.findbugs.classfile.ResourceNotFoundException;
import edu.umd.cs.findbugs.classfile.analysis.ClassInfo;
import edu.umd.cs.findbugs.classfile.analysis.ClassNameAndSuperclassInfo;
import edu.umd.cs.findbugs.classfile.engine.ClassParser;
import edu.umd.cs.findbugs.classfile.engine.ClassParserInterface;
import edu.umd.cs.findbugs.io.IO;
import edu.umd.cs.findbugs.util.Archive;

/**
 * Implementation of IClassPathBuilder.
 *
 * @author David Hovemeyer
 */
public class ClassPathBuilder implements IClassPathBuilder {
    private static final boolean VERBOSE = SystemProperties.getBoolean("findbugs2.builder.verbose");
    private static final boolean DEBUG = VERBOSE || SystemProperties.getBoolean("findbugs2.builder.debug");
    private static final boolean NO_PARSE_CLASS_NAMES = SystemProperties.getBoolean("findbugs2.builder.noparseclassnames");

    /**
     * Worklist item.
     * Represents one codebase to be processed during the
     * classpath construction algorithm.
     */
    static class WorkListItem {
        private ICodeBaseLocator codeBaseLocator;
        private boolean isAppCodeBase;
        private int howDiscovered;

        @Override
        public String toString() {
            return "WorkListItem(" + codeBaseLocator + ", " + isAppCodeBase + ", " + howDiscovered + ")";
        }

        public WorkListItem(ICodeBaseLocator codeBaseLocator, boolean isApplication, int howDiscovered) {
            this.codeBaseLocator = codeBaseLocator;
            this.isAppCodeBase = isApplication;
            this.howDiscovered = howDiscovered;
        }

        public ICodeBaseLocator getCodeBaseLocator() {
            return codeBaseLocator;
        }

        public boolean isAppCodeBase() {
            return isAppCodeBase;
        }

        /**
         * @return Returns the howDiscovered.
         */
        public int getHowDiscovered() {
            return howDiscovered;
        }
    }

    /**
     * A codebase discovered during classpath building.
     */
    static class DiscoveredCodeBase {
        ICodeBase codeBase;
        LinkedList<ICodeBaseEntry> resourceList;

        public DiscoveredCodeBase(ICodeBase codeBase) {
            this.codeBase = codeBase;
            this.resourceList = new LinkedList<ICodeBaseEntry>();
        }

        public ICodeBase getCodeBase() {
            return codeBase;
        }

        public LinkedList<ICodeBaseEntry> getResourceList() {
            return resourceList;
        }

        public void addCodeBaseEntry(ICodeBaseEntry entry) {
            resourceList.add(entry);
        }

        public ICodeBaseIterator iterator() throws InterruptedException {
            if (codeBase instanceof IScannableCodeBase) {
                return ((IScannableCodeBase) codeBase).iterator();
            } else {
                // Non-scannable codebases contribute no entries
                return new ICodeBaseIterator() {
                    public boolean hasNext() throws InterruptedException {
                        return false;
                    }

                    public ICodeBaseEntry next() throws InterruptedException {
                        throw new UnsupportedOperationException();
                    }
                };
            }
        }
    }

    // Fields
    private IClassFactory classFactory;
    private IErrorLogger errorLogger;
    private LinkedList<WorkListItem> projectWorkList;
    private LinkedList<DiscoveredCodeBase> discoveredCodeBaseList;
    private Map<String, DiscoveredCodeBase> discoveredCodeBaseMap;
    private LinkedList<ClassDescriptor> appClassList;
    private boolean scanNestedArchives;

    /**
     * Constructor.
     *
     * @param classFactory the class factory
     * @param errorLogger  the error logger
     */
    ClassPathBuilder(IClassFactory classFactory, IErrorLogger errorLogger) {
        this.classFactory = classFactory;
        this.errorLogger = errorLogger;
        this.projectWorkList = new LinkedList<WorkListItem>();
        this.discoveredCodeBaseList = new LinkedList<DiscoveredCodeBase>();
        this.discoveredCodeBaseMap = new HashMap<String, DiscoveredCodeBase>();
        this.appClassList = new LinkedList<ClassDescriptor>();
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.classfile.IClassPathBuilder#addCodeBase(edu.umd.cs.findbugs.classfile.ICodeBaseLocator, boolean)
     */
    public void addCodeBase(ICodeBaseLocator locator, boolean isApplication) {
        addToWorkList(projectWorkList, new WorkListItem(locator, isApplication, ICodeBase.SPECIFIED));
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.classfile.IClassPathBuilder#scanNestedArchives(boolean)
     */
    public void scanNestedArchives(boolean scanNestedArchives) {
        this.scanNestedArchives = scanNestedArchives;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.classfile.IClassPathBuilder#build(edu.umd.cs.findbugs.classfile.IClassPath, edu.umd.cs.findbugs.classfile.IClassPathBuilderProgress)
     */
    public void build(IClassPath classPath, IClassPathBuilderProgress progress)
            throws CheckedAnalysisException, IOException, InterruptedException {
        // Discover all directly and indirectly referenced codebases
        processWorkList(classPath, projectWorkList, progress);

        // If not already located, try to locate any additional codebases
        // containing classes required for analysis.
        locateCodebasesRequiredForAnalysis(classPath, progress);

        // Add all discovered codebases to the classpath
        for (DiscoveredCodeBase discoveredCodeBase : discoveredCodeBaseList) {
            classPath.addCodeBase(discoveredCodeBase.getCodeBase());
        }

        Set<ClassDescriptor> appClassSet = new HashSet<ClassDescriptor>();

        // Build collection of all application classes.
        // Also, add resource name -> codebase entry mappings for application classes.
        for (DiscoveredCodeBase discoveredCodeBase : discoveredCodeBaseList) {
            if (!discoveredCodeBase.getCodeBase().isApplicationCodeBase()) {
                continue;
            }

            codeBaseEntryLoop:
            for (ICodeBaseIterator i = discoveredCodeBase.iterator(); i.hasNext(); ) {
                ICodeBaseEntry entry = i.next();
                if (!ClassDescriptor.isClassResource(entry.getResourceName())) {
                    continue;
                }

                ClassDescriptor classDescriptor = entry.getClassDescriptor();
                if (classDescriptor == null) {
                    throw new IllegalStateException();
                }

                if (appClassSet.contains(classDescriptor)) {
                    // An earlier entry takes precedence over this class
                    continue codeBaseEntryLoop;
                }
                appClassSet.add(classDescriptor);
                appClassList.add(classDescriptor);

                classPath.mapResourceNameToCodeBaseEntry(entry.getResourceName(), entry);
            }
        }

        if (DEBUG) {
            System.out.println("Classpath:");
            dumpCodeBaseList(classPath.appCodeBaseIterator(), "Application codebases");
            dumpCodeBaseList(classPath.auxCodeBaseIterator(), "Auxiliary codebases");
        }
    }

    /**
     * Make an effort to find the codebases containing any files required for analysis.
     */
    private void locateCodebasesRequiredForAnalysis(IClassPath classPath, IClassPathBuilderProgress progress)
            throws InterruptedException, IOException, ResourceNotFoundException {
        boolean foundJavaLangObject = false;
        boolean foundFindBugsAnnotations = false;
        boolean foundJSR305Annotations = false;

        for (DiscoveredCodeBase discoveredCodeBase : discoveredCodeBaseList) {
            if (!foundJavaLangObject) {
                foundJavaLangObject = probeCodeBaseForResource(discoveredCodeBase, "java/lang/Object.class");
            }
            if (!foundFindBugsAnnotations) {
                foundFindBugsAnnotations = probeCodeBaseForResource(discoveredCodeBase,
                        "edu/umd/cs/findbugs/annotations/Nonnull.class");
            }
            if (!foundJSR305Annotations) {
                foundJSR305Annotations = probeCodeBaseForResource(discoveredCodeBase,
                        "javax/annotation/meta/TypeQualifier.class");
            }
        }

        if (!foundJavaLangObject) {
            processWorkList(classPath, buildSystemCodebaseList(), progress);
        }
        if (!foundFindBugsAnnotations) {
            processWorkList(classPath, buildFindBugsAnnotationCodebaseList(), progress);
        }
        if (!foundJSR305Annotations) {
            processWorkList(classPath, buildJSR305AnnotationsCodebaseList(), progress);
        }
    }

    /**
     * Probe a codebase to see if a given resource exists in that code base.
     *
     * @param discoveredCodeBase the codebase to probe
     * @param resourceName       name of a resource
     * @return true if the resource exists in the codebase, false if not
     */
    private boolean probeCodeBaseForResource(DiscoveredCodeBase discoveredCodeBase, String resourceName) {
        try {
            // lookupResource throws ResourceNotFoundException when absent;
            // the entry itself is not needed here.
            discoveredCodeBase.getCodeBase().lookupResource(resourceName);
            return true;
        } catch (ResourceNotFoundException e) {
            return false;
        }
    }

    private void dumpCodeBaseList(Iterator<? extends ICodeBase> i, String desc)
            throws InterruptedException {
        System.out.println("  " + desc + ":");
        while (i.hasNext()) {
            ICodeBase codeBase = i.next();
            System.out.println("    " + codeBase.getCodeBaseLocator().toString());
            if (codeBase.containsSourceFiles()) {
                System.out.println("      * contains source files");
            }
        }
    }

    private LinkedList<WorkListItem> buildSystemCodebaseList() {
        // This method is based on the
        // org.apache.bcel.util.ClassPath.getClassPath()
        // method.

        LinkedList<WorkListItem> workList = new LinkedList<WorkListItem>();

        String bootClassPath = SystemProperties.getProperty("sun.boot.class.path");
        // Seed worklist with system codebases.
        // addWorkListItemsForClasspath(workList, SystemProperties.getProperty("java.class.path"));
        addWorkListItemsForClasspath(workList, bootClassPath);
        String extPath = SystemProperties.getProperty("java.ext.dirs");
        if (extPath != null) {
            StringTokenizer st = new StringTokenizer(extPath, File.pathSeparator);
            while (st.hasMoreTokens()) {
                String extDir = st.nextToken();
                addWorkListItemsForExtDir(workList, extDir);
            }
        }

        return workList;
    }

    /**
     * Create a worklist that will add the FindBugs lib/annotations.jar to the classpath.
     */
    private LinkedList<WorkListItem> buildFindBugsAnnotationCodebaseList() {
        return createFindBugsLibWorkList("annotations.jar");
    }

    /**
     * Create a worklist that will add the FindBugs lib/jsr305.jar to the classpath.
     */
    private LinkedList<WorkListItem> buildJSR305AnnotationsCodebaseList() {
        return createFindBugsLibWorkList("jsr305.jar");
    }

    private LinkedList<WorkListItem> createFindBugsLibWorkList(String jarFileName) {
        LinkedList<WorkListItem> workList = new LinkedList<WorkListItem>();

        String findbugsHome = SystemProperties.getProperty("findbugs.home");
        if (findbugsHome != null) {
            ICodeBaseLocator codeBaseLocator = classFactory.createFilesystemCodeBaseLocator(
                    findbugsHome + File.separator + "lib" + File.separator + jarFileName);
            workList.add(new WorkListItem(codeBaseLocator, false, ICodeBase.IN_SYSTEM_CLASSPATH));
        }

        return workList;
    }

    /**
     * Add worklist items from given system classpath.
     *
     * @param workList the worklist
     * @param path     a system classpath
     */
    private void addWorkListItemsForClasspath(LinkedList<WorkListItem> workList, String path) {
        if (path == null) {
            return;
        }

        StringTokenizer st = new StringTokenizer(path, File.pathSeparator);
        while (st.hasMoreTokens()) {
            String entry = st.nextToken();
            if (DEBUG) {
                System.out.println("System classpath entry: " + entry);
            }
            addToWorkList(workList, new WorkListItem(
                    classFactory.createFilesystemCodeBaseLocator(entry), false, ICodeBase.IN_SYSTEM_CLASSPATH));
        }
    }

    /**
     * Add worklist items from given extensions directory.
     *
     * @param workList the worklist
     * @param extDir   an extensions directory
     */
    private void addWorkListItemsForExtDir(LinkedList<WorkListItem> workList, String extDir) {
        File dir = new File(extDir);
        File[] fileList = dir.listFiles(new FileFilter() {
            /* (non-Javadoc)
             * @see java.io.FileFilter#accept(java.io.File)
             */
            public boolean accept(File pathname) {
                // Bug fix: test the file's OWN name for an archive extension.
                // The previous code tested pathname.getParent() (the directory
                // path), which never matches and silently skipped every jar
                // in the extensions directory.
                return Archive.isArchiveFileName(pathname.getName());
            }
        });
        if (fileList == null) {
            return;
        }

        for (File archive : fileList) {
            addToWorkList(workList, new WorkListItem(
                    classFactory.createFilesystemCodeBaseLocator(archive.getPath()), false,
                    ICodeBase.IN_SYSTEM_CLASSPATH));
        }
    }

    /**
     * Process classpath worklist items.
     * We will attempt to find all nested archives and
     * Class-Path entries specified in Jar manifests.  This should give us
     * as good an idea as possible of all of the classes available (and
     * which are part of the application).
     *
     * @param workList the worklist to process
     * @param progress IClassPathBuilderProgress callback
     * @throws InterruptedException
     * @throws IOException
     * @throws ResourceNotFoundException
     */
    private void processWorkList(
            IClassPath classPath,
            LinkedList<WorkListItem> workList, IClassPathBuilderProgress progress)
            throws InterruptedException, IOException, ResourceNotFoundException {
        // Build the classpath, scanning codebases for nested archives
        // and referenced codebases.
        while (!workList.isEmpty()) {
            WorkListItem item = workList.removeFirst();
            if (DEBUG) {
                System.out.println("Working: " + item.getCodeBaseLocator());
            }

            DiscoveredCodeBase discoveredCodeBase;

            // See if we have encountered this codebase before
            discoveredCodeBase = discoveredCodeBaseMap.get(item.getCodeBaseLocator().toString());
            if (discoveredCodeBase != null) {
                // If the codebase is not an app codebase and
                // the worklist item says that it is an app codebase,
                // change it.  Otherwise, we have nothing to do.
                if (!discoveredCodeBase.getCodeBase().isApplicationCodeBase() && item.isAppCodeBase()) {
                    discoveredCodeBase.getCodeBase().setApplicationCodeBase(true);
                }

                continue;
            }

            // If we are working on an application codebase,
            // then failing to open/scan it is a fatal error.
            // We issue warnings about problems with aux codebases,
            // but continue anyway.

            try {
                // Open the codebase and add it to the classpath
                discoveredCodeBase = new DiscoveredCodeBase(item.getCodeBaseLocator().openCodeBase());
                discoveredCodeBase.getCodeBase().setApplicationCodeBase(item.isAppCodeBase());
                discoveredCodeBase.getCodeBase().setHowDiscovered(item.getHowDiscovered());

                // Note that this codebase has been visited
                discoveredCodeBaseMap.put(item.getCodeBaseLocator().toString(), discoveredCodeBase);
                discoveredCodeBaseList.addLast(discoveredCodeBase);

                // If it is a scannable codebase, check it for nested archives.
                // In addition, if it is an application codebase then
                // make a list of application classes.
                if (discoveredCodeBase.getCodeBase() instanceof IScannableCodeBase
                        && discoveredCodeBase.codeBase.isApplicationCodeBase()) {
                    scanCodebase(classPath, workList, discoveredCodeBase);
                }

                // Check for a Jar manifest for additional aux classpath entries.
                scanJarManifestForClassPathEntries(workList, discoveredCodeBase.getCodeBase());
            } catch (IOException e) {
                if (item.isAppCodeBase()) {
                    throw e;
                } else if (item.getHowDiscovered() == ICodeBase.SPECIFIED) {
                    errorLogger.logError("Cannot open codebase " + item.getCodeBaseLocator(), e);
                }
            } catch (ResourceNotFoundException e) {
                if (item.isAppCodeBase()) {
                    throw e;
                } else if (item.getHowDiscovered() == ICodeBase.SPECIFIED) {
                    errorLogger.logError("Cannot open codebase " + item.getCodeBaseLocator(), e);
                }
            }

            if (item.getHowDiscovered() == ICodeBase.SPECIFIED) {
                progress.finishArchive();
            }
        }
    }

    /**
     * Scan given codebase in order to
     * <ul>
     * <li> check the codebase for nested archives
     *      (adding any found to the worklist)
     * <li> build a list of class resources found in the codebase
     * </ul>
     *
     * @param workList           the worklist
     * @param discoveredCodeBase the codebase to scan
     * @throws InterruptedException
     */
    private void scanCodebase(IClassPath classPath, LinkedList<WorkListItem> workList,
            DiscoveredCodeBase discoveredCodeBase)
            throws InterruptedException {
        if (DEBUG) {
            System.out.println("Scanning " + discoveredCodeBase.getCodeBase().getCodeBaseLocator());
        }

        IScannableCodeBase codeBase = (IScannableCodeBase) discoveredCodeBase.getCodeBase();

        ICodeBaseIterator i = codeBase.iterator();
        while (i.hasNext()) {
            ICodeBaseEntry entry = i.next();
            if (VERBOSE) {
                System.out.println("Entry: " + entry.getResourceName());
            }

            if (!NO_PARSE_CLASS_NAMES
                    && codeBase.isApplicationCodeBase()
                    && ClassDescriptor.isClassResource(entry.getResourceName())) {
                parseClassName(entry);
            }

            // Note the resource exists in this codebase
            discoveredCodeBase.addCodeBaseEntry(entry);

            // If resource is a nested archive, add it to the worklist
            if (scanNestedArchives
                    && codeBase.isApplicationCodeBase()
                    && Archive.isArchiveFileName(entry.getResourceName())) {
                if (VERBOSE) {
                    System.out.println("Entry is an archive!");
                }
                ICodeBaseLocator nestedArchiveLocator =
                        classFactory.createNestedArchiveCodeBaseLocator(codeBase, entry.getResourceName());
                addToWorkList(
                        workList,
                        new WorkListItem(nestedArchiveLocator, codeBase.isApplicationCodeBase(), ICodeBase.NESTED));
            }
        }
    }

    /**
     * Attempt to parse data of given resource in order
     * to divine the real name of the class contained in the
     * resource.
     *
     * @param entry the resource
     */
    private void parseClassName(ICodeBaseEntry entry) {
        DataInputStream in = null;
        try {
            in = new DataInputStream(entry.openResource());
            ClassParserInterface parser = new ClassParser(in, null, entry);
            ClassNameAndSuperclassInfo.Builder builder = new ClassNameAndSuperclassInfo.Builder();
            parser.parse(builder);
            entry.overrideResourceName(builder.build().getClassDescriptor().toResourceName());
        } catch (IOException e) {
            errorLogger.logError("Invalid class resource " + entry.getResourceName() + " in " + entry, e);
        } catch (InvalidClassFileFormatException e) {
            errorLogger.logError("Invalid class resource " + entry.getResourceName() + " in " + entry, e);
        } finally {
            IO.close(in);
        }
    }

    /**
     * Check a codebase for a Jar manifest to examine for Class-Path entries.
     *
     * @param workList the worklist
     * @param codeBase the codebase for examine for a Jar manifest
     * @throws IOException
     */
    private void scanJarManifestForClassPathEntries(LinkedList<WorkListItem> workList, ICodeBase codeBase)
            throws IOException {
        try {
            // See if this codebase has a jar manifest
            ICodeBaseEntry manifestEntry = codeBase.lookupResource("META-INF/MANIFEST.MF");

            // Try to read the manifest
            InputStream in = null;
            try {
                in = manifestEntry.openResource();
                Manifest manifest = new Manifest(in);

                Attributes mainAttrs = manifest.getMainAttributes();
                String classPath = mainAttrs.getValue("Class-Path");
                if (classPath != null) {
                    String[] pathList = classPath.split("\\s+");

                    for (String path : pathList) {
                        // Create a codebase locator for the classpath entry
                        // relative to the codebase in which we discovered the Jar
                        // manifest
                        ICodeBaseLocator relativeCodeBaseLocator =
                                codeBase.getCodeBaseLocator().createRelativeCodeBaseLocator(path);

                        // Codebases found in Class-Path entries are always
                        // added to the aux classpath, not the application.
                        addToWorkList(workList, new WorkListItem(relativeCodeBaseLocator, false,
                                ICodeBase.IN_JAR_MANIFEST));
                    }
                }
            } finally {
                if (in != null) {
                    IO.close(in);
                }
            }
        } catch (ResourceNotFoundException e) {
            // Do nothing - no Jar manifest found
        }
    }

    /**
     * Add a worklist item to the worklist.
     * This method maintains the invariant that all of the worklist
     * items representing application codebases appear <em>before</em>
     * all of the worklist items representing auxiliary codebases.
     *
     * @param workList  the worklist
     * @param itemToAdd the worklist item to add
     */
    private void addToWorkList(LinkedList<WorkListItem> workList, WorkListItem itemToAdd) {
        if (DEBUG) {
            new RuntimeException("Adding work list item " + itemToAdd).printStackTrace(System.out);
        }
        if (!itemToAdd.isAppCodeBase()) {
            // Auxiliary codebases are always added at the end
            workList.addLast(itemToAdd);
            return;
        }

        // Adding an application codebase: position a ListIterator
        // just before first auxiliary codebase (or at the end of the list
        // if there are no auxiliary codebases)
        ListIterator<WorkListItem> i = workList.listIterator();
        while (i.hasNext()) {
            WorkListItem listItem = i.next();
            if (!listItem.isAppCodeBase()) {
                i.previous();
                break;
            }
        }

        // Add the codebase to the worklist
        i.add(itemToAdd);
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.classfile.IClassPathBuilder#getAppClassList()
     */
    public List<ClassDescriptor> getAppClassList() {
        return appClassList;
    }
}
package org.xwiki.test.ui.po;

import java.util.List;

import org.openqa.selenium.By;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;

import static org.openqa.selenium.support.ui.ExpectedConditions.elementToBeClickable;

/**
 * Represents the actions possible on the Attachment Pane at the bottom of a page.
 *
 * @version $Id$
 * @since 3.2M3
 */
public class AttachmentsPane extends BaseElement
{
    // Root element of the attachments pane
    @FindBy(id = "Attachmentspane")
    private WebElement pane;

    // Button that adds one more file-upload input to the form
    @FindBy(xpath = "//input[@value='Add another file']")
    private WebElement addAnotherFile;

    // Lazily created when a delete is requested; see deleteAttachmentByFileByName
    private ConfirmationModal confirmDelete;

    /**
     * @return {@code true} when the attachments tab content is currently displayed
     */
    public boolean isOpened()
    {
        return getDriver().findElementWithoutWaiting(By.id("attachmentscontent")).isDisplayed();
    }

    /**
     * Fills the URL with the specified file path.
     *
     * @param filePath the path to the file to upload in URL form (the file *must* exist in the target directory).
     */
    public void setFileToUpload(final String filePath)
    {
        final List<WebElement> inputs = this.pane.findElements(By.className("uploadFileInput"));
        // Always fill the last (most recently added) upload input
        inputs.get(inputs.size() - 1).sendKeys(filePath);
    }

    /**
     * Waits for the success notification confirming that the given file finished uploading.
     *
     * @param fileName the name of the uploaded file
     */
    public void waitForUploadToFinish(String fileName)
    {
        waitForNotificationSuccessMessage("Attachment uploaded: " + fileName);
    }

    /**
     * Hides the upload-status progress area.
     */
    public void clickHideProgress()
    {
        this.pane.findElement(By.xpath("//a[text()='Hide upload status']")).click();
    }

    /**
     * Adds another input field for attaching a file.
     */
    public void addAnotherFile()
    {
        this.addAnotherFile.click();
    }

    /**
     * Submits the attachment form, uploading all selected files.
     */
    public void clickAttachFiles()
    {
        this.pane
            .findElement(By.xpath(
                "//form[@id='AddAttachment']//input[@class='button' and @type='submit' and "
                    + "@value='Attach']"))
            .click();
    }

    /**
     * Return the {@code a} tag of an attachment link according to its name.
     *
     * @param attachmentName the name of the attachment (for instance {@code "my_doc.txt"})
     * @return the {@link WebElement} of the {@code a} tag with the requested name
     * @since 3.2M3
     */
    public WebElement getAttachmentLink(String attachmentName)
    {
        By attachementLinkSelector = By.xpath(String.format(
            "//div[@id='_attachments']//a[@title = 'Download this attachment' and contains(@href, '%s')]",
            attachmentName));
        // Make sure that the element is visible and can be clicked before returning it to prevent interacting too
        // early with the attachment link.
        getDriver().waitUntilCondition(elementToBeClickable(attachementLinkSelector));
        return getDriver().findElementWithoutWaiting(attachementLinkSelector);
    }

    /**
     * Deletes the corresponding file name.
     *
     * @param attachmentName the name of the attachment to be deleted
     */
    public void deleteAttachmentByFileByName(String attachmentName)
    {
        // We initialize before so we can remove the animation before the modal is shown
        this.confirmDelete = new ConfirmationModal(By.id("deleteAttachment"));
        getDriver().findElement(By.xpath("//div[@id='attachmentscontent']//a[text()='" + attachmentName
            + "']/../../div[contains(@class, 'xwikibuttonlinks')]/a[contains(@class,'deletelink')]")).click();
        this.confirmDelete.clickOk();
        // Wait for the deletion to be reflected in the DOM before returning
        getDriver().waitUntilElementDisappears(
            By.xpath("//div[@id='attachmentscontent']//a[text()='" + attachmentName + "']"));
        getDriver().waitUntilElementIsVisible(By.xpath("//div[@id='Attachmentspane']"));
    }

    /**
     * Deletes the first attachment.
     */
    public void deleteFirstAttachment()
    {
        // NOTE(review): this method body appears truncated in this revision — the statement below is
        // incomplete and will not compile. Verify against version control and restore the missing code.
        String tmp = getDriver()
    /**
     * @return the number of attachments in this document.
*/ public int getNumberOfAttachments() { By countLocator = By.cssSelector("#Attachmentstab .itemCount"); return Integer.parseInt(getDriver().findElement(countLocator).getText().replaceAll("[()]", "")); } /** * Deletes ALL the attached files */ public void deleteAllAttachments() { while (this.getNumberOfAttachments() > 0) { this.deleteFirstAttachment(); } } public String getUploaderOfAttachment(String attachmentName) { return getDriver().findElement(By.xpath("//div[@id='attachmentscontent']//a[text()='" + attachmentName + "']/../../div[@class='meta']/span[@class='publisher']/span[@class='wikilink']")).getText(); } public String getLatestVersionOfAttachment(String attachmentName) { return getDriver() .findElement(By.xpath( "//div[@id='attachmentscontent']//a[text()= '" + attachmentName + "']/../../span[@class='version']/a")) .getText(); } public String getSizeOfAttachment(String attachmentName) { return getDriver().findElement(By.xpath("//div[@id='attachmentscontent']//a[text()='" + attachmentName + "']/../../div[@class='meta']/span[@class='size']")).getText().replaceAll("[()]", ""); } public String getDateOfLastUpload(String attachmentName) { return getDriver().findElement(By.xpath("//div[@id='attachmentscontent']//a[text()='" + attachmentName + "']/../../div[@class='meta']/span[@class='date']")).getText().replaceFirst("on", ""); } public boolean attachmentExistsByFileName(String attachmentName) { try { getDriver() .findElement(By.xpath("//a[@title = 'Download this attachment' and . = '" + attachmentName + "']")); } catch (NoSuchElementException e) { return false; } return true; } }
package com.machine.learning; import com.github.rschmitt.dynamicobject.DynamicObject; import com.machine.learning.classifier.Classifier; import com.machine.learning.classifier.ClassifierDefault; import com.machine.learning.experimenter.MadScientist; import com.machine.learning.model.DataModel; import joptsimple.OptionParser; import joptsimple.OptionSet; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; public class Main { public static void main(String ... args) { System.out.println("Beginning testing..."); OptionSet optionSet = getOptions(args); List<DataModel> dataModelList = new ArrayList<>(); if (((List) optionSet.valueOf("files")).size() == 0) { dataModelList = Arrays.asList( DynamicObject.newInstance(DataModel.class).fromFile("breast-cancer-wisconsin.data.txt"), DynamicObject.newInstance(DataModel.class).fromFile("glass.data.txt"), DynamicObject.newInstance(DataModel.class).fromFile("house-votes-84.data.txt"), DynamicObject.newInstance(DataModel.class).fromFile("iris.data.txt"), DynamicObject.newInstance(DataModel.class).fromFile("soybean-small.data.txt") ); } else { List<String> files = (List<String>) optionSet.valueOf("files"); for (String fileName: files) { dataModelList.add(DynamicObject.newInstance(DataModel.class).fromFile(fileName+".data.txt")); } } List<Classifier> classifiers = new ArrayList<>(); Map<String, Classifier> classifierRegistry = new HashMap<>(); classifierRegistry.put("default", new ClassifierDefault()); if (((List) optionSet.valueOf("classifiers")).size() == 0) { Classifier classifier = new ClassifierDefault(); classifiers.add(classifier); } else { List<String> classifierList = (List<String>) optionSet.valueOf("classifiers"); for (String classifierId: classifierList) { Classifier classifier = classifierRegistry.get(classifierId); if (classifier != null) { 
classifiers.add(classifier); } else { System.err.println("Classifier "+ classifierId + "isn't registered."); } } } MadScientist madScientist = new MadScientist(dataModelList, classifiers); PrintWriter pw = null; try { String outfile = optionSet.valueOf("outdir").toString()+ System.nanoTime(); pw = new PrintWriter(outfile); } catch (FileNotFoundException ex) { ex.printStackTrace(); } finally { if (pw != null) { pw.close(); } } System.out.println("All tests finished"); } private static OptionSet getOptions(String... args) { OptionParser parser = new OptionParser(); parser.accepts("files").withRequiredArg().ofType(List.class).defaultsTo(Collections.emptyList()); parser.accepts("classifiers").withRequiredArg().ofType(List.class).defaultsTo(Collections.emptyList()); parser.accepts("outdir").withRequiredArg().ofType(String.class).defaultsTo("results"); OptionSet options = parser.parse(args); return options; } }
package nl.mpi.kinnate.svg;

import nl.mpi.kinnate.kindata.EntityData;
import java.util.ArrayList;
import nl.mpi.kinnate.kindata.DataTypes;
import nl.mpi.kinnate.kindata.EntityRelation;
import nl.mpi.kinnate.uniqueidentifiers.UniqueIdentifier;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.Text;
import org.w3c.dom.svg.SVGDocument;

// Builds and maintains the SVG elements (paths, polylines, labels) that draw
// relation lines between entities in the kinship diagram.
public class RelationSvg {

    // Replaces (or creates) the <use> element that instantiates the line definition
    // identified by targetDefId inside targetGroup.
    private void addUseNode(SVGDocument doc, String svgNameSpace, Element targetGroup, String targetDefId) {
        String useNodeId = targetDefId + "use";
        Node useNodeOld = doc.getElementById(useNodeId);
        if (useNodeOld != null) {
            // Remove any stale <use> so the id stays unique in the document
            useNodeOld.getParentNode().removeChild(useNodeOld);
        }
        Element useNode = doc.createElementNS(svgNameSpace, "use");
        useNode.setAttributeNS("http://www.w3.org/1999/xlink", "xlink:href", "#" + targetDefId); // the xlink: of "xlink:href" is required for some svg viewers to render correctly
        // useNode.setAttribute("href", "#" + lineIdString);
        useNode.setAttribute("id", useNodeId);
        targetGroup.appendChild(useNode);
    }

    // Re-creates the <textPath> label attached to a relation line so the text follows
    // the line's new geometry after an update.
    private void updateLabelNode(SVGDocument doc, String svgNameSpace, String lineIdString, String targetRelationId) {
        // remove and readd the text on path label so that it updates with the new path
        String labelNodeId = targetRelationId + "label";
        Node useNodeOld = doc.getElementById(labelNodeId);
        if (useNodeOld != null) {
            Node textParentNode = useNodeOld.getParentNode();
            String labelText = useNodeOld.getTextContent();
            useNodeOld.getParentNode().removeChild(useNodeOld);
            Element textPath = doc.createElementNS(svgNameSpace, "textPath");
            textPath.setAttributeNS("http://www.w3.org/1999/xlink", "xlink:href", "#" + lineIdString); // the xlink: of "xlink:href" is required for some svg viewers to render correctly
            textPath.setAttribute("startOffset", "50%");
            textPath.setAttribute("id", labelNodeId);
            Text textNode = doc.createTextNode(labelText);
            textPath.appendChild(textNode);
            textParentNode.appendChild(textPath);
        }
    }

    // Sets the "points" attribute of a right-angled polyline between ego and alter;
    // the vertical midpoint depends on the relation type (see switch below).
    private void setPolylinePointsAttribute(Element targetNode,
            String svgNameSpace, DataTypes.RelationType relationType, float vSpacing,
            float egoX, float egoY, float alterX, float alterY) {
        // Default elbow height: halfway between the two entities
        float midY = (egoY + alterY) / 2;
        switch (relationType) {
            case affiliation:
                break;
            case ancestor:
                // Elbow sits just above the ancestor (alter)
                midY = alterY + vSpacing / 2;
                break;
            case descendant:
                // Elbow sits just below the descendant's parent (ego)
                midY = egoY + vSpacing / 2;
                break;
            case none:
                break;
            case sibling:
                // if (commonParentMaxY != null) {
                //     midY = commonParentMaxY + vSpacing / 2;
                // } else {
                // Sibling lines run above the higher of the two entities
                midY = (egoY < alterY) ? egoY - vSpacing / 2 : alterY - vSpacing / 2;
                break;
            case union:
                // Union lines run below the lower of the two entities
                midY = (egoY > alterY) ? egoY + vSpacing / 2 : alterY + vSpacing / 2;
                break;
        }
        // if (alterY == egoY) {
        //     // make sure that union lines go below the entities and sibling lines go above
        //     if (relationType == DataTypes.RelationType.sibling) {
        //         midY = alterY - vSpacing / 2;
        //     } else if (relationType == DataTypes.RelationType.union) {
        //         midY = alterY + vSpacing / 2;
        targetNode.setAttribute("points",
                egoX + "," + egoY + " " + egoX + "," + midY + " " + alterX + "," + midY + " " + alterX + "," + alterY);
    }

    // Sets the "d" attribute of a cubic bezier path between ego and alter; the control
    // points depend on the relation line type (vertical vs horizontal curves).
    private void setPathPointsAttribute(Element targetNode, String svgNameSpace,
            DataTypes.RelationType relationType, DataTypes.RelationLineType relationLineType,
            float hSpacing, float vSpacing, float egoX, float egoY, float alterX, float alterY) {
        float fromBezX;
        float fromBezY;
        float toBezX;
        float toBezY;
        if ((egoX > alterX && egoY < alterY) || (egoX > alterX && egoY > alterY)) {
            // prevent the label on the line from rendering upside down: swap endpoints
            float tempX = alterX;
            float tempY = alterY;
            alterX = egoX;
            alterY = egoY;
            egoX = tempX;
            egoY = tempY;
        }
        if (relationLineType == DataTypes.RelationLineType.verticalCurve) {
            fromBezX = egoX;
            fromBezY = alterY;
            toBezX = alterX;
            toBezY = egoY;
            // todo: update the bezier positions similar to in the follwing else statement
            if (1 / (egoY - alterY) < vSpacing) {
                fromBezX = egoX;
                fromBezY = alterY - vSpacing / 2;
                toBezX = alterX;
                toBezY = egoY - vSpacing / 2;
            }
        } else {
            fromBezX = alterX;
            fromBezY = egoY;
            toBezX = egoX;
            toBezY = alterY;
            // todo: if the nodes are almost in align then this test fails and it should insted check for proximity not equality
            // System.out.println(1 / (egoX - alterX));
            // if (1 / (egoX - alterX) < vSpacing) {
            if (egoX > alterX) {
                if (egoX - alterX < hSpacing / 4) {
                    // Entities are close horizontally: bow the curve out to the left
                    fromBezX = egoX - hSpacing / 4;
                    toBezX = alterX - hSpacing / 4;
                } else {
                    // Place both control points at the horizontal midpoint
                    fromBezX = (egoX - alterX) / 2 + alterX;
                    toBezX = (egoX - alterX) / 2 + alterX;
                }
            } else {
                if (alterX - egoX < hSpacing / 4) {
                    // Entities are close horizontally: bow the curve out to the right
                    fromBezX = egoX + hSpacing / 4;
                    toBezX = alterX + hSpacing / 4;
                } else {
                    // Place both control points at the horizontal midpoint
                    fromBezX = (alterX - egoX) / 2 + egoX;
                    toBezX = (alterX - egoX) / 2 + egoX;
                }
            }
        }
        targetNode.setAttribute("d", "M " + egoX + "," + egoY + " C " + fromBezX + "," + fromBezY
                + " " + toBezX + "," + toBezY + " " + alterX + "," + alterY);
    }

    // Returns true when ego and alter (a sibling pair) share at least one ancestor,
    // in which case the sibling line is redundant with the ancestor lines.
    private boolean hasCommonParent(EntityData currentNode, EntityRelation graphLinkNode) {
        if (graphLinkNode.relationType == DataTypes.RelationType.sibling) {
            for (EntityRelation altersRelation : graphLinkNode.getAlterNode().getDistinctRelateNodes()) {
                if (altersRelation.relationType == DataTypes.RelationType.ancestor) {
                    for (EntityRelation egosRelation : currentNode.getDistinctRelateNodes()) {
                        if (egosRelation.relationType == DataTypes.RelationType.ancestor) {
                            if (altersRelation.alterUniqueIdentifier.equals(egosRelation.alterUniqueIdentifier)) {
                                return true;
                            }
                        }
                    }
                }
            }
        }
        return false;
    }

    // private Float getCommonParentMaxY(EntitySvg entitySvg, EntityData currentNode, EntityRelation graphLinkNode) {
    //     if (graphLinkNode.relationType == DataTypes.RelationType.sibling) {
    //         Float maxY = null;
    //         ArrayList<Float> commonParentY = new ArrayList<Float>();
    //         for (EntityRelation altersRelation : graphLinkNode.getAlterNode().getDistinctRelateNodes()) {
    //             if (altersRelation.relationType == DataTypes.RelationType.ancestor) {
    //                 for (EntityRelation egosRelation : currentNode.getDistinctRelateNodes()) {
    //                     if (egosRelation.relationType == DataTypes.RelationType.ancestor) {
    //                         if
    //                         (altersRelation.alterUniqueIdentifier.equals(egosRelation.alterUniqueIdentifier)) {
    //                             float parentY = entitySvg.getEntityLocation(egosRelation.alterUniqueIdentifier)[1];
    //                             maxY = parentY > maxY ? parentY : maxY;
    //                             return maxY;
    //                         } else {
    //                             return null;

    // Creates the SVG group (defs + use + optional label) that draws one relation line
    // between currentNode (ego) and the alter referenced by graphLinkNode.
    protected void insertRelation(GraphPanel graphPanel, String svgNameSpace, Element relationGroupNode,
            EntityData currentNode, EntityRelation graphLinkNode, int hSpacing, int vSpacing) {
        if (graphLinkNode.relationLineType == DataTypes.RelationLineType.sanguineLine) {
            if (hasCommonParent(currentNode, graphLinkNode)) {
                return; // do not draw lines for siblings if the common parent is visible because the ancestor lines will take the place of the sibling lines
            }
        }
        // Each relation gets an index-based id: "relation<N>" with the line defined as "relation<N>Line"
        int relationLineIndex = relationGroupNode.getChildNodes().getLength();
        Element groupNode = graphPanel.doc.createElementNS(svgNameSpace, "g");
        groupNode.setAttribute("id", "relation" + relationLineIndex);
        Element defsNode = graphPanel.doc.createElementNS(svgNameSpace, "defs");
        String lineIdString = "relation" + relationLineIndex + "Line";
        // Persist relation metadata on the group so updateRelationLines can recover it later
        new DataStoreSvg().storeRelationParameters(graphPanel.doc, groupNode, graphLinkNode.relationType,
                graphLinkNode.relationLineType, currentNode.getUniqueIdentifier(),
                graphLinkNode.getAlterNode().getUniqueIdentifier());
        // set the line end points
        // int[] egoSymbolPoint = graphPanel.dataStoreSvg.graphData.getEntityLocation(currentNode.getUniqueIdentifier());
        // int[] alterSymbolPoint = graphPanel.dataStoreSvg.graphData.getEntityLocation(graphLinkNode.getAlterNode().getUniqueIdentifier());
        float[] egoSymbolPoint = graphPanel.entitySvg.getEntityLocation(currentNode.getUniqueIdentifier());
        float[] alterSymbolPoint = graphPanel.entitySvg.getEntityLocation(graphLinkNode.getAlterNode().getUniqueIdentifier());
        // float fromX = (currentNode.getxPos()); // * hSpacing + hSpacing
        // float fromY = (currentNode.getyPos()); // * vSpacing + vSpacing
        // float toX = (graphLinkNode.getAlterNode().getxPos()); // * hSpacing + hSpacing
        // float toY = (graphLinkNode.getAlterNode().getyPos()); // * vSpacing + vSpacing
        float fromX = (egoSymbolPoint[0]); // * hSpacing + hSpacing
        float fromY = (egoSymbolPoint[1]); // * vSpacing + vSpacing
        float toX = (alterSymbolPoint[0]); // * hSpacing + hSpacing
        float toY = (alterSymbolPoint[1]); // * vSpacing + vSpacing
        switch (graphLinkNode.relationLineType) {
            case kinTermLine:
            // this case uses the following case
            case verticalCurve:
                // todo: groupNode.setAttribute("id", );
                // System.out.println("link: " + graphLinkNode.linkedNode.xPos + ":" + graphLinkNode.linkedNode.yPos);
                //// <line id="_15" transform="translate(146.0,112.0)" x1="0" y1="0" x2="100" y2="100" ="black" stroke-width="1"/>
                // Element linkLine = doc.createElementNS(svgNS, "line");
                // linkLine.setAttribute("x1", Integer.toString(currentNode.xPos * hSpacing + hSpacing));
                // linkLine.setAttribute("y1", Integer.toString(currentNode.yPos * vSpacing + vSpacing));
                // linkLine.setAttribute("x2", Integer.toString(graphLinkNode.linkedNode.xPos * hSpacing + hSpacing));
                // linkLine.setAttribute("y2", Integer.toString(graphLinkNode.linkedNode.yPos * vSpacing + vSpacing));
                // linkLine.setAttribute("stroke", "black");
                // linkLine.setAttribute("stroke-width", "1");
                // // Attach the rectangle to the root 'svg' element.
                // svgRoot.appendChild(linkLine);
                //System.out.println("link: " + graphLinkNode.getAlterNode().xPos + ":" + graphLinkNode.getAlterNode().yPos);
                // <line id="_15" transform="translate(146.0,112.0)" x1="0" y1="0" x2="100" y2="100" ="black" stroke-width="1"/>
                Element linkLine = graphPanel.doc.createElementNS(svgNameSpace, "path");
                setPathPointsAttribute(linkLine, svgNameSpace, graphLinkNode.relationType,
                        graphLinkNode.relationLineType, hSpacing, vSpacing, fromX, fromY, toX, toY);
                // linkLine.setAttribute("x1", );
                // linkLine.setAttribute("y1", );
                // linkLine.setAttribute("x2", );
                linkLine.setAttribute("fill", "none");
                if (graphLinkNode.lineColour != null) {
                    linkLine.setAttribute("stroke", graphLinkNode.lineColour);
                } else {
                    linkLine.setAttribute("stroke", "blue");
                }
                linkLine.setAttribute("stroke-width", Integer.toString(EntitySvg.strokeWidth));
                linkLine.setAttribute("id", lineIdString);
                defsNode.appendChild(linkLine);
                break;
            case sanguineLine:
                // Element squareLinkLine = doc.createElement("line");
                // squareLinkLine.setAttribute("x1", Integer.toString(currentNode.xPos * hSpacing + hSpacing));
                // squareLinkLine.setAttribute("y1", Integer.toString(currentNode.yPos * vSpacing + vSpacing));
                // squareLinkLine.setAttribute("x2", Integer.toString(graphLinkNode.linkedNode.xPos * hSpacing + hSpacing));
                // squareLinkLine.setAttribute("y2", Integer.toString(graphLinkNode.linkedNode.yPos * vSpacing + vSpacing));
                // squareLinkLine.setAttribute("stroke", "grey");
                // squareLinkLine.setAttribute("stroke-width", Integer.toString(strokeWidth));
                Element squareLinkLine = graphPanel.doc.createElementNS(svgNameSpace, "polyline");
                setPolylinePointsAttribute(squareLinkLine, svgNameSpace, graphLinkNode.relationType,
                        vSpacing, fromX, fromY, toX, toY);
                squareLinkLine.setAttribute("fill", "none");
                squareLinkLine.setAttribute("stroke", "grey");
                squareLinkLine.setAttribute("stroke-width", Integer.toString(EntitySvg.strokeWidth));
                squareLinkLine.setAttribute("id", lineIdString);
                defsNode.appendChild(squareLinkLine);
                break;
        }
        groupNode.appendChild(defsNode);
        // insert the node that uses the above definition
        addUseNode(graphPanel.doc, svgNameSpace, groupNode, lineIdString);
        // add the relation label
        if (graphLinkNode.labelString != null) {
            Element labelText = graphPanel.doc.createElementNS(svgNameSpace, "text");
            labelText.setAttribute("text-anchor", "middle");
            // labelText.setAttribute("x", Integer.toString(labelX));
            // labelText.setAttribute("y", Integer.toString(labelY));
            if (graphLinkNode.lineColour != null) {
                labelText.setAttribute("fill", graphLinkNode.lineColour);
            } else {
                labelText.setAttribute("fill", "blue");
            }
            labelText.setAttribute("stroke-width", "0");
            labelText.setAttribute("font-size", "14");
            // labelText.setAttribute("transform", "rotate(45)");
            // Label text rides along the line geometry via a textPath reference
            Element textPath = graphPanel.doc.createElementNS(svgNameSpace, "textPath");
            textPath.setAttributeNS("http://www.w3.org/1999/xlink", "xlink:href", "#" + lineIdString); // the xlink: of "xlink:href" is required for some svg viewers to render correctly
            textPath.setAttribute("startOffset", "50%");
            textPath.setAttribute("id", "relation" + relationLineIndex + "label");
            Text textNode = graphPanel.doc.createTextNode(graphLinkNode.labelString);
            textPath.appendChild(textNode);
            labelText.appendChild(textPath);
            groupNode.appendChild(labelText);
        }
        relationGroupNode.appendChild(groupNode);
    }

    // Recomputes the geometry of every relation line that touches one of the dragged
    // entities, using the relation metadata stored on each group element.
    public void updateRelationLines(GraphPanel graphPanel, ArrayList<UniqueIdentifier> draggedNodeIds,
            String svgNameSpace, int hSpacing, int vSpacing) {
        // todo: if an entity is above its ancestor then this must be corrected, if the ancestor data is stored in the relationLine attributes then this would be a good place to correct this
        Element relationGroup = graphPanel.doc.getElementById("RelationGroup");
        for (Node currentChild = relationGroup.getFirstChild(); currentChild != null; currentChild = currentChild.getNextSibling()) {
            if ("g".equals(currentChild.getLocalName())) {
                Node idAttrubite =
                        currentChild.getAttributes().getNamedItem("id");
                //System.out.println("idAttrubite: " + idAttrubite.getNodeValue());
                DataStoreSvg.GraphRelationData graphRelationData = new DataStoreSvg().getEntitiesForRelations(currentChild);
                if (graphRelationData != null) {
                    // Only relines connected to a dragged entity need updating
                    if (draggedNodeIds.contains(graphRelationData.egoNodeId)
                            || draggedNodeIds.contains(graphRelationData.alterNodeId)) {
                        // todo: update the relation lines
                        //System.out.println("needs update on: " + idAttrubite.getNodeValue());
                        String lineElementId = idAttrubite.getNodeValue() + "Line";
                        Element relationLineElement = graphPanel.doc.getElementById(lineElementId);
                        //System.out.println("type: " + relationLineElement.getLocalName());
                        float[] egoSymbolPoint = graphPanel.entitySvg.getEntityLocation(graphRelationData.egoNodeId);
                        float[] alterSymbolPoint = graphPanel.entitySvg.getEntityLocation(graphRelationData.alterNodeId);
                        // int[] egoSymbolPoint = graphPanel.dataStoreSvg.graphData.getEntityLocation(graphRelationData.egoNodeId);
                        // int[] alterSymbolPoint = graphPanel.dataStoreSvg.graphData.getEntityLocation(graphRelationData.alterNodeId);
                        float egoX = egoSymbolPoint[0];
                        float egoY = egoSymbolPoint[1];
                        float alterX = alterSymbolPoint[0];
                        float alterY = alterSymbolPoint[1];
                        // SVGRect egoSymbolRect = new EntitySvg().getEntityLocation(doc, graphRelationData.egoNodeId);
                        // SVGRect alterSymbolRect = new EntitySvg().getEntityLocation(doc, graphRelationData.alterNodeId);
                        // float egoX = egoSymbolRect.getX() + egoSymbolRect.getWidth() / 2;
                        // float egoY = egoSymbolRect.getY() + egoSymbolRect.getHeight() / 2;
                        // float alterX = alterSymbolRect.getX() + alterSymbolRect.getWidth() / 2;
                        // float alterY = alterSymbolRect.getY() + alterSymbolRect.getHeight() / 2;
                        // Dispatch on the concrete SVG element type produced by insertRelation
                        if ("polyline".equals(relationLineElement.getLocalName())) {
                            setPolylinePointsAttribute(relationLineElement, svgNameSpace,
                                    graphRelationData.relationType, vSpacing, egoX, egoY, alterX, alterY);
                        }
                        if ("path".equals(relationLineElement.getLocalName())) {
                            setPathPointsAttribute(relationLineElement, svgNameSpace,
                                    graphRelationData.relationType, graphRelationData.relationLineType,
                                    hSpacing, vSpacing, egoX, egoY, alterX, alterY);
                        }
                        addUseNode(graphPanel.doc, svgNameSpace, (Element) currentChild, lineElementId);
                        updateLabelNode(graphPanel.doc, svgNameSpace, lineElementId, idAttrubite.getNodeValue());
                    }
                }
            }
        }
    }
    // new RelationSvg().addTestNode(doc, (Element) relationLineElement.getParentNode().getParentNode(), svgNameSpace);
    // public void addTestNode(SVGDocument doc, Element addTarget, String svgNameSpace) {
    //     Element squareNode = doc.createElementNS(svgNameSpace, "rect");
    //     squareNode.setAttribute("x", "100");
    //     squareNode.setAttribute("y", "100");
    //     squareNode.setAttribute("width", "20");
    //     squareNode.setAttribute("height", "20");
    //     squareNode.setAttribute("fill", "green");
    //     squareNode.setAttribute("stroke", "black");
    //     squareNode.setAttribute("stroke-width", "2");
    //     addTarget.appendChild(squareNode);
}
package org.opendaylight.yangtools.yang.parser.rfc7950.stmt.uses;

import com.google.common.base.Verify;
import com.google.common.collect.ImmutableSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Optional;
import java.util.Set;
import javax.annotation.Nonnull;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.QNameModule;
import org.opendaylight.yangtools.yang.common.YangVersion;
import org.opendaylight.yangtools.yang.model.api.YangStmtMapping;
import org.opendaylight.yangtools.yang.model.api.meta.EffectiveStatement;
import org.opendaylight.yangtools.yang.model.api.meta.StatementDefinition;
import org.opendaylight.yangtools.yang.model.api.stmt.AugmentStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.DescriptionStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.IfFeatureStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.ReferenceStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.RefineStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.SchemaNodeIdentifier;
import org.opendaylight.yangtools.yang.model.api.stmt.StatusStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.UsesStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.WhenStatement;
import org.opendaylight.yangtools.yang.parser.rfc7950.namespace.SchemaNodeIdentifierBuildNamespace;
import org.opendaylight.yangtools.yang.parser.rfc7950.reactor.YangValidationBundles;
import org.opendaylight.yangtools.yang.parser.spi.meta.AbstractDeclaredStatement;
import org.opendaylight.yangtools.yang.parser.spi.meta.CopyType;
import org.opendaylight.yangtools.yang.parser.spi.meta.InferenceException;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContext;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContext.Mutable;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContextUtils;
import org.opendaylight.yangtools.yang.parser.spi.source.ModuleCtxToModuleQName;
import org.opendaylight.yangtools.yang.parser.spi.source.SourceException;
import org.opendaylight.yangtools.yang.parser.spi.validation.ValidationBundlesNamespace;
import org.opendaylight.yangtools.yang.parser.spi.validation.ValidationBundlesNamespace.ValidationBundleType;
import org.opendaylight.yangtools.yang.parser.stmt.reactor.StatementContextBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Declared representation of a YANG {@code uses} statement, plus the static helpers the
 * reactor uses to expand groupings into their {@code uses} target and to apply refinements.
 */
final class UsesStatementImpl extends AbstractDeclaredStatement<QName> implements UsesStatement {
    private static final Logger LOG = LoggerFactory.getLogger(UsesStatementImpl.class);

    UsesStatementImpl(final StmtContext<QName, UsesStatement, ?> context) {
        super(context);
    }

    /** {@return the name of the referenced grouping (the statement argument)} */
    @Nonnull
    @Override
    public QName getName() {
        return argument();
    }

    @Override
    public WhenStatement getWhenStatement() {
        return firstDeclared(WhenStatement.class);
    }

    @Nonnull
    @Override
    public Collection<? extends IfFeatureStatement> getIfFeatures() {
        return allDeclared(IfFeatureStatement.class);
    }

    @Override
    public StatusStatement getStatus() {
        return firstDeclared(StatusStatement.class);
    }

    @Override
    public DescriptionStatement getDescription() {
        return firstDeclared(DescriptionStatement.class);
    }

    @Override
    public ReferenceStatement getReference() {
        return firstDeclared(ReferenceStatement.class);
    }

    @Nonnull
    @Override
    public Collection<? extends AugmentStatement> getAugments() {
        return allDeclared(AugmentStatement.class);
    }

    @Nonnull
    @Override
    public Collection<? extends RefineStatement> getRefines() {
        return allDeclared(RefineStatement.class);
    }

    /**
     * Copy statements from a grouping to a target node.
     *
     * @param sourceGrpStmtCtx
     *            source grouping statement context
     * @param targetCtx
     *            target context
     * @param usesNode
     *            uses node
     * @throws SourceException
     *             instance of SourceException
     */
    static void copyFromSourceToTarget(final Mutable<?, ?, ?> sourceGrpStmtCtx,
            final StatementContextBase<?, ?, ?> targetCtx,
            final Mutable<QName, UsesStatement, EffectiveStatement<QName, UsesStatement>> usesNode) {
        final Collection<? extends Mutable<?, ?, ?>> declared = sourceGrpStmtCtx.mutableDeclaredSubstatements();
        final Collection<? extends Mutable<?, ?, ?>> effective = sourceGrpStmtCtx.mutableEffectiveSubstatements();
        final Collection<Mutable<?, ?, ?>> buffer = new ArrayList<>(declared.size() + effective.size());
        // Copies are re-homed into the target's module namespace
        final QNameModule newQNameModule = getNewQNameModule(targetCtx, sourceGrpStmtCtx);

        for (final Mutable<?, ?, ?> original : declared) {
            // Declared substatements disabled by if-feature are not copied
            if (original.isSupportedByFeatures()) {
                copyStatement(original, targetCtx, newQNameModule, buffer);
            }
        }
        for (final Mutable<?, ?, ?> original : effective) {
            copyStatement(original, targetCtx, newQNameModule, buffer);
        }
        targetCtx.addEffectiveSubstatements(buffer);
        usesNode.addAsEffectOfStatement(buffer);
    }

    // Copies a single statement into the target (or reuses it verbatim for
    // type/typedef, which are immutable across uses expansion).
    private static void copyStatement(final Mutable<?, ?, ?> original,
            final StatementContextBase<?, ?, ?> targetCtx, final QNameModule targetModule,
            final Collection<Mutable<?, ?, ?>> buffer) {
        if (needToCopyByUses(original)) {
            final Mutable<?, ?, ?> copy = targetCtx.childCopyOf(original, CopyType.ADDED_BY_USES, targetModule);
            buffer.add(copy);
        } else if (isReusedByUsesOnTop(original)) {
            buffer.add(original);
        }
    }

    // Statements shared (not copied) when they appear directly under the grouping
    private static final Set<YangStmtMapping> TOP_REUSED_DEF_SET = ImmutableSet.of(
        YangStmtMapping.TYPE,
        YangStmtMapping.TYPEDEF);

    private static boolean isReusedByUsesOnTop(final StmtContext<?, ?, ?> stmtContext) {
        return TOP_REUSED_DEF_SET.contains(stmtContext.getPublicDefinition());
    }

    // Statements never copied out of a grouping body itself
    private static final Set<YangStmtMapping> NOCOPY_FROM_GROUPING_SET = ImmutableSet.of(
        YangStmtMapping.DESCRIPTION,
        YangStmtMapping.REFERENCE,
        YangStmtMapping.STATUS);
    // Statements always reused, never copied, regardless of nesting
    private static final Set<YangStmtMapping> REUSED_DEF_SET = ImmutableSet.of(
        YangStmtMapping.TYPE,
        YangStmtMapping.TYPEDEF,
        YangStmtMapping.USES);

    /**
     * Determines whether a statement must be copied (rather than shared) when a
     * grouping is expanded at a {@code uses} site.
     *
     * @param stmtContext statement context to classify
     * @return true when the statement must be copied into the target
     */
    public static boolean needToCopyByUses(final StmtContext<?, ?, ?> stmtContext) {
        final StatementDefinition def = stmtContext.getPublicDefinition();
        if (REUSED_DEF_SET.contains(def)) {
            LOG.trace("Will reuse {} statement {}", def, stmtContext);
            return false;
        }
        if (NOCOPY_FROM_GROUPING_SET.contains(def)) {
            // description/reference/status are only skipped when directly inside a grouping
            return !YangStmtMapping.GROUPING.equals(stmtContext.getParentContext().getPublicDefinition());
        }
        LOG.trace("Will copy {} statement {}", def, stmtContext);
        return true;
    }

    /**
     * Applies all declared {@code refine} substatements of a {@code uses} node to the
     * expanded target subtree.
     *
     * @param usesNode the uses statement being resolved
     * @param targetNodeStmtCtx context the grouping was expanded into
     */
    public static void resolveUsesNode(
            final Mutable<QName, UsesStatement, EffectiveStatement<QName, UsesStatement>> usesNode,
            final StmtContext<?, ?, ?> targetNodeStmtCtx) {
        for (final Mutable<?, ?, ?> subStmtCtx : usesNode.mutableDeclaredSubstatements()) {
            if (StmtContextUtils.producesDeclared(subStmtCtx, RefineStatement.class)
                    && areFeaturesSupported(subStmtCtx)) {
                performRefine(subStmtCtx, targetNodeStmtCtx);
            }
        }
    }

    private static boolean areFeaturesSupported(final StmtContext<?, ?, ?> subStmtCtx) {
        /*
         * In case of Yang 1.1, checks whether features are supported.
         */
        return !YangVersion.VERSION_1_1.equals(subStmtCtx.getRootVersion()) || subStmtCtx.isSupportedByFeatures();
    }

    // Resolves one refine statement: locates its target node under the uses parent and
    // either applies the refinement or skips it for unknown-statement targets.
    private static void performRefine(final Mutable<?, ?, ?> subStmtCtx, final StmtContext<?, ?, ?> usesParentCtx) {
        final Object refineArgument = subStmtCtx.getStatementArgument();
        InferenceException.throwIf(!(refineArgument instanceof SchemaNodeIdentifier),
            subStmtCtx.getStatementSourceReference(),
            "Invalid refine argument %s. It must be instance of SchemaNodeIdentifier.", refineArgument);
        final Optional<StmtContext<?, ?, ?>> optRefineTargetCtx = SchemaNodeIdentifierBuildNamespace.findNode(
            usesParentCtx, (SchemaNodeIdentifier) refineArgument);
        InferenceException.throwIf(!optRefineTargetCtx.isPresent(), subStmtCtx.getStatementSourceReference(),
            "Refine target node %s not found.", refineArgument);

        final StmtContext<?, ?, ?> refineTargetNodeCtx = optRefineTargetCtx.get();
        if (StmtContextUtils.isUnknownStatement(refineTargetNodeCtx)) {
            // Unknown (extension) targets cannot be refined; record the link and move on
            LOG.trace("Refine node '{}' in uses '{}' has target node unknown statement '{}'. "
                + "Refine has been skipped. At line: {}", subStmtCtx.getStatementArgument(),
                subStmtCtx.getParentContext().getStatementArgument(),
                refineTargetNodeCtx.getStatementArgument(), subStmtCtx.getStatementSourceReference());
            subStmtCtx.addAsEffectOfStatement(refineTargetNodeCtx);
            return;
        }
        Verify.verify(refineTargetNodeCtx instanceof StatementContextBase);
        addOrReplaceNodes(subStmtCtx, (StatementContextBase<?, ?, ?>) refineTargetNodeCtx);
        subStmtCtx.addAsEffectOfStatement(refineTargetNodeCtx);
    }

    // Applies every supported refine substatement onto the refine target
    private static void addOrReplaceNodes(final Mutable<?, ?, ?> subStmtCtx,
            final StatementContextBase<?, ?, ?> refineTargetNodeCtx) {
        for (final Mutable<?, ?, ?> refineSubstatementCtx : subStmtCtx.mutableDeclaredSubstatements()) {
            if (isSupportedRefineSubstatement(refineSubstatementCtx)) {
                addOrReplaceNode(refineSubstatementCtx, refineTargetNodeCtx);
            }
        }
    }

    private static void addOrReplaceNode(final Mutable<?, ?, ?> refineSubstatementCtx,
            final StatementContextBase<?, ?, ?> refineTargetNodeCtx) {
        final StatementDefinition refineSubstatementDef = refineSubstatementCtx.getPublicDefinition();
        SourceException.throwIf(!isSupportedRefineTarget(refineSubstatementCtx, refineTargetNodeCtx),
            refineSubstatementCtx.getStatementSourceReference(),
            "Error in module '%s' in the refine of uses '%s': can not perform refine of '%s' for the target '%s'.",
            refineSubstatementCtx.getRoot().getStatementArgument(),
            refineSubstatementCtx.getParentContext().getStatementArgument(),
            refineSubstatementCtx.getPublicDefinition(), refineTargetNodeCtx.getPublicDefinition());
        if (isAllowedToAddByRefine(refineSubstatementDef)) {
            // e.g. "must" statements accumulate on the target
            refineTargetNodeCtx.addEffectiveSubstatement(refineSubstatementCtx);
        } else {
            // everything else replaces the target's existing statement of the same kind
            refineTargetNodeCtx.removeStatementFromEffectiveSubstatements(refineSubstatementDef);
            refineTargetNodeCtx.addEffectiveSubstatement(refineSubstatementCtx);
        }
    }

    // Refine substatements that are additive rather than replacing
    private static final Set<YangStmtMapping> ALLOWED_TO_ADD_BY_REFINE_DEF_SET = ImmutableSet.of(YangStmtMapping.MUST);

    private static boolean isAllowedToAddByRefine(final StatementDefinition publicDefinition) {
        return ALLOWED_TO_ADD_BY_REFINE_DEF_SET.contains(publicDefinition);
    }

    // A refine substatement is supported when the validation bundle allows it (or no
    // bundle is configured), or when it is an unknown (extension) statement.
    private static boolean isSupportedRefineSubstatement(final StmtContext<?, ?, ?> refineSubstatementCtx) {
        final Collection<?> supportedRefineSubstatements = refineSubstatementCtx.getFromNamespace(
            ValidationBundlesNamespace.class, ValidationBundleType.SUPPORTED_REFINE_SUBSTATEMENTS);
        return supportedRefineSubstatements == null || supportedRefineSubstatements.isEmpty()
            || supportedRefineSubstatements.contains(refineSubstatementCtx.getPublicDefinition())
            || StmtContextUtils.isUnknownStatement(refineSubstatementCtx);
    }

    private static boolean isSupportedRefineTarget(final StmtContext<?, ?, ?> refineSubstatementCtx,
            final StmtContext<?, ?, ?> refineTargetNodeCtx) {
        final Collection<?> supportedRefineTargets = YangValidationBundles.SUPPORTED_REFINE_TARGETS
            .get(refineSubstatementCtx.getPublicDefinition());
        return supportedRefineTargets == null || supportedRefineTargets.isEmpty()
            || supportedRefineTargets.contains(refineTargetNodeCtx.getPublicDefinition());
    }

    // Resolves the module namespace that grouping copies should be re-homed into:
    // the root module for top-level/augment targets, otherwise the target's own module.
    private static QNameModule getNewQNameModule(final StmtContext<?, ?, ?> targetCtx,
            final StmtContext<?, ?, ?> stmtContext) {
        if (targetCtx.getParentContext() == null) {
            return targetCtx.getFromNamespace(ModuleCtxToModuleQName.class, targetCtx);
        }
        if (targetCtx.getPublicDefinition() == YangStmtMapping.AUGMENT) {
            return StmtContextUtils.getRootModuleQName(targetCtx);
        }

        final Object targetStmtArgument = targetCtx.getStatementArgument();
        final Object sourceStmtArgument = stmtContext.getStatementArgument();
        if (targetStmtArgument instanceof QName && sourceStmtArgument instanceof QName) {
            return ((QName) targetStmtArgument).getModule();
        }

        // Cannot determine a module; caller treats null as "keep original namespace"
        return null;
    }
}
package com.sixtyfour.parser;

import com.sixtyfour.elements.Constant;
import com.sixtyfour.elements.Type;
import com.sixtyfour.system.Machine;

/**
 * A Term is a (part of a) calculation or a logical operation. Terms have two
 * children (left and right) and an operator that connects the two.
 */
public class Term implements Atom {

    /** The left child */
    private Atom left;

    /** The right child */
    private Atom right;

    /** The operator that connects the two children */
    private Operator operator;

    /** The expression that created this Term */
    private String expression;

    /** The key for term replacement while parsing */
    private String key;

    /** The type that the Term returns (lazily computed and cached by getType()) */
    private Type type;

    /**
     * Instantiates a new term based on the given expression.
     *
     * @param expression
     *            the expression
     */
    public Term(String expression) {
        this.setExpression(expression);
    }

    /**
     * Instantiates a new term with an Atom as left child and a 0 as the right child.
     * The operator is a NOP, so eval() simply forwards to the left child.
     *
     * @param left
     *            the left child
     */
    public Term(Atom left) {
        this.left = left;
        this.operator = Operator.NOP;
        this.right = new Constant<Integer>(0);
    }

    /**
     * Checks if the Term is complete. A complete term has two children and an
     * operator.
     *
     * @return true, if it's complete
     */
    public boolean isComplete() {
        return left != null && right != null && operator != null;
    }

    /**
     * Checks if the term is empty. A term is empty when it has no left child,
     * or its left child is itself an empty term (checked recursively).
     *
     * @return true, if it's empty
     */
    public boolean isEmpty() {
        return left == null || (left.isTerm() && ((Term) left).isEmpty());
    }

    /**
     * Returns the left child.
     *
     * @return the left child
     */
    public Atom getLeft() {
        return left;
    }

    /**
     * Sets the left child.
     *
     * @param left
     *            the new left child
     */
    public void setLeft(Atom left) {
        this.left = left;
    }

    /**
     * Returns the right child.
     *
     * @return the right child
     */
    public Atom getRight() {
        return right;
    }

    /**
     * Sets the right child.
     *
     * @param right
     *            the new right child
     */
    public void setRight(Atom right) {
        this.right = right;
    }

    /**
     * Returns the operator.
     *
     * @return the operator
     */
    public Operator getOperator() {
        return operator;
    }

    /**
     * Sets the operator.
     *
     * @param operator
     *            the new operator
     */
    public void setOperator(Operator operator) {
        this.operator = operator;
    }

    /**
     * Returns the expression.
     *
     * @return the expression
     */
    public String getExpression() {
        return expression;
    }

    /**
     * Sets the expression.
     *
     * @param expression
     *            the new expression
     */
    public void setExpression(String expression) {
        this.expression = expression;
    }

    /**
     * Returns the key. The key is used for internal replacement of parts of a
     * term (as text) by placeholders.
     *
     * @return the key
     */
    public String getKey() {
        return key;
    }

    /**
     * Sets the key. The key is used for internal replacement of parts of a term
     * (as text) by placeholders.
     *
     * @param key
     *            the new key
     */
    public void setKey(String key) {
        this.key = key;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return "([" + key + "]\\l:" + left + this.operator + "\\r:" + right + ")";
    }

    /*
     * (non-Javadoc)
     *
     * @see sixtyfour.parser.Atom#getType()
     */
    @Override
    public Type getType() {
        // Cached after the first successful computation.
        if (type != null) {
            return type;
        }
        Type t1 = left.getType();
        Type t2 = t1;
        if (operator != Operator.NOP) {
            t2 = right.getType();
        }
        if (operator.isDelimiter()) {
            return t1;
        }
        // INTEGER/REAL mixes promote to REAL; any other mismatch is an error.
        if (t1.equals(Type.INTEGER) && t2.equals(Type.REAL)) {
            return t2;
        }
        if (t1.equals(Type.REAL) && t2.equals(Type.INTEGER)) {
            return t1;
        }
        if (!t1.equals(t2)) {
            throw new RuntimeException("Type mismatch error: " + this.toString() + " | " + left + " | " + right + " | " + t1 + "/" + t2 + "/" + operator.getType());
        }
        type = t1;
        return t1;
    }

    /*
     * (non-Javadoc)
     *
     * @see sixtyfour.parser.Atom#eval(sixtyfour.system.Machine)
     *
     * Evaluates the term. NOP terms forward to the left child; STRING terms only
     * support '+' (concatenation); numeric terms dispatch on the operator and
     * return a Float. Note that for NOT only the right child's value is used in
     * the result (both children are still evaluated).
     */
    @Override
    public Object eval(Machine machine) {
        try {
            machine.setCurrentOperator(operator);
            if (operator.isNop()) {
                if (left == null) {
                    throw new RuntimeException("Syntax error!");
                }
                return left.eval(machine);
            }

            Type type = getType();
            if (type == Type.STRING) {
                if (operator.isPlus()) {
                    return left.eval(machine).toString() + right.eval(machine).toString();
                }
            } else {
                Number n1 = (Number) left.eval(machine);
                Number n2 = (Number) right.eval(machine);
                float v1 = 0;
                if (operator.isPlus()) {
                    v1 = n1.floatValue() + n2.floatValue();
                } else if (operator.isMinus()) {
                    v1 = n1.floatValue() - n2.floatValue();
                } else if (operator.isPower()) {
                    v1 = (float) Math.pow(n1.doubleValue(), n2.doubleValue());
                } else if (operator.isMultiplication()) {
                    v1 = n1.floatValue() * n2.floatValue();
                } else if (operator.isDivision()) {
                    if (n2.floatValue() == 0) {
                        throw new RuntimeException("Division by zero error: " + n1 + "/" + n2);
                    }
                    v1 = n1.floatValue() / n2.floatValue();
                } else if (operator.isOr()) {
                    // Bitwise, matching BASIC semantics where logical ops work on ints.
                    v1 = n1.intValue() | n2.intValue();
                } else if (operator.isAnd()) {
                    v1 = n1.intValue() & n2.intValue();
                } else if (operator.isNot()) {
                    // Unary in effect: only the right child's value is complemented.
                    v1 = ~n2.intValue();
                }
                return v1;
            }
            // Reached e.g. for a STRING term with a non-'+' operator.
            throw new RuntimeException("Unable to evaluate term: " + this.toString());
        } finally {
            machine.setCurrentOperator(null);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see sixtyfour.parser.Atom#isTerm()
     */
    @Override
    public boolean isTerm() {
        return true;
    }
}
package com.stripe.model;

import com.stripe.exception.APIConnectionException;
import com.stripe.exception.APIException;
import com.stripe.exception.AuthenticationException;
import com.stripe.exception.CardException;
import com.stripe.exception.InvalidRequestException;
import com.stripe.net.APIResource;
import com.stripe.net.RequestOptions;

import java.util.List;
import java.util.Map;

/**
 * Model for a Stripe Transfer object. Mostly generated-style boilerplate:
 * plain fields with getters/setters, plus static/instance wrappers around the
 * APIResource request machinery (each offered with and without RequestOptions,
 * the String-apiKey variants being deprecated).
 */
public class Transfer extends APIResource implements MetadataStore<Transfer> {
    String id;
    String status;
    Long date;
    Boolean livemode;
    Summary summary;
    String description;
    String statementDescriptor;
    // Superseded by statementDescriptor.
    @Deprecated
    String statementDescription;
    Integer amount;
    String currency;
    List<String> otherTransfers;
    // Superseded by destination.
    @Deprecated
    String recipient;
    String destination;
    String destinationPayment;
    BankAccount account;
    String balanceTransaction;
    Map<String, String> metadata;
    String failureCode;
    String failureMessage;
    TransferReversalCollection reversals;

    // ---- Plain accessors ------------------------------------------------

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public Boolean getLivemode() {
        return livemode;
    }

    public void setLivemode(Boolean livemode) {
        this.livemode = livemode;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public Long getDate() {
        return date;
    }

    public void setDate(Long date) {
        this.date = date;
    }

    public Summary getSummary() {
        return summary;
    }

    public void setSummary(Summary summary) {
        this.summary = summary;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getStatementDescriptor() {
        return statementDescriptor;
    }

    public void setStatementDescriptor(String statementDescriptor) {
        this.statementDescriptor = statementDescriptor;
    }

    @Deprecated
    public String getStatementDescription() {
        return statementDescription;
    }

    @Deprecated
    public void setStatementDescription(String statementDescription) {
        this.statementDescription = statementDescription;
    }

    public Integer getAmount() {
        return amount;
    }

    public void setAmount(Integer amount) {
        this.amount = amount;
    }

    public String getCurrency() {
        return currency;
    }

    @Deprecated
    public String getRecipient() {
        return recipient;
    }

    @Deprecated
    public void setRecipient(String recipient) {
        this.recipient = recipient;
    }

    public String getDestination() {
        return destination;
    }

    public void setDestination(String destination) {
        this.destination = destination;
    }

    public String getDestinationPayment() {
        return destinationPayment;
    }

    public void setDestinationPayment(String destinationPayment) {
        this.destinationPayment = destinationPayment;
    }

    public BankAccount getAccount() {
        return account;
    }

    public void setAccount(BankAccount account) {
        this.account = account;
    }

    public void setCurrency(String currency) {
        this.currency = currency;
    }

    public List<String> getOtherTransfers() {
        return otherTransfers;
    }

    public void setOtherTransfers(List<String> otherTransfers) {
        this.otherTransfers = otherTransfers;
    }

    public String getBalanceTransaction() {
        return balanceTransaction;
    }

    public void setBalanceTransaction(String balanceTransaction) {
        this.balanceTransaction = balanceTransaction;
    }

    public String getFailureCode() {
        return failureCode;
    }

    public void setFailureCode(String failureCode) {
        this.failureCode = failureCode;
    }

    public String getFailureMessage() {
        return failureMessage;
    }

    public void setFailureMessage(String failureMessage) {
        this.failureMessage = failureMessage;
    }

    public Map<String, String> getMetadata() {
        return metadata;
    }

    public void setMetadata(Map<String, String> metadata) {
        this.metadata = metadata;
    }

    // ---- Convenience API wrappers (delegate to the RequestOptions variants) ----

    public static Transfer create(Map<String, Object> params)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return create(params, (RequestOptions) null);
    }

    public static Transfer retrieve(String id) throws AuthenticationException,
            InvalidRequestException, APIConnectionException, CardException,
            APIException {
        return retrieve(id, (RequestOptions) null);
    }

    /**
     * @deprecated
     * Use Transfer.getReversals().create() instead of Transfer.cancel().
     */
    @Deprecated
    public Transfer cancel() throws AuthenticationException,
            InvalidRequestException, APIConnectionException, CardException,
            APIException {
        return cancel((RequestOptions) null);
    }

    public Transfer update(Map<String, Object> params)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return update(params, (RequestOptions) null);
    }

    public static TransferCollection all(Map<String, Object> params)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return all(params, (RequestOptions) null);
    }

    public TransferTransactionCollection transactions(Map<String, Object> params)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return transactions(params, (RequestOptions) null);
    }

    // ---- API calls; String-apiKey overloads are deprecated in favor of RequestOptions ----

    @Deprecated
    public static Transfer create(Map<String, Object> params, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return create(params, RequestOptions.builder().setApiKey(apiKey).build());
    }

    public static Transfer create(Map<String, Object> params, RequestOptions options)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return request(RequestMethod.POST, classURL(Transfer.class), params,
                Transfer.class, options);
    }

    @Deprecated
    public Transfer update(Map<String, Object> params, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return update(params, RequestOptions.builder().setApiKey(apiKey).build());
    }

    public Transfer update(Map<String, Object> params, RequestOptions options)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return request(RequestMethod.POST, instanceURL(Transfer.class, this.id),
                params, Transfer.class, options);
    }

    @Deprecated
    public Transfer cancel(String apiKey) throws AuthenticationException,
            InvalidRequestException, APIConnectionException, CardException,
            APIException {
        return cancel(RequestOptions.builder().setApiKey(apiKey).build());
    }

    public Transfer cancel(RequestOptions options)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return request(RequestMethod.POST,
                instanceURL(Transfer.class, this.id) + "/cancel", null,
                Transfer.class, options);
    }

    @Deprecated
    public static Transfer retrieve(String id, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return retrieve(id, RequestOptions.builder().setApiKey(apiKey).build());
    }

    public static Transfer retrieve(String id, RequestOptions options)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return request(RequestMethod.GET, instanceURL(Transfer.class, id), null,
                Transfer.class, options);
    }

    @Deprecated
    public static TransferCollection all(Map<String, Object> params, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return all(params, RequestOptions.builder().setApiKey(apiKey).build());
    }

    public static TransferCollection all(Map<String, Object> params, RequestOptions options)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return request(RequestMethod.GET, classURL(Transfer.class), params,
                TransferCollection.class, options);
    }

    @Deprecated
    public TransferTransactionCollection transactions(
            Map<String, Object> params, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return transactions(params, RequestOptions.builder().setApiKey(apiKey).build());
    }

    public TransferTransactionCollection transactions(
            Map<String, Object> params, RequestOptions options)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, CardException, APIException {
        return request(RequestMethod.GET, String.format("%s/transactions",
                instanceURL(Transfer.class, this.getId())), params,
                TransferTransactionCollection.class, options);
    }

    /**
     * Returns the reversals collection, lazily assigning its URL on first use.
     * NOTE(review): assumes {@code reversals} was populated by deserialization;
     * a null field here would NPE — TODO confirm this is guaranteed by callers.
     */
    public TransferReversalCollection getReversals() {
        if (reversals.getURL() == null) {
            reversals.setURL(String.format("/v1/transfers/%s/reversals", getId()));
        }
        return reversals;
    }
}
package org.geomajas.example.gwt.client.samples.mapwidget;

import org.geomajas.example.gwt.client.samples.base.SamplePanel;
import org.geomajas.example.gwt.client.samples.base.SamplePanelFactory;
import org.geomajas.example.gwt.client.samples.i18n.I18nProvider;
import org.geomajas.gwt.client.controller.PanController;
import org.geomajas.gwt.client.gfx.style.ShapeStyle;
import org.geomajas.gwt.client.widget.MapWidget;
import org.geomajas.gwt.client.widget.OverviewMap;

import com.smartgwt.client.types.Alignment;
import com.smartgwt.client.types.VerticalAlignment;
import com.smartgwt.client.widgets.Canvas;
import com.smartgwt.client.widgets.IButton;
import com.smartgwt.client.widgets.events.ClickEvent;
import com.smartgwt.client.widgets.events.ClickHandler;
import com.smartgwt.client.widgets.layout.HLayout;
import com.smartgwt.client.widgets.layout.VLayout;

/**
 * <p>
 * Sample that shows the relation between an overview map and a normal map.
 * </p>
 *
 * @author Pieter De Graef
 */
public class OverviewMapSample extends SamplePanel {

    /** Title under which this sample is registered. */
    public static final String TITLE = "OverviewMap";

    /** Factory used by the sample framework to instantiate this panel. */
    public static final SamplePanelFactory FACTORY = new SamplePanelFactory() {

        public SamplePanel createPanel() {
            return new OverviewMapSample();
        }
    };

    /**
     * Builds the sample view: a main map on top, an overview map below it, and a
     * row of three buttons that toggle overview-map styling options.
     */
    public Canvas getViewPanel() {
        VLayout layout = new VLayout();
        layout.setWidth100();
        layout.setHeight100();
        layout.setMembersMargin(10);

        // Create a layout for the main map:
        HLayout mapLayout = new HLayout();
        mapLayout.setShowEdges(true);

        // Map with ID osmNavigationToolbarMap is defined in the XML configuration.
        final MapWidget map = new MapWidget("osmMap", "gwt-samples");
        map.setController(new PanController(map));
        mapLayout.addMember(map);

        // Create a layout for the overview map:
        HLayout bottomLayout = new HLayout();
        bottomLayout.setHeight(300);
        bottomLayout.setMembersMargin(10);
        bottomLayout.setAlign(Alignment.CENTER);

        VLayout overviewMapLayout = new VLayout();
        overviewMapLayout.setShowEdges(true);
        overviewMapLayout.setWidth(300);

        // Create an overview map:
        final OverviewMap overviewMap = new OverviewMap("overviewOsmMap", "gwt-samples", map, true, true);
        overviewMapLayout.addMember(overviewMap);

        // Create a layout for a few buttons:
        HLayout buttonLayout = new HLayout();
        buttonLayout.setHeight(20);
        buttonLayout.setAlign(VerticalAlignment.BOTTOM);
        buttonLayout.setMembersMargin(10);

        // Creating 3 buttons:
        // Button1: Toggle the rectangle style. Each click swaps the overview map's
        // current rectangle style with the one stored in the handler.
        IButton button1 = new IButton(I18nProvider.getSampleMessages().overviewMapToggleRectStyle());
        button1.addClickHandler(new ClickHandler() {

            private ShapeStyle nextStyle = new ShapeStyle("#000000", 0.6f, "#000000", 1, 1);

            public void onClick(ClickEvent event) {
                ShapeStyle temp = nextStyle;
                nextStyle = overviewMap.getRectangleStyle();
                overviewMap.setRectangleStyle(temp);
            }
        });
        button1.setWidth100();
        buttonLayout.addMember(button1);

        // Button2: Toggle the maximum extent style (same swap pattern as button1):
        IButton button2 = new IButton(I18nProvider.getSampleMessages().overviewMapToggleExtentStyle());
        button2.addClickHandler(new ClickHandler() {

            private ShapeStyle nextStyle = new ShapeStyle("#FF0000", 0.6f, "#FF0000", 1, 3);

            public void onClick(ClickEvent event) {
                ShapeStyle temp = nextStyle;
                nextStyle = overviewMap.getTargetMaxExtentRectangleStyle();
                overviewMap.setTargetMaxExtentRectangleStyle(temp);
            }
        });
        button2.setWidth100();
        buttonLayout.addMember(button2);

        // Button3: Toggle drawing the maximum extent:
        IButton button3 = new IButton(I18nProvider.getSampleMessages().overviewMapToggleExtent());
        button3.addClickHandler(new ClickHandler() {

            public void onClick(ClickEvent event) {
                overviewMap.setDrawTargetMaxExtent(!overviewMap.isDrawTargetMaxExtent());
            }
        });
        button3.setWidth100();
        buttonLayout.addMember(button3);

        // Place the layouts together. The button row is deliberately added to the
        // outer layout rather than to bottomLayout (see the disabled line below).
        bottomLayout.addMember(overviewMapLayout);
        //bottomLayout.addMember(buttonLayout);

        layout.addMember(mapLayout);
        layout.addMember(bottomLayout);
        layout.addMember(buttonLayout);
        return layout;
    }

    /** @return localized description text for this sample. */
    public String getDescription() {
        return I18nProvider.getSampleMessages().overviewMapDescription();
    }

    /** @return classpath location of the displayed source snippet. */
    public String getSourceFileName() {
        return "classpath:org/geomajas/example/gwt/client/samples/mapwidget/OverviewMapSample.txt";
    }

    /** @return configuration files this sample depends on. */
    public String[] getConfigurationFiles() {
        return new String[] { "WEB-INF/layerOsm.xml", "WEB-INF/mapOverviewOsm.xml", "WEB-INF/mapOsm.xml" };
    }

    /** @return user that must be logged in to view this sample. */
    public String ensureUserLoggedIn() {
        return "luc";
    }
}
package org.csstudio.config.savevalue.ui; import org.csstudio.platform.data.IDoubleValue; import org.csstudio.platform.data.ILongValue; import org.csstudio.platform.data.INumericMetaData; import org.csstudio.platform.data.IStringValue; import org.csstudio.platform.data.IValue; import org.csstudio.platform.model.IProcessVariableWithSamples; import org.eclipse.core.commands.AbstractHandler; import org.eclipse.core.commands.ExecutionEvent; import org.eclipse.core.commands.ExecutionException; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.ui.handlers.HandlerUtil; /** * Handler for the save value command. * * @author Joerg Rathlev */ public class SaveValueCommandHandler extends AbstractHandler { /** * The id of the PV parameter. */ private static final String PV_PARAM_ID = "org.csstudio.platform.ui.commands.saveValue.pv"; //$NON-NLS-1$ /** * The id of the value paramter. */ private static final String VALUE_PARAM_ID = "org.csstudio.platform.ui.commands.saveValue.value"; //$NON-NLS-1$ /** * {@inheritDoc} */ public final Object execute(final ExecutionEvent event) throws ExecutionException { String pv; String value; // The command can either be called with parameters or on a selection. // If the command is called with parameters, the parameters are used, // otherwise, the pv and value are determined based on the selection on // which the command was executed. 
pv = event.getParameter(PV_PARAM_ID); if (pv != null) { // called with paramters value = event.getParameter(VALUE_PARAM_ID); if (value == null) { throw new ExecutionException("Invalid parameterization: no value parameter provided"); //$NON-NLS-1$ } } else { // called on a selection ISelection selection = HandlerUtil.getActiveMenuSelectionChecked(event); IProcessVariableWithSamples pvWithSamples = getSelectedProcessVariable(selection); pv = pvWithSamples.getName(); try { IValue iv = pvWithSamples.getSample(0); value = valueToString(iv); } catch (IllegalStateException e) { // This happens if the _pv is actually a TextInputEditPart with // the value type set to "double", but the text input cannot be // parsed as a double value. MessageDialog.openError(null, Messages.SaveValueDialog_DIALOG_TITLE, Messages.SaveValueDialog_ERRMSG_TEXT_IS_NOT_A_DOUBLE); return null; } } pv = withoutPrefix(pv); saveValue(pv, value); return null; } /** * Returns the PV name without a control system prefix. * * @param pv * the PV name. * @return the PV name without prefix. */ private String withoutPrefix(String pv) { int prefixPosition = pv.indexOf("://"); //$NON-NLS-1$ if (prefixPosition != -1) { pv = pv.substring(prefixPosition + 3); } return pv; } /** * Performs the save value operation. * * @param pv * the process variable. * @param value * the value. */ private void saveValue(String pv, String value) { SaveValueDialog dialog = new SaveValueDialog(null, pv, value); dialog.open(); } /** * Converts the given value into a string representation suitable for * writing into a CA file. * * @param value * the value. * @return the string representation of the value. */ private String valueToString(final IValue value) { // TODO: replace with ValueUtil#getString ? 
if (value instanceof IStringValue) { return ((IStringValue) value).getValue(); } else if (value instanceof IDoubleValue) { IDoubleValue idv = (IDoubleValue) value; double dv = idv.getValue(); int precision = ((INumericMetaData) idv.getMetaData()).getPrecision(); return SaveValueClient.formatForCaFile(dv, precision); } else if (value instanceof ILongValue) { ILongValue lv = (ILongValue) value; return Long.toString(lv.getValue()); } else { return value.format(); } } /** * Returns the selected <code>IProcessVariableWithSamples</code>. * * @param selection * the selection. * @return the selected <code>IProcessVariableWithSamples</code>, or * <code>null</code> if the selected object does not implement * <code>IProcessVariableWithSamples</code> or if the selection is * empty or not a structured selection. */ private IProcessVariableWithSamples getSelectedProcessVariable(final ISelection selection) { if (!(selection instanceof IStructuredSelection)) { return null; } IStructuredSelection structuredSelection = (IStructuredSelection) selection; Object selectedObject = structuredSelection.getFirstElement(); if (selectedObject instanceof IProcessVariableWithSamples) { return (IProcessVariableWithSamples) selectedObject; } else if (selectedObject instanceof IAdaptable) { return (IProcessVariableWithSamples) ((IAdaptable) selectedObject) .getAdapter(IProcessVariableWithSamples.class); } else { return null; } } }
package org.csstudio.swt.xygraph.dataprovider;

import java.util.Calendar;
import java.util.Iterator;

import org.csstudio.swt.xygraph.linearscale.Range;
import org.eclipse.swt.widgets.Display;

/**
 * Provides data to a trace.
 * @author Xihui Chen
 *
 */
public class CircularBufferDataProvider extends AbstractDataProvider{

    /** Controls which field changes (X, Y, both, either, or an external trigger)
     *  cause a new sample to be appended to the buffer. */
    public enum UpdateMode{
        X_OR_Y("X or Y"),
        X_AND_Y("X AND Y"),
        X("X"),
        Y("Y"),
        TRIGGER("Trigger");

        private UpdateMode(String description) {
             this.description = description;
        }
        private String description;
        @Override
        public String toString() {
            return description;
        }
        public static String[] stringValues(){
            String[] sv = new String[values().length];
            int i=0;
            for(UpdateMode p : values())
                sv[i++] = p.toString();
            return sv;
        }
    }

    /** Controls whether the buffer keeps rolling (LAST_N) or freezes once full (N_STOP). */
    public enum PlotMode{
        LAST_N("Plot last n pts."),
        N_STOP("Plot n pts & stop.");

        private PlotMode(String description) {
             this.description = description;
        }
        private String description;
        @Override
        public String toString() {
            return description;
        }
        public static String[] stringValues(){
            String[] sv = new String[values().length];
            int i=0;
            for(PlotMode p : values())
                sv[i++] = p.toString();
            return sv;
        }
    }

    // Backing circular buffer of samples (default capacity 100, see constructor).
    private CircularBuffer<ISample> traceData;

    // Latest scalar values pushed by the setters, plus "changed" flags that the
    // update mode consults before a sample is actually committed to the buffer.
    private double currentXData;
    private double currentYData;
    private long currentYDataTimestamp;
    private boolean currentXDataChanged = false;
    private boolean currentYDataChanged = false;
    private boolean currentYDataTimestampChanged = false;

    // Array-based variants of the above.
    private double[] currentXDataArray = new double[]{};
    private double[] currentYDataArray = new double[]{};
    private boolean currentXDataArrayChanged = false;
    private boolean currentYDataArrayChanged = false;

    // When true, X values are millisecond timestamps rather than sequence numbers.
    private boolean xAxisDateEnabled = false;

    // Optional throttling of listener notifications (milliseconds).
    private int updateDelay = 0;
    private boolean duringDelay = false;

    // When false, each new point/array replaces the buffer instead of appending.
    private boolean concatenate_data = true;

    /**
     * this indicates if the max and min of the data need to be recalculated.
     */
    private boolean dataRangedirty = false;

    private UpdateMode updateMode = UpdateMode.X_AND_Y;

    private PlotMode plotMode = PlotMode.LAST_N;

    // Posted via Display.timerExec when updateDelay > 0; notifies listeners once.
    private Runnable fireUpdate;

    public CircularBufferDataProvider(boolean chronological) {
        super(chronological);
        traceData = new CircularBuffer<ISample>(100);
        fireUpdate = new Runnable(){
            public void run() {
                for(IDataProviderListener listener : listeners){
                    listener.dataChanged(CircularBufferDataProvider.this);
                }
                duringDelay = false;
            }
        };
    }

    /**
     * @param newValue the currentXData to set
     */
    public synchronized void setCurrentXData(double newValue) {
        this.currentXData = newValue;
        currentXDataChanged = true;
        tryToAddDataPoint();
    }

    /**Set current YData.
     * @param newValue the currentYData to set
     */
    public synchronized void setCurrentYData(double newValue) {
        this.currentYData = newValue;
        currentYDataChanged = true;
        // In date mode, wait until a timestamp has also been supplied.
        if(!xAxisDateEnabled||
                (xAxisDateEnabled && currentYDataTimestampChanged))
            tryToAddDataPoint();
    }

    /** Appends a pre-built sample directly, bypassing the update-mode logic. */
    public synchronized void addSample(ISample sample){
        if(traceData.size() == traceData.getBufferSize() && plotMode == PlotMode.N_STOP)
            return;
        traceData.add(sample);
        fireDataChange();
    }

    /**Set the time stamp of current YData
     * @param timestamp timestamp of Y data in milliseconds.
     */
    public synchronized void setCurrentYDataTimestamp(long timestamp){
        // First timestamp switches the provider into date mode; old samples are
        // cleared because their X values are in a different unit.
        if(!xAxisDateEnabled){
            clearTrace();
            xAxisDateEnabled = true;
        }
        this.currentYDataTimestamp = timestamp;
        currentYDataTimestampChanged = true;
        if(currentYDataChanged)
            tryToAddDataPoint();
    }

    /**Set current YData and its timestamp when the new value generated.
     * @param newValue the currentYData to set
     * @param timestamp timestamp of Y data in milliseconds.
     */
    public synchronized void setCurrentYData(double newValue, long timestamp) {
        xAxisDateEnabled = true;
        this.currentYData = newValue;
        currentYDataChanged = true;
        this.currentYDataTimestamp = timestamp;
        currentYDataTimestampChanged = true;
        tryToAddDataPoint();
    }

    /**
     * Try to add a new data point to trace data.
     * Whether it will be added or not is up to the update mode.
     */
    private void tryToAddDataPoint(){
        if(traceData.size() == traceData.getBufferSize() && plotMode == PlotMode.N_STOP)
            return;
        switch (updateMode) {
        case X_OR_Y:
            if((chronological && currentYDataChanged) ||
                    (!chronological && (currentXDataChanged || currentYDataChanged)))
                addDataPoint();
            break;
        case X_AND_Y:
            if((chronological && currentYDataChanged) ||
                    (!chronological && (currentXDataChanged && currentYDataChanged)))
                addDataPoint();
            break;
        case X:
            if((chronological && currentYDataChanged) ||
                    (!chronological && currentXDataChanged))
                addDataPoint();
            break;
        case Y:
            if(currentYDataChanged)
                addDataPoint();
            break;
        case TRIGGER:
        default:
            break;
        }
    }

    /**
     * add a new data point to trace data.
     */
    private void addDataPoint() {
        double newXValue;
        if(!concatenate_data)
            traceData.clear();
        if(chronological){
            if(xAxisDateEnabled){
                // TRIGGER mode stamps with "now"; other modes use the supplied timestamp.
                if(updateMode != UpdateMode.TRIGGER)
                    newXValue = currentYDataTimestamp;
                else
                    newXValue = Calendar.getInstance().getTimeInMillis();
            } else{
                // Non-date chronological mode: X is a running sequence number.
                if(traceData.size() == 0)
                    newXValue = 0;
                else
                    newXValue = traceData.getTail().getXValue() +1;
            }
        }else{
            newXValue = currentXData;
        }
        traceData.add(new Sample(newXValue, currentYData));
        currentXDataChanged = false;
        currentYDataChanged = false;
        currentYDataTimestampChanged = false;
        fireDataChange();
    }

    /**
     * @param newValue the currentXDataArray to set
     */
    public synchronized void setCurrentXDataArray(double[] newValue) {
        this.currentXDataArray = newValue;
        currentXDataArrayChanged = true;
        tryToAddDataArray();
    }

    /**
     * @param newValue the currentYDataArray to set
     */
    public synchronized void setCurrentYDataArray(double[] newValue) {
        this.currentYDataArray = newValue;
        currentYDataArrayChanged = true;
        tryToAddDataArray();
    }

    /**
     * Try to add a new data array to trace data.
     * Whether it will be added or not is up to the update mode.
     */
    private void tryToAddDataArray(){
        if(traceData.size() == traceData.getBufferSize() && plotMode == PlotMode.N_STOP)
            return;
        switch (updateMode) {
        case X_OR_Y:
            if((chronological && currentYDataArrayChanged) ||
                    (!chronological && (currentXDataArrayChanged || currentYDataArrayChanged)))
                addDataArray();
            break;
        case X_AND_Y:
            if((chronological && currentYDataArrayChanged) ||
                    (!chronological && (currentXDataArrayChanged && currentYDataArrayChanged)))
                addDataArray();
            break;
        case X:
            if((chronological && currentYDataArrayChanged) ||
                    (!chronological && currentXDataArrayChanged))
                addDataArray();
            break;
        case Y:
            if(currentYDataArrayChanged)
                addDataArray();
            break;
        case TRIGGER:
        default:
            break;
        }
    }

    /**
     * add a new data array to trace data.
     * NOTE(review): unlike addDataPoint(), this resets currentXDataChanged /
     * currentYDataChanged but never resets currentXDataArrayChanged /
     * currentYDataArrayChanged — looks like a copy-paste slip; TODO confirm
     * whether the array flags are intentionally left set.
     */
    private void addDataArray() {
        if(!concatenate_data)
            traceData.clear();
        if(chronological){
            // Generate sequential X values continuing from the current tail.
            double[] newXValueArray;
            newXValueArray = new double[currentYDataArray.length];
            if(traceData.size() == 0)
                for(int i=0; i<currentYDataArray.length; i++){
                    newXValueArray[i] = i;
                }
            else
                for(int i=1; i<currentYDataArray.length+1; i++){
                    newXValueArray[i-1] = traceData.getTail().getXValue() + i;
                }
            for(int i=0; i<Math.min(traceData.getBufferSize(),
                    Math.min(newXValueArray.length, currentYDataArray.length)); i++){
                traceData.add(new Sample(newXValueArray[i], currentYDataArray[i]));
            }
        }else{
            //newXValueArray = currentXDataArray;
            // if the data array size is longer than buffer size,
            //just ignore the tail data.
            for(int i=0; i<Math.min(traceData.getBufferSize(),
                    Math.min(currentXDataArray.length, currentYDataArray.length)); i++){
                traceData.add(new Sample(currentXDataArray[i], currentYDataArray[i]));
            }
        }
        currentXDataChanged = false;
        currentYDataChanged = false;
        currentYDataTimestampChanged = false;
        fireDataChange();
    }

    /** Removes all samples and notifies listeners. */
    public synchronized void clearTrace(){
        traceData.clear();
        fireDataChange();
    }

    public Iterator<ISample> iterator() {
        return traceData.iterator();
    }

    /**
     * @param bufferSize the bufferSize to set
     */
    public synchronized void setBufferSize(int bufferSize) {
        traceData.setBufferSize(bufferSize, false);
    }

    /**
     * @param updateMode the updateMode to set
     */
    public void setUpdateMode(UpdateMode updateMode) {
        this.updateMode = updateMode;
    }

    /**
     * @return the update mode.
     */
    public UpdateMode getUpdateMode() {
        return updateMode;
    }

    /**In TRIGGER update mode, the trace data could be updated by this method
     */
    public void triggerUpdate() {
        //do not update if no new data was added, otherwise, it will add (0,0) which is not a real sample.
        if(traceData.size() == 0 && !(currentYDataChanged || currentYDataArrayChanged))
            return;
        if(currentYDataArray.length > 0)
            addDataArray();
        else
            addDataPoint();
    }

    @Override
    protected void innerUpdate() {
        // Defer the (potentially expensive) min/max recomputation to updateDataRange().
        dataRangedirty = true;
    }

    @Override
    protected void updateDataRange(){
        if(!dataRangedirty)
            return;
        dataRangedirty = false;
        if(getSize() > 0){
            // Scan all samples, including their error bars, for the bounding ranges.
            double xMin;
            double xMax;
            xMin = traceData.getHead().getXValue();
            xMax = xMin;
            double yMin;
            double yMax;
            yMin = traceData.getHead().getYValue();
            yMax = yMin;
            for(ISample dp : traceData){
                if(xMin > dp.getXValue()-dp.getXMinusError())
                    xMin = dp.getXValue()-dp.getXMinusError();
                if(xMax < dp.getXValue()+dp.getXPlusError())
                    xMax = dp.getXValue()+ dp.getXPlusError();
                if(yMin > dp.getYValue() - dp.getYMinusError())
                    yMin = dp.getYValue() - dp.getYMinusError();
                if(yMax < dp.getYValue() + dp.getYPlusError())
                    yMax = dp.getYValue() + dp.getYPlusError();
            }
            xDataMinMax = new Range(xMin, xMax);
            yDataMinMax = new Range(yMin, yMax);
        }else {
            xDataMinMax = null;
            yDataMinMax = null;
        }
    }

    /**
     * @param plotMode the plotMode to set
     */
    public void setPlotMode(PlotMode plotMode) {
        this.plotMode = plotMode;
    }

    @Override
    public ISample getSample(int index) {
        return traceData.getElement(index);
    }

    @Override
    public int getSize() {
        return traceData.size();
    }

    /**If xAxisDateEnable is true, you will need to use
     * {@link #setCurrentYData(double, long)} or {@link #setCurrentYDataTimestamp(long)} to set the
     * time stamp of ydata. This flag will be automatically enabled when
     * either of these two methods were called.
     * The default value is false.
     * @param xAxisDateEnabled the xAxisDateEnabled to set
     */
    public void setXAxisDateEnabled(boolean xAxisDateEnabled) {
        this.xAxisDateEnabled = xAxisDateEnabled;
    }

    /**
     * @param updateDelay Delay in milliseconds between plot updates. This may help to reduce CPU
     * usage. The default value is 0ms.
     */
    public synchronized void setUpdateDelay(int updateDelay) {
        this.updateDelay = updateDelay;
    }

    @Override
    protected synchronized void fireDataChange() {
        if(updateDelay >0){
            innerUpdate();
            if(!duringDelay){
                // NOTE(review): Display.getCurrent() returns null when called from a
                // non-UI thread — this would NPE there; TODO confirm all callers are
                // on the SWT UI thread.
                Display.getCurrent().timerExec(updateDelay, fireUpdate);
                duringDelay = true;
            }
        }else
            super.fireDataChange();
    }

    public void setConcatenate_data(boolean concatenate_data) {
        this.concatenate_data = concatenate_data;
    }

    public boolean isConcatenate_data() {
        return concatenate_data;
    }
}
package exnihilocreatio.util; import exnihilocreatio.blocks.BlockInfestingLeaves; import exnihilocreatio.texturing.Color; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.texture.TextureAtlasSprite; import net.minecraft.entity.MoverType; import net.minecraft.entity.item.EntityItem; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.init.Blocks; import net.minecraft.item.ItemStack; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.NonNullList; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.MathHelper; import net.minecraft.util.math.Vec3d; import net.minecraft.world.World; import net.minecraftforge.fluids.Fluid; import net.minecraftforge.fluids.FluidStack; import net.minecraftforge.fluids.FluidUtil; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import net.minecraftforge.oredict.OreDictionary; import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Arrays; import java.util.function.BiPredicate; import static java.lang.Math.round; public class Util { public static final Color whiteColor = new Color(1f, 1f, 1f, 1f); public static final Color blackColor = new Color(0f, 0f, 0f, 1f); public static final Color greenColor = new Color(0f, 1f, 0f, 1f); public static void dropItemInWorld(TileEntity source, EntityPlayer player, ItemStack stack, double speedFactor) { if (stack == null || stack.isEmpty()) return; int hitOrientation = player == null ? 
0 : MathHelper.floor(player.rotationYaw * 4.0F / 360.0F + 0.5D) & 3; double stackCoordX = 0.0D, stackCoordY = 0.0D, stackCoordZ = 0.0D; switch (hitOrientation) { case 0: stackCoordX = source.getPos().getX() + 0.5D; stackCoordY = source.getPos().getY() + 0.5D + 1; stackCoordZ = source.getPos().getZ() - 0.25D; break; case 1: stackCoordX = source.getPos().getX() + 1.25D; stackCoordY = source.getPos().getY() + 0.5D + 1; stackCoordZ = source.getPos().getZ() + 0.5D; break; case 2: stackCoordX = source.getPos().getX() + 0.5D; stackCoordY = source.getPos().getY() + 0.5D + 1; stackCoordZ = source.getPos().getZ() + 1.25D; break; case 3: stackCoordX = source.getPos().getX() - 0.25D; stackCoordY = source.getPos().getY() + 0.5D + 1; stackCoordZ = source.getPos().getZ() + 0.5D; break; default: break; } EntityItem droppedEntity = new EntityItem(source.getWorld(), stackCoordX, stackCoordY, stackCoordZ, stack); if (player != null) { Vec3d motion = new Vec3d(player.posX - stackCoordX, player.posY - stackCoordY, player.posZ - stackCoordZ); motion.normalize(); droppedEntity.motionX = motion.x; droppedEntity.motionY = motion.y; droppedEntity.motionZ = motion.z; double offset = 0.25D; droppedEntity.move(MoverType.SELF, motion.x * offset, motion.y * offset, motion.z * offset); } droppedEntity.motionX *= speedFactor; droppedEntity.motionY *= speedFactor; droppedEntity.motionZ *= speedFactor; source.getWorld().spawnEntity(droppedEntity); } @SideOnly(Side.CLIENT) public static TextureAtlasSprite getTextureFromBlockState(@Nonnull IBlockState state) { return Minecraft.getMinecraft().getBlockRendererDispatcher().getBlockModelShapes().getTexture(state); } @SideOnly(Side.CLIENT) public static TextureAtlasSprite getTextureFromFluidStack(FluidStack stack) { if (stack != null && stack.getFluid() != null) return getTextureFromFluid(stack.getFluid()); return Minecraft.getMinecraft().getTextureMapBlocks().getMissingSprite(); } @SideOnly(Side.CLIENT) public static TextureAtlasSprite 
getTextureFromFluid(@Nonnull Fluid fluid) { TextureAtlasSprite tex = null; // Try still if (fluid.getStill() != null) tex = getTexture(fluid.getStill()); if(tex != null) return tex; // Try flowing if (fluid.getFlowing() != null) tex = getTexture(fluid.getFlowing()); if(tex != null) return tex; // Try grabbing the block if (fluid.getBlock() != null) tex = getTextureFromBlockState(fluid.getBlock().getDefaultState()); if(tex != null) return tex; // Give up return Minecraft.getMinecraft().getTextureMapBlocks().getMissingSprite(); } @SideOnly(Side.CLIENT) public static TextureAtlasSprite getTexture(ResourceLocation location) { return Minecraft.getMinecraft().getTextureMapBlocks().getTextureExtry(location.toString()); } public static boolean isSurroundingBlocksAtLeastOneOf(BlockInfo[] blocks, BlockPos pos, World world, int radius) { ArrayList<BlockInfo> blockList = new ArrayList<>(Arrays.asList(blocks)); for (int xShift = -1 * radius; xShift <= radius; xShift++) { for (int zShift = -1 * radius; zShift <= radius; zShift++) { BlockPos checkPos = pos.add(xShift, 0, zShift); BlockInfo checkBlock = new BlockInfo(world.getBlockState(checkPos)); if (blockList.contains(checkBlock)) return true; } } return false; } public static int getNumSurroundingBlocksAtLeastOneOf(BlockInfo[] blocks, BlockPos pos, World world) { int ret = 0; ArrayList<BlockInfo> blockList = new ArrayList<>(Arrays.asList(blocks)); for (int xShift = -2; xShift <= 2; xShift++) { for (int zShift = -2; zShift <= 2; zShift++) { BlockPos checkPos = pos.add(xShift, 0, zShift); BlockInfo checkBlock = new BlockInfo(world.getBlockState(checkPos)); if (blockList.contains(checkBlock)) ret++; } } return ret; } public static int getLightValue(FluidStack fluid) { if (fluid != null && fluid.getFluid() != null) { return fluid.getFluid().getLuminosity(fluid); } else { return 0; } } public static float weightedAverage(float a, float b, float percent) { return a * percent + b * (1 - percent); } public static ItemStack 
getBucketStack(Fluid fluid) { return FluidUtil.getFilledBucket(new FluidStack(fluid, 1000)); } public static boolean compareItemStack(ItemStack stack1, ItemStack stack2) { if (stack1.getMetadata() == OreDictionary.WILDCARD_VALUE || stack2.getMetadata() == OreDictionary.WILDCARD_VALUE) { return stack1.getItem() == stack2.getItem(); } else return stack1.getItem() == stack2.getItem() && stack1.getMetadata() == stack2.getMetadata(); } public static int interpolate(int low, int high, float amount) { if (amount > 1.0f) return high; if (amount < 0.0f) return low; return low + round((high - low) * amount); } public static NonNullList<BlockPos> getNearbyLeaves(World world, BlockPos pos) { NonNullList<BlockPos> blockPos = NonNullList.create(); for (BlockPos checkPos : BlockPos.getAllInBox(new BlockPos(pos.getX() - 1, pos.getY() - 1, pos.getZ() - 1), new BlockPos(pos.getX() + 1, pos.getY() + 1, pos.getZ() + 1))) { IBlockState newState = world.getBlockState(checkPos); if (newState.getBlock() != Blocks.AIR && !(newState.getBlock() instanceof BlockInfestingLeaves)) { if (Util.isLeaves(newState)) blockPos.add(checkPos); } } //if (!blockStates.isEmpty()) LogUtil.info("Obtained getNearbyLeaves"); return blockPos; } public static boolean isLeaves(IBlockState state) { ItemStack itemStack = new ItemStack(state.getBlock()); return OreDictionary.getOres("treeLeaves").stream().anyMatch(stack1 -> Util.compareItemStack(stack1, itemStack)); } public static <T, U>boolean arrayEqualsPredicate(T[] a, U[] a2, BiPredicate<T, U> predicate) { if (a==a2) return true; if (a==null || a2==null) return false; int length = a.length; if (a2.length != length) return false; for (int i=0; i<length; i++) { T o1 = a[i]; U o2 = a2[i]; if (!(o1==null ? o2==null : predicate.test(o1, o2))) return false; } return true; } /** * A slow simulation of incrementing a counter until toMatch is reached. Used to avoid floating point errors. 
* * @param toMatch total value to reach * @param stepSize increment size * @return Number of times stepSize needs to be added to reach toMatch */ public static int stepsRequiredToMatch(float toMatch, float stepSize) { int n = 0; float accumulated = 0.0f; while(accumulated < toMatch) { accumulated += stepSize; n++; } return n; } }
package org.ovirt.engine.core.dal.dbbroker.auditloghandling; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.junit.Test; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.businessentities.StorageDomain; import org.ovirt.engine.core.common.businessentities.StorageDomainStatus; import org.ovirt.engine.core.common.businessentities.StoragePool; import org.ovirt.engine.core.common.businessentities.VDS; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.VM; import org.ovirt.engine.core.common.businessentities.VmTemplate; import org.ovirt.engine.core.common.businessentities.aaa.DbUser; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.dao.StorageDomainDao; import org.ovirt.engine.core.dao.StoragePoolDao; import org.ovirt.engine.core.dao.VdsDao; import org.ovirt.engine.core.dao.VdsGroupDao; import org.ovirt.engine.core.dao.VmDao; import org.ovirt.engine.core.dao.VmTemplateDao; import org.ovirt.engine.core.dao.network.VmNetworkInterfaceDao; public class AuditLogableBaseTest { protected static final Guid GUID = new Guid("11111111-1111-1111-1111-111111111111"); protected static final Guid GUID2 = new Guid("11111111-1111-1111-1111-111111111112"); protected static final Guid GUID3 = new Guid("11111111-1111-1111-1111-111111111113"); protected static final String NAME = "testName"; protected static final String DOMAIN = "testDomain"; @Test public void nGuidCtor() { final AuditLogableBase b = new AuditLogableBase(GUID); final Guid v = b.getVdsId(); assertEquals(GUID, v); } @Test public void nGuidCtorNull() { final Guid n = null; final AuditLogableBase b = new AuditLogableBase(n); final Guid g = b.getVdsId(); 
assertEquals(Guid.Empty, g); } @Test public void nGuidGuidCtor() { final AuditLogableBase b = new AuditLogableBase(GUID, GUID2); final Guid g = b.getVdsId(); assertEquals(GUID, g); final Guid gu = b.getVmId(); assertEquals(GUID2, gu); } @Test public void nGuidGuidCtorNullNGuid() { final AuditLogableBase b = new AuditLogableBase(null, GUID2); final Guid g = b.getVdsId(); assertEquals(Guid.Empty, g); final Guid gu = b.getVmId(); assertEquals(GUID2, gu); } @Test public void nGuidGuidCtorNullGuid() { final AuditLogableBase b = new AuditLogableBase(GUID, null); final Guid g = b.getVdsId(); assertEquals(GUID, g); final Guid gu = b.getVmId(); assertEquals(Guid.Empty, gu); } @Test public void nGuidGuidCtorNull() { final AuditLogableBase b = new AuditLogableBase(null, null); final Guid g = b.getVdsId(); assertEquals(Guid.Empty, g); final Guid gu = b.getVmId(); assertEquals(Guid.Empty, gu); } @Test public void getUserIdDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid g = b.getUserId(); assertEquals(Guid.Empty, g); } @Test public void getUserIdIdSet() { final AuditLogableBase b = new AuditLogableBase(); b.setUserId(GUID); final Guid g = b.getUserId(); assertEquals(GUID, g); } @Test public void getUserIdVdcUserDefault() { final AuditLogableBase b = new AuditLogableBase(); final DbUser u = new DbUser(); b.setCurrentUser(u); final Guid g = b.getUserId(); assertEquals(null, g); } @Test public void getUserIdVdcUserId() { final AuditLogableBase b = new AuditLogableBase(); final DbUser u = new DbUser(); u.setId(GUID); b.setCurrentUser(u); final Guid g = b.getUserId(); assertEquals(GUID, g); } @Test public void getUserNameDefault() { final AuditLogableBase b = new AuditLogableBase(); final String n = b.getUserName(); assertNull(n); } @Test public void getUserNameNull() { final AuditLogableBase b = new AuditLogableBase(); b.setUserName(null); final String n = b.getUserName(); assertNull(n); } @Test public void getUserName() { final AuditLogableBase b = new 
AuditLogableBase(); b.setUserName(NAME); final String n = b.getUserName(); assertEquals(NAME, n); } @Test public void getUserNameFromUser() { final AuditLogableBase b = new AuditLogableBase(); final DbUser u = new DbUser(); u.setLoginName(NAME); u.setDomain(DOMAIN); b.setCurrentUser(u); final String un = b.getUserName(); assertEquals(String.format("%s@%s", NAME, DOMAIN), un); } @Test public void currentUserDefault() { final AuditLogableBase b = new AuditLogableBase(); final DbUser u = b.getCurrentUser(); assertNull(u); } @Test public void currentUserNull() { final AuditLogableBase b = new AuditLogableBase(); final DbUser u = null; b.setCurrentUser(u); final DbUser cu = b.getCurrentUser(); assertEquals(u, cu); } @Test public void currentUser() { final AuditLogableBase b = new AuditLogableBase(); final DbUser u = new DbUser(); b.setCurrentUser(u); final DbUser cu = b.getCurrentUser(); assertEquals(u, cu); } @Test public void vmTemplateIdDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid g = b.getVmTemplateId(); assertEquals(Guid.Empty, g); } @Test public void vmTemplateId() { final AuditLogableBase b = new AuditLogableBase(); b.setVmTemplateId(GUID); final Guid g = b.getVmTemplateId(); assertEquals(GUID, g); } @Test public void vmTemplateIdRefDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid g = b.getVmTemplateIdRef(); assertNull(g); } @Test public void vmTemplateIdRef() { final AuditLogableBase b = new AuditLogableBase(); b.setVmTemplateId(GUID); final Guid g = b.getVmTemplateIdRef(); assertEquals(GUID, g); } @Test public void vmTemplateIdRefWithVm() { final AuditLogableBase b = new TestAuditLogableBase(); final VM v = new VM(); b.setVm(v); final Guid g = b.getVmTemplateIdRef(); assertEquals(GUID, g); } @Test public void vmTemplateNameDefault() { final AuditLogableBase b = new AuditLogableBase(); final String n = b.getVmTemplateName(); assertNull(n); } @Test public void vmTemplateName() { final AuditLogableBase b = new 
AuditLogableBase(); b.setVmTemplateName(NAME); final String nm = b.getVmTemplateName(); assertEquals(NAME, nm); } @Test public void vmTemplateNameWithVm() { final AuditLogableBase b = new TestAuditLogableBase(); final VM v = new VM(); b.setVm(v); final String n = b.getVmTemplateName(); assertEquals(NAME, n); } @Test public void vmIdDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid i = b.getVmId(); assertEquals(Guid.Empty, i); } @Test public void vmIdNull() { final AuditLogableBase b = new AuditLogableBase(); b.setVmId(null); final Guid i = b.getVmId(); assertEquals(Guid.Empty, i); } @Test public void vmId() { final AuditLogableBase b = new AuditLogableBase(); b.setVmId(GUID); final Guid i = b.getVmId(); assertEquals(GUID, i); } @Test public void snapshotNameDefault() { final AuditLogableBase b = new AuditLogableBase(); final String s = b.getSnapshotName(); assertNull(s); } @Test public void snapshotNameNull() { final AuditLogableBase b = new AuditLogableBase(); b.setSnapshotName(null); final String s = b.getSnapshotName(); assertNull(s); } @Test public void snapshotNameEmpty() { final AuditLogableBase b = new AuditLogableBase(); final String e = ""; b.setSnapshotName(e); final String s = b.getSnapshotName(); assertEquals(e, s); } @Test public void snapshotName() { final AuditLogableBase b = new AuditLogableBase(); b.setSnapshotName(NAME); final String s = b.getSnapshotName(); assertEquals(NAME, s); } @Test public void vmIdRefDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid g = b.getVmIdRef(); assertEquals(Guid.Empty, g); } @Test public void vmIdRefNullVmId() { final AuditLogableBase b = new AuditLogableBase(); b.setVmId(null); final Guid g = b.getVmIdRef(); assertNull(g); } @Test public void vmIdRefNullVm() { final AuditLogableBase b = new AuditLogableBase(); b.setVmId(null); final VM v = new VM(); v.setId(GUID); b.setVm(v); final Guid g = b.getVmIdRef(); assertEquals(GUID, g); } @Test public void vmNameDefault() { 
final AuditLogableBase b = new AuditLogableBase(); final String n = b.getVmName(); assertNull(n); } @Test public void vmNameNull() { final AuditLogableBase b = new AuditLogableBase(); b.setVmName(null); final String n = b.getVmName(); assertNull(n); } @Test public void vmNameNullVm() { final AuditLogableBase b = new AuditLogableBase(); b.setVmName(null); final VM v = new VM(); v.setName(NAME); b.setVm(v); final String n = b.getVmName(); assertEquals(NAME, n); } @Test public void vmName() { final AuditLogableBase b = new AuditLogableBase(); b.setVmName(NAME); final String n = b.getVmName(); assertEquals(NAME, n); } @Test public void vdsIdRefDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid g = b.getVdsIdRef(); assertNull(g); } @Test public void vdsIdRefNull() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsIdRef(null); final Guid g = b.getVdsIdRef(); assertNull(g); } @Test public void vdsIdRef() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsIdRef(GUID); final Guid g = b.getVdsIdRef(); assertEquals(GUID, g); } @Test public void vdsIdRefVds() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsIdRef(null); final VDS v = new VDS(); v.setId(GUID); b.setVds(v); final Guid g = b.getVdsIdRef(); assertEquals(GUID, g); } @Test public void vdsIdDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid g = b.getVdsId(); assertEquals(Guid.Empty, g); } @Test public void vdsIdNull() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsId(null); final Guid g = b.getVdsId(); assertEquals(Guid.Empty, g); } @Test public void vdsId() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsId(GUID); final Guid g = b.getVdsId(); assertEquals(GUID, g); } @Test public void vdsNameDefault() { final AuditLogableBase b = new AuditLogableBase(); final String s = b.getVdsName(); assertNull(s); } @Test public void vdsNameNull() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsName(null); final 
String s = b.getVdsName(); assertNull(s); } @Test public void vdsName() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsName(NAME); final String s = b.getVdsName(); assertEquals(NAME, s); } @Test public void vdsNameVds() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsName(null); final VDS v = new VDS(); v.setVdsName(NAME); b.setVds(v); final String s = b.getVdsName(); assertEquals(NAME, s); } @Test public void storageDomainDefault() { final AuditLogableBase b = new AuditLogableBase(); final StorageDomain s = b.getStorageDomain(); assertNull(s); } @Test public void storageDomainNull() { final AuditLogableBase b = new AuditLogableBase(); b.setStorageDomain(null); final StorageDomain s = b.getStorageDomain(); assertNull(s); } @Test public void storageDomain() { final AuditLogableBase b = new AuditLogableBase(); final StorageDomain s = new StorageDomain(); b.setStorageDomain(s); final StorageDomain st = b.getStorageDomain(); assertEquals(s, st); } @Test public void storageDomainWithId() { final TestAuditLogableBase b = new TestAuditLogableBase(); b.setStorageDomainId(GUID); b.setStoragePoolId(GUID); final StorageDomain s = b.getStorageDomain(); assertEquals(b.STORAGE_DOMAIN, s); } @Test public void storageDomainWithIdNullPool() { final TestAuditLogableBase b = new TestAuditLogableBase(); b.setStorageDomainId(GUID); b.setStoragePoolId(GUID2); final StorageDomain s = b.getStorageDomain(); assertNull(s); } @Test public void storageDomainWithNullId() { final TestAuditLogableBase b = new TestAuditLogableBase(); b.setStorageDomainId(GUID2); final StorageDomain s = b.getStorageDomain(); assertEquals(b.STORAGE_DOMAIN, s); } @Test public void storageDomainIdDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid g = b.getStorageDomainId(); assertNull(g); } @Test public void storageDomainIdNull() { final AuditLogableBase b = new AuditLogableBase(); b.setStorageDomainId(null); final Guid g = b.getStorageDomainId(); assertNull(g); } 
@Test public void storageDomainId() { final AuditLogableBase b = new AuditLogableBase(); b.setStorageDomainId(GUID); final Guid g = b.getStorageDomainId(); assertEquals(GUID, g); } @Test public void storageDomainIdWithStorageDomain() { final AuditLogableBase b = new AuditLogableBase(); final StorageDomain s = new StorageDomain(); s.setId(GUID); b.setStorageDomain(s); final Guid g = b.getStorageDomainId(); assertEquals(GUID, g); } @Test public void storageDomainNameDefault() { final AuditLogableBase b = new AuditLogableBase(); final String s = b.getStorageDomainName(); assertEquals("", s); } @Test public void storageDomainName() { final AuditLogableBase b = new AuditLogableBase(); final StorageDomain s = new StorageDomain(); s.setStorageName(NAME); b.setStorageDomain(s); final String n = b.getStorageDomainName(); assertEquals(NAME, n); } @Test public void storagePoolDefault() { final AuditLogableBase b = new AuditLogableBase(); final StoragePool p = b.getStoragePool(); assertNull(p); } @Test public void storagePoolWithId() { final AuditLogableBase b = new TestAuditLogableBase(); b.setStoragePoolId(GUID); final StoragePool p = b.getStoragePool(); assertNotNull(p); } @Test public void storagePool() { final AuditLogableBase b = new AuditLogableBase(); final StoragePool p = new StoragePool(); b.setStoragePool(p); final StoragePool sp = b.getStoragePool(); assertEquals(p, sp); } @Test public void storagePoolIdDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid n = b.getStoragePoolId(); assertNull(n); } @Test public void storagePoolIdNull() { final AuditLogableBase b = new AuditLogableBase(); b.setStoragePoolId(null); final Guid n = b.getStoragePoolId(); assertNull(n); } @Test public void storagePoolId() { final AuditLogableBase b = new AuditLogableBase(); b.setStoragePoolId(GUID); final Guid n = b.getStoragePoolId(); assertEquals(GUID, n); } @Test public void storagePoolIdWithStoragePool() { final AuditLogableBase b = new AuditLogableBase(); 
b.setStoragePoolId(null); final StoragePool p = new StoragePool(); p.setId(GUID); b.setStoragePool(p); final Guid n = b.getStoragePoolId(); assertEquals(GUID, n); } @Test public void storagePoolIdWithStorageDomain() { final AuditLogableBase b = new AuditLogableBase(); b.setStoragePoolId(null); b.setStoragePool(null); final StorageDomain s = new StorageDomain(); s.setStoragePoolId(GUID); b.setStorageDomain(s); final Guid n = b.getStoragePoolId(); assertEquals(GUID, n); } @Test public void storagePoolNameDefault() { final AuditLogableBase b = new AuditLogableBase(); final String s = b.getStoragePoolName(); assertEquals("", s); } @Test public void storagePoolName() { final AuditLogableBase b = new AuditLogableBase(); final StoragePool p = new StoragePool(); p.setName(NAME); b.setStoragePool(p); final String s = b.getStoragePoolName(); assertEquals(NAME, s); } @Test public void auditLogTypeValue() { final AuditLogableBase b = new AuditLogableBase(); final AuditLogType t = b.getAuditLogTypeValue(); assertEquals(AuditLogType.UNASSIGNED, t); } @Test public void getVdsDefault() { final AuditLogableBase b = new AuditLogableBase(); final VDS v = b.getVds(); assertNull(v); } @Test public void getVdsNullAll() { final AuditLogableBase b = new AuditLogableBase(); final VDS vds = null; final VM vm = null; final Guid vdsId = null; b.setVds(vds); b.setVdsId(vdsId); b.setVm(vm); final VDS v = b.getVds(); assertNull(v); } @Test public void getVdsNullVdsId() { final AuditLogableBase b = new TestAuditLogableBase(); final VDS vds = null; final VM vm = new VM(); vm.setRunOnVds(GUID3); final Guid vdsId = null; b.setVds(vds); b.setVdsId(vdsId); b.setVm(vm); final VDS v = b.getVds(); assertNull(v); } @Test public void getVdsNullRun() { final AuditLogableBase b = new AuditLogableBase(); final VDS vds = null; final VM vm = new VM(); vm.setRunOnVds(null); final Guid vdsId = null; b.setVds(vds); b.setVdsId(vdsId); b.setVm(vm); final VDS v = b.getVds(); assertNull(v); } @Test public void 
getVdsWithVds() { final AuditLogableBase b = new AuditLogableBase(); final VDS vds = new VDS(); final VM vm = null; final Guid vdsId = null; b.setVds(vds); b.setVdsId(vdsId); b.setVm(vm); final VDS v = b.getVds(); assertEquals(vds, v); } @Test public void getVdsWithVdsId() { final AuditLogableBase b = new TestAuditLogableBase(); final VM vm = new VM(); vm.setRunOnVds(GUID2); final Guid vdsId = GUID; b.setVdsId(vdsId); b.setVm(vm); final VDS v = b.getVds(); assertEquals(GUID, v.getId()); } @Test public void getVdsWithVm() { final AuditLogableBase b = new TestAuditLogableBase(); final VDS vds = null; final VM vm = new VM(); vm.setRunOnVds(GUID2); final Guid vdsId = null; b.setVds(vds); b.setVdsId(vdsId); b.setVm(vm); final VDS v = b.getVds(); assertEquals(GUID2, v.getId()); } @Test public void getVdsSwallowsException() { final AuditLogableBase b = new TestAuditLogableBase(); final VDS vds = null; final VM vm = new VM(); vm.setRunOnVds(GUID2); final Guid vdsId = GUID3; b.setVds(vds); b.setVdsId(vdsId); b.setVm(vm); final VDS v = b.getVds(); assertNull(v); } @Test public void getVmDefault() { final AuditLogableBase b = new AuditLogableBase(); final VM v = b.getVm(); assertNull(v); } @Test public void getVm() { final AuditLogableBase b = new AuditLogableBase(); final VM v = new VM(); b.setVm(v); final VM vm = b.getVm(); assertEquals(v, vm); } @Test public void getVmNullId() { final AuditLogableBase b = new AuditLogableBase(); final VM v = null; b.setVm(v); b.setVmId(null); final VM vm = b.getVm(); assertNull(vm); } @Test public void getVmEmptyId() { final AuditLogableBase b = new AuditLogableBase(); final VM v = null; b.setVm(v); b.setVmId(Guid.Empty); final VM vm = b.getVm(); assertNull(vm); } @Test public void getVmFromId() { final AuditLogableBase b = new TestAuditLogableBase(); final VM v = null; b.setVm(v); b.setVmId(GUID); final VM vm = b.getVm(); assertNotNull(vm); } @Test public void getVmSwallowsExceptions() { final AuditLogableBase b = new 
TestAuditLogableBase(); final VM v = null; b.setVm(v); b.setVmId(GUID3); final VM vm = b.getVm(); assertNull(vm); } @Test public void getVmTemplateDefault() { final AuditLogableBase b = new AuditLogableBase(); final VmTemplate t = b.getVmTemplate(); assertNull(t); } @Test public void getVmTemplateNull() { final AuditLogableBase b = new AuditLogableBase(); b.setVmTemplate(null); final VmTemplate t = b.getVmTemplate(); assertNull(t); } @Test public void getVmTemplateWithId() { final AuditLogableBase b = new TestAuditLogableBase(); b.setVmTemplate(null); b.setVmTemplateId(GUID); final VmTemplate t = b.getVmTemplate(); assertNotNull(t); } @Test public void getVmTemplateWithVm() { final AuditLogableBase b = new TestAuditLogableBase(); b.setVmTemplate(null); b.setVmTemplateId(null); final VM vm = new VM(); vm.setVmtGuid(GUID); b.setVm(vm); final VmTemplate t = b.getVmTemplate(); assertNotNull(t); } @Test public void getVdsGroupIdDefault() { final AuditLogableBase b = new AuditLogableBase(); final Guid g = b.getVdsGroupId(); assertEquals(Guid.Empty, g); } @Test public void getVdsGroupId() { final AuditLogableBase b = new AuditLogableBase(); b.setVdsGroupId(GUID); final Guid g = b.getVdsGroupId(); assertEquals(GUID, g); } @Test public void getVdsGroupIdVdsGroup() { final AuditLogableBase b = new AuditLogableBase(); final VDSGroup gr = new VDSGroup(); gr.setId(GUID); b.setVdsGroup(gr); final Guid g = b.getVdsGroupId(); assertEquals(GUID, g); } @Test public void getVdsGroupDefault() { final AuditLogableBase b = new AuditLogableBase(); final VDSGroup g = b.getVdsGroup(); assertNull(g); } @Test public void getVdsGroupNotNull() { final AuditLogableBase b = new AuditLogableBase(); final VDSGroup g = new VDSGroup(); b.setVdsGroup(g); final VDSGroup gr = b.getVdsGroup(); assertEquals(g, gr); } @Test public void getVdsGroupWithId() { final AuditLogableBase b = new TestAuditLogableBase(); b.setVdsGroupId(GUID); final VDSGroup g = b.getVdsGroup(); assertEquals(GUID, g.getId()); } 
@Test public void getVdsGroupWithVds() { final AuditLogableBase b = new TestAuditLogableBase(); final VDS v = new VDS(); v.setVdsGroupId(GUID); b.setVds(v); final VDSGroup g = b.getVdsGroup(); assertEquals(GUID, g.getId()); } @Test public void getVdsGroupWithVm() { final AuditLogableBase b = new TestAuditLogableBase(); final VM v = new VM(); v.setVdsGroupId(GUID); b.setVm(v); final VDSGroup g = b.getVdsGroup(); assertEquals(GUID, g.getId()); } @Test public void getVdsGroupNameDefault() { final AuditLogableBase b = new TestAuditLogableBase(); final String n = b.getVdsGroupName(); assertEquals("", n); } @Test public void getVdsGroupNameNullVds() { final AuditLogableBase b = new TestAuditLogableBase(); final VDSGroup g = null; b.setVdsGroup(g); final String n = b.getVdsGroupName(); assertEquals("", n); } @Test public void getVdsGroupName() { final AuditLogableBase b = new TestAuditLogableBase(); final VDSGroup g = new VDSGroup(); g.setName(NAME); b.setVdsGroup(g); final String n = b.getVdsGroupName(); assertEquals(NAME, n); } @Test(expected = NullPointerException.class) public void addCustomValueDoesNotHandleNullKeys() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = null; final String value = NAME; b.addCustomValue(key, value); final String v = b.getCustomValue(key); assertEquals(value, v); } @Test public void addCustomValueWillNotReturnANull() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = NAME; final String value = null; b.addCustomValue(key, value); final String v = b.getCustomValue(key); assertEquals("", v); } @Test public void customValue() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = "foo"; final String value = NAME; b.addCustomValue(key, value); final String v = b.getCustomValue(key); assertEquals(value, v); } @Test public void getCustomValuesLeaksInternalStructure() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = "foo"; final String value = NAME; 
b.addCustomValue(key, value); final String v = b.getCustomValue(key); assertEquals(value, v); final Map<String, String> m = b.getCustomValues(); m.clear(); final String s = b.getCustomValue(key); assertEquals("", s); } @Test public void appendCustomValue() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = "foo"; final String value = NAME; final String sep = "_"; b.appendCustomValue(key, value, sep); final String s = b.getCustomValue(key); assertEquals(value, s); } @Test public void appendCustomValueAppend() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = "foo"; final String value = NAME; final String newVal = "bar"; final String sep = "_"; b.addCustomValue(key, value); b.appendCustomValue(key, newVal, sep); final String s = b.getCustomValue(key); assertEquals(value + sep + newVal, s); } @Test(expected = NullPointerException.class) public void appendCustomValueDoesntHandleNullKeys() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = null; final String value = NAME; final String sep = "_"; b.appendCustomValue(key, value, sep); final String s = b.getCustomValue(key); assertEquals(value, s); } @Test public void appendCustomValueAppendsWithNull() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = "foo"; final String value = null; final String newVal = "bar"; final String sep = "_"; b.addCustomValue(key, value); b.appendCustomValue(key, newVal, sep); final String s = b.getCustomValue(key); assertEquals(value + sep + newVal, s); } @Test public void appendCustomValueUsesNullSeparator() { final AuditLogableBase b = new TestAuditLogableBase(); final String key = "foo"; final String value = NAME; final String newVal = "bar"; final String sep = null; b.addCustomValue(key, value); b.appendCustomValue(key, newVal, sep); final String s = b.getCustomValue(key); assertEquals(value + sep + newVal, s); } @Test public void getCustomValueFromEmptyMap() { final AuditLogableBase b = 
new TestAuditLogableBase(); final String s = b.getCustomValue(NAME); assertEquals("", s); } @Test public void key() { final AuditLogableBase b = new TestAuditLogableBase(); final String s = b.getKey(); assertEquals(AuditLogType.UNASSIGNED.toString(), s); } protected static class TestAuditLogableBase extends AuditLogableBase { public final StorageDomain STORAGE_DOMAIN = new StorageDomain(); @Override public VmTemplateDao getVmTemplateDao() { final VmTemplateDao vt = mock(VmTemplateDao.class); final VmTemplate t = new VmTemplate(); t.setId(GUID); t.setName(NAME); when(vt.get(Guid.Empty)).thenReturn(t); when(vt.get(GUID)).thenReturn(new VmTemplate()); return vt; } @Override public VmDao getVmDao() { final VmDao v = mock(VmDao.class); when(v.get(GUID)).thenReturn(new VM()); when(v.get(GUID3)).thenThrow(new RuntimeException()); return v; } @Override public StorageDomainDao getStorageDomainDao() { final StorageDomainDao d = mock(StorageDomainDao.class); when(d.getForStoragePool(GUID, GUID)).thenReturn(STORAGE_DOMAIN); when(d.getAllForStorageDomain(GUID2)).thenReturn(getStorageDomainList()); return d; } @Override public StoragePoolDao getStoragePoolDao() { final StoragePoolDao s = mock(StoragePoolDao.class); final StoragePool p = new StoragePool(); p.setId(GUID); when(s.get(GUID)).thenReturn(p); when(s.get(GUID2)).thenReturn(null); return s; } @Override public VdsDao getVdsDao() { final VdsDao v = mock(VdsDao.class); final VDS vds1 = new VDS(); vds1.setId(GUID); final VDS vds2 = new VDS(); vds2.setId(GUID2); when(v.get(GUID)).thenReturn(vds1); when(v.get(GUID2)).thenReturn(vds2); when(v.get(GUID3)).thenThrow(new RuntimeException()); return v; } @Override public VdsGroupDao getVdsGroupDao() { final VdsGroupDao v = mock(VdsGroupDao.class); final VDSGroup g = new VDSGroup(); g.setVdsGroupId(GUID); when(v.get(GUID)).thenReturn(g); return v; } @Override public VmNetworkInterfaceDao getVmNetworkInterfaceDao() { return mock(VmNetworkInterfaceDao.class); } private 
List<StorageDomain> getStorageDomainList() { final List<StorageDomain> l = new ArrayList<>(); final StorageDomain s = new StorageDomain(); s.setStatus(StorageDomainStatus.Inactive); l.add(s); final StorageDomain s2 = new StorageDomain(); s2.setStatus(null); l.add(s2); STORAGE_DOMAIN.setStatus(StorageDomainStatus.Active); l.add(STORAGE_DOMAIN); return l; } } }
package com.philliphsu.bottomsheetpickers.view.numberpad;

import com.philliphsu.bottomsheetpickers.view.LocaleModel;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import static com.philliphsu.bottomsheetpickers.view.numberpad.ButtonTextModel.text;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;

/**
 * Unit tests for {@link NumberPadTimePickerPresenter}, run against mocked
 * {@link INumberPadTimePicker.View} instances in both 12-hour and 24-hour modes.
 * The package-private {@link #getViewClass()} and {@link #createPresenter} hooks
 * let subclasses reuse these tests for other view/presenter pairs.
 */
public class NumberPadTimePickerPresenterTest {
    // Indices into the per-mode arrays below.
    static final int MODE_12HR = 0;
    static final int MODE_24HR = 1;

    // One mocked view, one presenter under test, and one expected-text model per clock mode.
    private final INumberPadTimePicker.View[] mViews = new INumberPadTimePicker.View[2];
    private final INumberPadTimePicker.Presenter[] mPresenters = new INumberPadTimePicker.Presenter[2];
    private final ButtonTextModel[] mButtonTextModels = new ButtonTextModel[2];

    @Mock
    private LocaleModel mLocaleModel;

    /** Creates fresh mocks, presenters and button-text models before each test. */
    @Before
    public final void setup() {
        // Inject mocks annotated with the @Mock annotation.
        MockitoAnnotations.initMocks(this);
        mViews[MODE_12HR] = mock(getViewClass());
        mPresenters[MODE_12HR] = createPresenter(mViews[MODE_12HR], mLocaleModel, false);
        mButtonTextModels[MODE_12HR] = new ButtonTextModel(mLocaleModel, false);
        mViews[MODE_24HR] = mock(getViewClass());
        mPresenters[MODE_24HR] = createPresenter(mViews[MODE_24HR], mLocaleModel, true);
        mButtonTextModels[MODE_24HR] = new ButtonTextModel(mLocaleModel, true);
    }

    @Test
    public void verifyViewEnabledStatesForEmptyState() {
        // 12-hour mode: number keys enabled over [1, 10) -- presumably keys 1-9 with
        // "0" disabled (confirm against the presenter) -- and AM/PM indicator shown.
        mPresenters[MODE_12HR].onCreate(NumberPadTimePickerState.EMPTY);
        verify(mViews[MODE_12HR]).setNumberKeysEnabled(1, 10);
        verify(mViews[MODE_12HR]).setBackspaceEnabled(false);
        // Assuming no initial text for the time display, there is no need to have to call this.
        verify(mViews[MODE_12HR], never()).updateTimeDisplay(null /* value doesn't matter */);
        verify(mViews[MODE_12HR]).updateAmPmDisplay(null);
        verify(mViews[MODE_12HR]).setAmPmDisplayVisible(true);
        // AM/PM position depends on the locale's written order.
        verify(mViews[MODE_12HR]).setAmPmDisplayIndex(mLocaleModel.isAmPmWrittenBeforeTime() ? 0 : 1);
        verify(mViews[MODE_12HR]).setLeftAltKeyText(altText(0, MODE_12HR));
        verify(mViews[MODE_12HR]).setRightAltKeyText(altText(1, MODE_12HR));
        verify(mViews[MODE_12HR]).setLeftAltKeyEnabled(false);
        verify(mViews[MODE_12HR]).setRightAltKeyEnabled(false);
        verify(mViews[MODE_12HR]).setHeaderDisplayFocused(true);

        // 24-hour mode: all ten number keys enabled and no AM/PM indicator at all.
        mPresenters[MODE_24HR].onCreate(NumberPadTimePickerState.EMPTY);
        verify(mViews[MODE_24HR]).setNumberKeysEnabled(0, 10);
        verify(mViews[MODE_24HR]).setBackspaceEnabled(false);
        verify(mViews[MODE_24HR], never()).updateTimeDisplay(null /* value doesn't matter */);
        verify(mViews[MODE_24HR], never()).updateAmPmDisplay(null /* value doesn't matter */);
        verify(mViews[MODE_24HR]).setAmPmDisplayVisible(false);
        verify(mViews[MODE_24HR], never()).setAmPmDisplayIndex(0 /* value doesn't matter */);
        verify(mViews[MODE_24HR]).setLeftAltKeyText(altText(0, MODE_24HR));
        verify(mViews[MODE_24HR]).setRightAltKeyText(altText(1, MODE_24HR));
        verify(mViews[MODE_24HR]).setLeftAltKeyEnabled(false);
        verify(mViews[MODE_24HR]).setRightAltKeyEnabled(false);
        verify(mViews[MODE_24HR]).setHeaderDisplayFocused(true);
    }

    @Test
    public void clickOnNumberKey_UpdatesTimeDisplay() {
        // Number texts are the same for both 12-hour and 24-hour modes.
        mPresenters[MODE_12HR].onNumberKeyClick(text(1));
        verify(mViews[MODE_12HR]).updateTimeDisplay(text(1));
        mPresenters[MODE_24HR].onNumberKeyClick(text(1));
        verify(mViews[MODE_24HR]).updateTimeDisplay(text(1));
    }

    /** Returns the mocked view for the given mode. */
    INumberPadTimePicker.View getView(int mode) {
        return mViews[mode];
    }

    /** Returns the presenter under test for the given mode. */
    INumberPadTimePicker.Presenter getPresenter(int mode) {
        return mPresenters[mode];
    }

    /** Hook for subclasses: the view interface to mock. */
    Class<? extends INumberPadTimePicker.View> getViewClass() {
        return INumberPadTimePicker.View.class;
    }

    /** Hook for subclasses: builds the presenter under test. */
    INumberPadTimePicker.Presenter createPresenter(INumberPadTimePicker.View view, LocaleModel localeModel, boolean is24HourMode) {
        return new NumberPadTimePickerPresenter(view, localeModel, is24HourMode);
    }

    /** Expected alt-button text (0 = left, 1 = right) for the given mode. */
    private String altText(int leftOrRight, int mode) {
        return mButtonTextModels[mode].altText(leftOrRight);
    }
}
package org.jnosql.artemis.graph;

import org.hamcrest.Matchers;
import org.jnosql.artemis.EntityNotFoundException;
import org.jnosql.artemis.graph.cdi.CDIJUnitRunner;
import org.jnosql.artemis.graph.model.Book;
import org.jnosql.artemis.graph.model.Person;
import org.jnosql.diana.api.Value;
import org.junit.Test;
import org.junit.runner.RunWith;

import javax.inject.Inject;
import java.util.Optional;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

/**
 * Integration tests for {@link EdgeEntity}: creation between two vertices,
 * property add/remove/find, deletion and lookup by id -- all through the
 * CDI-injected {@link GraphTemplate}.
 */
@RunWith(CDIJUnitRunner.class)
public class EdgeEntityTest {

    @Inject
    private GraphTemplate graphTemplate;

    // Fix: renamed from shouldReturnErrorWhenInboudIsNull (typo); JUnit discovers
    // tests via @Test, so the rename is safe.
    @Test(expected = NullPointerException.class)
    public void shouldReturnErrorWhenInboundIsNull() {
        Person person = Person.builder().withName("Poliana").withAge().build();
        Book book = null;
        graphTemplate.edge(person, "reads", book);
    }

    // Fix: renamed from shouldReturnErrorWhenOutboudIsNull (typo).
    @Test(expected = NullPointerException.class)
    public void shouldReturnErrorWhenOutboundIsNull() {
        Person person = Person.builder().withName("Poliana").withAge().build();
        Book book = Book.builder().withAge(2007).withName("The Shack").build();
        graphTemplate.edge(person, "reads", book);
    }

    @Test(expected = NullPointerException.class)
    public void shouldReturnErrorWhenLabelIsNull() {
        Person person = Person.builder().withName("Poliana").withAge().build();
        Book book = Book.builder().withAge(2007).withName("The Shack").build();
        graphTemplate.edge(person, null, book);
    }

    // Outbound entity was never inserted, so its id is null.
    @Test(expected = NullPointerException.class)
    public void shouldReturnNullWhenInboundIdIsNull() {
        Person person = Person.builder().withName("Poliana").withAge().build();
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        graphTemplate.edge(person, "reads", book);
    }

    // Inbound entity was never inserted, so its id is null.
    @Test(expected = NullPointerException.class)
    public void shouldReturnNullWhenOutboundIdIsNull() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = Book.builder().withAge(2007).withName("The Shack").build();
        graphTemplate.edge(person, "reads", book);
    }

    // An id that does not exist in the graph must raise EntityNotFoundException.
    @Test(expected = EntityNotFoundException.class)
    public void shouldReturnEntityNotFoundWhenOutBoundDidNotFound() {
        Person person = Person.builder().withId(-10L).withName("Poliana").withAge().build();
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        graphTemplate.edge(person, "reads", book);
    }

    @Test(expected = EntityNotFoundException.class)
    public void shouldReturnEntityNotFoundWhenInBoundDidNotFound() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = Book.builder().withId(10L).withAge(2007).withName("The Shack").build();
        graphTemplate.edge(person, "reads", book);
    }

    @Test
    public void shouldCreateAnEdge() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);

        assertEquals("reads", edge.getLabel());
        assertEquals(person, edge.getOutbound());
        assertEquals(book, edge.getInbound());
        assertTrue(edge.isEmpty());
        assertNotNull(edge.getId());
    }

    // Requesting the same triple twice must return the same edge, not a duplicate.
    @Test
    public void shouldUseAnEdge() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        EdgeEntity<Person, Book> sameEdge = graphTemplate.edge(person, "reads", book);

        assertEquals(edge.getId(), sameEdge.getId());
        assertEquals(edge, sameEdge);
    }

    // Edge identity is per (outbound, label, inbound) triple, even with a shared inbound vertex.
    @Test
    public void shouldUseAnEdge2() {
        Person poliana = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Person nilzete = graphTemplate.insert(Person.builder().withName("Nilzete").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(poliana, "reads", book);
        EdgeEntity<Person, Book> edge1 = graphTemplate.edge(nilzete, "reads", book);
        EdgeEntity<Person, Book> sameEdge = graphTemplate.edge(poliana, "reads", book);
        EdgeEntity<Person, Book> sameEdge1 = graphTemplate.edge(nilzete, "reads", book);

        assertEquals(edge.getId(), sameEdge.getId());
        assertEquals(edge, sameEdge);
        assertEquals(edge1.getId(), sameEdge1.getId());
        assertEquals(edge1, sameEdge1);
    }

    @Test(expected = NullPointerException.class)
    public void shouldReturnErrorWhenAddKeyIsNull() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        edge.add(null, "Brazil");
    }

    @Test(expected = NullPointerException.class)
    public void shouldReturnErrorWhenAddValueIsNull() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        edge.add("where", null);
    }

    @Test
    public void shouldAddProperty() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        edge.add("where", "Brazil");

        assertFalse(edge.isEmpty());
        assertEquals(1, edge.size());
        assertThat(edge.getProperties(), Matchers.contains(ArtemisProperty.of("where", "Brazil")));
    }

    @Test(expected = NullPointerException.class)
    public void shouldReturnErrorWhenRemoveNullKeyProperty() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        edge.add("where", "Brazil");

        assertFalse(edge.isEmpty());
        edge.remove(null);
    }

    @Test
    public void shouldRemoveProperty() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        edge.add("where", "Brazil");

        assertEquals(1, edge.size());
        assertFalse(edge.isEmpty());
        edge.remove("where");
        assertTrue(edge.isEmpty());
        assertEquals(0, edge.size());
    }

    @Test
    public void shouldFindProperty() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        edge.add("where", "Brazil");

        Optional<Value> where = edge.get("where");
        assertTrue(where.isPresent());
        assertEquals("Brazil", where.get().get());
    }

    // Deleting through the entity itself; a subsequent edge() call creates a fresh edge.
    @Test
    public void shouldDeleteAnEdge() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        edge.delete();

        EdgeEntity<Person, Book> newEdge = graphTemplate.edge(person, "reads", book);
        assertNotEquals(edge.getId(), newEdge.getId());
        graphTemplate.deleteEdge(newEdge.getId().get());
    }

    @Test(expected = NullPointerException.class)
    public void shouldReturnErrorWhenDeleteAnEdgeWithNull() {
        graphTemplate.delete(null);
    }

    // Deleting through the template by edge id.
    @Test
    public void shouldDeleteAnEdge2() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);
        graphTemplate.deleteEdge(edge.getId().get());

        EdgeEntity<Person, Book> newEdge = graphTemplate.edge(person, "reads", book);
        assertNotEquals(edge.getId(), newEdge.getId());
    }

    @Test(expected = NullPointerException.class)
    public void shouldReturnErrorWhenFindEdgeWithNull() {
        graphTemplate.edge(null);
    }

    @Test
    public void shouldFindAnEdge() {
        Person person = graphTemplate.insert(Person.builder().withName("Poliana").withAge().build());
        Book book = graphTemplate.insert(Book.builder().withAge(2007).withName("The Shack").build());
        EdgeEntity<Person, Book> edge = graphTemplate.edge(person, "reads", book);

        Optional<EdgeEntity<Person, Book>> newEdge = graphTemplate.edge(edge.getId().get());
        assertTrue(newEdge.isPresent());
        assertEquals(edge.getId(), newEdge.get().getId());
        graphTemplate.deleteEdge(edge.getId().get());
    }

    @Test
    public void shouldNotFindAnEdge() {
        Optional<EdgeEntity<Person, Book>> edgeEntity = graphTemplate.edge(12L);
        assertFalse(edgeEntity.isPresent());
    }
}
package infovis.overview;

import infovis.data.BusData;
import infovis.data.BusEdge;
import infovis.data.BusStation;
import infovis.data.BusStationManager;
import infovis.data.BusTime;
import infovis.gui.Canvas;
import infovis.gui.PainterAdapter;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.io.IOException;

import javax.swing.JFrame;
import javax.swing.WindowConstants;

/**
 * Visualization of the schematic overview of the Konstanz bus network.
 *
 * @author Marc Spicker
 */
public class Overview extends PainterAdapter {

  /**
   * Data Manager, or <code>null</code> when loading the bus data failed in the
   * constructor.
   */
  private BusStationManager mgr;

  /**
   * Radius of a bus station on the map.
   */
  private static final int STATION_RADIUS = 6;

  /**
   * Test.
   *
   * @param args Unused.
   */
  public static void main(final String[] args) {
    final JFrame frame = new JFrame("Bus test");
    final Canvas c = new Canvas(new Overview(), 800, 600);
    frame.add(c);
    frame.pack();
    c.reset();
    frame.setLocationRelativeTo(null);
    frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
    frame.setVisible(true);
  }

  /**
   * Constructor. Loads the bus data; on failure {@link #mgr} remains
   * <code>null</code> and {@link #draw(Graphics2D)} paints nothing.
   */
  public Overview() {
    try {
      mgr = BusData.load("src/main/resources/");
      mgr.setMaxTimeHours(3);
    } catch(final IOException e) {
      e.printStackTrace();
      return;
    }
  }

  @Override
  public void draw(final Graphics2D gfx) {
    // Fix: if BusData.load() failed in the constructor, mgr is null and this
    // method previously threw a NullPointerException on every repaint.
    if(mgr == null) return;
    gfx.setColor(Color.ORANGE);
    gfx.setStroke(new BasicStroke(2.0f));
    // Draw bus edges; Double.MIN_VALUE marks stations without an abstract position.
    for(final BusStation station : mgr.getStations()) {
      if(station.getAbstractX() == Double.MIN_VALUE) {
        continue;
      }
      final Iterable<BusEdge> edges = station.getEdges(new BusTime(0, 0));
      // Draw Bus edges
      for(final BusEdge edge : edges) {
        gfx.setColor(edge.getLine().getColor());
        final BusStation from = edge.getFrom();
        final BusStation to = edge.getTo();
        if(from.getAbstractX() == Double.MIN_VALUE || to.getAbstractX() == Double.MIN_VALUE) {
          continue;
        }
        gfx.drawLine((int) from.getAbstractX(), (int) from.getAbstractY(),
            (int) to.getAbstractX(), (int) to.getAbstractY());
      }
    }
    // Draw Bus stations
    gfx.setStroke(new BasicStroke());
    for(final BusStation station : mgr.getStations()) {
      if(station.getAbstractX() == Double.MIN_VALUE) {
        continue;
      }
      final int x = (int) station.getAbstractX();
      final int y = (int) station.getAbstractY();
      gfx.setColor(Color.ORANGE);
      gfx.fillOval(x - STATION_RADIUS, y - STATION_RADIUS, STATION_RADIUS * 2, STATION_RADIUS * 2);
      gfx.setColor(Color.BLACK);
      gfx.drawOval(x - STATION_RADIUS, y - STATION_RADIUS, STATION_RADIUS * 2, STATION_RADIUS * 2);
      gfx.drawString(station.getName(), x + 2 * STATION_RADIUS, y + STATION_RADIUS);
    }
  }
}
package io.cattle.platform.iaas.api.auth.integration.ldap.OpenLDAP;

import io.cattle.platform.iaas.api.auth.SecurityConstants;
import io.cattle.platform.iaas.api.auth.dao.AuthDao;
import io.cattle.platform.iaas.api.auth.identity.Token;
import io.cattle.platform.iaas.api.auth.integration.interfaces.TokenCreator;
import io.cattle.platform.iaas.api.auth.integration.ldap.ServiceContextCreationException;
import io.cattle.platform.iaas.api.auth.integration.ldap.ServiceContextRetrievalException;
import io.cattle.platform.iaas.api.auth.projects.ProjectResourceManager;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.token.TokenService;
import io.cattle.platform.util.type.CollectionUtils;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;

import java.util.Map;

import javax.inject.Inject;

import org.apache.commons.lang3.ObjectUtils;

/**
 * {@link TokenCreator} that authenticates "username:password" credentials
 * against an OpenLDAP directory and issues an API token.
 */
public class OpenLDAPTokenCreator extends OpenLDAPConfigurable implements TokenCreator {

    @Inject
    OpenLDAPIdentityProvider openLDAPIdentityProvider;
    @Inject
    AuthDao authDao;
    @Inject
    TokenService tokenService;
    @Inject
    ProjectResourceManager projectResourceManager;
    @Inject
    ObjectManager objectManager;
    @Inject
    OpenLDAPUtils OpenLDAPUtils;

    /** Authenticates against the directory and wraps the identities in a token. */
    private Token getLdapToken(String username, String password) {
        if (!isConfigured()) {
            throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, OpenLDAPConstants.CONFIG,
                    "Ldap Not Configured.", null);
        }
        try {
            return OpenLDAPUtils.createToken(openLDAPIdentityProvider.getIdentities(username, password), null);
        } catch (ServiceContextRetrievalException | ServiceContextCreationException e) {
            // Any failure to reach the directory surfaces as a generic "ldap down" error.
            throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, "LdapDown",
                    "Could not talk to ldap", null);
        }
    }

    @Override
    public Token getToken(ApiRequest request) {
        Map<String, Object> body = CollectionUtils.toMap(request.getRequestObject());
        if (!isConfigured()) {
            throw new ClientVisibleException(ResponseCodes.SERVICE_UNAVAILABLE, "OpenLDAPConfig",
                    "OpenLDAPConfig is not Configured.", null);
        }
        // Credentials arrive as a single "username:password" code; only the first
        // colon separates them, so passwords may themselves contain colons.
        String code = ObjectUtils.toString(body.get(SecurityConstants.CODE));
        int separator = code.indexOf(':');
        if (separator < 0) {
            throw new ClientVisibleException(ResponseCodes.FORBIDDEN);
        }
        return getLdapToken(code.substring(0, separator), code.substring(separator + 1));
    }

    @Override
    public void reset() {
        openLDAPIdentityProvider.reset();
    }

    @Override
    public String getName() {
        return OpenLDAPConstants.TOKEN_CREATOR;
    }
}
package innovimax.mixthem;

import java.io.IOException;
import java.io.File;
import java.io.FileInputStream;
import java.io.OutputStream;

/**
 * Command-line tool that mixes the contents of two files according to a rule.
 */
public class MixThem {

    /** Size in bytes of the buffer used when copying a file. */
    private final static int BYTE_BUFFER_SIZE = 1024;

    /**
     * Entry point.
     *
     * @param args [rule,] file1, file2
     */
    public static void main(String[] args) {
        run(args);
    }

    /** Validates the arguments, then mixes the files or prints the usage. */
    private static void run(String[] args) {
        try {
            if (checkArguments(args)) {
                String rule, file1, file2;
                if (args.length >= 3) {
                    rule = args[0];
                    file1 = args[1];
                    file2 = args[2];
                } else {
                    // No rule given: default to outputting file1.
                    rule = Constants.RULE_1;
                    file1 = args[0];
                    file2 = args[1];
                }
                processFiles(rule, new File(file1), new File(file2), System.out);
            } else {
                printUsage();
            }
        } catch (MixException e) {
            System.err.println("Files mixing has been aborted due to following reason:");
            System.err.println(e.getMessage());
        } catch (Exception e) {
            System.err.println("An unexpected error occurs.");
            e.printStackTrace();
        }
    }

    /**
     * Mixes the two files according to the rule and writes the result to out.
     *
     * @throws MixException if a file error occurs
     */
    public static void processFiles(String rule, File file1, File file2, OutputStream out) throws MixException {
        try {
            switch (rule) {
                case Constants.RULE_1:
                    copyChar(file1, out);
                    break;
                case Constants.RULE_2:
                    copyChar(file2, out);
                    break;
                case Constants.RULE_ALT_LINE:
                case Constants.RULE_ALT_BYTE:
                case Constants.RULE_RANDOM_ALT_LINE:
                case Constants.RULE_JOIN:
                default:
                    System.out.println("This rule has not been implemented yet.");
                    break;
            }
        } catch (IOException e) {
            throw new MixException("Unexpected file error", e);
        }
        // A MixException thrown inside the try block propagates unchanged; the
        // former "catch (MixException e) { throw e; }" was redundant and removed.
    }

    // this one copies the file content as raw bytes
    // Fix: try-with-resources so the input stream is closed even when an
    // exception occurs mid-copy (previously it leaked on error).
    private static void copyChar(File file, OutputStream out) throws MixException, IOException {
        try (FileInputStream in = new FileInputStream(file)) {
            byte[] buffer = new byte[BYTE_BUFFER_SIZE];
            int c;
            while ((c = in.read(buffer)) != -1) {
                out.write(buffer, 0, c);
            }
        }
        // out is deliberately left open: callers may pass System.out.
    }

    /**
     * Checks the command-line arguments, printing a message for each problem.
     *
     * @return true when the rule (if any) is known and both files are readable
     */
    public static boolean checkArguments(String[] args) {
        String rule = null;
        String file1 = null;
        String file2 = null;
        if (args.length >= 3) {
            rule = args[0];
            file1 = args[1];
            file2 = args[2];
        } else {
            if (args.length > 0) {
                file1 = args[0];
            }
            if (args.length > 1) {
                file2 = args[1];
            }
        }
        boolean ruleOk = true;
        if (rule != null) {
            if (!rule.equals(Constants.RULE_1)
                    && !rule.equals(Constants.RULE_2)
                    && !rule.equals(Constants.RULE_ALT_LINE)
                    && !rule.equals(Constants.RULE_ALT_BYTE)
                    && !rule.equals(Constants.RULE_RANDOM_ALT_LINE)
                    && !rule.equals(Constants.RULE_JOIN)) {
                System.out.println("rule argument is incorrect.");
                ruleOk = false;
            }
        }
        if (ruleOk) {
            if (file1 == null) {
                System.out.println("file1 argument missing.");
            } else if (file2 == null) {
                System.out.println("file2 argument missing.");
            } else {
                File file = new File(file1);
                if (file.exists()) {
                    if (file.canRead()) {
                        file = new File(file2);
                        if (file.exists()) {
                            if (file.canRead()) {
                                return true;
                            } else {
                                System.out.println("file2 cannot be read.");
                            }
                        } else {
                            System.out.println("file2 not found.");
                        }
                    } else {
                        System.out.println("file1 cannot be read.");
                    }
                } else {
                    System.out.println("file1 not found.");
                }
            }
        }
        return false;
    }

    /** Prints the command-line usage and the list of supported rules. */
    private static void printUsage() {
        System.out.println(" ");
        System.out.println("Usage:");
        System.out.println(" ");
        System.out.println(" mix-them file1 file2");
        System.out.println(" (will generate any file based on file1 and file2)");
        System.out.println(" ");
        System.out.println(" mix-them -[rule] file1 file2");
        System.out.println(" (will generate a file based on the rule)");
        System.out.println(" ");
        System.out.println(" Here are the list of rules");
        System.out.println(" - " + Constants.RULE_1 + ": will output file1");
        System.out.println(" - " + Constants.RULE_2 + ": will output file2");
        System.out.println(" - " + Constants.RULE_ALT_LINE + ": will output one line of each starting with first line of file1");
        System.out.println(" - " + Constants.RULE_ALT_BYTE + ": will output one byte of each starting with first byte of file1");
        System.out.println(" - " + Constants.RULE_RANDOM_ALT_LINE + " [seed]: will output one line of each code randomly based on a seed for reproducability");
        // Fix: "Sysyem.out.println" and "Constants.RUE_JOIN" were compile errors.
        System.out.println(" - " + Constants.RULE_JOIN + " will output merging of lines that have common occurrence");
        System.out.println(" ");
    }
}
package it.neef.tu.ba.wectask;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.TextOutputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;

/**
 * Count user edits from wikipedia metadata dumps.
 */
public class Job {

    // Filter for specific namespace. Presumably 0 is the main/article
    // namespace of MediaWiki dumps -- TODO confirm against the dump schema.
    private static final int NS_FILTER = 0;

    /**
     * Parses a wikipedia dump, counts edits (revisions) per user and writes
     * "username,count" lines to the output path.
     *
     * @param args args[0] = dump XML file, args[1] = output path
     */
    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            System.err.println("USAGE: Job <wikipediadump.xml> <output>");
            return;
        }

        // set up the execution environment
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Parse XML Dump from wikipedia.
        // We're only interested in <page>...<revision>...<contributor></contributor></revision></page>
        XMLContentHandler xmlCH = null;
        // Fix: hand the SAX parser a byte stream instead of a FileReader so it
        // honors the encoding declared in the XML prolog (a Reader forces the
        // platform default charset); try-with-resources also closes the file,
        // which the old code never did. FileNotFoundException is an IOException,
        // so the former three catch blocks collapse into one multi-catch with
        // identical behavior.
        try (FileInputStream input = new FileInputStream(args[0])) {
            XMLReader xmlReader = XMLReaderFactory.createXMLReader();
            InputSource inputSource = new InputSource(input);
            xmlCH = new XMLContentHandler();
            xmlReader.setContentHandler(xmlCH);
            xmlReader.parse(inputSource);
        } catch (IOException | SAXException e) {
            e.printStackTrace();
        }
        if (xmlCH == null) {
            System.err.println("Error parsing XML!");
            return;
        }

        // Filter for specific namespace!
        DataSet<Page> pageSet = env.fromCollection(xmlCH.getAllPages()).filter(new FilterFunction<Page>() {
            @Override
            public boolean filter(Page page) throws Exception {
                return page.getNs() == NS_FILTER;
            }
        });

        // Put all <Username,Edits> (Edits = 1 per revision) into a dataset.
        DataSet<Tuple2<String, Integer>> userEditSet = pageSet.flatMap(
                new FlatMapFunction<Page, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(Page page, Collector<Tuple2<String, Integer>> collector) throws Exception {
                for (Revision r : page.getRevisions()) {
                    collector.collect(new Tuple2<String, Integer>(r.getUsername(), 1));
                }
            }
        });

        // Group by Username and sum by Edits
        userEditSet
                .groupBy(0)
                .sum(1)
                //.print();
                .writeAsFormattedText(args[1], new TextOutputFormat.TextFormatter<Tuple2<String, Integer>>() {
                    @Override
                    public String format(Tuple2<String, Integer> stringIntegerTuple2) {
                        return stringIntegerTuple2.f0 + "," + String.valueOf(stringIntegerTuple2.f1);
                    }
                });

        env.execute("Wikipedia user edit count");
    }
}
package kalang.script;

import java.io.File;
import java.io.IOException;
import java.util.Random;
import kalang.lang.Script;
import org.apache.commons.io.FileUtils;

/**
 * Convenience base class for shell-like scripts: text-file I/O helpers,
 * random numbers and external process execution.
 *
 * @author Kason Yang
 */
public abstract class ShellScript extends Script {

    /** Default character encoding used by the read/write helpers. */
    public final static String DEFAULT_ENCODING = "utf8";

    /**
     * Reads the whole file as text.
     *
     * @param file     the file to read
     * @param encoding the character encoding to decode with
     * @return the file content
     * @throws IOException if the file cannot be read
     */
    protected String readText(File file, String encoding) throws IOException {
        return FileUtils.readFileToString(file, encoding);
    }

    /** Reads the whole file as text using {@link #DEFAULT_ENCODING}. */
    protected String readText(File file) throws IOException {
        return readText(file, DEFAULT_ENCODING);
    }

    /**
     * Writes the given text to a file.
     *
     * @param file     the destination file
     * @param data     the text to write
     * @param encoding the character encoding to encode with
     * @throws IOException if the file cannot be written
     */
    protected void writeText(File file, String data, String encoding) throws IOException {
        FileUtils.writeStringToFile(file, data, encoding);
    }

    /** Writes the given text using {@link #DEFAULT_ENCODING}. */
    protected void writeText(File file, String data) throws IOException {
        writeText(file, data, DEFAULT_ENCODING);
    }

    /**
     * Returns a random integer uniformly distributed over the inclusive range
     * [min, max].
     *
     * Fix: the previous implementation used nextInt(max - min) + min, which
     * could never return max (the old TODO comment flagged exactly this).
     */
    protected int randomInt(int min, int max) {
        Random random = new Random();
        return random.nextInt(max - min + 1) + min;
    }

    /** Returns a random integer over the whole int range. */
    protected int randomInt() {
        return new Random().nextInt();
    }

    /** Recursively deletes a directory. */
    protected void deleteDirectory(File file) throws IOException {
        FileUtils.deleteDirectory(file);
    }

    /**
     * Runs an external command with inherited stdio and waits for it to finish.
     *
     * @param command   the program to run
     * @param arguments its arguments
     * @return the process exit value
     * @throws IOException if the process cannot be started
     */
    protected int exec(String command, String... arguments) throws IOException {
        String[] list = new String[arguments.length + 1];
        list[0] = command;
        if (arguments.length > 0) {
            System.arraycopy(arguments, 0, list, 1, arguments.length);
        }
        ProcessBuilder pb = new ProcessBuilder(list);
        pb.inheritIO();
        Process p = pb.start();
        try {
            p.waitFor();
        } catch (InterruptedException ex) {
            // Fix: restore the interrupt status instead of silently swallowing it.
            Thread.currentThread().interrupt();
        }
        return p.exitValue();
    }

    /** Runs a command with no arguments. */
    protected int exec(String command) throws IOException {
        return exec(command, new String[0]);
    }
}
package net.fornwall.jelf;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.MappedByteBuffer;

/**
 * Parsed representation of an ELF (Executable and Linkable Format) file:
 * the ELF header plus lazily-parsed section headers and program headers.
 */
public final class ElfFile {

    /** Relocatable file type. A possible value of {@link #file_type}. */
    public static final int FT_REL = 1;
    /** Executable file type. A possible value of {@link #file_type}. */
    public static final int FT_EXEC = 2;
    /** Shared object file type. A possible value of {@link #file_type}. */
    public static final int FT_DYN = 3;
    /** Core file file type. A possible value of {@link #file_type}. */
    public static final int FT_CORE = 4;

    /** 32-bit objects. */
    public static final byte CLASS_32 = 1;
    /** 64-bit objects. */
    public static final byte CLASS_64 = 2;

    /** LSB data encoding. */
    public static final byte DATA_LSB = 1;
    /** MSB data encoding. */
    public static final byte DATA_MSB = 2;

    /** No architecture type. */
    public static final int ARCH_NONE = 0;
    /** AT&amp;T architecture type. */
    public static final int ARCH_ATT = 1;
    /** SPARC architecture type. */
    public static final int ARCH_SPARC = 2;
    /** Intel 386 architecture type. */
    public static final int ARCH_i386 = 3;
    /** Motorola 68000 architecture type. */
    public static final int ARCH_68k = 4;
    /** Motorola 88000 architecture type. */
    public static final int ARCH_88k = 5;
    /** Intel 860 architecture type. */
    public static final int ARCH_i860 = 7;
    /** MIPS architecture type. */
    public static final int ARCH_MIPS = 8;
    public static final int ARCH_ARM = 0x28;
    public static final int ARCH_X86_64 = 0x3E;
    public static final int ARCH_AARCH64 = 0xB7;

    /** Byte identifying the size of objects, either {@link #CLASS_32} or {@link #CLASS_64}. */
    public final byte objectSize;

    /**
     * A byte identifying the data encoding of the processor specific data, either
     * {@link #DATA_LSB} or {@link #DATA_MSB}.
     */
    public final byte encoding;

    public final byte elfVersion;

    public final byte abi;
    public final byte abiVersion;

    /** Identifies the object file type. One of the FT_* constants in the class. */
    public final short file_type; // Elf32_Half
    /** The required architecture. One of the ARCH_* constants in the class. */
    public final short arch; // Elf32_Half
    /** Version */
    public final int version; // Elf32_Word
    /**
     * Virtual address to which the system first transfers control. If there is no entry point for the file the value
     * is 0.
     */
    public final long entry_point; // Elf32_Addr
    /** Program header table offset in bytes. If there is no program header table the value is 0. */
    public final long ph_offset; // Elf32_Off
    /** Section header table offset in bytes. If there is no section header table the value is 0. */
    public final long sh_offset; // Elf32_Off
    /** Processor specific flags. */
    public final int flags; // Elf32_Word
    /** ELF header size in bytes. */
    public final short eh_size; // Elf32_Half
    /** e_phentsize. Size of one entry in the file's program header table in bytes. All entries are the same size. */
    public final short ph_entry_size; // Elf32_Half
    /** e_phnum. Number of {@link ElfSegment} entries in the program header table, 0 if no entries. */
    public final short num_ph; // Elf32_Half
    /** Section header entry size in bytes. */
    public final short sh_entry_size; // Elf32_Half
    /** Number of entries in the section header table, 0 if no entries. */
    public final short num_sh; // Elf32_Half

    /**
     * Elf{32,64}_Ehdr#e_shstrndx. Index into the section header table associated with the section name string table.
     * SH_UNDEF if there is no section name string table.
     */
    private short sh_string_ndx; // Elf32_Half

    /** MemoizedObject array of section headers associated with this ELF file. */
    private MemoizedObject<ElfSection>[] sectionHeaders;
    /** MemoizedObject array of program headers associated with this ELF file. */
    private MemoizedObject<ElfSegment>[] programHeaders;

    /** Used to cache symbol table lookup. */
    private ElfSection symbolTableSection;
    /** Used to cache dynamic symbol table lookup. */
    private ElfSection dynamicSymbolTableSection;
    private ElfSection dynamicLinkSection;

    /**
     * Returns the section header at the specified index. The section header at index 0 is defined as being a undefined
     * section.
     */
    public ElfSection getSection(int index) throws ElfException, IOException {
        return sectionHeaders[index].getValue();
    }

    /** Returns the section header string table associated with this ELF file. */
    public ElfStringTable getSectionNameStringTable() throws ElfException, IOException {
        return getSection(sh_string_ndx).getStringTable();
    }

    /** Returns the string table associated with this ELF file. */
    public ElfStringTable getStringTable() throws ElfException, IOException {
        return findStringTableWithName(ElfSection.STRING_TABLE_NAME);
    }

    /**
     * Returns the dynamic string table associated with this ELF file, or null if one does not exist.
     */
    public ElfStringTable getDynamicStringTable() throws ElfException, IOException {
        return findStringTableWithName(ElfSection.DYNAMIC_STRING_TABLE_NAME);
    }

    private ElfStringTable findStringTableWithName(String tableName) throws ElfException, IOException {
        // Loop through the section headers looking for one named "tableName".
        // Entry 0 can be skipped since it is defined as being undefined.
        for (int i = 1; i < num_sh; i++) {
            ElfSection sh = getSection(i);
            if (tableName.equals(sh.getName())) return sh.getStringTable();
        }
        return null;
    }

    /** The {@link ElfSection#SHT_SYMTAB} section (of which there may be only one), if any. */
    public ElfSection getSymbolTableSection() throws ElfException, IOException {
        return (symbolTableSection != null) ? symbolTableSection : (symbolTableSection = getSymbolTableSection(ElfSection.SHT_SYMTAB));
    }

    /** The {@link ElfSection#SHT_DYNSYM} section (of which there may be only one), if any. */
    public ElfSection getDynamicSymbolTableSection() throws ElfException, IOException {
        return (dynamicSymbolTableSection != null) ? dynamicSymbolTableSection : (dynamicSymbolTableSection = getSymbolTableSection(ElfSection.SHT_DYNSYM));
    }

    /** The {@link ElfSection#SHT_DYNAMIC} section (of which there may be only one). Named ".dynamic". */
    public ElfSection getDynamicLinkSection() throws IOException {
        return (dynamicLinkSection != null) ? dynamicLinkSection : (dynamicLinkSection = getSymbolTableSection(ElfSection.SHT_DYNAMIC));
    }

    /** Returns the first section of the given type, or null if there is none. */
    private ElfSection getSymbolTableSection(int type) throws ElfException, IOException {
        for (int i = 1; i < num_sh; i++) {
            ElfSection sh = getSection(i);
            if (sh.type == type) return sh;
        }
        return null;
    }

    /** Returns the elf symbol with the specified name or null if one is not found. */
    public ElfSymbol getELFSymbol(String symbolName) throws ElfException, IOException {
        if (symbolName == null) return null;

        // Check the dynamic symbol table first, then the full symbol table.
        ElfSymbol symbol = searchSymbolByName(getDynamicSymbolTableSection(), symbolName);
        if (symbol != null) return symbol;
        return searchSymbolByName(getSymbolTableSection(), symbolName);
    }

    /**
     * Scans a symbol table from both ends towards the middle for a symbol with
     * the given name.
     *
     * <p>The loop bound {@code (numSymbols + 1) / 2} guarantees the middle
     * entry of an odd-sized table is visited; the previous bound
     * {@code Math.ceil(numSymbols / 2)} used integer division (so the ceil was
     * a no-op) and skipped that entry.
     *
     * @param sh the symbol table section, may be null
     * @return the matching symbol or null
     */
    private static ElfSymbol searchSymbolByName(ElfSection sh, String symbolName) throws ElfException, IOException {
        if (sh == null) return null;
        int numSymbols = sh.getNumberOfSymbols();
        for (int i = 0; i < (numSymbols + 1) / 2; i++) {
            ElfSymbol symbol = sh.getELFSymbol(i);
            if (symbolName.equals(symbol.getName())) return symbol;
            symbol = sh.getELFSymbol(numSymbols - 1 - i);
            if (symbolName.equals(symbol.getName())) return symbol;
        }
        return null;
    }

    /**
     * Returns the elf symbol with the specified address or null if one is not found. 'address' is relative to base of
     * shared object for .so's.
     */
    public ElfSymbol getELFSymbol(long address) throws ElfException, IOException {
        // Check the dynamic symbol table first, then the full symbol table.
        ElfSymbol symbol = searchSymbolByAddress(getDynamicSymbolTableSection(), address);
        if (symbol != null) return symbol;
        return searchSymbolByAddress(getSymbolTableSection(), address);
    }

    /** Linear search for the symbol whose [value, value+size) range contains the address. */
    private static ElfSymbol searchSymbolByAddress(ElfSection sh, long address) throws ElfException, IOException {
        if (sh == null) return null;
        int numSymbols = sh.getNumberOfSymbols();
        for (int i = 0; i < numSymbols; i++) {
            ElfSymbol symbol = sh.getELFSymbol(i);
            long value = symbol.value;
            if (address >= value && address < value + symbol.size) return symbol;
        }
        return null;
    }

    public ElfSegment getProgramHeader(int index) throws IOException {
        return programHeaders[index].getValue();
    }

    /**
     * Reads the stream to its end and parses the content as an ELF file.
     * Aborts early (without reading the whole stream) if the content does not
     * start with the ELF magic number.
     */
    public static ElfFile fromStream(InputStream in) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] buffer = new byte[8192];
        boolean magicChecked = false;
        while (true) {
            int readNow = in.read(buffer, 0, buffer.length);
            if (readNow == -1) break;
            baos.write(buffer, 0, readNow);
            // Only check the magic once at least 4 bytes have accumulated. The
            // previous code threw "Bad first read" whenever the very first
            // read() legitimately returned fewer than 4 bytes.
            if (!magicChecked && baos.size() >= 4) {
                byte[] soFar = baos.toByteArray();
                if (!(0x7f == soFar[0] && 'E' == soFar[1] && 'L' == soFar[2] && 'F' == soFar[3]))
                    throw new ElfException("Bad magic number for file");
                magicChecked = true;
            }
        }
        return fromBytes(baos.toByteArray());
    }

    /** Reads the whole file into memory and parses it as an ELF file. */
    public static ElfFile fromFile(File file) throws ElfException, IOException {
        byte[] buffer = new byte[(int) file.length()];
        try (FileInputStream in = new FileInputStream(file)) {
            int totalRead = 0;
            while (totalRead < buffer.length) {
                int readNow = in.read(buffer, totalRead, buffer.length - totalRead);
                if (readNow == -1) {
                    throw new ElfException("Premature end of file");
                } else {
                    totalRead += readNow;
                }
            }
        }
        return new ElfFile(new ByteArrayInputStream(buffer));
    }

    /** Parses the byte array as an ELF file. */
    public static ElfFile fromBytes(byte[] buffer) throws ElfException, IOException {
        return new ElfFile(new ByteArrayInputStream(buffer));
    }

    // NOTE: the two constructors below duplicate the header parsing. They
    // cannot delegate to a shared helper because every header field is final
    // (assignable only directly in a constructor), and they cannot chain via
    // this(...) because ElfParser's constructor needs the `this` reference.

    public ElfFile(MappedByteBuffer buffer, long startPosition) throws ElfException, IOException {
        final ElfParser parser = new ElfParser(this, buffer, startPosition);

        byte[] ident = new byte[16];
        int bytesRead = parser.read(ident);
        if (bytesRead != ident.length)
            throw new ElfException("Error reading elf header (read " + bytesRead + "bytes - expected to read " + ident.length + "bytes)");

        if (!(0x7f == ident[0] && 'E' == ident[1] && 'L' == ident[2] && 'F' == ident[3])) throw new ElfException("Bad magic number for file");

        objectSize = ident[4];
        if (!(objectSize == CLASS_32 || objectSize == CLASS_64)) throw new ElfException("Invalid object size class: " + objectSize);
        encoding = ident[5];
        if (!(encoding == DATA_LSB || encoding == DATA_MSB)) throw new ElfException("Invalid encoding: " + encoding);
        elfVersion = ident[6];
        if (elfVersion != 1) throw new ElfException("Invalid elf version: " + elfVersion);
        abi = ident[7]; // EI_OSABI, target operating system ABI
        abiVersion = ident[8]; // EI_ABIVERSION, ABI version. Linux kernel (after at least 2.6) has no definition of it.
        // ident[9-15] // EI_PAD, currently unused.

        file_type = parser.readShort();
        arch = parser.readShort();
        version = parser.readInt();
        entry_point = parser.readIntOrLong();
        ph_offset = parser.readIntOrLong();
        sh_offset = parser.readIntOrLong();
        flags = parser.readInt();
        eh_size = parser.readShort();
        ph_entry_size = parser.readShort();
        num_ph = parser.readShort();
        sh_entry_size = parser.readShort();
        num_sh = parser.readShort();
        if (num_sh == 0) {
            throw new ElfException("e_shnum is SHN_UNDEF(0), which is not supported yet"
                    + " (the actual number of section header table entries is contained in the sh_size field of the section header at index 0)");
        }
        sh_string_ndx = parser.readShort();
        if (sh_string_ndx == /* SHN_XINDEX= */0xffff) {
            throw new ElfException("e_shstrndx is SHN_XINDEX(0xffff), which is not supported yet"
                    + " (the actual index of the section name string table section is contained in the sh_link field of the section header at index 0)");
        }

        // Section and program headers are parsed lazily on first access.
        sectionHeaders = MemoizedObject.uncheckedArray(num_sh);
        for (int i = 0; i < num_sh; i++) {
            final long sectionHeaderOffset = sh_offset + (i * sh_entry_size);
            sectionHeaders[i] = new MemoizedObject<ElfSection>() {
                @Override
                public ElfSection computeValue() throws ElfException, IOException {
                    return new ElfSection(parser, sectionHeaderOffset);
                }
            };
        }

        programHeaders = MemoizedObject.uncheckedArray(num_ph);
        for (int i = 0; i < num_ph; i++) {
            final long programHeaderOffset = ph_offset + (i * ph_entry_size);
            programHeaders[i] = new MemoizedObject<ElfSegment>() {
                @Override
                public ElfSegment computeValue() throws IOException {
                    return new ElfSegment(parser, programHeaderOffset);
                }
            };
        }
    }

    public ElfFile(ByteArrayInputStream baos) throws ElfException, IOException {
        final ElfParser parser = new ElfParser(this, baos);

        byte[] ident = new byte[16];
        int bytesRead = parser.read(ident);
        if (bytesRead != ident.length)
            throw new ElfException("Error reading elf header (read " + bytesRead + "bytes - expected to read " + ident.length + "bytes)");

        if (!(0x7f == ident[0] && 'E' == ident[1] && 'L' == ident[2] && 'F' == ident[3])) throw new ElfException("Bad magic number for file");

        objectSize = ident[4];
        if (!(objectSize == CLASS_32 || objectSize == CLASS_64)) throw new ElfException("Invalid object size class: " + objectSize);
        encoding = ident[5];
        if (!(encoding == DATA_LSB || encoding == DATA_MSB)) throw new ElfException("Invalid encoding: " + encoding);
        elfVersion = ident[6];
        if (elfVersion != 1) throw new ElfException("Invalid elf version: " + elfVersion);
        abi = ident[7]; // EI_OSABI, target operating system ABI
        abiVersion = ident[8]; // EI_ABIVERSION, ABI version. Linux kernel (after at least 2.6) has no definition of it.
        // ident[9-15] // EI_PAD, currently unused.

        file_type = parser.readShort();
        arch = parser.readShort();
        version = parser.readInt();
        entry_point = parser.readIntOrLong();
        ph_offset = parser.readIntOrLong();
        sh_offset = parser.readIntOrLong();
        flags = parser.readInt();
        eh_size = parser.readShort();
        ph_entry_size = parser.readShort();
        num_ph = parser.readShort();
        sh_entry_size = parser.readShort();
        num_sh = parser.readShort();
        if (num_sh == 0) {
            throw new ElfException("e_shnum is SHN_UNDEF(0), which is not supported yet"
                    + " (the actual number of section header table entries is contained in the sh_size field of the section header at index 0)");
        }
        sh_string_ndx = parser.readShort();
        if (sh_string_ndx == /* SHN_XINDEX= */0xffff) {
            throw new ElfException("e_shstrndx is SHN_XINDEX(0xffff), which is not supported yet"
                    + " (the actual index of the section name string table section is contained in the sh_link field of the section header at index 0)");
        }

        // Section and program headers are parsed lazily on first access.
        sectionHeaders = MemoizedObject.uncheckedArray(num_sh);
        for (int i = 0; i < num_sh; i++) {
            final long sectionHeaderOffset = sh_offset + (i * sh_entry_size);
            sectionHeaders[i] = new MemoizedObject<ElfSection>() {
                @Override
                public ElfSection computeValue() throws ElfException, IOException {
                    return new ElfSection(parser, sectionHeaderOffset);
                }
            };
        }

        programHeaders = MemoizedObject.uncheckedArray(num_ph);
        for (int i = 0; i < num_ph; i++) {
            final long programHeaderOffset = ph_offset + (i * ph_entry_size);
            programHeaders[i] = new MemoizedObject<ElfSegment>() {
                @Override
                public ElfSegment computeValue() throws IOException {
                    return new ElfSegment(parser, programHeaderOffset);
                }
            };
        }
    }

    /** The interpreter specified by the {@link ElfSegment#PT_INTERP} program header, if any. */
    public String getInterpreter() throws IOException {
        for (MemoizedObject<ElfSegment> programHeader : programHeaders) {
            ElfSegment ph = programHeader.getValue();
            if (ph.type == ElfSegment.PT_INTERP) return ph.getIntepreter();
        }
        return null;
    }
}
/*
 * @(#) OrderedSet.java
 */

package net.pwall.util;

import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;

/**
 * An implementation of the {@link Set} interface using an ordered list and binary search.
 *
 * @author  Peter Wall
 * @param   <E>     the element type
 */
public class OrderedSet<E> extends AbstractSet<E> {

    private final List<E> list;
    private final Comparator<E> comparator;

    /**
     * Construct an {@code OrderedSet}.
     *
     * @param comparator the comparator that determines the order of the set
     */
    public OrderedSet(Comparator<E> comparator) {
        list = new ArrayList<>();
        this.comparator = comparator;
    }

    /**
     * Construct an {@code OrderedSet}, providing an initial collection.
     *
     * @param comparator the comparator that determines the order of the set
     * @param c the initial contents
     */
    public OrderedSet(Comparator<E> comparator, Collection<? extends E> c) {
        this(comparator);
        addAll(c);
    }

    /**
     * Binary search for the given element, shared by {@code contains},
     * {@code add} and {@code remove} (previously triplicated inline).
     *
     * @param e the element to look for
     * @return  the index of the element if present, otherwise
     *          {@code -(insertionPoint) - 1} (the same convention as
     *          {@link java.util.Collections#binarySearch}).
     */
    private int search(E e) {
        int lo = 0;
        int hi = list.size();
        while (lo < hi) {
            // Unsigned shift avoids int overflow of (lo + hi) for huge lists.
            int mid = (lo + hi) >>> 1;
            int comp = comparator.compare(list.get(mid), e);
            if (comp == 0)
                return mid;
            if (comp < 0)
                lo = mid + 1;
            else
                hi = mid;
        }
        return -lo - 1;
    }

    /**
     * Test whether the set contains the specified object.
     *
     * @param o the object
     * @return {@code true} if the object is contained in the set
     * @throws NullPointerException if the object is {@code null}
     * @throws ClassCastException if the object is not of the class of the set
     * @see Collection#contains(Object)
     */
    @Override
    public boolean contains(Object o) {
        @SuppressWarnings("unchecked")
        E c = (E)Objects.requireNonNull(o);
        return search(c) >= 0;
    }

    /**
     * Add an element to the set.
     *
     * @param e the new element
     * @return {@code true} if the set changed as a result of the operation
     * @throws NullPointerException if the object is {@code null}
     * @see Collection#add(Object)
     */
    @Override
    public boolean add(E e) {
        Objects.requireNonNull(e);
        int index = search(e);
        if (index >= 0)
            return false; // already present; sets hold no duplicates
        list.add(-index - 1, e); // decode the insertion point
        return true;
    }

    /**
     * Remove the specified object from the set.
     *
     * @param o the object to be removed
     * @return {@code true} if the set changed as a result of the operation
     * @throws NullPointerException if the object is {@code null}
     * @throws ClassCastException if the object is not of the class of the set
     * @see Collection#remove(Object)
     */
    @Override
    public boolean remove(Object o) {
        @SuppressWarnings("unchecked")
        E c = (E)Objects.requireNonNull(o);
        int index = search(c);
        if (index < 0)
            return false;
        list.remove(index);
        return true;
    }

    /**
     * {@inheritDoc}
     * @see Collection#clear()
     */
    @Override
    public void clear() {
        list.clear();
    }

    /**
     * Returns an {@link Iterator} over the elements contained in this set. The elements are
     * returned in the order determined by the {@link Comparator} associated with the set.
     *
     * @return the {@link Iterator}
     * @see Collection#iterator()
     */
    @Override
    public Iterator<E> iterator() {
        return new Iter();
    }

    /**
     * {@inheritDoc}
     * @see Collection#size()
     */
    @Override
    public int size() {
        return list.size();
    }

    /**
     * Create a new {@code OrderedSet} of {@link Comparable} objects, ordered by their
     * natural ordering.
     *
     * @param <C> the class of the elements
     * @return the new set
     */
    public static <C extends Comparable<C>> OrderedSet<C> create() {
        // Comparator.naturalOrder() replaces the previous hand-rolled
        // anonymous comparator that did the same thing.
        return new OrderedSet<>(Comparator.<C>naturalOrder());
    }

    /**
     * Iterator over the backing list; supports {@code remove()} after each
     * {@code next()}, per the {@link Iterator} contract.
     */
    public class Iter implements Iterator<E> {

        private int index = 0;
        private boolean removeAllowed = false;

        @Override
        public boolean hasNext() {
            return index < list.size();
        }

        @Override
        public E next() {
            if (!hasNext())
                throw new NoSuchElementException();
            removeAllowed = true;
            return list.get(index++);
        }

        @Override
        public void remove() {
            if (!removeAllowed)
                throw new IllegalStateException();
            removeAllowed = false;
            list.remove(--index);
        }

    }

}
package morfologik.stemming;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.*;

/**
 * Description of attributes, their types and default values.
 * 
 * @see Dictionary
 */
public final class DictionaryMetadata {
    /**
     * Attribute name for {@link #separator}.
     */
    public final static String ATTR_NAME_SEPARATOR = "fsa.dict.separator";

    /**
     * Attribute name for {@link #encoding}.
     */
    public final static String ATTR_NAME_ENCODING = "fsa.dict.encoding";

    /**
     * Attribute name for {@link #usesPrefixes}.
     */
    public final static String ATTR_NAME_USES_PREFIXES = "fsa.dict.uses-prefixes";

    /**
     * Attribute name for {@link #usesInfixes}.
     */
    public final static String ATTR_NAME_USES_INFIXES = "fsa.dict.uses-infixes";

    /**
     * A separator character between fields (stem, lemma, form). The character
     * must be within byte range (FSA uses bytes internally).
     */
    public final byte separator;

    /**
     * Encoding used for converting bytes to characters and vice versa.
     */
    public final String encoding;

    /**
     * True if the dictionary was compiled with prefix compression.
     */
    public final boolean usesPrefixes;

    /**
     * True if the dictionary was compiled with infix compression.
     */
    public final boolean usesInfixes;

    /**
     * Other meta data not included above.
     */
    public final Map<String, String> metadata;

    /**
     * Creates an immutable instance of {@link DictionaryMetadata}.
     *
     * @throws RuntimeException if the separator does not encode to exactly one
     *         byte in the given encoding, or the encoding is unsupported
     */
    public DictionaryMetadata(char separator, String encoding,
            boolean usesPrefixes, boolean usesInfixes,
            Map<String, String> metadata)
    {
        this.encoding = encoding;
        this.usesPrefixes = usesPrefixes;
        this.usesInfixes = usesInfixes;

        try {
            final byte [] separatorBytes = new String(new char [] {separator}).getBytes(encoding);
            if (separatorBytes.length != 1) {
                throw new RuntimeException("Separator character '"
                        + separator + "' must be a single byte after transformation with encoding: "
                        + encoding);
            }
            this.separator = separatorBytes[0];
        } catch (UnsupportedEncodingException e) {
            // Keep the original exception as the cause instead of dropping it.
            throw new RuntimeException("Encoding not supported on this VM: " + encoding, e);
        }

        // Defensive copy so external mutation of the argument cannot leak in.
        this.metadata = Collections.unmodifiableMap(new HashMap<String, String>(metadata));
    }

    /**
     * Converts attributes in a {@link Map} to an instance of
     * {@link DictionaryMetadata}, validating attribute values.
     *
     * @throws IOException if a required attribute is missing or malformed
     */
    static DictionaryMetadata fromMap(Properties properties) throws IOException {
        final String separator = properties.getProperty(ATTR_NAME_SEPARATOR);
        if (separator == null || separator.length() != 1) {
            throw new IOException("Attribute " + ATTR_NAME_SEPARATOR + " must be "
                    + "a single character.");
        }

        final String encoding = properties.getProperty(ATTR_NAME_ENCODING);
        if (encoding == null || encoding.length() == 0) {
            throw new IOException("Attribute " + ATTR_NAME_ENCODING + " must be "
                    + "present and non-empty.");
        }

        // Absent flags default to false.
        final boolean usesPrefixes = Boolean.parseBoolean(
                properties.getProperty(ATTR_NAME_USES_PREFIXES, "false"));

        final boolean usesInfixes = Boolean.parseBoolean(
                properties.getProperty(ATTR_NAME_USES_INFIXES, "false"));

        final HashMap<String, String> metadata = new HashMap<String, String>();
        for (Map.Entry<Object, Object> e : properties.entrySet()) {
            metadata.put(e.getKey().toString(), e.getValue().toString());
        }

        return new DictionaryMetadata(
                separator.charAt(0), encoding, usesPrefixes, usesInfixes, metadata);
    }
}
package com.hazelcast.map.impl.mapstore;

import com.hazelcast.config.Config;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.MapIndexConfig;
import com.hazelcast.config.MapStoreConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.core.MapLoader;
import com.hazelcast.core.MapStore;
import com.hazelcast.core.MapStoreAdapter;
import com.hazelcast.core.MapStoreFactory;
import com.hazelcast.instance.Node;
import com.hazelcast.map.impl.mapstore.writebehind.TestMapUsingMapStoreBuilder;
import com.hazelcast.nio.Address;
import com.hazelcast.partition.InternalPartition;
import com.hazelcast.partition.InternalPartitionService;
import com.hazelcast.query.SqlPredicate;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import static com.hazelcast.test.TimeConstants.MINUTE;
import static java.lang.String.format;
import static java.util.Collections.singleton;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;

/**
 * Tests of {@link MapLoader} / {@link MapStore} driven map population:
 * initial eager loading, loading after member termination, index updates,
 * and error propagation from a failing loader.
 */
@RunWith(HazelcastSerialClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class MapLoaderTest extends HazelcastTestSupport {

    /**
     * Terminates the partition owner and first backup after an eager initial
     * load, then verifies a surviving member can still load all entries.
     */
    @Test
    public void testSenderAndBackupTerminates_AfterInitialLoad() throws InterruptedException {
        String name = randomString();
        Config config = new Config();
        MapConfig mapConfig = config.getMapConfig(name);
        MapStoreConfig mapStoreConfig = new MapStoreConfig();
        mapStoreConfig.setEnabled(true);
        mapStoreConfig.setImplementation(new DummyMapLoader());
        mapStoreConfig.setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER);
        mapConfig.setMapStoreConfig(mapStoreConfig);

        TestHazelcastInstanceFactory instanceFactory = createHazelcastInstanceFactory(5);
        HazelcastInstance[] instances = instanceFactory.newInstances(config);

        IMap<Object, Object> map = instances[0].getMap(name);
        map.clear();

        HazelcastInstance[] ownerAndReplicas = findOwnerAndReplicas(instances, name);
        ownerAndReplicas[0].getLifecycleService().terminate();
        ownerAndReplicas[1].getLifecycleService().terminate();

        map = ownerAndReplicas[3].getMap(name);
        map.loadAll(false);

        assertEquals(DummyMapLoader.SIZE, map.size());
    }

    /**
     * Orders the given instances by replica index for the partition that owns
     * the key {@code name}: index 0 is the owner, 1..n the backups.
     */
    private HazelcastInstance[] findOwnerAndReplicas(HazelcastInstance[] instances, String name) {
        Node node = getNode(instances[0]);
        InternalPartitionService partitionService = node.getPartitionService();
        int partitionId = partitionService.getPartitionId(name);
        InternalPartition partition = partitionService.getPartition(partitionId);
        HazelcastInstance[] ownerAndReplicas = new HazelcastInstance[instances.length];
        for (int i = 0; i < instances.length; i++) {
            ownerAndReplicas[i] = getInstanceForAddress(instances, partition.getReplicaAddress(i));
        }
        return ownerAndReplicas;
    }

    /** Resolves the instance whose local member has the given address. */
    private HazelcastInstance getInstanceForAddress(HazelcastInstance[] instances, Address address) {
        for (HazelcastInstance instance : instances) {
            Address instanceAddress = instance.getCluster().getLocalMember().getAddress();
            if (address.equals(instanceAddress)) {
                return instance;
            }
        }
        throw new IllegalArgumentException();
    }

    /**
     * Regression test for issue 1770: merely obtaining a map (with management
     * center enabled) must not trigger {@code loadAll}.
     */
    @Test
    public void test1770() throws InterruptedException {
        Config config = getConfig();
        config.getManagementCenterConfig().setEnabled(true);
        config.getManagementCenterConfig().setUrl("http://127.0.0.1:8090/mancenter");

        MapConfig mapConfig = config.getMapConfig("foo");

        final AtomicBoolean loadAllCalled = new AtomicBoolean();
        MapLoader mapLoader = new MapLoader() {
            @Override
            public Object load(Object key) {
                return null;
            }

            @Override
            public Map loadAll(Collection keys) {
                loadAllCalled.set(true);
                return new HashMap();
            }

            @Override
            public Set loadAllKeys() {
                return new HashSet(Arrays.asList(1));
            }
        };
        MapStoreConfig mapStoreConfig = new MapStoreConfig();
        mapStoreConfig.setEnabled(true);
        mapStoreConfig.setImplementation(mapLoader);
        mapConfig.setMapStoreConfig(mapStoreConfig);

        HazelcastInstance hz = createHazelcastInstance(config);
        Map map = hz.getMap(mapConfig.getName());

        assertTrueAllTheTime(new AssertTask() {
            @Override
            public void run() {
                assertFalse("LoadAll should not have been called", loadAllCalled.get());
            }
        }, 10);
    }

    @Ignore
    @Test
    public void testMapLoaderLoadUpdatingIndex() throws Exception {
        final int nodeCount = 3;
        String mapName = randomString();
        SampleIndexableObjectMapLoader loader = new SampleIndexableObjectMapLoader();

        Config config = createMapConfig(mapName, loader);
        NodeBuilder nodeBuilder = new NodeBuilder(nodeCount, config).build();
        HazelcastInstance node = nodeBuilder.getRandomNode();

        IMap<Integer, SampleIndexableObject> map = node.getMap(mapName);
        for (int i = 0; i < 10; i++) {
            map.put(i, new SampleIndexableObject("My-" + i, i));
        }

        final SqlPredicate predicate = new SqlPredicate("name='My-5'");
        assertPredicateResultCorrect(map, predicate);

        map.destroy();
        loader.preloadValues = true;
        node = nodeBuilder.getRandomNode();
        map = node.getMap(mapName);

        assertLoadAllKeysCount(loader, 1);
        assertPredicateResultCorrect(map, predicate);
    }

    /** Repeated getAll calls must load each requested key from the store only once. */
    @Test
    public void testGetAll_putsLoadedItemsToIMap() throws Exception {
        Integer[] requestedKeys = {1, 2, 3};
        AtomicInteger loadedKeysCounter = new AtomicInteger(0);
        MapStore mapStore = createMapLoader(loadedKeysCounter);

        IMap map = TestMapUsingMapStoreBuilder.create()
                .withMapStore(mapStore)
                .withNodeCount(1)
                .withNodeFactory(createHazelcastInstanceFactory(1))
                .withPartitionCount(1)
                .build();

        Set<Integer> keySet = new HashSet<Integer>(Arrays.asList(requestedKeys));

        map.getAll(keySet);
        map.getAll(keySet);
        map.getAll(keySet);

        assertEquals(requestedKeys.length, loadedKeysCounter.get());
    }

    /**
     * A loader whose first {@code loadAllKeys} throws must not poison the map:
     * a later {@code loadAll} retry should succeed.
     */
    @Test(timeout = MINUTE)
    public void testMapCanBeLoaded_whenLoadAllKeysThrowsExceptionFirstTime() throws InterruptedException {
        Config config = getConfig();
        MapLoader failingMapLoader = new FailingMapLoader();
        MapStoreConfig mapStoreConfig = new MapStoreConfig().setImplementation(failingMapLoader);
        MapConfig mapConfig = config.getMapConfig(getClass().getName()).setMapStoreConfig(mapStoreConfig);
        HazelcastInstance[] hz = createHazelcastInstanceFactory(2).newInstances(config, 2);
        IMap map = hz[0].getMap(mapConfig.getName());

        Throwable exception = null;
        try {
            map.get(generateKeyNotOwnedBy(hz[0]));
        } catch (Throwable e) {
            exception = e;
        }
        assertNotNull("Exception wasn't propagated", exception);

        map.loadAll(true);
        assertEquals(1, map.size());
    }

    /** Builds a MapStore that counts every key it is asked to load. */
    private MapStore createMapLoader(final AtomicInteger loadAllCounter) {
        return new MapStoreAdapter<Integer, Integer>() {
            @Override
            public Map<Integer, Integer> loadAll(Collection<Integer> keys) {
                loadAllCounter.addAndGet(keys.size());

                Map<Integer, Integer> map = new HashMap<Integer, Integer>();
                for (Integer key : keys) {
                    map.put(key, key);
                }
                return map;
            }

            @Override
            public Integer load(Integer key) {
                loadAllCounter.incrementAndGet();
                return super.load(key);
            }
        };
    }

    /** Map config with an indexed "name" attribute backed by the given loader factory. */
    private Config createMapConfig(String mapName, SampleIndexableObjectMapLoader loader) {
        Config config = getConfig();
        MapConfig mapConfig = config.getMapConfig(mapName);
        List<MapIndexConfig> indexConfigs = mapConfig.getMapIndexConfigs();
        indexConfigs.add(new MapIndexConfig("name", true));

        MapStoreConfig storeConfig = new MapStoreConfig();
        storeConfig.setFactoryImplementation(loader);
        storeConfig.setEnabled(true);
        mapConfig.setMapStoreConfig(storeConfig);

        return config;
    }

    private void assertLoadAllKeysCount(final SampleIndexableObjectMapLoader loader, final int instanceCount) {
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertEquals("call-count of loadAllKeys method is problematic",
                        instanceCount, loader.loadAllKeysCallCount.get());
            }
        });
    }

    private void assertPredicateResultCorrect(final IMap<Integer, SampleIndexableObject> map,
                                              final SqlPredicate predicate) {
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                final int mapSize = map.size();
                final String message = format("Map size is %d", mapSize);

                Set<Map.Entry<Integer, SampleIndexableObject>> result = map.entrySet(predicate);
                assertEquals(message, 1, result.size());
                assertEquals(message, 5, (int) result.iterator().next().getValue().value);
            }
        });
    }

    /** Loader pre-populated with SIZE identity entries. */
    private static class DummyMapLoader implements MapLoader<Integer, Integer> {

        static final int SIZE = 1000;

        final Map<Integer, Integer> map = new ConcurrentHashMap<Integer, Integer>(SIZE);

        public DummyMapLoader() {
            for (int i = 0; i < SIZE; i++) {
                map.put(i, i);
            }
        }

        @Override
        public Integer load(Integer key) {
            return map.get(key);
        }

        @Override
        public Map<Integer, Integer> loadAll(Collection<Integer> keys) {
            HashMap<Integer, Integer> hashMap = new HashMap<Integer, Integer>();
            for (Integer key : keys) {
                hashMap.put(key, map.get(key));
            }
            return hashMap;
        }

        @Override
        public Iterable<Integer> loadAllKeys() {
            return map.keySet();
        }
    }

    /**
     * Loader for indexable objects; returns nothing until
     * {@link #preloadValues} is switched on, and counts loadAllKeys calls.
     */
    public static class SampleIndexableObjectMapLoader
            implements MapLoader<Integer, SampleIndexableObject>, MapStoreFactory<Integer, SampleIndexableObject> {

        volatile boolean preloadValues = false;

        private SampleIndexableObject[] values = new SampleIndexableObject[10];
        private Set<Integer> keys = new HashSet<Integer>();
        private AtomicInteger loadAllKeysCallCount = new AtomicInteger(0);

        public SampleIndexableObjectMapLoader() {
            for (int i = 0; i < 10; i++) {
                keys.add(i);
                values[i] = new SampleIndexableObject("My-" + i, i);
            }
        }

        @Override
        public SampleIndexableObject load(Integer key) {
            if (!preloadValues) return null;
            return values[key];
        }

        @Override
        public Map<Integer, SampleIndexableObject> loadAll(Collection<Integer> keys) {
            if (!preloadValues) return Collections.emptyMap();

            Map<Integer, SampleIndexableObject> data = new HashMap<Integer, SampleIndexableObject>();
            for (Integer key : keys) {
                data.put(key, values[key]);
            }
            return data;
        }

        @Override
        public Set<Integer> loadAllKeys() {
            if (!preloadValues) return Collections.emptySet();

            loadAllKeysCallCount.incrementAndGet();
            return Collections.unmodifiableSet(keys);
        }

        @Override
        public MapLoader<Integer, SampleIndexableObject> newMapStore(String mapName, Properties properties) {
            return this;
        }
    }

    /** Serializable value object with a name attribute used for index tests. */
    public static class SampleIndexableObject implements Serializable {

        String name;
        Integer value;

        public SampleIndexableObject() {
        }

        public SampleIndexableObject(String name, Integer value) {
            this.name = name;
            this.value = value;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public Integer getValue() {
            return value;
        }

        public void setValue(Integer value) {
            this.value = value;
        }
    }

    /** Throws from loadAllKeys exactly once, then behaves normally. */
    static class FailingMapLoader extends MapStoreAdapter {

        AtomicBoolean first = new AtomicBoolean(true);

        @Override
        public Set loadAllKeys() {
            if (first.compareAndSet(true, false)) {
                throw new IllegalStateException("Intentional exception");
            }
            return singleton("key");
        }

        @Override
        public Map loadAll(Collection keys) {
            return Collections.singletonMap("key", "value");
        }
    }

    /**
     * Helper to start a fixed-size cluster and pick random members from it.
     * (Non-static because it calls the outer test's instance factory method.)
     */
    private class NodeBuilder {

        private final int nodeCount;
        private final Config config;
        private final Random random = new Random();
        private final TestHazelcastInstanceFactory factory;
        private HazelcastInstance[] nodes;

        public NodeBuilder(int nodeCount, Config config) {
            this.nodeCount = nodeCount;
            this.config = config;
            this.factory = createHazelcastInstanceFactory(nodeCount);
        }

        public NodeBuilder build() {
            nodes = factory.newInstances(config);
            return this;
        }

        public HazelcastInstance getRandomNode() {
            final int nodeIndex = random.nextInt(nodeCount);
            return nodes[nodeIndex];
        }
    }
}
package net.querz.nbt.mca;

import net.querz.nbt.CompoundTag;
import net.querz.nbt.ListTag;
import net.querz.nbt.Tag;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.GZIPInputStream;
import java.util.zip.InflaterInputStream;

/**
 * A complete representation of an .mca region file, capable of reading and saving.
 * A region file holds up to 32x32 = 1024 chunks; all per-chunk arrays below are
 * indexed by {@link #getChunkIndex(int, int)}.
 */
public class MCAFile {

    public static final int DEFAULT_DATA_VERSION = 1628;

    private int[] offsets;     // sector offset of each chunk's data inside the file
    private byte[] sectors;    // size of each chunk's data in 4096-byte sectors
    private int[] lengths;     // raw (compressed) data length in bytes, incl. compression byte
    private int[] timestamps;  // per-chunk last-update timestamps (epoch seconds)
    private CompoundTag[] data;

    public MCAFile() {}

    /**
     * Reads the complete region file from {@code raf}: the offset table, every
     * chunk's NBT payload, and the timestamp table.
     * @param raf The file to read from.
     * @throws IOException If an I/O error occurs, a chunk uses an unknown
     *                     compression type, or a chunk's root tag is not a CompoundTag.
     */
    public void deserialize(RandomAccessFile raf) throws IOException {
        offsets = new int[1024];
        sectors = new byte[1024];
        lengths = new int[1024];
        timestamps = new int[1024];
        data = new CompoundTag[1024];
        for (int i = 0; i < offsets.length; i++) {
            raf.seek(i * 4);
            // header entry: 3-byte big-endian sector offset + 1-byte sector count
            int offset = raf.read() << 16;
            offset |= (raf.read() & 0xFF) << 8;
            offset |= raf.read() & 0xFF;
            offsets[i] = offset;
            if ((sectors[i] = raf.readByte()) == 0) {
                continue; // sector count 0 means the chunk is absent
            }
            raf.seek(offset * 4096);
            lengths[i] = raf.readInt();
            DataInputStream dis;
            byte ct;
            switch (ct = raf.readByte()) {
            case 0:
                continue; //compression type 0 means no data
            case 1: // gzip
                dis = new DataInputStream(new BufferedInputStream(new GZIPInputStream(new FileInputStream(raf.getFD()))));
                break;
            case 2: // zlib
                dis = new DataInputStream(new BufferedInputStream(new InflaterInputStream(new FileInputStream(raf.getFD()))));
                break;
            default:
                throw new IOException("invalid compression type " + ct);
            }
            Tag tag = Tag.deserialize(dis, 0);
            if (tag instanceof CompoundTag) {
                data[i] = (CompoundTag) tag;
            } else {
                throw new IOException("invalid data tag at offset " + offset + ": "
                        + (tag == null ? "null" : tag.getClass().getName()));
            }
        }
        // the timestamp table occupies the second 4096-byte sector
        raf.seek(4096);
        for (int i = 0; i < timestamps.length; i++) {
            timestamps[i] = raf.readInt();
        }
    }

    public int serialize(RandomAccessFile raf) throws IOException {
        return serialize(raf, false);
    }

    /**
     * Serializes all loaded chunks into {@code raf}, rebuilding the offset and
     * timestamp tables.
     * @param raf The target file.
     * @param changeLastUpdate If {@code true}, every written chunk's timestamp is
     *                         replaced with the current time; otherwise the stored
     *                         timestamps are kept.
     * @return The number of chunks written to the file.
     * @throws IOException If an I/O error occurs.
     */
    public int serialize(RandomAccessFile raf, boolean changeLastUpdate) throws IOException {
        int globalOffset = 2; // sectors 0 and 1 hold the offset and timestamp tables
        int lastWritten = 0;
        int timestamp = (int) (System.currentTimeMillis() / 1000L);
        int chunksWritten = 0;
        for (int i = 0; i < 1024; i++) {
            if (data[i] == null) {
                continue;
            }
            raf.seek(globalOffset * 4096);
            //write chunk data
            ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
            try (DataOutputStream nbtOut = new DataOutputStream(
                    new BufferedOutputStream(new DeflaterOutputStream(baos)))) {
                data[i].serialize(nbtOut, 0);
            }
            byte[] rawData = baos.toByteArray();
            raf.writeInt(rawData.length);
            raf.writeByte(2); // compression type 2 = zlib
            raf.write(rawData);
            lastWritten = rawData.length + 5; // 4-byte length + 1-byte compression type
            chunksWritten++;
            // renamed from "sectors" to avoid shadowing the field of the same name
            int sectorCount = (lastWritten >> 12) + 1;
            raf.seek(i * 4);
            raf.writeByte(globalOffset >>> 16);
            raf.writeByte(globalOffset >> 8 & 0xFF);
            raf.writeByte(globalOffset & 0xFF);
            raf.writeByte(sectorCount);
            //write timestamp
            raf.seek(i * 4 + 4096);
            raf.writeInt(changeLastUpdate ? timestamp : timestamps[i]);
            globalOffset += sectorCount;
        }
        //pad the file to a multiple of 4096 bytes
        if (lastWritten % 4096 != 0) {
            raf.seek(globalOffset * 4096 - 1);
            raf.write(0);
        }
        return chunksWritten;
    }

    /** Returns the sector offset of the chunk at {@code index}, or 0 if nothing was loaded. */
    public int getOffset(int index) {
        checkIndex(index);
        if (offsets == null) {
            return 0;
        }
        return offsets[index];
    }

    public int getOffset(int chunkX, int chunkZ) {
        return getOffset(getChunkIndex(chunkX, chunkZ));
    }

    /** Returns the chunk's size in 4096-byte sectors, or 0 if nothing was loaded. */
    public byte getSizeInSectors(int index) {
        checkIndex(index);
        if (sectors == null) {
            return 0;
        }
        return sectors[index];
    }

    public byte getSizeInSectors(int chunkX, int chunkZ) {
        return getSizeInSectors(getChunkIndex(chunkX, chunkZ));
    }

    /** Returns the chunk's last-update timestamp (epoch seconds), or 0 if nothing was loaded. */
    public int getLastUpdate(int index) {
        checkIndex(index);
        if (timestamps == null) {
            return 0;
        }
        return timestamps[index];
    }

    public int getLastUpdate(int chunkX, int chunkZ) {
        return getLastUpdate(getChunkIndex(chunkX, chunkZ));
    }

    /** Returns the chunk's raw (compressed) data length in bytes, or 0 if nothing was loaded. */
    public int getRawDataLength(int index) {
        checkIndex(index);
        if (lengths == null) {
            return 0;
        }
        return lengths[index];
    }

    public int getRawDataLength(int chunkX, int chunkZ) {
        return getRawDataLength(getChunkIndex(chunkX, chunkZ));
    }

    /** Sets the chunk's last-update timestamp, lazily allocating the timestamp table. */
    public void setLastUpdate(int index, int lastUpdate) {
        checkIndex(index);
        if (timestamps == null) {
            timestamps = new int[1024];
        }
        timestamps[index] = lastUpdate;
    }

    public void setLastUpdate(int chunkX, int chunkZ, int lastUpdate) {
        setLastUpdate(getChunkIndex(chunkX, chunkZ), lastUpdate);
    }

    /** Returns the chunk's root CompoundTag, or {@code null} if there is no data. */
    public CompoundTag getChunkData(int index) {
        checkIndex(index);
        if (data == null) {
            return null;
        }
        return data[index];
    }

    public CompoundTag getChunkData(int chunkX, int chunkZ) {
        return getChunkData(getChunkIndex(chunkX, chunkZ));
    }

    /**
     * Returns the biome id at a specific location.
     * @param blockX The x-coordinate of the block.
     * @param blockZ The z-coordinate of the block.
     * @return The biome id or -1 if there is no chunk or no biome data.
     */
    public int getBiomeAt(int blockX, int blockZ) {
        CompoundTag chunkData = getChunkData(MCAUtil.blockToChunk(blockX), MCAUtil.blockToChunk(blockZ));
        if (chunkData == null) {
            return -1;
        }
        int[] biomes = chunkData.getCompoundTag("Level").getIntArray("Biomes");
        if (biomes.length == 0) {
            return -1;
        }
        // biomes are per-column, so y is irrelevant; index with y = 0
        return biomes[getSectionIndex(blockX, 0, blockZ)];
    }

    /**
     * Searches for redundant blocks in the palette in the section of the provided block coordinates,
     * removes them and updates the palette indices in the BlockStates accordingly.
     * Changes nothing if there is no chunk or no section at the coordinates.
     * @param blockX The x-coordinate of the block.
     * @param blockY The y-coordinate of the block.
     * @param blockZ The z-coordinate of the block.
     */
    public void cleanupPaletteAndBlockStates(int blockX, int blockY, int blockZ) {
        CompoundTag chunkData = getChunkData(MCAUtil.blockToChunk(blockX), MCAUtil.blockToChunk(blockZ));
        if (chunkData == null) {
            return;
        }
        int blockSection = MCAUtil.blockToChunk(blockY);
        for (CompoundTag section : chunkData.getCompoundTag("Level").getListTag("Sections").asCompoundTagList()) {
            if (section.getByte("Y") == blockSection) {
                long[] blockStates = section.getLongArray("BlockStates");
                ListTag<CompoundTag> palette = section.getListTag("Palette").asCompoundTagList();
                Map<Integer, Integer> oldToNewMapping = cleanupPalette(blockStates, palette);
                blockStates = adjustBlockStateBits(blockStates, palette, oldToNewMapping);
                section.putLongArray("BlockStates", blockStates);
            }
        }
    }

    /**
     * Sets block data at specific block coordinates. If there is no chunk data or no section data
     * at the provided location, it will create default data ({@link MCAFile#createDefaultChunk(int, int)},
     * {@link MCAFile#createDefaultSection(int)}).
     * If the size of the palette reaches a number that is a power of 2, it will automatically increase
     * the size of the BlockStates. This cleanup procedure ONLY occurs in this case, EXCEPT if {@code cleanup}
     * is set to {@code true}. The reason for this is a rather high performance impact of the cleanup process.
     * This may lead to unused block states in the palette, but never to an unnecessarily large number of bits
     * used per block state in the BlockStates array.
     * A manual cleanup can be performed using {@link MCAFile#cleanupPaletteAndBlockStates(int, int, int)}.
     * @param blockX The x-coordinate of the block.
     * @param blockY The y-coordinate of the block.
     * @param blockZ The z-coordinate of the block.
     * @param data The block data.
     * @param cleanup If the cleanup procedure should be forced.
     */
    public void setBlockDataAt(int blockX, int blockY, int blockZ, CompoundTag data, boolean cleanup) {
        CompoundTag chunkData = getChunkData(MCAUtil.blockToChunk(blockX), MCAUtil.blockToChunk(blockZ));
        if (chunkData == null) {
            chunkData = createDefaultChunk(MCAUtil.blockToChunk(blockX), MCAUtil.blockToChunk(blockZ));
        }
        int blockSection = MCAUtil.blockToChunk(blockY);
        for (CompoundTag section : chunkData.getCompoundTag("Level").getListTag("Sections").asCompoundTagList()) {
            if (section.getByte("Y") == blockSection) {
                long[] blockStates = section.getLongArray("BlockStates");
                ListTag<CompoundTag> palette = section.getListTag("Palette").asCompoundTagList();
                int paletteIndex;
                if ((paletteIndex = palette.indexOf(data)) != -1) {
                    //data already exists in palette, so there's nothing to do
                    setPaletteIndex(getSectionIndex(blockX, blockY, blockZ), paletteIndex, blockStates);
                } else {
                    palette.add(data);
                    paletteIndex = palette.size() - 1;
                    long[] newBlockStates = blockStates;
                    //power of 2 --> bits must increase
                    if ((palette.size() & (palette.size() - 1)) == 0) {
                        newBlockStates = adjustBlockStateBits(blockStates, palette, null);
                        setPaletteIndex(getSectionIndex(blockX, blockY, blockZ), paletteIndex, newBlockStates);
                    } else {
                        //bits did not increase, change the index
                        setPaletteIndex(getSectionIndex(blockX, blockY, blockZ), paletteIndex, newBlockStates);
                    }
                    if (cleanup || blockStates.length != newBlockStates.length) {
                        Map<Integer, Integer> oldToNewMapping = cleanupPalette(newBlockStates, palette);
                        newBlockStates = adjustBlockStateBits(newBlockStates, palette, oldToNewMapping);
                    }
                    section.putLongArray("BlockStates", newBlockStates);
                }
                return;
            }
        }
        //create new section
        CompoundTag section = createDefaultSection(MCAUtil.blockToChunk(blockY));
        long[] blockStates = section.getLongArray("BlockStates");
        ListTag<CompoundTag> palette = section.getListTag("Palette").asCompoundTagList();
        if (palette.indexOf(data) == 0) {
            // the default section's palette starts with air at index 0;
            // placing air in a fresh section is a no-op
            return;
        } else {
            palette.add(data);
            setPaletteIndex(getSectionIndex(blockX, blockY, blockZ), 1, blockStates);
        }
        chunkData.getCompoundTag("Level").getListTag("Sections").asCompoundTagList().add(section);
    }

    /**
     * Sets the index of the block data in the BlockStates. Does not adjust the size of the BlockStates array.
     * @param index The index of the block in this section, ranging from 0-4095.
     * @param state The block state to be set (index of block data in the palette).
     * @param blockStates The BlockStates that store the palette indices.
     */
    void setPaletteIndex(int index, int state, long[] blockStates) {
        int bits = blockStates.length / 64; // bits per palette index
        double blockStatesIndex = index / (4096D / blockStates.length);
        int longIndex = (int) blockStatesIndex;
        int startBit = (int) ((blockStatesIndex - Math.floor(longIndex)) * 64D);
        if (startBit + bits > 64) {
            // the value straddles two longs: write the low part into this long
            // and the high part into the next one
            blockStates[longIndex] = updateBits(blockStates[longIndex], state, startBit, 64);
            blockStates[longIndex + 1] = updateBits(blockStates[longIndex + 1], state, startBit - 64, startBit + bits - 64);
        } else {
            blockStates[longIndex] = updateBits(blockStates[longIndex], state, startBit, startBit + bits);
        }
    }

    long[] adjustBlockStateBits(long[] blockStates, ListTag<CompoundTag> palette, Map<Integer, Integer> oldToNewMapping) {
        //increases or decreases the amount of bits used per BlockState
        //based on the size of the palette. oldToNewMapping can be used to update indices
        //if the palette had been cleaned up before using MCAFile#cleanupPalette().
        int newBits = 32 - Integer.numberOfLeadingZeros(palette.size());
        newBits = newBits < 4 ? 4 : newBits; // Minecraft uses a minimum of 4 bits per state
        long[] newBlockStates = newBits == blockStates.length / 64 ? blockStates : new long[newBits * 64];
        if (oldToNewMapping != null) {
            for (int i = 0; i < 4096; i++) {
                setPaletteIndex(i, oldToNewMapping.get(getPaletteIndex(i, blockStates)), newBlockStates);
            }
        } else {
            for (int i = 0; i < 4096; i++) {
                setPaletteIndex(i, getPaletteIndex(i, blockStates), newBlockStates);
            }
        }
        return newBlockStates;
    }

    Map<Integer, Integer> cleanupPalette(long[] blockStates, ListTag<CompoundTag> palette) {
        //create index - palette mapping
        Map<Integer, Integer> allIndices = new HashMap<>();
        for (int i = 0; i < 4096; i++) {
            int paletteIndex = getPaletteIndex(i, blockStates);
            allIndices.put(paletteIndex, paletteIndex);
        }
        //delete unused blocks from palette
        int index = 1; // index 0 (air) is always kept
        for (int i = 1; i < palette.size(); i++) {
            if (!allIndices.containsKey(index)) {
                palette.remove(i);
                // FIX: the original had a dangling "i" here (a syntax error);
                // after removing an element, re-examine the same position
                i--;
            } else {
                allIndices.put(index, i);
            }
            index++;
        }
        return allIndices;
    }

    /**
     * Returns the block data at the provided block coordinates.
     * @param blockX The x-coordinate of the block.
     * @param blockY The y-coordinate of the block.
     * @param blockZ The z-coordinate of the block.
     * @return The block data at the specific block coordinates from the palette in this section.
     *         Returns {@code null} if there is no chunk data or no section.
     */
    public CompoundTag getBlockDataAt(int blockX, int blockY, int blockZ) {
        //get chunk in this region
        CompoundTag chunkData = getChunkData(MCAUtil.blockToChunk(blockX), MCAUtil.blockToChunk(blockZ));
        if (chunkData == null) {
            return null;
        }
        //get section
        int blockSection = MCAUtil.blockToChunk(blockY);
        for (CompoundTag section : chunkData.getCompoundTag("Level").getListTag("Sections").asCompoundTagList()) {
            if (section.getByte("Y") == blockSection) {
                //get index of long in block index array
                long[] blockStates = section.getLongArray("BlockStates");
                ListTag<CompoundTag> palette = section.getListTag("Palette").asCompoundTagList();
                //convert block coordinates into section coordinates
                int index = getSectionIndex(blockX, blockY, blockZ);
                int paletteIndex = getPaletteIndex(index, blockStates);
                return palette.get(paletteIndex);
            }
        }
        return null;
    }

    /**
     * Returns the index of the block data in the palette.
     * @param index The index of the block in this section, ranging from 0-4095.
     * @param blockStates The BlockStates that store the palette indices.
     * @return The index of the block data in the palette.
     */
    int getPaletteIndex(int index, long[] blockStates) {
        int bits = blockStates.length >> 6; // bits per palette index
        double blockStatesIndex = index / (4096D / blockStates.length);
        int longIndex = (int) blockStatesIndex;
        int startBit = (int) ((blockStatesIndex - Math.floor(blockStatesIndex)) * 64D);
        if (startBit + bits > 64) {
            // value straddles two longs: combine the tail of this long with the
            // head of the next one
            long prev = bitRange(blockStates[longIndex], startBit, 64);
            long next = bitRange(blockStates[longIndex + 1], 0, startBit + bits - 64);
            return (int) ((next << 64 - startBit) + prev);
        } else {
            // FIX: the original read "startBit + bits - 1", which dropped the
            // highest bit of every palette index; the writer (setPaletteIndex)
            // uses the exclusive bound "startBit + bits"
            return (int) bitRange(blockStates[longIndex], startBit, startBit + bits);
        }
    }

    /** Stores the chunk's root CompoundTag, lazily allocating the data table. */
    public void setChunkData(int index, CompoundTag data) {
        checkIndex(index);
        if (this.data == null) {
            this.data = new CompoundTag[1024];
        }
        this.data[index] = data;
    }

    public void setChunkData(int chunkX, int chunkZ, CompoundTag data) {
        setChunkData(getChunkIndex(chunkX, chunkZ), data);
    }

    /** Maps chunk coordinates (wrapped into the 32x32 region grid) to an array index 0-1023. */
    public static int getChunkIndex(int chunkX, int chunkZ) {
        return (chunkX & 31) + (chunkZ & 31) * 32;
    }

    private int checkIndex(int index) {
        if (index < 0 || index > 1023) {
            throw new IndexOutOfBoundsException();
        }
        return index;
    }

    /** Maps block coordinates (wrapped into a 16x16x16 section) to a section-local index 0-4095. */
    int getSectionIndex(int blockX, int blockY, int blockZ) {
        return (blockY & 15) * 256 + (blockZ & 15) * 16 + (blockX & 15);
    }

    long updateBits(long n, long m, int i, int j) {
        //replace bits i to j (exclusive) in n with the j - i lowest bits of m;
        //negative i means the value started in the previous long and only its
        //high bits land here
        long mShifted = i > 0 ? (m & ((1L << j - i) - 1)) << i : (m & ((1L << j - i) - 1)) >>> -i;
        return ((n & ((j > 63 ? 0 : (~0L << j)) | (i < 0 ? 0 : ((1L << i) - 1L)))) | mShifted);
    }

    long bitRange(long value, int from, int to) {
        //extract bits [from, to) of value as an unsigned number
        int waste = 64 - to;
        return (value << waste) >>> (waste + from);
    }

    /** Builds a minimal valid chunk tag for the given chunk position. */
    private CompoundTag createDefaultChunk(int xPos, int zPos) {
        CompoundTag chunk = new CompoundTag();
        chunk.putInt("DataVersion", DEFAULT_DATA_VERSION);
        CompoundTag level = new CompoundTag();
        level.putInt("xPos", xPos);
        level.putInt("zPos", zPos);
        level.put("Entities", new ListTag());
        level.put("Sections", new ListTag());
        level.putString("Status", "base");
        chunk.put("Level", level);
        return chunk;
    }

    /** Builds a minimal valid section tag (air-only palette, 4 bits per state). */
    private CompoundTag createDefaultSection(int y) {
        CompoundTag section = new CompoundTag();
        section.putByte("Y", (byte) y);
        ListTag<CompoundTag> palette = new ListTag<>();
        CompoundTag air = new CompoundTag();
        air.putString("Name", "minecraft:air");
        palette.add(air);
        section.put("Palette", palette);
        section.putLongArray("BlockStates", new long[256]);
        return section;
    }
}
package navalwar.server.network;

import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.Socket;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

import navalwar.server.gameengine.IGameEngineModule;
import navalwar.server.gameengine.UnitObject;
import navalwar.server.gameengine.exceptions.InvalidUnitNameException;
import navalwar.server.gameengine.exceptions.PlaceNotFreeToPlaceUnitException;
import navalwar.server.gameengine.exceptions.UnitCoordinatesOutsideMatrixException;
import navalwar.server.gameengine.exceptions.WarAlreadyFinishedException;
import navalwar.server.gameengine.exceptions.WarAlreadyStartedException;
import navalwar.server.gameengine.exceptions.WarDoesNotExistException;
import navalwar.server.gameengine.info.IWarInfo;
import navalwar.server.gameengine.info.WarInfo;

/**
 * Handles a single client connection. Reads line-based protocol messages from
 * the socket and dispatches them to the game engine until the client sends
 * DISCONNECT or the connection drops.
 */
public class NetworkRequest implements Runnable, IServerNetworkModule {

    IGameEngineModule game;
    Socket s;
    DataOutputStream outToClient;
    BufferedReader br;

    /**
     * @param s    The accepted client socket.
     * @param game The game engine this connection talks to.
     * @throws IOException If the socket streams cannot be opened.
     */
    public NetworkRequest(Socket s, IGameEngineModule game) throws IOException {
        this.s = s;
        this.game = game;
        outToClient = new DataOutputStream(this.s.getOutputStream());
        br = new BufferedReader(new InputStreamReader(this.s.getInputStream()));
    }

    @Override
    public void run() {
        try {
            System.out.println("Connection established with host : " + s.getInetAddress() + "port: " + s.getPort());
            listener();
        } catch (SocketException e) {
            // the peer dropped the connection; expected, not a programming error
            System.out.println("Connection loss");
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("Cerrando thread");
    }

    /**
     * Main message loop: reads one command line per iteration and dispatches
     * on its first token until DISCONNECT or end of stream.
     */
    private void listener() throws IOException, WarDoesNotExistException, WarAlreadyFinishedException,
            WarAlreadyStartedException, InvalidUnitNameException, PlaceNotFreeToPlaceUnitException,
            UnitCoordinatesOutsideMatrixException {
        boolean alive = true;
        while (alive) {
            String inputLine = br.readLine();
            if (inputLine == null) {
                // FIX: readLine() returns null at end of stream; without this
                // guard the StringTokenizer below threw a NullPointerException
                break;
            }
            StringTokenizer tokenizer = new StringTokenizer(inputLine);
            String request = tokenizer.nextToken();
            System.out.println("LLamado por: " + s.getInetAddress());
            System.out.println("Mensaje : " + inputLine);
            switch (request) {
            case "ListMsg":
                handleWarListMsg();
                break;
            case "CreateWarMsg":
                creatingWar();
                break;
            case "StartMsg":
                System.out.println("game.startWar(warID)");
                break;
            case "MovementMsg":
                System.out.println("game.handleShot()");
                break;
            case "SurrenderMsg":
                System.out.println("game.QuitArmy(armyID,warID)");
                break;
            case "ExitMsg":
                System.out.println("game.ExitArmy(armyID,warID)");
                break;
            case "JOIN":
                handleJoin();
                break;
            case "DISCONNECT":
                br.close();
                outToClient.close();
                s.close();
                alive = false;
                break;
            default:
                break;
            }
        }
    }

    /**
     * Handles a CreateWarMsg: reads the war name and description lines
     * ("key:value" format), creates the war and replies with its id.
     */
    private void creatingWar() throws IOException {
        String warNameMsg, warDescMsg, warName, warDesc;
        warNameMsg = br.readLine();
        warDescMsg = br.readLine();
        StringTokenizer nameTokenizer = new StringTokenizer(warNameMsg);
        StringTokenizer descTokenizer = new StringTokenizer(warDescMsg);
        nameTokenizer.nextToken(":"); // skip the "key" part before the colon
        warName = "";
        while (nameTokenizer.hasMoreTokens()) warName += nameTokenizer.nextToken();
        descTokenizer.nextToken(":");
        warDesc = "";
        while (descTokenizer.hasMoreTokens()) warDesc += descTokenizer.nextToken();
        System.out.println("warName = " + warName);
        System.out.println("warDescription = " + warDesc);
        int warID = game.createWar(warName, warDesc);
        String WarIDMsg = "WarIDMsg" + '\n';
        outToClient.writeBytes(WarIDMsg);
        String response = "" + warID + '\n';
        outToClient.writeBytes(response);
    }

    /**
     * Handles a ListMsg: replies with the number of wars followed by one
     * id/name line pair per war.
     */
    private void handleWarListMsg() throws IOException {
        List<Integer> warIDs = game.getWarsList();
        String header = "GamesMsg" + '\n';
        outToClient.writeBytes(header);
        String numberOfWars = "" + warIDs.size() + '\n';
        outToClient.writeBytes(numberOfWars);
        System.out.println("mando header");
        for (Integer i : warIDs) {
            IWarInfo info = game.getWarInfo(i);
            String warID = "warID:" + info.getWarID() + '\n';
            String warName = "warName:" + info.getName() + '\n';
            outToClient.writeBytes(warID);
            outToClient.writeBytes(warName);
        }
        System.out.println("termino de mandar lista");
    }

    /**
     * Handles a JOIN: reads the war id, army name and unit count, then one
     * name/x/y line triple per unit, registers the army and replies with its id.
     */
    private void handleJoin() throws IOException, WarDoesNotExistException, WarAlreadyFinishedException,
            WarAlreadyStartedException, InvalidUnitNameException, PlaceNotFreeToPlaceUnitException,
            UnitCoordinatesOutsideMatrixException {
        String WarIDMsg, ArmyNameMsg, SizeMsg;
        List<UnitObject> units = new ArrayList<UnitObject>();
        WarIDMsg = br.readLine();
        ArmyNameMsg = br.readLine();
        SizeMsg = br.readLine();
        StringTokenizer warIDTokenizer = new StringTokenizer(WarIDMsg);
        StringTokenizer armyNameTokenizer = new StringTokenizer(ArmyNameMsg);
        StringTokenizer SizeTokenizer = new StringTokenizer(SizeMsg);
        warIDTokenizer.nextToken(":");
        int warID = Integer.parseInt(warIDTokenizer.nextToken());
        armyNameTokenizer.nextToken(":");
        String armyName = armyNameTokenizer.nextToken();
        SizeTokenizer.nextToken(":");
        System.out.println("WarID: " + warID);
        System.out.println("War: " + game.getWarInfo(warID).getName());
        System.out.println("ArmyName: " + armyName);
        System.out.println("Units:");
        int size = Integer.parseInt(SizeTokenizer.nextToken());
        for (int i = 0; i < size; i++) {
            String unitName, UnitMsg, XMsg, YMsg;
            int x, y;
            UnitMsg = br.readLine();
            XMsg = br.readLine();
            YMsg = br.readLine();
            StringTokenizer unitTokenizer = new StringTokenizer(UnitMsg);
            StringTokenizer XTokenizer = new StringTokenizer(XMsg);
            StringTokenizer YTokenizer = new StringTokenizer(YMsg);
            unitTokenizer.nextToken(":");
            unitName = unitTokenizer.nextToken();
            XTokenizer.nextToken(":");
            // FIX: the original parsed x from unitTokenizer (already consumed),
            // not from the X message line; y below shows the intended pattern
            x = Integer.parseInt(XTokenizer.nextToken());
            YTokenizer.nextToken(":");
            y = Integer.parseInt(YTokenizer.nextToken());
            units.add(new UnitObject(unitName, x, y));
            System.out.println("Unit: " + unitName);
            System.out.println("X: " + x);
            System.out.println("Y: " + y);
        }
        int armyID = game.regArmy(warID, armyName, units);
        outToClient.writeBytes("ArmyIDMsg" + '\n');
        outToClient.writeBytes("armyID:" + armyID + '\n');
    }

    @Override
    public void bindGameEngineModule(IGameEngineModule game) {
        // TODO Auto-generated method stub
    }

    @Override
    public int startWar(int warID) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public int turnArmy(int warID, int armyID) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public int turnArmyTimeout(int warID, int armyID) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public int armyKicked(int warID, int armyID) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public int endWar(int warID, int winnerArmyID) {
        // TODO Auto-generated method stub
        return 0;
    }
}
package nl.b3p.batchloader;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Reads a file of SQL-style "INSERT ... VALUES (...)" lines describing geo
 * services, extracts name/url/protocol fields from each line, and builds
 * "add service" request URLs against a target application.
 *
 * @author Meine Toonen &lt;meinetoonen@b3partners.nl&gt;
 */
public class Loader {

    // Zero-based positions of fields inside the comma-separated VALUES (...) list.
    private final int POS_NAME = 5;
    private final int POS_PROTOCOL = 0;
    private final int POS_URL = 7;
    private final int POS_TILINGPROTOCOL = 10;

    private URL url;
    private List<Tuple> urls = new ArrayList<>();
    private static final Log log = LogFactory.getLog(Loader.class);

    /** @param url Base URL of the target application. */
    public Loader(URL url) {
        this.url = url;
    }

    /**
     * Parses the given file line by line, collecting a {@link Tuple} per line
     * that yields a valid service URL. I/O errors are logged, not rethrown.
     * @param filename Path of the file to parse.
     */
    public void parse(String filename) {
        log.info("Parsing file " + filename);
        // try-with-resources replaces the original manual finally/close block
        try (BufferedReader br = new BufferedReader(new FileReader(filename))) {
            String line;
            while ((line = br.readLine()) != null) {
                log.info("Processing line: " + line);
                Tuple t = parseLine(line);
                if (t != null) {
                    urls.add(t);
                }
            }
        } catch (FileNotFoundException ex) {
            log.error("File with service url not found: ", ex);
        } catch (IOException ex) {
            log.error("Error reading file: ", ex);
        }
        log.info("Finished parsing");
    }

    /**
     * Extracts the known fields from one INSERT line.
     * @return A filled Tuple, or {@code null} if the line does not yield a valid URL.
     */
    private Tuple parseLine(String line) {
        Tuple t = null;
        String protocol = getValue(POS_PROTOCOL, line);
        String name = getValue(POS_NAME, line);
        String url = getValue(POS_URL, line);
        String tilingProtocol = getValue(POS_TILINGPROTOCOL, line);
        log.info("Naam: " + name);
        log.info("URL: " + url);
        try {
            URL u = new URL(url);
            t = new Tuple();
            t.url = u;
            t.naam = name;
            t.protocol = protocol;
            t.tilingProtocol = tilingProtocol;
        } catch (MalformedURLException ex) {
            // FIX: the original message interleaved "line" and "url" into an
            // unreadable sentence
            log.error("Cannot create url from value '" + url + "' while parsing line: " + line);
        }
        return t;
    }

    /**
     * Returns the value at position {@code pos} (zero-based) of the
     * comma-separated list inside "VALUES (...)", with single quotes stripped.
     * Fields are assumed to be separated by ", " (comma plus space).
     * @return The field value, or {@code null} if the line is malformed
     *         (no VALUES clause, or fewer than {@code pos + 1} fields).
     */
    public String getValue(int pos, String line) {
        int valuesIndex = line.indexOf("VALUES (");
        if (valuesIndex == -1) {
            // FIX: indexOf() == -1 previously produced a silent substring(7)
            log.error("Line does not contain a VALUES clause: " + line);
            return null;
        }
        String rest = line.substring(valuesIndex + 8);
        String delimiter = ",";
        String found = null;
        for (int i = 0; i <= pos; i++) {
            int index = rest.indexOf(delimiter);
            if (index == -1) {
                // FIX: previously threw StringIndexOutOfBoundsException on
                // lines with too few fields
                log.error("Not enough fields in line: " + line);
                return null;
            }
            found = rest.substring(0, index);
            rest = rest.substring(index + 2); // skip ", " (delimiter plus space)
        }
        return found.replace("'", ""); // literal replace; no regex needed
    }

    /**
     * Builds the "add geoservice" request URL for every parsed tuple.
     * NOTE(review): the request string is only logged, never sent — looks like
     * the HTTP call still has to be implemented; behavior kept as-is.
     */
    public void start() {
        int category = 2;
        String request = url.toString() + "action/geoservice?add=true&category=" + category;
        for (Tuple t : urls) {
            try {
                String r2 = request;
                String urlEncoded = URLEncoder.encode(t.url.toString(), "UTF-8");
                r2 += "&url=" + urlEncoded;
                r2 += "&protocol=" + t.protocol;
                log.debug("Request url: " + r2);
            } catch (UnsupportedEncodingException ex) {
                log.error("Cannot encode uri", ex);
            }
        }
    }

    /**
     * Entry point. Expects exactly two arguments: the input filename and the
     * base URL of the target application.
     */
    public static void main(String[] args) throws MalformedURLException {
        if (args.length != 2) {
            log.error("Incorrect number of arguments given: " + args.length);
            throw new IllegalArgumentException("Should supply 2 arguments: filename and url");
        }
        String filename = args[0];
        String url = args[1];
        Loader l = new Loader(new URL(url));
        l.parse(filename);
        l.start();
    }

    /** Value holder for one parsed service line. */
    class Tuple {
        private URL url;
        private String protocol;
        private String naam;
        private String tilingProtocol;
    }
}
package dimensions.solar.material;

/**
 * Catalog of materials with their display name, Minecraft identifier and basic
 * properties (whether the material is a block, and whether it is solid).
 */
public enum Material {

    //Blocks
    AIR("Air", "air", true, false),
    STONE("Stone", "stone", true, true),
    GRASS("Grass Block", "grass", true, true),
    DIRT("Dirt", "dirt", true, true),
    COBBLESTONE("Cobblestone", "cobblestone", true, true),
    WOOD_PLANKS("Wood Planks", "planks", true, true),
    SAPLING("Sapling", "sapling", true, false),
    BEDROCK("Bedrock", "bedrock", true, true),
    WATER("Flowing Water", "flowing_water", true, false),
    STATIONARY_WATER("Stationary Water", "water", true, false),
    LAVA("Flowing Lava", "flowing_lava", true, false),
    STATIONARY_LAVA("Stationary Lava", "lava", true, false),
    SAND("Sand", "sand", true, true),
    GRAVEL("Gravel", "gravel", true, true),
    GOLD_ORE("Gold Ore", "gold_ore", true, true),
    IRON_ORE("Iron Ore", "iron_ore", true, true),
    COAL_ORE("Coal Ore", "coal_ore", true, true),
    WOOD("Wood", "log", true, true),
    LEAVES("Leaves", "leaves", true, true),
    SPONGE("Sponge", "sponge", true, true),
    GLASS("Glass", "glass", true, true),
    LAPIS_ORE("Lapis Ore", "lapis_ore", true, true),
    LAPIS_BLOCK("Lapis Block", "lapis_block", true, true),
    DISPENSER("Dispenser", "dispenser", true, true),
    SANDSTONE("Sandstone", "sandstone", true, true),
    NOTEBLOCK("Noteblock", "noteblock", true, true),
    BED("Bed", "bed", true, true),
    POWERED_RAILS("Powered Rails", "golden_rails", true, false),
    DETECTOR_RAILS("Detector Rails", "detector_rails", true, false),
    STICKY_PISTON("Sticky Piston", "sticky_piston", true, false),
    WEB("Cobweb", "web", true, false),
    TALLGRASS("Tallgrass", "tallgrass", true, false),
    DEADBUSH("Dead Bush", "deadbush", true, false),
    PISTON("Piston", "piston", true, true),
    PISTON_HEAD("Piston Head", "piston_head", true, false),
    WOOL("Wool", "wool", true, true),
    PISTON_EXTENSION("Piston Extension", "piston_extension", true, false),
    // NOTE(review): constant name is misspelled ("DANDELIOIN"); kept because
    // renaming an enum constant would break existing callers
    DANDELIOIN("Dandelion", "yellow_flower", true, false),
    // FIX: the original line was a syntax error — mismatched quote and missing
    // commas: POPPY("Poppy", "red_flower', true false)
    POPPY("Poppy", "red_flower", true, false),
    BROWN_MUSHROOM("Brown Mushroom", "brown_mushroom", true, false),
    RED_MUSHROOM("Red Mushroom", "red_mushroom", true, false),
    GOLD_BLOCK("Gold Block", "gold_block", true, true),
    IRON_BLOCK("Iron Block", "iron_block", true, true),
    DOUBLE_STONE_SLAB("Double Stone Slab", "double_stone_slab", true, true),
    STONE_SLAB("Stone Slab", "stone_slab", true, true),
    BRICKS("Bricks", "brick_block", true, true),
    TNT("TNT", "tnt", true, true),
    BOOKSHELF("Bookshelf", "bookshelf", true, true),
    MOSSY_COBBLESTONE("Mossy Cobblestone", "mossy_cobblestone", true, true),
    OBSIDIAN("Obsidian", "obsidian", true, true),
    TORCH("Torch", "torch", true, false),
    FIRE("Fire", "fire", true, false),
    // NOTE(review): display name reads "Mod Spawner" — probably "Mob Spawner";
    // kept byte-identical since it is a runtime string
    MOB_SPAWNER("Mod Spawner", "mob_spawner", true, true),
    OAK_STAIRS("Oak Stairs", "oak_stairs", true, true),
    CHEST("Chest", "chest", true, true),
    // NOTE(review): constant name is misspelled ("RESTONE_WIRE"); kept for
    // backward compatibility with existing callers
    RESTONE_WIRE("Redstone Wire", "redstone_wire", true, false),
    DIAMOND_ORE("Diamond Ore", "diamond_ore", true, true),
    DIAMOND_BLOCK("Diamond Block", "diamond_block", true, true),
    CRAFTING_TABLE("Crafting Table", "crafting_table", true, true),
    WHEAT("Wheat", "wheat", true, false),
    FARMLAND("Farmland", "farmland", true, true),
    FURNACE("Furnace", "furnace", true, true),
    FURNACE_BURNING("Burning Furnace", "lit_furnace", true, true),
    SIGN_STANDING("Standing Sign", "standing_sign", true, false),
    OAK_DOOR("Wooden Door", "wooden_door", true, true),
    LADDER("Ladder", "ladder", true, false),
    ;

    private final String name;            // human-readable display name
    private final boolean isBlock, isSolid;
    private final String minecraft_id;    // id suffix after the "minecraft:" namespace

    Material(String name, String minecraft_id, boolean isBlock, boolean isSolid) {
        this.name = name;
        this.minecraft_id = minecraft_id;
        this.isBlock = isBlock;
        this.isSolid = isSolid;
    }

    /** Returns the human-readable display name. */
    @Override
    public String toString() {
        return name;
    }

    /** Returns the fully namespaced Minecraft identifier, e.g. {@code minecraft:stone}. */
    public String getId() {
        return "minecraft:" + minecraft_id;
    }

    public boolean isBlock() {
        return isBlock;
    }

    public boolean isSolid() {
        return isSolid;
    }
}
package org.apache.helix.task; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import org.apache.helix.HelixDefinedState; import org.apache.helix.HelixManager; import org.apache.helix.common.caches.TaskDataCache; import org.apache.helix.controller.rebalancer.util.RebalanceScheduler; import org.apache.helix.controller.stages.ClusterDataCache; import org.apache.helix.controller.stages.CurrentStateOutput; import org.apache.helix.model.InstanceConfig; import org.apache.helix.model.Message; import org.apache.helix.model.Partition; import org.apache.helix.model.ResourceAssignment; import org.apache.helix.monitoring.mbeans.ClusterStatusMonitor; import org.apache.helix.task.assigner.AssignableInstance; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public abstract class AbstractTaskDispatcher { private static final Logger LOG = LoggerFactory.getLogger(AbstractTaskDispatcher.class); // For connection management protected HelixManager _manager; protected static RebalanceScheduler _rebalanceScheduler = new RebalanceScheduler(); protected ClusterStatusMonitor _clusterStatusMonitor; public void init(HelixManager manager) { _manager = manager; } // Job Update related methods public void updatePreviousAssignedTasksStatus( Map<String, SortedSet<Integer>> prevInstanceToTaskAssignments, Set<String> excludedInstances, String jobResource, CurrentStateOutput currStateOutput, JobContext jobCtx, JobConfig jobCfg, ResourceAssignment prevTaskToInstanceStateAssignment, TaskState jobState, Set<Integer> assignedPartitions, Set<Integer> partitionsToDropFromIs, Map<Integer, PartitionAssignment> paMap, TargetState jobTgtState, Set<Integer> skippedPartitions, ClusterDataCache cache) { // Get AssignableInstanceMap for releasing 
resources for tasks in terminal states Map<String, AssignableInstance> assignableInstanceMap = cache.getAssignableInstanceManager().getAssignableInstanceMap(); // Iterate through all instances for (String instance : prevInstanceToTaskAssignments.keySet()) { if (excludedInstances.contains(instance)) { continue; } Set<Integer> pSet = prevInstanceToTaskAssignments.get(instance); // Used to keep track of partitions that are in one of the final states: COMPLETED, TIMED_OUT, // TASK_ERROR, ERROR. Set<Integer> donePartitions = new TreeSet<>(); for (int pId : pSet) { final String pName = pName(jobResource, pId); TaskPartitionState currState = updateJobContextAndGetTaskCurrentState(currStateOutput, jobResource, pId, pName, instance, jobCtx); // This avoids a race condition in the case that although currentState is in the following // error condition, the pending message (INIT->RUNNNING) might still be present. // This is undesirable because this prevents JobContext from getting the proper update of // fields including task state and task's NUM_ATTEMPTS if (currState == TaskPartitionState.ERROR || currState == TaskPartitionState.TASK_ERROR || currState == TaskPartitionState.TIMED_OUT || currState == TaskPartitionState.TASK_ABORTED) { markPartitionError(jobCtx, pId, currState, true); } // Check for pending state transitions on this (partition, instance). If there is a pending // state transition, we prioritize this pending state transition and set the assignment from // this pending state transition, essentially "waiting" until this pending message clears Message pendingMessage = currStateOutput.getPendingMessage(jobResource, new Partition(pName), instance); if (pendingMessage != null && !pendingMessage.getToState().equals(currState.name())) { // If there is a pending message whose destination state is different from the current // state, just make the same assignment as the pending message. 
This is essentially // "waiting" until this state transition is complete processTaskWithPendingMessage(prevTaskToInstanceStateAssignment, pId, pName, instance, pendingMessage, jobState, currState, paMap, assignedPartitions); continue; } // Get AssignableInstance for this instance and TaskConfig for releasing resources String quotaType = jobCfg.getJobType(); AssignableInstance assignableInstance = assignableInstanceMap.get(instance); String taskId; if (TaskUtil.isGenericTaskJob(jobCfg)) { taskId = jobCtx.getTaskIdForPartition(pId); } else { taskId = pName; } TaskConfig taskConfig = jobCfg.getTaskConfig(taskId); // Process any requested state transitions. If there is a requested state transition, just // "wait" until this state transition is complete String requestedStateStr = currStateOutput.getRequestedState(jobResource, new Partition(pName), instance); if (requestedStateStr != null && !requestedStateStr.isEmpty()) { TaskPartitionState requestedState = TaskPartitionState.valueOf(requestedStateStr); if (requestedState.equals(currState)) { LOG.warn(String.format( "Requested state %s is the same as the current state for instance %s.", requestedState, instance)); } // For STOPPED tasks, if the targetState is STOP, we should not honor requestedState // transition and make it a NOP if (currState == TaskPartitionState.STOPPED && jobTgtState == TargetState.STOP) { // This task is STOPPED and not going to be re-run, so release this task assignableInstance.release(taskConfig, quotaType); continue; } paMap.put(pId, new PartitionAssignment(instance, requestedState.name())); assignedPartitions.add(pId); if (LOG.isDebugEnabled()) { LOG.debug( String.format("Instance %s requested a state transition to %s for partition %s.", instance, requestedState, pName)); } continue; } switch (currState) { case RUNNING: { TaskPartitionState nextState = TaskPartitionState.RUNNING; if (jobState == TaskState.TIMING_OUT) { nextState = TaskPartitionState.TASK_ABORTED; } else if (jobTgtState == 
TargetState.STOP) { nextState = TaskPartitionState.STOPPED; } else if (jobState == TaskState.ABORTED || jobState == TaskState.FAILED || jobState == TaskState.FAILING || jobState == TaskState.TIMED_OUT) { // Drop tasks if parent job is not in progress paMap.put(pId, new PartitionAssignment(instance, TaskPartitionState.DROPPED.name())); break; } paMap.put(pId, new PartitionAssignment(instance, nextState.name())); assignedPartitions.add(pId); if (LOG.isDebugEnabled()) { LOG.debug(String.format("Setting task partition %s state to %s on instance %s.", pName, nextState, instance)); } } break; case STOPPED: { // TODO: This case statement might be unreachable code - Hunter // This code may need to be removed because once a task is STOPPED and its workflow's // targetState is STOP, we do not assign that stopped task. Not assigning means it will // not be included in previousAssignment map in the next rebalance. If it is not in // prevInstanceToTaskAssignments, it will never hit this part of the code // When the parent workflow is to be resumed (target state is START), then it will just be // assigned as if it were being assigned for the first time TaskPartitionState nextState; if (jobTgtState.equals(TargetState.START)) { nextState = TaskPartitionState.RUNNING; } else { nextState = TaskPartitionState.STOPPED; // This task is STOPPED and not going to be re-run, so release this task assignableInstance.release(taskConfig, quotaType); } paMap.put(pId, new JobRebalancer.PartitionAssignment(instance, nextState.name())); assignedPartitions.add(pId); if (LOG.isDebugEnabled()) { LOG.debug(String.format("Setting task partition %s state to %s on instance %s.", pName, nextState, instance)); } } break; case COMPLETED: { // The task has completed on this partition. Mark as such in the context object. donePartitions.add(pId); if (LOG.isDebugEnabled()) { LOG.debug(String.format( "Task partition %s has completed with state %s. 
Marking as such in rebalancer context.", pName, currState)); } partitionsToDropFromIs.add(pId); markPartitionCompleted(jobCtx, pId); // This task is COMPLETED, so release this task assignableInstance.release(taskConfig, quotaType); } break; case TIMED_OUT: case TASK_ERROR: case TASK_ABORTED: case ERROR: { donePartitions.add(pId); // The task may be rescheduled on a different instance. if (LOG.isDebugEnabled()) { LOG.debug(String.format( "Task partition %s has error state %s with msg %s. Marking as such in rebalancer context.", pName, currState, jobCtx.getPartitionInfo(pId))); } // The error policy is to fail the task as soon a single partition fails for a specified // maximum number of attempts or task is in ABORTED state. // But notice that if job is TIMED_OUT, aborted task won't be treated as fail and won't // cause job fail. // After all tasks are aborted, they will be dropped, because of job timeout. if (jobState != TaskState.TIMED_OUT && jobState != TaskState.TIMING_OUT) { if (jobCtx.getPartitionNumAttempts(pId) >= jobCfg.getMaxAttemptsPerTask() || currState.equals(TaskPartitionState.TASK_ABORTED) || currState.equals(TaskPartitionState.ERROR)) { skippedPartitions.add(pId); partitionsToDropFromIs.add(pId); if (LOG.isDebugEnabled()) { LOG.debug("skippedPartitions:" + skippedPartitions); } } else { // Mark the task to be started at some later time (if enabled) markPartitionDelayed(jobCfg, jobCtx, pId); } } // Release this task assignableInstance.release(taskConfig, quotaType); } break; case INIT: { // INIT is a temporary state for tasks // Two possible scenarios for INIT: // 1. Task is getting scheduled for the first time. In this case, Task's state will go // from null->INIT->RUNNING, and this INIT state will be transient and very short-lived // 2. Task is getting scheduled for the first time, but in this case, job is timed out or // timing out. In this case, it will be sent back to INIT state to be removed. 
Here we // ensure that this task then goes from INIT to DROPPED so that it will be released from // AssignableInstance to prevent resource leak if (jobState == TaskState.TIMED_OUT || jobState == TaskState.TIMING_OUT || jobTgtState == TargetState.DELETE) { // Job is timed out or timing out or targetState is to be deleted, so its tasks will be // sent back to INIT // In this case, tasks' IdealState will be removed, and they will be sent to DROPPED partitionsToDropFromIs.add(pId); // Also release resources for these tasks assignableInstance.release(taskConfig, quotaType); } else if (jobState == TaskState.IN_PROGRESS && (jobTgtState != TargetState.STOP && jobTgtState != TargetState.DELETE)) { // Job is in progress, implying that tasks are being re-tried, so set it to RUNNING paMap.put(pId, new JobRebalancer.PartitionAssignment(instance, TaskPartitionState.RUNNING.name())); assignedPartitions.add(pId); } } case DROPPED: { // currState in [INIT, DROPPED]. Do nothing, the partition is eligible to be reassigned. donePartitions.add(pId); if (LOG.isDebugEnabled()) { LOG.debug(String.format( "Task partition %s has state %s. It will be dropped from the current ideal state.", pName, currState)); } // If it's DROPPED, release this task. If INIT, do not release if (currState == TaskPartitionState.DROPPED) { assignableInstance.release(taskConfig, quotaType); } } break; default: throw new AssertionError("Unknown enum symbol: " + currState); } } // Remove the set of task partitions that are completed or in one of the error states. pSet.removeAll(donePartitions); } } /** * Computes the partition name given the resource name and partition id. */ protected String pName(String resource, int pId) { return String.format("%s_%s", resource, pId); } /** * An (instance, state) pair. 
*/ protected static class PartitionAssignment { public final String _instance; public final String _state; PartitionAssignment(String instance, String state) { _instance = instance; _state = state; } } private TaskPartitionState updateJobContextAndGetTaskCurrentState( CurrentStateOutput currentStateOutput, String jobResource, Integer pId, String pName, String instance, JobContext jobCtx) { String currentStateString = currentStateOutput.getCurrentState(jobResource, new Partition(pName), instance); if (currentStateString == null) { // Task state is either DROPPED or INIT TaskPartitionState stateFromContext = jobCtx.getPartitionState(pId); return stateFromContext == null ? TaskPartitionState.INIT : stateFromContext; } TaskPartitionState currentState = TaskPartitionState.valueOf(currentStateString); jobCtx.setPartitionState(pId, currentState); String taskMsg = currentStateOutput.getInfo(jobResource, new Partition(pName), instance); if (taskMsg != null) { jobCtx.setPartitionInfo(pId, taskMsg); } return currentState; } /** * Create an assignment based on an already-existing pending message. This effectively lets the * Controller to "wait" until the pending state transition has been processed. 
* @param prevAssignment * @param pId * @param pName * @param instance * @param pendingMessage * @param jobState * @param currState * @param paMap * @param assignedPartitions */ private void processTaskWithPendingMessage(ResourceAssignment prevAssignment, Integer pId, String pName, String instance, Message pendingMessage, TaskState jobState, TaskPartitionState currState, Map<Integer, PartitionAssignment> paMap, Set<Integer> assignedPartitions) { // stateMap is a mapping of Instance -> TaskPartitionState (String) Map<String, String> stateMap = prevAssignment.getReplicaMap(new Partition(pName)); if (stateMap != null) { String prevState = stateMap.get(instance); if (!pendingMessage.getToState().equals(prevState)) { LOG.warn(String.format( "Task pending to-state is %s while previous assigned state is %s. This should not" + "happen.", pendingMessage.getToState(), prevState)); } if (jobState == TaskState.TIMING_OUT && currState == TaskPartitionState.INIT && prevState.equals(TaskPartitionState.RUNNING.name())) { // While job is timing out, if the task is pending on INIT->RUNNING, set it back to INIT, // so that Helix will cancel the transition. paMap.put(pId, new PartitionAssignment(instance, TaskPartitionState.INIT.name())); assignedPartitions.add(pId); if (LOG.isDebugEnabled()) { LOG.debug(String.format( "Task partition %s has a pending state transition on instance %s INIT->RUNNING. " + "Setting it back to INIT so that Helix can cancel the transition(if enabled).", pName, instance, prevState)); } } else { // Otherwise, Just copy forward // the state assignment from the previous ideal state. paMap.put(pId, new PartitionAssignment(instance, prevState)); assignedPartitions.add(pId); if (LOG.isDebugEnabled()) { LOG.debug(String.format( "Task partition %s has a pending state transition on instance %s. 
Using the previous ideal state which was %s.", pName, instance, prevState)); } } } } protected static void markPartitionCompleted(JobContext ctx, int pId) { ctx.setPartitionState(pId, TaskPartitionState.COMPLETED); ctx.setPartitionFinishTime(pId, System.currentTimeMillis()); ctx.incrementNumAttempts(pId); } protected static void markPartitionError(JobContext ctx, int pId, TaskPartitionState state, boolean incrementAttempts) { ctx.setPartitionState(pId, state); ctx.setPartitionFinishTime(pId, System.currentTimeMillis()); if (incrementAttempts) { ctx.incrementNumAttempts(pId); } } protected static void markAllPartitionsError(JobContext ctx, TaskPartitionState state, boolean incrementAttempts) { for (int pId : ctx.getPartitionSet()) { markPartitionError(ctx, pId, state, incrementAttempts); } } protected static void markPartitionDelayed(JobConfig cfg, JobContext ctx, int p) { long delayInterval = cfg.getTaskRetryDelay(); if (delayInterval <= 0) { return; } long nextStartTime = ctx.getPartitionFinishTime(p) + delayInterval; ctx.setNextRetryTime(p, nextStartTime); } protected void handleJobTimeout(JobContext jobCtx, WorkflowContext workflowCtx, String jobResource, JobConfig jobCfg) { jobCtx.setFinishTime(System.currentTimeMillis()); workflowCtx.setJobState(jobResource, TaskState.TIMED_OUT); // Mark all INIT task to TASK_ABORTED for (int pId : jobCtx.getPartitionSet()) { if (jobCtx.getPartitionState(pId) == TaskPartitionState.INIT) { jobCtx.setPartitionState(pId, TaskPartitionState.TASK_ABORTED); } } _clusterStatusMonitor.updateJobCounters(jobCfg, TaskState.TIMED_OUT); _rebalanceScheduler.removeScheduledRebalance(jobResource); TaskUtil.cleanupJobIdealStateExtView(_manager.getHelixDataAccessor(), jobResource); } protected void failJob(String jobName, WorkflowContext workflowContext, JobContext jobContext, WorkflowConfig workflowConfig, Map<String, JobConfig> jobConfigMap, ClusterDataCache clusterDataCache) { markJobFailed(jobName, jobContext, workflowConfig, 
workflowContext, jobConfigMap, clusterDataCache.getTaskDataCache()); // Mark all INIT task to TASK_ABORTED for (int pId : jobContext.getPartitionSet()) { if (jobContext.getPartitionState(pId) == TaskPartitionState.INIT) { jobContext.setPartitionState(pId, TaskPartitionState.TASK_ABORTED); } } _clusterStatusMonitor.updateJobCounters(jobConfigMap.get(jobName), TaskState.FAILED); _rebalanceScheduler.removeScheduledRebalance(jobName); TaskUtil.cleanupJobIdealStateExtView(_manager.getHelixDataAccessor(), jobName); } // Compute real assignment from theoretical calculation with applied throttling // This is the actual assigning part protected void handleAdditionalTaskAssignment( Map<String, SortedSet<Integer>> prevInstanceToTaskAssignments, Set<String> excludedInstances, String jobResource, CurrentStateOutput currStateOutput, JobContext jobCtx, JobConfig jobCfg, WorkflowConfig workflowConfig, WorkflowContext workflowCtx, ClusterDataCache cache, ResourceAssignment prevTaskToInstanceStateAssignment, Set<Integer> assignedPartitions, Map<Integer, PartitionAssignment> paMap, Set<Integer> skippedPartitions, TaskAssignmentCalculator taskAssignmentCal, Set<Integer> allPartitions, long currentTime, Collection<String> liveInstances) { // See if there was LiveInstance change and cache LiveInstances from this iteration of pipeline boolean existsLiveInstanceOrCurrentStateChange = cache.getExistsLiveInstanceOrCurrentStateChange(); // The excludeSet contains the set of task partitions that must be excluded from consideration // when making any new assignments. // This includes all completed, failed, delayed, and already assigned partitions. 
Set<Integer> excludeSet = Sets.newTreeSet(assignedPartitions); addCompletedTasks(excludeSet, jobCtx, allPartitions); addGiveupPartitions(excludeSet, jobCtx, allPartitions, jobCfg); excludeSet.addAll(skippedPartitions); Set<Integer> partitionsWithDelay = TaskUtil.getNonReadyPartitions(jobCtx, currentTime); excludeSet.addAll(partitionsWithDelay); // The following is filtering of tasks before passing them to the assigner // Only feed in tasks that need to be assigned (null and STOPPED) Set<Integer> filteredTaskPartitionNumbers = filterTasks(allPartitions, jobCtx, liveInstances); // Remove all excludeSet tasks to be safer because some STOPPED tasks have been already // re-started (excludeSet includes already-assigned partitions). Also tasks with their retry // limit exceed (addGiveupPartitions) will be removed as well filteredTaskPartitionNumbers.removeAll(excludeSet); Set<Integer> partitionsToRetryOnLiveInstanceChangeForTargetedJob = new HashSet<>(); // If the job is a targeted job, in case of live instance change, we need to assign // non-terminal tasks so that they could be re-scheduled if (!TaskUtil.isGenericTaskJob(jobCfg) && existsLiveInstanceOrCurrentStateChange) { // This job is a targeted job, so FixedAssignmentCalculator will be used // There has been a live instance change. 
Must re-add incomplete task partitions to be // re-assigned and re-scheduled for (int partitionNum : allPartitions) { TaskPartitionState taskPartitionState = jobCtx.getPartitionState(partitionNum); if (isTaskNotInTerminalState(taskPartitionState) && !partitionsWithDelay.contains(partitionNum)) { // Some targeted tasks may have timed-out due to Participants (instances) not being // live, so we give tasks like these another try // If some of these tasks are already scheduled and running, they will be dropped as // well // Also, do not include partitions with delay that are not ready to be assigned and // scheduled partitionsToRetryOnLiveInstanceChangeForTargetedJob.add(partitionNum); } } } filteredTaskPartitionNumbers.addAll(partitionsToRetryOnLiveInstanceChangeForTargetedJob); // The actual assignment is computed here // Get instance->[partition, ...] mappings for the target resource. Map<String, SortedSet<Integer>> tgtPartitionAssignments = taskAssignmentCal.getTaskAssignment( currStateOutput, prevTaskToInstanceStateAssignment, liveInstances, jobCfg, jobCtx, workflowConfig, workflowCtx, filteredTaskPartitionNumbers, cache.getIdealStates()); if (!TaskUtil.isGenericTaskJob(jobCfg) && jobCfg.isRebalanceRunningTask()) { // TODO: Revisit the logic for isRebalanceRunningTask() and valid use cases for it // TODO: isRebalanceRunningTask() was originally put in place to allow users to move // ("rebalance") long-running tasks, but there hasn't been a clear use case for this // Previously, there was a bug in the condition above (it was || where it should have been &&) dropRebalancedRunningTasks(tgtPartitionAssignments, prevInstanceToTaskAssignments, paMap, jobCtx); } // If this is a targeted job and if there was a live instance change if (!TaskUtil.isGenericTaskJob(jobCfg) && existsLiveInstanceOrCurrentStateChange) { // Drop current jobs only if they are assigned to a different instance, regardless of // the jobCfg.isRebalanceRunningTask() setting 
dropRebalancedRunningTasks(tgtPartitionAssignments, prevInstanceToTaskAssignments, paMap, jobCtx); } // Go through ALL instances and assign/throttle tasks accordingly for (Map.Entry<String, SortedSet<Integer>> entry : prevInstanceToTaskAssignments.entrySet()) { String instance = entry.getKey(); if (!tgtPartitionAssignments.containsKey(instance)) { // There is no assignment made for this instance, so it is safe to skip continue; } if (excludedInstances.contains(instance)) { // There is a task assignment made for this instance, but for some reason, we cannot // assign to this instance. So we must skip the actual scheduling, but we must also // release the prematurely assigned tasks from AssignableInstance if (!cache.getAssignableInstanceManager().getAssignableInstanceMap() .containsKey(instance)) { continue; // This should not happen; skip! } AssignableInstance assignableInstance = cache.getAssignableInstanceManager().getAssignableInstanceMap().get(instance); String quotaType = jobCfg.getJobType(); for (int partitionNum : tgtPartitionAssignments.get(instance)) { // Get the TaskConfig for this partitionNumber String taskId = getTaskId(jobCfg, jobCtx, partitionNum); TaskConfig taskConfig = jobCfg.getTaskConfig(taskId); assignableInstance.release(taskConfig, quotaType); } continue; } // 1. throttled by job configuration // Contains the set of task partitions currently assigned to the instance. Set<Integer> pSet = entry.getValue(); int jobCfgLimitation = jobCfg.getNumConcurrentTasksPerInstance() - pSet.size(); // 2. 
throttled by participant capacity int participantCapacity = cache.getInstanceConfigMap().get(instance).getMaxConcurrentTask(); if (participantCapacity == InstanceConfig.MAX_CONCURRENT_TASK_NOT_SET) { participantCapacity = cache.getClusterConfig().getMaxConcurrentTaskPerInstance(); } int participantLimitation = participantCapacity - cache.getParticipantActiveTaskCount(instance); // New tasks to be assigned int numToAssign = Math.min(jobCfgLimitation, participantLimitation); if (LOG.isDebugEnabled()) { LOG.debug(String.format( "Throttle tasks to be assigned to instance %s using limitation: Job Concurrent Task(%d), " + "Participant Max Task(%d). Remaining capacity %d.", instance, jobCfgLimitation, participantCapacity, numToAssign)); } Set<Integer> throttledSet = new HashSet<>(); if (numToAssign > 0) { List<Integer> nextPartitions = getNextPartitions(tgtPartitionAssignments.get(instance), excludeSet, throttledSet, numToAssign); for (Integer pId : nextPartitions) { String pName = pName(jobResource, pId); paMap.put(pId, new PartitionAssignment(instance, TaskPartitionState.RUNNING.name())); excludeSet.add(pId); jobCtx.setAssignedParticipant(pId, instance); jobCtx.setPartitionState(pId, TaskPartitionState.INIT); jobCtx.setPartitionStartTime(pId, System.currentTimeMillis()); if (LOG.isDebugEnabled()) { LOG.debug(String.format("Setting task partition %s state to %s on instance %s.", pName, TaskPartitionState.RUNNING, instance)); } } cache.setParticipantActiveTaskCount(instance, cache.getParticipantActiveTaskCount(instance) + nextPartitions.size()); } else { // No assignment was actually scheduled, so this assignment needs to be released // Put all assignments in throttledSet. 
Be sure to subtract excludeSet because excludeSet is // already applied at filteringPartitions (excludeSet may contain partitions that are // currently running) Set<Integer> throttledSetWithExcludeSet = new HashSet<>(tgtPartitionAssignments.get(instance)); throttledSetWithExcludeSet.removeAll(excludeSet); // Remove excludeSet throttledSet.addAll(throttledSetWithExcludeSet); } if (!throttledSet.isEmpty()) { // Release the tasks in throttledSet because they weren't actually assigned if (!cache.getAssignableInstanceManager().getAssignableInstanceMap() .containsKey(instance)) { continue; } AssignableInstance assignableInstance = cache.getAssignableInstanceManager().getAssignableInstanceMap().get(instance); String quotaType = jobCfg.getJobType(); for (int partitionNum : throttledSet) { // Get the TaskConfig for this partitionNumber String taskId = getTaskId(jobCfg, jobCtx, partitionNum); TaskConfig taskConfig = jobCfg.getTaskConfig(taskId); assignableInstance.release(taskConfig, quotaType); } LOG.debug( throttledSet.size() + "tasks are ready but throttled when assigned to participant."); } } } protected void scheduleForNextTask(String job, JobContext jobCtx, long now) { // Figure out the earliest schedulable time in the future of a non-complete job boolean shouldSchedule = false; long earliestTime = Long.MAX_VALUE; for (int p : jobCtx.getPartitionSet()) { long retryTime = jobCtx.getNextRetryTime(p); TaskPartitionState state = jobCtx.getPartitionState(p); state = (state != null) ? 
state : TaskPartitionState.INIT; Set<TaskPartitionState> errorStates = Sets.newHashSet(TaskPartitionState.ERROR, TaskPartitionState.TASK_ERROR, TaskPartitionState.TIMED_OUT); if (errorStates.contains(state) && retryTime > now && retryTime < earliestTime) { earliestTime = retryTime; shouldSchedule = true; } } // If any was found, then schedule it if (shouldSchedule) { long scheduledTime = _rebalanceScheduler.getRebalanceTime(job); if (scheduledTime == -1 || earliestTime < scheduledTime) { _rebalanceScheduler.scheduleRebalance(_manager, job, earliestTime); } } } // add all partitions that have been tried maxNumberAttempts protected static void addGiveupPartitions(Set<Integer> set, JobContext ctx, Iterable<Integer> pIds, JobConfig cfg) { for (Integer pId : pIds) { if (isTaskGivenup(ctx, cfg, pId)) { set.add(pId); } } } private static List<Integer> getNextPartitions(SortedSet<Integer> candidatePartitions, Set<Integer> excluded, Set<Integer> throttled, int n) { List<Integer> result = new ArrayList<Integer>(); for (Integer pId : candidatePartitions) { if (!excluded.contains(pId)) { if (result.size() < n) { result.add(pId); } else { throttled.add(pId); } } } return result; } private static void addCompletedTasks(Set<Integer> set, JobContext ctx, Iterable<Integer> pIds) { for (Integer pId : pIds) { TaskPartitionState state = ctx.getPartitionState(pId); if (state == TaskPartitionState.COMPLETED) { set.add(pId); } } } /** * Returns a filtered Iterable of tasks. To filter tasks in this context means to only allow tasks * whose contexts are either null or in STOPPED, TIMED_OUT, TASK_ERROR, or DROPPED state because * only the * tasks whose contexts are in these states are eligible to be assigned or re-tried. * Also, for those tasks in non-terminal states whose previously assigned instances are no longer * LiveInstances are re-added so that they could be re-assigned. 
* @param allPartitions * @param jobContext * @return a filter Iterable of task partition numbers */ private Set<Integer> filterTasks(Iterable<Integer> allPartitions, JobContext jobContext, Collection<String> liveInstances) { Set<Integer> filteredTasks = new HashSet<>(); for (int partitionNumber : allPartitions) { TaskPartitionState state = jobContext.getPartitionState(partitionNumber); // Allow tasks eligible for scheduling if (state == null || state == TaskPartitionState.STOPPED || state == TaskPartitionState.TIMED_OUT || state == TaskPartitionState.TASK_ERROR || state == TaskPartitionState.DROPPED) { filteredTasks.add(partitionNumber); } // Allow tasks whose assigned instances are no longer live for rescheduling if (isTaskNotInTerminalState(state)) { String assignedParticipant = jobContext.getAssignedParticipant(partitionNumber); if (assignedParticipant != null && !liveInstances.contains(assignedParticipant)) { // The assigned instance is no longer live, so mark it as DROPPED in the context jobContext.setPartitionState(partitionNumber, TaskPartitionState.DROPPED); filteredTasks.add(partitionNumber); } } } return filteredTasks; } /** * Returns whether if the task is not in a terminal state and could be re-scheduled. 
* @param state * @return */ private boolean isTaskNotInTerminalState(TaskPartitionState state) { return state != TaskPartitionState.COMPLETED && state != TaskPartitionState.TASK_ABORTED && state != TaskPartitionState.DROPPED && state != TaskPartitionState.ERROR; } protected static boolean isTaskGivenup(JobContext ctx, JobConfig cfg, int pId) { TaskPartitionState state = ctx.getPartitionState(pId); if (state == TaskPartitionState.TASK_ABORTED || state == TaskPartitionState.ERROR) { return true; } if (state == TaskPartitionState.TIMED_OUT || state == TaskPartitionState.TASK_ERROR) { return ctx.getPartitionNumAttempts(pId) >= cfg.getMaxAttemptsPerTask(); } return false; } /** * If assignment is different from previous assignment, drop the old running task if it's no * longer assigned to the same instance, but not removing it from excludeSet because the same task * should not be assigned to the new instance right away. * Also only drop if the old and the new assignments both have the partition (task) and they * differ (because that means the task has been assigned to a different instance). */ private void dropRebalancedRunningTasks(Map<String, SortedSet<Integer>> newAssignment, Map<String, SortedSet<Integer>> oldAssignment, Map<Integer, PartitionAssignment> paMap, JobContext jobContext) { for (String instance : oldAssignment.keySet()) { for (int pId : oldAssignment.get(instance)) { if (jobContext.getPartitionState(pId) == TaskPartitionState.RUNNING) { // Check if the new assignment has this task on a different instance boolean existsInNewAssignment = false; for (Map.Entry<String, SortedSet<Integer>> entry : newAssignment.entrySet()) { if (!entry.getKey().equals(instance) && entry.getValue().contains(pId)) { // Found the partition number; new assignment has been made existsInNewAssignment = true; LOG.info( "Currently running task partition number: {} is being dropped from instance: {} and will be newly assigned to instance: {}. 
This is due to a LiveInstance/CurrentState change, and because this is a targeted task.", pId, instance, entry.getKey()); break; } } if (existsInNewAssignment) { // We need to drop this task in the old assignment paMap.put(pId, new PartitionAssignment(instance, TaskPartitionState.DROPPED.name())); jobContext.setPartitionState(pId, TaskPartitionState.DROPPED); // Now it will be dropped and be rescheduled } } } } } protected void markJobComplete(String jobName, JobContext jobContext, WorkflowConfig workflowConfig, WorkflowContext workflowContext, Map<String, JobConfig> jobConfigMap, ClusterDataCache clusterDataCache) { long currentTime = System.currentTimeMillis(); workflowContext.setJobState(jobName, TaskState.COMPLETED); jobContext.setFinishTime(currentTime); if (isWorkflowFinished(workflowContext, workflowConfig, jobConfigMap, clusterDataCache.getTaskDataCache())) { workflowContext.setFinishTime(currentTime); updateWorkflowMonitor(workflowContext, workflowConfig); } scheduleJobCleanUp(jobConfigMap.get(jobName), workflowConfig, currentTime); } protected void markJobFailed(String jobName, JobContext jobContext, WorkflowConfig workflowConfig, WorkflowContext workflowContext, Map<String, JobConfig> jobConfigMap, TaskDataCache clusterDataCache) { long currentTime = System.currentTimeMillis(); workflowContext.setJobState(jobName, TaskState.FAILED); if (jobContext != null) { jobContext.setFinishTime(currentTime); } if (isWorkflowFinished(workflowContext, workflowConfig, jobConfigMap, clusterDataCache)) { workflowContext.setFinishTime(currentTime); updateWorkflowMonitor(workflowContext, workflowConfig); } scheduleJobCleanUp(jobConfigMap.get(jobName), workflowConfig, currentTime); } protected void scheduleJobCleanUp(JobConfig jobConfig, WorkflowConfig workflowConfig, long currentTime) { long currentScheduledTime = _rebalanceScheduler.getRebalanceTime(workflowConfig.getWorkflowId()) == -1 ? 
Long.MAX_VALUE : _rebalanceScheduler.getRebalanceTime(workflowConfig.getWorkflowId()); if (currentTime + jobConfig.getExpiry() < currentScheduledTime) { _rebalanceScheduler.scheduleRebalance(_manager, workflowConfig.getWorkflowId(), currentTime + jobConfig.getExpiry()); } } // Workflow related methods /** * Checks if the workflow has finished (either completed or failed). * Set the state in workflow context properly. * @param ctx Workflow context containing job states * @param cfg Workflow config containing set of jobs * @return returns true if the workflow * 1. completed (all tasks are {@link TaskState#COMPLETED}) * 2. failed (any task is {@link TaskState#FAILED} * 3. workflow is {@link TaskState#TIMED_OUT} * returns false otherwise. */ protected boolean isWorkflowFinished(WorkflowContext ctx, WorkflowConfig cfg, Map<String, JobConfig> jobConfigMap, TaskDataCache clusterDataCache) { boolean incomplete = false; TaskState workflowState = ctx.getWorkflowState(); if (TaskState.TIMED_OUT.equals(workflowState)) { // We don't update job state here as JobRebalancer will do it return true; } // Check if failed job count is beyond threshold and if so, fail the workflow // and abort in-progress jobs int failedJobs = 0; for (String job : cfg.getJobDag().getAllNodes()) { TaskState jobState = ctx.getJobState(job); if (jobState == TaskState.FAILED || jobState == TaskState.TIMED_OUT) { failedJobs++; if (!cfg.isJobQueue() && failedJobs > cfg.getFailureThreshold()) { ctx.setWorkflowState(TaskState.FAILED); LOG.info("Workflow {} reached the failure threshold, so setting its state to FAILED.", cfg.getWorkflowId()); for (String jobToFail : cfg.getJobDag().getAllNodes()) { if (ctx.getJobState(jobToFail) == TaskState.IN_PROGRESS) { ctx.setJobState(jobToFail, TaskState.ABORTED); // Skip aborted jobs latency since they are not accurate latency for job running time if (_clusterStatusMonitor != null) { _clusterStatusMonitor.updateJobCounters(jobConfigMap.get(jobToFail), TaskState.ABORTED); 
} // Since the job is aborted, release resources occupied by it // Otherwise, we run the risk of resource leak if (clusterDataCache != null) { Iterable<AssignableInstance> assignableInstances = clusterDataCache .getAssignableInstanceManager().getAssignableInstanceMap().values(); JobConfig jobConfig = jobConfigMap.get(jobToFail); String quotaType = jobConfig.getJobType(); Map<String, TaskConfig> taskConfigMap = jobConfig.getTaskConfigMap(); // Iterate over all tasks and release them for (Map.Entry<String, TaskConfig> taskEntry : taskConfigMap.entrySet()) { TaskConfig taskConfig = taskEntry.getValue(); for (AssignableInstance assignableInstance : assignableInstances) { assignableInstance.release(taskConfig, quotaType); } } } } } return true; } } if (jobState != TaskState.COMPLETED && jobState != TaskState.FAILED && jobState != TaskState.TIMED_OUT) { incomplete = true; } } if (!incomplete && cfg.isTerminable()) { ctx.setWorkflowState(TaskState.COMPLETED); return true; } return false; } protected void updateWorkflowMonitor(WorkflowContext context, WorkflowConfig config) { if (_clusterStatusMonitor != null) { _clusterStatusMonitor.updateWorkflowCounters(config, context.getWorkflowState(), context.getFinishTime() - context.getStartTime()); } } // Common methods protected Set<String> getExcludedInstances(String currentJobName, WorkflowConfig workflowCfg, WorkflowContext workflowContext, ClusterDataCache cache) { Set<String> ret = new HashSet<>(); if (!workflowCfg.isAllowOverlapJobAssignment()) { // exclude all instances that has been assigned other jobs' tasks for (String jobName : workflowCfg.getJobDag().getAllNodes()) { if (jobName.equals(currentJobName)) { continue; } JobContext jobContext = cache.getJobContext(jobName); if (jobContext == null) { continue; } // Also skip if the job is not currently running // For example, if the job here is in a terminal state (such as ABORTED), then its tasks are // practically not running, so we do not need to exclude instances who 
have tasks from dead // jobs TaskState jobState = workflowContext.getJobState(jobName); if (jobState != TaskState.IN_PROGRESS) { continue; } for (int pId : jobContext.getPartitionSet()) { TaskPartitionState partitionState = jobContext.getPartitionState(pId); if (partitionState == TaskPartitionState.INIT || partitionState == TaskPartitionState.RUNNING) { ret.add(jobContext.getAssignedParticipant(pId)); } } } } return ret; } /** * Schedule the rebalancer timer for task framework elements * @param resourceId The resource id * @param startTime The resource start time * @param timeoutPeriod The resource timeout period. Will be -1 if it is not set. */ protected void scheduleRebalanceForTimeout(String resourceId, long startTime, long timeoutPeriod) { long nextTimeout = getTimeoutTime(startTime, timeoutPeriod); long nextRebalanceTime = _rebalanceScheduler.getRebalanceTime(resourceId); if (nextTimeout >= System.currentTimeMillis() && (nextRebalanceTime == TaskConstants.DEFAULT_NEVER_TIMEOUT || nextTimeout < nextRebalanceTime)) { _rebalanceScheduler.scheduleRebalance(_manager, resourceId, nextTimeout); } } /** * Basic function to check task framework resources, workflow and job, are timeout * @param startTime Resources start time * @param timeoutPeriod Resources timeout period. Will be -1 if it is not set. * @return */ protected boolean isTimeout(long startTime, long timeoutPeriod) { long nextTimeout = getTimeoutTime(startTime, timeoutPeriod); return nextTimeout != TaskConstants.DEFAULT_NEVER_TIMEOUT && nextTimeout <= System.currentTimeMillis(); } private long getTimeoutTime(long startTime, long timeoutPeriod) { return (timeoutPeriod == TaskConstants.DEFAULT_NEVER_TIMEOUT || timeoutPeriod > Long.MAX_VALUE - startTime) // check long overflow ? 
TaskConstants.DEFAULT_NEVER_TIMEOUT : startTime + timeoutPeriod; } /** * Set the ClusterStatusMonitor for metrics update */ public void setClusterStatusMonitor(ClusterStatusMonitor clusterStatusMonitor) { _clusterStatusMonitor = clusterStatusMonitor; } /** * Returns an appropriate TaskId depending on whether the job is targeted or not. * @param jobCfg * @param jobCtx * @param partitionNum * @return */ private String getTaskId(JobConfig jobCfg, JobContext jobCtx, int partitionNum) { if (TaskUtil.isGenericTaskJob(jobCfg)) { return jobCtx.getTaskIdForPartition(partitionNum); } // This is a targeted task return pName(jobCfg.getJobId(), partitionNum); } /** * Checks if the workflow has been stopped. * @param ctx Workflow context containing task states * @param cfg Workflow config containing set of tasks * @return returns true if all tasks are {@link TaskState#STOPPED}, false otherwise. */ protected boolean isWorkflowStopped(WorkflowContext ctx, WorkflowConfig cfg) { for (String job : cfg.getJobDag().getAllNodes()) { TaskState jobState = ctx.getJobState(job); if (jobState != null && (jobState.equals(TaskState.IN_PROGRESS) || jobState.equals(TaskState.STOPPING))) { return false; } } return true; } protected ResourceAssignment buildEmptyAssignment(String name, CurrentStateOutput currStateOutput) { ResourceAssignment assignment = new ResourceAssignment(name); Set<Partition> partitions = currStateOutput.getCurrentStateMappedPartitions(name); for (Partition partition : partitions) { Map<String, String> currentStateMap = currStateOutput.getCurrentStateMap(name, partition); Map<String, String> replicaMap = Maps.newHashMap(); for (String instanceName : currentStateMap.keySet()) { replicaMap.put(instanceName, HelixDefinedState.DROPPED.toString()); } assignment.addReplicaMap(partition, replicaMap); } return assignment; } /** * Check all the dependencies of a job to determine whether the job is ready to be scheduled. 
* @param job * @param workflowCfg * @param workflowCtx * @return */ protected boolean isJobReadyToSchedule(String job, WorkflowConfig workflowCfg, WorkflowContext workflowCtx, int incompleteAllCount, Map<String, JobConfig> jobConfigMap, TaskDataCache clusterDataCache) { int notStartedCount = 0; int failedOrTimeoutCount = 0; int incompleteParentCount = 0; for (String parent : workflowCfg.getJobDag().getDirectParents(job)) { TaskState jobState = workflowCtx.getJobState(parent); if (jobState == null || jobState == TaskState.NOT_STARTED) { ++notStartedCount; } else if (jobState == TaskState.FAILED || jobState == TaskState.TIMED_OUT) { ++failedOrTimeoutCount; } else if (jobState != TaskState.COMPLETED) { incompleteParentCount++; } } // If there is any parent job not started, this job should not be scheduled if (notStartedCount > 0) { if (LOG.isDebugEnabled()) { LOG.debug(String.format("Job %s is not ready to start, notStartedParent(s)=%d.", job, notStartedCount)); } return false; } // If there is parent job failed, schedule the job only when ignore dependent // job failure enabled JobConfig jobConfig = jobConfigMap.get(job); if (jobConfig == null) { LOG.error(String.format("The job config is missing for job %s", job)); return false; } if (failedOrTimeoutCount > 0 && !jobConfig.isIgnoreDependentJobFailure()) { markJobFailed(job, null, workflowCfg, workflowCtx, jobConfigMap, clusterDataCache); if (LOG.isDebugEnabled()) { LOG.debug(String.format("Job %s is not ready to start, failedCount(s)=%d.", job, failedOrTimeoutCount)); } return false; } if (workflowCfg.isJobQueue()) { // If job comes from a JobQueue, it should apply the parallel job logics if (incompleteAllCount >= workflowCfg.getParallelJobs()) { if (LOG.isDebugEnabled()) { LOG.debug(String.format("Job %s is not ready to schedule, inCompleteJobs(s)=%d.", job, incompleteAllCount)); } return false; } } else { // If this job comes from a generic workflow, job will not be scheduled until // all the direct parent jobs 
finished if (incompleteParentCount > 0) { if (LOG.isDebugEnabled()) { LOG.debug(String.format("Job %s is not ready to start, notFinishedParent(s)=%d.", job, incompleteParentCount)); } return false; } } return true; } /** * Check if a workflow is ready to schedule. * @param workflowCfg the workflow to check * @return true if the workflow is ready for schedule, false if not ready */ protected boolean isWorkflowReadyForSchedule(WorkflowConfig workflowCfg) { Date startTime = workflowCfg.getStartTime(); // Workflow with non-scheduled config or passed start time is ready to schedule. return (startTime == null || startTime.getTime() <= System.currentTimeMillis()); } }
package com.yahoo.vespa.model.application.validation.change.search;

import com.yahoo.config.application.api.ValidationOverrides;
import com.yahoo.vespa.model.application.validation.change.VespaConfigChangeAction;
import org.junit.Test;

import java.time.Instant;
import java.util.List;

import static com.yahoo.vespa.model.application.validation.change.ConfigChangeTestUtils.newRefeedAction;
import static com.yahoo.vespa.model.application.validation.change.ConfigChangeTestUtils.newRestartAction;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Tests which attribute-related schema changes require a restart action, a refeed action,
 * or no action at all.
 */
public class AttributeChangeValidatorTest {

    /**
     * Builds an {@link AttributeChangeValidator} over a "current" and a "next" schema
     * definition so each test can assert the actions produced by the transition.
     */
    private static class Fixture extends ContentClusterFixture {

        AttributeChangeValidator validator;

        public Fixture(String currentSd, String nextSd) throws Exception {
            super(currentSd, nextSd);
            validator = new AttributeChangeValidator(
                    currentDb().getDerivedConfiguration().getAttributeFields(),
                    currentDb().getDerivedConfiguration().getIndexSchema(),
                    currentDocType(),
                    nextDb().getDerivedConfiguration().getAttributeFields(),
                    nextDb().getDerivedConfiguration().getIndexSchema(),
                    nextDocType());
        }

        /** Runs the validator with no validation overrides at the current instant. */
        @Override
        public List<VespaConfigChangeAction> validate() {
            return validator.validate(ValidationOverrides.empty, Instant.now());
        }

    }

    @Test
    public void adding_attribute_aspect_require_restart() throws Exception {
        Fixture f = new Fixture("field f1 type string { indexing: summary }",
                "field f1 type string { indexing: attribute | summary }");
        f.assertValidation(newRestartAction(
                "Field 'f1' changed: add attribute aspect"));
    }

    @Test
    public void removing_attribute_aspect_require_restart() throws Exception {
        Fixture f = new Fixture("field f1 type string { indexing: attribute | summary }",
                "field f1 type string { indexing: summary }");
        f.assertValidation(newRestartAction(
                "Field 'f1' changed: remove attribute aspect"));
    }

    @Test
    public void adding_attribute_field_is_ok() throws Exception {
        Fixture f = new Fixture("",
                "field f1 type string { indexing: attribute | summary \n attribute: fast-search }");
        f.assertValidation();
    }

    @Test
    public void removing_attribute_field_is_ok() throws Exception {
        Fixture f = new Fixture("field f1 type string { indexing: attribute | summary }", "");
        f.assertValidation();
    }

    @Test
    public void changing_fast_search_require_restart() throws Exception {
        new Fixture("field f1 type string { indexing: attribute }",
                "field f1 type string { indexing: attribute \n attribute: fast-search }").
                assertValidation(newRestartAction(
                        "Field 'f1' changed: add attribute 'fast-search'"));
    }

    @Test
    public void changing_fast_access_require_restart() throws Exception {
        new Fixture("field f1 type string { indexing: attribute \n attribute: fast-access }",
                "field f1 type string { indexing: attribute }").
                assertValidation(newRestartAction(
                        "Field 'f1' changed: remove attribute 'fast-access'"));
    }

    @Test
    public void changing_huge_require_restart() throws Exception {
        new Fixture("field f1 type string { indexing: attribute }",
                "field f1 type string { indexing: attribute \n attribute: huge }").
                assertValidation(newRestartAction(
                        "Field 'f1' changed: add attribute 'huge'"));
    }

    @Test
    public void changing_dense_posting_list_threshold_require_restart() throws Exception {
        new Fixture(
                "field f1 type predicate { indexing: attribute \n index { arity: 8 \n dense-posting-list-threshold: 0.2 } }",
                "field f1 type predicate { indexing: attribute \n index { arity: 8 \n dense-posting-list-threshold: 0.4 } }").
                assertValidation(newRestartAction(
                        "Field 'f1' changed: change property 'dense-posting-list-threshold' from '0.2' to '0.4'"));
    }

    @Test
    public void removing_attribute_aspect_from_index_field_is_ok() throws Exception {
        Fixture f = new Fixture("field f1 type string { indexing: index | attribute }",
                "field f1 type string { indexing: index }");
        f.assertValidation();
    }

    @Test
    public void removing_attribute_aspect_from_index_and_summary_field_is_ok() throws Exception {
        Fixture f = new Fixture("field f1 type string { indexing: index | attribute | summary }",
                "field f1 type string { indexing: index | summary }");
        f.assertValidation();
    }

    @Test
    public void changing_tensor_type_of_tensor_field_requires_refeed() throws Exception {
        new Fixture(
                "field f1 type tensor(x[]) { indexing: attribute \n attribute: tensor(x[100]) }",
                "field f1 type tensor(y[]) { indexing: attribute \n attribute: tensor(y[]) }")
                .assertValidation(newRefeedAction(
                        "tensor-type-change",
                        ValidationOverrides.empty,
                        "Field 'f1' changed: tensor type: 'tensor(x[100])' -> 'tensor(y[])'", Instant.now()));
    }

    @Test
    public void compatible_tensor_type_change_is_ok() throws Exception {
        new Fixture(
                "field f1 type tensor(x[],y[]) { indexing: attribute \n attribute: tensor(x[104], y[52]) }",
                "field f1 type tensor(x[200],y[]) { indexing: attribute \n attribute: tensor(x[104], y[52]) }")
                .assertValidation();
    }

    @Test
    public void incompatible_tensor_type_change_is_not_ok() throws Exception {
        try {
            new Fixture(
                    "field f1 type tensor(x[],y[]) { indexing: attribute \n attribute: tensor(x[104], y[52]) }",
                    "field f1 type tensor(x[100],y[]) { indexing: attribute \n attribute: tensor(x[104], y[52]) }")
                    .assertValidation();
            // Fix: the original test passed silently when no exception was thrown.
            fail("Expected IllegalArgumentException for incompatible tensor type change");
        } catch (IllegalArgumentException e) {
            assertEquals("For search 'test', field 'f1': Incompatible types. Expected tensor(x[100],y[]) for attribute 'f1', got tensor(x[104],y[52]).",
                    e.getMessage());
        }
    }

}
package netspy.components.config; /** * The Interface NetSpyGlobals. */ interface NetSpyGlobals { /** * The Interface PropertyKeys. */ interface PropertyKeys { /** The inbox path. */ String INBOX_PATH = "INBOX_PATH"; /** The blackword path. */ String BLACKWORD_PATH = "BLACKWORD_PATH"; /** The log path. */ String LOG_PATH = "LOG_PATH"; /** The quarantine path. */ String QUARANTINE_PATH = "QUARANTINE_PATH"; } /** * The Interface PropertyDefaultValues. */ interface PropertyDefaultValues { /** The inbox path relative. */ String INBOX_PATH_RELATIVE = "\\inbox"; /** The blackword path relative. */ String BLACKWORD_PATH_RELATIVE = "\\data\\blacklist.txt"; /** The log path relative. */ String LOG_PATH_RELATIVE = "\\logs"; /** The quarantine path relative. */ String QUARANTINE_PATH_RELATIVE = "\\quarantine"; /** The config properties path relative. */ String CONFIG_PROPERTIES_PATH_RELATIVE = "\\resources\\config.properties"; } }
package icesword.agent.service;

import icesword.agent.data.process.JvmItem;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import com.sun.tools.attach.VirtualMachine;

import sun.jvmstat.monitor.MonitorException;
import sun.jvmstat.monitor.MonitoredHost;
import sun.jvmstat.monitor.MonitoredVm;
import sun.jvmstat.monitor.MonitoredVmUtil;
import sun.jvmstat.monitor.VmIdentifier;
import sun.tools.jps.Arguments;

/**
 * Discovers candidate "worker" JVMs on the local host via the jvmstat (jps) API
 * and enriches each with its main class, args, VM flags and spec version.
 */
public class JpsMonitorService {

    /** jps-style flags: -l (long paths), -m (main args), -v (vm args). */
    public static Arguments JPS_ARGUMENTS = new Arguments(new String[] {"-lmv"});

    /**
     * Lists active local JVMs, filters out tooling/self processes, and returns the
     * ones whose main class matches filterWord (or all, when filterWord is null)
     * and whose details could be read successfully.
     *
     * @param filterWord substring the main class must contain, or null for no filter
     * @return successfully probed matching JVMs; empty on monitoring failure
     */
    public static List<JvmItem> findWorkerJVM(String filterWord) {
        List<JvmItem> result = new ArrayList<JvmItem>();
        try {
            MonitoredHost monitoredHost = MonitoredHost.getMonitoredHost(JPS_ARGUMENTS.hostId());
            Set<?> jvms = monitoredHost.activeVms();
            for (Iterator<?> jvm = jvms.iterator(); jvm.hasNext();) {
                JvmItem jvmItem = new JvmItem(((Integer) jvm.next()).intValue());
                buildJvmItem(monitoredHost, jvmItem);
                if (filterJvm(jvmItem, filterWord)) {
                    getJVMVersion(jvmItem);
                    // Only keep items whose probing fully succeeded.
                    if (jvmItem.status) {
                        result.add(jvmItem);
                    }
                }
            }
        } catch (MonitorException e) {
            // Best effort: an unreachable monitored host yields an empty result, not a crash.
            e.printStackTrace();
        }
        return result;
    }

    /**
     * Excludes JDK tooling, this agent itself and the jstat helper jar; when a
     * filter word is given, the main class must also contain it.
     */
    public static boolean filterJvm(JvmItem jvmItem, String filterWord) {
        // Fix: buildJvmItem may have failed before mainClass was populated; guard against NPE.
        if (jvmItem.mainClass == null) return false;
        if (jvmItem.mainClass.startsWith("sun.tools")) return false;
        if (jvmItem.mainClass.contains("icesword")) return false;
        if (jvmItem.mainClass.contains("jstat_plus.jar")) return false;
        if (filterWord != null) {
            return jvmItem.mainClass.contains(filterWord);
        }
        return true;
    }

    /**
     * Attaches to a monitored VM and fills in main class, main args, VM args and
     * VM flags according to the configured jps arguments. On any failure, marks
     * the item failed and records which probing step broke.
     */
    public static void buildJvmItem(MonitoredHost monitoredHost, JvmItem jvmItem) {
        MonitoredVm vm = null;
        String vmidString = "//" + jvmItem.pid + "?mode=r";
        // errorString tracks which step we are in so failures carry a useful message.
        String errorString = null;
        try {
            errorString = " -- process information unavailable";
            VmIdentifier id = new VmIdentifier(vmidString);
            vm = monitoredHost.getMonitoredVm(id, 0);
            errorString = " -- main class information unavailable";
            jvmItem.mainClass = "" + MonitoredVmUtil.mainClass(vm, JpsMonitorService.JPS_ARGUMENTS.showLongPaths());
            if (JpsMonitorService.JPS_ARGUMENTS.showMainArgs()) {
                errorString = " -- main args information unavailable";
                String mainArgs = MonitoredVmUtil.mainArgs(vm);
                if (mainArgs != null && mainArgs.length() > 0) {
                    jvmItem.mainArgs = mainArgs;
                }
            }
            if (JpsMonitorService.JPS_ARGUMENTS.showVmArgs()) {
                errorString = " -- jvm args information unavailable";
                String jvmArgs = MonitoredVmUtil.jvmArgs(vm);
                if (jvmArgs != null && jvmArgs.length() > 0) {
                    jvmItem.vmArgs = jvmArgs;
                }
            }
            if (JpsMonitorService.JPS_ARGUMENTS.showVmFlags()) {
                errorString = " -- jvm flags information unavailable";
                String jvmFlags = MonitoredVmUtil.jvmFlags(vm);
                if (jvmFlags != null && jvmFlags.length() > 0) {
                    jvmItem.vmFlags = jvmFlags;
                }
            }
            errorString = " -- detach failed";
            monitoredHost.detach(vm);
            vm = null; // detached successfully; nothing left to clean up
            errorString = null;
            jvmItem.simpleDesc();
        } catch (Exception e) {
            jvmItem.errorString = errorString;
            jvmItem.status = false;
            // Fix: the original leaked the MonitoredVm attachment when an intermediate
            // step threw. Detach best-effort; the primary error is already recorded.
            if (vm != null) {
                try {
                    monitoredHost.detach(vm);
                } catch (Exception ignored) {
                    // best effort cleanup only
                }
            }
        }
    }

    /**
     * Attaches to the target process and reads its java.vm.specification.version.
     * On any failure marks the item failed with a version-unavailable message.
     */
    public static void getJVMVersion(JvmItem jvmItem) {
        VirtualMachine vm = null;
        try {
            vm = VirtualMachine.attach("" + jvmItem.pid);
            String r = vm.getSystemProperties().getProperty("java.vm.specification.version");
            if (r == null) {
                // Fix: a missing property previously surfaced as an NPE on trim();
                // fail explicitly with the same observable outcome (status=false below).
                throw new IllegalStateException("java.vm.specification.version not present");
            }
            jvmItem.vmVersion = r.trim();
        } catch (Exception e) {
            jvmItem.status = false;
            jvmItem.errorString = "vm version information unavailable";
        } finally {
            // Fix: the original leaked the attachment when getSystemProperties() threw.
            if (vm != null) {
                try {
                    vm.detach();
                } catch (Exception ignored) {
                    // version (if any) was already captured; detach is best effort
                }
            }
        }
    }
}
package org.deeplearning4j.iterator; import lombok.Getter; import lombok.NonNull; import lombok.Setter; import org.deeplearning4j.iterator.bert.BertMaskedLMMasker; import org.deeplearning4j.iterator.bert.BertSequenceMasker; import org.deeplearning4j.text.tokenization.tokenizer.Tokenizer; import org.deeplearning4j.text.tokenization.tokenizerfactory.BertWordPieceTokenizerFactory; import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory; import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.buffer.DataType; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.dataset.api.MultiDataSet; import org.nd4j.linalg.dataset.api.MultiDataSetPreProcessor; import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.indexing.NDArrayIndex; import org.nd4j.linalg.primitives.Pair; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; /** * BertIterator is a MultiDataSetIterator for training BERT (Transformer) models in the following way:<br> * (a) Unsupervised - Masked language model task (no sentence matching task is implemented thus far)<br> * (b) Supervised - For sequence classification (i.e., 1 label per sequence, typically used for fine tuning)<br> * The task can be specified using {@link Task}. 
* <br> * <b>Example for unsupervised training:</b><br> * <pre> * {@code * BertWordPieceTokenizerFactory t = new BertWordPieceTokenizerFactory(pathToVocab); * BertIterator b = BertIterator.builder() * .tokenizer(t) * .lengthHandling(BertIterator.LengthHandling.FIXED_LENGTH, 16) * .minibatchSize(2) * .sentenceProvider(<sentence provider here>) * .featureArrays(BertIterator.FeatureArrays.INDICES_MASK) * .vocabMap(t.getVocab()) * .task(BertIterator.Task.UNSUPERVISED) * .masker(new BertMaskedLMMasker(new Random(12345), 0.2, 0.5, 0.5)) * .unsupervisedLabelFormat(BertIterator.UnsupervisedLabelFormat.RANK2_IDX) * .maskToken("[MASK]") * .build(); * } * </pre> * <br> * <b>Example for supervised (sequence classification - one label per sequence) training:</b><br> * <pre> * {@code * BertWordPieceTokenizerFactory t = new BertWordPieceTokenizerFactory(pathToVocab); * BertIterator b = BertIterator.builder() * .tokenizer(t) * .lengthHandling(BertIterator.LengthHandling.FIXED_LENGTH, 16) * .minibatchSize(2) * .sentenceProvider(new TestSentenceProvider()) * .featureArrays(BertIterator.FeatureArrays.INDICES_MASK) * .vocabMap(t.getVocab()) * .task(BertIterator.Task.SEQ_CLASSIFICATION) * .build(); * } * </pre> * This iterator supports numerous ways of configuring the behaviour with respect to the sequence lengths and data layout.<br> * <br> * <u><b>{@link LengthHandling} configuration:</b></u><br> * Determines how to handle variable-length sequence situations.<br> * <b>FIXED_LENGTH</b>: Always trim longer sequences to the specified length, and always pad shorter sequences to the specified length.<br> * <b>ANY_LENGTH</b>: Output length is determined by the length of the longest sequence in the minibatch. Shorter sequences within the * minibatch are zero padded and masked.<br> * <b>CLIP_ONLY</b>: For any sequences longer than the specified maximum, clip them. If the maximum sequence length in * a minibatch is shorter than the specified maximum, no padding will occur. 
For sequences that are shorter than the * maximum (within the current minibatch) they will be zero padded and masked.<br> *<br><br> * <u><b>{@link FeatureArrays} configuration:</b></u><br> * Determines what arrays should be included.<br> * <b>INDICES_MASK</b>: Indices array and mask array only, no segment ID array. Returns 1 feature array, 1 feature mask array (plus labels).<br> * <b>INDICES_MASK_SEGMENTID</b>: Indices array, mask array and segment ID array (which is all 0s for single segment tasks). Returns * 2 feature arrays (indices, segment ID) and 1 feature mask array (plus labels)<br> * <br> * <u><b>{@link UnsupervisedLabelFormat} configuration:</b></u><br> * Only relevant when the task is set to {@link Task#UNSUPERVISED}. Determine the format of the labels:<br> * <b>RANK2_IDX</b>: return int32 [minibatch, numTokens] array with entries being class numbers. Example use case: with sparse softmax loss functions.<br> * <b>RANK3_NCL</b>: return float32 [minibatch, numClasses, numTokens] array with 1-hot entries along dimension 1. Example use case: RnnOutputLayer, RnnLossLayer<br> * <b>RANK3_LNC</b>: return float32 [numTokens, minibatch, numClasses] array with 1-hot entries along dimension 2. 
This format is occasionally * used for some RNN layers in libraries such as TensorFlow, for example<br> * <br> */ public class BertIterator implements MultiDataSetIterator { public enum Task {UNSUPERVISED, SEQ_CLASSIFICATION} public enum LengthHandling {FIXED_LENGTH, ANY_LENGTH, CLIP_ONLY} public enum FeatureArrays {INDICES_MASK, INDICES_MASK_SEGMENTID} public enum UnsupervisedLabelFormat {RANK2_IDX, RANK3_NCL, RANK3_LNC} protected Task task; protected TokenizerFactory tokenizerFactory; protected int maxTokens = -1; protected int minibatchSize = 32; protected boolean padMinibatches = false; @Getter @Setter protected MultiDataSetPreProcessor preProcessor; protected LabeledSentenceProvider sentenceProvider = null; protected LengthHandling lengthHandling; protected FeatureArrays featureArrays; protected Map<String,Integer> vocabMap; //TODO maybe use Eclipse ObjectIntHashMap or similar for fewer objects? protected BertSequenceMasker masker = null; protected UnsupervisedLabelFormat unsupervisedLabelFormat = null; protected String maskToken; protected String prependToken; protected List<String> vocabKeysAsList; protected BertIterator(Builder b){ this.task = b.task; this.tokenizerFactory = b.tokenizerFactory; this.maxTokens = b.maxTokens; this.minibatchSize = b.minibatchSize; this.padMinibatches = b.padMinibatches; this.preProcessor = b.preProcessor; this.sentenceProvider = b.sentenceProvider; this.lengthHandling = b.lengthHandling; this.featureArrays = b.featureArrays; this.vocabMap = b.vocabMap; this.masker = b.masker; this.unsupervisedLabelFormat = b.unsupervisedLabelFormat; this.maskToken = b.maskToken; this.prependToken = b.prependToken; } @Override public boolean hasNext() { return sentenceProvider.hasNext(); } @Override public MultiDataSet next() { return next(minibatchSize); } @Override public void remove() { throw new UnsupportedOperationException("Not supported"); } @Override public MultiDataSet next(int num) { Preconditions.checkState(hasNext(), "No next 
element available"); List<Pair<String,String>> list = new ArrayList<>(num); int count = 0; if(sentenceProvider != null){ while(sentenceProvider.hasNext() && count++ < num) { list.add(sentenceProvider.nextSentence()); } } else { //TODO - other types of iterators... throw new UnsupportedOperationException("Labelled sentence provider is null and no other iterator types have yet been implemented"); } //Get and tokenize the sentences for this minibatch List<Pair<List<String>, String>> tokenizedSentences = new ArrayList<>(num); int longestSeq = -1; for(Pair<String,String> p : list){ List<String> tokens = tokenizeSentence(p.getFirst()); tokenizedSentences.add(new Pair<>(tokens, p.getSecond())); longestSeq = Math.max(longestSeq, tokens.size()); } //Determine output array length... int outLength; switch (lengthHandling){ case FIXED_LENGTH: outLength = maxTokens; break; case ANY_LENGTH: outLength = longestSeq; break; case CLIP_ONLY: outLength = Math.min(maxTokens, longestSeq); break; default: throw new RuntimeException("Not implemented length handling mode: " + lengthHandling); } int mb = tokenizedSentences.size(); int mbPadded = padMinibatches ? minibatchSize : mb; int[][] outIdxs = new int[mbPadded][outLength]; int[][] outMask = new int[mbPadded][outLength]; for( int i=0; i<tokenizedSentences.size(); i++ ){ Pair<List<String>,String> p = tokenizedSentences.get(i); List<String> t = p.getFirst(); for( int j=0; j<outLength && j<t.size(); j++ ){ Preconditions.checkState(vocabMap.containsKey(t.get(j)), "Unknown token encontered: token \"%s\" is not in vocabulary", t.get(j)); int idx = vocabMap.get(t.get(j)); outIdxs[i][j] = idx; outMask[i][j] = 1; } } //Create actual arrays. 
Indices, mask, and optional segment ID INDArray outIdxsArr = Nd4j.createFromArray(outIdxs); INDArray outMaskArr = Nd4j.createFromArray(outMask); INDArray outSegmentIdArr; INDArray[] f; INDArray[] fm; if(featureArrays == FeatureArrays.INDICES_MASK_SEGMENTID){ //For now: always segment index 0 (only single s sequence input supported) outSegmentIdArr = Nd4j.zeros(DataType.INT, mbPadded, outLength); f = new INDArray[]{outIdxsArr, outSegmentIdArr}; fm = new INDArray[]{outMaskArr, null}; } else { f = new INDArray[]{outIdxsArr}; fm = new INDArray[]{outMaskArr}; } INDArray[] l = new INDArray[1]; INDArray[] lm; if(task == Task.SEQ_CLASSIFICATION){ //Sequence classification task: output is 2d, one-hot, shape [minibatch, numClasses] int numClasses; int[] classLabels = new int[mbPadded]; if(sentenceProvider != null){ numClasses = sentenceProvider.numLabelClasses(); List<String> labels = sentenceProvider.allLabels(); for(int i=0; i<mb; i++ ){ String lbl = tokenizedSentences.get(i).getRight(); classLabels[i] = labels.indexOf(lbl); Preconditions.checkState(classLabels[i] >= 0, "Provided label \"%s\" for sentence does not exist in set of classes/categories", lbl); } } else { throw new RuntimeException(); } l[0] = Nd4j.create(DataType.FLOAT, mbPadded, numClasses); for( int i=0; i<mb; i++ ){ l[0].putScalar(i, classLabels[i], 1.0); } lm = null; if(padMinibatches && mb != mbPadded){ INDArray a = Nd4j.zeros(DataType.FLOAT, mbPadded, 1); lm = new INDArray[]{a}; a.get(NDArrayIndex.interval(0, mb), NDArrayIndex.all()).assign(1); } } else if(task == Task.UNSUPERVISED){ //Unsupervised, masked language model task //Output is either 2d, or 3d depending on settings if(vocabKeysAsList == null){ String[] arr = new String[vocabMap.size()]; for(Map.Entry<String,Integer> e : vocabMap.entrySet()){ arr[e.getValue()] = e.getKey(); } vocabKeysAsList = Arrays.asList(arr); } int vocabSize = vocabMap.size(); INDArray labelArr; INDArray lMask = Nd4j.zeros(DataType.INT, mbPadded, outLength); 
if(unsupervisedLabelFormat == UnsupervisedLabelFormat.RANK2_IDX){ labelArr = Nd4j.create(DataType.INT, mbPadded, outLength); } else if(unsupervisedLabelFormat == UnsupervisedLabelFormat.RANK3_NCL){ labelArr = Nd4j.create(DataType.FLOAT, mbPadded, vocabSize, outLength); } else if(unsupervisedLabelFormat == UnsupervisedLabelFormat.RANK3_LNC){ labelArr = Nd4j.create(DataType.FLOAT, outLength, mbPadded, vocabSize); } else { throw new IllegalStateException("Unknown unsupervised label format: " + unsupervisedLabelFormat); } for( int i=0; i<mb; i++ ){ List<String> tokens = tokenizedSentences.get(i).getFirst(); Pair<List<String>,boolean[]> p = masker.maskSequence(tokens, maskToken, vocabKeysAsList); List<String> maskedTokens = p.getFirst(); boolean[] predictionTarget = p.getSecond(); int seqLen = Math.min(predictionTarget.length, outLength); for(int j=0; j<seqLen; j++ ){ if(predictionTarget[j]){ String oldToken = tokenizedSentences.get(i).getFirst().get(j); //This is target int targetTokenIdx = vocabMap.get(oldToken); if(unsupervisedLabelFormat == UnsupervisedLabelFormat.RANK2_IDX){ labelArr.putScalar(i, j, targetTokenIdx); } else if(unsupervisedLabelFormat == UnsupervisedLabelFormat.RANK3_NCL){ labelArr.putScalar(i, j, targetTokenIdx, 1.0); } else if(unsupervisedLabelFormat == UnsupervisedLabelFormat.RANK3_LNC){ labelArr.putScalar(j, i, targetTokenIdx, 1.0); } lMask.putScalar(i, j, 1.0); //Also update previously created feature label indexes: String newToken = maskedTokens.get(j); int newTokenIdx = vocabMap.get(newToken); outIdxsArr.putScalar(i,j,newTokenIdx); } } } l[0] = labelArr; lm = new INDArray[1]; lm[0] = lMask; } else { throw new IllegalStateException("Task not yet implemented: " + task); } org.nd4j.linalg.dataset.MultiDataSet mds = new org.nd4j.linalg.dataset.MultiDataSet(f, l, fm, lm); if(preProcessor != null) preProcessor.preProcess(mds); return mds; } private List<String> tokenizeSentence(String sentence) { Tokenizer t = tokenizerFactory.create(sentence); 
List<String> tokens = new ArrayList<>(); if(prependToken != null) tokens.add(prependToken); while (t.hasMoreTokens()) { String token = t.nextToken(); tokens.add(token); } return tokens; } @Override public boolean resetSupported() { return true; } @Override public boolean asyncSupported() { return true; } @Override public void reset() { if(sentenceProvider != null){ sentenceProvider.reset(); } } public static Builder builder(){ return new Builder(); } public static class Builder { protected Task task; protected TokenizerFactory tokenizerFactory; protected LengthHandling lengthHandling = LengthHandling.FIXED_LENGTH; protected int maxTokens = -1; protected int minibatchSize = 32; protected boolean padMinibatches = false; protected MultiDataSetPreProcessor preProcessor; protected LabeledSentenceProvider sentenceProvider = null; protected FeatureArrays featureArrays = FeatureArrays.INDICES_MASK_SEGMENTID; protected Map<String,Integer> vocabMap; //TODO maybe use Eclipse ObjectIntHashMap for fewer objects? protected BertSequenceMasker masker = new BertMaskedLMMasker(); protected UnsupervisedLabelFormat unsupervisedLabelFormat; protected String maskToken; protected String prependToken; /** * Specify the {@link Task} the iterator should be set up for. See {@link BertIterator} for more details. */ public Builder task(Task task){ this.task = task; return this; } /** * Specify the TokenizerFactory to use. * For BERT, typically {@link org.deeplearning4j.text.tokenization.tokenizerfactory.BertWordPieceTokenizerFactory} * is used */ public Builder tokenizer(TokenizerFactory tokenizerFactory){ this.tokenizerFactory = tokenizerFactory; return this; } /** * Specifies how the sequence length of the output data should be handled. See {@link BertIterator} for more details. 
* @param lengthHandling Length handling * @param maxLength Not used if LengthHandling is set to {@link LengthHandling#ANY_LENGTH} * @return */ public Builder lengthHandling(@NonNull LengthHandling lengthHandling, int maxLength){ this.lengthHandling = lengthHandling; this.maxTokens = maxLength; return this; } /** * Minibatch size to use (number of examples to train on for each iteration) * See also: {@link #padMinibatches} * @param minibatchSize Minibatch size */ public Builder minibatchSize(int minibatchSize){ this.minibatchSize = minibatchSize; return this; } /** * Default: false (disabled)<br> * If the dataset is not an exact multiple of the minibatch size, should we pad the smaller final minibatch?<br> * For example, if we have 100 examples total, and 32 minibatch size, the following number of examples will be returned * for subsequent calls of next() in the one epoch:<br> * padMinibatches = false (default): 32, 32, 32, 4.<br> * padMinibatches = true: 32, 32, 32, 32 (note: the last minibatch will have 4 real examples, and 28 masked out padding examples).<br> * Both options should result in exactly the same model. However, some BERT implementations may require exactly an * exact number of examples in all minibatches to function. */ public Builder padMinibatches(boolean padMinibatches){ this.padMinibatches = padMinibatches; return this; } /** * Set the preprocessor to be used on the MultiDataSets before returning them. Default: none (null) */ public Builder preProcessor(MultiDataSetPreProcessor preProcessor){ this.preProcessor = preProcessor; return this; } /** * Specify the source of the data for classification. Can also be used for unsupervised learning; in the unsupervised * use case, the labels will be ignored. */ public Builder sentenceProvider(LabeledSentenceProvider sentenceProvider){ this.sentenceProvider = sentenceProvider; return this; } /** * Specify what arrays should be returned. See {@link BertIterator} for more details. 
*/ public Builder featureArrays(FeatureArrays featureArrays){ this.featureArrays = featureArrays; return this; } /** * Provide the vocabulary as a map. Keys are the words in the vocabulary, and values are the indices of those * words. For indices, they should be in range 0 to vocabMap.size()-1 inclusive.<br> * If using {@link BertWordPieceTokenizerFactory}, * this can be obtained using {@link BertWordPieceTokenizerFactory#getVocab()} */ public Builder vocabMap(Map<String,Integer> vocabMap){ this.vocabMap = vocabMap; return this; } /** * Used only for unsupervised training (i.e., when task is set to {@link Task#UNSUPERVISED} for learning a * masked language model. This can be used to customize how the masking is performed.<br> * Default: {@link BertMaskedLMMasker} */ public Builder masker(BertSequenceMasker masker){ this.masker = masker; return this; } /** * Used only for unsupervised training (i.e., when task is set to {@link Task#UNSUPERVISED} for learning a * masked language model. Used to specify the format that the labels should be returned in. * See {@link BertIterator} for more details. */ public Builder unsupervisedLabelFormat(UnsupervisedLabelFormat labelFormat){ this.unsupervisedLabelFormat = labelFormat; return this; } /** * Used only for unsupervised training (i.e., when task is set to {@link Task#UNSUPERVISED} for learning a * masked language model. This specifies the token (such as "[MASK]") that should be used when a value is masked out. 
* Note that this is passed to the {@link BertSequenceMasker} defined by {@link #masker(BertSequenceMasker)} hence * the exact behaviour will depend on what masker is used.<br> * Note that this must be in the vocabulary map set in {@link #vocabMap} */ public Builder maskToken(String maskToken){ this.maskToken = maskToken; return this; } /** * Prepend the specified token to the sequences, when doing supervised training.<br> * i.e., any token sequences will have this added at the start.<br> * Some BERT/Transformer models may need this - for example sequences starting with a "[CLS]" token.<br> * No token is prepended by default. * * @param prependToken The token to start each sequence with (null: no token will be prepended) */ public Builder prependToken(String prependToken){ this.prependToken = prependToken; return this; } public BertIterator build(){ Preconditions.checkState(task != null, "No task has been set. Use .task(BertIterator.Task.X) to set the task to be performed"); Preconditions.checkState(tokenizerFactory != null, "No tokenizer factory has been set. A tokenizer factory (such as BertWordPieceTokenizerFactory) is required"); Preconditions.checkState(vocabMap != null, "Cannot create iterator: No vocabMap has been set. Use Builder.vocabMap(Map<String,Integer>) to set"); Preconditions.checkState(task != Task.UNSUPERVISED || masker != null, "If task is UNSUPERVISED training, a masker must be set via masker(BertSequenceMasker) method"); Preconditions.checkState(task != Task.UNSUPERVISED || unsupervisedLabelFormat != null, "If task is UNSUPERVISED training, a label format must be set via masker(BertSequenceMasker) method"); Preconditions.checkState(task != Task.UNSUPERVISED || maskToken != null, "If task is UNSUPERVISED training, the mask token in the vocab (such as \"[MASK]\" must be specified"); return new BertIterator(this); } } }
package org.basex.query.func;

import static org.basex.data.DataText.*;
import static org.basex.query.util.Err.*;
import static org.basex.util.Token.*;

import java.io.IOException;
import java.text.*;
import java.util.Date;
import java.util.TimeZone;

import org.basex.core.Prop;
import org.basex.core.User;
import org.basex.core.cmd.Info;
import org.basex.core.cmd.InfoDB;
import org.basex.core.cmd.Rename;
import org.basex.data.Data;
import org.basex.data.MetaData;
import org.basex.index.IndexToken.IndexType;
import org.basex.index.Resources;
import org.basex.io.IO;
import org.basex.io.IOFile;
import org.basex.io.MimeTypes;
import org.basex.io.in.DataInput;
import org.basex.io.out.ArrayOutput;
import org.basex.io.serial.Serializer;
import org.basex.io.serial.SerializerException;
import org.basex.query.QueryContext;
import org.basex.query.QueryException;
import org.basex.query.expr.Expr;
import org.basex.query.expr.IndexAccess;
import org.basex.query.item.ANode;
import org.basex.query.item.B64Stream;
import org.basex.query.item.Bln;
import org.basex.query.item.DBNode;
import org.basex.query.item.DBNodeSeq;
import org.basex.query.item.Empty;
import org.basex.query.item.FAttr;
import org.basex.query.item.FElem;
import org.basex.query.item.FNode;
import org.basex.query.item.FTxt;
import org.basex.query.item.Int;
import org.basex.query.item.Item;
import org.basex.query.item.QNm;
import org.basex.query.item.Str;
import org.basex.query.item.Value;
import org.basex.query.iter.Iter;
import org.basex.query.iter.NodeIter;
import org.basex.query.iter.ValueIter;
import org.basex.query.path.NameTest;
import org.basex.query.up.primitives.DBAdd;
import org.basex.query.up.primitives.DBDelete;
import org.basex.query.up.primitives.DBOptimize;
import org.basex.query.up.primitives.DBRename;
import org.basex.query.up.primitives.DBStore;
import org.basex.query.up.primitives.DeleteNode;
import org.basex.query.up.primitives.ReplaceValue;
import org.basex.query.util.IndexContext;
import org.basex.util.*;
import org.basex.util.list.IntList;
import org.basex.util.list.StringList;
import org.basex.util.list.TokenList;

/**
 * Implementations of the XQuery database functions ({@code db:*}), dispatched
 * on {@code sig} (the {@link Function} signature of the call).
 */
public final class FNDb extends StandardFunc {
  /**
   * Date format used for xs:dateTime generation.
   * NOTE: {@link SimpleDateFormat} is not thread-safe; all access must go
   * through the synchronized helpers {@link #formatDate(long)} and
   * {@link #parse(String)}.
   */
  static final SimpleDateFormat DATE_FORMAT;
  /** Resource element name. */
  static final QNm SYSTEM = new QNm("system");
  /** Resource element name. */
  static final QNm DATABASE = new QNm("database");
  /** Resource element name. */
  static final QNm RESOURCE = new QNm("resource");
  /** Resource element name. */
  static final QNm RESOURCES = new QNm("resources");
  /** Path element name. */
  static final QNm PATH = new QNm("path");
  /** Raw element name. */
  static final QNm RAW = new QNm("raw");
  /** Size element name. */
  static final QNm SIZE = new QNm("size");
  /** Content type element name. */
  static final QNm CTYPE = new QNm("content-type");
  /** Modified date element name. */
  static final QNm MDATE = new QNm("modified-date");
  /** MIME type application/xml. */
  static final byte[] APP_XML = token(MimeTypes.APP_XML);

  static {
    DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
  }

  /**
   * Constructor.
   * @param ii input info
   * @param f function definition
   * @param e arguments
   */
  public FNDb(final InputInfo ii, final Function f, final Expr... e) {
    super(ii, f, e);
  }

  @Override
  public Iter iter(final QueryContext ctx) throws QueryException {
    switch(sig) {
      case _DB_OPEN:         return open(ctx).iter();
      case _DB_TEXT:         return text(ctx);
      case _DB_ATTRIBUTE:    return attribute(ctx);
      case _DB_FULLTEXT:     return fulltext(ctx);
      case _DB_LIST:         return list(ctx);
      case _DB_LIST_DETAILS: return listDetails(ctx);
      case _DB_NODE_ID:      return node(ctx, true);
      case _DB_NODE_PRE:     return node(ctx, false);
      default:               return super.iter(ctx);
    }
  }

  @Override
  public Value value(final QueryContext ctx) throws QueryException {
    switch(sig) {
      case _DB_OPEN: return open(ctx);
      default:       return super.value(ctx);
    }
  }

  @Override
  public Item item(final QueryContext ctx, final InputInfo ii)
      throws QueryException {
    switch(sig) {
      case _DB_EVENT:        return event(ctx);
      case _DB_OPEN_ID:      return open(ctx, true);
      case _DB_OPEN_PRE:     return open(ctx, false);
      case _DB_SYSTEM:       return system(ctx);
      case _DB_INFO:         return info(ctx);
      case _DB_ADD:          return add(ctx);
      case _DB_DELETE:       return delete(ctx);
      case _DB_RENAME:       return rename(ctx);
      case _DB_REPLACE:      return replace(ctx);
      case _DB_OPTIMIZE:     return optimize(ctx);
      case _DB_STORE:        return store(ctx);
      case _DB_RETRIEVE:     return retrieve(ctx);
      case _DB_IS_RAW:       return isRaw(ctx);
      case _DB_EXISTS:       return exists(ctx);
      case _DB_IS_XML:       return isXML(ctx);
      case _DB_CONTENT_TYPE: return contentType(ctx);
      default:               return super.item(ctx, ii);
    }
  }

  /**
   * Performs the open function.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Value open(final QueryContext ctx) throws QueryException {
    final Data data = data(0, ctx);
    final String path = expr.length < 2 ? "" : path(1, ctx);
    return DBNodeSeq.get(data.resources.docs(path), data, true, path.isEmpty());
  }

  /**
   * Performs the open-id and open-pre function.
   * @param ctx query context
   * @param id id flag
   * @return result
   * @throws QueryException query exception
   */
  private DBNode open(final QueryContext ctx, final boolean id)
      throws QueryException {
    final Data data = data(0, ctx);
    final int v = (int) checkItr(expr[1], ctx);
    final int pre = id ? data.pre(v) : v;
    if(pre < 0 || pre >= data.meta.size) IDINVALID.thrw(input, this, v);
    return new DBNode(data, pre);
  }

  /**
   * Performs the text function.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter text(final QueryContext ctx) throws QueryException {
    final IndexContext ic = new IndexContext(ctx, data(0, ctx), null, true);
    return new IndexAccess(input, expr[1], IndexType.TEXT, ic).iter(ctx);
  }

  /**
   * Performs the attribute function.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter attribute(final QueryContext ctx) throws QueryException {
    final IndexContext ic = new IndexContext(ctx, data(0, ctx), null, true);
    final IndexAccess ia = new IndexAccess(
        input, expr[1], IndexType.ATTRIBUTE, ic);

    // return iterator if no name test is specified
    if(expr.length < 3) return ia.iter(ctx);

    // parse and compile the name test
    final Item name = checkNoEmpty(expr[2].item(ctx, input));
    final QNm nm = new QNm(checkStr(name, ctx), ctx);
    if(!nm.hasPrefix()) nm.uri(ctx.sc.ns.uri(EMPTY));

    final NameTest nt = new NameTest(nm, NameTest.Mode.STD, true);
    // no results expected: return empty sequence
    if(!nt.comp(ctx)) return Empty.ITER;

    // wrap iterator with name test
    return new Iter() {
      final NodeIter ir = ia.iter(ctx);
      @Override
      public ANode next() throws QueryException {
        ANode n;
        // skip index hits whose attribute name does not match the test
        while((n = ir.next()) != null && !nt.eq(n));
        return n;
      }
    };
  }

  /**
   * Performs the fulltext function.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter fulltext(final QueryContext ctx) throws QueryException {
    return FNFt.search(data(0, ctx), checkStr(expr[1], ctx), this, ctx);
  }

  /**
   * Performs the list function.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter list(final QueryContext ctx) throws QueryException {
    final TokenList tl = new TokenList();
    final int el = expr.length;
    if(el == 0) {
      // no argument: list all databases
      for(final String s : ctx.context.databases().listDBs()) tl.add(s);
    } else {
      final Data data = data(0, ctx);
      final String path = string(el == 1 ? EMPTY : checkStr(expr[1], ctx));
      // add xml resources
      final Resources res = data.resources;
      final IntList il = res.docs(path);
      final int is = il.size();
      for(int i = 0; i < is; i++) tl.add(data.text(il.get(i), true));
      // add binary resources
      for(final byte[] file : res.binaries(path)) tl.add(file);
    }
    tl.sort(!Prop.WIN);

    return new Iter() {
      int pos;
      @Override
      public Str get(final long i) { return Str.get(tl.get((int) i)); }
      @Override
      public Str next() { return pos < size() ? get(pos++) : null; }
      @Override
      public boolean reset() { pos = 0; return true; }
      @Override
      public long size() { return tl.size(); }
    };
  }

  /**
   * Performs the list-details function.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter listDetails(final QueryContext ctx) throws QueryException {
    if(expr.length == 0) return listDBs(ctx);

    final Data data = data(0, ctx);
    final String path = string(expr.length == 1 ? EMPTY : checkStr(expr[1], ctx));
    final IntList il = data.resources.docs(path);
    final TokenList tl = data.resources.binaries(path);

    return new Iter() {
      final int is = il.size(), ts = tl.size();
      int ip, tp;
      @Override
      public ANode get(final long i) throws QueryException {
        // indices [0, is) address XML documents, [is, is + ts) binaries
        if(i < is) {
          final byte[] pt = data.text(il.get((int) i), true);
          return resource(pt, false, 0, APP_XML, data.meta.time);
        }
        if(i < is + ts) {
          final byte[] pt = tl.get((int) i - is);
          final IO io = data.meta.binary(string(pt));
          return resource(pt, true, io.length(),
              token(MimeTypes.get(io.path())), io.timeStamp());
        }
        return null;
      }
      @Override
      public ANode next() throws QueryException {
        return ip < is ? get(ip++) : tp < ts ? get(ip + tp++) : null;
      }
      @Override
      public boolean reset() { ip = 0; tp = 0; return true; }
      @Override
      // BUGFIX: total number of results; was "ip + is", which varied with
      // the current iteration position instead of returning a constant size.
      public long size() { return is + ts; }
    };
  }

  /**
   * Performs the list-details for databases function.
   * @param ctx query context
   * @return iterator
   */
  private Iter listDBs(final QueryContext ctx) {
    final StringList sl = ctx.context.databases().listDBs();
    return new Iter() {
      int pos;
      @Override
      public ANode get(final long i) throws QueryException {
        final FElem res = new FElem(DATABASE);
        final String name = sl.get((int) i);
        final MetaData meta = new MetaData(name, ctx.context);
        DataInput di = null;
        try {
          di = new DataInput(meta.dbfile(DATAINF));
          meta.read(di);
          res.add(new FAttr(RESOURCES, token(meta.ndocs)));
          final String tstamp = formatDate(meta.dbtime());
          res.add(new FAttr(MDATE, token(tstamp)));
          if(ctx.context.perm(User.CREATE, meta))
            res.add(new FAttr(PATH, token(meta.original)));
          res.add(new FTxt(token(name)));
        } catch(final IOException ex) {
          NODB.thrw(input, name);
        } finally {
          if(di != null) try {
            di.close();
          } catch(final IOException ignored) {
            // best-effort close of the metadata stream; nothing to recover
          }
        }
        return res;
      }
      @Override
      public ANode next() throws QueryException {
        return pos < size() ? get(pos++) : null;
      }
      @Override
      public boolean reset() { pos = 0; return true; }
      @Override
      public long size() { return sl.size(); }
    };
  }

  /**
   * Performs the is-raw function.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private Bln isRaw(final QueryContext ctx) throws QueryException {
    final Data data = data(0, ctx);
    final String path = path(1, ctx);
    final IOFile io = data.meta.binary(path);
    return Bln.get(io.exists() && !io.isDir());
  }

  /**
   * Performs the exists function.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private Bln exists(final QueryContext ctx) throws QueryException {
    try {
      final Data data = data(0, ctx);
      if(expr.length == 1) return Bln.TRUE;
      // check if raw file or XML document exists
      final String path = path(1, ctx);
      final IOFile io = data.meta.binary(path);
      return Bln.get(io.exists() && !io.isDir() ||
          data.resources.doc(path) != -1);
    } catch(final QueryException ex) {
      // a missing database is reported as "false", not as an error
      if(ex.err() == NODB) return Bln.FALSE;
      throw ex;
    }
  }

  /**
   * Performs the is-xml function.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private Bln isXML(final QueryContext ctx) throws QueryException {
    final Data data = data(0, ctx);
    final String path = path(1, ctx);
    return Bln.get(data.resources.doc(path) != -1);
  }

  /**
   * Performs the content-type function.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private Str contentType(final QueryContext ctx) throws QueryException {
    final Data data = data(0, ctx);
    final String path = path(1, ctx);
    if(data.resources.doc(path) != -1) return Str.get(MimeTypes.APP_XML);
    final IOFile io = data.meta.binary(path);
    if(!io.exists() || io.isDir()) RESFNF.thrw(input, path);
    return Str.get(MimeTypes.get(path));
  }

  /**
   * Create a <code>&lt;resource/&gt;</code> node.
   * @param path path
   * @param raw is the resource a raw file
   * @param size size
   * @param ctype content type
   * @param mdate modified date
   * @return <code>&lt;resource/&gt;</code> node
   */
  static FNode resource(final byte[] path, final boolean raw, final long size,
      final byte[] ctype, final long mdate) {
    final String tstamp = formatDate(mdate);
    final FElem res = new FElem(RESOURCE).
        add(new FTxt(path)).
        add(new FAttr(RAW, token(raw))).
        add(new FAttr(CTYPE, ctype)).
        add(new FAttr(MDATE, token(tstamp)));
    // the size attribute is only relevant for raw (binary) resources
    return raw ? res.add(new FAttr(SIZE, token(size))) : res;
  }

  /**
   * Performs the system function.
   * @param ctx query context
   * @return node
   */
  private static ANode system(final QueryContext ctx) {
    return toNode(Info.info(ctx.context), SYSTEM);
  }

  /**
   * Performs the info function.
   * @param ctx query context
   * @return node
   * @throws QueryException query exception
   */
  private ANode info(final QueryContext ctx) throws QueryException {
    final Data data = data(0, ctx);
    final boolean create = ctx.context.user.perm(User.CREATE);
    return toNode(InfoDB.db(data.meta, false, true, create), DATABASE);
  }

  /**
   * Converts the specified info string to a node fragment.
   * @param root name of the root node
   * @param str string to be converted
   * @return node
   */
  private static ANode toNode(final String str, final QNm root) {
    final FElem top = new FElem(root);
    FElem node = null;
    for(final String l : str.split("\r\n?|\n")) {
      final String[] cols = l.split(": ", 2);
      if(cols[0].isEmpty()) continue;

      // element name: key with spaces/dashes stripped, lower-cased
      final String name = cols[0].replaceAll(" |-", "");
      final FElem n = new FElem(new QNm(lc(token(name))));
      // indented lines are nested below the last top-level entry
      if(cols[0].startsWith(" ")) {
        if(node != null) node.add(n);
        if(!cols[1].isEmpty()) n.add(new FTxt(token(cols[1])));
      } else {
        node = n;
        top.add(n);
      }
    }
    return top;
  }

  /**
   * Performs the add function.
   * @param ctx query context
   * @return {@code null}
   * @throws QueryException query exception
   */
  private Item add(final QueryContext ctx) throws QueryException {
    checkWrite(ctx);
    final Data data = data(0, ctx);
    final Item it = checkItem(expr[1], ctx);
    final String path = expr.length < 3 ? "" : path(2, ctx);
    ctx.updates.add(new DBAdd(data, input, it, path, ctx.context), ctx);
    return null;
  }

  /**
   * Performs the replace function.
   * @param ctx query context
   * @return {@code null}
   * @throws QueryException query exception
   */
  private Item replace(final QueryContext ctx) throws QueryException {
    checkWrite(ctx);
    final Data data = data(0, ctx);
    final String path = path(1, ctx);
    final Item doc = checkItem(expr[2], ctx);

    // collect all old documents
    final Resources res = data.resources;
    final int pre = res.doc(path);
    if(pre != -1) {
      // replacing more than one document is not allowed
      if(res.docs(path).size() != 1) DOCTRGMULT.thrw(input);
      ctx.updates.add(new DeleteNode(pre, data, input), ctx);
    }
    // delete binary resources
    final IOFile bin = data.meta.binary(path);
    if(bin != null) ctx.updates.add(new DBDelete(data, path, input), ctx);
    ctx.updates.add(new DBAdd(data, input, doc, path, ctx.context), ctx);

    final IOFile file = data.meta.binary(path);
    if(file != null && file.exists() && !file.isDir()) {
      final Item it = checkItem(doc, ctx);
      ctx.updates.add(new DBStore(data, token(path), it, input), ctx);
    }
    return null;
  }

  /**
   * Performs the delete function.
   * @param ctx query context
   * @return {@code null}
   * @throws QueryException query exception
   */
  private Item delete(final QueryContext ctx) throws QueryException {
    checkWrite(ctx);
    final Data data = data(0, ctx);
    final String path = path(1, ctx);

    // delete XML resources
    final IntList docs = data.resources.docs(path);
    for(int i = 0, is = docs.size(); i < is; i++) {
      ctx.updates.add(new DeleteNode(docs.get(i), data, input), ctx);
    }
    // delete raw resources
    final IOFile bin = data.meta.binary(path);
    if(bin == null) UPDBDELERR.thrw(input, path);
    ctx.updates.add(new DBDelete(data, path, input), ctx);
    return null;
  }

  /**
   * Performs the rename function.
   * @param ctx query context
   * @return {@code null}
   * @throws QueryException query exception
   */
  private Item rename(final QueryContext ctx) throws QueryException {
    checkWrite(ctx);
    final Data data = data(0, ctx);
    final String source = path(1, ctx);
    final String target = path(2, ctx);

    // the first step of the path should be the database name
    final IntList il = data.resources.docs(source);
    for(int i = 0, is = il.size(); i < is; i++) {
      final int pre = il.get(i);
      final String trg = Rename.target(data, pre, source, target);
      if(trg.isEmpty()) EMPTYPATH.thrw(input, this);
      ctx.updates.add(new ReplaceValue(pre, data, input, token(trg)), ctx);
    }
    // rename files
    final IOFile src = data.meta.binary(source);
    final IOFile trg = data.meta.binary(target);
    if(src == null || trg == null) UPDBRENAMEERR.thrw(input, src);
    ctx.updates.add(new DBRename(data, src.path(), trg.path(), input), ctx);
    return null;
  }

  /**
   * Performs the optimize function.
   * @param ctx query context
   * @return {@code null}
   * @throws QueryException query exception
   */
  private Item optimize(final QueryContext ctx) throws QueryException {
    checkWrite(ctx);
    final Data data = data(0, ctx);
    final boolean all = expr.length == 2 && checkBln(expr[1], ctx);
    ctx.updates.add(new DBOptimize(data, ctx.context, all, input), ctx);
    return null;
  }

  /**
   * Performs the store function.
   * @param ctx query context
   * @return {@code null}
   * @throws QueryException query exception
   */
  private Item store(final QueryContext ctx) throws QueryException {
    checkWrite(ctx);
    final Data data = data(0, ctx);
    final String path = path(1, ctx);
    final IOFile file = data.meta.binary(path);
    if(file == null || file.isDir()) RESINV.thrw(input, path);
    final Item it = checkItem(expr[2], ctx);
    ctx.updates.add(new DBStore(data, token(path), it, input), ctx);
    return null;
  }

  /**
   * Performs the retrieve function.
   * @param ctx query context
   * @return {@code null}
   * @throws QueryException query exception
   */
  private B64Stream retrieve(final QueryContext ctx) throws QueryException {
    final Data data = data(0, ctx);
    final String path = path(1, ctx);
    final IOFile file = data.meta.binary(path);
    if(file == null || !file.exists() || file.isDir())
      RESFNF.thrw(input, path);
    return new B64Stream(file, DBERR);
  }

  /**
   * Performs the node-pre and node-id function.
   * @param ctx query context
   * @param id id flag
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter node(final QueryContext ctx, final boolean id)
      throws QueryException {
    return new Iter() {
      final Iter ir = ctx.iter(expr[0]);
      @Override
      public Int next() throws QueryException {
        final Item it = ir.next();
        if(it == null) return null;
        final DBNode node = checkDBNode(it);
        return Int.get(id ? node.data.id(node.pre) : node.pre);
      }
    };
  }

  /**
   * Sends an event to the registered sessions.
   * @param ctx query context
   * @return event result
   * @throws QueryException query exception
   */
  private Item event(final QueryContext ctx) throws QueryException {
    final byte[] name = checkStr(expr[0], ctx);
    final ArrayOutput ao = new ArrayOutput();
    try {
      // run serialization
      final Serializer ser = Serializer.get(ao, ctx.serParams(true));
      final ValueIter ir = ctx.value(expr[1]).iter();
      for(Item it; (it = ir.next()) != null;) it.serialize(ser);
      ser.close();
    } catch(final SerializerException ex) {
      throw ex.getCause(input);
    } catch(final IOException ex) {
      SERANY.thrw(input, ex);
    }
    // throw exception if event is unknown
    if(!ctx.context.events.notify(ctx.context, name, ao.toArray())) {
      NOEVENT.thrw(input, name);
    }
    return null;
  }

  @Override
  public boolean isVacuous() {
    return sig == Function._DB_EVENT;
  }

  @Override
  public boolean uses(final Use u) {
    final boolean up =
        sig == Function._DB_ADD || sig == Function._DB_DELETE ||
        sig == Function._DB_RENAME || sig == Function._DB_REPLACE ||
        sig == Function._DB_OPTIMIZE || sig == Function._DB_STORE;
    return
      // skip evaluation at compile time
      u == Use.CTX && (
        sig == Function._DB_TEXT || sig == Function._DB_ATTRIBUTE ||
        sig == Function._DB_FULLTEXT || sig == Function._DB_EVENT || up) ||
      u == Use.UPD && up ||
      super.uses(u);
  }

  @Override
  public boolean iterable() {
    // index functions will always yield ordered and duplicate-free results
    return sig == Function._DB_OPEN || sig == Function._DB_TEXT ||
        sig == Function._DB_ATTRIBUTE || sig == Function._DB_FULLTEXT ||
        super.iterable();
  }

  /**
   * Formats the specified time as an xs:dateTime string.
   * Synchronizes on {@link #DATE_FORMAT}, as {@link SimpleDateFormat}
   * instances are not thread-safe and this one is shared statically.
   * @param ms time in milliseconds
   * @return formatted timestamp
   */
  private static String formatDate(final long ms) {
    synchronized(DATE_FORMAT) {
      return DATE_FORMAT.format(new Date(ms));
    }
  }

  /**
   * Parses the specified date and returns its time in milliseconds.
   * Returns {@code 0} (and logs the error) if it cannot be converted.
   * @param date date to be parsed
   * @return time in milliseconds, or {@code 0} on parse failure
   */
  public static long parse(final String date) {
    try {
      synchronized(DATE_FORMAT) {
        return DATE_FORMAT.parse(date).getTime();
      }
    } catch(final ParseException ex) {
      Util.errln(ex);
      return 0;
    }
  }

  /**
   * Returns the specified expression as normalized database path.
   * Throws an exception if the path is invalid.
   * @param i index of argument
   * @param ctx query context
   * @return normalized path
   * @throws QueryException query exception
   */
  private String path(final int i, final QueryContext ctx)
      throws QueryException {
    final String path = string(checkStr(expr[i], ctx));
    final String norm = MetaData.normPath(path);
    if(norm == null) RESINV.thrw(input, path);
    return norm;
  }
}
package roart.controller;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import roart.common.communication.factory.CommunicationFactory;
import roart.common.communication.model.Communication;
import roart.common.constants.CommunicationConstants;
import roart.common.constants.Constants;
import roart.common.constants.EurekaConstants;
import roart.common.constants.ServiceConstants;
import roart.common.util.JsonUtil;
import roart.common.util.ServiceConnectionUtil;
import roart.iclij.config.IclijConfig;
import roart.iclij.config.IclijXMLConfig;
import roart.iclij.model.component.ComponentInput;
import roart.iclij.service.IclijServiceParam;
import roart.iclij.service.IclijServiceResult;
import roart.common.controller.ServiceControllerOtherAbstract;

/**
 * Controller that dispatches incoming {@link Communication} requests to the
 * matching service handler and acknowledges the caller.
 */
public class ServiceControllerOther extends ServiceControllerOtherAbstract {

    /** Logger; replaces the previous ad-hoc System.out debug print. */
    private static final Logger log = LoggerFactory.getLogger(ServiceControllerOther.class);

    /**
     * Constructor.
     * @param myservices own service identifiers
     * @param services remote service identifiers
     * @param communications communication configuration
     * @param replyclass class used to deserialize replies
     */
    public ServiceControllerOther(String myservices, String services, String communications, Class<?> replyclass) {
        super(myservices, services, communications, replyclass);
    }

    /**
     * Handles an incoming request: dispatches on the requested service and,
     * if the parameter carries a web path, sends a reply.
     * @param param request payload
     * @param c communication describing the requested service
     */
    public void get(Object param, Communication c) {
        IclijServiceResult r = null;
        log.info("Service requested: {}", c.getService());
        switch (c.getService()) {
        case ServiceConstants.SIMFILTER:
            new Sim().method((String) param);
            break;
        default:
            // unknown services are ignored; a reply may still be sent below
            break;
        }
        if (param instanceof IclijServiceParam) {
            // NOTE(review): r is never populated, so the reply payload is always
            // null — this looks like a bare acknowledgement; confirm intended.
            sendReply(((IclijServiceParam) param).getWebpath(), c, r);
        }
    }
}
package org.cytoscape.ding.impl.cyannotator.annotations;

import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.cytoscape.ding.impl.DRenderingEngine;
import org.cytoscape.ding.impl.cyannotator.IllegalAnnotationStructureException;
import org.cytoscape.view.presentation.annotations.Annotation;
import org.cytoscape.view.presentation.annotations.GroupAnnotation;

/**
 * A group annotation: a composite annotation whose bounds are the union of its
 * member annotations. Location, rotation, resize, canvas and style operations
 * are propagated to every member.
 */
public class GroupAnnotationImpl extends AbstractAnnotation implements GroupAnnotation {

	// member annotations, in insertion order
	private List<DingAnnotation> annotations = new ArrayList<>();

	public GroupAnnotationImpl(DRenderingEngine re, Map<String, String> argMap) {
		super(re, processArgs(argMap));
	}

	// strips the canvas argument: groups are pinned to the foreground canvas
	private static Map<String, String> processArgs(Map<String, String> argMap) {
		argMap.remove(CANVAS);
		return argMap;
	}

	@Override
	public Class<? extends Annotation> getType() {
		return GroupAnnotation.class;
	}

	@Override
	public void setCanvas(String cnvs) {
		// do nothing, must be on the foreground canvas
	}

	/**
	 * Adds a member to this group, rejecting members already owned by another
	 * group. If adding the member would create a cycle in the group hierarchy,
	 * the addition is rolled back and the structure exception rethrown.
	 */
	@Override
	public void addMember(Annotation member) {
		if (member instanceof DingAnnotation) {
			var dMember = (DingAnnotation) member;
			if (dMember.getGroupParent() != null && dMember.getGroupParent() != this)
				throw new IllegalAnnotationStructureException("Annotation is already a member of another group.");
			if (!annotations.contains(dMember)) {
				annotations.add(dMember);
				dMember.setGroupParent(this);
				try {
					getCyAnnotator().checkCycle();
				} catch (IllegalAnnotationStructureException e) {
					// roll back the membership change before propagating
					annotations.remove(dMember);
					dMember.setGroupParent(null);
					throw e;
				}
			}
			// recompute this group's bounds from the new member set
			updateBounds();
			var bounds = getBounds();
			setLocation((int)bounds.getX(), (int)bounds.getY());
			setSize((int)bounds.getWidth(), (int)bounds.getHeight());
		}
	}

	/** Returns a defensive copy of the member list. */
	@Override
	public List<Annotation> getMembers() {
		return new ArrayList<>(annotations);
	}

	@Override
	public void removeMember(Annotation member) {
		if (member instanceof DingAnnotation) {
			DingAnnotation dMember = (DingAnnotation)member;
			if (annotations != null && annotations.contains(dMember)) {
				annotations.remove(dMember);
				dMember.setGroupParent(null);
			}
			updateBounds();
		}
	}

	@Override
	public void removeAnnotation() {
		// Remove all of our children
		for (var a : annotations)
			cyAnnotator.removeAnnotation(a);
		annotations.clear();
		// Now remove ourselves
		cyAnnotator.removeAnnotation(this);
		if (groupParent != null)
			groupParent.removeMember(this);
	}

	/**
	 * Serializes this group: the inherited arguments plus the type marker and a
	 * comma-separated list of member UUIDs (empty groups omit the member list).
	 */
	@Override
	public Map<String, String> getArgMap() {
		var argMap = super.getArgMap();
		argMap.put(TYPE, GroupAnnotation.class.getName());
		String members = "";
		if (annotations == null || annotations.size() == 0)
			return argMap;
		for (var a : annotations)
			members += a.getUUID().toString() + ",";
		// drop the trailing comma (the null check is redundant here but harmless)
		if (members != null && members.length() > 1)
			argMap.put(MEMBERS, members.substring(0, members.length() - 1));
		return argMap;
	}

	/**
	 * Just calls the same method on all the child annotations.
	 */
	@Override
	public void setStyle(Map<String, String> argMap) {
		if (argMap != null) {
			for (var a : annotations)
				a.setStyle(argMap);
		}
	}

	/**
	 * Moves the group by translating every member by the same delta; the group's
	 * own bounds are then recomputed from the members.
	 */
	@Override
	public void setLocation(double x, double y) {
		double deltaX = getX() - x;
		double deltaY = getY() - y;
		for (var child : annotations) {
			var x2 = child.getX() - deltaX;
			var y2 = child.getY() - deltaY;
			if (child.getX() != x2 || child.getY() != y2)
				child.setLocation(x2, y2);
		}
		updateBounds();
	}

	@Override
	public void resizeAnnotationRelative(Rectangle2D initialBounds, Rectangle2D outlineBounds) {
		// delegate the relative resize to every member, then refit our bounds
		for (var a : annotations)
			((AbstractAnnotation) a).resizeAnnotationRelative(initialBounds, outlineBounds);
		updateBounds();
	}

	@Override
	public void saveBounds() {
		super.saveBounds();
		for (var a : annotations)
			((AbstractAnnotation) a).saveBounds();
	}

	@Override
	public void changeCanvas(CanvasID canvasId) {
		// move the children first, then ourselves
		for (var a : annotations)
			a.changeCanvas(canvasId);
		super.changeCanvas(canvasId);
	}

	/**
	 * Rotates the group: each member's own rotation is incremented by the delta,
	 * and each member's center is revolved around the group's center by the same
	 * angle. Members already at the center (within 0.1px) are not displaced.
	 */
	@Override
	public void setRotation(double rotation) {
		// Get the current rotation
		var oldValue = this.rotation;
		super.setRotation(rotation);
		var deltaRotation = this.rotation - oldValue;
		var centerX = getCenter(this.getBounds()).getX();
		var centerY = getCenter(this.getBounds()).getY();
		// Change the rotation in each of our children
		for (var a : annotations) {
			a.setRotation(a.getRotation() + deltaRotation);
			var angle = deltaRotation;
			// Get the center of the annotation relative to the center of the union
			Rectangle2D bounds = a.getBounds();
			Point2D aCenter = getCenter(bounds);
			double x = aCenter.getX() - centerX;
			double y = aCenter.getY() - centerY;
			if (Math.abs(x) > 0.1 || Math.abs(y) > 0.1) {
				double aRadians = Math.toRadians(angle);
				// Calculate the displacement relative to the center of the union
				// (standard 2D rotation of the offset vector)
				double newCenterX = x * Math.cos(aRadians) - y * Math.sin(aRadians);
				double newCenterY = y * Math.cos(aRadians) + x * Math.sin(aRadians);
				// Now get the position relative to the screen
				newCenterX += centerX;
				newCenterY += centerY;
				a.setLocation(newCenterX - bounds.getWidth() / 2d, newCenterY - bounds.getHeight() / 2d);
			}
		}
		updateBounds();
	}

	@Override
	public void paint(Graphics g, boolean showSelected) {
		super.paint(g, showSelected);
		if (annotations != null && annotations.size() > 0) {
			Graphics2D g2 = (Graphics2D) g;
			AffineTransform savedTransform = g2.getTransform();
			AffineTransform transform = new AffineTransform();
			// NOTE(review): `bounds` is unused and the rotate call below is
			// commented out, so `transform` is the identity — the transform/
			// setTransform pair currently has no visual effect; confirm intent.
			Rectangle2D bounds = getBounds();
			if (rotation != 0.0) {
				// transform.rotate(Math.toRadians(rotation), bounds.getX() + bounds.getWidth()/2, bounds.getY() + bounds.getHeight()/2);
				g2.transform(transform);
			}
			for (DingAnnotation annotation : annotations)
				annotation.paint(g, false);
			if (rotation != 0.0) {
				g2.setTransform(savedTransform);
			}
		}
		updateBounds();
	}

	// sets this group's bounds to the union of all member bounds; no-op when empty
	private void updateBounds() {
		Rectangle2D union = null;
		for (var child : annotations) {
			if (union == null)
				union = child.getBounds().getBounds2D();
			else
				union = union.createUnion(child.getBounds().getBounds2D());
		}
		if (union != null)
			setBounds(union.getBounds());
	}

	// center point of the given rectangle
	private Point2D getCenter(Rectangle2D bounds) {
		return new Point2D.Double(bounds.getX() + bounds.getWidth() / 2d, bounds.getY() + bounds.getHeight() / 2d);
	}
}
package com.worth.ifs.exception;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.worth.ifs.commons.error.exception.*;
import com.worth.ifs.util.MessageUtil;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.core.env.Environment;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.servlet.ModelAndView;

import javax.servlet.http.HttpServletRequest;
import java.util.Collections;
import java.util.List;

/**
 * This controller can handle all Exceptions, so the user always sees a
 * nice-looking error page, or a JSON error message is returned.
 * Each handler maps one exception type to an HTTP status and a view name.
 *
 * NOTE(review): all handlers log at DEBUG level; consider WARN/ERROR for the
 * 5xx cases so failures are visible in production logs — confirm with the team.
 */
@ControllerAdvice
public class ErrorController {
    private final Log log = LogFactory.getLog(getClass());

    @Autowired
    private MessageSource messageSource;

    @Autowired
    Environment env;

    public ErrorController() {
        super();
    }

    // constructor used by tests / manual wiring
    public ErrorController(Environment env, MessageSource messageSource) {
        super();
        this.env = env;
        this.messageSource = messageSource;
    }

    // autosave failures return a JSON body instead of an error view
    @ResponseStatus(HttpStatus.BAD_REQUEST)  // 400
    @ExceptionHandler(value = {AutosaveElementException.class})
    public @ResponseBody ObjectNode jsonAutosaveResponseHandler(AutosaveElementException e) throws AutosaveElementException {
        log.debug("ErrorController jsonAutosaveResponseHandler", e);
        return e.createJsonResponse();
    }

    @ResponseStatus(HttpStatus.BAD_REQUEST)  // 400
    @ExceptionHandler(value = IncorrectArgumentTypeException.class)
    public ModelAndView incorrectArgumentTypeErrorHandler(HttpServletRequest req, IncorrectArgumentTypeException e) {
        log.debug("ErrorController incorrectArgumentTypeErrorHandler", e);
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.FORBIDDEN)  // 403
    @ExceptionHandler(value = ForbiddenActionException.class)
    public ModelAndView accessDeniedException(HttpServletRequest req, ForbiddenActionException e) {
        log.debug("ErrorController actionNotAllowed", e);
        return createExceptionModelAndView(e, "forbidden", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.NOT_FOUND)  // 404
    @ExceptionHandler(value = ObjectNotFoundException.class)
    public ModelAndView objectNotFoundHandler(HttpServletRequest req, ObjectNotFoundException e) {
        log.debug("ErrorController objectNotFoundHandler", e);
        return createExceptionModelAndView(e, "404", req, e.getArguments());
    }

    @ResponseStatus(value = HttpStatus.CONFLICT)  // 409
    @ExceptionHandler(value = FileAlreadyLinkedToFormInputResponseException.class)
    public ModelAndView fileAlreadyLinkedToFormInputResponse(HttpServletRequest req, FileAlreadyLinkedToFormInputResponseException e){
        log.debug("ErrorController fileAlreadyLinkedToFormInputResponse", e );
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.LENGTH_REQUIRED)  // 411
    @ExceptionHandler(value = LengthRequiredException.class)
    public ModelAndView lengthRequiredErrorHandler(HttpServletRequest req, LengthRequiredException e){
        log.debug("ErrorController lengthRequired", e );
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.PAYLOAD_TOO_LARGE)  // 413
    @ExceptionHandler(value = PayloadTooLargeException.class)
    public ModelAndView payloadTooLargeErrorHandler(HttpServletRequest req, PayloadTooLargeException e){
        log.debug("ErrorController payloadTooLarge", e );
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.UNSUPPORTED_MEDIA_TYPE)  // 415
    @ExceptionHandler(value = UnsupportedMediaTypeException.class)
    public ModelAndView unsupportedMediaTypeErrorHandler(HttpServletRequest req, UnsupportedMediaTypeException e){
        log.debug("ErrorController unsupportedMediaType", e );
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    // the following overloads all map internal failures to a 500 + "error" view

    @ResponseStatus(value= HttpStatus.INTERNAL_SERVER_ERROR)  // 500
    @ExceptionHandler(value = UnableToCreateFileException.class)
    public ModelAndView defaultErrorHandler(HttpServletRequest req, UnableToCreateFileException e) {
        log.debug("ErrorController defaultErrorHandler", e);
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.INTERNAL_SERVER_ERROR)  // 500
    @ExceptionHandler(value = UnableToCreateFoldersException.class)
    public ModelAndView defaultErrorHandler(HttpServletRequest req, UnableToCreateFoldersException e) {
        log.debug("ErrorController defaultErrorHandler", e);
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.INTERNAL_SERVER_ERROR)  // 500
    @ExceptionHandler(value = UnableToSendNotificationException.class)
    public ModelAndView defaultErrorHandler(HttpServletRequest req, UnableToSendNotificationException e) {
        log.debug("ErrorController defaultErrorHandler", e);
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.INTERNAL_SERVER_ERROR)  // 500
    @ExceptionHandler(value = UnableToUpdateFileException.class)
    public ModelAndView defaultErrorHandler(HttpServletRequest req, UnableToUpdateFileException e) {
        log.debug("ErrorController defaultErrorHandler", e);
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.INTERNAL_SERVER_ERROR)  // 500
    @ExceptionHandler(value = UnableToDeleteFileException.class)
    public ModelAndView defaultErrorHandler(HttpServletRequest req, UnableToDeleteFileException e) {
        log.debug("ErrorController defaultErrorHandler", e);
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    @ResponseStatus(value= HttpStatus.INTERNAL_SERVER_ERROR)  // 500
    @ExceptionHandler(value = UnableToRenderNotificationTemplateException.class)
    public ModelAndView defaultErrorHandler(HttpServletRequest req, UnableToRenderNotificationTemplateException e) {
        log.debug("ErrorController defaultErrorHandler", e);
        return createExceptionModelAndView(e, "error", req, e.getArguments());
    }

    // catch-all for anything not matched above
    @ResponseStatus(value= HttpStatus.INTERNAL_SERVER_ERROR)  // 500
    @ExceptionHandler(value = {GeneralUnexpectedErrorException.class, Exception.class})
    public ModelAndView defaultErrorHandler(HttpServletRequest req, Exception e) throws Exception {
        log.debug("ErrorController defaultErrorHandler", e);
        return createExceptionModelAndView(e, "error", req, Collections.emptyList());
    }

    /**
     * Builds the error ModelAndView: always exposes the exception and request
     * URL; the stack trace and a localized message are added only in the
     * uat/dev/test profiles (never in production).
     */
    private ModelAndView createExceptionModelAndView(Exception e, String message, HttpServletRequest req, List<Object> arguments){
        ModelAndView mav = new ModelAndView();
        mav.addObject("exception", e);
        if(env.acceptsProfiles("uat", "dev", "test")) {
            mav.addObject("stacktrace", ExceptionUtils.getStackTrace(e));
            String msg = MessageUtil.getFromMessageBundle(messageSource, e.getClass().getName(), e.getMessage(), arguments.toArray(), req.getLocale());
            mav.addObject("message", msg);
        }
        mav.addObject("url", req.getRequestURL().toString());
        mav.setViewName(message);
        return mav;
    }
}
package uk.gov.nationalarchives.droid.core.interfaces.archive;

import org.apache.ant.compress.util.SevenZStreamFactory;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.junit.Test;
import uk.gov.nationalarchives.droid.core.interfaces.IdentificationRequest;
import uk.gov.nationalarchives.droid.core.interfaces.ResourceId;
import uk.gov.nationalarchives.droid.core.interfaces.RequestIdentifier;
import uk.gov.nationalarchives.droid.core.interfaces.AsynchDroid;
import uk.gov.nationalarchives.droid.core.interfaces.ResultHandler;
import uk.gov.nationalarchives.droid.core.interfaces.IdentificationResult;
import uk.gov.nationalarchives.droid.core.interfaces.resource.RequestMetaData;

import java.io.File;
import java.io.FileInputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Tests that SevenZipArchiveHandler walks every entry of a 7z archive and
 * submits one identification request per file entry to the droid core.
 */
public class SevenZArchiveHandlerTest {

    @Test
    public void testHandleSevenZFile() throws Exception {
        // test fixture: a 7z archive on the test classpath
        File file = new File(getClass().getResource("/saved.7z").getFile());
        IdentificationRequestFactory factory = mock(IdentificationRequestFactory.class);
        List<IdentificationRequest> mockRequests = new ArrayList<>();
        SevenZStreamFactory sevenZStreamFactory = new SevenZStreamFactory();
        ArchiveInputStream archiveInputStream = sevenZStreamFactory.getArchiveInputStream(file, null);
        ArchiveEntry entry;
        ResourceId expectedParentId = new ResourceId(30L, "");
        int count = 0;
        // pre-walk the archive ourselves and prepare one mock request per entry,
        // keyed by the identifier the handler is expected to construct
        while ((entry = archiveInputStream.getNextEntry()) != null) {
            URI expectedUri = ArchiveFileUtils.toSevenZUri(file.toURI(), entry.getName());
            IdentificationRequest mockRequest = mock(IdentificationRequest.class);
            RequestIdentifier expectedIdentifier = new RequestIdentifier(expectedUri);
            expectedIdentifier.setParentResourceId(expectedParentId);
            expectedIdentifier.setParentPrefix(null);
            expectedIdentifier.setAncestorId(10L);
            when(mockRequest.getIdentifier()).thenReturn(expectedIdentifier);
            mockRequests.add(mockRequest);
            // factory must hand back exactly this mock for this identifier
            when(factory.newRequest(any(RequestMetaData.class), eq(expectedIdentifier))).thenReturn(mockRequests.get(count));
            count++;
        }
        AsynchDroid droidCore = mock(AsynchDroid.class);
        SevenZipArchiveHandler handler = new SevenZipArchiveHandler();
        handler.setFactory(factory);
        handler.setDroid(droidCore);
        ResultHandler resultHandler = mock(ResultHandler.class);
        // directory entries resolve to the parent resource id used above
        when(resultHandler.handleDirectory(any(IdentificationResult.class), any(ResourceId.class), anyBoolean())).thenReturn(new ResourceId(30L, ""));
        handler.setResultHandler(resultHandler);
        // the request representing the archive itself
        IdentificationRequest originalRequest = mock(IdentificationRequest.class);
        RequestIdentifier originalIdentifier = new RequestIdentifier(file.toURI());
        originalIdentifier.setAncestorId(10L);
        originalIdentifier.setParentId(20L);
        originalIdentifier.setNodeId(30L);
        when(originalRequest.getIdentifier()).thenReturn(originalIdentifier);
        when(originalRequest.getSourceInputStream()).thenReturn(new FileInputStream(file));
        when(originalRequest.getWindowReader()).thenReturn(new net.byteseek.io.reader.FileReader(file));
        handler.handle(originalRequest);
        // NOTE(review): only entries 2..6 are verified — presumably entries 0 and
        // 1 of saved.7z are directories (handled via resultHandler) rather than
        // files; confirm against the fixture's contents.
        verify(droidCore).submit(mockRequests.get(2));
        verify(droidCore).submit(mockRequests.get(3));
        verify(droidCore).submit(mockRequests.get(4));
        verify(droidCore).submit(mockRequests.get(5));
        verify(droidCore).submit(mockRequests.get(6));
    }
}
package com.intellij.codeInspection.reference;

import com.intellij.ExtensionPoints;
import com.intellij.analysis.AnalysisScope;
import com.intellij.codeInspection.ex.EntryPointsManager;
import com.intellij.codeInspection.ex.EntryPointsManagerImpl;
import com.intellij.codeInspection.ex.GlobalInspectionContextImpl;
import com.intellij.lang.Language;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.javadoc.PsiDocTag;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import gnu.trove.THashMap;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Builds and owns the reference graph used by global inspections: maps PSI
 * elements to RefElement nodes, tracks packages and modules, and drives graph
 * annotators. Access to the PSI-to-RefElement table is guarded by a
 * read/write lock.
 */
public class RefManagerImpl extends RefManager {
  // bitmask allocator for client-specific flags; doubled on each request.
  // NOTE(review): repeated getLastUsedMask() calls will eventually overflow
  // int — acceptable only if few masks are ever requested; confirm.
  private int myLastUsedMask = 256*256*256*4;

  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.reference.RefManager");

  private final Project myProject;
  private AnalysisScope myScope;
  private RefProject myRefProject;
  // PSI element -> reference-graph node; guarded by myLock
  private THashMap<PsiElement, RefElement> myRefTable;
  // lazily created package and module caches
  private THashMap<String, RefPackage> myPackages;
  private THashMap<Module, RefModule> myModules;
  private final ProjectIterator myProjectIterator;
  private boolean myDeclarationsFound;
  // pattern methods used elsewhere to recognize application entry points
  private PsiMethod myAppMainPattern;
  private PsiMethod myAppPremainPattern;
  private PsiClass myApplet;
  private PsiClass myServlet;
  private boolean myIsInProcess = false;

  private List<RefGraphAnnotator> myGraphAnnotators = new ArrayList<RefGraphAnnotator>();
  private GlobalInspectionContextImpl myContext;

  private EntryPointsManager myEntryPointsManager = null;

  private ReadWriteLock myLock = new ReentrantReadWriteLock();

  public RefManagerImpl(Project project, AnalysisScope scope, GlobalInspectionContextImpl context) {
    myDeclarationsFound = false;
    myProject = project;
    myScope = scope;
    myContext = context;
    myRefProject = new RefProjectImpl(this);
    myRefTable = new THashMap<PsiElement, RefElement>();
    myProjectIterator = new ProjectIterator();
    final PsiManager psiManager = PsiManager.getInstance(project);
    PsiElementFactory factory = psiManager.getElementFactory();
    try {
      // template methods for entry-point detection (main / premain signatures)
      myAppMainPattern = factory.createMethodFromText("void main(String[] args);", null);
      myAppPremainPattern = factory.createMethodFromText("void premain(String[] args, java.lang.instrument.Instrumentation i);", null);
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
    // may be null if the classes are not on the project's classpath
    myApplet = psiManager.findClass("java.applet.Applet", GlobalSearchScope.allScope(project));
    myServlet = psiManager.findClass("javax.servlet.Servlet", GlobalSearchScope.allScope(project));
  }

  /**
   * Visits every element, module and package node under the read lock.
   * Implicit default constructors are visited through their owning class.
   */
  public void iterate(RefVisitor visitor) {
    myLock.readLock().lock();
    try {
      final Map<PsiElement, RefElement> refTable = getRefTable();
      for (RefElement refElement : refTable.values()) {
        refElement.accept(visitor);
        if (refElement instanceof RefClass) {
          RefClass refClass = (RefClass)refElement;
          RefMethod refDefaultConstructor = refClass.getDefaultConstructor();
          if (refDefaultConstructor instanceof RefImplicitConstructor) {
            refClass.getDefaultConstructor().accept(visitor);
          }
        }
      }
      if (myModules != null) {
        for (RefModule refModule : myModules.values()) {
          refModule.accept(visitor);
        }
      }
      if (myPackages != null) {
        for (RefPackage refPackage : myPackages.values()) {
          refPackage.accept(visitor);
        }
      }
    }
    finally {
      myLock.readLock().unlock();
    }
  }

  // drops all graph state; the manager is unusable afterwards
  public void cleanup() {
    myScope = null;
    myRefProject = null;
    myRefTable = null;
    myPackages = null;
    myModules = null;
    myContext = null;
    if (myEntryPointsManager != null) {
      myEntryPointsManager.cleanup();
    }
    myGraphAnnotators.clear();
  }

  public AnalysisScope getScope() {
    return myScope;
  }

  // --- graph-annotator event fan-out ---

  public void fireNodeInitialized(RefElement refElement){
    for (RefGraphAnnotator annotator : myGraphAnnotators) {
      annotator.onInitialize(refElement);
    }
  }

  public void fireNodeMarkedReferenced(RefElement refWhat, RefElement refFrom, boolean referencedFromClassInitializer, final boolean forReading, final boolean forWriting){
    for (RefGraphAnnotator annotator : myGraphAnnotators) {
      annotator.onMarkReferenced(refWhat, refFrom, referencedFromClassInitializer, forReading, forWriting);
    }
  }

  public void fireBuildReferences(RefElement refElement){
    for (RefGraphAnnotator annotator : myGraphAnnotators) {
      annotator.onReferencesBuild(refElement);
    }
  }

  public void registerGraphAnnotator(RefGraphAnnotator annotator){
    myGraphAnnotators.add(annotator);
  }

  /** Allocates the next free flag bit by doubling the previous mask. */
  public int getLastUsedMask() {
    myLastUsedMask *= 2;
    return myLastUsedMask;
  }

  // lazily creates the entry-points manager, seeding it with the project's
  // persistent entries
  public EntryPointsManager getEntryPointsManager() {
    if (myEntryPointsManager == null) {
      myEntryPointsManager = new EntryPointsManagerImpl();
      ((EntryPointsManagerImpl)myEntryPointsManager).addAllPersistentEntries(EntryPointsManagerImpl.getInstance(myContext.getProject()));
    }
    return myEntryPointsManager;
  }

  // walks the analysis scope once and populates the reference graph
  public void findAllDeclarations() {
    if (!myDeclarationsFound) {
      long before = System.currentTimeMillis();
      getScope().accept(myProjectIterator);
      myDeclarationsFound = true;

      LOG.info("Total duration of processing project usages:" + (System.currentTimeMillis() - before));
    }
  }

  public boolean isDeclarationsFound() {
    return myDeclarationsFound;
  }

  // bracket the inspection run; creating new references is only valid inside
  public void inspectionReadActionStarted() {
    myIsInProcess = true;
  }

  public void inspectionReadActionFinished() {
    myIsInProcess = false;
  }

  public boolean isInProcess() {
    return myIsInProcess;
  }

  /**
   * Returns the declaration (class/method/field/parameter) whose own text
   * offset is exactly {@code textOffset} in the given file, or null.
   */
  @Nullable
  public PsiElement getPsiAtOffset(VirtualFile vFile, int textOffset) {
    PsiFile psiFile = PsiManager.getInstance(myProject).findFile(vFile);
    if (psiFile == null) return null;
    PsiElement psiElem = psiFile.findElementAt(textOffset);
    while (psiElem != null) {
      if (psiElem instanceof PsiClass ||
          psiElem instanceof PsiMethod ||
          psiElem instanceof PsiField ||
          psiElem instanceof PsiParameter) {
        return psiElem.getTextOffset() == textOffset ? psiElem : null;
      }
      psiElem = psiElem.getParent();
    }
    return null;
  }

  public Project getProject() {
    return myProject;
  }

  public RefProject getRefProject() {
    return myRefProject;
  }

  // NOTE(review): exposes the internal table without taking myLock; callers
  // are expected to hold the lock themselves (see iterate/removeReference).
  public THashMap<PsiElement, RefElement> getRefTable() {
    return myRefTable;
  }

  // returns (creating on demand) the package node, linking it into its parent
  // package or the project root
  public RefPackage getPackage(String packageName) {
    if (myPackages == null) {
      myPackages = new THashMap<String, RefPackage>();
    }
    RefPackage refPackage = myPackages.get(packageName);
    if (refPackage == null) {
      refPackage = new RefPackageImpl(packageName);
      myPackages.put(packageName, refPackage);
      int dotIndex = packageName.lastIndexOf('.');
      if (dotIndex >= 0) {
        ((RefPackageImpl)getPackage(packageName.substring(0, dotIndex))).add(refPackage);
      }
      else {
        ((RefProjectImpl)getRefProject()).add(refPackage);
      }
    }

    return refPackage;
  }

  /**
   * Removes a reference (and, for methods, its parameters) from the table
   * under the write lock.
   */
  public void removeReference(RefElement refElem) {
    myLock.writeLock().lock();
    try {
      final Map<PsiElement, RefElement> refTable = getRefTable();

      if (refElem instanceof RefMethod) {
        RefMethod refMethod = (RefMethod)refElem;
        RefParameter[] params = refMethod.getParameters();
        for (RefParameter param : params) {
          removeReference(param);
        }
      }

      if (refTable.remove(refElem.getElement()) != null) return;

      //PsiElement may have been invalidated and new one returned by getElement() is different so we need to do this stuff.
      // (safe despite removal during iteration because we return immediately)
      for (PsiElement psiElement : refTable.keySet()) {
        if (refTable.get(psiElement) == refElem) {
          refTable.remove(psiElement);
          return;
        }
      }
    }
    finally {
      myLock.writeLock().unlock();
    }
  }

  // registers annotators contributed via the extension point, then lets the
  // extended ones initialize against this manager
  public void initializeAnnotators() {
    final Object[] graphAnnotators = Extensions.getRootArea().getExtensionPoint(ExtensionPoints.INSPECTIONS_GRAPH_ANNOTATOR).getExtensions();
    for (Object annotator : graphAnnotators) {
      registerGraphAnnotator((RefGraphAnnotator)annotator);
    }
    for (RefGraphAnnotator graphAnnotator: myGraphAnnotators) {
      if (graphAnnotator instanceof RefGraphAnnotatorEx) {
        ((RefGraphAnnotatorEx)graphAnnotator).initialize(this);
      }
    }
  }

  /**
   * PSI walker that populates the graph: builds class references, collects
   * suppression annotations/javadoc tags, and records type references.
   */
  private class ProjectIterator extends PsiElementVisitor {
    private final RefUtilImpl REF_UTIL = (RefUtilImpl)RefUtil.getInstance();

    public void visitElement(PsiElement element) {
      for (PsiElement aChildren : element.getChildren()) {
        aChildren.accept(this);
      }
    }

    @Override
    public void visitFile(PsiFile file) {
      final VirtualFile virtualFile = file.getVirtualFile();
      if (virtualFile != null) {
        // progress reporting for the BUILD_GRAPH phase
        myContext.incrementJobDoneAmount(GlobalInspectionContextImpl.BUILD_GRAPH, VfsUtil.calcRelativeToProjectPath(virtualFile, myProject));
      }
      final FileViewProvider viewProvider = file.getViewProvider();
      final Set<Language> relevantLanguages = viewProvider.getPrimaryLanguages();
      for (Language language : relevantLanguages) {
        visitElement(viewProvider.getPsi(language));
      }
    }

    public void visitReferenceExpression(PsiReferenceExpression expression) {
      visitElement(expression);
    }

    // deliberately empty: plain code references carry no graph information here
    public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
    }

    public void visitClass(PsiClass aClass) {
      // type parameters are not graph nodes
      if (!(aClass instanceof PsiTypeParameter)) {
        super.visitClass(aClass);
        RefElement refClass = getReference(aClass);
        if (refClass != null) {
          ((RefClassImpl)refClass).buildReferences();
          List children = refClass.getChildren();
          if (children != null) {
            for (Object aChildren : children) {
              RefElementImpl refChild = (RefElementImpl)aChildren;
              refChild.buildReferences();
            }
          }
        }
      }
    }

    // collects @suppress javadoc tags onto the owning element
    public void visitDocComment(PsiDocComment comment) {
      super.visitDocComment(comment);
      final PsiDocTag[] tags = comment.getTags();
      for (PsiDocTag tag : tags) {
        if (Comparing.strEqual(tag.getName(), GlobalInspectionContextImpl.SUPPRESS_INSPECTIONS_TAG_NAME)){
          final PsiElement[] dataElements = tag.getDataElements();
          if (dataElements != null && dataElements.length > 0){
            final PsiModifierListOwner listOwner = PsiTreeUtil.getParentOfType(comment, PsiModifierListOwner.class);
            if (listOwner != null){
              final RefElementImpl element = (RefElementImpl)getReference(listOwner);
              if (element != null) {
                String suppressions = "";
                for (PsiElement dataElement : dataElements) {
                  suppressions += "," + dataElement.getText();
                }
                element.addSuppression(suppressions);
              }
            }
          }
        }
      }
    }

    // collects @SuppressWarnings values onto the owning element
    public void visitAnnotation(PsiAnnotation annotation) {
      super.visitAnnotation(annotation);
      if (Comparing.strEqual(annotation.getQualifiedName(), "java.lang.SuppressWarnings")){
        final PsiModifierListOwner listOwner = PsiTreeUtil.getParentOfType(annotation, PsiModifierListOwner.class);
        if (listOwner != null){
          final RefElementImpl element = (RefElementImpl)getReference(listOwner);
          if (element != null) {
            StringBuffer buf = new StringBuffer();
            final PsiNameValuePair[] nameValuePairs = annotation.getParameterList().getAttributes();
            for (PsiNameValuePair nameValuePair : nameValuePairs) {
              // strip braces and quotes from array/string attribute values
              buf.append(",").append(nameValuePair.getText().replaceAll("[{}\"\"]", ""));
            }
            if (buf.length() > 0){
              element.addSuppression(buf.substring(1));
            }
          }
        }
      }
    }

    public void visitVariable(PsiVariable variable) {
      super.visitVariable(variable);
      REF_UTIL.addTypeReference(variable, variable.getType(), RefManagerImpl.this);
    }

    public void visitInstanceOfExpression(PsiInstanceOfExpression expression) {
      super.visitInstanceOfExpression(expression);
      final PsiTypeElement typeElement = expression.getCheckType();
      if (typeElement != null) {
        REF_UTIL.addTypeReference(expression, typeElement.getType(), RefManagerImpl.this);
      }
    }

    // qualified `this` expressions create an instance reference from the
    // qualifier's class to the owner class
    public void visitThisExpression(PsiThisExpression expression) {
      super.visitThisExpression(expression);
      final PsiJavaCodeReferenceElement qualifier = expression.getQualifier();
      if (qualifier != null) {
        REF_UTIL.addTypeReference(expression, expression.getType(), RefManagerImpl.this);
        RefClass ownerClass = RefUtil.getInstance().getOwnerClass(RefManagerImpl.this, expression);
        if (ownerClass != null) {
          RefClassImpl refClass = (RefClassImpl)getReference(qualifier.resolve());
          if (refClass != null) {
            refClass.addInstanceReference(ownerClass);
          }
        }
      }
    }
  }

  public PsiMethod getAppMainPattern() {
    return myAppMainPattern;
  }

  public PsiMethod getAppPremainPattern() {
    return myAppPremainPattern;
  }

  public PsiClass getApplet() {
    return myApplet;
  }

  public PsiClass getServlet() {
    return myServlet;
  }

  /**
   * Returns (creating on demand) the graph node for the given element.
   * Creation runs in a read action and is only allowed while an inspection
   * is in process (or in unit-test mode); otherwise returns null.
   */
  @Nullable
  public RefElement getReference(final PsiElement elem) {
    if (elem != null && !(elem instanceof PsiPackage) && RefUtil.getInstance().belongsToScope(elem, this)) {
      if (!elem.isValid()) return null;

      RefElement ref = getFromRefTable(elem);
      if (ref == null) {
        if (!isValidPointForReference()){
          //LOG.assertTrue(true, "References may become invalid after process is finished");
          return null;
        }
        RefElementImpl refElement = ApplicationManager.getApplication().runReadAction(new Computable<RefElementImpl>() {
          @Nullable
          public RefElementImpl compute() {
            if (elem instanceof PsiClass) {
              return new RefClassImpl((PsiClass)elem, RefManagerImpl.this);
            }
            else if (elem instanceof PsiMethod) {
              return new RefMethodImpl((PsiMethod)elem, RefManagerImpl.this);
            }
            else if (elem instanceof PsiField) {
              return new RefFieldImpl((PsiField)elem, RefManagerImpl.this);
            }
            else if (elem instanceof PsiFile) {
              return new RefFileImpl((PsiFile)elem, RefManagerImpl.this);
            }
            else {
              return null;
            }
          }
        });
        if (refElement == null) return null;
        putToRefTable(elem, refElement);
        refElement.initialize();
        return refElement;
      }
      return ref;
    }

    return null;
  }

  // resolves a persisted (type, fully-qualified-name) pair back to a node
  public @Nullable RefEntity getReference(final String type, final String fqName) {
    return new SmartRefElementPointerImpl(type, fqName, this).getRefElement();
  }

  public RefMethod getMethodReference(RefClass refClass, PsiMethod psiMethod) {
    LOG.assertTrue(isValidPointForReference(), "References may become invalid after process is finished");
    RefMethodImpl ref = (RefMethodImpl)getFromRefTable(psiMethod);

    if (ref == null) {
      ref = new RefMethodImpl(refClass, psiMethod, this);
      ref.initialize();
      putToRefTable(psiMethod, ref);
    }

    return ref;
  }

  public RefField getFieldReference(RefClass refClass, PsiField psiField) {
    LOG.assertTrue(isValidPointForReference(), "References may become invalid after process is finished");
    RefFieldImpl ref = (RefFieldImpl)getFromRefTable(psiField);

    if (ref == null) {
      ref = new RefFieldImpl(refClass, psiField, this);
      ref.initialize();
      putToRefTable(psiField, ref);
    }

    return ref;
  }

  // read-locked table lookup
  private RefElement getFromRefTable(final PsiElement element) {
    myLock.readLock().lock();
    try {
      return getRefTable().get(element);
    }
    finally {
      myLock.readLock().unlock();
    }
  }

  public RefParameter getParameterReference(PsiParameter param, int index) {
    LOG.assertTrue(isValidPointForReference(), "References may become invalid after process is finished");
    RefElement ref = getFromRefTable(param);

    if (ref == null) {
      ref = new RefParameterImpl(param, index, this);
      ((RefParameterImpl)ref).initialize();
      putToRefTable(param, ref);
    }

    return (RefParameter)ref;
  }

  // write-locked table insert
  private void putToRefTable(final PsiElement element, final RefElement ref) {
    myLock.writeLock().lock();
    try {
      getRefTable().put(element, ref);
    }
    finally {
      myLock.writeLock().unlock();
    }
  }

  // returns (creating on demand) the module node; null for a null module
  public RefModule getRefModule(Module module) {
    if (module == null){
      return null;
    }
    if (myModules == null){
      myModules = new THashMap<Module, RefModule>();
    }
    RefModule refModule = myModules.get(module);
    if (refModule == null){
      refModule = new RefModuleImpl(module);
      myModules.put(module, refModule);
    }
    return refModule;
  }

  // new references may only be created during an inspection run (or in tests)
  private boolean isValidPointForReference() {
    return myIsInProcess || ApplicationManager.getApplication().isUnitTestMode();
  }
}
package org.ovirt.engine.ui.uicommonweb.models.volumes;

import java.util.ArrayList;
import java.util.Arrays;

import org.ovirt.engine.core.common.action.VdcActionParametersBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.action.gluster.CreateGlusterVolumeParameters;
import org.ovirt.engine.core.common.action.gluster.GlusterVolumeActionParameters;
import org.ovirt.engine.core.common.action.gluster.GlusterVolumeOptionParameters;
import org.ovirt.engine.core.common.action.gluster.GlusterVolumeRebalanceParameters;
import org.ovirt.engine.core.common.businessentities.VDSGroup;
import org.ovirt.engine.core.common.businessentities.storage_pool;
import org.ovirt.engine.core.common.businessentities.gluster.GlusterBrickEntity;
import org.ovirt.engine.core.common.businessentities.gluster.GlusterStatus;
import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeEntity;
import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeOptionEntity;
import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeType;
import org.ovirt.engine.core.common.businessentities.gluster.TransportType;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.interfaces.SearchType;
import org.ovirt.engine.core.common.mode.ApplicationMode;
import org.ovirt.engine.core.common.queries.ConfigurationValues;
import org.ovirt.engine.core.common.queries.GetConfigurationValueParameters;
import org.ovirt.engine.core.common.queries.SearchParameters;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.ObservableCollection;
import org.ovirt.engine.core.searchbackend.SearchObjects;
import org.ovirt.engine.ui.frontend.AsyncQuery;
import org.ovirt.engine.ui.frontend.Frontend;
import org.ovirt.engine.ui.frontend.INewAsyncCallback;
import org.ovirt.engine.ui.uicommonweb.Linq;
import org.ovirt.engine.ui.uicommonweb.UICommand;
import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider;
import org.ovirt.engine.ui.uicommonweb.models.ConfirmationModel;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.ISupportSystemTreeContext;
import org.ovirt.engine.ui.uicommonweb.models.ListWithDetailsModel;
import org.ovirt.engine.ui.uicommonweb.models.SystemTreeItemModel;
import org.ovirt.engine.ui.uicommonweb.models.configure.PermissionListModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeBrickListModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeEventListModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeGeneralModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeParameterListModel;
import org.ovirt.engine.ui.uicompat.ConstantsManager;
import org.ovirt.engine.ui.uicompat.FrontendActionAsyncResult;
import org.ovirt.engine.ui.uicompat.FrontendMultipleActionAsyncResult;
import org.ovirt.engine.ui.uicompat.IFrontendActionAsyncCallback;
import org.ovirt.engine.ui.uicompat.IFrontendMultipleActionAsyncCallback;

/**
 * List model backing the Gluster "Volumes" main tab. Performs the volume search
 * and exposes the create / remove / start / stop / rebalance /
 * optimize-for-virt-store actions, enabling or disabling them according to the
 * current selection and each volume's status.
 */
public class VolumeListModel extends ListWithDetailsModel implements ISupportSystemTreeContext {

    // Default counts offered by the create-volume dialog.
    public static Integer REPLICATE_COUNT_DEFAULT = 2;
    public static Integer STRIPE_COUNT_DEFAULT = 4;

    private UICommand createVolumeCommand;

    public UICommand getCreateVolumeCommand() {
        return createVolumeCommand;
    }

    private void setCreateVolumeCommand(UICommand value) {
        createVolumeCommand = value;
    }

    private UICommand removeVolumeCommand;

    public UICommand getRemoveVolumeCommand() {
        return removeVolumeCommand;
    }

    private void setRemoveVolumeCommand(UICommand value) {
        removeVolumeCommand = value;
    }

    private UICommand startCommand;
    private UICommand stopCommand;
    private UICommand rebalanceCommand;
    private UICommand optimizeForVirtStoreCommand;

    public UICommand getRebalanceCommand() {
        return rebalanceCommand;
    }

    public void setRebalanceCommand(UICommand rebalanceCommand) {
        this.rebalanceCommand = rebalanceCommand;
    }

    public UICommand getStartCommand() {
        return startCommand;
    }

    public void setStartCommand(UICommand startCommand) {
        this.startCommand = startCommand;
    }

    public UICommand getStopCommand() {
        return stopCommand;
    }

    public void setStopCommand(UICommand stopCommand) {
        this.stopCommand = stopCommand;
    }

    public UICommand getOptimizeForVirtStoreCommand() {
        return optimizeForVirtStoreCommand;
    }

    public void setOptimizeForVirtStoreCommand(UICommand optimizeForVirtStoreCommand) {
        this.optimizeForVirtStoreCommand = optimizeForVirtStoreCommand;
    }

    public VolumeListModel() {
        setTitle(ConstantsManager.getInstance().getConstants().volumesTitle());

        setDefaultSearchString("Volumes:"); //$NON-NLS-1$
        setSearchString(getDefaultSearchString());
        setSearchObjects(new String[] { SearchObjects.GLUSTER_VOLUME_OBJ_NAME,
                SearchObjects.GLUSTER_VOLUME_PLU_OBJ_NAME });
        setAvailableInModes(ApplicationMode.GlusterOnly);

        setCreateVolumeCommand(new UICommand("Create Volume", this)); //$NON-NLS-1$
        setRemoveVolumeCommand(new UICommand("Remove", this)); //$NON-NLS-1$
        setStartCommand(new UICommand("Start", this)); //$NON-NLS-1$
        setStopCommand(new UICommand("Stop", this)); //$NON-NLS-1$
        setRebalanceCommand(new UICommand("Rebalance", this)); //$NON-NLS-1$
        setOptimizeForVirtStoreCommand(new UICommand("OptimizeForVirtStore", this)); //$NON-NLS-1$

        // Rebalance is hidden entirely; the rest start disabled until a
        // selection makes them applicable (see updateActionAvailability).
        getRebalanceCommand().setIsAvailable(false);
        getRemoveVolumeCommand().setIsExecutionAllowed(false);
        getStartCommand().setIsExecutionAllowed(false);
        getStopCommand().setIsExecutionAllowed(false);
        getRebalanceCommand().setIsExecutionAllowed(false);

        getSearchNextPageCommand().setIsAvailable(true);
        getSearchPreviousPageCommand().setIsAvailable(true);
    }

    @Override
    protected void InitDetailModels() {
        super.InitDetailModels();

        ObservableCollection<EntityModel> list = new ObservableCollection<EntityModel>();
        list.add(new VolumeGeneralModel());
        list.add(new VolumeParameterListModel());
        list.add(new VolumeBrickListModel());
        list.add(new PermissionListModel());
        list.add(new VolumeEventListModel());
        setDetailModels(list);
    }

    /**
     * Opens the create-volume dialog and asynchronously populates its data center
     * and cluster pickers, constraining them when a system-tree node is selected.
     */
    private void createVolume() {
        if (getWindow() != null) {
            return;
        }

        VolumeModel volumeModel = new VolumeModel();
        volumeModel.setTitle(ConstantsManager.getInstance().getConstants().createVolumeTitle());
        setWindow(volumeModel);

        AsyncQuery _asyncQuery = new AsyncQuery();
        _asyncQuery.setModel(this);
        _asyncQuery.asyncCallback = new INewAsyncCallback() {
            @Override
            public void OnSuccess(Object model, Object result) {
                VolumeListModel volumeListModel = (VolumeListModel) model;
                VolumeModel innerVolumeModel = (VolumeModel) volumeListModel.getWindow();
                ArrayList<storage_pool> dataCenters = (ArrayList<storage_pool>) result;

                if (volumeListModel.getSystemTreeSelectedItem() != null) {
                    switch (volumeListModel.getSystemTreeSelectedItem().getType()) {
                    case Volumes:
                    case Cluster:
                    case Cluster_Gluster:
                        // Tree context pins both the cluster and its data center.
                        VDSGroup cluster = (VDSGroup) volumeListModel.getSystemTreeSelectedItem().getEntity();
                        for (storage_pool dc : dataCenters) {
                            if (dc.getId().equals(cluster.getstorage_pool_id())) {
                                innerVolumeModel.getDataCenter()
                                        .setItems(new ArrayList<storage_pool>(Arrays.asList(new storage_pool[] { dc })));
                                innerVolumeModel.getDataCenter().setSelectedItem(dc);
                                break;
                            }
                        }
                        innerVolumeModel.getDataCenter().setIsChangable(false);
                        innerVolumeModel.getDataCenter().setInfo(ConstantsManager.getInstance()
                                .getConstants()
                                .cannotChooseVolumesDataCenterinTreeContect());

                        innerVolumeModel.getCluster().setItems(Arrays.asList(cluster));
                        innerVolumeModel.getCluster().setSelectedItem(cluster);
                        innerVolumeModel.getCluster().setIsChangable(false);
                        innerVolumeModel.getCluster().setInfo(ConstantsManager.getInstance()
                                .getConstants()
                                .cannotChooseVolumesClusterinTreeContect());
                        break;
                    case Clusters:
                    case DataCenter:
                        // Tree context pins only the data center.
                        storage_pool selectDataCenter =
                                (storage_pool) volumeListModel.getSystemTreeSelectedItem().getEntity();
                        innerVolumeModel.getDataCenter()
                                .setItems(new ArrayList<storage_pool>(Arrays.asList(new storage_pool[] { selectDataCenter })));
                        innerVolumeModel.getDataCenter().setSelectedItem(selectDataCenter);
                        innerVolumeModel.getDataCenter().setIsChangable(false);
                        innerVolumeModel.getDataCenter().setInfo(ConstantsManager.getInstance()
                                .getConstants()
                                .cannotChooseVolumesDataCenterinTreeContect());
                        break;
                    default:
                        innerVolumeModel.getDataCenter().setItems(dataCenters);
                        innerVolumeModel.getDataCenter().setSelectedItem(Linq.FirstOrDefault(dataCenters));
                        break;
                    }
                }
                else {
                    innerVolumeModel.getDataCenter().setItems(dataCenters);
                    innerVolumeModel.getDataCenter().setSelectedItem(Linq.FirstOrDefault(dataCenters));
                }

                UICommand command = new UICommand("onCreateVolume", volumeListModel); //$NON-NLS-1$
                command.setTitle(ConstantsManager.getInstance().getConstants().ok());
                command.setIsDefault(true);
                innerVolumeModel.getCommands().add(command);
                command = new UICommand("Cancel", volumeListModel); //$NON-NLS-1$
                command.setTitle(ConstantsManager.getInstance().getConstants().cancel());
                command.setIsCancel(true);
                innerVolumeModel.getCommands().add(command);
            }
        };
        AsyncDataProvider.GetDataCenterList(_asyncQuery);
    }

    /** Opens the remove-volumes confirmation dialog listing the selected volume names. */
    private void removeVolume() {
        if (getWindow() != null) {
            return;
        }

        ConfirmationModel model = new ConfirmationModel();
        setWindow(model);
        model.setTitle(ConstantsManager.getInstance().getConstants().removeVolumesTitle());
        model.setHashName("remove_volume"); //$NON-NLS-1$
        model.setMessage(ConstantsManager.getInstance().getConstants().removeVolumesMessage());
        model.setNote(ConstantsManager.getInstance().getConstants().removeVolumesWarning());

        if (getSelectedItems() == null) {
            return;
        }

        ArrayList<String> list = new ArrayList<String>();
        for (GlusterVolumeEntity item : Linq.<GlusterVolumeEntity> Cast(getSelectedItems())) {
            list.add(item.getName());
        }
        model.setItems(list);

        UICommand tempVar = new UICommand("OnRemove", this); //$NON-NLS-1$
        tempVar.setTitle(ConstantsManager.getInstance().getConstants().ok());
        tempVar.setIsDefault(true);
        model.getCommands().add(tempVar);
        UICommand tempVar2 = new UICommand("Cancel", this); //$NON-NLS-1$
        tempVar2.setTitle(ConstantsManager.getInstance().getConstants().cancel());
        tempVar2.setIsCancel(true);
        model.getCommands().add(tempVar2);
    }

    /** Executes DeleteGlusterVolume for each selected volume after confirmation. */
    private void onRemoveVolume() {
        if (getWindow() == null) {
            return;
        }

        ConfirmationModel model = (ConfirmationModel) getWindow();

        if (model.getProgress() != null) {
            return;
        }

        if (getSelectedItems() == null) {
            return;
        }

        ArrayList<VdcActionParametersBase> list = new ArrayList<VdcActionParametersBase>();
        for (Object item : getSelectedItems()) {
            GlusterVolumeEntity volume = (GlusterVolumeEntity) item;
            list.add(new GlusterVolumeActionParameters(volume.getId(), false));
        }
        model.StartProgress(null);

        Frontend.RunMultipleAction(VdcActionType.DeleteGlusterVolume, list,
                new IFrontendMultipleActionAsyncCallback() {
                    @Override
                    public void Executed(FrontendMultipleActionAsyncResult result) {
                        ConfirmationModel localModel = (ConfirmationModel) result.getState();
                        localModel.StopProgress();
                        cancel();
                    }
                }, model);
    }

    @Override
    protected void SyncSearch() {
        SearchParameters tempVar = new SearchParameters(getSearchString(), SearchType.GlusterVolume);
        tempVar.setMaxCount(getSearchPageSize());
        super.SyncSearch(VdcQueryType.Search, tempVar);
    }

    @Override
    protected void OnSelectedItemChanged() {
        super.OnSelectedItemChanged();
        updateActionAvailability();
    }

    @Override
    protected void SelectedItemsChanged() {
        // Fixed: previously delegated to super.OnSelectedItemChanged() by
        // mistake, bypassing the base class's multi-selection handling.
        super.SelectedItemsChanged();
        updateActionAvailability();
    }

    /**
     * Enables/disables the action commands for the current selection: with no
     * selection everything is disabled; otherwise UP volumes disallow remove and
     * start, while DOWN volumes disallow stop and rebalance.
     */
    private void updateActionAvailability() {
        if (getSelectedItems() == null || getSelectedItems().size() == 0) {
            getRemoveVolumeCommand().setIsExecutionAllowed(false);
            getStopCommand().setIsExecutionAllowed(false);
            getStartCommand().setIsExecutionAllowed(false);
            getRebalanceCommand().setIsExecutionAllowed(false);
            getOptimizeForVirtStoreCommand().setIsExecutionAllowed(false);
            return;
        }

        getRemoveVolumeCommand().setIsExecutionAllowed(true);
        getStopCommand().setIsExecutionAllowed(true);
        getStartCommand().setIsExecutionAllowed(true);
        getRebalanceCommand().setIsExecutionAllowed(true);
        getOptimizeForVirtStoreCommand().setIsExecutionAllowed(true);

        for (GlusterVolumeEntity volume : Linq.<GlusterVolumeEntity> Cast(getSelectedItems())) {
            if (volume.getStatus() == GlusterStatus.UP) {
                getRemoveVolumeCommand().setIsExecutionAllowed(false);
                getStartCommand().setIsExecutionAllowed(false);
            }
            else if (volume.getStatus() == GlusterStatus.DOWN) {
                getStopCommand().setIsExecutionAllowed(false);
                getRebalanceCommand().setIsExecutionAllowed(false);
            }
        }
    }

    private void cancel() {
        setWindow(null);
    }

    @Override
    public void ExecuteCommand(UICommand command) {
        super.ExecuteCommand(command);

        if (command.equals(getCreateVolumeCommand())) {
            createVolume();
        }
        else if (command.equals(getRemoveVolumeCommand())) {
            removeVolume();
        }
        else if (command.getName().equals("Cancel")) { //$NON-NLS-1$
            cancel();
        }
        else if (command.getName().equals("onCreateVolume")) { //$NON-NLS-1$
            onCreateVolume();
        }
        else if (command.equals(getStartCommand())) {
            start();
        }
        else if (command.equals(getStopCommand())) {
            stop();
        }
        else if (command.equals(getRebalanceCommand())) {
            rebalance();
        }
        else if (command.equals(getOptimizeForVirtStoreCommand())) {
            optimizeForVirtStore();
        }
        else if (command.getName().equals("onStop")) {//$NON-NLS-1$
            onStop();
        }
        else if (command.getName().equals("OnRemove")) { //$NON-NLS-1$
            onRemoveVolume();
        }
    }

    /** Starts a rebalance on every selected volume (no confirmation dialog). */
    private void rebalance() {
        if (getSelectedItems() == null) {
            return;
        }

        ArrayList<VdcActionParametersBase> list = new ArrayList<VdcActionParametersBase>();
        for (Object item : getSelectedItems()) {
            GlusterVolumeEntity volume = (GlusterVolumeEntity) item;
            list.add(new GlusterVolumeRebalanceParameters(volume.getId(), false, false));
        }
        Frontend.RunMultipleAction(VdcActionType.StartRebalanceGlusterVolume, list);
    }

    /**
     * Applies the virt-store tuning options ("group", "storage.owner-uid",
     * "storage.owner-gid") to the selected volumes. The three option values are
     * fetched from engine configuration via chained async queries before the
     * SetGlusterVolumeOption actions are submitted.
     */
    private void optimizeForVirtStore() {
        if (getSelectedItems() == null) {
            return;
        }

        AsyncQuery aQuery = new AsyncQuery();
        aQuery.setModel(this);
        aQuery.asyncCallback = new INewAsyncCallback() {
            @Override
            public void OnSuccess(Object model, final Object result) {
                AsyncQuery aQueryInner = new AsyncQuery();
                aQueryInner.setModel(this);
                aQueryInner.asyncCallback = new INewAsyncCallback() {
                    @Override
                    public void OnSuccess(Object modelInner, final Object resultInner) {
                        AsyncQuery aQueryInner1 = new AsyncQuery();
                        aQueryInner1.setModel(this);
                        aQueryInner1.asyncCallback = new INewAsyncCallback() {
                            @Override
                            public void OnSuccess(Object modelInner1, Object resultInner1) {
                                // All three config values are available now.
                                String optionGroupVirt = (String) result;
                                String optionOwnerUserVirt = (String) resultInner;
                                String optionOwnerGroupVirt = (String) resultInner1;

                                ArrayList<VdcActionParametersBase> list = new ArrayList<VdcActionParametersBase>();
                                for (Object item : getSelectedItems()) {
                                    GlusterVolumeEntity volume = (GlusterVolumeEntity) item;

                                    GlusterVolumeOptionEntity optionGroup = new GlusterVolumeOptionEntity();
                                    optionGroup.setVolumeId(volume.getId());
                                    optionGroup.setKey("group"); //$NON-NLS-1$
                                    optionGroup.setValue(optionGroupVirt);
                                    list.add(new GlusterVolumeOptionParameters(optionGroup));

                                    GlusterVolumeOptionEntity optionOwnerUser = new GlusterVolumeOptionEntity();
                                    optionOwnerUser.setVolumeId(volume.getId());
                                    optionOwnerUser.setKey("storage.owner-uid"); //$NON-NLS-1$
                                    optionOwnerUser.setValue(optionOwnerUserVirt);
                                    list.add(new GlusterVolumeOptionParameters(optionOwnerUser));

                                    GlusterVolumeOptionEntity optionOwnerGroup = new GlusterVolumeOptionEntity();
                                    optionOwnerGroup.setVolumeId(volume.getId());
                                    optionOwnerGroup.setKey("storage.owner-gid"); //$NON-NLS-1$
                                    optionOwnerGroup.setValue(optionOwnerGroupVirt);
                                    list.add(new GlusterVolumeOptionParameters(optionOwnerGroup));
                                }
                                Frontend.RunMultipleAction(VdcActionType.SetGlusterVolumeOption, list);
                            }
                        };
                        AsyncDataProvider.GetConfigFromCache(
                                new GetConfigurationValueParameters(ConfigurationValues.GlusterVolumeOptionOwnerGroupVirtValue,
                                        Config.DefaultConfigurationVersion),
                                aQueryInner1);
                    }
                };
                AsyncDataProvider.GetConfigFromCache(
                        new GetConfigurationValueParameters(ConfigurationValues.GlusterVolumeOptionOwnerUserVirtValue,
                                Config.DefaultConfigurationVersion),
                        aQueryInner);
            }
        };
        AsyncDataProvider.GetConfigFromCache(
                new GetConfigurationValueParameters(ConfigurationValues.GlusterVolumeOptionGroupVirtValue,
                        Config.DefaultConfigurationVersion),
                aQuery);
    }

    /** Opens the stop-volume confirmation dialog listing the selected volume names. */
    private void stop() {
        if (getWindow() != null) {
            return;
        }

        ConfirmationModel model = new ConfirmationModel();
        setWindow(model);
        model.setTitle(ConstantsManager.getInstance().getConstants().confirmStopVolume());
        model.setHashName("volume_stop"); //$NON-NLS-1$
        model.setMessage(ConstantsManager.getInstance().getConstants().stopVolumeMessage());
        model.setNote(ConstantsManager.getInstance().getConstants().stopVolumeWarning());

        if (getSelectedItems() == null) {
            return;
        }

        ArrayList<String> list = new ArrayList<String>();
        for (GlusterVolumeEntity item : Linq.<GlusterVolumeEntity> Cast(getSelectedItems())) {
            list.add(item.getName());
        }
        model.setItems(list);

        UICommand tempVar = new UICommand("onStop", this); //$NON-NLS-1$
        tempVar.setTitle(ConstantsManager.getInstance().getConstants().ok());
        tempVar.setIsDefault(true);
        model.getCommands().add(tempVar);
        UICommand tempVar2 = new UICommand("Cancel", this); //$NON-NLS-1$
        tempVar2.setTitle(ConstantsManager.getInstance().getConstants().cancel());
        tempVar2.setIsCancel(true);
        model.getCommands().add(tempVar2);
    }

    /** Executes StopGlusterVolume for each selected volume after confirmation. */
    public void onStop() {
        if (getWindow() == null) {
            return;
        }

        ConfirmationModel model = (ConfirmationModel) getWindow();

        if (model.getProgress() != null) {
            return;
        }

        if (getSelectedItems() == null) {
            return;
        }

        ArrayList<VdcActionParametersBase> list = new ArrayList<VdcActionParametersBase>();
        for (Object item : getSelectedItems()) {
            GlusterVolumeEntity volume = (GlusterVolumeEntity) item;
            list.add(new GlusterVolumeActionParameters(volume.getId(), false));
        }
        model.StartProgress(null);

        Frontend.RunMultipleAction(VdcActionType.StopGlusterVolume, list,
                new IFrontendMultipleActionAsyncCallback() {
                    @Override
                    public void Executed(FrontendMultipleActionAsyncResult result) {
                        ConfirmationModel localModel = (ConfirmationModel) result.getState();
                        localModel.StopProgress();
                        cancel();
                    }
                }, model);
    }

    /** Starts every selected volume (no confirmation dialog). */
    private void start() {
        if (getSelectedItems() == null) {
            return;
        }

        ArrayList<VdcActionParametersBase> list = new ArrayList<VdcActionParametersBase>();
        for (Object item : getSelectedItems()) {
            GlusterVolumeEntity volume = (GlusterVolumeEntity) item;
            list.add(new GlusterVolumeActionParameters(volume.getId(), false));
        }
        Frontend.RunMultipleAction(VdcActionType.StartGlusterVolume, list);
    }

    /** Validates the create-volume dialog and submits CreateGlusterVolume. */
    private void onCreateVolume() {
        VolumeModel volumeModel = (VolumeModel) getWindow();

        if (!volumeModel.validate()) {
            return;
        }

        Guid clusterId = ((VDSGroup) volumeModel.getCluster().getSelectedItem()).getId();
        GlusterVolumeEntity volume = new GlusterVolumeEntity();
        volume.setClusterId(clusterId);
        volume.setName((String) volumeModel.getName().getEntity());
        GlusterVolumeType type = (GlusterVolumeType) volumeModel.getTypeList().getSelectedItem();

        // Stripe and replica counts are mutually exclusive by volume type.
        if (type == GlusterVolumeType.STRIPE || type == GlusterVolumeType.DISTRIBUTED_STRIPE) {
            volume.setStripeCount(volumeModel.getStripeCountValue());
        }
        else if (type == GlusterVolumeType.REPLICATE || type == GlusterVolumeType.DISTRIBUTED_REPLICATE) {
            volume.setReplicaCount(volumeModel.getReplicaCountValue());
        }
        volume.setVolumeType(type);

        if ((Boolean) volumeModel.getTcpTransportType().getEntity()) {
            volume.getTransportTypes().add(TransportType.TCP);
        }
        if ((Boolean) volumeModel.getRdmaTransportType().getEntity()) {
            volume.getTransportTypes().add(TransportType.RDMA);
        }

        ArrayList<GlusterBrickEntity> brickList = new ArrayList<GlusterBrickEntity>();
        for (Object model : volumeModel.getBricks().getItems()) {
            brickList.add((GlusterBrickEntity) ((EntityModel) model).getEntity());
        }
        volume.setBricks(brickList);

        if ((Boolean) volumeModel.getNfs_accecssProtocol().getEntity()) {
            volume.enableNFS();
        }
        else {
            volume.disableNFS();
        }

        if ((Boolean) volumeModel.getCifs_accecssProtocol().getEntity()) {
            volume.enableCifs();
        }
        else {
            volume.disableCifs();
        }

        volume.setAccessControlList((String) volumeModel.getAllowAccess().getEntity());

        volumeModel.StartProgress(null);

        CreateGlusterVolumeParameters parameter = new CreateGlusterVolumeParameters(volume);

        Frontend.RunAction(VdcActionType.CreateGlusterVolume, parameter, new IFrontendActionAsyncCallback() {
            @Override
            public void Executed(FrontendActionAsyncResult result) {
                VolumeListModel localModel = (VolumeListModel) result.getState();
                localModel.postOnCreateVolume(result.getReturnValue());
            }
        }, this);
    }

    /** Closes the create-volume dialog when the action succeeded. */
    public void postOnCreateVolume(VdcReturnValueBase returnValue) {
        VolumeModel model = (VolumeModel) getWindow();

        model.StopProgress();

        if (returnValue != null && returnValue.getSucceeded()) {
            cancel();
        }
    }

    @Override
    protected String getListName() {
        return "VolumeListModel"; //$NON-NLS-1$
    }

    private SystemTreeItemModel systemTreeSelectedItem;

    @Override
    public SystemTreeItemModel getSystemTreeSelectedItem() {
        return systemTreeSelectedItem;
    }

    @Override
    public void setSystemTreeSelectedItem(SystemTreeItemModel value) {
        if (systemTreeSelectedItem != value) {
            systemTreeSelectedItem = value;
            OnSystemTreeSelectedItemChanged();
        }
    }

    private void OnSystemTreeSelectedItemChanged() {
        updateActionAvailability();
    }

    @Override
    public boolean IsSearchStringMatch(String searchString) {
        return searchString.trim().toLowerCase().startsWith("volume"); //$NON-NLS-1$
    }
}
package org.cojen.tupl.util;

import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;

import org.cojen.tupl.io.UnsafeAccess;
import org.cojen.tupl.io.Utils;

/**
 * Non-fair latch supporting exclusive and shared modes, implemented with
 * sun.misc.Unsafe CAS operations over an int state word and a lock-free queue
 * of waiting threads. State encoding: 0 = unlatched, 0x80000000 = held
 * exclusively, positive values = shared hold count.
 *
 * NOTE(review): the visible chunk ends mid-way through first(); the remainder
 * of the class (spin(), WaitNode, Shared, Timed, TimedShared) lies past it.
 */
@SuppressWarnings("restriction")
public class Latch {
    public static final int UNLATCHED = 0, EXCLUSIVE = 0x80000000, SHARED = 1;

    static final int SPIN_LIMIT = Runtime.getRuntime().availableProcessors() > 1 ? 1 << 10 : 1;

    // TODO: Switch to VarHandle when available and utilize specialized operations.
    static final sun.misc.Unsafe UNSAFE = UnsafeAccess.obtain();

    static final long STATE_OFFSET, FIRST_OFFSET, LAST_OFFSET;
    static final long WAITER_OFFSET;

    static {
        try {
            // Reduce the risk of "lost unpark" due to classloading.
            Class<?> clazz = LockSupport.class;

            clazz = Latch.class;
            STATE_OFFSET = UNSAFE.objectFieldOffset(clazz.getDeclaredField("mLatchState"));
            FIRST_OFFSET = UNSAFE.objectFieldOffset(clazz.getDeclaredField("mLatchFirst"));
            LAST_OFFSET = UNSAFE.objectFieldOffset(clazz.getDeclaredField("mLatchLast"));

            clazz = WaitNode.class;
            WAITER_OFFSET = UNSAFE.objectFieldOffset(clazz.getDeclaredField("mWaiter"));
        } catch (Throwable e) {
            throw Utils.rethrow(e);
        }
    }

    // 0 = unlatched, EXCLUSIVE bit = exclusive hold, >0 = shared hold count.
    volatile int mLatchState;

    // Queue of waiting threads.
    private transient volatile WaitNode mLatchFirst;
    private transient volatile WaitNode mLatchLast;

    public Latch() {
    }

    /**
     * @param initialState UNLATCHED, EXCLUSIVE, or SHARED
     */
    public Latch(int initialState) {
        // Assume that this latch instance is published to other threads safely, and so a
        // volatile store isn't required.
        UNSAFE.putInt(this, STATE_OFFSET, initialState);
    }

    // True only for an exclusive hold (not for any shared state).
    boolean isHeldExclusive() {
        return mLatchState == EXCLUSIVE;
    }

    /**
     * Try to acquire the exclusive latch, barging ahead of any waiting threads if possible.
     */
    public boolean tryAcquireExclusive() {
        return doTryAcquireExclusive();
    }

    // Single CAS from unlatched to exclusive; no queueing, no blocking.
    private boolean doTryAcquireExclusive() {
        return mLatchState == 0 && UNSAFE.compareAndSwapInt(this, STATE_OFFSET, 0, EXCLUSIVE);
    }

    /**
     * Attempt to acquire the exclusive latch, aborting if interrupted.
     *
     * @param nanosTimeout pass negative for infinite timeout
     */
    public boolean tryAcquireExclusiveNanos(long nanosTimeout) throws InterruptedException {
        return doTryAcquireExclusiveNanos(nanosTimeout);
    }

    private boolean doTryAcquireExclusiveNanos(long nanosTimeout) throws InterruptedException {
        if (doTryAcquireExclusive()) {
            return true;
        }
        if (nanosTimeout == 0) {
            return false;
        }
        boolean result;
        try {
            result = acquire(new Timed(nanosTimeout));
        } catch (Throwable e) {
            // Possibly an OutOfMemoryError.
            if (nanosTimeout < 0) {
                // Caller isn't expecting an exception, so spin.
                while (!doTryAcquireExclusive());
                return true;
            }
            return false;
        }
        return checkTimedResult(result, nanosTimeout);
    }

    /**
     * Acquire the exclusive latch, barging ahead of any waiting threads if possible.
     */
    public void acquireExclusive() {
        if (!doTryAcquireExclusive()) {
            doAcquireExclusive();
        }
    }

    /**
     * Caller should have already called tryAcquireExclusive.
     */
    private void doAcquireExclusive() {
        try {
            acquire(new WaitNode());
        } catch (Throwable e) {
            // Possibly an OutOfMemoryError. Caller isn't expecting an exception, so spin.
            while (!doTryAcquireExclusive());
        }
    }

    /**
     * Acquire the exclusive latch, aborting if interrupted.
     */
    public void acquireExclusiveInterruptibly() throws InterruptedException {
        doTryAcquireExclusiveNanos(-1);
    }

    /**
     * Downgrade the held exclusive latch into a shared latch. Caller must later call
     * releaseShared instead of releaseExclusive.
     */
    public final void downgrade() {
        // Switch from exclusive to a shared count of one.
        mLatchState = 1;

        while (true) {
            // Sweep through the queue, waking up a contiguous run of shared waiters.
            final WaitNode first = first();
            if (first == null) {
                return;
            }

            WaitNode node = first;
            while (true) {
                Thread waiter = node.mWaiter;
                if (waiter != null) {
                    if (node instanceof Shared) {
                        // Grant a share optimistically, then hand off to the waiter.
                        UNSAFE.getAndAddInt(this, STATE_OFFSET, 1);
                        if (UNSAFE.compareAndSwapObject(node, WAITER_OFFSET, waiter, null)) {
                            LockSupport.unpark(waiter);
                        } else {
                            // Already unparked, so fix the share count.
                            UNSAFE.getAndAddInt(this, STATE_OFFSET, -1);
                        }
                    } else {
                        // Hit an exclusive waiter; stop the sweep.
                        if (node != first) {
                            // Advance the queue past any shared waiters that were encountered.
                            mLatchFirst = node;
                        }
                        return;
                    }
                }

                WaitNode next = node.get();
                if (next == null) {
                    // Queue is now empty, unless an enqueue is in progress.
                    if (UNSAFE.compareAndSwapObject(this, LAST_OFFSET, node, null)) {
                        UNSAFE.compareAndSwapObject(this, FIRST_OFFSET, first, null);
                        return;
                    }
                    // Sweep from the start again.
                    break;
                }

                node = next;
            }
        }
    }

    /**
     * Release the held exclusive latch.
     */
    public final void releaseExclusive() {
        int trials = 0;
        while (true) {
            WaitNode last = mLatchLast;

            if (last == null) {
                // No waiters, so release the latch.
                mLatchState = 0;

                // Need to check if any waiters again, due to race with enqueue. If cannot
                // immediately re-acquire the latch, then let the new owner (which barged in)
                // unpark the waiters when it releases the latch.
                last = mLatchLast;
                if (last == null || !UNSAFE.compareAndSwapInt(this, STATE_OFFSET, 0, EXCLUSIVE)) {
                    return;
                }
            }

            // Although the last waiter has been observed to exist, the first waiter field
            // might not be set yet.
            WaitNode first = mLatchFirst;

            unpark: if (first != null) {
                Thread waiter = first.mWaiter;

                if (waiter != null) {
                    if (first instanceof Shared) {
                        // TODO: can this be combined into one downgrade step?
                        downgrade();
                        doReleaseShared();
                        return;
                    }

                    if (!first.mFair) {
                        // Unpark the waiter, but allow another thread to barge in.
                        mLatchState = 0;
                        LockSupport.unpark(waiter);
                        return;
                    }
                }

                // Remove first from the queue.
                {
                    WaitNode next = first.get();
                    if (next != null) {
                        mLatchFirst = next;
                    } else {
                        // Queue is now empty, unless an enqueue is in progress.
                        if (last != first ||
                            !UNSAFE.compareAndSwapObject(this, LAST_OFFSET, last, null))
                        {
                            break unpark;
                        }
                        UNSAFE.compareAndSwapObject(this, FIRST_OFFSET, last, null);
                    }
                }

                if (waiter != null &&
                    UNSAFE.compareAndSwapObject(first, WAITER_OFFSET, waiter, null))
                {
                    // Fair handoff to waiting thread.
                    LockSupport.unpark(waiter);
                    return;
                }
            }

            trials = spin(trials);
        }
    }

    /**
     * Convenience method, which releases the held exclusive or shared latch.
     *
     * @param exclusive call releaseExclusive if true, else call releaseShared
     */
    public final void release(boolean exclusive) {
        if (exclusive) {
            releaseExclusive();
        } else {
            releaseShared();
        }
    }

    /**
     * Releases an exclusive or shared latch.
     */
    public final void releaseEither() {
        // TODO: can be non-volatile read
        if (mLatchState == EXCLUSIVE) {
            releaseExclusive();
        } else {
            releaseShared();
        }
    }

    /**
     * Try to acquire a shared latch, barging ahead of any waiting threads if possible.
     */
    public boolean tryAcquireShared() {
        return doTryAcquireShared();
    }

    // Single CAS incrementing the share count; refuses to barge past a
    // non-shared (exclusive) waiter at the head of the queue.
    private boolean doTryAcquireShared() {
        WaitNode first = mLatchFirst;
        if (first != null && !(first instanceof Shared)) {
            return false;
        }
        int state = mLatchState;
        return state >= 0 && UNSAFE.compareAndSwapInt(this, STATE_OFFSET, state, state + 1);
    }

    /**
     * Attempt to acquire a shared latch, aborting if interrupted.
     *
     * @param nanosTimeout pass negative for infinite timeout
     */
    public boolean tryAcquireSharedNanos(long nanosTimeout) throws InterruptedException {
        return doTryAcquireSharedNanos(nanosTimeout);
    }

    private final boolean doTryAcquireSharedNanos(long nanosTimeout) throws InterruptedException {
        WaitNode first = mLatchFirst;
        if (first == null || first instanceof Shared) {
            int trials = 0;
            int state;
            while ((state = mLatchState) >= 0) {
                if (UNSAFE.compareAndSwapInt(this, STATE_OFFSET, state, state + 1)) {
                    return true;
                }
                // Spin even if timeout is zero. The timeout applies to a blocking acquire.
                trials = spin(trials);
            }
        }

        if (nanosTimeout == 0) {
            return false;
        }

        boolean result;
        try {
            result = acquire(new TimedShared(nanosTimeout));
        } catch (Throwable e) {
            // Possibly an OutOfMemoryError.
            if (nanosTimeout < 0) {
                // Caller isn't expecting an exception, so spin.
                while (!doTryAcquireShared());
                return true;
            }
            return false;
        }

        return checkTimedResult(result, nanosTimeout);
    }

    // Maps a timed-acquire outcome to the API contract: a false result with the
    // thread interrupted (or with an infinite timeout) surfaces as
    // InterruptedException; otherwise false means timed out.
    private static boolean checkTimedResult(boolean result, long nanosTimeout)
        throws InterruptedException
    {
        if (!result && (Thread.interrupted() || nanosTimeout < 0)) {
            InterruptedException e;
            try {
                e = new InterruptedException();
            } catch (Throwable e2) {
                // Possibly an OutOfMemoryError.
                if (nanosTimeout < 0) {
                    throw e2;
                }
                return false;
            }
            throw e;
        }

        return result;
    }

    /**
     * Like tryAcquireShared, except blocks if an exclusive latch is held.
     *
     * @return false if not acquired due to contention with other shared requests
     */
    public boolean acquireSharedUncontended() {
        WaitNode first = mLatchFirst;
        if (first == null || first instanceof Shared) {
            int state = mLatchState;
            if (state >= 0) {
                return UNSAFE.compareAndSwapInt(this, STATE_OFFSET, state, state + 1);
            }
        }

        try {
            acquire(new Shared());
        } catch (Throwable e) {
            // Possibly an OutOfMemoryError. Caller isn't expecting an exception, so spin.
            while (!doTryAcquireShared());
        }

        return true;
    }

    /**
     * Like tryAcquireSharedNanos, except blocks if an exclusive latch is held.
     *
     * @param nanosTimeout pass negative for infinite timeout
     * @return -1 if not acquired due to contention with other shared requests, 0 if timed out,
     * or 1 if acquired
     */
    public int acquireSharedUncontendedNanos(long nanosTimeout) throws InterruptedException {
        WaitNode first = mLatchFirst;
        if (first == null || first instanceof Shared) {
            int state = mLatchState;
            if (state >= 0) {
                return UNSAFE.compareAndSwapInt(this, STATE_OFFSET, state, state + 1) ? 1 : -1;
            }
        }

        boolean result;
        try {
            result = acquire(new TimedShared(nanosTimeout));
        } catch (Throwable e) {
            // Possibly an OutOfMemoryError.
            if (nanosTimeout < 0) {
                // Caller isn't expecting an exception, so spin.
                while (!doTryAcquireShared());
                return 1;
            }
            return 0;
        }

        return checkTimedResult(result, nanosTimeout) ? 1 : 0;
    }

    /**
     * Acquire a shared latch, barging ahead of any waiting threads if possible.
     */
    public void acquireShared() {
        if (!tryAcquireSharedSpin()) {
            try {
                acquire(new Shared());
            } catch (Throwable e) {
                // Possibly an OutOfMemoryError. Caller isn't expecting an exception, so spin.
                while (!doTryAcquireShared());
            }
        }
    }

    // CAS loop variant of doTryAcquireShared that retries under contention.
    private boolean tryAcquireSharedSpin() {
        WaitNode first = mLatchFirst;
        if (first == null || first instanceof Shared) {
            int trials = 0;
            int state;
            while ((state = mLatchState) >= 0) {
                if (UNSAFE.compareAndSwapInt(this, STATE_OFFSET, state, state + 1)) {
                    return true;
                }
                trials = spin(trials);
            }
        }
        return false;
    }

    /**
     * Acquire a shared latch, aborting if interrupted.
     */
    public void acquireSharedInterruptibly() throws InterruptedException {
        doTryAcquireSharedNanos(-1);
    }

    /**
     * Attempt to upgrade a held shared latch into an exclusive latch. Upgrade fails if shared
     * latch is held by more than one thread. If successful, caller must later call
     * releaseExclusive instead of releaseShared.
     */
    public boolean tryUpgrade() {
        return doTryUpgrade();
    }

    private boolean doTryUpgrade() {
        while (true) {
            int state = mLatchState;
            if ((state & ~EXCLUSIVE) != 1) {
                // More than one shared holder (or none); cannot upgrade.
                return false;
            }
            if (UNSAFE.compareAndSwapInt(this, STATE_OFFSET, state, EXCLUSIVE)) {
                return true;
            }
            // Try again if exclusive bit flipped. Don't bother with spin yielding, because the
            // exclusive bit usually switches to 1, not 0.
        }
    }

    /**
     * Release a held shared latch.
     */
    public void releaseShared() {
        doReleaseShared();
    }

    private void doReleaseShared() {
        int trials = 0;
        while (true) {
            int state = mLatchState;
            WaitNode last = mLatchLast;

            if (last == null) {
                // No waiters, so release the latch.
                if (UNSAFE.compareAndSwapInt(this, STATE_OFFSET, state, --state)) {
                    if (state == 0) {
                        // Need to check if any waiters again, due to race with enqueue. If
                        // cannot immediately re-acquire the latch, then let the new owner
                        // (which barged in) unpark the waiters when it releases the latch.
                        last = mLatchLast;
                        if (last != null &&
                            UNSAFE.compareAndSwapInt(this, STATE_OFFSET, 0, EXCLUSIVE))
                        {
                            releaseExclusive();
                        }
                    }
                    return;
                }
            } else if (state == 1) {
                // Try to switch to exclusive, and then let releaseExclusive deal with
                // unparking the waiters.
                if (UNSAFE.compareAndSwapInt(this, STATE_OFFSET, 1, EXCLUSIVE) || doTryUpgrade()) {
                    releaseExclusive();
                    return;
                }
            } else if (UNSAFE.compareAndSwapInt(this, STATE_OFFSET, state, --state)) {
                return;
            }

            trials = spin(trials);
        }
    }

    // Core blocking path: enqueue a wait node, park until the node's tryAcquire
    // succeeds, handle aborted (timed out / interrupted) waits, and dequeue.
    private boolean acquire(final WaitNode node) {
        node.mWaiter = Thread.currentThread();
        WaitNode prev = enqueue(node);
        int acquireResult = node.tryAcquire(this);

        if (acquireResult < 0) {
            int denied = 0;
            while (true) {
                boolean parkAbort = node.park(this);

                acquireResult = node.tryAcquire(this);

                if (acquireResult >= 0) {
                    // Latch acquired after parking.
                    break;
                }

                if (parkAbort) {
                    if (!UNSAFE.compareAndSwapObject
                        (node, WAITER_OFFSET, Thread.currentThread(), null))
                    {
                        // Fair handoff just occurred.
                        return true;
                    }

                    // Remove the node from the queue. If it's the first, it cannot be safely
                    // removed without the latch having been properly acquired. So let it
                    // linger around until the latch is released.
                    if (prev != null) {
                        remove(node, prev);
                    }

                    return false;
                }

                // Lost the race. Request fair handoff.
                if (denied++ == 0) {
                    node.mFair = true;
                }
            }
        }

        if (acquireResult != 0) {
            // Only remove the node if requested to do so.
            return true;
        }

        // Remove the node now, releasing memory.
        if (mLatchFirst != node) {
            remove(node, prev);
            return true;
        }

        // Removing the first node requires special attention. Because the latch is now held by
        // the current thread, no other dequeues are in progress, but enqueues still are.
        while (true) {
            WaitNode next = node.get();
            if (next != null) {
                mLatchFirst = next;
                return true;
            } else {
                // Queue is now empty, unless an enqueue is in progress.
                WaitNode last = mLatchLast;
                if (last == node &&
                    UNSAFE.compareAndSwapObject(this, LAST_OFFSET, last, null))
                {
                    UNSAFE.compareAndSwapObject(this, FIRST_OFFSET, last, null);
                    return true;
                }
            }
        }
    }

    // Lock-free append: atomically swap in the new tail, then link from the old
    // tail (or publish as first when the queue was empty).
    private WaitNode enqueue(final WaitNode node) {
        WaitNode prev = (WaitNode) UNSAFE.getAndSetObject(this, LAST_OFFSET, node);

        if (prev == null) {
            mLatchFirst = node;
        } else {
            prev.set(node);
            WaitNode pp = prev.mPrev;
            if (pp != null) {
                // The old last node was intended to be removed, but the last node cannot
                // be removed unless it's also the first. Bypass it now that a new last
                // node has been enqueued.
                pp.lazySet(node);
            }
        }

        return prev;
    }

    /**
     * @param node node to remove, not null
     * @param prev previous node, not null
     */
    private void remove(final WaitNode node, final WaitNode prev) {
        WaitNode next = node.get();

        if (next == null) {
            // Removing the last node creates race conditions with enqueues. Instead, stash a
            // reference to the previous node and let the enqueue deal with it after a new node
            // has been enqueued.
            node.mPrev = prev;
            next = node.get();

            // Double check in case an enqueue just occurred that may have failed to notice the
            // previous node assignment.
            if (next == null) {
                return;
            }
        }

        while (next.mWaiter == null) {
            // Skip more nodes if possible.
            WaitNode nextNext = next.get();
            if (nextNext == null) {
                break;
            }
            next = nextNext;
        }

        // Bypass the removed node, allowing it to be released.
        prev.lazySet(next);
    }

    // Returns the first waiter, spinning while an in-progress enqueue has set
    // the tail but not yet published the head.
    // NOTE(review): the visible chunk is cut off inside this method; its body
    // continues past the end of this region.
    private WaitNode first() {
        int trials = 0;
        while (true) {
            WaitNode last = mLatchLast;
            if (last == null) {
                return null;
            }
            // Although the last waiter has been observed to exist, the first waiter field
            // might not be set yet.
WaitNode first = mLatchFirst; if (first != null) { return first; } trials = spin(trials); } } public final boolean hasQueuedThreads() { return mLatchLast != null; } @Override public String toString() { StringBuilder b = new StringBuilder(); appendMiniString(b, this); b.append(" {state="); int state = mLatchState; if (state == 0) { b.append("unlatched"); } else if (state == EXCLUSIVE) { b.append("exclusive"); } else if (state >= 0) { b.append("shared:").append(state); } else { b.append("illegal:").append(state); } WaitNode last = mLatchLast; if (last != null) { b.append(", "); WaitNode first = mLatchFirst; if (first == last) { b.append("firstQueued=").append(last); } else if (first == null) { b.append("lastQueued=").append(last); } else { b.append("firstQueued=").append(first) .append(", lastQueued=").append(last); } } return b.append('}').toString(); } static void appendMiniString(StringBuilder b, Object obj) { if (obj == null) { b.append("null"); return; } b.append(obj.getClass().getName()).append('@').append(Integer.toHexString(obj.hashCode())); } /** * @return new trials value */ static int spin(int trials) { trials++; if (trials >= SPIN_LIMIT) { Thread.yield(); trials = 0; } return trials; } /** * Atomic reference is to the next node in the chain. */ @SuppressWarnings("serial") static class WaitNode extends AtomicReference<WaitNode> { volatile Thread mWaiter; volatile boolean mFair; // Only set if node was deleted and must be bypassed when a new node is enqueued. 
volatile WaitNode mPrev; /** * @return true if timed out or interrupted */ boolean park(Latch latch) { LockSupport.park(latch); return false; } /** * @return <0 if thread should park; 0 if acquired and node should also be removed; >0 * if acquired and node should not be removed */ int tryAcquire(Latch latch) { int trials = 0; while (true) { for (int i=0; i<SPIN_LIMIT; i++) { boolean acquired = latch.doTryAcquireExclusive(); Object waiter = mWaiter; if (waiter == null) { // Fair handoff, and so node is no longer in the queue. return 1; } if (!acquired) { continue; } // Acquired, so no need to reference the waiter anymore. if (!mFair) { UNSAFE.putOrderedObject(this, WAITER_OFFSET, null); } else if (!UNSAFE.compareAndSwapObject(this, WAITER_OFFSET, waiter, null)) { return 1; } return 0; } if (++trials >= SPIN_LIMIT >> 1) { return -1; } // Yield to avoid parking. Thread.yield(); } } @Override public String toString() { StringBuilder b = new StringBuilder(); appendMiniString(b, this); b.append(" {waiter=").append(mWaiter); b.append(", fair=").append(mFair); b.append(", next="); appendMiniString(b, get()); b.append(", prev="); appendMiniString(b, mPrev); return b.append('}').toString(); } } @SuppressWarnings("serial") static class Timed extends WaitNode { private long mNanosTimeout; private long mEndNanos; Timed(long nanosTimeout) { mNanosTimeout = nanosTimeout; if (nanosTimeout >= 0) { mEndNanos = System.nanoTime() + nanosTimeout; } } @Override final boolean park(Latch latch) { if (mNanosTimeout < 0) { LockSupport.park(latch); return Thread.currentThread().isInterrupted(); } else { LockSupport.parkNanos(latch, mNanosTimeout); if (Thread.currentThread().isInterrupted()) { return true; } return (mNanosTimeout = mEndNanos - System.nanoTime()) <= 0; } } } @SuppressWarnings("serial") static class Shared extends WaitNode { /** * @return <0 if thread should park; 0 if acquired and node should also be removed; >0 * if acquired and node should not be removed */ @Override final 
int tryAcquire(Latch latch) { // Note: If mWaiter is null, then handoff was fair. The shared count should already // be correct, and this node won't be in the queue anymore. WaitNode first = latch.mLatchFirst; if (first != null && !(first instanceof Shared)) { return mWaiter == null ? 1 : -1; } int trials = 0; while (true) { if (mWaiter == null) { return 1; } int state = latch.mLatchState; if (state < 0) { return state; } if (UNSAFE.compareAndSwapInt(latch, STATE_OFFSET, state, state + 1)) { // Acquired, so no need to reference the thread anymore. Object waiter = mWaiter; if (waiter == null || !UNSAFE.compareAndSwapObject(this, WAITER_OFFSET, waiter, null)) { if (!UNSAFE.compareAndSwapInt(latch, STATE_OFFSET, state + 1, state)) { UNSAFE.getAndAddInt(latch, STATE_OFFSET, -1); } return 1; } // Only remove node if this thread is the first shared latch owner. This // guarantees that no other thread will be concurrently removing nodes. // Nodes for other threads will have their nodes removed later, as latches // are released. Early removal is a garbage collection optimization. return state; } trials = spin(trials); } } } @SuppressWarnings("serial") static class TimedShared extends Shared { private long mNanosTimeout; private long mEndNanos; TimedShared(long nanosTimeout) { mNanosTimeout = nanosTimeout; if (nanosTimeout >= 0) { mEndNanos = System.nanoTime() + nanosTimeout; } } @Override final boolean park(Latch latch) { if (mNanosTimeout < 0) { LockSupport.park(latch); return Thread.currentThread().isInterrupted(); } else { LockSupport.parkNanos(latch, mNanosTimeout); if (Thread.currentThread().isInterrupted()) { return true; } return (mNanosTimeout = mEndNanos - System.nanoTime()) <= 0; } } } }
*//*
package org.safehaus.kiskis.mgmt.server.ui.modules.oozie.wizard;

import com.vaadin.data.Item;
import com.vaadin.event.ItemClickEvent;
import com.vaadin.terminal.Sizeable;
import com.vaadin.ui.Alignment;
import com.vaadin.ui.Button;
import com.vaadin.ui.GridLayout;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.Panel;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import org.safehaus.kiskis.mgmt.server.ui.MgmtApplication;
import org.safehaus.kiskis.mgmt.server.ui.modules.hadoop.config.ClustersTable;
import org.safehaus.kiskis.mgmt.server.ui.modules.oozie.OozieDAO;
import org.safehaus.kiskis.mgmt.shared.protocol.Agent;
import org.safehaus.kiskis.mgmt.shared.protocol.HadoopClusterInfo;
import org.safehaus.kiskis.mgmt.shared.protocol.settings.Common;

/**
 * First step of the Oozie installation wizard: shows a welcome banner, the Oozie
 * logo, and a table of existing Hadoop clusters. Pressing Start collects all node
 * UUIDs of the selected cluster, resolves them to agents and advances the wizard.
 *
 * @author dilshat
 */
public class StepStart extends Panel {

    private ClustersTable table;      // table of Hadoop clusters, rebuilt on Refresh
    private Item selectedItem;        // row last clicked in the clusters table

    public StepStart(final Wizard wizard) {
        setSizeFull();

        GridLayout gridLayout = new GridLayout(10, 6);
        gridLayout.setSizeFull();

        // FIX: the banner previously said "HBase Installation Wizard" — a copy-paste
        // leftover; this is the Oozie module's wizard.
        Label welcomeMsg = new Label(
                "<center><h2>Welcome to Oozie Installation Wizard!</h2><br/>"
                + "Please click Start button to continue</center>");
        welcomeMsg.setContentMode(Label.CONTENT_XHTML);
        gridLayout.addComponent(welcomeMsg, 3, 1, 6, 2);

        Label logoImg = new Label(
                String.format("<img src='http://%s:%s/oozie_logo.png' width='150px'/>",
                        MgmtApplication.APP_URL, Common.WEB_SERVER_PORT));
        logoImg.setContentMode(Label.CONTENT_XHTML);
        logoImg.setHeight(150, Sizeable.UNITS_PIXELS);
        logoImg.setWidth(220, Sizeable.UNITS_PIXELS);
        gridLayout.addComponent(logoImg, 1, 3, 2, 5);

        HorizontalLayout hl = new HorizontalLayout();
        Button next = new Button("Start");
        next.setWidth(100, Sizeable.UNITS_PIXELS);
        next.addListener(new Button.ClickListener() {

            @Override
            public void buttonClick(Button.ClickEvent event) {
//                Set<Agent> selectedAgents = MgmtApplication.getSelectedAgents();
//                if (Util.isCollectionEmpty(selectedAgents)) {
//                    show("Select nodes in the tree on the left first");
//                } else {
//                    wizard.getConfig().reset();
//                    wizard.getConfig().setAgents(selectedAgents);
//                    wizard.next();
                if (selectedItem != null) {
                    // Gather every node of the selected Hadoop cluster: data nodes,
                    // task trackers, job tracker, name node and secondary name node.
                    UUID uid = (UUID) selectedItem.getItemProperty(
                            HadoopClusterInfo.UUID_LABEL).getValue();
                    HadoopClusterInfo cluster = OozieDAO.getHadoopClusterInfo(uid);
                    Set<UUID> dataNodes = new HashSet<UUID>(cluster.getDataNodes());
                    Set<UUID> taskTrackers = new HashSet<UUID>(cluster.getTaskTrackers());
                    dataNodes.addAll(taskTrackers);
                    dataNodes.add(cluster.getJobTracker());
                    dataNodes.add(cluster.getNameNode());
                    dataNodes.add(cluster.getSecondaryNameNode());
                    Set<Agent> set = OozieDAO.getAgents(dataNodes);
                    wizard.getConfig().reset();
                    wizard.getConfig().setAgents(set);
                    wizard.next();
                } else {
                    show("Please select Hadoop cluster first");
                }
            }
        });
        Button refresh = new Button("Refresh");
        refresh.setWidth(100, Sizeable.UNITS_PIXELS);
        refresh.addListener(new Button.ClickListener() {

            @Override
            public void buttonClick(Button.ClickEvent event) {
                // NOTE(review): this only replaces the field; the old table remains the
                // component added to the panel. Verify against the panel's refresh flow.
                table = getTable();
            }
        });
        hl.addComponent(refresh);
        hl.addComponent(next);
        gridLayout.addComponent(hl, 6, 4, 6, 4);
        // FIX: alignment was previously set on "refresh", which is a child of hl, not
        // of the grid — a no-op. Align the button row itself, as intended.
        gridLayout.setComponentAlignment(hl, Alignment.BOTTOM_RIGHT);

        addComponent(gridLayout);
        table = getTable();
        addComponent(table);
    }

    // Shows a transient notification in the enclosing window.
    private void show(String notification) {
        getWindow().showNotification(notification);
    }

    // Builds a fresh clusters table whose row clicks record the selected item.
    private ClustersTable getTable() {
        table = new ClustersTable();
        table.addListener(new ItemClickEvent.ItemClickListener() {

            @Override
            public void itemClick(ItemClickEvent event) {
                selectedItem = event.getItem();
            }
        });
        return table;
    }
}
package org.dynmap.bukkit;

import org.bukkit.Bukkit;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.scheduler.BukkitTask;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.Proxy;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.UUID;
import java.util.logging.Level;

/**
 * <p> The metrics class obtains data about a plugin and submits statistics about it to the metrics backend. </p> <p>
 * Public methods provided by this class: </p>
 * <code>
 * Graph createGraph(String name); <br>
 * void addCustomData(BukkitMetrics.Plotter plotter); <br>
 * void start(); <br>
 * </code>
 */
public class Metrics {

    /**
     * The current revision number
     */
    private final static int REVISION = 6;

    /**
     * The base url of the metrics domain
     */
    private static final String BASE_URL = "http://mcstats.org";

    /**
     * The url used to report a server's status
     */
    private static final String REPORT_URL = "/report/%s";

    /**
     * The separator to use for custom data. This MUST NOT change unless you are hosting your own version of metrics and
     * want to change it.
     */
    private static final String CUSTOM_DATA_SEPARATOR = "~~";

    /**
     * Interval of time to ping (in minutes)
     */
    private static final int PING_INTERVAL = 10;

    /**
     * The plugin this metrics submits for
     */
    private final Plugin plugin;

    /**
     * All of the custom graphs to submit to metrics
     */
    private final Set<Graph> graphs = Collections.synchronizedSet(new HashSet<Graph>());

    /**
     * The default graph, used for addCustomData when you don't want a specific graph
     */
    private final Graph defaultGraph = new Graph("Default");

    /**
     * The plugin configuration file
     */
    private final YamlConfiguration configuration;

    /**
     * The plugin configuration file
     */
    private final File configurationFile;

    /**
     * Unique server id
     */
    private final String guid;

    /**
     * Debug mode
     */
    private final boolean debug;

    /**
     * Lock for synchronization
     */
    private final Object optOutLock = new Object();

    /**
     * The scheduled task
     */
    private volatile BukkitTask task = null;

    public Metrics(final Plugin plugin) throws IOException {
        if (plugin == null) {
            throw new IllegalArgumentException("Plugin cannot be null");
        }

        this.plugin = plugin;

        // load the config
        configurationFile = getConfigFile();
        configuration = YamlConfiguration.loadConfiguration(configurationFile);

        // add some defaults
        configuration.addDefault("opt-out", false);
        configuration.addDefault("guid", UUID.randomUUID().toString());
        configuration.addDefault("debug", false);

        // Do we need to create the file?
        if (configuration.get("guid", null) == null) {
            configuration.options().header("http://mcstats.org").copyDefaults(true);
            configuration.save(configurationFile);
        }

        // Load the guid then
        guid = configuration.getString("guid");
        debug = configuration.getBoolean("debug", false);
    }

    /**
     * Construct and create a Graph that can be used to separate specific plotters to their own graphs on the metrics
     * website. Plotters can be added to the graph object returned.
     *
     * @param name The name of the graph
     * @return Graph object created. Will never return NULL under normal circumstances unless bad parameters are given
     */
    public Graph createGraph(final String name) {
        if (name == null) {
            throw new IllegalArgumentException("Graph name cannot be null");
        }

        // Construct the graph object
        final Graph graph = new Graph(name);

        // Now we can add our graph
        graphs.add(graph);

        // and return back
        return graph;
    }

    /**
     * Add a Graph object to BukkitMetrics that represents data for the plugin that should be sent to the backend
     *
     * @param graph The name of the graph
     */
    public void addGraph(final Graph graph) {
        if (graph == null) {
            throw new IllegalArgumentException("Graph cannot be null");
        }

        graphs.add(graph);
    }

    /**
     * Adds a custom data plotter to the default graph
     *
     * @param plotter The plotter to use to plot custom data
     */
    public void addCustomData(final Plotter plotter) {
        if (plotter == null) {
            throw new IllegalArgumentException("Plotter cannot be null");
        }

        // Add the plotter to the graph o/
        defaultGraph.addPlotter(plotter);

        // Ensure the default graph is included in the submitted graphs
        graphs.add(defaultGraph);
    }

    /**
     * Start measuring statistics. This will immediately create an async repeating task as the plugin and send the
     * initial data to the metrics backend, and then after that it will post in increments of PING_INTERVAL * 1200
     * ticks.
     *
     * @return True if statistics measuring is running, otherwise false.
     */
    public boolean start() {
        synchronized (optOutLock) {
            // Did we opt out?
            if (isOptOut()) {
                return false;
            }

            // Is metrics already running?
            if (task != null) {
                return true;
            }

            // Begin hitting the server with glorious data
            try {
                task = plugin.getServer().getScheduler().runTaskTimerAsynchronously(plugin, new Runnable() {

                    private boolean firstPost = true;

                    public void run() {
                        try {
                            // This has to be synchronized or it can collide with the disable method.
                            synchronized (optOutLock) {
                                // Disable Task, if it is running and the server owner decided to opt-out
                                if (isOptOut() && task != null) {
                                    task.cancel();
                                    task = null;
                                    // Tell all plotters to stop gathering information.
                                    for (Graph graph : graphs) {
                                        graph.onOptOut();
                                    }
                                }
                            }

                            // We use the inverse of firstPost because if it is the first time we are posting,
                            // it is not a interval ping, so it evaluates to FALSE
                            // Each time thereafter it will evaluate to TRUE, i.e PING!
                            postPlugin(!firstPost);

                            // After the first post we set firstPost to false
                            // Each post thereafter will be a ping
                            firstPost = false;
                        } catch (IOException e) {
                            if (debug) {
                                Bukkit.getLogger().log(Level.INFO, "[Metrics] " + e.getMessage());
                            }
                        }
                    }
                }, 0, PING_INTERVAL * 1200);

                return true;
            } catch (NoSuchMethodError nsme) {
                // Handle deprecated scheduler API stupidity
                return false;
            }
        }
    }

    /**
     * Has the server owner denied plugin metrics?
     *
     * @return true if metrics should be opted out of it
     */
    public boolean isOptOut() {
        synchronized (optOutLock) {
            try {
                // Reload the metrics file
                configuration.load(getConfigFile());
            } catch (IOException ex) {
                if (debug) {
                    Bukkit.getLogger().log(Level.INFO, "[Metrics] " + ex.getMessage());
                }
                return true;
            } catch (InvalidConfigurationException ex) {
                if (debug) {
                    Bukkit.getLogger().log(Level.INFO, "[Metrics] " + ex.getMessage());
                }
                return true;
            }
            return configuration.getBoolean("opt-out", false);
        }
    }

    /**
     * Enables metrics for the server by setting "opt-out" to false in the config file and starting the metrics task.
     *
     * @throws java.io.IOException
     */
    public void enable() throws IOException {
        // This has to be synchronized or it can collide with the check in the task.
        synchronized (optOutLock) {
            // Check if the server owner has already set opt-out, if not, set it.
            if (isOptOut()) {
                configuration.set("opt-out", false);
                configuration.save(configurationFile);
            }

            // Enable Task, if it is not running
            if (task == null) {
                start();
            }
        }
    }

    /**
     * Disables metrics for the server by setting "opt-out" to true in the config file and canceling the metrics task.
     *
     * @throws java.io.IOException
     */
    public void disable() throws IOException {
        // This has to be synchronized or it can collide with the check in the task.
        synchronized (optOutLock) {
            // Check if the server owner has already set opt-out, if not, set it.
            if (!isOptOut()) {
                configuration.set("opt-out", true);
                configuration.save(configurationFile);
            }

            // Disable Task, if it is running
            if (task != null) {
                task.cancel();
                task = null;
            }
        }
    }

    /**
     * Gets the File object of the config file that should be used to store data such as the GUID and opt-out status
     *
     * @return the File object for the config file
     */
    public File getConfigFile() {
        // I believe the easiest way to get the base folder (e.g craftbukkit set via -P) for plugins to use
        // is to abuse the plugin object we already have
        // plugin.getDataFolder() => base/plugins/PluginA/
        // pluginsFolder => base/plugins/
        // The base is not necessarily relative to the startup directory.
        File pluginsFolder = plugin.getDataFolder().getParentFile();

        // return => base/plugins/PluginMetrics/config.yml
        return new File(new File(pluginsFolder, "PluginMetrics"), "config.yml");
    }

    /**
     * Generic method that posts a plugin to the metrics website
     */
    private void postPlugin(final boolean isPing) throws IOException {
        // Server software specific section
        PluginDescriptionFile description = plugin.getDescription();
        String pluginName = description.getName();
        boolean onlineMode = Bukkit.getServer().getOnlineMode(); // TRUE if online mode is enabled
        String pluginVersion = description.getVersion();
        String serverVersion = Bukkit.getVersion();
        int playersOnline = Bukkit.getServer().getOnlinePlayers().length;

        // END server software specific section -- all code below does not use any code outside of this class / Java

        // Construct the post data
        final StringBuilder data = new StringBuilder();

        // The plugin's description file containg all of the plugin data such as name, version, author, etc
        data.append(encode("guid")).append('=').append(encode(guid));
        encodeDataPair(data, "version", pluginVersion);
        encodeDataPair(data, "server", serverVersion);
        encodeDataPair(data, "players", Integer.toString(playersOnline));
        encodeDataPair(data, "revision", String.valueOf(REVISION));

        // New data as of R6
        String osname = System.getProperty("os.name");
        String osarch = System.getProperty("os.arch");
        String osversion = System.getProperty("os.version");
        String java_version = System.getProperty("java.version");
        int coreCount = Runtime.getRuntime().availableProcessors();

        // normalize os arch .. amd64 -> x86_64
        if (osarch.equals("amd64")) {
            osarch = "x86_64";
        }

        encodeDataPair(data, "osname", osname);
        encodeDataPair(data, "osarch", osarch);
        encodeDataPair(data, "osversion", osversion);
        encodeDataPair(data, "cores", Integer.toString(coreCount));
        encodeDataPair(data, "online-mode", Boolean.toString(onlineMode));
        encodeDataPair(data, "java_version", java_version);

        // If we're pinging, append it
        if (isPing) {
            encodeDataPair(data, "ping", "true");
        }

        // Acquire a lock on the graphs, which lets us make the assumption we also lock everything
        // inside of the graph (e.g plotters)
        synchronized (graphs) {
            final Iterator<Graph> iter = graphs.iterator();

            while (iter.hasNext()) {
                final Graph graph = iter.next();

                for (Plotter plotter : graph.getPlotters()) {
                    // The key name to send to the metrics server
                    // The format is C-GRAPHNAME-PLOTTERNAME where separator - is defined at the top
                    // Legacy (R4) submitters use the format Custom%s, or CustomPLOTTERNAME
                    final String key = String.format("C%s%s%s%s", CUSTOM_DATA_SEPARATOR, graph.getName(),
                            CUSTOM_DATA_SEPARATOR, plotter.getColumnName());

                    // The value to send, which for the foreseeable future is just the string
                    // value of plotter.getValue()
                    final String value = Integer.toString(plotter.getValue());

                    // Add it to the http post data :)
                    encodeDataPair(data, key, value);
                }
            }
        }

        // Create the url
        URL url = new URL(BASE_URL + String.format(REPORT_URL, encode(pluginName)));

        // Connect to the website
        URLConnection connection;

        // Mineshafter creates a socks proxy, so we can safely bypass it
        // It does not reroute POST requests so we need to go around it
        if (isMineshafterPresent()) {
            connection = url.openConnection(Proxy.NO_PROXY);
        } else {
            connection = url.openConnection();
        }

        connection.setDoOutput(true);

        // FIX: the writer and reader were previously only closed on the success path,
        // leaking the connection's streams whenever the write or read threw. Close
        // them in a finally block so the streams are always released.
        OutputStreamWriter writer = null;
        BufferedReader reader = null;
        final String response;
        try {
            // Write the data
            writer = new OutputStreamWriter(connection.getOutputStream());
            writer.write(data.toString());
            writer.flush();

            // Now read the response
            reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
            response = reader.readLine();
        } finally {
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException ignored) {
                    // best effort cleanup; the primary exception (if any) is propagating
                }
            }
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignored) {
                    // best effort cleanup
                }
            }
        }

        if (response == null || response.startsWith("ERR")) {
            throw new IOException(response); //Throw the exception
        } else {
            // Is this the first update this hour?
            if (response.contains("OK This is your first update this hour")) {
                synchronized (graphs) {
                    final Iterator<Graph> iter = graphs.iterator();

                    while (iter.hasNext()) {
                        final Graph graph = iter.next();

                        for (Plotter plotter : graph.getPlotters()) {
                            plotter.reset();
                        }
                    }
                }
            }
        }
    }

    /**
     * Check if mineshafter is present. If it is, we need to bypass it to send POST requests
     *
     * @return true if mineshafter is installed on the server
     */
    private boolean isMineshafterPresent() {
        try {
            Class.forName("mineshafter.MineServer");
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    /**
     * <p>Encode a key/value data pair to be used in a HTTP post request. This INCLUDES a & so the first key/value pair
     * MUST be included manually, e.g:</p>
     * <code>
     * StringBuffer data = new StringBuffer();
     * data.append(encode("guid")).append('=').append(encode(guid));
     * encodeDataPair(data, "version", description.getVersion());
     * </code>
     *
     * @param buffer the stringbuilder to append the data pair onto
     * @param key the key value
     * @param value the value
     */
    private static void encodeDataPair(final StringBuilder buffer, final String key, final String value)
            throws UnsupportedEncodingException {
        buffer.append('&').append(encode(key)).append('=').append(encode(value));
    }

    /**
     * Encode text as UTF-8
     *
     * @param text the text to encode
     * @return the encoded text, as UTF-8
     */
    private static String encode(final String text) throws UnsupportedEncodingException {
        return URLEncoder.encode(text, "UTF-8");
    }

    /**
     * Represents a custom graph on the website
     */
    public static class Graph {

        /**
         * The graph's name, alphanumeric and spaces only :) If it does not comply to the above when submitted, it is
         * rejected
         */
        private final String name;

        /**
         * The set of plotters that are contained within this graph
         */
        private final Set<Plotter> plotters = new LinkedHashSet<Plotter>();

        private Graph(final String name) {
            this.name = name;
        }

        /**
         * Gets the graph's name
         *
         * @return the Graph's name
         */
        public String getName() {
            return name;
        }

        /**
         * Add a plotter to the graph, which will be used to plot entries
         *
         * @param plotter the plotter to add to the graph
         */
        public void addPlotter(final Plotter plotter) {
            plotters.add(plotter);
        }

        /**
         * Remove a plotter from the graph
         *
         * @param plotter the plotter to remove from the graph
         */
        public void removePlotter(final Plotter plotter) {
            plotters.remove(plotter);
        }

        /**
         * Gets an <b>unmodifiable</b> set of the plotter objects in the graph
         *
         * @return an unmodifiable {@link java.util.Set} of the plotter objects
         */
        public Set<Plotter> getPlotters() {
            return Collections.unmodifiableSet(plotters);
        }

        @Override
        public int hashCode() {
            return name.hashCode();
        }

        @Override
        public boolean equals(final Object object) {
            if (!(object instanceof Graph)) {
                return false;
            }

            final Graph graph = (Graph) object;
            return graph.name.equals(name);
        }

        /**
         * Called when the server owner decides to opt-out of BukkitMetrics while the server is running.
         */
        protected void onOptOut() {
        }
    }

    /**
     * Interface used to collect custom data for a plugin
     */
    public static abstract class Plotter {

        /**
         * The plot's name
         */
        private final String name;

        /**
         * Construct a plotter with the default plot name
         */
        public Plotter() {
            this("Default");
        }

        /**
         * Construct a plotter with a specific plot name
         *
         * @param name the name of the plotter to use, which will show up on the website
         */
        public Plotter(final String name) {
            this.name = name;
        }

        /**
         * Get the current value for the plotted point. Since this function defers to an external function it may or may
         * not return immediately thus cannot be guaranteed to be thread friendly or safe. This function can be called
         * from any thread so care should be taken when accessing resources that need to be synchronized.
         *
         * @return the current value for the point to be plotted.
         */
        public abstract int getValue();

        /**
         * Get the column name for the plotted point
         *
         * @return the plotted point's column name
         */
        public String getColumnName() {
            return name;
        }

        /**
         * Called after the website graphs have been updated
         */
        public void reset() {
        }

        @Override
        public int hashCode() {
            return getColumnName().hashCode();
        }

        @Override
        public boolean equals(final Object object) {
            if (!(object instanceof Plotter)) {
                return false;
            }

            final Plotter plotter = (Plotter) object;
            return plotter.name.equals(name) && plotter.getValue() == getValue();
        }
    }
}
package org.opendaylight.controller.cluster.raft;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.eclipse.jdt.annotation.NonNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Abstract class handling the mapping of
 * logical LogEntry Index and the physical list index.
 *
 * <p>Entries at or below {@code snapshotIndex} have been captured into a snapshot and removed
 * from the in-memory {@code journal}; logical index {@code snapshotIndex + 1} maps to physical
 * index 0. Not thread-safe — callers are expected to serialize access (TODO confirm against
 * the RaftActor threading model; not visible from this file).
 */
public abstract class AbstractReplicatedLogImpl implements ReplicatedLog {
    private static final Logger LOG = LoggerFactory.getLogger(AbstractReplicatedLogImpl.class);

    // Prefix used in log messages to identify which log instance emitted them.
    private final String logContext;

    // We define this as ArrayList so we can use ensureCapacity.
    private ArrayList<ReplicatedLogEntry> journal;

    // Index/term of the last entry included in the most recent snapshot; -1 means no snapshot yet.
    private long snapshotIndex = -1;
    private long snapshotTerm = -1;

    // to be used for rollback during save snapshot failure
    private ArrayList<ReplicatedLogEntry> snapshottedJournal;
    private long previousSnapshotIndex = -1;
    private long previousSnapshotTerm = -1;

    // Running total of the serialized sizes of all journal entries (maintained by append/removeFrom).
    private int dataSize = 0;

    /**
     * Constructs an instance restored from a snapshot.
     *
     * @param snapshotIndex index of the last entry captured in the snapshot
     * @param snapshotTerm term of the last entry captured in the snapshot
     * @param unAppliedEntries entries that follow the snapshot and must be re-appended
     * @param logContext context string used to prefix log messages
     */
    protected AbstractReplicatedLogImpl(final long snapshotIndex, final long snapshotTerm,
            final List<ReplicatedLogEntry> unAppliedEntries, final String logContext) {
        this.snapshotIndex = snapshotIndex;
        this.snapshotTerm = snapshotTerm;
        this.logContext = logContext;

        this.journal = new ArrayList<>(unAppliedEntries.size());
        // Go through append() rather than addAll() so dataSize and index ordering are maintained.
        for (ReplicatedLogEntry entry: unAppliedEntries) {
            append(entry);
        }
    }

    /** Constructs an empty log with no snapshot. */
    protected AbstractReplicatedLogImpl() {
        this(-1L, -1L, Collections.emptyList(), "");
    }

    /**
     * Maps a logical log-entry index to a physical index into {@code journal}.
     * With no snapshot the mapping is the identity; otherwise entries up to and
     * including snapshotIndex are absent, so physical 0 == logical snapshotIndex + 1.
     * May return a negative value when the logical index lies inside the snapshot.
     */
    protected int adjustedIndex(final long logEntryIndex) {
        if (snapshotIndex < 0) {
            return (int) logEntryIndex;
        }
        return (int) (logEntryIndex - (snapshotIndex + 1));
    }

    @Override
    public ReplicatedLogEntry get(final long logEntryIndex) {
        int adjustedIndex = adjustedIndex(logEntryIndex);

        if (adjustedIndex < 0 || adjustedIndex >= journal.size()) {
            // physical index should be less than list size and >= 0
            return null;
        }

        return journal.get(adjustedIndex);
    }

    @Override
    public ReplicatedLogEntry last() {
        if (journal.isEmpty()) {
            return null;
        }
        // get the last entry directly from the physical index
        return journal.get(journal.size() - 1);
    }

    @Override
    public long lastIndex() {
        if (journal.isEmpty()) {
            // it can happen that after snapshot, all the entries of the
            // journal are trimmed till lastApplied, so lastIndex = snapshotIndex
            return snapshotIndex;
        }
        return last().getIndex();
    }

    @Override
    public long lastTerm() {
        if (journal.isEmpty()) {
            // it can happen that after snapshot, all the entries of the
            // journal are trimmed till lastApplied, so lastTerm = snapshotTerm
            return snapshotTerm;
        }
        return last().getTerm();
    }

    @Override
    public long removeFrom(final long logEntryIndex) {
        int adjustedIndex = adjustedIndex(logEntryIndex);
        if (adjustedIndex < 0 || adjustedIndex >= journal.size()) {
            // physical index should be less than list size and >= 0
            return -1;
        }

        // Subtract the sizes of the entries being removed before truncating the tail.
        for (int i = adjustedIndex; i < journal.size(); i++) {
            dataSize -= journal.get(i).size();
        }

        journal.subList(adjustedIndex, journal.size()).clear();

        return adjustedIndex;
    }

    @Override
    public boolean append(final ReplicatedLogEntry replicatedLogEntry) {
        // Only strictly increasing indices are accepted; duplicates/regressions are logged and rejected.
        if (replicatedLogEntry.getIndex() > lastIndex()) {
            journal.add(replicatedLogEntry);
            dataSize += replicatedLogEntry.size();
            return true;
        } else {
            LOG.warn("{}: Cannot append new entry - new index {} is not greater than the last index {}",
                    logContext, replicatedLogEntry.getIndex(), lastIndex(), new Exception("stack trace"));
            return false;
        }
    }

    @Override
    public void increaseJournalLogCapacity(final int amount) {
        journal.ensureCapacity(journal.size() + amount);
    }

    @Override
    public List<ReplicatedLogEntry> getFrom(final long logEntryIndex) {
        return getFrom(logEntryIndex, journal.size(), NO_MAX_SIZE);
    }

    @Override
    public List<ReplicatedLogEntry> getFrom(final long logEntryIndex, final int maxEntries, final long maxDataSize) {
        int adjustedIndex = adjustedIndex(logEntryIndex);
        int size = journal.size();
        if (adjustedIndex >= 0 && adjustedIndex < size) {
            // physical index should be less than list size and >= 0
            int maxIndex = adjustedIndex + maxEntries;
            if (maxIndex > size) {
                maxIndex = size;
            }

            if (maxDataSize == NO_MAX_SIZE) {
                return new ArrayList<>(journal.subList(adjustedIndex, maxIndex));
            } else {
                return copyJournalEntries(adjustedIndex, maxIndex, maxDataSize);
            }
        } else {
            return Collections.emptyList();
        }
    }

    /**
     * Copies journal entries in [fromIndex, toIndex) stopping once the cumulative
     * serialized size would exceed maxDataSize; always returns at least one entry.
     */
    private @NonNull List<ReplicatedLogEntry> copyJournalEntries(final int fromIndex, final int toIndex,
            final long maxDataSize) {
        List<ReplicatedLogEntry> retList = new ArrayList<>(toIndex - fromIndex);
        long totalSize = 0;
        for (int i = fromIndex; i < toIndex; i++) {
            ReplicatedLogEntry entry = journal.get(i);
            totalSize += entry.size();
            if (totalSize <= maxDataSize) {
                retList.add(entry);
            } else {
                if (retList.isEmpty()) {
                    // Edge case - the first entry's size exceeds the threshold. We need to return
                    // at least the first entry so add it here.
                    retList.add(entry);
                }

                break;
            }
        }

        return retList;
    }

    @Override
    public long size() {
        return journal.size();
    }

    @Override
    public int dataSize() {
        return dataSize;
    }

    @Override
    public boolean isPresent(final long logEntryIndex) {
        // if the requested logical index is greater than the last present in the list
        if (logEntryIndex > lastIndex()) {
            return false;
        }
        // A negative adjusted index means the entry lies within the snapshot, i.e. not present.
        int adjustedIndex = adjustedIndex(logEntryIndex);
        return adjustedIndex >= 0;
    }

    @Override
    public boolean isInSnapshot(final long logEntryIndex) {
        return logEntryIndex >= 0 && logEntryIndex <= snapshotIndex && snapshotIndex != -1;
    }

    @Override
    public long getSnapshotIndex() {
        return snapshotIndex;
    }

    @Override
    public long getSnapshotTerm() {
        return snapshotTerm;
    }

    @Override
    public void setSnapshotIndex(final long snapshotIndex) {
        this.snapshotIndex = snapshotIndex;
    }

    @Override
    public void setSnapshotTerm(final long snapshotTerm) {
        this.snapshotTerm = snapshotTerm;
    }

    @Override
    public void clear(final int startIndex, final int endIndex) {
        journal.subList(startIndex, endIndex).clear();
    }

    @Override
    public void snapshotPreCommit(final long snapshotCapturedIndex, final long snapshotCapturedTerm) {
        Preconditions.checkArgument(snapshotCapturedIndex >= snapshotIndex,
                "snapshotCapturedIndex must be greater than or equal to snapshotIndex");

        // Stash the entries being snapshotted so snapshotRollback() can restore them on failure.
        snapshottedJournal = new ArrayList<>(journal.size());

        List<ReplicatedLogEntry> snapshotJournalEntries =
                journal.subList(0, (int) (snapshotCapturedIndex - snapshotIndex));

        snapshottedJournal.addAll(snapshotJournalEntries);
        // Clearing the subList view removes those entries from the backing journal.
        snapshotJournalEntries.clear();

        previousSnapshotIndex = snapshotIndex;
        setSnapshotIndex(snapshotCapturedIndex);

        previousSnapshotTerm = snapshotTerm;
        setSnapshotTerm(snapshotCapturedTerm);
    }

    @Override
    public void snapshotCommit() {
        snapshottedJournal = null;
        previousSnapshotIndex = -1;
        previousSnapshotTerm = -1;

        // need to recalc the datasize based on the entries left after precommit.
        int newDataSize = 0;
        for (ReplicatedLogEntry logEntry : journal) {
            newDataSize += logEntry.size();
        }

        LOG.trace("{}: Updated dataSize from {} to {}", logContext, dataSize, newDataSize);
        dataSize = newDataSize;
    }

    @Override
    public void snapshotRollback() {
        // Restore: stashed entries come first, followed by whatever was appended since precommit.
        snapshottedJournal.addAll(journal);
        journal = snapshottedJournal;
        snapshottedJournal = null;

        snapshotIndex = previousSnapshotIndex;
        previousSnapshotIndex = -1;

        snapshotTerm = previousSnapshotTerm;
        previousSnapshotTerm = -1;
    }

    @VisibleForTesting
    ReplicatedLogEntry getAtPhysicalIndex(final int index) {
        return journal.get(index);
    }
}
package org.jtrfp.trcl.tools;

import java.awt.Color;
import java.lang.reflect.Method;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;

import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.coll.BulkRemovable;
import org.jtrfp.trcl.coll.Repopulatable;
import org.jtrfp.trcl.core.TRFactory;
import org.jtrfp.trcl.math.Vect3D;

import com.ochafik.util.Adapter;

import me.xdrop.fuzzywuzzy.FuzzySearch;

/**
 * Miscellaneous static helpers: collection repopulation, adapter bridging,
 * reflection-based bean checks, heading math and Swing tree utilities.
 */
public class Util {
    /**
     * 256-entry default color palette. Index 0 is fully transparent
     * (alpha 0); the remaining entries are opaque ramps. Presumably a
     * game-asset palette — TODO confirm origin against the asset loaders.
     */
    public static final Color [] DEFAULT_PALETTE = new Color []{
	new Color(0,0,0,0), new Color(7,7,7), new Color(14,14,14), new Color(22,22,22),
	new Color(29,29,29), new Color(36,36,36), new Color(43,43,43), new Color(51,51,51),
	new Color(58,58,58), new Color(65,65,65), new Color(72,72,72), new Color(79,79,79),
	new Color(87,87,87), new Color(94,94,94), new Color(101,101,101), new Color(108,108,108),
	new Color(116,116,116), new Color(123,123,123), new Color(130,130,130), new Color(137,137,137),
	new Color(145,145,145), new Color(152,152,152), new Color(159,159,159), new Color(166,166,166),
	new Color(173,173,173), new Color(181,181,181), new Color(188,188,188), new Color(195,195,195),
	new Color(202,202,202), new Color(210,210,210), new Color(217,217,217), new Color(224,224,224),
	new Color(5,5,5), new Color(10,9,9), new Color(14,13,13), new Color(19,18,18),
	new Color(24,24,23), new Color(29,28,26), new Color(35,33,30), new Color(40,40,35),
	new Color(45,45,38), new Color(50,51,42), new Color(55,57,46), new Color(58,62,49),
	new Color(61,68,53), new Color(64,75,56), new Color(65,80,59), new Color(67,88,63),
	new Color(73,96,69), new Color(80,106,75), new Color(85,114,81), new Color(90,123,86),
	new Color(96,131,92), new Color(102,141,98), new Color(108,150,103), new Color(113,157,110),
	new Color(121,163,118), new Color(130,170,127), new Color(137,176,135), new Color(145,182,143),
	new Color(152,188,151), new Color(161,194,161), new Color(169,200,169), new Color(173,204,173),
	new Color(6,6,6), new Color(11,10,10), new Color(16,15,15), new Color(20,19,19),
	new Color(25,24,24), new Color(31,28,28), new Color(37,33,32), new Color(42,37,37),
	new Color(47,41,40), new Color(53,45,44), new Color(59,50,48), new Color(65,54,52),
	new Color(71,59,56), new Color(77,62,58), new Color(83,67,62), new Color(90,72,65),
	new Color(97,77,70), new Color(107,85,76), new Color(116,91,81), new Color(127,99,86),
	new Color(138,107,91), new Color(148,114,95), new Color(159,121,100), new Color(167,131,108),
	new Color(173,139,116), new Color(180,149,125), new Color(187,157,134), new Color(193,165,142),
	new Color(200,174,151), new Color(206,184,161), new Color(212,191,169), new Color(218,199,179),
	new Color(3,3,16), new Color(5,5,28), new Color(8,8,39), new Color(10,10,51),
	new Color(14,13,60), new Color(18,17,70), new Color(23,20,81), new Color(28,24,91),
	new Color(34,29,100), new Color(39,34,109), new Color(46,39,118), new Color(52,44,127),
	new Color(60,50,133), new Color(66,56,141), new Color(75,62,149), new Color(83,73,156),
	new Color(87,77,164), new Color(91,80,171), new Color(97,87,176), new Color(107,95,180),
	new Color(114,103,182), new Color(122,111,186), new Color(129,119,188), new Color(137,128,191),
	new Color(144,136,195), new Color(151,143,198), new Color(159,152,201), new Color(167,160,205),
	new Color(174,167,208), new Color(181,175,212), new Color(187,182,215), new Color(195,190,219),
	new Color(14,0,0), new Color(27,0,0), new Color(40,0,1), new Color(53,0,1),
	new Color(66,0,1), new Color(79,0,1), new Color(92,0,2), new Color(105,0,2),
	new Color(118,0,2), new Color(131,0,2), new Color(144,0,3), new Color(157,0,3),
	new Color(170,0,3), new Color(183,0,3), new Color(196,0,4), new Color(199,15,7),
	new Color(203,33,9), new Color(208,52,11), new Color(212,70,13), new Color(216,89,16),
	new Color(221,107,18), new Color(225,126,20), new Color(229,144,22), new Color(233,163,24),
	new Color(238,181,26), new Color(242,200,29), new Color(246,218,31), new Color(251,237,33),
	new Color(255,255,35), new Color(255,255,108), new Color(255,255,182), new Color(255,255,255),
	new Color(8,8,32), new Color(16,16,64), new Color(24,24,96), new Color(32,32,128),
	new Color(40,40,160), new Color(48,48,192), new Color(56,56,224), new Color(63,63,255),
	new Color(8,32,32), new Color(16,64,64), new Color(24,96,96), new Color(32,128,128),
	new Color(40,160,160), new Color(48,192,192), new Color(56,224,224), new Color(63,255,255),
	new Color(56,15,5), new Color(70,22,7), new Color(85,31,10), new Color(98,41,13),
	new Color(111,52,16), new Color(125,65,20), new Color(137,78,24), new Color(149,91,28),
	new Color(162,104,33), new Color(173,118,38), new Color(183,133,44), new Color(195,150,50),
	new Color(203,165,58), new Color(204,176,73), new Color(205,186,90), new Color(207,194,105),
	new Color(2,2,37), new Color(10,5,44), new Color(18,9,51), new Color(26,12,58),
	new Color(35,16,65), new Color(43,19,72), new Color(51,23,79), new Color(59,26,86),
	new Color(67,29,93), new Color(75,33,100), new Color(84,36,107), new Color(92,40,114),
	new Color(100,43,121), new Color(108,46,128), new Color(116,50,135), new Color(124,53,142),
	new Color(133,57,148), new Color(141,60,155), new Color(149,64,162), new Color(157,67,169),
	new Color(165,70,176), new Color(173,74,183), new Color(182,77,190), new Color(190,81,197),
	new Color(198,84,204), new Color(206,87,211), new Color(214,91,218), new Color(222,94,225),
	new Color(231,98,232), new Color(239,101,239), new Color(247,105,246), new Color(255,108,253),
	new Color(55,14,4), new Color(81,24,6), new Color(108,36,7), new Color(136,51,9),
	new Color(162,65,11), new Color(188,84,13), new Color(214,105,15), new Color(241,129,16),
	new Color(244,153,43), new Color(245,174,70), new Color(247,193,96), new Color(248,209,123),
	new Color(250,221,149), new Color(252,234,177), new Color(254,244,203), new Color(255,253,232),
	new Color(0,0,0), new Color(0,0,0), new Color(0,0,0), new Color(0,0,0),
	new Color(0,0,0), new Color(0,0,0), new Color(0,0,0), new Color(0,0,0),
	new Color(0,0,0), new Color(0,0,0), new Color(0,0,0), new Color(0,0,0),
	new Color(0,0,0), new Color(0,0,0), new Color(0,0,0), new Color(0,0,0)
    };

    /**
     * Replaces the contents of dest with the contents of src, reusing
     * existing storage where possible: Repopulatable targets delegate,
     * Lists are overwritten in place then truncated/extended, and any
     * other Collection is cleared and re-filled.
     *
     * @param dest collection to be overwritten
     * @param src  source of the new contents (not modified)
     */
    public static <T>void repopulate(Collection<T> dest, Collection<T> src){
	if(dest instanceof Repopulatable)
	    ((Repopulatable<T>)dest).repopulate(src);
	else if(dest instanceof List){
	    final List<T> dst = (List<T>)dest;
	    if(dest.size()>src.size()){
		// Overwrite the prefix, then drop the leftover tail.
		Iterator<T>     sIt = src.iterator();
		ListIterator<T> dIt = dst.listIterator();
		while(sIt.hasNext())
		    {dIt.next();dIt.set(sIt.next());}
		dst.subList(src.size(), dst.size()).clear();//Truncate
	    } else if(dest.size()<src.size()){
		// Overwrite everything present, then append the remainder in one addAll.
		Iterator<T>     sIt = src.iterator();
		ListIterator<T> dIt = dst.listIterator();
		while(dIt.hasNext())
		    {dIt.next();dIt.set(sIt.next());}
		final ArrayList<T> additional = new ArrayList<T>();
		while(sIt.hasNext())
		    additional.add(sIt.next());
		dest.addAll(additional);
	    }else {//Same size
		Iterator<T>     sIt = src.iterator();
		ListIterator<T> dIt = dst.listIterator();
		while(sIt.hasNext())
		    {dIt.next();dIt.set(sIt.next());}
	    }
	}else{
	    dest.clear();
	    dest.addAll(src);
	}
    }//end repopulate(...)

    /**
     * Wraps a bidirectional ochafik Adapter as a forward-only listenable Adapter.
     */
    public static <U,V> com.ochafik.util.listenable.Adapter<U,V> bidi2Forward(final com.ochafik.util.Adapter<U,V> bidi){
	return new com.ochafik.util.listenable.Adapter<U,V>(){
	    @Override
	    public V adapt(U value) {
		return bidi.adapt(value);
	    }};
    }//end bidi2Forward(...)

    /**
     * Wraps a bidirectional ochafik Adapter as a backward-only (reAdapt) listenable Adapter.
     */
    public static <U,V> com.ochafik.util.listenable.Adapter<V,U> bidi2Backward(final com.ochafik.util.Adapter<U,V> bidi){
	return new com.ochafik.util.listenable.Adapter<V,U>(){
	    @Override
	    public U adapt(V value) {
		return bidi.reAdapt(value);
	    }};
    }//end bidi2Backward(...)

    /**
     * Returns an Adapter whose adapt/reAdapt directions are swapped relative to the argument.
     */
    public static <U,V> Adapter<V,U> inverse(final Adapter<U,V> adapter){
	return new Adapter<V,U>(){
	    @Override
	    public U adapt(V value) {
		return adapter.reAdapt(value);
	    }
	    @Override
	    public V reAdapt(U value) {
		return adapter.adapt(value);
	    }};
    }

    /**
     * Remove a single instance (or none) of each supplied element in given Collection.
     * Not the same as removeAll - only one instance removed.
     * @param toRemove
     * @since Jan 11, 2016
     */
    public static <E> void bulkRemove(Collection<E> toRemove, Collection<E> target){
	if(target instanceof BulkRemovable)
	    ((BulkRemovable)target).bulkRemove(toRemove);
	else
	    for(E e:toRemove)
		target.remove(e);
    }//end bulkRemove(...)

    /**
     * Rounds value to the nearest multiple of interval.
     * Uses Math.rint, so exact midpoints round to the even multiple.
     */
    public static double quantize(double value, double interval){
	return Math.rint(value / interval)*interval;
    }

    /**
     * Asserts that each named JavaBean property of the given bean has a
     * non-null value, by invoking its no-arg get[Name]() accessor.
     *
     * @param bean          object whose accessors are invoked
     * @param propertyNames bean property names (lowerCamelCase, no "get" prefix)
     * @throws RuntimeException      if an accessor is missing or fails
     * @throws IllegalStateException if a property value is null
     */
    public static void assertPropertiesNotNull(Object bean, String ... propertyNames){
	final Class<?> beanClass = bean.getClass();
	for(String propertyName : propertyNames){
	    Object result;
	    try{
		final String camelCaseName = Character.toUpperCase(propertyName.charAt(0))+""+propertyName.substring(1);
		// No-arg lookup/invoke; previously passed a literal null varargs array,
		// which is ambiguous and warned about by the compiler.
		final Method method = beanClass.getMethod("get"+camelCaseName);
		result = method.invoke(bean);
	    }catch(Exception e){
		throw new RuntimeException("Could not check property `"+propertyName+"`",e);}
	    if(result == null)
		throw new IllegalStateException("Property `"+propertyName+"` is intolerably null. Did you forget to set it?");
	}//end for(propertyNames)
    }//end assertPropertiesNotNull

    /**
     * Computes a Y-ignoring (horizontal-plane) unit vector describing the
     * rotation from originHdg toward targetPos as seen from originPos,
     * expressed relative to +Z. Result written into dest; dest[1] is always 0.
     *
     * @param originPos world position of the observer
     * @param originHdg heading of the observer (Y component ignored)
     * @param targetPos world position of the target
     * @param dest      length-3 output array (also used as scratch)
     */
    public static void relativeHeadingVector(
	    double [] originPos,
	    double [] originHdg,
	    double [] targetPos,
	    double [] dest ){
	final double [] vectorToTargetVar = dest;
	// Positions use two's-complement world coordinates; subtract accordingly.
	TRFactory.twosComplementSubtract(targetPos, originPos,vectorToTargetVar);
	assert !Vect3D.isAnyNaN(vectorToTargetVar);
	assert !Vect3D.isAnyEqual(vectorToTargetVar, Double.POSITIVE_INFINITY);
	assert !Vect3D.isAnyEqual(vectorToTargetVar, Double.NEGATIVE_INFINITY);
	vectorToTargetVar[1]=0;// Flatten to the horizontal plane.
	Vect3D.normalize(vectorToTargetVar,vectorToTargetVar);
	Rotation rot = new Rotation(new Vector3D(originHdg[0],0,originHdg[2]),new Vector3D(vectorToTargetVar));
	final Vector3D deltaVector = rot.applyTo(Vector3D.PLUS_K);
	dest[0] = deltaVector.getX();
	dest[1] = 0;
	dest[2] = deltaVector.getZ();
    }//end relativeHeadingVector()

    /**
     * Returns all leaf nodes reachable from root, in depth-first order.
     */
    public static List<DefaultMutableTreeNode> getLeaves(DefaultMutableTreeNode root) {
	final ArrayList<DefaultMutableTreeNode> result = new ArrayList<>();
	final Iterator<TreeNode> it = root.depthFirstEnumeration().asIterator();
	while(it.hasNext()) {
	    final TreeNode node = it.next();
	    if(node.isLeaf())
		result.add((DefaultMutableTreeNode)node);
	}//end while(hasNext)
	return result;
    }//end getLeaves(...)

    /**
     * Root node is not included when searching for object path, but is included in result
     * @param root
     * @param objectPath
     * @return
     * @since Jan 22, 2022
     */
    public static List<DefaultMutableTreeNode> nodePathFromUserObjectPath(
	    DefaultMutableTreeNode root, Object ... objectPath) {
	final List<Object> objectPathList = Arrays.asList(objectPath);
	final List<DefaultMutableTreeNode> result = new ArrayList<>(objectPath.length);
	result.add(root);
	DefaultMutableTreeNode node = root;
	for(Object obj : objectPathList) {
	    final Iterator<TreeNode> children = node.children().asIterator();
	    boolean found = false;
	    while(children.hasNext() && !found) {
		final DefaultMutableTreeNode childNode = (DefaultMutableTreeNode)children.next();
		if(Objects.equals(childNode.getUserObject(), obj)) {
		    node = childNode;
		    result.add(childNode);
		    found = true;
		}//end if(matches)
	    }//end while(hasNext)
	    // NOTE(review): if obj is not found among the children, the loop silently
	    // continues matching the remaining path against the same node - confirm intended.
	}//end for(objectPath)
	return result;
    }//end nodePathFromUserObjectPath

    /**
     * Builds a fresh parent/child chain of tree nodes, one per user object,
     * and returns the nodes in path order (first element is the new root).
     */
    public static List<DefaultMutableTreeNode> nodePathFromUserObjectPath(Object ...objects) {
	final ArrayList<DefaultMutableTreeNode> result = new ArrayList<>();
	DefaultMutableTreeNode node = null;
	for(Object obj : objects) {
	    final DefaultMutableTreeNode newNode = new DefaultMutableTreeNode(obj);
	    if(node != null)
		node.add(newNode);
	    node = newNode;
	    result.add(newNode);
	}
	return result;
    }//end nodePathFromUserObjectPath

    /**
     * Finds the node in the reference tree that best approximates the path of
     * the external node, scoring candidates with the supplied comparator
     * (lower absolute score = better match).
     */
    public static DefaultMutableTreeNode getComparatorApproximation(
	    DefaultMutableTreeNode external,
	    DefaultMutableTreeNode reference,
	    Comparator<DefaultMutableTreeNode> comparator) {
	final ArrayDeque<TreeNode> externalPath = Stream.of(external.getPath()).collect(Collectors.toCollection(()->new ArrayDeque<TreeNode>()));
	externalPath.poll();//Skip root since that's implied and this node will be compared to reference root's children anyway.
	return getComparatorApproximationFromRoot(externalPath, (DefaultMutableTreeNode)(reference.getRoot()), comparator);
    }//end getComparatorApproximation()

    /**
     * Recursive worker for getComparatorApproximation: at each level picks the
     * child with the smallest absolute comparator score against the next
     * element of externalPath, descending until a leaf or exhausted path.
     */
    private static DefaultMutableTreeNode getComparatorApproximationFromRoot(ArrayDeque<TreeNode> externalPath, DefaultMutableTreeNode reference, Comparator<DefaultMutableTreeNode> comparator) {
	final Iterator<TreeNode> it = reference.children().asIterator();
	if(!it.hasNext() || externalPath.isEmpty())
	    return reference;
	DefaultMutableTreeNode best = null;
	int bestScore = Integer.MAX_VALUE;
	final DefaultMutableTreeNode externalNode = (DefaultMutableTreeNode)externalPath.poll();
	while(it.hasNext()) {
	    final DefaultMutableTreeNode node = (DefaultMutableTreeNode)it.next();
	    // Debug println removed; the null checks are kept to fail fast on bad trees.
	    Objects.requireNonNull(node);
	    Objects.requireNonNull(externalNode);
	    final int thisScore = Math.abs(comparator.compare(node, externalNode));
	    if( thisScore < bestScore ) {
		bestScore = thisScore;
		best = node;
	    }//end if(best)
	}//end while(hasNext)
	if(best.isLeaf())
	    return best;
	else
	    return getComparatorApproximationFromRoot(externalPath, best, comparator);
    }//end getComparatorApproximationFromRoot(...)
}//end Util
package org.kylin.util;

import org.apache.commons.collections4.CollectionUtils;
import org.kylin.bean.W3DCode;
import org.kylin.bean.p5.WCode;
import org.kylin.bean.p5.WCodeSummarise;
import org.kylin.constant.BitConstant;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Static helpers for validating, converting and filtering lottery codes
 * (WCode / W3DCode beans).
 */
public class WCodeUtils {

    /**
     * Returns true when every code in the list passes its own validate(),
     * or when the list is null/empty.
     */
    public static boolean validateCodes(List<WCode> wCodes){
        if(CollectionUtils.isEmpty(wCodes)){
            return true;
        }

        List<WCode> invalid = wCodes.stream().filter(wCode -> !wCode.validate()).collect(Collectors.toList());

        return CollectionUtils.isEmpty(invalid);
    }

    /**
     * Converts a 3D code bean into a WCode: 2-dimensional when the hundreds
     * digit is absent, 3-dimensional otherwise. Returns null for null input.
     */
    public static WCode fromW3DCode(W3DCode w3DCode){
        if(w3DCode == null){
            return null;
        }

        WCode wCode;
        if(w3DCode.getCodes()[BitConstant.HUNDRED] == null){
            wCode = new WCode(2, w3DCode.getCodes()[BitConstant.DECADE], w3DCode.getCodes()[BitConstant.UNIT]);
        }else{
            wCode = new WCode(3, w3DCode.getCodes()[BitConstant.HUNDRED],
                    w3DCode.getCodes()[BitConstant.DECADE], w3DCode.getCodes()[BitConstant.UNIT]);
        }

        return wCode;
    }

    /**
     * Converts a list of W3DCodes, skipping any that convert to null.
     */
    public static List<WCode> fromW3DCodes(List<W3DCode> w3DCodes){
        if(CollectionUtils.isEmpty(w3DCodes)){
            return Collections.emptyList();
        }

        List<WCode> wCodes = new ArrayList<>();
        w3DCodes.forEach(w3DCode -> {
            WCode wCode = fromW3DCode(w3DCode);
            // Bug fix: previously checked `w3DCode != null` (the input) instead of
            // the conversion result, so a null result could slip through in principle.
            if(wCode != null){
                wCodes.add(wCode);
            }
        });

        return wCodes;
    }

    /**
     * Expands each 3-dimensional code into 5-dimensional permutation codes by
     * appending every ordered pair of distinct trailing digits (00-99 with
     * equal digits skipped). Returns an empty list unless the input is a
     * non-empty, valid list of 3-dimensional codes.
     */
    public static List<WCode> transferToPermutationFiveCodes(List<WCode> wCodes){
        if(CollectionUtils.isEmpty(wCodes) || !validateCodes(wCodes) || wCodes.get(0).getDim() != 3){
            return Collections.emptyList();
        }

        List<WCode> permutationFiveCodes = new ArrayList<>();

        for(WCode wCode: wCodes){
            for(int i=0; i<100; i++){
                int lastFirst = i % 10;
                int lastSecond = i / 10;  // redundant (int) cast removed
                if(lastFirst == lastSecond){
                    continue;
                }
                WCode pCode = new WCode(5, wCode.getCodes().get(BitConstant.HUNDRED),
                        wCode.getCodes().get(BitConstant.DECADE), wCode.getCodes().get(BitConstant.UNIT),
                        lastSecond, lastFirst);
                permutationFiveCodes.add(pCode);
            }
        }

        Collections.sort(permutationFiveCodes);
        return permutationFiveCodes;
    }

    /**
     * Returns true when the code's digits are all even or all odd.
     */
    public static boolean isAllEvenOrOdd(WCode wCode){
        if(wCode == null || CollectionUtils.isEmpty(wCode.getCodes())){
            return false;
        }

        // Count the even digits; all-even or none-even (i.e. all-odd) qualifies.
        List<Integer> evens = wCode.getCodes().stream().filter(e -> e % 2 == 0).collect(Collectors.toList());
        return CollectionUtils.size(evens) == 0 || CollectionUtils.size(evens) == wCode.getCodes().size();
    }

    /**
     * Counts how many of the code's digits are members of the given set.
     * Returns 0 for null/empty inputs.
     */
    public static int containInSet(WCode wCode, Set<Integer> set){
        if(wCode == null || CollectionUtils.isEmpty(wCode.getCodes()) || CollectionUtils.isEmpty(set)){
            return 0;
        }

        List<Integer> codes = wCode.getCodes().stream().filter(e -> set.contains(e)).collect(Collectors.toList());
        return CollectionUtils.size(codes);
    }

    /**
     * Returns true when the digits are all "high" (>= 5) or all "low" (< 5).
     * Generalized from a hard-coded count of 5 to the code's own length so it
     * also works for 2- and 3-dimensional codes; behavior for 5-dimensional
     * codes is unchanged.
     */
    public static boolean isExtremumCodes(WCode wCode){
        if(wCode == null){
            return false;
        }

        List<Integer> highs = wCode.getCodes().stream().filter(e -> e >= 5).collect(Collectors.toList());
        int highCount = CollectionUtils.size(highs);
        return highCount == wCode.getCodes().size() || highCount == 0;
    }

    /**
     * Returns true when any two of the first three digits are equal.
     * Uses primitive comparison: the previous version compared boxed Integers
     * with ==, which only works for values inside the Integer cache.
     */
    public static boolean isPair(WCode wCode){
        if(wCode == null){
            return false;
        }

        final List<Integer> codes = wCode.getCodes();
        final int a = codes.get(0);
        final int b = codes.get(1);
        final int c = codes.get(2);
        return b == c || b == a || a == c;
    }

    /** Returns the codes that contain a pair; empty list for empty input. */
    public static List<WCode> filterPairCodes(List<WCode> wCodes){
        if(CollectionUtils.isEmpty(wCodes)){
            return Collections.emptyList();
        }

        return wCodes.stream().filter(wCode -> isPair(wCode)).collect(Collectors.toList());
    }

    /** Returns the number of pair codes in the list. */
    public static Integer getPairCodeCount(List<WCode> wCodes){
        return CollectionUtils.size(filterPairCodes(wCodes));
    }

    /** Returns the number of non-pair codes in the list. */
    public static Integer getNonPairCodeCount(List<WCode> wCodes){
        return CollectionUtils.size(filterNonPairCodes(wCodes));
    }

    /** Builds a summary bean carrying the codes plus pair/non-pair counts. */
    public static WCodeSummarise construct(List<WCode> wCodes){
        return new WCodeSummarise()
                .setwCodes(wCodes)
                .setPairCodes(WCodeUtils.getPairCodeCount(wCodes))
                .setNonPairCodes(WCodeUtils.getNonPairCodeCount(wCodes));
    }

    /** Returns the codes that do not contain a pair; empty list for empty input. */
    public static List<WCode> filterNonPairCodes(List<WCode> wCodes){
        if(CollectionUtils.isEmpty(wCodes)){
            return Collections.emptyList();
        }

        return wCodes.stream().filter(wCode -> !isPair(wCode)).collect(Collectors.toList());
    }

    /**
     * Returns `count` distinct random elements from the list, or the list
     * itself when it is empty or smaller than count.
     *
     * <p>Bug fix: the previous version recorded the loop counter instead of
     * the drawn index, so the dedup check never fired and duplicates could be
     * returned (and fewer than count items). It also built a new Random per
     * iteration. A single Random and index-based dedup give exactly `count`
     * distinct picks; termination is guaranteed because count <= size here.
     */
    public static List<WCode> getRandomList(List<WCode> wCodes, Integer count){
        if(CollectionUtils.isEmpty(wCodes) || CollectionUtils.size(wCodes) < count){
            return wCodes;
        }

        final List<WCode> ret = new ArrayList<>(count);
        final Set<Integer> selected = new HashSet<>();
        final Random random = new Random();
        final int size = wCodes.size();

        while(ret.size() < count){
            int index = random.nextInt(size);
            if(selected.add(index)){
                ret.add(wCodes.get(index));
            }
        }
        return ret;
    }
}
package org.metaborg.meta.lang.dynsem.interpreter.nodes.rules;

import java.util.HashSet;
import java.util.Set;

import org.metaborg.meta.lang.dynsem.interpreter.DynSemLanguage;
import org.metaborg.meta.lang.dynsem.interpreter.SourceSectionUtil;
import org.spoofax.interpreter.core.Tools;
import org.spoofax.interpreter.terms.IStrategoAppl;
import org.spoofax.interpreter.terms.IStrategoList;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.terms.TermVisitor;

import com.oracle.truffle.api.Truffle;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.ExplodeLoop;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.source.SourceSection;

/**
 *
 * A node corresponding to a (merged) DynSem rule.
 *
 *
 * @author vladvergu
 *
 */
public class Rule extends RootNode {

	// Constructor name and arity of the left-hand-side term this rule matches on.
	private final String constr;
	private final int arity;

	// Premises are evaluated in order before the target; @Children/@Child are
	// Truffle node-tree annotations required for adoption and loop explosion.
	@Children protected final Premise[] premises;

	@Child protected RuleTarget target;

	/**
	 * Builds a rule root node and eagerly registers it with the Truffle
	 * runtime by creating its call target (side effect of the constructor).
	 *
	 * @param constr constructor name matched by this rule
	 * @param arity number of subterms of the matched constructor
	 * @param premises premise nodes, evaluated in array order
	 * @param output the rule's target (right-hand side) node
	 * @param source source section for error reporting/tooling
	 * @param fd frame descriptor holding one slot per bound variable
	 */
	public Rule(String constr, int arity, Premise[] premises, RuleTarget output, SourceSection source,
			FrameDescriptor fd) {
		super(DynSemLanguage.class, source, fd);
		this.constr = constr;
		this.arity = arity;
		this.premises = premises;
		this.target = output;
		Truffle.getRuntime().createCallTarget(this);
	}

	/**
	 * Evaluates all premises in order, then evaluates and returns the rule
	 * target. @ExplodeLoop lets Truffle unroll the fixed-length premise loop
	 * during partial evaluation.
	 */
	@ExplodeLoop
	public RuleResult execute(VirtualFrame frame) {
		/* evaluate the premises */
		for (int i = 0; i < premises.length; i++) {
			premises[i].execute(frame);
		}

		/* evaluate the rule target */
		return target.execute(frame);
	}

	public String getConstructor() {
		return constr;
	}

	public int getArity() {
		return arity;
	}

	/**
	 * Builds a Rule from a Stratego term of shape Rule(premises, _, Relation/4),
	 * where the relation's LHS carries the constructor name and argument list
	 * (shapes enforced by the assertions below).
	 */
	public static Rule create(IStrategoTerm ruleT) {
		assert Tools.isTermAppl(ruleT);
		assert Tools.hasConstructor((IStrategoAppl) ruleT, "Rule", 3);

		FrameDescriptor fd = createFrameDescriptor(ruleT);

		IStrategoList premisesTerm = Tools.listAt(ruleT, 0);
		Premise[] premises = new Premise[premisesTerm.size()];
		for (int i = 0; i < premises.length; i++) {
			premises[i] = Premise.create(Tools.applAt(premisesTerm, i), fd);
		}

		IStrategoAppl relationT = Tools.applAt(ruleT, 2);

		assert Tools.hasConstructor(relationT, "Relation", 4);

		IStrategoAppl lhsConTerm = Tools.applAt(Tools.applAt(relationT, 1), 0);
		String constr = Tools.stringAt(lhsConTerm, 0).stringValue();
		int arity = Tools.listAt(lhsConTerm, 1).size();

		RuleTarget target = RuleTarget.create(Tools.applAt(relationT, 3), fd);

		return new Rule(constr, arity, premises, target, SourceSectionUtil.fromStrategoTerm(ruleT), fd);
	}

	/**
	 * Collects the names of all VarRef(_) occurrences anywhere in the rule term
	 * and allocates one frame slot per distinct name.
	 */
	private static FrameDescriptor createFrameDescriptor(IStrategoTerm t) {
		Set<String> vars = new HashSet<>();

		TermVisitor visitor = new TermVisitor() {
			@Override
			public void preVisit(IStrategoTerm t) {
				if (Tools.isTermAppl(t) && Tools.hasConstructor((IStrategoAppl) t, "VarRef", 1)) {
					vars.add(Tools.stringAt(t, 0).stringValue());
				}
			}
		};

		visitor.visit(t);

		FrameDescriptor fd = FrameDescriptor.create();
		for (String v : vars) {
			fd.addFrameSlot(v);
		}
		return fd;
	}
}
package org.mitre.stix;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;

import javax.xml.XMLConstants;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchema;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;

import org.apache.commons.io.IOUtils;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;

/**
 * Gathers up the STIX schema useful for marshalling and unmarshalling, and
 * validation.
 *
 * @author nemonik (Michael Joseph Walsh <github.com@nemonik.com>)
 */
public class STIXSchema {

	private static final Logger LOGGER = Logger.getLogger(STIXSchema.class
			.getName());

	/** STIX schema version, read from the package-level {@code @Version} annotation. */
	private String version;

	private static STIXSchema instance;

	/** Maps a namespace prefix to the URL of the schema document defining it. */
	private Map<String, String> prefixSchemaBindings;

	// NOTE(review): javax.xml.validation.Validator is not thread-safe; this
	// shared instance assumes single-threaded use of validate() — confirm.
	private Validator validator;

	private javax.xml.validation.Schema schema;

	/**
	 * Returns STIXSchema object representing the STIX schema.
	 *
	 * @return Always returns a STIXSchema object representing the STIX schema.
	 */
	public static synchronized STIXSchema getInstance() {
		if (instance == null) {
			instance = new STIXSchema();
		}
		return instance;
	}

	/**
	 * Private constructor to permit a single STIXSchema to exist.
	 *
	 * Scans the classpath for bundled {@code schemas/v<version>} XSD resources,
	 * builds the prefix-to-schema-URL bindings, and compiles them into a single
	 * {@link javax.xml.validation.Schema} and {@link Validator}.
	 */
	private STIXSchema() {

		this.version = ((Version) this.getClass().getPackage()
				.getAnnotation(Version.class)).schema();

		ResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver(
				this.getClass().getClassLoader());

		Resource[] schemaResources;

		try {
			schemaResources = patternResolver
					.getResources("classpath*:schemas/v" + version + "*.xsd");

			prefixSchemaBindings = new HashMap<String, String>();

			for (Resource resource : schemaResources) {

				String url = resource.getURL().toString();

				Document schemaDocument = DocumentBuilderFactory.newInstance()
						.newDocumentBuilder().parse(resource.getInputStream());

				schemaDocument.getDocumentElement().normalize();

				NamedNodeMap attributes = schemaDocument.getDocumentElement()
						.getAttributes();

				// targetNamespace is a property of the document, not of the
				// attribute being inspected, so read it once per resource.
				String targetNamespace = schemaDocument.getDocumentElement()
						.getAttribute("targetNamespace");

				for (int i = 0; i < attributes.getLength(); i++) {

					Node attribute = attributes.item(i);

					// Record the prefix whose xmlns declaration matches the
					// schema's own target namespace.
					if (attribute.getNodeName().startsWith("xmlns:")
							&& attribute.getNodeValue().equals(targetNamespace)) {

						String prefix = attribute.getNodeName().split(":")[1];

						// Keep an already-registered "external" schema binding
						// for this prefix rather than overwriting it.
						if ((prefixSchemaBindings.containsKey(prefix))
								&& (prefixSchemaBindings.get(prefix).split(
										"schemas/v" + version + "/")[1]
										.startsWith("external"))) {
							continue;
						}

						LOGGER.fine("    adding: " + prefix + " :: " + url);

						prefixSchemaBindings.put(prefix, url);
					}
				}
			}

			SchemaFactory factory = SchemaFactory
					.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);

			Source[] schemas = new Source[prefixSchemaBindings.values().size()];

			int i = 0;
			for (String schemaLocation : prefixSchemaBindings.values()) {
				schemas[i++] = new StreamSource(schemaLocation);
			}

			schema = factory.newSchema(schemas);

			validator = schema.newValidator();
			validator.setErrorHandler(new ValidationErrorHandler());

		} catch (IOException e) {
			throw new RuntimeException(e);
		} catch (SAXException e) {
			throw new RuntimeException(e);
		} catch (ParserConfigurationException e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Returns the schema version
	 *
	 * @return The STIX schema version
	 */
	public String getVersion() {
		return version;
	}

	/**
	 * Validate XML text retrieved from URL
	 *
	 * @param url
	 *            The URL object for the XML to be validated.
	 * @return true if the document is valid against the STIX schema
	 */
	public boolean validate(URL url) {

		String xmlText;
		// try-with-resources so the connection stream is always closed;
		// XML without a declared encoding defaults to UTF-8.
		try (InputStream in = url.openStream()) {
			xmlText = IOUtils.toString(in, StandardCharsets.UTF_8);
		} catch (IOException e) {
			throw new RuntimeException(e);
		}

		return validate(xmlText);
	}

	/**
	 * Validate an XML text String against the STIX schema
	 *
	 * @param xmlText
	 *            A string of XML text to be validated
	 * @return true if the document is valid against the STIX schema
	 */
	public boolean validate(String xmlText) {

		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setNamespaceAware(true);

		// This section removes the schema hint as we have the schema docs
		// otherwise exceptions may be thrown
		try {
			// Harden the parser against XXE: the XML may originate from an
			// untrusted source (e.g. validate(URL)).
			factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
			factory.setFeature(
					"http://xml.org/sax/features/external-general-entities",
					false);
			factory.setFeature(
					"http://xml.org/sax/features/external-parameter-entities",
					false);

			DocumentBuilder b = factory.newDocumentBuilder();
			Document document = b.parse(new ByteArrayInputStream(xmlText
					.getBytes(StandardCharsets.UTF_8)));

			Element root = document.getDocumentElement();
			root.removeAttribute("xsi:schemaLocation");

			TransformerFactory transFactory = TransformerFactory.newInstance();
			Transformer transformer = transFactory.newTransformer();
			StringWriter buffer = new StringWriter();
			transformer
					.transform(new DOMSource(root), new StreamResult(buffer));
			xmlText = buffer.toString();
		} catch (ParserConfigurationException e) {
			throw new RuntimeException(e);
		} catch (TransformerConfigurationException e) {
			throw new RuntimeException(e);
		} catch (TransformerException e) {
			throw new RuntimeException(e);
		} catch (SAXException e) {
			throw new RuntimeException(e);
		} catch (IOException e) {
			throw new RuntimeException(e);
		}

		try {
			validator.validate(new StreamSource(new ByteArrayInputStream(
					xmlText.getBytes(StandardCharsets.UTF_8))));
		} catch (IOException e) {
			throw new RuntimeException(e);
		} catch (SAXException e) {
			// Schema violations are reported as SAXExceptions: not an error,
			// just an invalid document.
			return false;
		}

		return true;
	}

	/**
	 * Returns Schema object representing the STIX schema.
	 *
	 * @return Always returns a non-null Schema object representing the STIX
	 *         schema.
	 */
	public javax.xml.validation.Schema getSchema() {
		return schema;
	}

	/** Simple smoke test: validates a sample STIX document by URL and by text. */
	public static void main(String[] args)
			throws ParserConfigurationException, SAXException, IOException {

		STIXSchema schema = STIXSchema.getInstance();

		URL url = new URL(
				"https://raw.githubusercontent.com/STIXProject/schemas/master/samples/STIX_Domain_Watchlist.xml");

		System.out.println(schema.validate(url));

		String xmlText;
		try (InputStream in = url.openStream()) {
			xmlText = IOUtils.toString(in, StandardCharsets.UTF_8);
		}

		System.out.println(schema.validate(xmlText));

		System.out.println(schema.getVersion());
	}

	/**
	 * Return the namespace URI from the package for the class of the object.
	 *
	 * @param obj
	 *            Expects a JAXB model object.
	 * @return Name of the XML namespace.
	 */
	public static String getNamespaceURI(Object obj) {
		Package pkg = obj.getClass().getPackage();

		XmlSchema xmlSchemaAnnotation = pkg.getAnnotation(XmlSchema.class);

		return xmlSchemaAnnotation.namespace();
	}

	/**
	 * Return the name from the JAXB model object.
	 *
	 * @param obj
	 *            Expects a JAXB model object.
	 * @return element name
	 */
	public static String getName(Object obj) {
		// Prefer @XmlRootElement; fall back to @XmlType when the class is not
		// a root element (explicit null check instead of catching NPE).
		XmlRootElement rootAnnotation = obj.getClass().getAnnotation(
				XmlRootElement.class);
		if (rootAnnotation != null) {
			return rootAnnotation.name();
		}
		return obj.getClass().getAnnotation(XmlType.class).name();
	}

	/**
	 * Return the QualifiedName from the JAXB model object.
	 *
	 * @param obj
	 *            Expects a JAXB model object.
	 * @return Qualified name as defined by JAXB model
	 */
	public static QName getQualifiedName(Object obj) {
		return new QName(STIXSchema.getNamespaceURI(obj),
				STIXSchema.getName(obj));
	}
}
package org.csstudio.iter.alarm.beast.ui.alarmtable;

import java.text.SimpleDateFormat;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.csstudio.alarm.beast.client.AlarmTreeItem;
import org.csstudio.alarm.beast.client.AlarmTreePV;
import org.csstudio.alarm.beast.client.AlarmTreeRoot;
import org.csstudio.alarm.beast.ui.actions.AcknowledgeAction;
import org.csstudio.alarm.beast.ui.actions.MaintenanceModeAction;
import org.csstudio.iter.alarm.beast.ui.alarmtable.actions.ColumnConfigureAction;
import org.csstudio.iter.alarm.beast.ui.alarmtable.actions.FilterAction;
import org.csstudio.iter.alarm.beast.ui.alarmtable.actions.NewTableAction;
import org.csstudio.iter.alarm.beast.ui.alarmtable.actions.ResetColumnsAction;
import org.csstudio.iter.alarm.beast.ui.alarmtable.actions.SeparateCombineTablesAction;
import org.csstudio.iter.alarm.beast.ui.alarmtable.actions.ShowFilterAction;
import org.csstudio.alarm.beast.ui.clientmodel.AlarmClientModel;
import org.csstudio.alarm.beast.ui.clientmodel.AlarmClientModelListener;
import org.csstudio.ui.util.dnd.ControlSystemDropTarget;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IMemento;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IViewSite;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.part.ViewPart;

/**
 * Eclipse View for the alarm table.
 *
 * @author Kay Kasemir
 * @author Jaka Bobnar - Combined/split alarm tables, configurable columns
 */
public class AlarmTableView extends ViewPart {

    /** Property id fired when the filter type changes. */
    public static final int PROP_FILTER = 555444;
    /** Property id fired when the filter item changes. */
    public static final int PROP_FILTER_ITEM = 555445;

    private static AtomicInteger secondaryId = new AtomicInteger(1);

    /**
     * Return the next secondary id that has not been opened.
     *
     * @return part
     */
    public static String newSecondaryID(IViewPart part) {
        while (part.getSite().getPage().findViewReference(part.getSite().getId(),
                String.valueOf(secondaryId.get())) != null) {
            secondaryId.incrementAndGet();
        }
        return String.valueOf(secondaryId.get());
    }

    /** Model for the configuration selected via filter item (null when filtering on the tree). */
    private AlarmClientModel model;
    /** Model for the default alarm configuration. */
    private AlarmClientModel defaultModel;
    private AlarmClientModelListener modelListener = new AlarmClientModelListener() {
        @Override
        public void newAlarmConfiguration(AlarmClientModel model) {
            // Model events arrive on a non-UI thread; marshal to SWT thread.
            parent.getDisplay().asyncExec(()->updateFilterItem());
        }

        @Override
        public void serverTimeout(AlarmClientModel model) {
        }

        @Override
        public void serverModeUpdate(AlarmClientModel model, boolean maintenance_mode) {
        }

        @Override
        public void newAlarmState(AlarmClientModel model, AlarmTreePV pv, boolean parent_changed) {
        }
    };
    private Composite parent;
    private GUI gui;
    private MaintenanceModeAction maintenanceModeAction;
    private IMemento memento;
    private FilterType filterType;
    /** Combined active and acknowledge alarms, group into separate tables? */
    private boolean combinedTables;
    /** Should severity icons blink or not */
    private boolean blinkingIcons;
    /** The time format string used for formatting the alarm time label */
    private String timeFormat;
    /** The name of the filter item */
    private String filterItemPath;
    /** The filter item, which should match the filterItemName and configurationName if the model is available */
    private AlarmTreeItem filterItem;

    private ColumnWrapper[] columns = ColumnWrapper.getNewWrappers();

    /** {@inheritDoc} Restores the saved view state before the part is created. */
    @Override
    public void init(IViewSite site, IMemento memento) throws PartInitException {
        this.memento = memento;
        this.blinkingIcons = Preferences.isBlinkUnacknowledged();
        restoreState(memento,site);
        super.init(site);
    }

    /** {@inheritDoc} Frees a secondary id so it can be reused by a later table. */
    @Override
    public void dispose() {
        if (secondaryId.get() > 1) {
            secondaryId.decrementAndGet();
        }
        super.dispose();
    }

    /** {@inheritDoc} Connects to the alarm model(s) and builds the table GUI. */
    @Override
    public void createPartControl(final Composite parent) {
        this.parent = parent;
        parent.setLayout(new FillLayout());
        try {
            defaultModel = AlarmClientModel.getInstance();
            defaultModel.addListener(modelListener);
            if (filterItemPath != null) {
                model = AlarmClientModel.getInstance(getConfigNameFromPath(filterItemPath));
                model.addListener(modelListener);
            }
        } catch (final Throwable ex) {
            // Instead of actual GUI, create error message
            final String error = ex.getCause() != null ? ex.getCause().getMessage() : ex.getMessage();
            final String message = NLS.bind(org.csstudio.alarm.beast.ui.Messages.ServerErrorFmt, error);
            // Add to log, also display in text widget
            Logger.getLogger(Activator.ID).log(Level.SEVERE, "Cannot load alarm model", ex); //$NON-NLS-1$
            parent.setLayout(new FillLayout());
            new Text(parent, SWT.READ_ONLY | SWT.BORDER | SWT.MULTI).setText(message);
            return;
        }

        // Arrange for model to be released
        parent.addDisposeListener(new DisposeListener() {
            @Override
            public void widgetDisposed(DisposeEvent e) {
                releaseModel(defaultModel);
                releaseModel(model);
            }
        });

        makeGUI();
        createToolbar();
        updateFilterItem();
    }

    /**
     * Restore the view settings (table layout, filter, time format) from the
     * memento, falling back to preferences when no memento exists.
     *
     * @param memento the saved state, or null on first open
     * @param site the view site (secondary id distinguishes extra tables)
     */
    private void restoreState(IMemento memento, IViewSite site) {
        if (memento == null) {
            this.combinedTables = Preferences.isCombinedAlarmTable();
            this.filterType = FilterType.TREE;
            this.columns = ColumnWrapper.fromSaveArray(Preferences.getColumns());
            //set format for all tables except the main one
            if (site.getSecondaryId() != null)
                this.timeFormat = Preferences.getTimeFormat();
        } else {
            Boolean groupSet = memento.getBoolean(Preferences.ALARM_TABLE_COMBINED_TABLES);
            this.combinedTables = groupSet == null ? Preferences.isCombinedAlarmTable() : groupSet;
            String filterTypeSet = memento.getString(Preferences.ALARM_TABLE_FILTER_TYPE);
            this.filterType = filterTypeSet == null ? FilterType.TREE
                    : FilterType.valueOf(filterTypeSet.toUpperCase());
            this.timeFormat = memento.getString(Preferences.ALARM_TABLE_TIME_FORMAT);
            if (site.getSecondaryId() != null && this.timeFormat == null)
                this.timeFormat = Preferences.getTimeFormat();
            this.columns = ColumnWrapper.restoreColumns(memento.getChild(Preferences.ALARM_TABLE_COLUMN_SETTING));
            String name = memento.getString(Preferences.ALARM_TABLE_FILTER_ITEM);
            this.filterItemPath = name == null || name.isEmpty() ? null : name;
        }
    }

    /** {@inheritDoc} Persists table layout, filter and sorting for the next session. */
    @Override
    public void saveState(IMemento memento) {
        super.saveState(memento);
        memento.putBoolean(Preferences.ALARM_TABLE_COMBINED_TABLES, combinedTables);
        memento.putString(Preferences.ALARM_TABLE_FILTER_TYPE, filterType.name());
        if (filterItem != null)
            memento.putString(Preferences.ALARM_TABLE_FILTER_ITEM, filterItem.getPathName());
        else if (filterItemPath != null)
            memento.putString(Preferences.ALARM_TABLE_FILTER_ITEM, filterItemPath);
        if (this.timeFormat != null)
            memento.putString(Preferences.ALARM_TABLE_TIME_FORMAT, timeFormat);
        IMemento columnsMemento = memento.createChild(Preferences.ALARM_TABLE_COLUMN_SETTING);
        ColumnWrapper.saveColumns(columnsMemento, getUpdatedColumns());
        if (gui != null) {
            ColumnInfo info = gui.getSortingColumn();
            if (info != null)
                memento.putString(Preferences.ALARM_TABLE_SORT_COLUMN, info.name());
            memento.putBoolean(Preferences.ALARM_TABLE_SORT_UP, gui.isSortingUp());
        }
    }

    /** Builds the toolbar (maintenance/acknowledge/filter) and view menu actions. */
    private void createToolbar() {
        final IToolBarManager toolbar = getViewSite().getActionBars().getToolBarManager();
        toolbar.removeAll();
        if (defaultModel.isWriteAllowed()) {
            maintenanceModeAction = new MaintenanceModeAction(defaultModel);
            toolbar.add(maintenanceModeAction);
            toolbar.add(new Separator());
            AcknowledgeAction action = new AcknowledgeAction(true, gui.getActiveAlarmTable());
            action.clearSelectionOnAcknowledgement(gui.getActiveAlarmTable());
            toolbar.add(action);
            action = new AcknowledgeAction(false, gui.getAcknowledgedAlarmTable());
            action.clearSelectionOnAcknowledgement(gui.getAcknowledgedAlarmTable());
            toolbar.add(action);
            toolbar.add(new Separator());
        }
        for (FilterType f : FilterType.values())
            toolbar.add(new FilterAction(this, f, this.filterType == f));
        toolbar.add(new Separator());
        final IMenuManager menu = getViewSite().getActionBars().getMenuManager();
        menu.add(new NewTableAction(this));
        menu.add(new Separator());
        menu.add(new SeparateCombineTablesAction(this, true, combinedTables));
        menu.add(new SeparateCombineTablesAction(this, false, !combinedTables));
        menu.add(new Separator());
        menu.add(new ColumnConfigureAction(this));
        menu.add(new ResetColumnsAction(this));
        menu.add(new Separator());
        menu.add(new ShowFilterAction(this));
    }

    /**
     * Remove our listener from the given model and release it.
     *
     * Fix: the previous implementation ignored its parameter and always acted
     * on the field {@code this.model}, so {@code releaseModel(defaultModel)}
     * released the wrong model and leaked {@code defaultModel} (its listener
     * was never removed). Now the parameter is released; the {@code model}
     * field is cleared only when it is the instance being released.
     *
     * @param modelToRelease the model to release (may be null)
     */
    private void releaseModel(AlarmClientModel modelToRelease) {
        if (modelToRelease != null) {
            modelToRelease.removeListener(modelListener);
            modelToRelease.release();
            if (modelToRelease == this.model)
                this.model = null;
        }
    }

    /**
     * Set the filter item by its path. The path is transformed to an actual item, which is then applied as the filter
     * item. If the item does not exist a null filter is applied.
     *
     * @see AlarmTableView#setFilterItem(AlarmTreeItem)
     *
     * @param path the path to filter on
     * @throws Exception if the model for the given path could not be created
     */
    public void setFilterItemPath(String path) throws Exception {
        this.filterItemPath = path;
        if (filterItemPath == null || filterItemPath.isEmpty()) {
            releaseModel(model);
            setFilterType(FilterType.TREE);
        } else {
            String configName = getConfigNameFromPath(filterItemPath);
            if (model == null || !model.getConfigurationName().equals(configName)) {
                releaseModel(model);
                this.model = AlarmClientModel.getInstance(configName);
                this.model.addListener(modelListener);
            }
            setFilterType(FilterType.ITEM);
        }
        updateFilterItem();
    }

    /**
     * @return the currently applied filter item
     */
    public String getFilterItemPath() {
        return filterItemPath;
    }

    /**
     * Updated the filter item according to the selected filter type and filter item path. The view title
     * is also updated.
     */
    private void updateFilterItem() {
        AlarmClientModel activeModel = null;
        String name;
        if (filterType == FilterType.TREE) {
            activeModel = defaultModel;
            name = activeModel.getConfigurationName();
            if (gui != null)
                gui.setFilterItem(null, activeModel);
        } else {
            activeModel = model;
            AlarmTreeRoot root = activeModel.getConfigTree().getRoot();
            this.filterItem = root.getItemByPath(filterItemPath);
            if (filterItem != null) {
                if (filterType == FilterType.ITEM)
                    name = filterItem.getName();
                else
                    name = activeModel.getConfigurationName();
            } else {
                //filter path is set, but the item is null, because it
                //either does not exist, or the model is not yet connected
                int idx = filterItemPath.lastIndexOf('/');
                name = idx < 0 ? filterItemPath : filterItemPath.substring(idx + 1);
            }
            if (gui != null)
                gui.setFilterItem(filterType == FilterType.ITEM ? filterItem : null, activeModel);
        }
        if (maintenanceModeAction != null)
            maintenanceModeAction.setModel(activeModel);
        setPartName(NLS.bind(Messages.AlarmTablePartName, name));
        setTitleToolTip(NLS.bind(Messages.AlarmTableTitleTT, name));
        firePropertyChange(PROP_FILTER_ITEM);
    }

    /**
     * @return the columns as they are currently visible and ordered in the table
     */
    public ColumnWrapper[] getUpdatedColumns() {
        ColumnWrapper[] columns = ColumnWrapper.getCopy(this.columns);
        if (gui != null)
            gui.updateColumnOrder(columns);
        return columns;
    }

    /**
     * Set the columns for the table. The table will display the columns in the provided order and will show only those
     * columns that have the visible flag set to true
     *
     * @param columns the columns to set on the table
     */
    public void setColumns(ColumnWrapper[] columns) {
        this.columns = columns;
        redoGUI();
    }

    /**
     * (Re)create the table GUI, preserving the sorting of a previous GUI when
     * one existed, otherwise restoring sorting from the memento.
     *
     * @return true if the GUI was built, false if the parent is already disposed
     */
    private boolean makeGUI() {
        if (parent.isDisposed())
            return false;
        String s = memento == null ? null : memento.getString(Preferences.ALARM_TABLE_SORT_COLUMN);
        ColumnInfo sorting = s == null ? ColumnInfo.PV : ColumnInfo.valueOf(s);
        boolean sortUp = false;
        if (memento != null) {
            Boolean b = memento.getBoolean(Preferences.ALARM_TABLE_SORT_UP);
            sortUp = b == null ? false : b;
        }
        if (gui != null) {
            sorting = gui.getSortingColumn();
            sortUp = gui.isSortingUp();
            gui.dispose();
        }
        gui = new GUI(parent, getSite(), defaultModel.isWriteAllowed(), !combinedTables, columns, sorting, sortUp);
        gui.setBlinking(blinkingIcons);
        gui.setTimeFormat(timeFormat);
        setUpDrop(gui.getActiveAlarmTable().getTable());
        setUpDrop(gui.getAcknowledgedAlarmTable().getTable());
        updateFilterItem();
        return true;
    }

    /**
     * Install a drop target on the given table control so alarm tree items or
     * PVs dragged onto the table become the filter item.
     *
     * @param control the table control (ignored when null or already set up)
     */
    private void setUpDrop(Control control) {
        if (control == null || control.getData(DND.DROP_TARGET_KEY) != null)
            return;
        new ControlSystemDropTarget(control, AlarmTreeItem.class, AlarmTreeItem[].class, AlarmTreePV.class,
                AlarmTreePV[].class) {
            @Override
            public void handleDrop(Object item) {
                try {
                    if (item instanceof AlarmTreeItem[])
                        setFilterItemPath(((AlarmTreeItem[])item)[0].getPathName());
                    else if (item instanceof AlarmTreeItem || item instanceof AlarmTreePV)
                        setFilterItemPath(((AlarmTreeItem)item).getPathName());
                    else if (item instanceof AlarmTreePV[])
                        setFilterItemPath(((AlarmTreePV[])item)[0].getPathName());
                } catch (Exception e) {
                    throw new IllegalArgumentException(e);
                }
            }
        };
    }

    /** Rebuild the GUI asynchronously on the SWT thread (after layout changes). */
    private void redoGUI() {
        if (gui != null) {
            parent.getDisplay().asyncExec(() -> {
                if (makeGUI())
                    parent.layout();
            });
        }
    }

    @Override
    public void setFocus() {
        // NOP
    }

    /**
     * Combine all alarms into a single table or group the alarms into two separate tables (by the acknowledge status).
     *
     * @param combinedTables true if the acknowledged and unacknowledged alarms should be displayed in a single table,
     *            or false if they should be displayed in separate tables
     */
    public void setCombinedTables(boolean combinedTables) {
        this.combinedTables = combinedTables;
        redoGUI();
    }

    /**
     * Set the filter type for the table. The table will display alarms according to this filter. If the filter
     * is {@link FilterType#TREE} all alarms from the root currently selected in the alarm tree; if the filter type
     * is {@link FilterType#ROOT} all alarms from the root that is currently applied to this table (through filter item)
     * will be displayed; if filter type is {@link FilterType#ITEM} only the alarms belonging to the filter item will
     * be displayed.
     *
     * @param filterType the filter type to set
     */
    public void setFilterType(FilterType filterType) {
        if (filterItemPath == null && (filterType == FilterType.ROOT || filterType == FilterType.ITEM)) {
            throw new IllegalStateException("Cannot apply filter type " + filterType //$NON-NLS-1$
                    + " if no filter item is defined."); //$NON-NLS-1$
        }
        this.filterType = filterType;
        updateFilterItem();
        firePropertyChange(PROP_FILTER);
    }

    /**
     * @return the filter type currently selected for this table
     */
    public FilterType getFilterType() {
        return this.filterType;
    }

    /**
     * Enables or disables blinking of icons of the unacknowledged alarms.
     *
     * @param blinking true if the icons should be blinking or false otherwise
     */
    public void setBlinkingIcons(boolean blinking) {
        this.blinkingIcons = blinking;
        if (gui != null)
            gui.setBlinking(blinking);
    }

    /**
     * @return the alarm client model used by this table
     */
    public AlarmClientModel getModel() {
        return filterType == FilterType.TREE ? defaultModel : model;
    }

    /**
     * Sets the time format used for formatting the value in the time column. Format should be in the form acceptable by
     * the {@link SimpleDateFormat}.
     *
     * @param format the format
     */
    public void setTimeFormat(String format) {
        if (format != null && format.isEmpty())
            format = null;
        this.timeFormat = format;
        if (gui != null)
            gui.setTimeFormat(format);
    }

    /**
     * @return the currently used time format or null if default
     */
    public String getTimeFormat() {
        return timeFormat;
    }

    /**
     * Parses the configuration name from the given path.
     *
     * @param path the path to parse
     * @return configuration name
     */
    public static String getConfigNameFromPath(String path) {
        String name = path;
        if (name.charAt(0) == '/')
            name = name.substring(1);
        // Robustness: a path without a further '/' is already just the
        // configuration name (previously threw StringIndexOutOfBoundsException).
        int end = name.indexOf('/');
        return end < 0 ? name : name.substring(0, end);
    }
}
package org.psjava.ds.graph; import org.psjava.ds.PSCollection; @Deprecated public interface Graph<V, E> { PSCollection<V> getVertices(); Iterable<E> getEdges(V from); }
package org.appwork.utils.logging; import java.text.DateFormat; import java.util.Date; import java.util.logging.LogRecord; import java.util.logging.SimpleFormatter; import org.appwork.utils.Exceptions; import org.appwork.utils.os.CrossSystem; public class LogFormatter extends SimpleFormatter { /** * Date to convert timestamp to a readable format */ private final Date date = new Date(); /** * For thread controlled logs */ private int lastThreadID; /** * Dateformat to convert timestamp to a readable format */ private final DateFormat longTimestamp = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.MEDIUM); private final String NEWLINE = CrossSystem.getNewLine(); @Override public synchronized String format(final LogRecord record) { /* clear StringBuilder buffer */ final StringBuilder sb = new StringBuilder(); // Minimize memory allocations here. this.date.setTime(record.getMillis()); final String message = this.formatMessage(record); final int th = record.getThreadID(); if (th != this.lastThreadID) { sb.append(NEWLINE); sb.append("THREAD: "); sb.append(th); sb.append(NEWLINE); } this.lastThreadID = th; sb.append(record.getThreadID()); sb.append('|'); sb.append(record.getLoggerName()); sb.append(' '); sb.append(this.longTimestamp.format(this.date)); sb.append(" - "); sb.append(record.getLevel().getName()); sb.append(" [ "); if (record.getSourceClassName() != null) { sb.append(record.getSourceClassName()); } else { sb.append(record.getLoggerName()); } if (record.getSourceMethodName() != null) { sb.append('('); sb.append(record.getSourceMethodName()); sb.append(')'); } sb.append(" ] "); sb.append("-> "); sb.append(message); sb.append(NEWLINE); if (record.getThrown() != null) { sb.append(Exceptions.getStackTrace(record.getThrown())); sb.append(NEWLINE); } return sb.toString(); } }
package com.opengamma.financial.analytics.model.sabrcube; import java.util.Collections; import java.util.Set; import javax.time.calendar.Clock; import javax.time.calendar.ZonedDateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Sets; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.analytics.financial.instrument.InstrumentDefinition; import com.opengamma.analytics.financial.interestrate.InstrumentDerivative; import com.opengamma.analytics.financial.interestrate.InstrumentSensitivityCalculator; import com.opengamma.analytics.financial.interestrate.PresentValueNodeSensitivityCalculator; import com.opengamma.analytics.financial.interestrate.YieldCurveBundle; import com.opengamma.analytics.financial.model.interestrate.curve.YieldAndDiscountCurve; import com.opengamma.analytics.financial.model.option.definition.SABRInterestRateDataBundle; import com.opengamma.analytics.math.matrix.DoubleMatrix1D; import com.opengamma.analytics.math.matrix.DoubleMatrix2D; import com.opengamma.core.holiday.HolidaySource; import com.opengamma.core.region.RegionSource; import com.opengamma.core.security.SecuritySource; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.ComputationTargetType; import com.opengamma.engine.function.AbstractFunction; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueRequirementNames; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.financial.OpenGammaCompilationContext; import com.opengamma.financial.OpenGammaExecutionContext; import 
com.opengamma.financial.analytics.conversion.CapFloorCMSSpreadSecurityConverter; import com.opengamma.financial.analytics.conversion.CapFloorSecurityConverter; import com.opengamma.financial.analytics.conversion.FixedIncomeConverterDataProvider; import com.opengamma.financial.analytics.conversion.SwapSecurityConverter; import com.opengamma.financial.analytics.conversion.SwaptionSecurityConverter; import com.opengamma.financial.analytics.ircurve.InterpolatedYieldCurveSpecificationWithSecurities; import com.opengamma.financial.analytics.ircurve.YieldCurveFunction; import com.opengamma.financial.analytics.model.FunctionUtils; import com.opengamma.financial.analytics.model.InterpolatedDataProperties; import com.opengamma.financial.analytics.model.YieldCurveNodeSensitivitiesHelper; import com.opengamma.financial.analytics.model.curve.interestrate.MarketInstrumentImpliedYieldCurveFunction; import com.opengamma.financial.analytics.model.volatility.VolatilityDataFittingDefaults; import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesBundle; import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesFunctionUtils; import com.opengamma.financial.convention.ConventionBundle; import com.opengamma.financial.convention.ConventionBundleSource; import com.opengamma.financial.convention.InMemoryConventionBundleMaster; import com.opengamma.financial.convention.daycount.DayCount; import com.opengamma.financial.security.FinancialSecurity; import com.opengamma.financial.security.FinancialSecurityUtils; import com.opengamma.financial.security.FinancialSecurityVisitor; import com.opengamma.financial.security.FinancialSecurityVisitorAdapter; import com.opengamma.id.ExternalId; import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver; import com.opengamma.util.money.Currency; public abstract class SABRYieldCurveNodeSensitivitiesFunction extends AbstractFunction.NonCompiledInvoker { private static final Logger s_logger = 
LoggerFactory.getLogger(SABRYieldCurveNodeSensitivitiesFunction.class); private static final InstrumentSensitivityCalculator CALCULATOR = InstrumentSensitivityCalculator.getInstance(); private FinancialSecurityVisitor<InstrumentDefinition<?>> _securityVisitor; private SecuritySource _securitySource; private FixedIncomeConverterDataProvider _definitionConverter; @Override public void init(final FunctionCompilationContext context) { final HolidaySource holidaySource = OpenGammaCompilationContext.getHolidaySource(context); final RegionSource regionSource = OpenGammaCompilationContext.getRegionSource(context); final ConventionBundleSource conventionSource = OpenGammaCompilationContext.getConventionBundleSource(context); final HistoricalTimeSeriesResolver timeSeriesResolver = OpenGammaCompilationContext.getHistoricalTimeSeriesResolver(context); _securitySource = OpenGammaCompilationContext.getSecuritySource(context); final SwapSecurityConverter swapConverter = new SwapSecurityConverter(holidaySource, conventionSource, regionSource, false); final SwaptionSecurityConverter swaptionConverter = new SwaptionSecurityConverter(_securitySource, swapConverter); final CapFloorSecurityConverter capFloorVisitor = new CapFloorSecurityConverter(holidaySource, conventionSource, regionSource); final CapFloorCMSSpreadSecurityConverter capFloorCMSSpreadSecurityVisitor = new CapFloorCMSSpreadSecurityConverter(holidaySource, conventionSource, regionSource); _securityVisitor = FinancialSecurityVisitorAdapter.<InstrumentDefinition<?>>builder().swapSecurityVisitor(swapConverter).swaptionVisitor(swaptionConverter).capFloorVisitor(capFloorVisitor) .capFloorCMSSpreadVisitor(capFloorCMSSpreadSecurityVisitor).create(); _definitionConverter = new FixedIncomeConverterDataProvider(conventionSource, timeSeriesResolver); } @Override public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> 
desiredValues) { final ValueRequirement desiredValue = desiredValues.iterator().next(); final PresentValueNodeSensitivityCalculator nodeCalculator = getNodeSensitivityCalculator(desiredValue); final FinancialSecurity security = (FinancialSecurity) target.getSecurity(); final Currency currency = FinancialSecurityUtils.getCurrency(security); final Clock snapshotClock = executionContext.getValuationClock(); final ZonedDateTime now = snapshotClock.zonedDateTime(); final ValueProperties constraints = desiredValues.iterator().next().getConstraints(); final String forwardCurveName = constraints.getValues(YieldCurveFunction.PROPERTY_FORWARD_CURVE).iterator().next(); final String fundingCurveName = constraints.getValues(YieldCurveFunction.PROPERTY_FUNDING_CURVE).iterator().next(); final String curveName = constraints.getValues(ValuePropertyNames.CURVE).iterator().next(); final String calculationMethod = constraints.getValues(ValuePropertyNames.CURVE_CALCULATION_METHOD).iterator().next(); final HistoricalTimeSeriesBundle timeSeries = HistoricalTimeSeriesFunctionUtils.getHistoricalTimeSeriesInputs(executionContext, inputs); final ValueRequirement curveSpecRequirement = getCurveSpecRequirement(currency, curveName); final Object curveSpecObject = inputs.getValue(curveSpecRequirement); if (curveSpecObject == null) { throw new OpenGammaRuntimeException("Could not get " + curveSpecRequirement); } final InstrumentDefinition<?> definition = security.accept(_securityVisitor); if (definition == null) { throw new OpenGammaRuntimeException("Definition for security " + security + " was null"); } final ConventionBundleSource conventionSource = OpenGammaExecutionContext.getConventionBundleSource(executionContext); final String conventionName = currency.getCode() + "_SWAP"; final ConventionBundle convention = conventionSource.getConventionBundle(ExternalId.of(InMemoryConventionBundleMaster.SIMPLE_NAME_SCHEME, conventionName)); if (convention == null) { throw new 
OpenGammaRuntimeException("Could not get convention named " + conventionName); } final DayCount dayCount = convention.getSwapFloatingLegDayCount(); if (dayCount == null) { throw new OpenGammaRuntimeException("Could not get daycount"); } final InterpolatedYieldCurveSpecificationWithSecurities curveSpec = (InterpolatedYieldCurveSpecificationWithSecurities) curveSpecObject; final SABRInterestRateDataBundle data = getModelParameters(target, inputs, currency, dayCount, desiredValue); final InstrumentDerivative derivative = _definitionConverter.convert(security, definition, now, new String[] {fundingCurveName, forwardCurveName }, timeSeries); final ValueProperties properties = getResultProperties(target, desiredValue); final ValueSpecification spec = new ValueSpecification(ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES, target.toSpecification(), properties); if (calculationMethod.equals(InterpolatedDataProperties.CALCULATION_METHOD_NAME)) { final DoubleMatrix1D sensitivities = CALCULATOR.calculateFromSimpleInterpolatedCurve(derivative, data, nodeCalculator); return YieldCurveNodeSensitivitiesHelper.getInstrumentLabelledSensitivitiesForCurve(curveName, data, sensitivities, curveSpec, spec); } final Object jacobianObject = inputs.getValue(getJacobianRequirement(currency, forwardCurveName, fundingCurveName, calculationMethod)); if (jacobianObject == null) { throw new OpenGammaRuntimeException("Could not get " + ValueRequirementNames.YIELD_CURVE_JACOBIAN); } final double[][] array = FunctionUtils.decodeJacobian(jacobianObject); final DoubleMatrix2D jacobian = new DoubleMatrix2D(array); DoubleMatrix1D sensitivities; if (calculationMethod.equals(MarketInstrumentImpliedYieldCurveFunction.PRESENT_VALUE_STRING)) { final Object couponSensitivityObject = inputs.getValue(getCouponSensitivitiesRequirement(currency, forwardCurveName, fundingCurveName)); if (couponSensitivityObject == null) { throw new OpenGammaRuntimeException("Could not get " + 
ValueRequirementNames.PRESENT_VALUE_COUPON_SENSITIVITY); } final DoubleMatrix1D couponSensitivity = (DoubleMatrix1D) couponSensitivityObject; sensitivities = CALCULATOR.calculateFromPresentValue(derivative, null, data, couponSensitivity, jacobian, nodeCalculator); } else { sensitivities = CALCULATOR.calculateFromParRate(derivative, null, data, jacobian, nodeCalculator); } return YieldCurveNodeSensitivitiesHelper.getInstrumentLabelledSensitivitiesForCurve(curveName, data, sensitivities, curveSpec, spec); } @Override public ComputationTargetType getTargetType() { return ComputationTargetType.SECURITY; } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) { final Currency ccy = FinancialSecurityUtils.getCurrency(target.getSecurity()); final ValueProperties properties = getResultProperties(ccy); return Collections.singleton(new ValueSpecification(ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES, target.toSpecification(), properties)); } @Override public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) { final ValueProperties constraints = desiredValue.getConstraints(); final Set<String> curveNames = constraints.getValues(ValuePropertyNames.CURVE); if (curveNames == null || curveNames.size() != 1) { s_logger.error("Must ask for a single named curve"); return null; } final Set<String> forwardCurveNames = constraints.getValues(YieldCurveFunction.PROPERTY_FORWARD_CURVE); if (forwardCurveNames == null || forwardCurveNames.size() != 1) { return null; } final Set<String> fundingCurveNames = constraints.getValues(YieldCurveFunction.PROPERTY_FUNDING_CURVE); if (fundingCurveNames == null || fundingCurveNames.size() != 1) { return null; } final Set<String> cubeNames = constraints.getValues(ValuePropertyNames.CUBE); if (cubeNames == null || cubeNames.size() != 1) { return null; } final Set<String> 
curveCalculationMethods = constraints.getValues(ValuePropertyNames.CURVE_CALCULATION_METHOD); if (curveCalculationMethods == null || curveCalculationMethods.size() != 1) { return null; } final Set<String> fittingMethods = constraints.getValues(VolatilityDataFittingDefaults.PROPERTY_FITTING_METHOD); if (fittingMethods == null || fittingMethods.size() != 1) { return null; } final String forwardCurveName = forwardCurveNames.iterator().next(); final String fundingCurveName = fundingCurveNames.iterator().next(); final String cubeName = cubeNames.iterator().next(); final String curveName = curveNames.iterator().next(); final String curveCalculationMethod = curveCalculationMethods.iterator().next(); final String fittingMethod = fittingMethods.iterator().next(); final FinancialSecurity security = (FinancialSecurity) target.getSecurity(); final Currency currency = FinancialSecurityUtils.getCurrency(security); final ValueRequirement forwardCurveRequirement = getCurveRequirement(forwardCurveName, forwardCurveName, fundingCurveName, curveCalculationMethod, currency); final ValueRequirement fundingCurveRequirement = getCurveRequirement(fundingCurveName, forwardCurveName, fundingCurveName, curveCalculationMethod, currency); final ValueRequirement curveSpecRequirement = getCurveSpecRequirement(currency, curveName); final ValueRequirement cubeRequirement = getCubeRequirement(cubeName, currency, fittingMethod); final Set<ValueRequirement> requirements = Sets.newHashSet(forwardCurveRequirement, fundingCurveRequirement, cubeRequirement, curveSpecRequirement); if (curveCalculationMethod.equals(InterpolatedDataProperties.CALCULATION_METHOD_NAME)) { return requirements; } final ValueRequirement jacobianRequirement = getJacobianRequirement(currency, forwardCurveName, fundingCurveName, curveCalculationMethod); requirements.add(jacobianRequirement); if (curveCalculationMethod.equals(MarketInstrumentImpliedYieldCurveFunction.PRESENT_VALUE_STRING)) { final ValueRequirement 
couponSensitivitiesRequirement = getCouponSensitivitiesRequirement(currency, forwardCurveName, fundingCurveName); requirements.add(couponSensitivitiesRequirement); } final Set<ValueRequirement> timeSeriesRequirements = _definitionConverter.getConversionTimeSeriesRequirements(security, security.accept(_securityVisitor), new String[] {fundingCurveName, forwardCurveName }); if (timeSeriesRequirements == null) { return null; } requirements.addAll(timeSeriesRequirements); return requirements; } protected abstract SABRInterestRateDataBundle getModelParameters(final ComputationTarget target, final FunctionInputs inputs, final Currency currency, final DayCount dayCount, final ValueRequirement desiredValue); private ValueRequirement getCurveRequirement(final String curveName, final String advisoryForward, final String advisoryFunding, final String calculationMethod, final Currency currency) { return YieldCurveFunction.getCurveRequirement(currency, curveName, advisoryForward, advisoryFunding, calculationMethod); } protected YieldCurveBundle getYieldCurves(final FunctionInputs inputs, final Currency currency, final ValueRequirement desiredValue) { final String forwardCurveName = desiredValue.getConstraint(YieldCurveFunction.PROPERTY_FORWARD_CURVE); final String fundingCurveName = desiredValue.getConstraint(YieldCurveFunction.PROPERTY_FUNDING_CURVE); final String curveCalculationMethod = desiredValue.getConstraint(ValuePropertyNames.CURVE_CALCULATION_METHOD); final Object forwardCurveObject = inputs.getValue(YieldCurveFunction.getCurveRequirement(currency, forwardCurveName, forwardCurveName, fundingCurveName, curveCalculationMethod)); if (forwardCurveObject == null) { throw new OpenGammaRuntimeException("Could not get forward curve"); } final Object fundingCurveObject = inputs.getValue(YieldCurveFunction.getCurveRequirement(currency, fundingCurveName, forwardCurveName, fundingCurveName, curveCalculationMethod)); if (fundingCurveObject == null) { throw new 
OpenGammaRuntimeException("Could not get funding curve"); } final YieldAndDiscountCurve forwardCurve = (YieldAndDiscountCurve) forwardCurveObject; final YieldAndDiscountCurve fundingCurve = (YieldAndDiscountCurve) fundingCurveObject; return new YieldCurveBundle(new String[] {fundingCurveName, forwardCurveName }, new YieldAndDiscountCurve[] {fundingCurve, forwardCurve }); } protected ValueRequirement getCubeRequirement(final String cubeName, final Currency currency, final String fittingMethod) { final ValueProperties properties = ValueProperties.builder() .with(ValuePropertyNames.CUBE, cubeName) .with(ValuePropertyNames.CURRENCY, Currency.USD.getCode()) // TODO should be 'currency.getCode()' when non-USD currencies supported .with(VolatilityDataFittingDefaults.PROPERTY_VOLATILITY_MODEL, VolatilityDataFittingDefaults.SABR_FITTING) .with(VolatilityDataFittingDefaults.PROPERTY_FITTING_METHOD, fittingMethod).get(); return new ValueRequirement(ValueRequirementNames.SABR_SURFACES, Currency.USD, properties); // TODO should be 'currency' when non-USD currencies supported } protected abstract ValueProperties getResultProperties(final Currency currency); protected abstract ValueProperties getResultProperties(final ComputationTarget target, final ValueRequirement desiredValue); protected abstract PresentValueNodeSensitivityCalculator getNodeSensitivityCalculator(final ValueRequirement desiredValue); private ValueRequirement getCurveSpecRequirement(final Currency currency, final String curveName) { final ValueProperties properties = ValueProperties.builder() .with(ValuePropertyNames.CURVE, curveName).get(); return new ValueRequirement(ValueRequirementNames.YIELD_CURVE_SPEC, ComputationTargetType.PRIMITIVE, currency.getUniqueId(), properties); } private ValueRequirement getCouponSensitivitiesRequirement(final Currency currency, final String forwardCurveName, final String fundingCurveName) { return YieldCurveFunction.getCouponSensitivityRequirement(currency, forwardCurveName, 
fundingCurveName); } private ValueRequirement getJacobianRequirement(final Currency currency, final String forwardCurveName, final String fundingCurveName, final String curveCalculationMethod) { return YieldCurveFunction.getJacobianRequirement(currency, forwardCurveName, fundingCurveName, curveCalculationMethod); } }
package org.vafer.jdeb; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.security.DigestOutputStream; import java.security.MessageDigest; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.Task; import org.apache.tools.tar.TarEntry; import org.apache.tools.tar.TarInputStream; import org.apache.tools.tar.TarOutputStream; import org.vafer.jdeb.ar.ArArchive; import org.vafer.jdeb.ar.FileArEntry; import org.vafer.jdeb.ar.StaticArEntry; public class DebAntTask extends Task { private File deb; private File control; private Collection dataCollection = new ArrayList(); public void setDestfile(File deb) { this.deb = deb; } public void setControl(File control) { this.control = control; } public static class Data { private String prefix = ""; private int strip = 0; private File data; public void setStrip(int strip) { this.strip = strip; } public void setPrefix(String prefix) { if (!prefix.endsWith("/")) { this.prefix = prefix + "/"; return; } this.prefix = prefix; } public void setSrc(File data) { this.data = data; } public File getFile() { return data; } public int getStrip() { return strip; } public String getPrefix() { return prefix; } public String toString() { return data.toString(); } } public void addData(Data data) { dataCollection.add(data); } public void execute() { if (control == null || !control.isDirectory()) { throw new BuildException("you need to point the 'control' attribute to the control directory"); } if (dataCollection.size() == 0) { throw new BuildException("you need to provide at least one pointer to a tgz or directory with the data"); } if (deb == null) { throw new BuildException("you need to point the 'destfile' attribute to where the deb is 
supposed to be created"); } File tempData = null; File tempControl = null; try { tempData = File.createTempFile("deb", "data"); tempControl = File.createTempFile("deb", "control"); final TarOutputStream outputStream = new TarOutputStream(new GZIPOutputStream(new FileOutputStream(tempData))); outputStream.setLongFileMode(TarOutputStream.LONGFILE_GNU); final StringBuffer md5sum = new StringBuffer(); for (Iterator it = dataCollection.iterator(); it.hasNext();) { final Data data = (Data) it.next(); log("*** adding data from " + data); buildData(data, outputStream, md5sum); } outputStream.close(); buildControl(control, md5sum.toString(), tempControl); ArArchive ar = new ArArchive(new FileOutputStream(deb)); ar.add(new StaticArEntry("debian-binary", 0, 0, 33188, "2.0\n")); ar.add(new FileArEntry(tempControl, "control.tar.gz", 0, 0, 33188)); ar.add(new FileArEntry(tempData, "data.tar.gz", 0, 0, 33188)); ar.close(); } catch(Exception e) { if (tempData != null) { tempData.delete(); } if (tempControl != null) { tempControl.delete(); } e.printStackTrace(); throw new BuildException("could not create deb package", e); } } private static interface FileVisitor { void visit( File file ); } private void iterate( File dir, FileVisitor visitor) { // FIXME: make configurable if (".svn".equals(dir.getName())) { return; } visitor.visit(dir); if (dir.isDirectory()) { File[] childs = dir.listFiles(); for (int i = 0; i < childs.length; i++) { iterate(childs[i], visitor); } } } private String stripPath( final int p, final String s ) { if (p<=0) { return s; } int x = 0; for (int i=0 ; i<p; i++) { x = s.indexOf('/', x); if (x < 0) { return s; } } return s.substring(x+1); } private void buildData( final Data srcData, final TarOutputStream outputStream, final StringBuffer md5sum ) throws Exception { final File src = srcData.getFile(); if (!src.exists()) { return; } // FIXME: merge both cases via visitor if (src.isFile()) { final TarInputStream inputStream = new TarInputStream(new 
GZIPInputStream(new FileInputStream(src))); final MessageDigest digest = MessageDigest.getInstance("MD5"); while(true) { final TarEntry entry = inputStream.getNextEntry(); if (entry == null) { break; } entry.setName(srcData.getPrefix() + stripPath(srcData.getStrip(), entry.getName())); outputStream.putNextEntry(entry); digest.reset(); copy(inputStream, new DigestOutputStream(outputStream, digest)); log("adding data file name:" + entry.getName() + " size:" + entry.getSize() + " mode:" + entry.getMode() + " linkname:" + entry.getLinkName() + " username:" + entry.getUserName() + " userid:" + entry.getUserId() + " groupname:" + entry.getGroupName() + " groupid:" + entry.getGroupId() + " modtime:" + entry.getModTime() + " md5: " + toHex(digest.digest()) ); outputStream.closeEntry(); md5sum.append(entry.getName()).append(" ").append(toHex(digest.digest())).append('\n'); } inputStream.close(); } else { final MessageDigest digest = MessageDigest.getInstance("MD5"); iterate(src, new FileVisitor() { public void visit( File file ) { try { TarEntry entry = new TarEntry(file); String localName = file.getAbsolutePath().substring(src.getAbsolutePath().length()); if ("".equals(localName)) { return; } entry.setName(srcData.getPrefix() + stripPath(srcData.getStrip(), localName.substring(1))); if (file.isDirectory()) { log("adding data directory name:" + entry.getName() + " size:" + entry.getSize() + " mode:" + entry.getMode() + " linkname:" + entry.getLinkName() + " username:" + entry.getUserName() + " userid:" + entry.getUserId() + " groupname:" + entry.getGroupName() + " groupid:" + entry.getGroupId() + " modtime:" + entry.getModTime() ); outputStream.putNextEntry(entry); outputStream.closeEntry(); return; } InputStream inputStream = new FileInputStream(file); log("adding data file name:" + entry.getName() + " size:" + entry.getSize() + " mode:" + entry.getMode() + " linkname:" + entry.getLinkName() + " username:" + entry.getUserName() + " userid:" + entry.getUserId() + " 
groupname:" + entry.getGroupName() + " groupid:" + entry.getGroupId() + " modtime:" + entry.getModTime() + " md5: " + toHex(digest.digest()) ); outputStream.putNextEntry(entry); digest.reset(); copy(inputStream, new DigestOutputStream(outputStream, digest)); outputStream.closeEntry(); md5sum.append(entry.getName()).append(" ").append(toHex(digest.digest())).append('\n'); inputStream.close(); } catch (Exception e) { e.printStackTrace(); } } }); } } private void buildControl( final File src, final String digests, final File dst ) throws Exception { final TarOutputStream outputStream = new TarOutputStream(new GZIPOutputStream(new FileOutputStream(dst))); outputStream.setLongFileMode(TarOutputStream.LONGFILE_GNU); iterate(src, new FileVisitor() { public void visit( File file ) { if (file.isDirectory()) { return; } try { TarEntry entry = new TarEntry(file); entry.setName(file.getName()); InputStream inputStream = new FileInputStream(file); log("adding control file " + entry.getName()); outputStream.putNextEntry(entry); copy(inputStream, outputStream); outputStream.closeEntry(); inputStream.close(); } catch (Exception e) { e.printStackTrace(); } } }); byte[] data = digests.getBytes("UTF-8"); TarEntry entry = new TarEntry("md5sums"); entry.setSize(data.length); log("adding control file " + entry.getName()); outputStream.putNextEntry(entry); outputStream.write(data); outputStream.closeEntry(); outputStream.close(); } private static String toHex(byte[] b) { final StringBuffer sb = new StringBuffer(); for (int i = 0; i < b.length; ++i) { sb.append(Integer.toHexString((b[i]>>4) & 0x0f)); sb.append(Integer.toHexString(b[i] & 0x0f)); } return sb.toString(); } private static int copy(InputStream input, OutputStream output) throws IOException { byte[] buffer = new byte[2048]; int count = 0; int n = 0; while (-1 != (n = input.read(buffer))) { output.write(buffer, 0, n); count += n; } return count; } }
package org.bouncycastle.asn1; import org.bouncycastle.asn1.util.ASN1Dump; import java.io.IOException; /** * ASN.1 TaggedObject - in ASN.1 nottation this is any object proceeded by * a [n] where n is some number - these are assume to follow the construction * rules (as with sequences). */ public abstract class ASN1TaggedObject extends DERObject implements ASN1TaggedObjectParser { int tagNo; boolean empty = false; boolean explicit = true; DEREncodable obj = null; static public ASN1TaggedObject getInstance( ASN1TaggedObject obj, boolean explicit) { if (explicit) { return (ASN1TaggedObject)obj.getObject(); } throw new IllegalArgumentException("implicitly tagged tagged object"); } static public ASN1TaggedObject getInstance( Object obj) { if (obj == null || obj instanceof ASN1TaggedObject) { return (ASN1TaggedObject)obj; } throw new IllegalArgumentException("unknown object in getInstance"); } /** * Create a tagged object in the explicit style. * * @param tagNo the tag number for this object. * @param obj the tagged object. */ public ASN1TaggedObject( int tagNo, DEREncodable obj) { this.explicit = true; this.tagNo = tagNo; this.obj = obj; } /** * Create a tagged object with the style given by the value of explicit. * <p> * If the object implements ASN1Choice the tag style will always be changed * to explicit in accordance with the ASN.1 encoding rules. * </p> * @param explicit true if the object is explicitly tagged. * @param tagNo the tag number for this object. * @param obj the tagged object. 
*/ public ASN1TaggedObject( boolean explicit, int tagNo, DEREncodable obj) { if (obj instanceof ASN1Choice) { this.explicit = true; } else { this.explicit = explicit; } this.tagNo = tagNo; this.obj = obj; } public boolean equals( Object o) { if (o == this) { return true; } if (!(o instanceof DEREncodable)) { return false; } DERObject ot = ((DEREncodable)o).getDERObject(); if (!(ot instanceof ASN1TaggedObject)) { return false; } ASN1TaggedObject other = (ASN1TaggedObject)ot; if (tagNo != other.tagNo || empty != other.empty || explicit != other.explicit) { return false; } if(obj == null) { if (other.obj != null) { return false; } } else { if (!(obj.getDERObject().equals(other.obj.getDERObject()))) { System.out.println(ASN1Dump.dumpAsString(obj)); System.out.println(ASN1Dump.dumpAsString(other.obj)); return false; } } return true; } public int hashCode() { int code = tagNo; if (obj != null) { code ^= obj.hashCode(); } return code; } public int getTagNo() { return tagNo; } /** * return whether or not the object may be explicitly tagged. * <p> * Note: if the object has been read from an input stream, the only * time you can be sure if isExplicit is returning the true state of * affairs is if it returns false. An implicitly tagged object may appear * to be explicitly tagged, so you need to understand the context under * which the reading was done as well, see getObject below. */ public boolean isExplicit() { return explicit; } public boolean isEmpty() { return empty; } /** * return whatever was following the tag. * <p> * Note: tagged objects are generally context dependent if you're * trying to extract a tagged object you should be going via the * appropriate getInstance method. */ public DERObject getObject() { if (obj != null) { return obj.getDERObject(); } return null; } /** * Return the object held in this tagged object as a parser assuming it has * the type of the passed in tag. If the object doesn't have a parser * associated with it, the base object is returned. 
*/ public DEREncodable getObjectParser( int tag, boolean isExplicit) { if (isExplicit) { switch (tag) { case DERTags.SET: return ASN1Set.getInstance(this, isExplicit).parser(); case DERTags.SEQUENCE: return ASN1Sequence.getInstance(this, isExplicit).parser(); case DERTags.OCTET_STRING: return ASN1OctetString.getInstance(this, isExplicit).parser(); } return getObject(); } else { switch (tag) { case DERTags.SET: return ASN1Set.getInstance(this, isExplicit).parser(); case DERTags.SEQUENCE: return ASN1Sequence.getInstance(this, isExplicit).parser(); case DERTags.OCTET_STRING: return ASN1OctetString.getInstance(this, isExplicit).parser(); } } throw new RuntimeException("implicit tagging not implemented for tag: " + tag); } abstract void encode(DEROutputStream out) throws IOException; public String toString() { return "[" + tagNo + "]" + obj; } }
package org.ribasco.agql.protocols.valve.steam.webapi.pojos;

import com.google.gson.annotations.SerializedName;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

import java.util.List;
import java.util.Map;

/**
 * Gson-mapped price information for a single Steam asset.
 */
public class SteamAssetPriceInfo {

    // NOTE(review): keys presumably are currency codes — confirm against a
    // live ISteamEconomy response
    @SerializedName("prices")
    private Map<String, Integer> prices;

    @SerializedName("name")
    private String name;

    @SerializedName("date")
    private String date;

    @SerializedName("class")
    private List<SteamKeyValuePair<String, String>> classList;

    @SerializedName("classid")
    private String classId;

    public Map<String, Integer> getPrices() {
        return prices;
    }

    public void setPrices(Map<String, Integer> prices) {
        this.prices = prices;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDate() {
        return date;
    }

    public void setDate(String date) {
        this.date = date;
    }

    public List<SteamKeyValuePair<String, String>> getClassList() {
        return classList;
    }

    public void setClassList(List<SteamKeyValuePair<String, String>> classList) {
        this.classList = classList;
    }

    public String getClassId() {
        return classId;
    }

    public void setClassId(String classId) {
        this.classId = classId;
    }

    @Override
    public String toString() {
        // guard against an absent "prices" field in the JSON payload; the
        // original called getPrices().size() unconditionally and threw an
        // NPE when deserialization left the map null
        final int priceCount = (prices != null) ? prices.size() : 0;
        return new ToStringBuilder(this, ToStringStyle.NO_CLASS_NAME_STYLE)
                .append("ClassId", getClassId())
                .append("Name", getName())
                .append("Date", getDate())
                .append("PriceCount", priceCount).toString();
    }
}
package org.intermine.web.logic.profile; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpSession; import org.apache.log4j.Logger; import org.intermine.api.bag.BagQueryResult; import org.intermine.api.bag.BagQueryRunner; import org.intermine.api.profile.InterMineBag; import org.intermine.api.profile.BagState; import org.intermine.api.profile.Profile; import org.intermine.model.InterMineObject; import org.intermine.objectstore.ObjectStoreException; import org.intermine.web.logic.Constants; import org.intermine.web.logic.bag.BagQueryUpgrade; import org.intermine.web.logic.session.SessionMethods; /** * Runnable object providing upgrading osbag_int table. * @author dbutano * */ public class UpgradeBagList implements Runnable { private static final Logger LOG = Logger.getLogger(UpgradeBagList.class); private Profile profile; private BagQueryRunner bagQueryRunner; private HttpSession session; public UpgradeBagList(Profile profile, BagQueryRunner bagQueryRunner, HttpSession session) { this.profile = profile; this.bagQueryRunner = bagQueryRunner; this.session = session; } public void run() { Map<String, String> savedBagsStatus = SessionMethods.getNotCurrentSavedBagsStatus(session); Map<String, InterMineBag> savedBags = profile.getSavedBags(); for (InterMineBag bag : savedBags.values()) { if (bag.getState().equals(BagState.NOT_CURRENT.toString())) { savedBagsStatus.put(bag.getName(), Constants.UPGRADING_BAG); BagQueryUpgrade bagQueryUpgrade = new BagQueryUpgrade(bagQueryRunner, bag); BagQueryResult result = bagQueryUpgrade.getBagQueryResult(); try { if (result.getUnresolved().isEmpty() && (result.getIssues().isEmpty() || onlyOtherIssuesAlreadyContained(result))) { Map<Integer, List> matches = result.getMatches(); //we set temporary the updateBagValues parameter to true //in this way will update the extra field recently added 
bag.upgradeOsb(matches.keySet(), true); savedBagsStatus.put(bag.getName(), BagState.CURRENT.toString()); } else { session.setAttribute("bagQueryResult_" + bag.getName(), result); bag.setState(BagState.TO_UPGRADE); savedBagsStatus.put(bag.getName(), BagState.TO_UPGRADE.toString()); } } catch (ObjectStoreException ose) { LOG.warn("Impossible upgrade the bags list", ose); } } } } /** * Verify that the only issues existing have type OTHER and the ids contained already * existing in the list. * If the condition is verified the list can be upgraded automatically * @param result * @return */ private boolean onlyOtherIssuesAlreadyContained(BagQueryResult result) { if(result.getIssues().get(BagQueryResult.DUPLICATE).isEmpty() && result.getIssues().get(BagQueryResult.TYPE_CONVERTED).isEmpty() && result.getIssues().get(BagQueryResult.WILDCARD).isEmpty()) { Map<String, Map<String, List>> otherMatchMap = result.getIssues() .get(BagQueryResult.OTHER); Set<Integer> matchesIds = result.getMatches().keySet(); if (otherMatchMap != null) { Map<String, ArrayList<Object>> lowQualityMatches = new LinkedHashMap<String, ArrayList<Object>>(); Iterator otherMatchesIter = otherMatchMap.values().iterator(); while (otherMatchesIter.hasNext()) { Map<String, ArrayList<Object>> inputToObjectsMap = (Map) otherMatchesIter.next(); Map<String, ArrayList<Object>> inputToObjectsMapUpdated = new LinkedHashMap<String, ArrayList<Object>>(); for (String key : inputToObjectsMap.keySet()) { ArrayList<Object> listObjects = inputToObjectsMap.get(key); ArrayList<Object> listObjectsUpdated = new ArrayList<Object>(); for (Object obj : listObjects) { InterMineObject intermineObj= (InterMineObject) obj; if (matchesIds.isEmpty() || !matchesIds.contains(intermineObj.getId())) { listObjectsUpdated.add(obj); } } if (!listObjectsUpdated.isEmpty()) { inputToObjectsMapUpdated.put(key, listObjects); } } if (!inputToObjectsMapUpdated.isEmpty()) { lowQualityMatches.putAll(inputToObjectsMapUpdated); } } if 
(lowQualityMatches.isEmpty()) { return true; } } } return false; } }
package org.xcsp.modeler; import static org.xcsp.common.Constants.ANNOTATIONS; import static org.xcsp.common.Constants.ARGS; import static org.xcsp.common.Constants.ARRAY; import static org.xcsp.common.Constants.BLOCK; import static org.xcsp.common.Constants.CONSTRAINTS; import static org.xcsp.common.Constants.DECISION; import static org.xcsp.common.Constants.DOMAIN; import static org.xcsp.common.Constants.GROUP; import static org.xcsp.common.Constants.INSTANCE; import static org.xcsp.common.Constants.OBJECTIVES; import static org.xcsp.common.Constants.VAR; import static org.xcsp.common.Constants.VARIABLES; import static org.xcsp.common.Utilities.element; import static org.xcsp.modeler.definitions.ICtr.CONDITION; import static org.xcsp.modeler.definitions.ICtr.EXTENSION; import static org.xcsp.modeler.definitions.ICtr.FUNCTION; import static org.xcsp.modeler.definitions.ICtr.INDEX; import static org.xcsp.modeler.definitions.ICtr.INTENSION; import static org.xcsp.modeler.definitions.ICtr.LIST; import static org.xcsp.modeler.definitions.ICtr.SLIDE; import static org.xcsp.modeler.definitions.ICtr.VALUE; import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Stack; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xcsp.common.Condition; import org.xcsp.common.IVar; import org.xcsp.common.Softening; import org.xcsp.common.Softening.SofteningGlobal; import org.xcsp.common.Softening.SofteningSimple; import org.xcsp.common.Types.TypeAtt; import org.xcsp.common.Types.TypeChild; import org.xcsp.common.Types.TypeClass; import 
org.xcsp.common.Types.TypeFramework;
import org.xcsp.common.Types.TypeVar;
import org.xcsp.common.Utilities;
import org.xcsp.common.domains.Values.IntegerInterval;
import org.xcsp.common.predicates.XNodeParent;
import org.xcsp.modeler.api.ProblemAPI;
import org.xcsp.modeler.definitions.DefXCSP;
import org.xcsp.modeler.definitions.DefXCSP.Son;
import org.xcsp.modeler.definitions.ICtr;
import org.xcsp.modeler.definitions.ICtr.ICtrExtension;
import org.xcsp.modeler.definitions.ICtr.ICtrInstantiation;
import org.xcsp.modeler.definitions.ICtr.ICtrIntension;
import org.xcsp.modeler.definitions.ICtr.ICtrMdd;
import org.xcsp.modeler.definitions.ICtr.ICtrRegular;
import org.xcsp.modeler.definitions.ICtr.ICtrSlide;
import org.xcsp.modeler.definitions.ICtr.ICtrSmart;
import org.xcsp.modeler.definitions.ICtr.Meta;
import org.xcsp.modeler.entities.CtrEntities.CtrAlone;
import org.xcsp.modeler.entities.CtrEntities.CtrArray;
import org.xcsp.modeler.entities.CtrEntities.CtrEntity;
import org.xcsp.modeler.entities.ModelingEntity;
import org.xcsp.modeler.entities.ModelingEntity.TagDummy;
import org.xcsp.modeler.entities.ObjEntities.ObjEntity;
import org.xcsp.modeler.entities.VarEntities.VarAlone;
import org.xcsp.modeler.entities.VarEntities.VarArray;
import org.xcsp.modeler.entities.VarEntities.VarEntity;
import org.xcsp.modeler.implementation.ProblemIMP;
import org.xcsp.modeler.implementation.ProblemIMP3;
import org.xcsp.modeler.implementation.ProblemIMP3.MVariable;
import org.xcsp.modeler.problems.AllInterval;
import org.xcsp.modeler.problems.Bibd;

/**
 * Compiles an MCSP3 model (a {@code ProblemAPI} implementation) into an XCSP3 XML document.
 */
public class Compiler {

    // XML attribute names, cached from TypeAtt / TypeChild enums.
    public static final String FORMAT = TypeAtt.format.name();
    public static final String XCSP3 = "XCSP3";
    public static final String TYPE = TypeAtt.type.name();
    public static final String ID = TypeAtt.id.name();
    public static final String CLASS = TypeAtt.CLASS.name().toLowerCase();
    public static final String NOTE = TypeAtt.note.name();
    public static final String AS = TypeAtt.as.name();
    public static final String FOR = TypeAtt.FOR.name().toLowerCase();
    public static final String CIRCULAR = TypeAtt.circular.name();
    public static final String OFFSET = TypeAtt.offset.name();
    public static final String COLLECT = TypeAtt.collect.name();
    public static final String VIOLATION_COST = TypeAtt.violationCost.name();
    public static final String VIOLATION_MEASURE = TypeAtt.violationMeasure.name();
    public static final String SUPPORTS = TypeChild.supports.name();
    public static final String CONFLICTS = TypeChild.conflicts.name();

    // Placeholder used when a group argument list is abstracted as a variable-length "%..." token.
    public static final String VAR_ARGS = "%...";
    public static final int LIMIT_FOR_VAR_ARGS = 3;

    // Command-line option prefixes.
    public static final String VARIANT = "-variant";
    public static final String DATA = "-data";
    public static final String DATA_FORMAT = "-dataFormat";
    public static final String DATA_SAVING = "-dataSaving";
    public static final String OUTPUT = "-output";
    public static final String EV = "-ev";
    public static final String IC = "-ic";

    // Model implementation being compiled, and the DOM document under construction.
    protected final ProblemIMP imp;
    protected Document doc;

    // Cache of <supports>/<conflicts> elements keyed by their tuple text, for "as" aliasing.
    protected Map<String, Element> tuplesReferents = new HashMap<>();
    protected int nBuiltTuplesReferents;

    // HARD CODING/VALUES BELOW
    protected int limitForUsingAs = 12;
    protected boolean discardIntegerType = true, discardAsRelation = true, printNotes = true;
    protected boolean doubleAbstraction = true, saveImmediatelyStored = true, ignoreAutomaticGroups = true, monoformGroups = false;
    private boolean noGroupAtAllForExtension = false, noGroupAtAllForIntension = false, noGroupAtAllForGlobal = false;
    private boolean uncompactDomainFor = false;
    private boolean mustEraseIdsOfConstraints = false;
    private boolean mergeSuccessiveInstantiations = false;
    // sometimes, for efficiency reasons, it is important to set noGroupAtAllForExtension to true and uncompactDomainFor to true

    /**
     * Builds an object that allow us to generate XCSP3 instances from the specified MCSP3 model. Data are expected to be provided at the command
     * line.
     *
     * @param api
     *            the object denoting the model of the problem
     */
    public Compiler(ProblemAPI api) {
        this.imp = api.imp();
    }

    /**
     * Builds the full XCSP3 DOM document: &lt;variables&gt;, &lt;constraints&gt;, and optionally
     * &lt;objectives&gt; and &lt;annotations&gt;, under a root &lt;instance&gt; element.
     */
    protected Document buildDocument() { // TODO control that ids are all different
        try {
            doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
        } catch (ParserConfigurationException e) {
            e.printStackTrace();
        }
        // Framework is COP whenever objectives are present, otherwise the model's own framework.
        Element root = element(doc, INSTANCE, FORMAT, XCSP3, TYPE,
                imp.objEntities.active() ? TypeFramework.COP.name() : imp.typeFramework().name());
        root.appendChild(variables());
        root.appendChild(constraints());
        if (imp.objEntities.active())
            root.appendChild(objectives());
        if (imp.annotations.active())
            root.appendChild(annotations());
        doc.appendChild(root);
        doc.normalize();
        return doc;
    }

    // Pending constraints of each kind, buffered so that similar ones can be emitted as a <group>.
    private List<Predicate> storedP = new ArrayList<>();
    private List<Relation> storedR = new ArrayList<>();
    private List<Global> storedG = new ArrayList<>();

    /**
     * Flushes the pending buffers (relations if {@code br}, predicates if {@code bp}, globals if
     * {@code bg}) into {@code parent}, but only when {@code immediatly} is true.
     */
    private void saveStored(Element parent, boolean immediatly, boolean br, boolean bp, boolean bg) {
        if (!immediatly)
            return;
        if (br && storedR.size() > 0)
            parent.appendChild(buildingStoredRelations());
        if (bp && storedP.size() > 0)
            parent.appendChild(buildingStoredPredicates());
        if (bg && storedG.size() > 0)
            parent.appendChild(buildingStoredGlobals());
    }

    /** Flushes all three pending buffers into {@code parent}. */
    private void saveStored(Element parent) {
        saveStored(parent, true, true, true, true);
    }

    /**
     * Base class for wrappers that decide whether two constraints are similar enough to be
     * grouped together in a single XCSP3 &lt;group&gt;.
     */
    private abstract class Similarable<T> {
        protected abstract boolean isSimilarTo(T object);

        /**
         * Returns true iff the two constraints carry compatible side attributes
         * (no explicit ids, equivalent classes, equal notes, and matching softening data).
         */
        protected boolean haveSimilarAttributes(ICtr c1, ICtr c2) {
            CtrAlone ca1 = imp.ctrEntities.ctrToCtrAlone.get(c1), ca2 = imp.ctrEntities.ctrToCtrAlone.get(c2);
            if (ca1.id != null || ca2.id != null)
                return false;
            if (!TypeClass.equivalent(ca1.classes, ca2.classes))
                return false;
            if ((ca1.note == null) != (ca2.note == null) || (ca1.note != null && !ca1.note.equals(ca2.note)))
                return false;
            if ((ca1.softening == null) != (ca2.softening == null))
                return false;
            if (ca1.softening != null) { // and necessarily ca2.softening != null too
                if (ca1.softening.getClass() != ca2.softening.getClass())
                    return false;
                if (ca1.softening.cost != null || ca2.softening.cost != null)
                    return false; // relaxed constraints are considered as being not similar (do not see how it could be different)
                // we have to check cost functions now
                if (ca1.softening instanceof SofteningSimple) {
                    if (((SofteningSimple) ca1.softening).violationCost != ((SofteningSimple) ca2.softening).violationCost)
                        return false;
                } else if (ca1.softening instanceof SofteningGlobal) {
                    if (((SofteningGlobal) ca1.softening).type != ((SofteningGlobal) ca2.softening).type)
                        return false;
                    if (((SofteningGlobal) ca1.softening).parameters != null || ((SofteningGlobal) ca2.softening).parameters != null)
                        return false;
                } else
                    return false;
            }
            return true;
        }
    }

    /**
     * A class used to handle constraints {@code intension}, with the objective of building groups of similar intension constraints
     */
    private class Predicate extends Similarable<Predicate> {
        private ICtrIntension c;
        // Abstracted syntactic tree, with leaves replaced by %i parameters collected into args.
        private XNodeParent<?> abstractTree;
        private List<Object> args = new ArrayList<>();

        public Predicate(ICtrIntension c, boolean abstractIntegers, boolean multiOccurrences) {
            this.c = c;
            this.abstractTree = (XNodeParent<?>) ((XNodeParent<?>) c.mapXCSP().get(FUNCTION)).abstraction(args, abstractIntegers, multiOccurrences);
        }

        private Predicate(ICtrIntension c) {
            this(c, true, true);
        }

        @Override
        protected boolean isSimilarTo(Predicate p) {
            // Similar iff attributes match and the abstracted trees are structurally equal.
            return haveSimilarAttributes(c, p.c) && abstractTree.equals(p.abstractTree);
        }
    }

    /**
     * A class used to handle constraints {@code extension}, with the objective of building groups of similar extension constraints
     */
    private class Relation extends Similarable<Relation> {
        private ICtrExtension c;

        private Relation(ICtrExtension c) {
            this.c = c;
        }

        @Override
        protected boolean isSimilarTo(Relation r) {
            return haveSimilarAttributes(c, r.c) && c.isSimilarTo(r.c);
        }
    }

    /**
     * A class used to handle global constraints (i.e., constraints that are neither {@code intension} nor {@code extension}), with the
     * objective of
     * building groups of similar global constraints
     */
    private class Global extends Similarable<Global> {
        private ICtr c;
        private DefXCSP def;
        // Indices of the differing sons, and the (token) sizes recorded for the first pair,
        // reused to validate every further candidate added to the same group.
        private int[] recordedDiffs, recordedSizes;

        private Global(ICtr c) {
            this.c = c;
            this.def = c.defXCSP();
        }

        @Override
        protected boolean isSimilarTo(Global g) {
            // sizeOf counts how many abstract parameters a son's content expands to:
            // 1 for a number/interval/condition, otherwise one per whitespace token
            // (numeric tokens count 1, variable tokens count the number of variables).
            Function<Object, Integer> sizeOf = v -> v instanceof Number || v instanceof IntegerInterval || v instanceof Condition ? 1
                    : Stream.of((v.toString()).trim().split("\\s+"))
                            .mapToInt(tok -> Utilities.isNumeric(tok) || Utilities.isNumericInterval(tok) ? 1 : imp.varEntities.nVarsIn(tok)).sum();
            if (def.map.containsKey(ICtr.MATRIX))
                return false; // currently, forbidden to group together constraints with child MATRIX
            if (!haveSimilarAttributes(c, g.c))
                return false;
            int[] diffs = def == null || g.def == null ? null : def.differencesWith(g.def);
            if (diffs == null)
                return false;
            if (diffs.length == 0) {
                System.out.println("WARNING : Two similar constraints");
                return false; // The constraints are identical; we return false to keep both of them (may happen with some awkward instances)
            }
            if (diffs.length == 1) {
                if (def.sons.get(diffs[0]).name.equals(CONDITION)) // for the moment, problem when abstracting on conditions
                    return false;
                if (storedG.size() == 1) {
                    // First comparison for this group: record which son differs and its size
                    // (-1 when the two sizes disagree, meaning VAR_ARGS will be needed).
                    recordedDiffs = diffs;
                    int s1 = sizeOf.apply(def.sons.get(diffs[0]).content), s2 = sizeOf.apply(g.def.sons.get(diffs[0]).content);
                    recordedSizes = new int[] { (s1 == s2 ? s1 : -1) };
                    return true;
                }
                if (recordedSizes[0] != -1 && recordedSizes[0] != sizeOf.apply(g.def.sons.get(diffs[0]).content))
                    recordedSizes[0] = -1;
                return recordedDiffs.length == 1 && recordedDiffs[0] == diffs[0];
            }
            if (doubleAbstraction && diffs.length == 2 && def.sons.size() > 2 && !(c instanceof ICtrRegular) && !(c instanceof ICtrMdd)) {
                if (IntStream.of(diffs).anyMatch(i -> def.sons.get(i).name.equals(CONDITION)))
                    return false; // for the moment, the parser does not manage abstraction of condition elements
                if (storedG.size() == 1) {
                    int[] s1 = IntStream.of(diffs).map(i -> sizeOf.apply(def.sons.get(i).content)).toArray();
                    int[] s2 = IntStream.of(diffs).map(i -> sizeOf.apply(g.def.sons.get(i).content)).toArray();
                    if (IntStream.range(0, diffs.length).allMatch(i -> s1[i] == s2[i])) {
                        recordedDiffs = diffs;
                        recordedSizes = s1;
                        return true;
                    }
                    return false;
                }
                // With two abstracted sons, the differing indices and sizes must match exactly.
                if (recordedDiffs.length != 2 || recordedDiffs[0] != diffs[0] || recordedDiffs[1] != diffs[1])
                    return false;
                int[] s2 = IntStream.of(diffs).map(i -> sizeOf.apply(g.def.sons.get(i).content)).toArray();
                return IntStream.range(0, diffs.length).allMatch(i -> recordedSizes[i] == s2[i]);
            }
            return false; // for the moment, only 1 or 2 differences are managed
        }

        @Override
        public String toString() {
            return def.toString();
        }
    }

    /**
     * Builds the sequence of abstract parameters "%start %start+1 ... %start+n-1", or the single
     * VAR_ARGS token "%..." when {@code compact} is set and n exceeds LIMIT_FOR_VAR_ARGS.
     */
    private String seqOfParameters(int n, int start, boolean compact) {
        return compact && n > LIMIT_FOR_VAR_ARGS ?
                VAR_ARGS : IntStream.range(0, n).mapToObj(i -> "%" + (start + i)).collect(Collectors.joining(" "));
    }

    private String seqOfParameters(int n, boolean compact) {
        return seqOfParameters(n, 0, compact);
    }

    private String seqOfParameters(int n) {
        return seqOfParameters(n, false);
    }

    /**
     * Copies the side attributes (id, class, note, and softening data) of the given modeling
     * entity onto the XML element; does nothing when {@code entity} is null.
     */
    private void sideAttributes(Element element, ModelingEntity entity) {
        if (entity == null)
            return;
        if (entity.id != null)
            element.setAttribute(ID, entity.id);
        if (entity.classes.size() > 0)
            element.setAttribute(CLASS, entity.classes.stream().map(c -> c.ccname()).collect(Collectors.joining(" ")));
        if (printNotes && entity.note != null && entity.note.length() > 0)
            element.setAttribute(NOTE, entity.note);
        if (entity instanceof CtrAlone) {
            Softening sf = ((CtrAlone) entity).softening;
            if (sf != null) {
                Utilities.control(sf.cost == null, "Cannot be managed at this place");
                if (sf instanceof SofteningSimple)
                    element.setAttribute(VIOLATION_COST, ((SofteningSimple) sf).violationCost + "");
                else if (sf instanceof SofteningGlobal)
                    element.setAttribute(VIOLATION_MEASURE, ((SofteningGlobal) sf).type.toString());
                else
                    Utilities.control(false, "Unreachable");
            }
        }
    }

    /**
     * When the son denotes a nested (recursive) constraint, builds and returns its element;
     * returns null otherwise. Abstraction indices must not target a recursive son.
     */
    private Element treatPossibleRecursiveSon(Son son, int sonIndex, int absIndex1, int absIndex2) {
        if (son.name.equals(ICtr.REC)) { // recursivity
            Utilities.control(absIndex1 != sonIndex && absIndex2 != sonIndex && son.content instanceof CtrAlone, "Pb");
            CtrAlone ca = (CtrAlone) son.content;
            Element sub = buildingDef(ca.ctr.defXCSP());
            sideAttributes(sub, ca);
            return sub;
        }
        return null;
    }

    /**
     * Builds the element for a constraint definition, substituting the sons at
     * {@code absIndex1}/{@code absIndex2} (when >= 0) with the abstract values
     * {@code absValue1}/{@code absValue2}.
     */
    private Element buildingDef(DefXCSP def, int absIndex1, String absValue1, int absIndex2, String absValue2) {
        Element elt = doc.createElement(def.name);
        def.attributes.stream().forEach(a -> elt.setAttribute(a.getKey(), a.getValue().toString()));
        if (def.sons.size() == 1 && def.sons.get(0).attributes.size() == 0 && def.possibleSimplification) {
            // Single attribute-less son: inline its content as text instead of a child element.
            Element recursiveSon = treatPossibleRecursiveSon(def.sons.get(0), 0, absIndex1, absIndex2);
            if (recursiveSon != null)
                elt.appendChild(recursiveSon);
            else
                elt.setTextContent(" " + (absIndex1 == 0 ? absValue1 : def.sons.get(0).content) + " ");
        } else
            for (int i = 0; i < def.sons.size(); i++) {
                Element recursiveSon = treatPossibleRecursiveSon(def.sons.get(i), i, absIndex1, absIndex2);
                if (recursiveSon != null)
                    elt.appendChild(recursiveSon);
                else {
                    Element sub = element(doc, def.sons.get(i).name, i == absIndex1 ? absValue1 : i == absIndex2 ? absValue2 : def.sons.get(i).content);
                    def.sons.get(i).attributes.stream().forEach(a -> sub.setAttribute(a.getKey(), a.getValue().toString()));
                    elt.appendChild(sub);
                }
            }
        return elt;
    }

    private Element buildingDef(DefXCSP def, int absIndex, String absValue) {
        return buildingDef(def, absIndex, absValue, -1, "");
    }

    private Element buildingDef(DefXCSP def) {
        return buildingDef(def, -1, "", -1, "");
    }

    /** Sets the common attributes (side attributes, size, and non-integer type) on a variable/array element. */
    private Element baseVarEntity(Element element, VarEntity va) {
        sideAttributes(element, va);
        if (va instanceof VarArray)
            element.setAttribute(ICtr.SIZE, VarArray.class.cast(va).getStringSize());
        if (!discardIntegerType || va.getType() != TypeVar.integer)
            element.setAttribute(TYPE, va.getType().name());
        return element;
    }

    /** Builds a &lt;var&gt; element, either with an inline domain or an "as" alias to a referent. */
    private Element var(VarAlone va, String s, boolean alias) {
        return baseVarEntity(alias ? element(doc, VAR, AS, s) : element(doc, VAR, s), va);
    }

    /** Builds an &lt;array&gt; element, either with an inline domain or an "as" alias to a referent. */
    private Element array(VarArray va, String s, boolean alias) {
        return baseVarEntity(alias ? element(doc, ARRAY, AS, s) : element(doc, ARRAY, s), va);
    }

    /**
     * Builds an &lt;array&gt; element with several &lt;domain&gt; children, one per group of
     * variables sharing the same domain (mixed-domain array case).
     */
    private Element array(VarArray va, Map<IVar, String> varToDomText, Map<Object, List<IVar>> map) {
        Utilities.control(map.size() > 1, "The map only contains one entry");
        Element element = baseVarEntity(doc.createElement(ARRAY), va);
        for (List<IVar> list : map.values()) {
            String s = uncompactDomainFor ?
                    list.stream().map(x -> x.id()).collect(Collectors.joining(" ")) : imp.varEntities.compact(list.toArray(new IVar[list.size()]));
            element.appendChild(element(doc, DOMAIN, FOR, s, varToDomText.get(list.get(0))));
        }
        return element;
    }

    /** Records the textual domain of variable {@code x} in {@code map}. */
    protected void putInMap(IVar x, Map<IVar, String> map) {
        map.put(x, ((MVariable) x).dom.toString());
    }

    /**
     * Builds the &lt;variables&gt; element. Identical long domains are emitted once and then
     * referenced with "as" aliases (controlled by limitForUsingAs).
     */
    protected Element variables() {
        System.out.println(" Saving variables");
        Element element = doc.createElement(VARIABLES);
        // First pass: collect the textual domain of every variable.
        Map<IVar, String> varToDom = new HashMap<>();
        for (VarEntity ve : imp.varEntities.allEntities)
            if (ve instanceof VarAlone)
                putInMap(((VarAlone) ve).var, varToDom);
            else
                for (IVar x : ((VarArray) ve).flatVars)
                    putInMap(x, varToDom);
        // Second pass: emit entities, aliasing repeated long domains via "as".
        Map<String, String> domToVarReferent = new HashMap<>();
        for (VarEntity ve : imp.varEntities.allEntities) {
            if (ve instanceof VarAlone) {
                VarAlone va = (VarAlone) ve;
                // Utilities.control(problem.varEntities.varToVarArray.get(va.var) == null, "");
                if (imp.varEntities.varToVarArray.get(va.var) != null) // necessary for xcsp2
                    continue;
                String dom = varToDom.get(va.var);
                if (domToVarReferent.get(dom) == null) {
                    element.appendChild(var(va, dom, false));
                    domToVarReferent.put(dom, va.id);
                } else if (dom.length() < limitForUsingAs)
                    element.appendChild(var(va, dom, false));
                else
                    element.appendChild(var(va, domToVarReferent.get(dom), true));
            } else {
                VarArray va = (VarArray) ve;
                Map<Object, List<IVar>> map = Stream.of(va.flatVars)
                        .collect(Collectors.groupingBy(x -> varToDom.get(x), LinkedHashMap::new, Collectors.toList()));
                if (map.size() == 1) {
                    // Uniform array: all variables share the same domain.
                    String dom = varToDom.get(va.flatVars[0]);
                    if (domToVarReferent.get(dom) == null) {
                        element.appendChild(array(va, dom, false));
                        domToVarReferent.put(dom, va.id);
                    } else if (dom.length() < limitForUsingAs)
                        element.appendChild(array(va, dom, false));
                    else
                        element.appendChild(array(va, domToVarReferent.get(dom), true));
                } else
                    element.appendChild(array(va, varToDom, map));
            }
        }
        return element;
    }

    /**
     * Partitions the wrapped constraints {@code t} into runs of mutually similar ones (using
     * isSimilarTo), feeding each run into {@code store} and emitting one element per run via
     * {@code spl}. The noGroupAtAllFor* flags force one element per constraint; monoformGroups
     * forces a single group.
     */
    private <T extends Similarable<T>> List<Element> buildChilds(T[] t, List<T> store, Supplier<Element> spl) {
        List<Element> childs = new ArrayList<>();
        if (t[0] instanceof Predicate && noGroupAtAllForIntension || t[0] instanceof Relation && noGroupAtAllForExtension
                || t[0] instanceof Global && noGroupAtAllForGlobal) {
            for (int i = 0; i < t.length; i++) {
                store.clear();
                store.add(t[i]);
                childs.add(spl.get());
            }
        } else if (monoformGroups) {
            store.add(t[0]);
            boolean similar = t[0].isSimilarTo(t[1]); // to record diffs
            Utilities.control(similar, "Should be similar");
            assert Stream.of(t).allMatch(p -> p.isSimilarTo(t[0]));
            IntStream.range(1, t.length).forEach(i -> store.add(t[i]));
            childs.add(spl.get());
        } else {
            // Greedy grouping: each unflagged constraint seeds a group gathering all later similar ones.
            boolean[] flags = new boolean[t.length];
            for (int i = 0; i < t.length; i++) {
                if (flags[i] || t[i] == null)
                    continue;
                store.clear();
                store.add(t[i]);
                for (int j = i + 1; j < t.length; j++) {
                    if (!flags[j] && t[i].isSimilarTo(t[j])) {
                        store.add(t[j]);
                        flags[j] = true;
                    }
                }
                childs.add(spl.get());
            }
        }
        return childs;
    }

    /**
     * Builds the &lt;supports&gt; or &lt;conflicts&gt; element for an extension constraint,
     * reusing an already-emitted identical table through an "as" alias when allowed.
     */
    private Element buildingTuples(ICtrExtension c) {
        Object tuples = c.mapXCSP().get(ICtr.TUPLES);
        String key = tuples instanceof int[][] ? ICtrExtension.tableAsString((int[][]) tuples) : ICtrExtension.tableAsString((String[][]) tuples);
        Element eltTgt = tuplesReferents.get(key), eltSrc = null;
        if (eltTgt != null && !discardAsRelation) {
            if (eltTgt.getAttribute(ID).length() == 0) {
                eltTgt.setAttribute(ID, "i" + nBuiltTuplesReferents); // we add a useful missing id
                nBuiltTuplesReferents++;
            }
            eltSrc = element(doc, (Boolean) c.mapXCSP().get(ICtr.POSITIVE) ? SUPPORTS : CONFLICTS, AS, eltTgt.getAttribute(ID));
        } else {
            eltSrc = element(doc, (Boolean) c.mapXCSP().get(ICtr.POSITIVE) ? SUPPORTS : CONFLICTS, key);
            tuplesReferents.put(key, eltSrc);
        }
        return eltSrc;
    }

    /**
     * Emits the buffered extension constraints: a single &lt;extension&gt; when alone, otherwise a
     * &lt;group&gt; with abstract parameters and one &lt;args&gt; per constraint. Clears the buffer.
     */
    private Element buildingStoredRelations() {
        Relation r = storedR.get(0); // first relation
        Element lst = element(doc, LIST, storedR.size() == 1 ?
r.c.mapXCSP().get(LIST) : seqOfParameters((Integer) r.c.mapXCSP().get(ICtr.ARITY), true)); Element ext = element(doc, EXTENSION, lst, buildingTuples(r.c)); Element elt = storedR.size() == 1 ? ext : element(doc, GROUP, ext, storedR.stream().map(rr -> element(doc, ARGS, rr.c.mapXCSP().get(LIST)))); sideAttributes(elt, imp.ctrEntities.ctrToCtrAlone.get(r.c)); // sideAttributes(elt, storedR.size() == 1 ? loader.ctrEntities.ctrToCtrAlone.get(r.c) :loader.ctrEntities?ctrToCtrArray.get(r.c)); storedR.clear(); return elt; } private List<Element> handleListOfExtension(Element parent, List<ICtr> ctrs) { saveStored(parent, true, true, false, false); return buildChilds(ctrs.stream().map(c -> new Relation((ICtrExtension) c)).toArray(Relation[]::new), storedR, () -> buildingStoredRelations()); } private Element buildingStoredPredicates() { Predicate firstPredicate = storedP.get(0); // first predicate Utilities.control(storedP.stream().allMatch(p -> p.args.size() == firstPredicate.args.size()), "Not the same size"); if (storedP.size() > 1) { Object[] similar = IntStream.range(0, firstPredicate.args.size()) .mapToObj(i -> storedP.stream().allMatch(p -> p.args.get(i).equals(firstPredicate.args.get(i))) ? firstPredicate.args.get(i) : null) .toArray(); if (Stream.of(similar).anyMatch(obj -> obj != null)) { // we reduce lists of arguments for (int i = similar.length - 1; i >= 0; i if (similar[i] != null) for (Predicate p : storedP) p.args.remove(similar[i]); // we modify the abstract tree firstPredicate.abstractTree = (XNodeParent<?>) firstPredicate.abstractTree.replacePartiallyParameters(similar); } } Element itn = element(doc, INTENSION, storedP.size() == 1 ? firstPredicate.c.mapXCSP().get(ICtr.FUNCTION) : firstPredicate.abstractTree); Element elt = null; if (storedP.size() == 1) elt = itn; else { elt = element(doc, GROUP, itn); for (Predicate p : storedP) { String s = p.args.stream().allMatch(x -> x instanceof IVar) ? 
imp.varEntities.compactOrdered(p.args.stream().map(x -> (IVar) x).toArray(IVar[]::new)) : Utilities.join(p.args); elt.appendChild(element(doc, ARGS, s)); } } // Element elt = storedP.size() == 1 ? itn : element(doc, GROUP, itn, storedP.stream().map(pp -> element(doc, ARGS,Utilities.join(pp.args)))); sideAttributes(elt, imp.ctrEntities.ctrToCtrAlone.get(firstPredicate.c)); storedP.clear(); return elt; } private List<Element> handleListOfIntension(Element parent, List<ICtr> ctrs) { saveStored(parent, true, false, true, false); return buildChilds(ctrs.stream().map(c -> new Predicate((ICtrIntension) c)).toArray(Predicate[]::new), storedP, () -> buildingStoredPredicates()); } private Element buildingStoredGlobals() { Element elt = null; Global g = storedG.get(0); if (storedG.size() == 1) elt = buildingDef(g.def); else { if (mergeSuccessiveInstantiations && storedG.stream().allMatch(e -> e.c instanceof ICtrInstantiation)) { IVar[] scope = storedG.stream().map(e -> Stream.of(e.c.scope())).flatMap(j -> j).toArray(IVar[]::new); String list = storedG.stream().map(e -> (String) e.c.mapXCSP().get(ICtr.LIST)).collect(Collectors.joining(" ")); String values = storedG.stream().map(e -> (String) e.c.mapXCSP().get(ICtr.VALUES)).collect(Collectors.joining(" ")); elt = buildingDef(new Global(ICtrInstantiation.buildFrom(scope, list, values)).def); } else { Utilities.control(g.recordedDiffs.length == 1 || g.recordedDiffs.length == 2, ""); int i = g.recordedDiffs[0]; if (g.recordedDiffs.length == 1) { String name = g.def.sons.get(i).name; Element gbl = buildingDef(g.def, i, name.equals(INDEX) || name.equals(VALUE) || name.equals(CONDITION) ? "%0" : g.recordedSizes[0] == -1 ? VAR_ARGS : seqOfParameters(g.recordedSizes[0], true)); // VAR_ARGS); // TODO other cases with %0 ? 
                    elt = element(doc, GROUP, gbl, storedG.stream().map(gg -> element(doc, ARGS, gg.def.sons.get(i).content)));
                } else {
                    // Two differing sons: abstract both, numbering the second son's parameters
                    // after the first son's.
                    int j = g.recordedDiffs[1];
                    Element gbl = buildingDef(g.def, i, seqOfParameters(g.recordedSizes[0]), j,
                            seqOfParameters(g.recordedSizes[1], g.recordedSizes[0], true));
                    elt = element(doc, GROUP, gbl,
                            storedG.stream().map(gg -> element(doc, ARGS, gg.def.sons.get(i).content + " " + gg.def.sons.get(j).content)));
                }
            }
        }
        sideAttributes(elt, imp.ctrEntities.ctrToCtrAlone.get(g.c));
        storedG.clear();
        return elt;
    }

    /** Flushes pending globals, then groups and emits the given global constraints. */
    private List<Element> handleListOfGlobal(Element parent, List<ICtr> ctrs) {
        saveStored(parent, true, false, false, true);
        return buildChilds(ctrs.stream().map(c -> new Global(c)).toArray(Global[]::new), storedG, () -> buildingStoredGlobals());
    }

    /**
     * Builds a &lt;slide&gt; element: one &lt;list&gt; per slid sequence (with collect/offset
     * attributes when not 1), followed by the abstracted template constraint.
     */
    private Element buildSlide(ICtrSlide ctr) {
        Element elt = doc.createElement(SLIDE);
        Map<String, Object> map = ctr.mapXCSP();
        if (map.containsKey(CIRCULAR) && (Boolean) map.get(CIRCULAR))
            elt.setAttribute(CIRCULAR, "true");
        IVar[][] lists = (IVar[][]) map.get(ICtr.LISTS);
        int[] offsets = (int[]) map.get(ICtr.OFFSETS), collects = (int[]) map.get(ICtr.COLLECTS);
        for (int i = 0; i < lists.length; i++) {
            Element subelement = element(doc, LIST, imp.varEntities.compactOrdered(lists[i]));
            if (lists.length > 1 && collects[i] != 1)
                subelement.setAttribute(COLLECT, collects[i] + "");
            if (offsets[i] != 1)
                subelement.setAttribute(OFFSET, offsets[i] + "");
            elt.appendChild(subelement);
        }
        CtrAlone[] cas = (CtrAlone[]) map.get(ICtr.ALONES);
        ICtr c0 = cas[0].ctr;
        Utilities.control(Stream.of(cas).noneMatch(ca -> ca.ctr instanceof ICtrSlide), "Slide cannot appear in slide");
        if (c0 instanceof ICtrIntension)
            elt.appendChild(element(doc, INTENSION, new Predicate((ICtrIntension) c0, false, true).abstractTree));
        else if (c0 instanceof ICtrExtension && !(c0 instanceof ICtrMdd) && !(c0 instanceof ICtrSmart))
            elt.appendChild(element(doc, EXTENSION, element(doc, LIST, seqOfParameters(c0.scope().length)), buildingTuples((ICtrExtension) c0)));
        else {
            // Global template: abstract the single son that differs between the first two instances.
            Global g0 = new Global(cas[0].ctr), g1 = new Global(cas[1].ctr);
            int[] diffs = g0.def.differencesWith(g1.def);
            Utilities.control(diffs.length == 1, "Bad form of slide");
            int nb = imp.varEntities.nVarsIn(g0.def.sons.get(diffs[0]).content.toString());
            elt.appendChild(buildingDef(g0.def, diffs[0], seqOfParameters(nb, true)));
        }
        sideAttributes(elt, imp.ctrEntities.ctrToCtrAlone.get(ctr));
        return elt;
    }

    /** Builds the element for a meta constraint directly from its definition. */
    private Element buildMeta(ICtr ctr) {
        Element elt = buildingDef(ctr.defXCSP());
        sideAttributes(elt, imp.ctrEntities.ctrToCtrAlone.get(ctr));
        return elt;
    }

    /**
     * Dispatches one constraint: slide/meta are emitted immediately (after flushing buffers);
     * intension/extension/global constraints are buffered, flushing first whenever the new
     * constraint is not similar to the buffered ones (or automatic grouping is disabled).
     */
    protected void handleCtr(Element parent, ICtr c) {
        if (c instanceof ICtrSlide) {
            saveStored(parent, saveImmediatelyStored, true, true, true);
            parent.appendChild(buildSlide((ICtrSlide) c));
        } else if (c instanceof Meta) {
            saveStored(parent, saveImmediatelyStored, true, true, true);
            parent.appendChild(buildMeta(c));
        } else if (c instanceof ICtrIntension) {
            saveStored(parent, saveImmediatelyStored, true, false, true);
            Predicate p = new Predicate((ICtrIntension) c);
            saveStored(parent, storedP.size() > 0 && (!storedP.get(0).isSimilarTo(p) || ignoreAutomaticGroups), false, true, false);
            storedP.add(p);
        } else if (c instanceof ICtrExtension && !(c instanceof ICtrMdd) && !(c instanceof ICtrSmart)) {
            saveStored(parent, saveImmediatelyStored, false, true, true);
            Relation r = new Relation((ICtrExtension) c);
            saveStored(parent, storedR.size() > 0 && (!storedR.get(0).isSimilarTo(r) || ignoreAutomaticGroups), true, false, false);
            storedR.add(r);
        } else {
            saveStored(parent, saveImmediatelyStored, true, true, false);
            Global g = new Global(c);
            saveStored(parent, storedG.size() > 0 && (!storedG.get(0).isSimilarTo(g) || ignoreAutomaticGroups), false, false, true);
            storedG.add(g);
        }
    }

    protected void setSpecificFrameworkAttributes(Element rootOfConstraints) {
        if (imp.typeFramework() == TypeFramework.WCSP) {
            // lb (lower bound) and ub (upper bound) to be managed ; TODO
        }
    }

    /** Builds the elements for a homogeneous list of constraints, dispatching on the first one's kind. */
    protected List<Element> buildChilds(Element parent,
            List<ICtr> ctrs) {
        ICtr c0 = ctrs.get(0);
        if (c0 instanceof ICtrSlide)
            return ctrs.stream().map(c -> buildSlide((ICtrSlide) c)).collect(Collectors.toList());
        if (c0 instanceof Meta)
            return ctrs.stream().map(c -> buildMeta(c)).collect(Collectors.toList());
        if (c0 instanceof ICtrIntension)
            return handleListOfIntension(parent, ctrs);
        if (c0 instanceof ICtrExtension && !(c0 instanceof ICtrMdd) && !(c0 instanceof ICtrSmart))
            return handleListOfExtension(parent, ctrs);
        return handleListOfGlobal(parent, ctrs);
    }

    /**
     * Builds the &lt;constraints&gt; element, turning CtrArray entities into &lt;block&gt;/&lt;group&gt;
     * structures and buffering stand-alone constraints through handleCtr.
     */
    protected Element constraints() {
        System.out.println(" Saving constraints");
        Element root = doc.createElement(CONSTRAINTS);
        setSpecificFrameworkAttributes(root);
        Utilities.control(storedP.size() == 0 && storedR.size() == 0 && storedG.size() == 0, "Storing structures are not empty");
        Stack<Element> stackOfBlocks = new Stack<>();
        stackOfBlocks.push(root); // the initial element is seen as a root block here
        for (CtrEntity ce : imp.ctrEntities.allEntities) {
            if (ce instanceof TagDummy)
                continue;
            Element currParent = stackOfBlocks.peek();
            if (ce instanceof CtrArray) {
                CtrArray ctrArray = (CtrArray) ce;
                // Partition the array's constraints by concrete class, preserving order.
                Map<String, List<ICtr>> map = Stream.of((ctrArray).ctrs)
                        .collect(Collectors.groupingBy(c -> c.getClass().getName(), LinkedHashMap::new, Collectors.toList())); // repartition((ctrArray).ctrs);
                if (map.size() == 1) {
                    saveStored(currParent, saveImmediatelyStored, true, true, true);
                    List<Element> childs = buildChilds(currParent, map.values().iterator().next());
                    // if ((ctrArray.nullBasicAttributes() || childs.get(0).getAttributes().getLength() == 0)) {
                    if (ctrArray.nullBasicAttributes())
                        childs.stream().forEach(c -> currParent.appendChild(c));
                    else if (childs.size() == 1 && childs.get(0).getAttributes().getLength() == 0) {
                        sideAttributes(childs.get(0), ctrArray);
                        currParent.appendChild(childs.get(0));
                    } else {
                        Element block = doc.createElement(BLOCK);
                        sideAttributes(block, ctrArray);
                        childs.stream().forEach(c -> block.appendChild(c));
                        currParent.appendChild(block);
                    }
                } else {
                    saveStored(currParent);
                    if (ctrArray.nullBasicAttributes()) { // avoiding creating a block with no attached information
                        for (List<ICtr> list : map.values())
                            buildChilds(currParent, list).stream().forEach(c -> currParent.appendChild(c));
                    } else {
                        Element block = doc.createElement(BLOCK);
                        sideAttributes(block, ctrArray);
                        for (List<ICtr> list : map.values())
                            buildChilds(block, list).stream().forEach(c -> block.appendChild(c));
                        currParent.appendChild(block);
                    }
                }
            } else {
                ICtr c = ((CtrAlone) ce).ctr;
                if (mustEraseIdsOfConstraints)
                    imp.ctrEntities.ctrToCtrAlone.get(c).id = null;
                // Constraints belonging to an array are emitted with their array, not here.
                if (imp.ctrEntities.ctrToCtrArray.get(c) == null)
                    handleCtr(currParent, c);
            }
        }
        assert stackOfBlocks.size() == 1 && stackOfBlocks.peek() == root;
        saveStored(root);
        return root;
    }

    /** Builds the &lt;objectives&gt; element, one child per objective entity. */
    protected Element objectives() {
        Element root = doc.createElement(OBJECTIVES);
        // root.setAttribute(OPTIMIZATION, LEXICO);
        for (ObjEntity obj : imp.objEntities.allEntities) {
            Element elt = buildingDef(obj.obj.defXCSP());
            sideAttributes(elt, obj);
            root.appendChild(elt);
        }
        return root;
    }

    /** Builds the &lt;annotations&gt; element (currently only decision variables). */
    protected Element annotations() {
        Element root = doc.createElement(ANNOTATIONS);
        if (imp.annotations.decision != null) {
            // Element vars = doc.createElement(VARIABLES);
            root.appendChild(Utilities.element(doc, DECISION, imp.varEntities.compactOrdered(imp.annotations.decision)));
        }
        return root;
    }

    // When true (set by the -ev flag), exceptions are displayed instead of being hidden.
    public static boolean ev;

    /** Prints usage help and returns null (used as the "no instance" result). */
    private static ProblemAPI usage() {
        System.out.println("\nDescription.\n Compiler is a class that can generate XCSP3 files. You need to provide");
        System.out.println(" an MCSP3 model (Java class implementing ProblemAPI) and some effective data.");
        System.out.println("\nUsage.\n java " + Compiler.class.getName() + " <className> [<arguments>]\n");
        System.out.println(" <className> is the name of a Java class implementing " + ProblemAPI.class.getName());
        System.out.println(" <arguments> is a possibly empty whitespace-separated list of elements among:");
        System.out.println(" -data=... ");
        System.out.println(" where ... stands for the effective data. This can be the name of a JSON");
        System.out.println(" file, a stand-alone value v or a list of values [v1,v2,...,vp]");
        System.out.println(" -dataFormat=... ");
        System.out.println(" where ... stands for the formatting instructions of data (see examples)");
        System.out.println(" -dataSaving");
        System.out.println(" which allows us to save the data in a JSON file");
        System.out.println(" -model=...");
        System.out.println(" where ... stands for the name of a model variant, which allows us to write");
        System.out.println(" code like 'if (isModel(\"basic\")) { ... }'");
        System.out.println(" -ev");
        System.out.println(" which displays the exception that has been thown, in case of a crash");
        System.out.println(" -ic");
        System.out.println(" which indents and compresses, using Linux commands 'xmlindent -i 2' and 'lzma'");
        System.out.println(" -output=...");
        System.out.println(" which ... stands for the name of the output XCSP3 file (without exetnsions)");
        System.out.println("\nExamples.");
        System.out.println(" java " + Compiler.class.getName() + " " + AllInterval.class.getName() + " -data=5");
        System.out.println(" => generates the XCSP3 file AllInterval-5.xml");
        System.out.println(" java " + Compiler.class.getName() + " " + AllInterval.class.getName() + " -data=5 -dataFormat=%03d");
        System.out.println(" => generates the XCSP3 file AllInterval-005.xml");
        System.out.println(" java " + Compiler.class.getName() + " " + Bibd.class.getName() + " -data=[6,50,25,3,10]");
        System.out.println(" => generates the XCSP3 file Bibd-6-50-25-3-10.xml");
        System.out.println(" java " + Compiler.class.getName() + " " + Bibd.class.getName() + " -data=[6,50,25,3,10] -dataFormat=[%02d,%02d,%02d,%02d,%02d]");
        System.out.println(" => generates the XCSP3 file Bibd-06-50-25-03-10.xml");
        System.out.println(
                " java " + Compiler.class.getName() + " " + Bibd.class.getName() + " -data=[6,50,25,3,10] -dataFormat=[%02d,%02d,%02d,%02d,%02d] -dataSaving");
        System.out.println(" => generates the JSON file Bibd-06-50-25-03-10.json");
        System.out.println(" => generates the XCSP3 file Bibd-06-50-25-03-10.xml");
        System.out.println(" java " + Compiler.class.getName() + " " + Bibd.class.getName() + " -data=Bibd-06-50-25-03-10.json");
        System.out.println(" => generates the XCSP3 file Bibd-06-50-25-03-10.xml");
        System.out.println(" java " + Compiler.class.getName() + " " + Bibd.class.getName() + " -data=Bibd-06-50-25-03-10.json -ic");
        System.out.println(" => generates the indented compressed XCSP3 file Bibd-06-50-25-03-10.xml.lzma");
        System.out.println(" java " + Compiler.class.getName() + " " + AllInterval.class.getName() + " -data=5 -model=test");
        System.out.println(" => generates the XCSP3 file AllInterval-test-5.xml");
        System.out.println(" while executing any piece of code controlled by 'isModel(\"test\"))'");
        System.out.println(" java " + Compiler.class.getName() + " " + AllInterval.class.getName() + " -data=5 -output=tmp");
        System.out.println(" => generates the XCSP3 file tmp.xml");
        return null;
    }

    /**
     * Reflectively instantiates the model class named in args[0] and feeds it the remaining
     * command-line options. (Definition continues beyond this chunk.)
     */
    private static ProblemAPI buildInstanceAPI(String[] args) {
        if (args.length == 0)
            return usage();
        try {
            Constructor<?>[] cs = Class.forName(args[0]).getDeclaredConstructors();
            if (cs.length > 1 || cs[0].getParameterTypes().length > 0) {
                System.out.println("\nProblem: It is forbidden to include constructors in a class implementing " + ProblemAPI.class.getName() + "\n");
                return null;
            }
            if (!ProblemAPI.class.isAssignableFrom(cs[0].getDeclaringClass())) {
                System.out.println("\nProblem: the specified class " + args[0] + " does not implement " + ProblemAPI.class.getName() + "\n");
                return usage();
            }
            cs[0].setAccessible(true);
            ProblemAPI api = (ProblemAPI) cs[0].newInstance();
            // Options consumed by the compiler itself are filtered out of the model's argument list.
            String[] argsForPb = Stream.of(args).skip(1)
                    .filter(s -> !s.startsWith(VARIANT) && !s.startsWith(DATA) && !s.startsWith(OUTPUT) && !s.equals(EV) && !s.equals(IC))
                    .toArray(String[]::new);
            ev = Stream.of(args).anyMatch(s -> s.equals(EV));
            String model = Stream.of(args).filter(s ->
s.startsWith(VARIANT)).map(s -> s.substring(VARIANT.length() + 1)).findFirst().orElse(""); String data = Stream.of(args).filter(s -> s.startsWith(DATA + "=")).map(s -> s.substring(DATA.length() + 1)).findFirst().orElse(""); String dataFormat = Stream.of(args).filter(s -> s.startsWith(DATA_FORMAT)).map(s -> s.substring(DATA_FORMAT.length() + 1)).findFirst().orElse(""); boolean dataSaving = Stream.of(args).anyMatch(s -> s.equals(DATA_SAVING)); new ProblemIMP3(api, model, data, dataFormat, dataSaving, argsForPb); return api; } catch (Exception e) { System.out.println("It was not possible to build an instance of the specified class " + args[0]); if (ev) e.printStackTrace(); return usage(); } } public static Document buildDocument(String[] args) { ProblemAPI api = buildInstanceAPI(args); return api == null ? null : new Compiler(api).buildDocument(); } public static void main(String[] args) { ProblemAPI api = buildInstanceAPI(args); if (api == null) return; Document document = new Compiler(api).buildDocument(); String output = Stream.of(args).filter(s -> s.startsWith(OUTPUT)).map(s -> s.substring(OUTPUT.length() + 1)).findFirst().orElse(null); String fileName = (output != null ? output : api.name()) + ".xml"; ProblemAPI.api2imp.get(api).save(document, fileName); if (Stream.of(args).anyMatch(s -> s.equals(IC))) ProblemAPI.api2imp.get(api).indentAndCompressXmlUnderLinux(fileName); } }
package org.ops4j.pax.web.itest;

import static org.ops4j.pax.exam.CoreOptions.felix;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
import static org.ops4j.pax.exam.CoreOptions.options;
import static org.ops4j.pax.exam.CoreOptions.systemProperty;
import static org.ops4j.pax.exam.CoreOptions.workingDirectory;
import static org.ops4j.pax.exam.CoreOptions.wrappedBundle;
import static org.ops4j.pax.exam.MavenUtils.asInProject;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.Configuration;
import org.ops4j.pax.exam.junit.ExamReactorStrategy;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleException;

/**
 * Pax Exam integration test: provisions the Apache Felix Web Console on top of
 * Pax Web inside a Felix container and verifies it answers over HTTP on port 8181.
 *
 * @author Toni Menzel (tonit)
 * @since Mar 3, 2009
 */
@RunWith(JUnit4TestRunner.class)
public class WebConsoleIntegrationTest extends ITestBase {

    // NOTE(review): appears unused within this class — candidate for removal, verify first.
    private Bundle installWarBundle;

    /**
     * Container configuration: the base options from ITestBase plus the system
     * properties and bundles required by the web console and the HTTP test client.
     */
    @Configuration
    public static Option[] configure() {
        Option[] options = baseConfigure();
        Option[] options2 = options(
                workingDirectory("target/paxexam/"),
                systemProperty("org.ops4j.pax.logging.DefaultServiceLog.level").value("TRACE"),
                systemProperty("org.osgi.service.http.hostname").value("127.0.0.1"),
                systemProperty("org.osgi.service.http.port").value("8181"),
                systemProperty("java.protocol.handler.pkgs").value("org.ops4j.pax.url"),
                systemProperty("org.ops4j.pax.url.war.importPaxLoggingPackages").value("true"),
                systemProperty("org.ops4j.pax.web.log.ncsa.enabled").value("true"),
                felix(),
                mavenBundle().groupId("org.apache.felix")
                        .artifactId("org.apache.felix.bundlerepository").version("1.6.2"),
                mavenBundle().groupId("org.apache.felix")
                        .artifactId("org.apache.felix.configadmin").version("1.2.8"),
                mavenBundle().groupId("org.apache.felix")
                        .artifactId("org.apache.felix.shell").version("1.4.2"),
                mavenBundle().groupId("org.apache.felix")
                        .artifactId("org.apache.felix.shell.tui").version("1.4.1"),
                mavenBundle().groupId("org.apache.felix")
                        .artifactId("org.apache.felix.webconsole").version("3.1.6"),
                // mavenBundle().groupId("org.ops4j.pax.web")
                // .artifactId("pax-web-extender-war")
                // .version(asInProject()),
                // mavenBundle().groupId("org.ops4j.pax.web")
                // .artifactId("pax-web-jetty-bundle").version(asInProject()),
                // mavenBundle().groupId("org.ops4j.pax.web")
                // .artifactId("pax-web-jsp").version(asInProject()),
                // mavenBundle().groupId("org.ops4j.pax.web")
                // .artifactId("pax-web-extender-war")
                // .version("1.0.4"),
                // mavenBundle().groupId("org.ops4j.pax.web")
                // .artifactId("pax-web-jetty-bundle").version("1.0.4"),
                // mavenBundle().groupId("org.ops4j.pax.web")
                // .artifactId("pax-web-jsp").version("1.0.4"),
                mavenBundle().groupId("org.ops4j.pax.logging")
                        .artifactId("pax-logging-api").version(asInProject()),
                mavenBundle().groupId("org.ops4j.pax.logging")
                        .artifactId("pax-logging-service")
                        .version(asInProject()),
                // mavenBundle().groupId("org.mortbay.jetty")
                // .artifactId("servlet-api")
                // .version(asInProject()),
                //HTTP Client needed for UnitTesting
                mavenBundle("commons-codec", "commons-codec").version(asInProject()),
                wrappedBundle(mavenBundle("org.apache.httpcomponents", "httpclient", "4.1")),
                wrappedBundle(mavenBundle("org.apache.httpcomponents", "httpcore", "4.1"))
        );
        // Concatenate the inherited options with the web-console-specific ones.
        List<Option> list = new ArrayList<Option>(Arrays.asList(options));
        list.addAll(Arrays.asList(options2));
        return (Option[]) list.toArray(new Option[list.size()]);
    }

    /**
     * You will get a list of bundles installed by default plus your testcase,
     * wrapped into a bundle called pax-exam-probe
     */
    @Test
    public void listBundles() {
        for (Bundle b : bundleContext.getBundles()) {
            System.out.println("Bundle " + b.getBundleId() + " : "
                    + b.getSymbolicName());
        }
    }

    /**
     * Checks the console is protected (401 without credentials) and then reachable
     * with authentication enabled on the helper.
     */
    @Test
    public void testBundlesPath() throws BundleException, InterruptedException,
            IOException {
        // Unauthenticated request must be rejected.
        testWebPath("http://localhost:8181/system/console/bundles", "", 401, false);
        // Authenticated request must render the bundles page.
        testWebPath("http://localhost:8181/system/console/bundles",
                "Apache Felix Web Console<br/>Bundles", 200, true);
    }
}
package ru.lj.alamar.microbe;

import java.util.Random;

import ru.yandex.bolts.collection.Cf;
import ru.yandex.bolts.collection.ListF;
import ru.yandex.bolts.collection.Tuple2List;

/**
 * Simulation driver: runs 50 generations over a population of microbes.
 * Each generation mutates every microbe, reports the average fitness, then
 * replaces the population with selected offspring.
 *
 * @author ilyak
 */
public class Main {
    public static int MICROBES = 10;
    public static int GENES = 10;
    public static float GENE_MUTATION_CHANCE = 0.05f;
    public static float MUTATION_POSITIVE_CHANCE = 0.1f;
    public static float NEGATIVE_EFFECT = 0.05f;
    public static float POSITIVE_EFFECT = 0.01f;
    public static float LUCK_RATIO = 0.3f;

    public static void main(String[] args) {
        Random random = new Random();
        // Seed population of identical fresh microbes.
        ListF<Microbe> population = Cf.arrayList();
        int seeded = 0;
        while (seeded < MICROBES) {
            population.add(new Microbe(GENES));
            seeded++;
        }
        int generation = 0;
        while (generation < 50) {
            // Mutate everyone, accumulating fitness for the pre-selection average.
            float fitnessSum = 0f;
            for (Microbe microbe : population) {
                microbe.mutate(random, GENE_MUTATION_CHANCE, NEGATIVE_EFFECT, MUTATION_POSITIVE_CHANCE, POSITIVE_EFFECT);
                fitnessSum += microbe.fitness();
            }
            float avgFitness = fitnessSum / (float) population.size();
            population = selectOffspring(random, population);
            // Report size after selection, average fitness before it.
            System.out.println(population.size() + "\t" + avgFitness);
            generation++;
        }
    }

    /**
     * Doubles the surviving population (each live microbe plus one replica),
     * scores every candidate by a blend of fitness and random luck, and keeps
     * the top scorers up to the original population size.
     */
    static ListF<Microbe> selectOffspring(Random r, ListF<Microbe> population) {
        Tuple2List<Float, Microbe> scored = Tuple2List.arrayList();
        for (Microbe candidate : population) {
            if (candidate.isDead()) {
                continue;
            }
            scored.add(candidate.fitness() * (1f - LUCK_RATIO) + r.nextFloat() * LUCK_RATIO, candidate);
            scored.add(candidate.fitness() * (1f - LUCK_RATIO) + r.nextFloat() * LUCK_RATIO, candidate.replicate());
        }
        // Highest score first; truncate back to the original size.
        return scored.sortBy1().reverse().get2().take(population.size());
    }
}
package org.erhsroboticsclub.robo2013;

import edu.wpi.first.wpilibj.*;
import edu.wpi.first.wpilibj.can.CANTimeoutException;
import org.erhsroboticsclub.robo2013.utilities.MathX;
import org.erhsroboticsclub.robo2013.utilities.Messenger;

/**
 * Main robot class for the 2013 season: four-CANJaguar tank drive, a linear
 * accelerator frisbee launcher, and an AI helper for auto-aiming.
 */
public class Robo2013 extends SimpleRobot {

    private RobotDrive drive;
    private Joystick stickL, stickR;
    private CANJaguar topLeftJaguar, bottomLeftJaguar, topRightJaguar, bottomRightJaguar;
    private Messenger msg;
    private LinearAccelerator launcher;
    private AI agent;
    // Launcher setpoint in degrees; starts at the "level" preset.
    private double launchAngle = RoboMap.LAUNCHER_LEVEL_ANGLE;
    // When true the launch angle tracks the right stick's throttle axis.
    private boolean dynamicMode = true;

    /**
     * Called once the cRIO boots up
     */
    public void robotInit() {
        msg = new Messenger();
        msg.printLn("Loading FRC 2013");
        try {
            topLeftJaguar = new CANJaguar(RoboMap.TOP_LEFT_DRIVE_MOTOR);
            bottomLeftJaguar = new CANJaguar(RoboMap.BOTTOM_LEFT_DRIVE_MOTOR);
            topRightJaguar = new CANJaguar(RoboMap.TOP_RIGHT_DRIVE_MOTOR);
            bottomRightJaguar = new CANJaguar(RoboMap.BOTTOM_RIGHT_DRIVE_MOTOR);
        } catch (CANTimeoutException ex) {
            // NOTE(review): jaguars stay null after this; RobotDrive construction
            // below would then fail — presumably acceptable since the robot is
            // unusable without the CAN bus anyway.
            msg.printLn("CAN network failed!");
            msg.printLn(ex.getMessage());
        }
        launcher = new LinearAccelerator();
        drive = new RobotDrive(topLeftJaguar, bottomLeftJaguar, topRightJaguar, bottomRightJaguar);
        // All four motors are mounted so that their direction must be inverted.
        drive.setInvertedMotor(RobotDrive.MotorType.kFrontLeft, true);
        drive.setInvertedMotor(RobotDrive.MotorType.kFrontRight, true);
        drive.setInvertedMotor(RobotDrive.MotorType.kRearLeft, true);
        drive.setInvertedMotor(RobotDrive.MotorType.kRearRight, true);
        stickL = new Joystick(RoboMap.LEFT_DRIVE_STICK);
        stickR = new Joystick(RoboMap.RIGHT_DRIVE_STICK);
        agent = new AI(drive, launcher);
        msg.printLn("Done Loading: FRC 2013");
    }

    /**
     * This function is called once each time the robot enters autonomous mode.
     */
    public void autonomous() {
        // Disable safety/watchdog so long-running autonomous steps are not cut short.
        drive.setSafetyEnabled(false);
        Watchdog.getInstance().setExpiration(Double.MAX_VALUE);
        Watchdog.getInstance().kill();
        msg.clearConsole();
        msg.printLn("Auto Started");
        launcher.setAngle(RoboMap.AUTO_SHOOT_ANGLE);
        try {
            drive.setSafetyEnabled(false);
            Watchdog.getInstance().kill();
            //autonomousA();//start autonomous (Plan A)
            autonomousB();//start autonomous (Plan B)
            //autonomousC();//start autonomous (Plan C)
        } catch (Exception e) {
            msg.printLn("Auto mode failed!");
            msg.printLn(e.getMessage());
        }
    }

    /**
     * This function is called once each time the robot enters operator control.
     */
    public void operatorControl() {
        drive.setSafetyEnabled(false);
        Watchdog.getInstance().kill();
        msg.clearConsole();
        msg.printOnLn("Teleop Mode", RoboMap.STATUS_LINE);
        while (isEnabled() && isOperatorControl()) {
            double startTime = System.currentTimeMillis();
            launcher.setWheels(LinearAccelerator.AUTO_SHOOT_SPEED);
            // NOTE(review): moveValue is only used for the (currently disabled)
            // auto angle adjustment below.
            double moveValue = MathX.max(stickL.getY(), stickR.getY());
            drive.tankDrive(stickL.getY() * RoboMap.SPEED, stickR.getY() * RoboMap.SPEED);
            if (stickL.getRawButton(RoboMap.FIRE_BUTTON)) {
                launcher.launch();
            }
            // Angle preset selection; any preset button drops out of dynamic mode.
            if (stickR.getRawButton(RoboMap.DYNAMIC_ANGLE_BUTTON)) {
                dynamicMode = true;
            } else if (stickR.getRawButton(RoboMap.LEVEL_ANGLE_BUTTON)) {
                dynamicMode = false;
                launchAngle = RoboMap.LAUNCHER_LEVEL_ANGLE;
            } else if (stickR.getRawButton(RoboMap.FEED_ANGLE_BUTTON)) {
                dynamicMode = false;
                launchAngle = RoboMap.LAUNCHER_FEED_ANGLE;
            } else if (stickR.getRawButton(RoboMap.NEAR_ANGLE_BUTTON)) {
                dynamicMode = false;
                launchAngle = RoboMap.LAUNCHER_NEAR_ANGLE;
            } else if (stickR.getRawButton(RoboMap.FAR_ANGLE_BUTTON)) {
                dynamicMode = false;
                launchAngle = RoboMap.LAUNCHER_FAR_ANGLE;
            }
            // Manual fine adjustment of the launcher angle.
            if (stickL.getRawButton(RoboMap.BUMP_UP_BUTTON)) {
                launcher.bumpUp();
            } else if(stickL.getRawButton(RoboMap.BUMP_DOWN_BUTTON)) {
                launcher.bumpDown();
            }
            if (dynamicMode) {
                // Map the throttle axis [1,-1] onto the launcher's angle range.
                launchAngle = MathX.map(stickR.getZ(), 1, -1, RoboMap.LAUNCHER_ANGLE_MIN, RoboMap.LAUNCHER_ANGLE_MAX);
            }
            // NOTE(review): duplicates the FEED_ANGLE_BUTTON branch above, apparently
            // so the feed preset also overrides dynamic mode — confirm intent.
            if (stickR.getRawButton(RoboMap.FEED_ANGLE_BUTTON)) {
                launchAngle = RoboMap.LAUNCHER_FEED_ANGLE;
            }
            launcher.setAngle(launchAngle);
            if (moveValue < 0.1) {
                //launcher.adjustAngle();
            }
            // Telemetry: actual vs. requested angle.
            double actualAngle = launcher.readAngle();
            double error = launchAngle - actualAngle;
            msg.printOnLn("angle: " + actualAngle, RoboMap.ANGLE_LINE);
            msg.printOnLn("setpt: " + launchAngle, RoboMap.SETPT_LINE);
            msg.printOnLn("error: " + error, RoboMap.ERROR_LINE);
            // Busy-wait to pace the loop at RoboMap.UPDATE_FREQ milliseconds.
            while (System.currentTimeMillis() - startTime < RoboMap.UPDATE_FREQ) {
                //Do nothing
            }
        }
    }

    /**
     * Plan A autonomous Called once by autonomousInit
     */
    private void autonomousA() throws Exception {
        msg.printLn("Autonomous A:");
        int fails = 0;
        boolean success;
        // 0) Set wheels to proper speed
        msg.printLn("Starting up launcher...");
        launcher.setWheels(LinearAccelerator.AUTO_SHOOT_SPEED);
        // 1) Auto aim launcher
        msg.printLn("Aiming launcher...");
        do {
            if (!isAutonomous()) {
                throw new Exception("Ran out of time!");
            }
            success = agent.autoAimLauncher();
            if (!success) {
                msg.printLn("turnToTarget failed!");
                fails++;
            }
            if (fails > 500) {
                msg.printLn("Giving up...");
                break;
            } else {
                msg.printLn("Retrying...");
            }
        } while (!success);
        // 2) Wait for motors to come up to speed
        msg.printLn("Waiting for motors...");
        Timer.delay(5);
        // 3) Fire all frisbees
        msg.printLn("Starting launch!");
        for (int i = 0; i < 3; i++) {
            msg.printLn("Launching disk " + (i + 1) + "...");
            launcher.launch();
        }
    }

    /**
     * Plan B autonomous Called once by autonomousInit
     */
    private void autonomousB() {
        msg.printLn("Autonomous B:");
        // 0) Set the wheels to proper speed
        msg.printLn("Starting up launcher...");
        launcher.setWheels(LinearAccelerator.AUTO_SHOOT_SPEED);
        // 1) Set the launch angle
        msg.printLn("Setting angle to " + RoboMap.AUTO_SHOOT_ANGLE + "...");
        launcher.setAngle(RoboMap.AUTO_SHOOT_ANGLE);
        launcher.waitForAngle(5000);
        // 2) Fire all frisbees
        msg.printLn("Starting launch!");
        for (int i = 0; i < 3; i++) {
            launcher.setAngle(RoboMap.AUTO_SHOOT_ANGLE);
            msg.printLn("Launching disk " + (i + 1) + "...");
            launcher.launch();
        }
    }
}
package ru.spaderov.start;

import ru.spaderov.models.*;

import java.util.*;

/**
 * In-memory storage of {@link Item} objects: generates ids, supports lookup
 * by id/name/description, editing and removal.
 */
public class Tracker {
    /**
     * All new values will save inside this array of items
     */
    private Item[] items = new Item[10];
    // Number of occupied slots: items[0..position-1] hold the stored items.
    private int position = 0;
    private static final Random RN = new Random();

    /**
     * Create new item: assigns a freshly generated id and stores the item.
     *
     * @param item element to store
     * @return the stored item (same reference, id now set)
     */
    public Item add(Item item) {
        item.setId(this.generateId());
        ensureCapacity();
        this.items[position++] = item;
        return item;
    }

    // Grows the backing array when full — fixes the ArrayIndexOutOfBoundsException
    // the original code threw after the 10th add().
    private void ensureCapacity() {
        if (position == items.length) {
            items = Arrays.copyOf(items, items.length * 2);
        }
    }

    /**
     * Search method of items by ID
     * @param id identifier to look for
     * @return the matching item, or null when not found
     */
    protected Item findById(String id) {
        for (Item item : items) {
            if (item != null && item.getId().equals(id)) {
                return item;
            }
        }
        return null;
    }

    /**
     * Search method of items by name
     * @param name name to look for
     * @return the first matching item, or null when not found
     */
    protected Item findByName(String name) {
        for (Item item : items) {
            if (item != null && item.getName().equals(name)) {
                return item;
            }
        }
        return null;
    }

    /**
     * Search method of items by description
     * @param description description to look for
     * @return the first matching item, or null when not found
     */
    protected Item findByDescription(String description) {
        for (Item item : items) {
            if (item != null && item.getDescription().equals(description)) {
                return item;
            }
        }
        return null;
    }

    // Millisecond timestamp plus a small random offset; not guaranteed unique,
    // but collisions are unlikely at interactive rates.
    String generateId() {
        return String.valueOf(System.currentTimeMillis() + RN.nextInt(100));
    }

    /**
     * Method shows all of items
     * @return a copy of the occupied portion of the storage
     */
    public Item[] getAll() {
        return Arrays.copyOf(this.items, this.position);
    }

    /**
     * Add comment to item
     *
     * BUGFIX: the original appended the item to the array a second time,
     * duplicating it in getAll(); only the comment is attached now.
     *
     * @param item element to comment
     * @return the same item with its comment set
     */
    public Item comment(Item item) {
        item.setComment(this.generateId());
        return item;
    }

    /**
     * Search and edit the stored item carrying the same id as the given item,
     * copying the given item's name and description onto it.
     *
     * BUGFIX: the original dereferenced null array slots (NPE) and assigned the
     * argument's fields back onto itself, so nothing was ever edited.
     *
     * @param item carrier of the target id and the new field values
     */
    public void editById(Item item) {
        Item stored = findById(item.getId());
        if (stored != null) {
            stored.setName(item.getName());
            stored.setDescription(item.getDescription());
        }
    }

    /**
     * Search and erase the item with the given id, compacting the storage.
     *
     * BUGFIX: the original overwrote every stored id before comparing and always
     * hit a NullPointerException on the empty tail slots of the array; it now
     * removes the matching item and shifts the remaining ones left.
     *
     * @param id identifier of the item to remove
     */
    public void deleteById(String id) {
        for (int k = 0; k < position; k++) {
            if (items[k] != null && id.equals(items[k].getId())) {
                // Shift the tail left over the removed slot and shrink.
                System.arraycopy(items, k + 1, items, k, position - k - 1);
                items[--position] = null;
                return;
            }
        }
    }
}
package org.exist.validation;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;

import org.xml.sax.XMLReader;

/**
 * Class for checking dependencies with XML libraries.
 *
 * @author Adam Retter <adam.retter@devon.gov.uk>
 */
public class XmlLibraryChecker {

    /**
     * Possible XML Parsers, at least one must be valid
     */
    private final static ClassVersion[] validParsers = {
            new ClassVersion("Xerces", "Xerces-J 2.9.1", "org.apache.xerces.impl.Version.getVersion()")
    };

    /**
     * Possible XML Transformers, at least one must be valid
     */
    private final static ClassVersion[] validTransformers = {
            new ClassVersion("Saxon", "8.9.0", "net.sf.saxon.Version.getProductVersion()"),
            new ClassVersion("Xalan", "Xalan Java 2.7.1", "org.apache.xalan.Version.getVersion()"),
    };

    /**
     * Possible XML resolvers, at least one must be valid
     */
    private final static ClassVersion[] validResolvers = {
            new ClassVersion("Resolver", "XmlResolver 1.2", "org.apache.xml.resolver.Version.getVersion()"),
    };

    /**
     * Remove "@" from string (strips the identity-hash suffix of the default
     * Object.toString() form, leaving just the class name).
     */
    private static String getClassName(String classid) {
        String className;
        int lastChar = classid.lastIndexOf("@");
        if (lastChar == -1) {
            className = classid;
        } else {
            className = classid.substring(0, lastChar);
        }
        return className;
    }

    /**
     * Determine the class that is actually used as XML parser.
     *
     * @return Full classname of parser.
     */
    private static String determineActualParserClass() {
        String parserClass = "Unable to determine parser class";
        try {
            SAXParserFactory factory = SAXParserFactory.newInstance();
            XMLReader xmlReader = factory.newSAXParser().getXMLReader();
            String classId = xmlReader.toString();
            parserClass = getClassName(classId);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        return parserClass;
    }

    /**
     * Determine the class that is actually used as XML transformer.
     *
     * @return Full classname of transformer.
     */
    private static String determineActualTransformerClass(){
        String transformerClass = "Unable to determine transformer class";
        try {
            TransformerFactory factory = TransformerFactory.newInstance();
            Transformer transformer = factory.newTransformer();
            String classId = transformer.toString();
            transformerClass = getClassName(classId);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        return transformerClass;
    }

    /**
     * Perform checks on parsers, transformers and resolvers.
     */
    public static void check() {
        StringBuffer message = new StringBuffer();

        if (hasValidClassVersion("Parser", validParsers, message)) {
            System.out.println(message.toString());
        } else {
            System.err.println(message.toString());
        }

        message = new StringBuffer();
        if (hasValidClassVersion("Transformer", validTransformers, message)) {
            System.out.println(message.toString());
        } else {
            System.err.println(message.toString());
        }

        message = new StringBuffer();
        if (hasValidClassVersion("Resolver", validResolvers, message)) {
            System.out.println(message.toString());
        } else {
            System.err.println(message.toString());
        }

        System.out.println("Using parser " + determineActualParserClass());
        System.out.println("Using transformer " + determineActualTransformerClass());
        System.out.println();
    }

    /**
     * Check if for the specified service object one of the required
     * classes is available.
     *
     * NOTE(review): the version check below is a lexicographic string compare,
     * which mis-orders multi-digit components (e.g. "2.10" < "2.9"); kept as-is
     * to preserve existing behavior.
     *
     * @param type         Parser, Transformer or Resolver, used for reporting only.
     * @param validClasses Array of valid classes.
     * @param message      Output message of detecting classes.
     * @return TRUE if valid class has been found, otherwise FALSE.
     */
    public static boolean hasValidClassVersion(String type,
            ClassVersion[] validClasses, StringBuffer message) {

        String sep = System.getProperty("line.separator");

        message.append("Looking for a valid " + type + "..." + sep);

        for (int i = 0; i < validClasses.length; i++) {
            String actualVersion = validClasses[i].getActualVersion();

            message.append("Checking for " + validClasses[i].getSimpleName());

            if (actualVersion != null) {
                message.append(", found version " + actualVersion);

                if (actualVersion.compareToIgnoreCase(
                        validClasses[i].getRequiredVersion()) >= 0) {
                    message.append(sep + "OK!" + sep);
                    return true;
                } else {
                    message.append(" needed version "
                            + validClasses[i].getRequiredVersion() + sep);
                }
            } else {
                message.append(", not found!" + sep);
            }
        }

        message.append("Warning: Failed find a valid " + type + "!" + sep);
        message.append(sep + "Please add an appropriate " + type + " to the "
                + "class-path, e.g. in the 'endorsed' folder of "
                + "the servlet container or in the 'endorsed' folder "
                + "of the JRE." + sep);

        return false;
    }

    /**
     * Checks to see if a valid XML Parser exists
     *
     * @return boolean true indicates a valid Parser was found, false otherwise
     */
    public static boolean hasValidParser() {
        return hasValidParser(new StringBuffer());
    }

    /**
     * Checks to see if a valid XML Parser exists
     *
     * @param message Messages about the status of available Parser's will
     *                be appended to this buffer
     *
     * @return boolean true indicates a valid Parser was found, false otherwise
     */
    public static boolean hasValidParser(StringBuffer message) {
        return hasValidClassVersion("Parser", validParsers, message);
    }

    /**
     * Checks to see if a valid XML Transformer exists
     *
     * @return boolean true indicates a valid Transformer was found,
     *         false otherwise
     */
    public static boolean hasValidTransformer() {
        return hasValidTransformer(new StringBuffer());
    }

    /**
     * Checks to see if a valid XML Transformer exists
     *
     * @param message Messages about the status of available Transformer's
     *                will be appended to this buffer
     *
     * @return boolean true indicates a valid Transformer was found,
     *         false otherwise
     */
    public static boolean hasValidTransformer(StringBuffer message) {
        return hasValidClassVersion("Transformer", validTransformers, message);
    }

    /**
     * Simple class to describe a class, its required version and how to
     * obtain the actual version
     */
    public static class ClassVersion {
        private String simpleName;
        private String requiredVersion;
        private String versionFunction;

        /**
         * Default Constructor
         *
         * @param simpleName      The simple name for the class (just a
         *                        descriptor really)
         * @param requiredVersion The required version of the class
         * @param versionFunction The function to be invoked to obtain the
         *                        actual version of the class, must be fully
         *                        qualified (i.e. includes the package name)
         */
        ClassVersion(String simpleName, String requiredVersion, String versionFunction) {
            this.simpleName = simpleName;
            this.requiredVersion = requiredVersion;
            this.versionFunction = versionFunction;
        }

        /**
         * @return the simple name of the class
         */
        public String getSimpleName() {
            return simpleName;
        }

        /**
         * @return the required version of the class
         */
        public String getRequiredVersion() {
            return requiredVersion;
        }

        /**
         * Invokes the specified versionFunction using reflection to get the
         * actual version of the class
         *
         * @return the actual version of the class, or null when the class or
         *         its version method is not on the class-path
         */
        public String getActualVersion() {
            String actualVersion = null;

            //get the class name from the specified version function string
            String versionClassName = versionFunction
                    .substring(0, versionFunction.lastIndexOf('.'));

            //get the function name from the specified version function string
            String versionFunctionName = versionFunction.substring(
                    versionFunction.lastIndexOf('.') + 1,
                    versionFunction.lastIndexOf('('));

            try {
                //get the class (parameterized with a wildcard: no raw types)
                Class<?> versionClass = Class.forName(versionClassName);

                //get the method
                Method getVersionMethod = versionClass
                        .getMethod(versionFunctionName, (Class<?>[]) null);

                //invoke the static method (receiver is ignored for statics)
                actualVersion = (String) getVersionMethod
                        .invoke(versionClass, (Object[]) null);
            }
            // All four failures simply mean "library absent or incompatible":
            // the probe reports that by returning null, so they are ignored
            // deliberately.
            catch (ClassNotFoundException ignored) {
            } catch (NoSuchMethodException ignored) {
            } catch (InvocationTargetException ignored) {
            } catch (IllegalAccessException ignored) {
            }

            //return the actual version
            return actualVersion;
        }
    }
}
package com.intellij.debugger.impl;

import com.intellij.codeInsight.daemon.impl.IdentifierHighlighterPass;
import com.intellij.debugger.DebuggerInvocationUtil;
import com.intellij.debugger.SourcePosition;
import com.intellij.debugger.engine.SuspendContextImpl;
import com.intellij.debugger.engine.SuspendManagerUtil;
import com.intellij.debugger.engine.events.DebuggerCommandImpl;
import com.intellij.debugger.jdi.StackFrameProxyImpl;
import com.intellij.debugger.ui.impl.watch.ThreadDescriptorImpl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.impl.FileEditorManagerImpl;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiCompiledElement;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.xdebugger.XDebugSession;
import com.intellij.xdebugger.XSourcePosition;
import org.jetbrains.annotations.NotNull;

import java.util.Collection;

/**
 * Static helpers for switching the debugger context (stack frame / thread) and
 * for locating the source position nearest to the current breakpoint.
 */
public class DebuggerContextUtil {

  /**
   * Selects the given stack frame as the current debugger context.
   * Must be called on the EDT; the context itself is built on the debugger
   * manager thread and pushed back to the EDT afterwards.
   */
  public static void setStackFrame(final DebuggerStateManager manager, final StackFrameProxyImpl stackFrame) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    final DebuggerContextImpl context = manager.getContext();

    final DebuggerSession session = context.getDebuggerSession();
    if (session != null) {
      session.getProcess().getManagerThread().schedule(new DebuggerCommandImpl(PrioritizedTask.Priority.HIGH) {
        @Override
        protected void action() {
          // Find the suspend context belonging to the frame's thread before
          // building the new debugger context.
          SuspendContextImpl threadSuspendContext =
            SuspendManagerUtil.findContextByThread(session.getProcess().getSuspendManager(), stackFrame.threadProxy());
          final DebuggerContextImpl newContext =
            DebuggerContextImpl.createDebuggerContext(session, threadSuspendContext, stackFrame.threadProxy(), stackFrame);
          DebuggerInvocationUtil.swingInvokeLater(session.getProject(), () -> {
            manager.setState(newContext, session.getState(), DebuggerSession.Event.REFRESH, null);
            SourceCodeChecker.checkSource(newContext);
          });
        }
      });
    }
    else {
      // No live session: reset to the empty, disposed state.
      manager.setState(DebuggerContextImpl.EMPTY_CONTEXT, DebuggerSession.State.DISPOSED, DebuggerSession.Event.REFRESH, null);
    }
  }

  /**
   * Selects the given thread as the current debugger context. EDT only.
   */
  public static void setThread(DebuggerStateManager contextManager, ThreadDescriptorImpl item) {
    ApplicationManager.getApplication().assertIsDispatchThread();

    final DebuggerSession session = contextManager.getContext().getDebuggerSession();
    final DebuggerContextImpl newContext =
      DebuggerContextImpl.createDebuggerContext(session, item.getSuspendContext(), item.getThreadReference(), null);

    contextManager.setState(newContext, session != null ? session.getState() : DebuggerSession.State.DISPOSED,
                            DebuggerSession.Event.CONTEXT, null);
  }

  /**
   * Convenience factory: builds a context for the suspend context's own thread.
   */
  @NotNull
  public static DebuggerContextImpl createDebuggerContext(@NotNull DebuggerSession session, SuspendContextImpl suspendContext) {
    return DebuggerContextImpl.createDebuggerContext(session, suspendContext,
                                                     suspendContext != null ? suspendContext.getThread() : null, null);
  }

  /**
   * Among all usages of {@code psi} inside the current method (or file), finds the
   * one nearest to — but not after — the line of the current breakpoint, and
   * returns its source position. Returns null when it cannot be determined.
   */
  public static SourcePosition findNearest(@NotNull DebuggerContextImpl context, @NotNull PsiElement psi, @NotNull PsiFile file) {
    if (psi instanceof PsiCompiledElement) {
      // it makes no sense to compute text range of compiled element
      return null;
    }
    final DebuggerSession session = context.getDebuggerSession();
    if (session != null) {
      return ProgressManager.getInstance().runProcess(() -> {
        try {
          final XDebugSession debugSession = session.getXDebugSession();
          if (debugSession != null) {
            final XSourcePosition position = debugSession.getCurrentPosition();
            Editor editor =
              ((FileEditorManagerImpl)FileEditorManager.getInstance(file.getProject())).getSelectedTextEditor(true);
            //final Editor editor = fileEditor instanceof TextEditorImpl ? ((TextEditorImpl)fileEditor).getEditor() : null;
            if (editor != null && position != null && position.getFile().equals(file.getOriginalFile().getVirtualFile())) {
              // Restrict the usage search to the enclosing method when there is one.
              PsiMethod method = PsiTreeUtil.getParentOfType(PositionUtil.getContextElement(context), PsiMethod.class, false);
              final Collection<TextRange> ranges = IdentifierHighlighterPass.getUsages(psi, method != null ? method : file, false);
              final int breakPointLine = position.getLine();
              int bestLine = -1;
              int bestOffset = -1;
              for (TextRange range : ranges) {
                final int line = editor.offsetToLogicalPosition(range.getStartOffset()).line;
                if (line > bestLine && line < breakPointLine) {
                  // Closest usage strictly before the breakpoint so far.
                  bestLine = line;
                  bestOffset = range.getStartOffset();
                }
                else if (line == breakPointLine) {
                  // Exact match on the breakpoint line wins outright.
                  bestOffset = range.getStartOffset();
                  break;
                }
              }
              if (bestOffset > -1) {
                return SourcePosition.createFromOffset(file, bestOffset);
              }
            }
          }
        }
        catch (Exception ignore) {
          // Best-effort lookup: any failure just falls through to null.
        }
        return null;
      }, new EmptyProgressIndicator());
    }
    return null;
  }
}
package seedu.address.ui;

import javafx.fxml.FXML;
import javafx.scene.control.Label;
import javafx.scene.layout.FlowPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Region;
import seedu.address.model.person.ReadOnlyTask;

/**
 * A UI card showing the details of a single {@link ReadOnlyTask}: its one-based
 * display index, name, start, deadline, and tags.
 */
public class TaskCard extends UiPart<Region> {

    private static final String FXML = "TaskListCard.fxml";

    // Controls injected from TaskListCard.fxml; populated by the FXML loader
    // before the constructor body runs (via super(FXML)).
    @FXML
    private HBox cardPane;
    @FXML
    private Label name;
    @FXML
    private Label id;
    @FXML
    private Label start;
    @FXML
    private Label deadline;
    @FXML
    private FlowPane tags;

    /**
     * Creates a card populated from the given task.
     *
     * @param task the task whose fields are rendered
     * @param displayedIndex the one-based index shown beside the task name
     */
    public TaskCard(ReadOnlyTask task, int displayedIndex) {
        super(FXML);
        id.setText(displayedIndex + ". ");
        name.setText(task.getName().fullName);
        start.setText(task.getStart().value);
        deadline.setText(task.getDeadline().value);
        renderTags(task);
    }

    /** Adds one {@link Label} to the tags flow pane for each of the task's tags. */
    private void renderTags(ReadOnlyTask task) {
        task.getTags().forEach(tag -> {
            Label tagLabel = new Label(tag.tagName);
            tags.getChildren().add(tagLabel);
        });
    }
}
package org.jmist.toolkit.ui;

import java.awt.Component;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.JTable;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableModel;

/**
 * A <code>JPanel</code> that allows the user to navigate a tree of progress
 * indicators. The currently active node is rendered at the top of the panel
 * and its children are listed in a table; double-clicking a row descends into
 * that child, and the "&lt;&lt;" / "O" buttons move to the parent / root node.
 * @author bkimmel
 */
public class ProgressTreePanel extends javax.swing.JPanel {

    /** Creates new form ProgressTreePanel */
    public ProgressTreePanel() {
        // Root node with an empty title; must exist before initComponents()
        // because the generated code reads getTopNodeComponent()/getTableModel().
        this.top = new ProgressNode("");
        initComponents();
    }

    /**
     * Creates a new <code>ProgressTreePanel</code>.
     * @param title The title of the root task.
     */
    public ProgressTreePanel(String title) {
        this.top = new ProgressNode(title);
        initComponents();
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc=" Generated Code ">//GEN-BEGIN:initComponents
    private void initComponents() {
        javax.swing.JPanel progressNodeComponent;

        parentButton = new javax.swing.JButton();
        jScrollPane1 = new javax.swing.JScrollPane();
        childrenTable = new javax.swing.JTable();
        progressNodePanel = new javax.swing.JPanel();
        progressNodeComponent = getTopNodeComponent();
        rootButton = new javax.swing.JButton();

        parentButton.setText("<<");
        parentButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                parentButtonActionPerformed(evt);
            }
        });

        childrenTable.setModel(getTableModel());
        childrenTable.getColumn("Progress").setCellRenderer(ProgressTableCellRenderer.getInstance());
        childrenTable.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                childrenTableMouseClicked(evt);
            }
        });
        jScrollPane1.setViewportView(childrenTable);

        javax.swing.GroupLayout progressNodeComponentLayout = new javax.swing.GroupLayout(progressNodeComponent);
        progressNodeComponent.setLayout(progressNodeComponentLayout);
        progressNodeComponentLayout.setHorizontalGroup(
            progressNodeComponentLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 408, Short.MAX_VALUE)
        );
        progressNodeComponentLayout.setVerticalGroup(
            progressNodeComponentLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 59, Short.MAX_VALUE)
        );

        javax.swing.GroupLayout progressNodePanelLayout = new javax.swing.GroupLayout(progressNodePanel);
        progressNodePanel.setLayout(progressNodePanelLayout);
        progressNodePanelLayout.setHorizontalGroup(
            progressNodePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(progressNodeComponent, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
        );
        progressNodePanelLayout.setVerticalGroup(
            progressNodePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(progressNodeComponent, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
        );

        rootButton.setText("O");
        rootButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                rootButtonActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                    .addComponent(progressNodePanel, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(rootButton)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(parentButton))
                    .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 408, Short.MAX_VALUE))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(progressNodePanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 171, Short.MAX_VALUE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(parentButton)
                    .addComponent(rootButton))
                .addContainerGap())
        );
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Fires when the user clicks on the table showing the current node's
     * child progress indicators.
     * @param evt The event arguments.
     */
    private void childrenTableMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_childrenTableMouseClicked
        // A left double-click on a row descends into that child node.
        if (evt.getButton() == java.awt.event.MouseEvent.BUTTON1 && evt.getClickCount() == 2) {
            int selectedRow = this.childrenTable.getSelectedRow();
            if (selectedRow >= 0) {
                evt.consume();
                this.moveToChild(selectedRow);
            }
        }
    }//GEN-LAST:event_childrenTableMouseClicked

    /**
     * Fires when the user clicks on the button to switch to the root
     * <code>ProgressIndicator</code>.
     * @param evt The event arguments.
     */
    private void rootButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_rootButtonActionPerformed
        this.moveToRoot();
    }//GEN-LAST:event_rootButtonActionPerformed

    /**
     * Fires when the user clicks on the button to switch to the parent
     * <code>ProgressIndicator</code>.
     * @param evt The event arguments.
     */
    private void parentButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_parentButtonActionPerformed
        this.moveToParent();
    }//GEN-LAST:event_parentButtonActionPerformed

    /**
     * A <code>TableCellRenderer</code> for rendering progress bars in a table
     * cell. The cell value itself is the <code>JProgressBar</code> component
     * (see {@code ProgressNode.getValueAt}), so rendering is a plain cast.
     * @author bkimmel
     */
    private static class ProgressTableCellRenderer implements TableCellRenderer {

        /* (non-Javadoc)
         * @see javax.swing.table.TableCellRenderer#getTableCellRendererComponent(javax.swing.JTable, java.lang.Object, boolean, boolean, int, int)
         */
        @Override
        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
            return (JProgressBar) value;
        }

        /**
         * Creates a new <code>ProgressTableCellRenderer</code>.  This
         * constructor is private because this class is a singleton.
         */
        private ProgressTableCellRenderer() {
            // nothing to do.
        }

        /**
         * Gets the single instance of <code>ProgressTableCellRenderer</code>.
         * NOTE(review): lazy init is not synchronized — presumably only ever
         * called from the EDT; confirm before using off-EDT.
         * @return The single instance of <code>ProgressTableCellRenderer</code>.
         */
        public static ProgressTableCellRenderer getInstance() {
            if (instance == null) {
                instance = new ProgressTableCellRenderer();
            }
            return instance;
        }

        /**
         * The single instance of <code>ProgressTableCellRenderer</code>.
         */
        private static ProgressTableCellRenderer instance = null;

    }

    /**
     * Gets the <code>JPanel</code> displaying the current
     * <code>ProgressIndicator</code>.
     * @return The <code>JPanel</code> displaying the current
     *         <code>ProgressIndicator</code>.
     */
    private JPanel getTopNodeComponent() {
        return (JPanel) this.top.getComponent();
    }

    /**
     * Gets the <code>TableModel</code> for the current
     * <code>ProgressIndicator</code>s children.
     * @return The <code>TableModel</code> for the current
     *         <code>ProgressIndicator</code>s children.
     */
    private TableModel getTableModel() {
        // ProgressNode doubles as the AbstractTableModel of its children.
        return this.top;
    }

    /**
     * Redisplays the contents of this <code>ProgressTreePanel</code> after the
     * active node changes: swaps in the new node's header component and
     * rebinds the children table to the new node's model.
     */
    private void refresh() {

        /* Update the top level progress indicator. */
        JComponent progressNodeComponent = this.getTopNodeComponent();

        this.progressNodePanel.removeAll();

        javax.swing.GroupLayout progressNodePanelLayout = new javax.swing.GroupLayout(progressNodePanel);
        progressNodePanel.setLayout(progressNodePanelLayout);
        progressNodePanelLayout.setHorizontalGroup(
            progressNodePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(progressNodeComponent, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
        );
        progressNodePanelLayout.setVerticalGroup(
            progressNodePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(progressNodeComponent, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
        );

        /* Update the child table. */
        this.childrenTable.setModel(this.getTableModel());
        this.childrenTable.getColumn("Progress").setCellRenderer(ProgressTableCellRenderer.getInstance());

    }

    /**
     * Gets the progress indicator at the root of the tree.
     * @return The <code>ProgressIndicator</code> at the root of the tree.
     */
    public ProgressIndicator getRootProgressIndicator() {
        return this.getRootNode();
    }

    /**
     * Gets the root <code>ProgressNode</code>.
     * @return The root <code>ProgressNode</code>.
     */
    private ProgressNode getRootNode() {
        ProgressNode node = this.top;
        while (node.getParent() != null) {
            node = node.getParent();
        }
        return node;
    }

    /**
     * Sets the specified <code>ProgressNode</code> as the active node.  This
     * will put that node at the top of the <code>JPanel</code> and the table
     * will be populated with it's children.
     * @param node The <code>ProgressNode</code> to make the active node.
     */
    private void moveToNode(ProgressNode node) {
        this.top = node;
        this.refresh();
    }

    /**
     * Makes the root node active.
     */
    private void moveToRoot() {
        this.moveToNode(this.getRootNode());
    }

    /**
     * Makes the active nodes parent node the active node.  This method assumes
     * that the current node is not the root node.
     */
    private void moveToParent() {
        ProgressNode parent = this.top.getParent();
        assert(parent != null);
        this.moveToNode(parent);
    }

    /**
     * Makes one of the active node's children the active node.
     * @param index The index of the child to make active.
     */
    private void moveToChild(int index) {
        this.moveToNode(this.top.getChild(index));
    }

    /**
     * The <code>ProgressIndicator</code> used by <code>ProgressTreePanel</code>
     * and the <code>TableModel</code> used by <code>childrenTable</code>.
     * NOTE(review): progress updates mutate Swing components and fire table
     * events directly — presumably callers invoke these on the EDT; confirm.
     * @author bkimmel
     */
    private static final class ProgressNode extends AbstractTableModel implements ProgressIndicator {

        /**
         * Creates the root <code>ProgressNode</code>.
         * @param title The title of the node.
         */
        public ProgressNode(String title) {
            this.title = title;
            this.parent = null;
        }

        /**
         * Creates a child <code>ProgressNode</code>.
         * @param title The title of the node.
         * @param parent The parent <code>ProgressNode</code>.
         */
        private ProgressNode(String title, ProgressNode parent) {
            this.title = title;
            this.parent = parent;
        }

        /* (non-Javadoc)
         * @see org.jmist.toolkit.ui.ProgressIndicator#removeChild(org.jmist.toolkit.ui.ProgressIndicator)
         */
        @Override
        public void removeChild(ProgressIndicator child) {
            // Identity (==) match: removes the first child that IS this indicator.
            for (int index = 0; index < this.children.size(); index++) {
                if (this.children.get(index) == child) {
                    this.removeChildAt(index);
                    break;
                }
            }
        }

        /* (non-Javadoc)
         * @see org.jmist.toolkit.ui.ProgressIndicator#isCancelPending()
         */
        @Override
        public boolean isCancelPending() {
            // TODO Auto-generated method stub
            return false;
        }

        /* (non-Javadoc)
         * @see org.jmist.toolkit.ui.ProgressIndicator#setProgress(double)
         */
        @Override
        public void setProgress(double progress) {
            // Fractional progress: no "(value/max)" caption, scaled to 0..100.
            this.progressBar.setStringPainted(false);
            this.setProgressBarValue((int) Math.floor(progress * 100.0), 100);
        }

        /* (non-Javadoc)
         * @see org.jmist.toolkit.ui.ProgressIndicator#setProgress(int, int)
         */
        @Override
        public void setProgress(int value, int maximum) {
            this.progressBar.setString(String.format("(%d/%d)", value, maximum));
            this.progressBar.setStringPainted(true);
            this.setProgressBarValue(value, maximum);
        }

        /**
         * Updates the progress bar.
         * @param value The value of the progress bar.
         * @param maximum The maximum value of the progress bar.
         */
        private void setProgressBarValue(int value, int maximum) {
            this.progressBar.setIndeterminate(false);
            if (this.progressBar.getMaximum() != maximum) {
                this.progressBar.setMaximum(maximum);
            }
            this.progressBar.setValue(value);
            this.fireColumnChanged(PROGRESS_COLUMN);
        }

        /* (non-Javadoc)
         * @see org.jmist.toolkit.ui.ProgressIndicator#setProgressIndeterminant()
         */
        @Override
        public void setProgressIndeterminant() {
            this.progressBar.setIndeterminate(true);
            this.fireColumnChanged(PROGRESS_COLUMN);
        }

        /* (non-Javadoc)
         * @see org.jmist.toolkit.ui.ProgressIndicator#setStatusText(java.lang.String)
         */
        @Override
        public void setStatusText(String statusText) {
            this.status = statusText;
            // Update the stand-alone label if this node is (or was) active...
            if (this.statusLabel != null) {
                this.statusLabel.setText(this.status);
            }
            // ...and the parent's table row if this node is a child.
            if (this.parent != null) {
                this.fireColumnChanged(STATUS_COLUMN);
            }
        }

        /* (non-Javadoc)
         * @see org.jmist.toolkit.ui.ProgressIndicator#addChild(java.lang.String)
         */
        @Override
        public ProgressNode addChild(String title) {
            ProgressNode node = new ProgressNode(title, this);
            int index = this.children.size();
            this.children.add(node);
            this.fireTableRowsInserted(index, index);
            return node;
        }

        /**
         * Gets this node's parent node.
         * @return The parent of this <code>ProgressNode</code>.
         */
        public ProgressNode getParent() {
            return this.parent;
        }

        /**
         * Gets the number of children of this <code>ProgressNode</code>.
         * @return The number of children of this <code>ProgressNode</code>.
         */
        public int getNumChildren() {
            return this.children.size();
        }

        /**
         * Gets a child of this <code>ProgressNode</code>.
         * @param index The index into the list of children of the child to
         *         get.
         * @return The child <code>ProgressNode</code> at the specified index.
         */
        public ProgressNode getChild(int index) {
            return this.children.get(index);
        }

        /**
         * Removes the child at the specified index.
         * @param index The index of the <code>ProgressNode</code> to remove.
         */
        public void removeChildAt(int index) {
            assert(0 <= index && index < this.children.size());
            this.children.remove(index);
            this.fireTableRowsDeleted(index, index);
        }

        /**
         * Fires an event on the parent <code>ProgressNode</code> notifying
         * its listeners that the specified column has changed.
         * @param column The index of the column that changed.
         */
        private void fireColumnChanged(int column) {
            if (this.parent != null) {
                // TODO indicate which cell changed.
                this.parent.fireTableDataChanged();
            }
        }

        /**
         * Gets the stand-alone <code>JComponent</code> to display when this
         * <code>ProgressNode</code> is the active node.  The component is
         * created lazily on first use and cached thereafter.
         * @return The stand-alone <code>JComponent</code> to display when this
         *         <code>ProgressNode</code> is the active node.
         */
        public JComponent getComponent() {

            if (this.panel == null) {

                javax.swing.JLabel titleLabel;

                this.panel = new javax.swing.JPanel();
                this.statusLabel = new javax.swing.JLabel();
                titleLabel = new javax.swing.JLabel();

                titleLabel.setText(this.title);
                this.statusLabel.setText(this.status);

                javax.swing.JPanel progressBarPanel = new javax.swing.JPanel();

                javax.swing.GroupLayout progressBarPanelLayout = new javax.swing.GroupLayout(progressBarPanel);
                progressBarPanel.setLayout(progressBarPanelLayout);
                progressBarPanelLayout.setHorizontalGroup(
                    progressBarPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGap(0, 408, Short.MAX_VALUE)
                    .addComponent(this.progressBar, javax.swing.GroupLayout.DEFAULT_SIZE, 408, Short.MAX_VALUE)
                );
                progressBarPanelLayout.setVerticalGroup(
                    progressBarPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGap(0, 19, Short.MAX_VALUE)
                    .addComponent(this.progressBar, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                );

                javax.swing.GroupLayout progressNodePanelLayout = new javax.swing.GroupLayout(this.panel);
                this.panel.setLayout(progressNodePanelLayout);
                progressNodePanelLayout.setHorizontalGroup(
                    progressNodePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(statusLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 408, Short.MAX_VALUE)
                    .addComponent(titleLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 408, Short.MAX_VALUE)
                    .addComponent(progressBarPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                );
                progressNodePanelLayout.setVerticalGroup(
                    progressNodePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(progressNodePanelLayout.createSequentialGroup()
                        .addComponent(titleLabel)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(statusLabel)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(progressBarPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                );

            }

            return this.panel;

        }

        /* (non-Javadoc)
         * @see javax.swing.table.TableModel#getColumnCount()
         */
        @Override
        public int getColumnCount() {
            return NUM_COLUMNS;
        }

        /* (non-Javadoc)
         * @see javax.swing.table.TableModel#getRowCount()
         */
        @Override
        public int getRowCount() {
            return this.children.size();
        }

        /* (non-Javadoc)
         * @see javax.swing.table.TableModel#getValueAt(int, int)
         */
        @Override
        public Object getValueAt(int rowIndex, int columnIndex) {
            assert(rowIndex < this.children.size());
            ProgressNode node = this.children.get(rowIndex);
            switch (columnIndex) {
            case CHILDREN_COLUMN:
                return node.children.size();
            case TITLE_COLUMN:
                return node.title;
            case PROGRESS_COLUMN:
                // The live JProgressBar itself is the cell value; see
                // ProgressTableCellRenderer.
                return node.progressBar;
            case STATUS_COLUMN:
                return node.status;
            default:
                assert(false);
                return null;
            }
        }

        /* (non-Javadoc)
         * @see javax.swing.table.AbstractTableModel#getColumnClass(int)
         */
        @Override
        public Class<?> getColumnClass(int columnIndex) {
            return COLUMN_CLASS[columnIndex];
        }

        /* (non-Javadoc)
         * @see javax.swing.table.AbstractTableModel#getColumnName(int)
         */
        @Override
        public String getColumnName(int column) {
            return COLUMN_NAME[column];
        }

        /** The parent <code>ProgressNode</code> of this node. */
        private final ProgressNode parent;

        /**
         * The <code>List</code> of children of this <code>ProgressNode</code>.
         */
        private final List<ProgressNode> children = new ArrayList<ProgressNode>();

        /**
         * The progress bar that displays the progress for this
         * <code>ProgressIndicator</code>.
         */
        private final JProgressBar progressBar = new JProgressBar();

        /** The title of this <code>ProgressIndicator</code>. */
        private final String title;

        /** The status text of this <code>ProgressIndicator</code>. */
        private String status = "";

        /**
         * The stand-alone <code>JPanel</code> to use to display when this
         * <code>ProgressNode</code> is active.
         */
        private javax.swing.JPanel panel = null;

        /**
         * A <code>JLabel</code> displaying this <code>ProgressNode</code>'s
         * status text.
         */
        private javax.swing.JLabel statusLabel = null;

        /** The number of columns in this <code>TableModel</code>. */
        private static final int NUM_COLUMNS = 4;

        /** The index of the column displaying the number of children. */
        private static final int CHILDREN_COLUMN = 0;

        /** The index of the column displaying the title. */
        private static final int TITLE_COLUMN = 1;

        /** The index of the column displaying the progress bar. */
        private static final int PROGRESS_COLUMN = 2;

        /** The index of the column displaying the status text. */
        private static final int STATUS_COLUMN = 3;

        /** The classes of the columns. */
        private static final Class<?>[] COLUMN_CLASS = { Integer.class, String.class, JProgressBar.class, String.class };

        /** The names of the columns. */
        private static final String[] COLUMN_NAME = { "Children", "Title", "Progress", "Status" };

        /**
         * Serialization version ID.
         */
        private static final long serialVersionUID = 4409494195911210222L;

    }

    /**
     * The <code>ProgressNode</code> currently displaying at the top of this
     * panel.
     */
    private ProgressNode top;

    /**
     * Serialization version ID.
     */
    private static final long serialVersionUID = 4364840053111586849L;

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JTable childrenTable;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JButton parentButton;
    private javax.swing.JPanel progressNodePanel;
    private javax.swing.JButton rootButton;
    // End of variables declaration//GEN-END:variables

}
package org.blendee.sql;

import java.text.DecimalFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.blendee.internal.CollectionMap;
import org.blendee.internal.Traversable;
import org.blendee.internal.TraversableNode;
import org.blendee.internal.Traverser;
import org.blendee.internal.TraverserOperator;
import org.blendee.jdbc.ColumnMetadata;
import org.blendee.jdbc.CrossReference;
import org.blendee.jdbc.DataTypeConverter;
import org.blendee.jdbc.MetadataUtilities;
import org.blendee.jdbc.TablePath;

/**
 * A node in the tree of table relationships reachable from a root table by
 * recursively following imported foreign keys.  Each node knows its table,
 * its columns, its primary-key columns, and its child relationships keyed by
 * foreign-key name and by foreign-key column set.
 * <br>
 * (Original javadoc was lost to an encoding error; descriptions below are
 * reconstructed from the code.)
 */
public final class Relationship implements Traversable, Comparable<Relationship> {

	private final TablePath path;

	private final Relationship root;

	private final Relationship parent;

	// Unique ID of this node within the tree; children append "_<nn>".
	private final String id;

	// The foreign key that links this node to its parent; null for the root.
	private final CrossReference reference;

	private final Map<String, Relationship> foreignKeyNameMap = new HashMap<>();

	private final Map<String, Relationship> foreignKeyIdMap = new HashMap<>();

	private final TraversableNode node = new TraversableNode();

	private final Column[] columns;

	private final Map<String, Column> columnMap = new HashMap<>();

	private final Column[] primaryKeyColumns;

	// Maps each TablePath to every Relationship node in this tree for that table.
	private final CollectionMap<TablePath, Relationship> convertMap;

	Relationship(
		Relationship root,
		Relationship parent,
		CrossReference reference,
		TablePath path,
		String id,
		List<TablePath> relationshipPath,
		RelationshipResolver resolver,
		DataTypeConverter converter,
		CollectionMap<TablePath, Relationship> convertMap) {
		this.path = path;

		convertMap.put(path, this);
		this.convertMap = convertMap;

		if (root == null) {
			// A null root means this node IS the root.
			this.root = this;
		} else {
			this.root = root;
		}

		this.parent = parent;
		this.reference = reference;
		this.id = id;

		ColumnMetadata[] metadatas = MetadataUtilities.getColumnMetadatas(path);

		columns = new Column[metadatas.length];

		// Zero-padded per-column suffix so column IDs sort lexicographically.
		DecimalFormat columnFormat = RelationshipFactory.createDigitFormat(metadatas.length);
		for (int i = 0; i < metadatas.length; i++) {
			ColumnMetadata metadata = metadatas[i];
			Column column = new Column(this, metadata, converter, columnFormat.format(i));
			columns[i] = column;
			// NOTE(review): keys here are raw metadata.getName(), but lookups in
			// getColumn()/primary-key resolution regularize the name first —
			// confirm getName() is already regularized.
			columnMap.put(metadata.getName(), column);
		}

		String[] primaryKeyColumnNames = MetadataUtilities.getPrimaryKeyColumnNames(path);
		primaryKeyColumns = new Column[primaryKeyColumnNames.length];
		for (int i = 0; i < primaryKeyColumnNames.length; i++) {
			primaryKeyColumns[i] = columnMap.get(MetadataUtilities.regularize(primaryKeyColumnNames[i]));
		}

		// Stop recursing when the resolver vetoes traversal into this table.
		if (!resolver.canTraverse(relationshipPath, path)) return;

		List<TablePath> myRelationshipPath = new LinkedList<>(relationshipPath);
		myRelationshipPath.add(path);

		// Build one child node per imported foreign key.
		CrossReference[] references = MetadataUtilities.getCrossReferencesOfImportedKeys(path);
		DecimalFormat relationshipFormat = RelationshipFactory.createDigitFormat(references.length);
		for (int i = 0; i < references.length; i++) {
			CrossReference element = references[i];
			Relationship child = new Relationship(
				this.root,
				this,
				element,
				element.getPrimaryKeyTable(),
				id + "_" + relationshipFormat.format(i),
				myRelationshipPath,
				resolver,
				converter,
				convertMap);
			foreignKeyNameMap.put(MetadataUtilities.regularize(element.getForeignKeyName()), child);
			String[] foreignKeyColumns = element.getForeignKeyColumnNames();
			foreignKeyIdMap.put(createForeignKeyId(foreignKeyColumns), child);
			node.add(child);
		}
	}

	@Override
	public boolean equals(Object o) {
		// Guard against the easy mistake of comparing a Relationship to a
		// TablePath (they are conceptually related but never equal).
		if (o != null && o.getClass().equals(TablePath.class))
			throw new IllegalStateException(TablePath.class.getName() + " ");
		// Equality is by tree-position ID.
		return o instanceof Relationship && id.equals(((Relationship) o).id);
	}

	@Override
	public int hashCode() {
		return id.hashCode();
	}

	/**
	 * Returns the table this node represents.
	 * @return this node's {@link TablePath}
	 */
	public TablePath getTablePath() {
		return path;
	}

	@Override
	public int compareTo(Relationship target) {
		// Ordering is consistent with equals(): both use the ID.
		return id.compareTo(target.id);
	}

	/**
	 * Returns this node's direct child relationships.
	 * @return the child {@link Relationship} nodes (possibly empty)
	 */
	public Relationship[] getRelationships() {
		Traversable[] traversables = node.getTraversables();
		Relationship[] relations = new Relationship[traversables.length];
		for (int i = 0; i < traversables.length; i++) {
			relations[i] = (Relationship) traversables[i];
		}

		return relations;
	}

	/**
	 * Visits this node and its subtree with the given {@link Traverser}.
	 * @param traverser the {@link Traverser} to apply
	 */
	public void traverse(Traverser traverser) {
		TraverserOperator.operate(traverser, this);
	}

	@Override
	public TraversableNode getSubNode() {
		return node;
	}

	/**
	 * Returns this node's unique ID within the relationship tree.
	 * @return the ID
	 */
	public String getID() {
		return id;
	}

	/**
	 * Looks up a {@link Column} of this table by name.
	 * @param columnName the column name (regularized before lookup)
	 * @return the matching {@link Column}
	 * @throws NotFoundException if no such column exists
	 */
	public Column getColumn(String columnName) {
		Column column = columnMap.get(MetadataUtilities.regularize(columnName));
		if (column == null) throw new NotFoundException(this + " " + columnName + " ");
		return column;
	}

	/**
	 * Returns all columns of this table.
	 * @return a defensive copy of the column array
	 */
	public Column[] getColumns() {
		return columns.clone();
	}

	/**
	 * Returns this table's primary-key columns.
	 * @return a defensive copy of the primary-key column array
	 */
	public Column[] getPrimaryKeyColumns() {
		return primaryKeyColumns.clone();
	}

	/**
	 * Tests whether the given column is part of this table's primary key.
	 * @param column the column to test
	 * @return true if the column belongs to the primary key
	 */
	public boolean belongsPrimaryKey(Column column) {
		for (Column pkColumn : primaryKeyColumns) {
			if (pkColumn.equals(column)) return true;
		}

		return false;
	}

	/**
	 * Finds the child relationship reached via the named foreign key.
	 * @param foreignKeyName the foreign-key constraint name
	 * @return the child {@link Relationship}
	 * @throws NotFoundException if no child matches
	 */
	public Relationship find(String foreignKeyName) {
		Relationship relationship = foreignKeyNameMap.get(MetadataUtilities.regularize(foreignKeyName));
		if (relationship == null) throw new NotFoundException(createErrorMessage(foreignKeyName));
		return relationship;
	}

	/**
	 * Finds the child relationship whose foreign key consists of exactly the
	 * given columns (order-insensitive).
	 * @param foreignKeyColumnNames the foreign-key column names
	 * @return the child {@link Relationship}
	 * @throws NotFoundException if no child matches
	 */
	public Relationship find(String[] foreignKeyColumnNames) {
		String keyId = createForeignKeyId(MetadataUtilities.regularize(foreignKeyColumnNames));

		Relationship relationship = foreignKeyIdMap.get(keyId);
		if (relationship == null) {
			throw new NotFoundException(createErrorMessage(String.join(" ", foreignKeyColumnNames)));
		}

		return relationship;
	}

	/**
	 * Returns the foreign key linking this node to its parent.
	 * @return the {@link CrossReference} to the parent
	 * @throws UnsupportedOperationException if this is the root node
	 */
	public CrossReference getCrossReference() {
		if (isRoot()) throw new UnsupportedOperationException();
		return reference;
	}

	/**
	 * Returns this node's parent.
	 * @return the parent {@link Relationship}
	 * @throws UnsupportedOperationException if this is the root node
	 */
	public Relationship getParent() {
		if (isRoot()) throw new UnsupportedOperationException();
		return parent;
	}

	/**
	 * Tests whether this node is the root of its tree.
	 * @return true if this is the root
	 */
	public boolean isRoot() {
		return root == this;
	}

	/**
	 * Returns the root of this node's tree.
	 * @return the root {@link Relationship}
	 */
	public Relationship getRoot() {
		return root;
	}

	/**
	 * Adds all ancestors of this node to the given collection, ordered from
	 * the root down to the immediate parent.
	 * @param parents the collection to add ancestors to
	 */
	public void addParentTo(Collection<Relationship> parents) {
		if (parent == null) return;
		parent.addParentTo(parents);
		parents.add(parent);
	}

	/**
	 * Returns every node in this tree that represents the given table.
	 * An empty array means the table does not appear in this tree.
	 * @param path the {@link TablePath} to look up
	 * @return the matching {@link Relationship} nodes
	 */
	public Relationship[] convert(TablePath path) {
		Collection<Relationship> list = convertMap.get(path);
		return list.toArray(new Relationship[list.size()]);
	}

	@Override
	public String toString() {
		return path + " " + id;
	}

	private String createErrorMessage(String base) {
		return this + " " + base + " ";
	}

	// Order-insensitive key for a set of FK column names (sorted, comma-joined).
	// Note: sorts the caller's array in place; callers pass throwaway arrays.
	private static String createForeignKeyId(String[] foreignKeyColumnNames) {
		Arrays.sort(foreignKeyColumnNames);
		return String.join(",", foreignKeyColumnNames);
	}
}
/*
 * $Id: AuMetadataRecorder.java,v 1.19 2014-08-29 20:46:09 pgust Exp $
 */

package org.lockss.metadata;

import static org.lockss.db.SqlConstants.*;
import static org.lockss.metadata.MetadataManager.*;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.lockss.app.LockssDaemon;
import org.lockss.db.DbException;
import org.lockss.db.DbManager;
import org.lockss.exporter.counter.CounterReportsManager;
import org.lockss.extractor.MetadataField;
import org.lockss.metadata.ArticleMetadataBuffer.ArticleMetadataInfo;
import org.lockss.plugin.ArchivalUnit;
import org.lockss.plugin.AuUtil;
import org.lockss.plugin.Plugin;
import org.lockss.plugin.PluginManager;
import org.lockss.util.Logger;
import org.lockss.util.MetadataUtil;
import org.lockss.util.StringUtil;
import org.lockss.util.TimeBase;

/**
 * Writes to the database metadata related to an archival unit.
 */
public class AuMetadataRecorder {
  private static Logger log = Logger.getLogger(AuMetadataRecorder.class);

  // Prefix of the synthetic publisher names recorded in the AU problem table
  // when an AU's metadata arrives without a publisher.
  static final String UNKNOWN_PUBLISHER_AU_PROBLEM = "UNKNOWN_PUBLISHER";

  // Query to update the version of an Archival Unit metadata.
  private static final String UPDATE_AU_MD_QUERY = "update " + AU_MD_TABLE
      + " set " + MD_VERSION_COLUMN + " = ?"
      + " where " + AU_MD_SEQ_COLUMN + " = ?";

  // Query to find the name of the type of a metadata item.
  private static final String GET_MD_ITEM_TYPE_NAME_QUERY = "select "
      + "t." + TYPE_NAME_COLUMN
      + " from " + MD_ITEM_TYPE_TABLE + " t"
      + "," + MD_ITEM_TABLE + " m"
      + " where m." + MD_ITEM_SEQ_COLUMN + " = ?"
      + " and m." + MD_ITEM_TYPE_SEQ_COLUMN + " = t." + MD_ITEM_TYPE_SEQ_COLUMN;

  // Query to find the DOIs of a metadata item.
  private static final String FIND_MD_ITEM_DOI_QUERY = "select "
      + DOI_COLUMN
      + " from " + DOI_TABLE
      + " where " + MD_ITEM_SEQ_COLUMN + " = ?";

  // Query to add a bibliographic item.
  private static final String INSERT_BIB_ITEM_QUERY = "insert into "
      + BIB_ITEM_TABLE
      + "(" + MD_ITEM_SEQ_COLUMN
      + "," + VOLUME_COLUMN
      + "," + ISSUE_COLUMN
      + "," + START_PAGE_COLUMN
      + "," + END_PAGE_COLUMN
      + "," + ITEM_NO_COLUMN
      + ") values (?,?,?,?,?,?)";

  // Query to update a bibliographic item.
  private static final String UPDATE_BIB_ITEM_QUERY = "update "
      + BIB_ITEM_TABLE
      + " set " + VOLUME_COLUMN + " = ?"
      + "," + ISSUE_COLUMN + " = ?"
      + "," + START_PAGE_COLUMN + " = ?"
      + "," + END_PAGE_COLUMN + " = ?"
      + "," + ITEM_NO_COLUMN + " = ?"
      + " where " + MD_ITEM_SEQ_COLUMN + " = ?";

  // Query to delete the metadata item of a publication created for an unknown
  // publisher. The correlated subquery restricts the delete to publications
  // whose publisher name carries the synthetic UNKNOWN_PUBLISHER prefix.
  // NOTE(review): not referenced in this portion of the file — presumably used
  // by fixUnknownPublishersAuData(); confirm before removing.
  private static final String
      DELETE_UNKNOWN_PUBLISHER_PUBLICATION_MD_ITEM_QUERY = "delete from "
      + MD_ITEM_TABLE
      + " where " + MD_ITEM_SEQ_COLUMN + " in ("
      + "select " + MD_ITEM_SEQ_COLUMN
      + " from " + PUBLICATION_TABLE
      + " where " + PUBLICATION_SEQ_COLUMN + " = ?"
      + " and (select count(*) from " + PUBLISHER_TABLE + " pr"
      + "," + PUBLICATION_TABLE + " p"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = p." + PUBLISHER_SEQ_COLUMN
      + " and p." + PUBLICATION_SEQ_COLUMN + " = ?"
      + " and pr." + PUBLISHER_NAME_COLUMN + " like '"
      + UNKNOWN_PUBLISHER_AU_PROBLEM + "%') = 1)";

  // Query to delete a publication created for an unknown publisher.
  // NOTE(review): not referenced in this portion of the file — presumably used
  // by fixUnknownPublishersAuData(); confirm before removing.
  private static final String DELETE_UNKNOWN_PUBLISHER_PUBLICATION_QUERY =
      "delete from " + PUBLICATION_TABLE
      + " where " + PUBLICATION_SEQ_COLUMN + " = ?"
      + " and (select count(*) from " + PUBLISHER_TABLE + " pr"
      + "," + PUBLICATION_TABLE + " p"
      + " where pr." + PUBLISHER_SEQ_COLUMN + " = p." + PUBLISHER_SEQ_COLUMN
      + " and p." + PUBLICATION_SEQ_COLUMN + " = ?"
      + " and pr." + PUBLISHER_NAME_COLUMN + " like '"
      + UNKNOWN_PUBLISHER_AU_PROBLEM + "%') = 1";

  // Query to delete an unknown publisher.
  // NOTE(review): not referenced in this portion of the file — presumably used
  // by fixUnknownPublishersAuData(); confirm before removing.
  private static final String DELETE_UNKNOWN_PUBLISHER_QUERY = "delete from "
      + PUBLISHER_TABLE
      + " where " + PUBLISHER_NAME_COLUMN + " = ?"
      + " and " + PUBLISHER_NAME_COLUMN + " like '"
      + UNKNOWN_PUBLISHER_AU_PROBLEM + "%'";

  // The calling task.
  private final ReindexingTask task;

  // The metadata manager.
  private final MetadataManager mdManager;

  // The database manager.
  private final DbManager dbManager;

  // The archival unit.
  private final ArchivalUnit au;

  // AU-related properties independent of the database.
  private final Plugin plugin;
  private final String platform;
  private final int pluginVersion;
  private final String auId;
  private final String auKey;
  private final String pluginId;
  private final boolean isBulkContent;

  // Database identifiers related to the AU, resolved lazily by
  // storeMetadata() and cached across articles of the same AU.
  private Long publisherSeq = null;
  private Long publicationSeq = null;
  private Long pluginSeq = null;
  private Long auSeq = null;
  private Long auMdSeq = null;
  private Long parentSeq = null;

  // Properties used to take shortcuts in processing: they remember the last
  // publication seen so isSamePublication() can skip redundant lookups.
  private String seriesTitle = null;
  private String proprietarySeriesId = null;
  private String publicationTitle = null;
  private String publicationType = null;
  private String pIsbn = null;
  private String eIsbn = null;
  private String pIssn = null;
  private String eIssn = null;
  private String proprietaryId = null;
  private String volume = null;
  private String parentMdItemType = null;
  // True when the AU metadata row was created during this run.
  private boolean newAu = false;
  // Publisher name in use; may be a synthetic UNKNOWN_PUBLISHER name.
  private String publisherName;

  /**
   * Constructor.
   *
   * @param task A ReindexingTask with the calling task.
   * @param mdManager A MetadataManager with the metadata manager.
   * @param au An ArchivalUnit with the archival unit.
   */
  public AuMetadataRecorder(ReindexingTask task, MetadataManager mdManager,
      ArchivalUnit au) {
    this.task = task;
    this.mdManager = mdManager;
    // The DbManager is obtained from the metadata manager, not injected.
    dbManager = mdManager.getDbManager();
    this.au = au;

    // Cache AU-derived properties that do not depend on the database.
    plugin = au.getPlugin();
    isBulkContent = plugin.isBulkContent();
    platform = plugin.getPublishingPlatform();
    pluginVersion = mdManager.getPluginMetadataVersionNumber(plugin);
    auId = au.getAuId();
    auKey = PluginManager.auKeyFromAuId(auId);
    pluginId = PluginManager.pluginIdFromAuId(auId);
  }

  /**
   * Writes to the database metadata related to an archival unit.
   *
   * @param conn
   *          A Connection with the database connection to be used.
   * @param mditr
   *          An Iterator<ArticleMetadataInfo> with the metadata.
   * @throws MetadataException
   *           if any problem is detected with the passed metadata.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  public void recordMetadata(Connection conn,
      Iterator<ArticleMetadataInfo> mditr)
	  throws MetadataException, DbException {
    final String DEBUG_HEADER = "recordMetadata(): ";

    // Loop through the metadata for each article.
    while (mditr.hasNext()) {
      // Keep the reindexing task's watchdog alive during long AUs.
      task.pokeWDog();

      // Normalize all the metadata fields.
      ArticleMetadataInfo normalizedMdInfo = normalizeMetadata(mditr.next());

      // Store the metadata fields in the database.
      storeMetadata(conn, normalizedMdInfo);

      // Count the processed article.
      task.incrementUpdatedArticleCount();
      log.debug3(DEBUG_HEADER + "updatedArticleCount = "
	  + task.getUpdatedArticleCount());
    }

    // auMdSeq is populated as a side effect of storeMetadata().
    if (auMdSeq != null) {
      // Update the AU last extraction timestamp.
      mdManager.updateAuLastExtractionTime(au, conn, auMdSeq);
    } else {
      log.warning("auMdSeq is null for auid = '" + au.getAuId() + "'.");
    }

    // Find the list of previous problems indexing this Archival Unit.
    List<String> problems = mdManager.findAuProblems(conn, auId);
    log.debug3(DEBUG_HEADER + "problems.size() = " + problems.size());

    // Check whether the publisher name used is a synthetic name.
if (publisherName.startsWith(UNKNOWN_PUBLISHER_AU_PROBLEM)) { // Yes: Check whether an entry in the AU problem table does not exist. if (!problems.contains(publisherName)) { // Yes: Add an unknown publisher entry to the AU problem table. mdManager.addAuProblem(conn, auId, publisherName); } } else { // No: Check whether there is data obtained when the publisher was unknown // that needs to be merged. if (problems.size() > 0) { // Yes: Merge it. fixUnknownPublishersAuData(conn, problems); } } } /** * Normalizes metadata info fields. * * @param mdinfo * the ArticleMetadataInfo * @return an ArticleMetadataInfo with the normalized properties. */ private ArticleMetadataInfo normalizeMetadata(ArticleMetadataInfo mdinfo) { if (mdinfo.accessUrl != null) { String accessUrl = mdinfo.accessUrl.trim(); if (accessUrl.length() > MAX_URL_COLUMN) { log.warning("accessUrl too long '" + mdinfo.accessUrl + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.accessUrl = DbManager.truncateVarchar(accessUrl, MAX_URL_COLUMN); } else { mdinfo.accessUrl = accessUrl; } } // strip punctuation and ensure values are proper ISBN or ISSN lengths mdinfo.isbn = MetadataUtil.toUnpunctuatedIsbn(mdinfo.isbn); mdinfo.eisbn = MetadataUtil.toUnpunctuatedIsbn(mdinfo.eisbn); mdinfo.issn = MetadataUtil.toUnpunctuatedIssn(mdinfo.issn); mdinfo.eissn = MetadataUtil.toUnpunctuatedIssn(mdinfo.eissn); if (mdinfo.doi != null) { String doi = mdinfo.doi.trim(); if (StringUtil.startsWithIgnoreCase(doi, "doi:")) { doi = doi.substring("doi:".length()); log.debug3("doi = '" + doi + "'."); } if (doi.length() > MAX_DOI_COLUMN) { log.warning("doi too long '" + mdinfo.doi + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.doi = DbManager.truncateVarchar(doi, MAX_DOI_COLUMN); } else { mdinfo.doi = doi; } } if (mdinfo.pubDate != null) { String pubDate = mdinfo.pubDate.trim(); if (pubDate.length() > MAX_DATE_COLUMN) { log.warning("pubDate 
too long '" + mdinfo.pubDate + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.pubDate = DbManager.truncateVarchar(pubDate, MAX_DATE_COLUMN); } else { mdinfo.pubDate = pubDate; } } if (mdinfo.volume != null) { String volume = mdinfo.volume.trim(); if (volume.length() > MAX_VOLUME_COLUMN) { log.warning("volume too long '" + mdinfo.volume + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.volume = DbManager.truncateVarchar(volume, MAX_VOLUME_COLUMN); } else { mdinfo.volume = volume; } } if (mdinfo.issue != null) { String issue = mdinfo.issue.trim(); if (issue.length() > MAX_ISSUE_COLUMN) { log.warning("issue too long '" + mdinfo.issue + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.issue = DbManager.truncateVarchar(issue, MAX_ISSUE_COLUMN); } else { mdinfo.issue = issue; } } if (mdinfo.startPage != null) { String startPage = mdinfo.startPage.trim(); if (startPage.length() > MAX_START_PAGE_COLUMN) { log.warning("startPage too long '" + mdinfo.startPage + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.startPage = DbManager.truncateVarchar(startPage, MAX_START_PAGE_COLUMN); } else { mdinfo.startPage = startPage; } } if (mdinfo.articleTitle != null) { String name = mdinfo.articleTitle.trim(); if (name.length() > MAX_NAME_COLUMN) { log.warning("article title too long '" + mdinfo.articleTitle + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.articleTitle = DbManager.truncateVarchar(name, MAX_NAME_COLUMN); } else { mdinfo.articleTitle = name; } } if (mdinfo.publisher != null) { String name = mdinfo.publisher.trim(); if (name.length() > MAX_NAME_COLUMN) { log.warning("publisher too long '" + mdinfo.publisher + "' for title: '" + mdinfo.publicationTitle + "'"); mdinfo.publisher = DbManager.truncateVarchar(name, MAX_NAME_COLUMN); } else { 
mdinfo.publisher = name; } } if (mdinfo.seriesTitle != null) { String name = mdinfo.seriesTitle.trim(); if (name.length() > MAX_NAME_COLUMN) { log.warning("series title too long '" + mdinfo.seriesTitle + "' for publisher: " + mdinfo.publisher + "'"); mdinfo.seriesTitle = DbManager.truncateVarchar(name, MAX_NAME_COLUMN); } else { mdinfo.seriesTitle = name; } } if (mdinfo.publicationTitle != null) { String name = mdinfo.publicationTitle.trim(); if (name.length() > MAX_NAME_COLUMN) { log.warning("journal title too long '" + mdinfo.publicationTitle + "' for publisher: " + mdinfo.publisher + "'"); mdinfo.publicationTitle = DbManager.truncateVarchar(name, MAX_NAME_COLUMN); } else { mdinfo.publicationTitle = name; } } if (mdinfo.authors != null) { List<String> authors = new ArrayList<String>(); for (String author : mdinfo.authors) { String name = author.trim(); if (name.length() > MAX_AUTHOR_COLUMN) { log.warning("author too long '" + author + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); authors.add(DbManager.truncateVarchar(name, MAX_AUTHOR_COLUMN)); } else { authors.add(name); } } mdinfo.authors = authors; } if (mdinfo.keywords != null) { List<String> keywords = new ArrayList<String>(); for (String keyword : mdinfo.keywords) { String name = keyword.trim(); if (name.length() > MAX_KEYWORD_COLUMN) { log.warning("keyword too long '" + keyword + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); keywords.add(DbManager.truncateVarchar(name, MAX_KEYWORD_COLUMN)); } else { keywords.add(name); } } mdinfo.keywords = keywords; } if (mdinfo.featuredUrlMap != null) { Map<String, String> featuredUrls = new HashMap<String, String>(); for (String key : mdinfo.featuredUrlMap.keySet()) { String url = mdinfo.featuredUrlMap.get(key).trim(); if (url.length() > MAX_URL_COLUMN) { log.warning("URL too long '" + mdinfo.featuredUrlMap.get(key) + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + 
mdinfo.publisher + "'"); featuredUrls.put(key, DbManager.truncateVarchar(url, MAX_URL_COLUMN)); } else { featuredUrls.put(key, url); } } mdinfo.featuredUrlMap = featuredUrls; } if (mdinfo.endPage != null) { String endPage = mdinfo.endPage.trim(); if (endPage.length() > MAX_END_PAGE_COLUMN) { log.warning("endPage too long '" + mdinfo.endPage + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.endPage = DbManager.truncateVarchar(endPage, MAX_END_PAGE_COLUMN); } else { mdinfo.endPage = endPage; } } if (mdinfo.coverage != null) { String coverage = mdinfo.coverage.trim(); if (coverage.length() > MAX_COVERAGE_COLUMN) { log.warning("coverage too long '" + mdinfo.coverage + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.coverage = DbManager.truncateVarchar(coverage, MAX_COVERAGE_COLUMN); } else { mdinfo.coverage = coverage; } } else { mdinfo.coverage = "fulltext"; } if (mdinfo.itemNumber != null) { String itemNumber = mdinfo.itemNumber.trim(); if (itemNumber.length() > MAX_ITEM_NO_COLUMN) { log.warning("itemNumber too long '" + mdinfo.itemNumber + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.itemNumber = DbManager.truncateVarchar(mdinfo.itemNumber, MAX_ITEM_NO_COLUMN); } else { mdinfo.itemNumber = itemNumber; } } if (mdinfo.proprietaryIdentifier != null) { String name = mdinfo.proprietaryIdentifier.trim(); if (name.length() > MAX_PUBLICATION_ID_COLUMN) { log.warning("proprietaryIdentifier too long '" + mdinfo.proprietaryIdentifier + "' for title: '" + mdinfo.publicationTitle + "' publisher: " + mdinfo.publisher + "'"); mdinfo.proprietaryIdentifier = DbManager.truncateVarchar(name, MAX_PUBLICATION_ID_COLUMN); } else { mdinfo.proprietaryIdentifier = name; } } if (mdinfo.proprietarySeriesIdentifier != null) { String name = mdinfo.proprietarySeriesIdentifier.trim(); if (name.length() > MAX_PUBLICATION_ID_COLUMN) { 
	log.warning("proprietarySeriesIdentifier too long '"
	    + mdinfo.proprietarySeriesIdentifier + "' for series title: '"
	    + mdinfo.seriesTitle + "' publisher: " + mdinfo.publisher + "'");
	mdinfo.proprietarySeriesIdentifier = DbManager.truncateVarchar(name,
	    MAX_PUBLICATION_ID_COLUMN);
      } else {
	mdinfo.proprietarySeriesIdentifier = name;
      }
    }

    return mdinfo;
  }

  /**
   * Replace gensym (synthesized) metadata title with new title.
   *
   * @param conn
   *          A Connection with the connection to the database
   * @param mdSequence
   *          The md_info record index.
   * @param unknownRoot
   *          The unknown root prefix
   * @param title
   *          The replacement title
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  private void replaceUnknownMdTitle(Connection conn, Long mdSequence,
      String unknownRoot, String title) throws DbException {
    // NOTE(review): debug header does not match the method name — confirm
    // whether it should read "replaceUnknownMdTitle(): ".
    final String DEBUG_HEADER = "replaceGenSym(): ";

    // Find the publication names.
    Map<String, String> names = mdManager.getMdItemNames(conn, mdSequence);
    if (log.isDebug3())
      log.debug3(DEBUG_HEADER + "names.size() = " + names.size());

    // Loop through each publication name (key = name, value = name type).
    for (Map.Entry<String, String> entry : names.entrySet()) {
      if (log.isDebug3()) log.debug3(DEBUG_HEADER + "entry = " + entry);

      // Check whether this is the primary name.
      if (entry.getValue().equals(PRIMARY_NAME_TYPE)) {
	// Yes: Check whether the name has been synthesized.
	if (title.startsWith(unknownRoot)) {
	  // Yes: Check whether this is not a synthesized name.
	  if (!entry.getKey().startsWith(unknownRoot)) {
	    // Yes: Remove any synthesized names.
	    mdManager.removeNotPrimarySynthesizedMdItemNames(conn,
		mdSequence);

	    // Use the primary name instead of the synthesized name.
	    publicationTitle = entry.getKey();
	  }
	} else {
	  // No: Check whether this is a synthesized name.
	  if (entry.getKey().startsWith(unknownRoot)) {
	    // Yes: Update the synthesized primary name with the current one.
	    mdManager.updatePrimarySynthesizedMdItemName(conn, mdSequence,
		title);

	    // Remove the previously entered non-primary name for this
	    // publication.
	    mdManager.removeNotPrimaryMdItemName(conn, mdSequence, title);
	  }
	}

	// Only the primary name is of interest.
	break;
      }
    }
  }

  /**
   * Stores in the database metadata for the Archival Unit.
   *
   * @param conn
   *          A Connection with the connection to the database
   * @param mdinfo
   *          An ArticleMetadataInfo providing the metadata.
   * @throws MetadataException
   *           if any problem is detected with the passed metadata.
   * @throws DbException
   *           if any problem occurred accessing the database.
   */
  private void storeMetadata(Connection conn, ArticleMetadataInfo mdinfo)
      throws MetadataException, DbException {
    final String DEBUG_HEADER = "storeMetadata(): ";
    if (log.isDebug3()) {
      log.debug3(DEBUG_HEADER + "Starting: auId = " + auId);
      log.debug3(DEBUG_HEADER + "auKey = " + auKey);
      log.debug3(DEBUG_HEADER + "auMdSeq = " + auMdSeq);
      log.debug3(DEBUG_HEADER + "mdinfo.articleTitle = "
	  + mdinfo.articleTitle);
    }

    // Check whether the publisher has not been located in the database.
    if (publisherSeq == null) {
      // Yes: Get the publisher received in the metadata.
      publisherName = mdinfo.publisher;
      log.debug3(DEBUG_HEADER + "publisherName = " + publisherName);

      // Check whether the publisher is in the metadata.
      if (publisherName != null) {
	// Yes: Find the publisher or create it.
	publisherSeq = mdManager.findOrCreatePublisher(conn, publisherName);
	log.debug3(DEBUG_HEADER + "publisherSeq = " + publisherSeq);
      } else {
	// No: Find the AU in the database.
	auSeq = mdManager.findAuByAuId(conn, auId);
	log.debug3(DEBUG_HEADER + "auSeq = " + auSeq);

	// Check whether the AU was found.
	if (auSeq != null) {
	  // Yes: Get the publisher of the AU.
	  publisherSeq = mdManager.findAuPublisher(conn, auSeq);
	  log.debug3(DEBUG_HEADER + "publisherSeq = " + publisherSeq);

	  // Check whether the AU publisher was found.
	  if (publisherSeq != null) {
	    // Yes: Get its name.
	    publisherName = mdManager.getPublisherName(conn, publisherSeq);
	    log.debug3(DEBUG_HEADER + "publisherName = " + publisherName);
	  } else {
	    // No: Report the problem.
	    log.error("Null publisherSeq for auSeq = " + auSeq);
	    log.error("auId = " + auId);
	    log.error("auKey = " + auKey);
	    log.error("auMdSeq = " + auMdSeq);
	    log.error("auSeq = " + auSeq);
	    throw new MetadataException("Null publisherSeq for auSeq = "
		+ auSeq, mdinfo);
	  }
	} else {
	  // No: Loop through all outstanding previous problems for this AU.
	  for (String problem : mdManager.findAuProblems(conn, auId)) {
	    // Check whether there is an unknown publisher already for this
	    // AU.
	    if (problem.startsWith(UNKNOWN_PUBLISHER_AU_PROBLEM)) {
	      // Yes: Get the corresponding publisher identifier.
	      publisherSeq = mdManager.findPublisher(conn, problem);
	      log.debug3(DEBUG_HEADER + "publisherSeq = " + publisherSeq);

	      // Check whether the publisher exists.
	      if (publisherSeq != null) {
		// Yes: Use it.
		publisherName = problem;
		break;
	      } else {
		// No: Remove the obsolete problem.
		mdManager.removeAuProblem(conn, auId, problem);
	      }
	    }
	  }

	  // Check whether no previous unknown publisher for this AU exists.
	  if (publisherName == null) {
	    // Yes: Create a synthetic publisher name to be able to process
	    // the Archival Unit.
	    publisherName = UNKNOWN_PUBLISHER_AU_PROBLEM + TimeBase.nowMs();
	    log.debug3(DEBUG_HEADER + "publisherName = " + publisherName);

	    // Create the publisher.
	    publisherSeq = mdManager.addPublisher(conn, publisherName);
	    log.debug3(DEBUG_HEADER + "publisherSeq = " + publisherSeq);
	  }
	}
      }
    }

    // Check whether this is a new publication (different from the cached one
    // processed for the previous article).
    if (publicationSeq == null || !isSamePublication(mdinfo)) {
      // Yes.
      log.debug3(DEBUG_HEADER + "is new publication.");

      // Get the publication type in the metadata
      publicationType = mdinfo.publicationType;
      log.debug3(DEBUG_HEADER + "publicationType = " + publicationType);

      // Get the journal title received in the metadata.
      publicationTitle = mdinfo.publicationTitle;
      log.debug3(DEBUG_HEADER + "publicationTitle = " + publicationTitle);

      // Check whether no name was received in the metadata.
      if (StringUtil.isNullString(publicationTitle)) {
	// Yes: Synthesize a name.
	String defaultId = Long.toString(TimeBase.nowMs());
	publicationTitle = synthesizePublicationTitle(mdinfo, defaultId);
      }

      // Check whether the publication is part of a book series.
      if (MetadataField.PUBLICATION_TYPE_BOOKSERIES.equals(publicationType)) {
	// Get the series title received in the metadata
	seriesTitle = mdinfo.seriesTitle;
	log.debug3(DEBUG_HEADER + "seriesTitle = " + seriesTitle);

	// Check whether no series title was received in the metadata.
	if (StringUtil.isNullString(seriesTitle)) {
	  // Yes: Synthesize a name.
	  seriesTitle = synthesizeSeriesTitle(mdinfo, publicationTitle);
	}

	proprietarySeriesId = mdinfo.proprietarySeriesIdentifier;
	log.debug3(DEBUG_HEADER + "proprietarySeriesId = "
	    + proprietarySeriesId);
      }

      // Get any ISBN values received in the metadata.
      pIsbn = mdinfo.isbn;
      log.debug3(DEBUG_HEADER + "pIsbn = " + pIsbn);

      eIsbn = mdinfo.eisbn;
      log.debug3(DEBUG_HEADER + "eIsbn = " + eIsbn);

      // Get any ISSN values received in the metadata.
      pIssn = mdinfo.issn;
      log.debug3(DEBUG_HEADER + "pIssn = " + pIssn);

      eIssn = mdinfo.eissn;
      log.debug3(DEBUG_HEADER + "eIssn = " + eIssn);

      proprietaryId = mdinfo.proprietaryIdentifier;
      log.debug3(DEBUG_HEADER + "proprietaryId = " + proprietaryId);

      // Get the volume received in the metadata.
      volume = mdinfo.volume;
      log.debug3(DEBUG_HEADER + "volume = " + volume);

      // Get the publication to which this metadata belongs.
      publicationSeq = mdManager.findOrCreatePublication(conn, publisherSeq,
	  pIssn, eIssn, pIsbn, eIsbn, publicationType, seriesTitle,
	  proprietarySeriesId, publicationTitle, proprietaryId);
      log.debug3(DEBUG_HEADER + "publicationSeq = " + publicationSeq);

      // Get the identifier of the parent, which is the publication metadata
      // item.
      parentSeq = mdManager.findPublicationMetadataItem(conn, publicationSeq);
      log.debug3(DEBUG_HEADER + "parentSeq = " + parentSeq);

      // replace any unknown titles with this publication title
      replaceUnknownMdTitle(
	  conn, parentSeq, UNKNOWN_TITLE_NAME_ROOT, publicationTitle);

      // Get the type of the parent.
      parentMdItemType = getMdItemTypeName(conn, parentSeq);
      log.debug3(DEBUG_HEADER + "parentMdItemType = " + parentMdItemType);

      // replace any unknown series titles with this series title
      if (MetadataField.PUBLICATION_TYPE_BOOKSERIES.equals(publicationType)
	  && !StringUtil.isNullString(seriesTitle)) {
	Long seriesPublicationSeq =
	    mdManager.findBookSeries(conn, publisherSeq,
		pIssn, eIssn, seriesTitle);
	log.debug3(DEBUG_HEADER + "seriesPublicationSeq = "
	    + seriesPublicationSeq);
	if (seriesPublicationSeq != null) {
	  Long seriesSeq =
	      mdManager.findPublicationMetadataItem(conn,
		  seriesPublicationSeq);
	  log.debug3(DEBUG_HEADER + "seriesMdSeq = " + seriesSeq);
	  if (seriesSeq != null) {
	    replaceUnknownMdTitle(
		conn, seriesSeq, UNKNOWN_SERIES_NAME_ROOT, seriesTitle);
	  }
	}
      }
    }

    // Skip it if the publication could not be found or created.
    if (publicationSeq == null || parentSeq == null
	|| parentMdItemType == null) {
      log.debug3(DEBUG_HEADER
	  + "Done: publicationSeq or parentSeq or parentMdItemType is null.");
      return;
    }

    // Check whether the plugin has not been located in the database.
    if (pluginSeq == null) {
      // Yes: Find the publishing platform or create it.
      Long platformSeq = mdManager.findOrCreatePlatform(conn, platform);
      log.debug3(DEBUG_HEADER + "platformSeq = " + platformSeq);

      // Find the plugin or create it.
      pluginSeq = mdManager.findOrCreatePlugin(conn, pluginId, platformSeq,
	  isBulkContent);
      log.debug3(DEBUG_HEADER + "pluginSeq = " + pluginSeq);

      // Skip it if the plugin could not be found or created.
      if (pluginSeq == null) {
	log.debug3(DEBUG_HEADER + "Done: pluginSeq is null.");
	return;
      }
    }

    // Check whether the Archival Unit has not been located in the database.
    if (auSeq == null) {
      // Yes: Find it or create it.
      auSeq = mdManager.findOrCreateAu(conn, pluginSeq, auKey);
      log.debug3(DEBUG_HEADER + "auSeq = " + auSeq);

      // Skip it if the Archival Unit could not be found or created.
      if (auSeq == null) {
	log.debug3(DEBUG_HEADER + "Done: auSeq is null.");
	return;
      }
    }

    // Check whether the Archival Unit metadata has not been located in the
    // database.
    if (auMdSeq == null) {
      // Yes: Find the Archival Unit metadata in the database.
      auMdSeq = mdManager.findAuMd(conn, auSeq);
      log.debug3(DEBUG_HEADER + "new auMdSeq = " + auMdSeq);
    }

    // Check whether it is a new Archival Unit metadata.
    if (auMdSeq == null) {
      long creationTime = 0;

      // Check whether it is possible to obtain the Archival Unit creation
      // time.
      if (au != null && AuUtil.getAuState(au) != null) {
	// Yes: Get it.
	creationTime = AuUtil.getAuCreationTime(au);
      }

      // Add to the database the new Archival Unit metadata.
      auMdSeq = mdManager.addAuMd(conn, auSeq, pluginVersion,
	  NEVER_EXTRACTED_EXTRACTION_TIME, creationTime);
      log.debug3(DEBUG_HEADER + "new auSeq = " + auMdSeq);

      // Skip it if the new Archival Unit metadata could not be created.
      if (auMdSeq == null) {
	log.debug3(DEBUG_HEADER + "Done: auMdSeq is null.");
	return;
      }

      // Remember that the AU metadata row was created in this run; item
      // creation below can then skip the existence lookup.
      newAu = true;
    } else {
      // No: Update the Archival Unit metadata ancillary data.
      updateAuMd(conn, auMdSeq, pluginVersion);
      log.debug3(DEBUG_HEADER + "updated AU.");
    }

    // Update or create the metadata item.
    updateOrCreateMdItem(conn, mdinfo);
    log.debug3(DEBUG_HEADER + "Done.");
  }

  /**
   * Creates a synthetic publication title using the available metadata.
   *
   * @param mdinfo
   *          An ArticleMetadataInfo providing the metadata.
   * @param defaultId
   *          A default id for the publication title
   * @return a String with the synthetic publication title.
   */
  private String synthesizePublicationTitle(
      ArticleMetadataInfo mdinfo, String defaultId) {
    final String DEBUG_HEADER = "synthesizePublicationTitle(): ";
    String result = null;

    // Check whether the metadata included the ISBN.
    if (!StringUtil.isNullString(mdinfo.isbn)) {
      // Yes: Use it.
      result = UNKNOWN_TITLE_NAME_ROOT + "/isbn=" + mdinfo.isbn;
      // No: Check whether the metadata included the eISBN.
    } else if (!StringUtil.isNullString(mdinfo.eisbn)) {
      // Yes: Use it.
      result = UNKNOWN_TITLE_NAME_ROOT + "/eisbn=" + mdinfo.eisbn;
      // No: Check whether the metadata included the ISSN.
    } else if (!StringUtil.isNullString(mdinfo.issn)) {
      // Yes: Use it.
      result = UNKNOWN_TITLE_NAME_ROOT + "/issn=" + mdinfo.issn;
      // No: Check whether the metadata included the eISSN.
    } else if (!StringUtil.isNullString(mdinfo.eissn)) {
      // Yes: Use it.
      result = UNKNOWN_TITLE_NAME_ROOT + "/eissn=" + mdinfo.eissn;
      // No: Check whether the metadata included a proprietary identifier.
    } else if (!StringUtil.isNullString(mdinfo.proprietaryIdentifier)) {
      // Yes: Use it.
      result = UNKNOWN_TITLE_NAME_ROOT + "/journalId="
	  + mdinfo.proprietaryIdentifier;
    } else {
      // No: Generate a random name.
      result = UNKNOWN_TITLE_NAME_ROOT + "/id=" + defaultId;
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "result = " + result);
    return result;
  }

  /**
   * Creates a synthetic book series title using the available metadata.
   *
   * @param mdinfo
   *          An ArticleMetadataInfo providing the metadata.
   * @param defaultId
   *          A default id for generating the series title
   * @return a String with the synthetic series title.
   */
  private String synthesizeSeriesTitle(
      ArticleMetadataInfo mdinfo, String defaultId) {
    final String DEBUG_HEADER = "synthesizeSeriesTitle(): ";
    String result = null;

    // Check whether the metadata included the ISSN.
    if (!StringUtil.isNullString(mdinfo.issn)) {
      // Yes: Use it.
      result = UNKNOWN_SERIES_NAME_ROOT + "/issn=" + mdinfo.issn;
      // No: Check whether the metadata included the eISSN.
    } else if (!StringUtil.isNullString(mdinfo.eissn)) {
      // Yes: Use it.
      result = UNKNOWN_SERIES_NAME_ROOT + "/eissn=" + mdinfo.eissn;
      // No: Check whether the metadata included a proprietary series
      // identifier.
    } else if (!StringUtil.isNullString(mdinfo.proprietarySeriesIdentifier)) {
      // Yes: Use it.
      result = UNKNOWN_SERIES_NAME_ROOT + "/seriesId="
	  + mdinfo.proprietarySeriesIdentifier;
    } else {
      // No: Generate a random name.
result = UNKNOWN_SERIES_NAME_ROOT + "/id=" + defaultId; } if (log.isDebug2()) log.debug2(DEBUG_HEADER + "result = " + result); return result; } /** * Updates the metadata version an Archival Unit in the database. * * @param conn * A Connection with the connection to the database * @param auMdSeq * A Long with the identifier of the archival unit metadata. * @param version * A String with the archival unit metadata version. * @throws DbException * if any problem occurred accessing the database. */ private void updateAuMd(Connection conn, Long auMdSeq, int version) throws DbException { final String DEBUG_HEADER = "updateAuMd(): "; try { PreparedStatement updateAu = dbManager.prepareStatement(conn, UPDATE_AU_MD_QUERY); try { updateAu.setShort(1, (short) version); updateAu.setLong(2, auMdSeq); int count = dbManager.executeUpdate(updateAu); if (log.isDebug3()) { log.debug3(DEBUG_HEADER + "count = " + count); log.debug3(DEBUG_HEADER + "Updated auMdSeq = " + auMdSeq); } } finally { updateAu.close(); } } catch (SQLException sqle) { throw new DbException("Cannot update AU metadata version", sqle); } } /** * Updates a metadata item if it exists in the database, otherwise it creates * it. * * @param conn * A Connection with the connection to the database * @param mdinfo * An ArticleMetadataInfo providing the metadata. * @throws DbException * if any problem occurred accessing the database. */ private void updateOrCreateMdItem(Connection conn, ArticleMetadataInfo mdinfo) throws DbException { final String DEBUG_HEADER = "updateOrCreateMdItem(): "; // Get the publication date received in the metadata. String date = mdinfo.pubDate; log.debug3(DEBUG_HEADER + "date = " + date); // Get the issue received in the metadata. String issue = mdinfo.issue; log.debug3(DEBUG_HEADER + "issue = " + issue); // Get the start page received in the metadata. String startPage = mdinfo.startPage; log.debug3(DEBUG_HEADER + "startPage = " + startPage); // Get the end page received in the metadata. 
String endPage = mdinfo.endPage; log.debug3(DEBUG_HEADER + "endPage = " + endPage); // Get the item number received in the metadata. String itemNo = mdinfo.itemNumber; log.debug3(DEBUG_HEADER + "itemNo = " + itemNo); // Get the item title received in the metadata. String itemTitle = mdinfo.articleTitle; log.debug3(DEBUG_HEADER + "itemTitle = " + itemTitle); // Get the coverage received in the metadata. String coverage = mdinfo.coverage; log.debug3(DEBUG_HEADER + "coverage = " + coverage); // Get the DOI received in the metadata. String doi = mdinfo.doi; log.debug3(DEBUG_HEADER + "doi = " + doi); // Get the featured URLs received in the metadata. Map<String, String> featuredUrlMap = mdinfo.featuredUrlMap; if (log.isDebug3()) { for (String feature : featuredUrlMap.keySet()) { log.debug3(DEBUG_HEADER + "feature = " + feature + ", URL = " + featuredUrlMap.get(feature)); } } // Get the earliest fetch time of the metadata items URLs. long fetchTime = -1; try { fetchTime = Long.valueOf(mdinfo.fetchTime).longValue(); } catch (NumberFormatException nfe) { if (log.isDebug()) log.debug("Unparsable fetch time '" + mdinfo.fetchTime + "'"); } if (log.isDebug3()) log.debug3(DEBUG_HEADER + "fetchTime = " + fetchTime); // Get the access URL received in the metadata. String accessUrl = mdinfo.accessUrl; log.debug3(DEBUG_HEADER + "accessUrl = " + accessUrl); // Determine what type of a metadata item it is. String mdItemType = mdinfo.articleType; if (StringUtil.isNullString(mdItemType)) { // Skip it if the parent type is not a book or journal. log.error(DEBUG_HEADER + "Unknown parentMdItemType = " + parentMdItemType); return; } log.debug3(DEBUG_HEADER + "mdItemType = " + mdItemType); // Find the metadata item type record sequence. 
    Long mdItemTypeSeq = mdManager.findMetadataItemType(conn, mdItemType);
    log.debug3(DEBUG_HEADER + "mdItemTypeSeq = " + mdItemTypeSeq);

    // sanity check -- type should be known in database
    if (mdItemTypeSeq == null) {
      log.error(DEBUG_HEADER + "Unknown articleType = " + mdItemType);
      return;
    }

    Long mdItemSeq = null;
    boolean newMdItem = false;

    // Check whether it is a metadata item for a new Archival Unit; if so the
    // item cannot already exist and the lookup is skipped.
    if (newAu) {
      // Yes: Create the new metadata item in the database.
      mdItemSeq = mdManager.addMdItem(conn, parentSeq, mdItemTypeSeq,
	  auMdSeq, date, coverage, fetchTime);
      log.debug3(DEBUG_HEADER + "new mdItemSeq = " + mdItemSeq);

      mdManager.addMdItemName(conn, mdItemSeq, itemTitle, PRIMARY_NAME_TYPE);
      newMdItem = true;
    } else {
      // No: Find the metadata item in the database (keyed by its access
      // URL).
      mdItemSeq = mdManager.findMdItem(conn, mdItemTypeSeq, auMdSeq,
	  accessUrl);
      log.debug3(DEBUG_HEADER + "mdItemSeq = " + mdItemSeq);

      // Check whether it is a new metadata item.
      if (mdItemSeq == null) {
	// Yes: Create it.
	mdItemSeq = mdManager.addMdItem(conn, parentSeq, mdItemTypeSeq,
	    auMdSeq, date, coverage, fetchTime);
	log.debug3(DEBUG_HEADER + "new mdItemSeq = " + mdItemSeq);

	mdManager.addMdItemName(conn, mdItemSeq, itemTitle,
	    PRIMARY_NAME_TYPE);
	newMdItem = true;
      }
    }
    log.debug3(DEBUG_HEADER + "newMdItem = " + newMdItem);

    // Get the volume received in the metadata.
    String volume = mdinfo.volume;
    log.debug3(DEBUG_HEADER + "volume = " + volume);

    // Get the authors received in the metadata.
    Collection<String> authors = mdinfo.authors;
    log.debug3(DEBUG_HEADER + "authors = " + authors);

    // Get the keywords received in the metadata.
    Collection<String> keywords = mdinfo.keywords;
    log.debug3(DEBUG_HEADER + "keywords = " + keywords);

    // Check whether it is a new metadata item.
    if (newMdItem) {
      // Yes: Add the bibliographic data.
      int addedCount = addBibItem(conn, mdItemSeq, volume, issue, startPage,
	  endPage, itemNo);
      log.debug3(DEBUG_HEADER + "addedCount = " + addedCount);

      // Add the item URLs.
      mdManager.addMdItemUrls(conn, mdItemSeq, accessUrl, featuredUrlMap);
      log.debug3(DEBUG_HEADER + "added AUItem URL.");

      // Add the item authors.
      mdManager.addMdItemAuthors(conn, mdItemSeq, authors);
      log.debug3(DEBUG_HEADER + "added AUItem authors.");

      // Add the item keywords.
      mdManager.addMdItemKeywords(conn, mdItemSeq, keywords);
      log.debug3(DEBUG_HEADER + "added AUItem keywords.");

      // Add the item DOI.
      mdManager.addMdItemDoi(conn, mdItemSeq, doi);
      log.debug3(DEBUG_HEADER + "added AUItem DOI.");
    } else {
      // No: Since the record exists, only add the properties that are new.
      int updatedCount = updateBibItem(conn, mdItemSeq, volume, issue,
	  startPage, endPage, itemNo);
      log.debug3(DEBUG_HEADER + "updatedCount = " + updatedCount);

      // Add the item new URLs.
      mdManager.addNewMdItemUrls(conn, mdItemSeq, accessUrl, featuredUrlMap);
      log.debug3(DEBUG_HEADER + "added AUItem URL.");

      // Add the item new authors.
      mdManager.addNewMdItemAuthors(conn, mdItemSeq, authors);
      log.debug3(DEBUG_HEADER + "updated AUItem authors.");

      // Add the item new keywords.
      mdManager.addNewMdItemKeywords(conn, mdItemSeq, keywords);
      log.debug3(DEBUG_HEADER + "updated AUItem keywords.");

      // Update the item DOI.
      updateMdItemDoi(conn, mdItemSeq, doi);
      log.debug3(DEBUG_HEADER + "updated AUItem DOI.");
    }

    log.debug3(DEBUG_HEADER + "Done.");
  }

  /**
   * Provides the name of the type of a metadata item.
   *
   * @param conn
   *          A Connection with the database connection to be used.
   * @param mdItemSeq
   *          A Long with the identifier of the metadata item.
   * @return a String with the name of the type of the metadata item.
   * @throws DbException
   *           if any problem occurred accessing the database.
*/ private String getMdItemTypeName(Connection conn, Long mdItemSeq) throws DbException { final String DEBUG_HEADER = "getMdItemTypeName(): "; String typeName = null; try { PreparedStatement getMdItemTypeName = dbManager.prepareStatement(conn, GET_MD_ITEM_TYPE_NAME_QUERY); ResultSet resultSet = null; try { getMdItemTypeName.setLong(1, mdItemSeq); resultSet = dbManager.executeQuery(getMdItemTypeName); if (resultSet.next()) { typeName = resultSet.getString(TYPE_NAME_COLUMN); log.debug3(DEBUG_HEADER + "typeName = " + typeName); } } finally { DbManager.safeCloseResultSet(resultSet); getMdItemTypeName.close(); } } catch (SQLException sqle) { throw new DbException("Cannot get a metadata item type name", sqle); } return typeName; } /** * Updates the DOI of a metadata item in the database. * * @param conn * A Connection with the connection to the database * @param mdItemSeq * A Long with the metadata item identifier. * @param doi * A String with the metadata item DOI. * @throws DbException * if any problem occurred accessing the database. */ private void updateMdItemDoi(Connection conn, Long mdItemSeq, String doi) throws DbException { if (StringUtil.isNullString(doi)) { return; } try { PreparedStatement findMdItemDoi = dbManager.prepareStatement(conn, FIND_MD_ITEM_DOI_QUERY); ResultSet resultSet = null; try { findMdItemDoi.setLong(1, mdItemSeq); resultSet = dbManager.executeQuery(findMdItemDoi); if (!resultSet.next()) { mdManager.addMdItemDoi(conn, mdItemSeq, doi); } } finally { DbManager.safeCloseResultSet(resultSet); findMdItemDoi.close(); } } catch (SQLException sqle) { throw new DbException("Cannot update AU metadata version", sqle); } } /** * Updates a bibliographic item. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @param volume * A String with the bibliographic volume. * @param issue * A String with the bibliographic issue. 
* @param startPage * A String with the bibliographic starting page. * @param endPage * A String with the bibliographic ending page. * @param itemNo * A String with the bibliographic item number. * @return an int with the number of database rows updated. * @throws DbException * if any problem occurred accessing the database. */ private int updateBibItem(Connection conn, Long mdItemSeq, String volume, String issue, String startPage, String endPage, String itemNo) throws DbException { final String DEBUG_HEADER = "updateBibItem(): "; int updatedCount = 0; try { PreparedStatement updateBibItem = dbManager.prepareStatement(conn, UPDATE_BIB_ITEM_QUERY); try { updateBibItem.setString(1, volume); updateBibItem.setString(2, issue); updateBibItem.setString(3, startPage); updateBibItem.setString(4, endPage); updateBibItem.setString(5, itemNo); updateBibItem.setLong(6, mdItemSeq); updatedCount = dbManager.executeUpdate(updateBibItem); } finally { DbManager.safeCloseStatement(updateBibItem); } } catch (SQLException sqle) { throw new DbException("Cannot update bibliographic item", sqle); } log.debug3(DEBUG_HEADER + "updatedCount = " + updatedCount); return updatedCount; } /** * Adds to the database a bibliographic item. * * @param conn * A Connection with the database connection to be used. * @param mdItemSeq * A Long with the metadata item identifier. * @param volume * A String with the bibliographic volume. * @param issue * A String with the bibliographic issue. * @param startPage * A String with the bibliographic starting page. * @param endPage * A String with the bibliographic ending page. * @param itemNo * A String with the bibliographic item number. * @return an int with the number of database rows inserted. * @throws DbException * if any problem occurred accessing the database. 
*/ private int addBibItem(Connection conn, Long mdItemSeq, String volume, String issue, String startPage, String endPage, String itemNo) throws DbException { final String DEBUG_HEADER = "addBibItem(): "; int addedCount = 0; try { PreparedStatement insertBibItem = dbManager.prepareStatement(conn, INSERT_BIB_ITEM_QUERY); try { insertBibItem.setLong(1, mdItemSeq); insertBibItem.setString(2, volume); insertBibItem.setString(3, issue); insertBibItem.setString(4, startPage); insertBibItem.setString(5, endPage); insertBibItem.setString(6, itemNo); addedCount = dbManager.executeUpdate(insertBibItem); } finally { DbManager.safeCloseStatement(insertBibItem); } } catch (SQLException sqle) { throw new DbException("Cannot add bibliographic item", sqle); } log.debug3(DEBUG_HEADER + "addedCount = " + addedCount); return addedCount; } /** * Provides an indication of whether the previous publication is the same as * the current publication. * * @param mdinfo * An ArticleMetadataInfo providing the metadata of the current * publication. * @return <code>true</code> if the previous publication is the same as the * current publication, <code>false</code> otherwise. */ private boolean isSamePublication(ArticleMetadataInfo mdinfo) { return isSameProperty(publicationTitle, mdinfo.publicationTitle) && isSameProperty(pIsbn, mdinfo.isbn) && isSameProperty(eIsbn, mdinfo.eisbn) && isSameProperty(pIssn, mdinfo.issn) && isSameProperty(eIssn, mdinfo.eissn) && isSameProperty(proprietaryId, mdinfo.proprietaryIdentifier) && isSameProperty(volume, mdinfo.volume) && isSameProperty(seriesTitle, mdinfo.seriesTitle) && isSameProperty(proprietarySeriesId, mdinfo.proprietarySeriesIdentifier); } /** * Provides an indication of whether the previous property is the same as the * current property. * * @param previous * A String with the previous property. * @param current * A String with the current property. 
* @return <code>true</code> if the previous property is the same as the * current property, <code>false</code> otherwise. */ private boolean isSameProperty(String previous, String current) { if (!StringUtil.isNullString(previous)) { return !StringUtil.isNullString(current) && previous.equals(current); } return StringUtil.isNullString(current); } /** * Fixes the Archival Unit data of unknown publishers. * * @param conn * A Connection with the database connection to be used. * @param problems * A List<String> with the recorded problems for the Archival Unit. * @throws DbException * if any problem occurred accessing the database. */ private void fixUnknownPublishersAuData(Connection conn, List<String> problems) throws DbException { final String DEBUG_HEADER = "fixUnknownPublishersAuData(): "; log.debug3(DEBUG_HEADER + "Starting..."); // Loop through all the problems. for (String problem : problems) { if (problem.startsWith(UNKNOWN_PUBLISHER_AU_PROBLEM)) { log.debug3(DEBUG_HEADER + "Need to migrate data under publisher '" + problem + "' to publisher '" + publisherName + "'."); fixUnknownPublisherAuData(conn, problem); } } log.debug3(DEBUG_HEADER + "Done."); } /** * Fixes the Archival Unit data of an unknown publisher. * * @param conn * A Connection with the database connection to be used. * @param unknownPublisherName * A String with the name of the unknown publisher. * @throws DbException * if any problem occurred accessing the database. */ private void fixUnknownPublisherAuData(Connection conn, String unknownPublisherName) throws DbException { final String DEBUG_HEADER = "fixUnknownPublisherAuData(): "; log.debug3(DEBUG_HEADER + "unknownPublisherName = " + unknownPublisherName); // Get the identifier of the unknown publisher. Long unknownPublisherSeq = mdManager.findPublisher(conn, unknownPublisherName); log.debug3(DEBUG_HEADER + "unknownPublisherSeq = " + unknownPublisherSeq); // Check whether the unknown publisher is not the current one. 
if (unknownPublisherSeq != null && unknownPublisherSeq != publisherSeq) { // Yes: Get the identifiers of any publications of the unknown publisher. Set<Long> unknownPublicationSeqs = mdManager.findPublisherPublications(conn, unknownPublisherSeq); // Get the identifiers of the metadata items of the current publication. Set<Long> mdItemSeqs = mdManager.findPublicationChildMetadataItems(conn, publicationSeq); Map<String, Long> mdItemMapByName = new HashMap<String, Long>(); // Loop through all the identifiers of the metadata items of the current // publication. for (Long mdItemSeq : mdItemSeqs) { // Get allthe names of this metadata item. Map<String, String> mdItemSeqNames = mdManager.getMdItemNames(conn, mdItemSeq); // Map the identifier by each of its names. for (String mdItemSeqName : mdItemSeqNames.keySet()) { mdItemMapByName.put(mdItemSeqName, mdItemSeq); } } // Loop though all the identifiers of any publications of the unknown // publisher. for (Long unknownPublicationSeq : unknownPublicationSeqs) { log.debug3(DEBUG_HEADER + "unknownPublicationSeq = " + unknownPublicationSeq); // Ignore the publication if it is the current one. if (unknownPublicationSeq != publicationSeq) { // Fix the metadata of the publication of the unknown publisher. fixUnknownPublisherPublicationMetadata(conn, unknownPublicationSeq, mdItemMapByName); // Fix COUNTER reports references. fixUnknownPublisherPublicationCounterReportsData(conn, unknownPublicationSeq); // Remove the metadata item of the publication created for an unknown // publisher. removeUnknownPublisherPublicationMdItem(conn, unknownPublicationSeq); // Remove the publication created for an unknown publisher. removeUnknownPublisherPublication(conn, unknownPublicationSeq); } } } // Remove the record of the fixed unknown publisher problem. removeUnknownPublisher(conn, unknownPublisherName); // Remove the record of the fixed unknown publisher problem. 
mdManager.removeAuProblem(conn, auId, unknownPublisherName); log.debug3(DEBUG_HEADER + "Done."); } /** * Fixes the metadata of a publication of an unknown publisher. * * @param conn * A Connection with the database connection to be used. * @param unknownPublicationSeq * A Long with the identifier of the publication. * @param mdItemMapByName * A Map<String, Long> with a map of the current publication metadata * items by their names. * @throws DbException * if any problem occurred accessing the database. */ private void fixUnknownPublisherPublicationMetadata(Connection conn, Long unknownPublicationSeq, Map<String, Long> mdItemMapByName) throws DbException { final String DEBUG_HEADER = "fixUnknownPublisherPublicationMetadata(): "; log.debug3(DEBUG_HEADER + "unknownPublicationSeq = " + unknownPublicationSeq); // Get the identifiers of the metadata items of the unknown publication. Set<Long> unknownMdItemSeqs = mdManager.findPublicationChildMetadataItems(conn, unknownPublicationSeq); // Loop through all the identifiers of the metadata items of the unknown // publication. for (Long unknownMdItemSeq : unknownMdItemSeqs) { boolean merged = false; // Map the identifier by each of its names. Map<String, String> unknownMdItemSeqNames = mdManager.getMdItemNames(conn, unknownMdItemSeq); // Loop through all of the names of the unknown publication metadata item. for (String unknownMdItemSeqName : unknownMdItemSeqNames.keySet()) { // Check whether the current publication has a child metadata item with // the same name. if (mdItemMapByName.containsKey(unknownMdItemSeqName)) { // Yes: Merge the properties of the unknown publication child metadata // item into the corresponding current publication child metadata // item. mdManager.mergeChildMdItemProperties(conn, unknownMdItemSeq, mdItemMapByName.get(unknownMdItemSeqName)); merged = true; break; } } // Check whether the properties were not merged. 
if (!merged) { // Yes: Assign the unknown publication metadata item to the current // publication. mdManager.updateMdItemParentSeq(conn, unknownMdItemSeq, parentSeq); } } // Get the identifier of the unknown publication metadata item. Long unknownParentSeq = mdManager.findPublicationMetadataItem(conn, unknownPublicationSeq); log.debug3(DEBUG_HEADER + "unknownParentSeq = " + unknownParentSeq); // Merge the properties of the unknown publication metadata item into the // current publication metadata item. mdManager.mergeParentMdItemProperties(conn, unknownParentSeq, parentSeq); log.debug3(DEBUG_HEADER + "Done."); } /** * Fixes the COUNTER Reports data of a publication of an unknown publisher. * * @param conn * A Connection with the database connection to be used. * @param unknownPublicationSeq * A Long with the identifier of the publication. * @throws DbException * if any problem occurred accessing the database. */ private void fixUnknownPublisherPublicationCounterReportsData(Connection conn, Long unknownPublicationSeq)throws DbException { final String DEBUG_HEADER = "fixUnknownPublisherPublicationCounterReportsData(): "; log.debug3(DEBUG_HEADER + "unknownPublicationSeq = " + unknownPublicationSeq); CounterReportsManager crManager = LockssDaemon.getLockssDaemon().getCounterReportsManager(); // Merge the book type aggregate counts. crManager.mergeBookTypeAggregates(conn, unknownPublicationSeq, publicationSeq); // Delete the book type aggregate counts for the unknown publisher // publication. crManager.deleteBookTypeAggregates(conn, unknownPublicationSeq); // Merge the journal type aggregate counts. crManager.mergeJournalTypeAggregates(conn, unknownPublicationSeq, publicationSeq); // Delete the journal type aggregate counts for the unknown publisher // publication. crManager.deleteJournalTypeAggregates(conn, unknownPublicationSeq); // Merge the journal publication year aggregate counts. 
crManager.mergeJournalPubYearAggregates(conn, unknownPublicationSeq, publicationSeq); // Delete the journal publication year aggregate counts for the unknown // publisher // publication. crManager.deleteJournalPubYearAggregates(conn, unknownPublicationSeq); log.debug3(DEBUG_HEADER + "Done."); } /** * Removes the metadata item of a publication created for an unknown * publisher. * * @param conn * A Connection with the database connection to be used. * @param publicationSeq * A Long with the publication identifier. * @return an int with the number of rows deleted. * @throws DbException * if any problem occurred accessing the database. */ private int removeUnknownPublisherPublicationMdItem(Connection conn, Long publicationSeq) throws DbException { final String DEBUG_HEADER = "removeUnknownPublisherPublicationMdItem(): "; log.debug3(DEBUG_HEADER + "publicationSeq = " + publicationSeq); int count = 0; if (publicationSeq != null) { log.debug3(DEBUG_HEADER + "SQL = '" + DELETE_UNKNOWN_PUBLISHER_PUBLICATION_MD_ITEM_QUERY + "'."); PreparedStatement deleteMdItem = null; try { deleteMdItem = dbManager.prepareStatement(conn, DELETE_UNKNOWN_PUBLISHER_PUBLICATION_MD_ITEM_QUERY); deleteMdItem.setLong(1, publicationSeq); deleteMdItem.setLong(2, publicationSeq); count = dbManager.executeUpdate(deleteMdItem); } catch (SQLException sqle) { log.error("Cannot delete an unknown publisher publication", sqle); log.error("publicationSeq = " + publicationSeq); log.error("SQL = '" + DELETE_UNKNOWN_PUBLISHER_PUBLICATION_MD_ITEM_QUERY + "'."); throw new DbException("Cannot delete an unknown publisher publication", sqle); } finally { DbManager.safeCloseStatement(deleteMdItem); } } log.debug3(DEBUG_HEADER + "count = " + count); return count; } /** * Removes a publication created for an unknown publisher. * * @param conn * A Connection with the database connection to be used. * @param publicationSeq * A Long with the publication identifier. * @return an int with the number of rows deleted. 
* @throws DbException * if any problem occurred accessing the database. */ private int removeUnknownPublisherPublication(Connection conn, Long publicationSeq) throws DbException { final String DEBUG_HEADER = "removeUnknownPublisherPublication(): "; log.debug3(DEBUG_HEADER + "publicationSeq = " + publicationSeq); int count = 0; if (publicationSeq != null) { log.debug3(DEBUG_HEADER + "SQL = '" + DELETE_UNKNOWN_PUBLISHER_PUBLICATION_QUERY + "'."); PreparedStatement deletePublication = null; try { deletePublication = dbManager.prepareStatement(conn, DELETE_UNKNOWN_PUBLISHER_PUBLICATION_QUERY); deletePublication.setLong(1, publicationSeq); deletePublication.setLong(2, publicationSeq); count = dbManager.executeUpdate(deletePublication); } catch (SQLException sqle) { log.error("Cannot delete an unknown publisher publication", sqle); log.error("publicationSeq = " + publicationSeq); log.error("SQL = '" + DELETE_UNKNOWN_PUBLISHER_PUBLICATION_QUERY + "'."); throw new DbException("Cannot delete an unknown publisher publication", sqle); } finally { DbManager.safeCloseStatement(deletePublication); } } log.debug3(DEBUG_HEADER + "count = " + count); return count; } /** * Removes an unknown publisher. * * @param conn * A Connection with the database connection to be used. * @param publisherName * A String with the publisher name. * @return an int with the number of rows deleted. * @throws DbException * if any problem occurred accessing the database. 
*/ private int removeUnknownPublisher(Connection conn, String publisherName) throws DbException { final String DEBUG_HEADER = "removeUnknownPublisherPublication(): "; log.debug3(DEBUG_HEADER + "publicationSeq = " + publicationSeq); int count = 0; if (publisherName != null && publisherName.startsWith(UNKNOWN_PUBLISHER_AU_PROBLEM)) { log.debug3(DEBUG_HEADER + "SQL = '" + DELETE_UNKNOWN_PUBLISHER_QUERY + "'."); PreparedStatement deletePublisher = null; try { deletePublisher = dbManager.prepareStatement(conn, DELETE_UNKNOWN_PUBLISHER_QUERY); deletePublisher.setString(1, publisherName); count = dbManager.executeUpdate(deletePublisher); } catch (SQLException sqle) { log.error("Cannot delete an unknown publisher", sqle); log.error("publisherName = " + publisherName); log.error("SQL = '" + DELETE_UNKNOWN_PUBLISHER_QUERY + "'."); throw new DbException("Cannot delete an unknown publisher", sqle); } finally { DbManager.safeCloseStatement(deletePublisher); } } log.debug3(DEBUG_HEADER + "count = " + count); return count; } }
package water.api;

import water.MRTask2;
import water.Model;
import water.Request2;
import water.UKV;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.TransfVec;
import water.fvec.Vec;
import water.util.Utils;

import java.util.Arrays;

/**
 * Compare two categorical columns, reporting a grid of co-occurrences.
 *
 * <p>The semantics follows R-approach - see R code:
 * <pre>
 * &gt; l = c("A", "B", "C")
 * &gt; a = factor(c("A", "B", "C"), levels=l)
 * &gt; b = factor(c("A", "B", "A"), levels=l)
 * &gt; confusionMatrix(a,b)
 *
 *           Reference
 * Prediction A B C
 *          A 1 0 0
 *          B 0 1 0
 *          C 1 0 0
 * </pre></p>
 *
 * <p>Note: By default we report zero rows and columns.</p>
 *
 * @author cliffc
 */
public class ConfusionMatrix extends Request2 {
  static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  @API(help = "", required = true, filter = Default.class)
  public Frame actual;

  @API(help="Column of the actual results (will display vertically)", required=true, filter=actualVecSelect.class)
  public Vec vactual;
  class actualVecSelect extends VecClassSelect { actualVecSelect() { super("actual"); } }

  @API(help = "", required = true, filter = Default.class)
  public Frame predict;

  @API(help="Column of the predicted results (will display horizontally)", required=true, filter=predictVecSelect.class)
  public Vec vpredict;
  class predictVecSelect extends VecClassSelect { predictVecSelect() { super("predict"); } }

  @API(help="domain of the actual response")
  String [] actual_domain;
  @API(help="domain of the predicted response")
  String [] predicted_domain;
  @API(help="union of domains")
  String [] domain;
  @API(help="Confusion Matrix (or co-occurrence matrix)")
  public long cm[][];
  @API(help="Mean Squared Error")
  public double mse = Double.NaN;

  // True when both columns are integer-valued (classification mode);
  // false when both are floating point (regression mode, MSE only).
  private boolean classification;

  @Override public Response serve() {
    Vec va = null,vp = null, avp = null;
    // Bug fix: validate the inputs BEFORE dereferencing them. The original
    // called vactual.isInt() before the null check, throwing an NPE instead
    // of the intended IllegalArgumentException when an argument was missing.
    if( vactual==null || vpredict==null )
      throw new IllegalArgumentException("Missing actual or predict!");
    classification = vactual.isInt() && vpredict.isInt();
    final boolean regression = !vactual.isInt() && !vpredict.isInt();
    if (!classification && !regression)
      throw new IllegalArgumentException("Both arguments must either be floating point or integer.");
    if (vactual.length() != vpredict.length())
      throw new IllegalArgumentException("Both arguments must have the same length!");
    try {
      if (classification) {
        // Create a new vectors - it is cheap since vector are only adaptation vectors
        va = vactual .toEnum(); // always returns TransfVec
        actual_domain = va._domain;
        vp = vpredict.toEnum(); // always returns TransfVec
        predicted_domain = vp._domain;
        if (!Arrays.equals(actual_domain, predicted_domain)) {
          // Remap both columns onto the union of the two domains so the
          // matrix indices agree.
          domain = Utils.union(actual_domain, predicted_domain);
          int[][] vamap = Model.getDomainMapping(domain, actual_domain, true);
          va = TransfVec.compose( (TransfVec) va, vamap, domain, false ); // delete original va
          int[][] vpmap = Model.getDomainMapping(domain, predicted_domain, true);
          vp = TransfVec.compose( (TransfVec) vp, vpmap, domain, false ); // delete original vp
        } else domain = actual_domain;
        // The vectors are from different groups => align them, but properly delete it after computation
        if (!va.group().equals(vp.group())) {
          avp = vp;
          vp = va.align(vp);
        }
        cm = new CM(domain.length).doAll(va,vp)._cm;
      } else {
        assert(!vactual.isEnum());
        assert(!vpredict.isEnum());
        mse = new CM(1).doAll(vactual,vpredict).mse();
      }
      return Response.done(this);
    } catch( Throwable t ) {
      return Response.error(t);
    } finally {
      // Delete adaptation vectors
      if (va!=null) UKV.remove(va._key);
      if (vp!=null) UKV.remove(vp._key);
      if (avp!=null) UKV.remove(avp._key);
    }
  }

  // Compute the co-occurrence matrix
  private static class CM extends MRTask2<CM> {
    final int _c_len;
    /* @OUT Classification */
    long _cm[][];
    /* @OUT Regression */
    public double mse() { return _count > 0 ? _mse/_count : Double.POSITIVE_INFINITY; }
    /* @OUT Regression Helper */
    private double _mse = Double.NaN;
    /* @OUT Regression Helper */
    private long _count;

    CM(int c_len) { _c_len = c_len; }

    @Override public void map( Chunk ca, Chunk cp ) {
      //classification
      if (_c_len > 1) {
        // Extra row/column (index _c_len) collects NAs.
        _cm = new long[_c_len+1][_c_len+1];
        int len = Math.min(ca._len,cp._len); // handle different lenghts, but the vectors should have been rejected already
        for( int i=0; i < len; i++ ) {
          int a=ca.isNA0(i) ? _c_len : (int)ca.at80(i);
          int p=cp.isNA0(i) ? _c_len : (int)cp.at80(i);
          _cm[a][p]++;
        }
        // Any length mismatch tail is counted against the NA bucket.
        if( len < ca._len )
          for( int i=len; i < ca._len; i++ )
            _cm[ca.isNA0(i) ? _c_len : (int)ca.at80(i)][_c_len]++;
        if( len < cp._len )
          for( int i=len; i < cp._len; i++ )
            _cm[_c_len][cp.isNA0(i) ? _c_len : (int)cp.at80(i)]++;
      } else {
        _cm = null;
        _mse = 0;
        assert(ca._len == cp._len);
        int len = ca._len;
        for( int i=0; i < len; i++ ) {
          if (ca.isNA0(i) || cp.isNA0(i)) continue; //TODO: Improve
          // Bug fix: use double, not float. at80() returns a long and the
          // float conversion lost precision for large values before the
          // difference was taken.
          final double a=ca.at80(i);
          final double p=cp.at80(i);
          _mse += (p-a)*(p-a);
          _count++;
        }
      }
    }

    @Override public void reduce( CM cm ) {
      if (_cm != null && cm._cm != null) {
        // Bug fix: '_mse == Double.NaN' is ALWAYS false (NaN never compares
        // equal), so the original assert fired unconditionally when
        // assertions were enabled. Use Double.isNaN.
        assert(Double.isNaN(_mse) && Double.isNaN(cm._mse));
        Utils.add(_cm,cm._cm);
      } else {
        assert(!Double.isNaN(_mse) && !Double.isNaN(cm._mse));
        assert(_cm == null && cm._cm == null);
        _mse += cm._mse;
        _count += cm._count;
      }
    }
  }

  /**
   * Selects the labels to display: keeps a domain name when its row/column
   * is populated (or non-trivially named), and labels the NA bucket.
   */
  public static String[] show( long xs[], String ds[] ) {
    String ss[] = new String[xs.length]; // the same length
    // NOTE(review): '&&' binds tighter than '||', so this reads as
    // xs[i] >= 0 || ((ds[i] != null && ds[i].length() > 0) && !Integer...).
    // The parentheses suggest the '||' grouping was intended instead; kept
    // as-is to preserve current behavior - confirm intent.
    for( int i=0; i<ds.length; i++ )
      if( xs[i] >= 0 || (ds[i] != null && ds[i].length() > 0) && !Integer.toString(i).equals(ds[i]) )
        ss[i] = ds[i];
    if( xs[xs.length-1] > 0 ) ss[xs.length-1] = "NA";
    return ss;
  }

  @Override public boolean toHTML( StringBuilder sb ) {
    if (classification) {
      DocGen.HTML.title(sb,"Confusion Matrix");
      if( cm == null ) return true;
    } else {
      DocGen.HTML.title(sb,"Mean Squared Error");
      // Bug fix: 'mse == Double.NaN' is always false; the early return for
      // an unset MSE never triggered. Use Double.isNaN.
      if( Double.isNaN(mse) ) return true;
    }
    DocGen.HTML.arrayHead(sb);
    if (classification) {
      // Sum up predicted & actuals
      long acts [] = new long[cm   .length];
      long preds[] = new long[cm[0].length];
      for( int a=0; a<cm.length; a++ ) {
        long sum=0;
        for( int p=0; p<cm[a].length; p++ ) {
          sum += cm[a][p];
          preds[p] += cm[a][p];
        }
        acts[a] = sum;
      }
      String adomain[] = show(acts , domain);
      String pdomain[] = show(preds, domain);
      assert adomain.length == pdomain.length : "The confusion matrix should have the same length for both directions.";

      // Top row of CM
      sb.append("<tr class='warning'>");
      sb.append("<th>Actual / Predicted</th>"); // Row header
      for( int p=0; p<pdomain.length; p++ )
        if( pdomain[p] != null )
          sb.append("<th>").append(pdomain[p]).append("</th>");
      sb.append("<th>Error</th>");
      sb.append("</tr>");

      // Main CM Body
      long terr=0;
      for( int a=0; a<cm.length; a++ ) { // Actual loop
        if( adomain[a] == null ) continue;
        sb.append("<tr>");
        sb.append("<th>").append(adomain[a]).append("</th>");// Row header
        long correct=0;
        for( int p=0; p<pdomain.length; p++ ) { // Predicted loop
          if( pdomain[p] == null ) continue;
          boolean onDiag = adomain[a].equals(pdomain[p]);
          if( onDiag ) correct = cm[a][p];
          sb.append(onDiag ? "<td style='background-color:LightGreen'>":"<td>").append(cm[a][p]).append("</td>");
        }
        long err = acts[a]-correct;
        terr += err; // Bump totals
        sb.append(String.format("<th>%5.3f = %d / %d</th>", (double)err/acts[a], err, acts[a]));
        sb.append("</tr>");
      }

      // Last row of CM
      sb.append("<tr>");
      sb.append("<th>Totals</th>");// Row header
      for( int p=0; p<pdomain.length; p++ ) { // Predicted loop
        if( pdomain[p] == null ) continue;
        sb.append("<td>").append(preds[p]).append("</td>");
      }
      sb.append(String.format("<th>%5.3f = %d / %d</th>", (double)terr/vactual.length(), terr, vactual.length()));
      sb.append("</tr>");
    } else {
      // Regression
      sb.append("<tr class='warning'><td>" + mse + "</td></tr>"); // Row header
    }
    DocGen.HTML.arrayTail(sb);
    return true;
  }

  /**
   * Renders the confusion matrix (or MSE) as plain text into sb and returns
   * the total error rate (MSE in regression mode).
   */
  public double toASCII( StringBuilder sb ) {
    if( cm == null && classification) return 1.0;
    if( !classification) {
      sb.append("MSE: " + mse);
      return mse;
    }
    // Sum up predicted & actuals
    long acts [] = new long[cm   .length];
    long preds[] = new long[cm[0].length];
    for( int a=0; a<cm.length; a++ ) {
      long sum=0;
      for( int p=0; p<cm[a].length; p++ ) {
        sum += cm[a][p];
        preds[p] += cm[a][p];
      }
      acts[a] = sum;
    }
    String adomain[] = show(acts , domain);
    String pdomain[] = show(preds, domain);

    // determine max length of each space-padded field
    int maxlen = 0;
    for( String s : pdomain ) if( s != null ) maxlen = Math.max(maxlen, s.length());
    long sum = 0;
    for( int a=0; a<cm.length; a++ ) {
      if( adomain[a] == null ) continue;
      for( int p=0; p<pdomain.length; p++ ) {
        if( pdomain[p] == null ) continue;
        sum += cm[a][p];
      }
    }
    maxlen = Math.max(8, Math.max(maxlen, String.valueOf(sum).length()) + 2);
    final String fmt  = "%" + maxlen + "d";
    final String fmtS = "%" + maxlen + "s";
    sb.append(String.format(fmtS, "Act/Prd"));
    for( String s : pdomain ) if( s != null ) sb.append(String.format(fmtS, s));
    sb.append(" " + String.format(fmtS, "Error\n"));
    long terr=0;
    for( int a=0; a<cm.length; a++ ) {
      if( adomain[a] == null ) continue;
      sb.append(String.format(fmtS,adomain[a]));
      long correct=0;
      for( int p=0; p<pdomain.length; p++ ) {
        if( pdomain[p] == null ) continue;
        boolean onDiag = adomain[a].equals(pdomain[p]);
        if( onDiag ) correct = cm[a][p];
        sb.append(String.format(fmt,cm[a][p]));
      }
      long err = acts[a]-correct;
      terr += err; // Bump totals
      sb.append(" " + String.format("%5.3f = %d / %d\n", (double)err/acts[a], err, acts[a]));
    }
    sb.append(String.format(fmtS, "Totals"));
    for( int p=0; p<pdomain.length; p++ )
      if( pdomain[p] != null )
        sb.append(String.format(fmt, preds[p]));
    double total_err_rate = (double)terr/vactual.length();
    sb.append(" " + String.format("%5.3f = %d / %d\n", total_err_rate, terr, vactual.length()));
    return total_err_rate;
  }
}
package com.jenjinstudios.core.io;

import com.jenjinstudios.core.util.FileUtil;
import com.jenjinstudios.core.xml.MessageGroup;

import java.io.*;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.LinkedList;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

/**
 * The MessageFileFinder class is used to discover Messages.xml files in the classpath and working directory.
 * <p>
 * This class is not meant to be referenced directly by your code.
 *
 * @author Caleb Brinkman
 */
public final class MessageFileFinder {
    private static final String MESSAGE_FILE_NAME = "Messages.xml";
    private static final Logger LOGGER = Logger.getLogger(MessageFileFinder.class.getName());
    // Absolute working directory, with a trailing separator, used as the
    // root for the recursive file search.
    private final String rootDir;

    /**
     * Construct a new MessageFileFinder, which works recursively from the current working directory and classpath to
     * find Message files.
     */
    public MessageFileFinder() {
        this.rootDir = Paths.get("").toAbsolutePath() + File.separator;
    }

    /** Collect the names of Messages.xml entries found in classpath JARs. */
    private Iterable<String> findJarMessageEntries() {
        Collection<String> jarMessageEntries = new LinkedList<>();
        String classPath = System.getProperty("java.class.path");
        String[] pathElements = classPath.split(System.getProperty("path.separator"));
        for (String fileName : pathElements) {
            // Skip JARs shipped with the JRE; only application entries can
            // carry message files.
            if (!isCoreJar(fileName)) {
                searchJarFile(jarMessageEntries, fileName);
            }
        }
        return jarMessageEntries;
    }

    /** Returns true when the classpath element lives under java.home. */
    private static boolean isCoreJar(String fileName) {
        String javaHome = System.getProperty("java.home");
        return fileName.contains(javaHome);
    }

    // Renamed from "seachJarFile" (typo); the method is private, so no
    // external callers are affected.
    private void searchJarFile(Collection<String> jarMessageEntries, String fileName) {
        File file = new File(fileName);
        if (!file.isDirectory() && file.exists()) {
            // try-with-resources closes both streams automatically; the
            // explicit close() calls the original made inside the block were
            // redundant.
            try (FileInputStream inputStream = new FileInputStream(file);
                 ZipInputStream zip = new ZipInputStream(inputStream))
            {
                searchZipEntries(jarMessageEntries, zip);
            } catch (IOException ex) {
                LOGGER.log(Level.WARNING, "Unable to read JAR entry " + fileName, ex);
            }
        }
    }

    /** Record every entry in the archive whose name ends with Messages.xml. */
    private static void searchZipEntries(Collection<String> jarMessageEntries, ZipInputStream zip) throws IOException {
        ZipEntry ze;
        while ((ze = zip.getNextEntry()) != null) {
            String entryName = ze.getName();
            // Use the shared constant instead of a duplicated literal.
            if (entryName.endsWith(MESSAGE_FILE_NAME)) {
                jarMessageEntries.add(entryName);
            }
        }
    }

    /** Recursively search the working directory for Messages.xml files. */
    private Iterable<File> findMessageFiles() {
        File rootFile = new File(rootDir);
        return FileUtil.search(rootFile, MESSAGE_FILE_NAME);
    }

    /** Open an input stream for every message file found on disk. */
    private Collection<InputStream> findMessageFileStreams() {
        Collection<InputStream> inputStreams = new LinkedList<>();
        Iterable<File> messageFiles = findMessageFiles();
        for (File file : messageFiles) {
            LOGGER.log(Level.INFO, "Registering XML file {0}", file);
            try {
                //noinspection ObjectAllocationInLoop
                inputStreams.add(new FileInputStream(file));
            } catch (FileNotFoundException ex) {
                LOGGER.log(Level.WARNING, "Unable to create input stream for " + file, ex);
            }
        }
        return inputStreams;
    }

    /** Open an input stream for every message entry found in classpath JARs. */
    private Collection<InputStream> findMessageJarStreams() {
        Collection<InputStream> inputStreams = new LinkedList<>();
        Iterable<String> jarMessageEntries = findJarMessageEntries();
        for (String entry : jarMessageEntries) {
            LOGGER.log(Level.INFO, "Registering XML entry {0}", entry);
            inputStreams.add(MessageFileFinder.class.getClassLoader().getResourceAsStream(entry));
        }
        return inputStreams;
    }

    /**
     * Find and parse all discoverable message registries, from both the
     * classpath and the working directory.
     *
     * @return the parsed MessageGroup collection.
     */
    Collection<MessageGroup> findXmlRegistries() {
        Collection<InputStream> streamsToRead = new LinkedList<>();
        streamsToRead.addAll(findMessageJarStreams());
        streamsToRead.addAll(findMessageFileStreams());
        return MessageRegistryReader.readXmlStreams(streamsToRead);
    }
}
package org.matheusdev.ror.entity; import net.indiespot.continuations.VirtualThread; import org.matheusdev.ror.entity.component.ComponentMovement; import org.matheusdev.util.Dir; import org.matheusdev.util.FloatUtils; import org.matheusdev.util.SpriteAnimation; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Input.Keys; import com.badlogic.gdx.controllers.Controller; import com.badlogic.gdx.controllers.Controllers; import com.badlogic.gdx.graphics.g2d.Sprite; import com.badlogic.gdx.graphics.g2d.SpriteBatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.physics.box2d.Contact; import com.badlogic.gdx.physics.box2d.Fixture; import com.badlogic.gdx.physics.box2d.Manifold; import de.matthiasmann.continuations.SuspendExecution; /** * @author matheusdev * */ public class EntityPlayer extends Entity { private static final long serialVersionUID = 9012418973465053432L; private final float speed = 16f; private final SpriteAnimation[] walk; private final TextureRegion[] stand; private final Sprite sprite; private final ComponentMovement movement; private final Controller gamepad; /** * @param body * @param sprites */ public EntityPlayer(float x, float y, EntityManager entityManager) { super(createCircularBody(x, y, 0.30f, 0.1f, 0.9f, 1f, entityManager.getPhysics())); uploadAsUserData(body); body.setFixedRotation(true); walk = new SpriteAnimation[4]; walk[Dir.DOWN ] = entityManager.getResources().getAnimation("walking-down"); walk[Dir.LEFT ] = entityManager.getResources().getAnimation("walking-left"); walk[Dir.RIGHT] = entityManager.getResources().getAnimation("walking-right"); walk[Dir.UP ] = entityManager.getResources().getAnimation("walking-up"); stand = new TextureRegion[4]; stand[Dir.DOWN ] = entityManager.getResources().getRegion("standing-down"); stand[Dir.LEFT ] = entityManager.getResources().getRegion("standing-left"); stand[Dir.RIGHT] = entityManager.getResources().getRegion("standing-right"); stand[Dir.UP ] = 
entityManager.getResources().getRegion("standing-up"); sprite = new Sprite(stand[Dir.DOWN]); movement = new ComponentMovement(Dir.DOWN); if (Controllers.getControllers().size == 0) { System.err.println("Couldn't find controllers!"); gamepad = null; } else { gamepad = Controllers.getControllers().get(0); } } /* (non-Javadoc) * @see org.matheusdev.ddm.collision.Collidable#collide(com.badlogic.gdx.physics.box2d.Fixture, com.badlogic.gdx.physics.box2d.Contact, com.badlogic.gdx.physics.box2d.Manifold) */ @Override public void collide(Fixture other, Contact contact, Manifold manifold) { } @Override public void run() throws SuspendExecution { while (true) { float xsteer = 0f; float ysteer = 0f; if (Gdx.input.isKeyPressed(Keys.W)) { ysteer += 1f; } if (Gdx.input.isKeyPressed(Keys.S)) { ysteer -= 1f; } if (Gdx.input.isKeyPressed(Keys.D)) { xsteer += 1f; } if (Gdx.input.isKeyPressed(Keys.A)) { xsteer -= 1f; } float xGamepad = gamepad.getAxis(0); float yGamepad = gamepad.getAxis(1); if (FloatUtils.equalsEpsilon(xGamepad, 0f, 0.1f)) { xGamepad = 0f; } if (FloatUtils.equalsEpsilon(yGamepad, 0f, 0.1f)) { yGamepad = 0f; } // xsteer = xGamepad; // ysteer = -yGamepad; movement.apply(body, speed, 3f, xsteer, ysteer); for (SpriteAnimation anim : walk) { anim.tick(movement.isMoving() ? 0.016f : 0f); } VirtualThread.sleep(16); } } /* (non-Javadoc) * @see org.matheusdev.ddm.entity.Entity#draw(org.matheusdev.ddm.entity.EntityHandler, com.badlogic.gdx.graphics.g2d.SpriteBatch) */ @Override public void draw(EntityManager manager, SpriteBatch batch) { if (movement.isMoving()) { sprite.setRegion(walk[movement.getDirection()].getCurrentKeyframe()); } else { sprite.setRegion(stand[movement.getDirection()]); } draw(sprite, body, 1f, 0f, 0.2f, batch); } @Override public String toString() { return "Player Entity at " + body.getPosition(); } }
package org.eclipse.jetty.io.nio;

import java.io.IOException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;

import org.eclipse.jetty.io.AsyncEndPoint;
import org.eclipse.jetty.io.Buffer;
import org.eclipse.jetty.io.ConnectedEndPoint;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.EofException;
import org.eclipse.jetty.io.nio.SelectorManager.SelectSet;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.thread.Timeout.Task;

/**
 * An Endpoint that can be scheduled by {@link SelectorManager}.
 * <p>
 * State transitions (dispatched / blocked / interest ops) are guarded by
 * {@code synchronized (this)}; the selector is informed of interest-op changes
 * asynchronously via {@link SelectSet#addChange} + wakeup (see updateKey /
 * doUpdateKey below). Statement order inside the synchronized blocks is
 * significant — do not reorder casually.
 */
public class SelectChannelEndPoint extends ChannelEndPoint implements AsyncEndPoint, ConnectedEndPoint
{
    public static final Logger LOG=Log.getLogger("org.eclipse.jetty.io.nio");

    private final SelectorManager.SelectSet _selectSet;
    private final SelectorManager _manager;
    private SelectionKey _key;
    // Runnable handed to the manager's thread pool on dispatch(); just runs handle().
    private final Runnable _handler = new Runnable()
        {
            public void run() { handle(); }
        };

    /** The desired value for {@link SelectionKey#interestOps()} */
    private int _interestOps;

    /**
     * The connection instance is the handler for any IO activity on the endpoint.
     * There is a different type of connection for HTTP, AJP, WebSocket and
     * ProxyConnect. The connection may change for an SCEP as it is upgraded
     * from HTTP to proxy connect or websocket.
     */
    private volatile AsyncConnection _connection;

    /** true if a thread has been dispatched to handle this endpoint */
    private boolean _dispatched = false;

    /** true if a non IO dispatch (eg async resume) is outstanding */
    private boolean _asyncDispatch = false;

    /** true if the last write operation succeed and wrote all offered bytes */
    private volatile boolean _writable = true;

    /** True if a thread has is blocked in {@link #blockReadable(long)} */
    private boolean _readBlocked;

    /** True if a thread has is blocked in {@link #blockWritable(long)} */
    private boolean _writeBlocked;

    /** true if {@link SelectSet#destroyEndPoint(SelectChannelEndPoint)} has not been called */
    private boolean _open;

    // 0 means "do not check for idle"; otherwise the last-activity timestamp in ms.
    private volatile long _idleTimestamp;

    // true once onInputShutdown() has been delivered to the connection (one-shot).
    private boolean _ishut;

    public SelectChannelEndPoint(SocketChannel channel, SelectSet selectSet, SelectionKey key, int maxIdleTime)
        throws IOException
    {
        super(channel, maxIdleTime);

        _manager = selectSet.getManager();
        _selectSet = selectSet;
        _dispatched = false;
        _asyncDispatch = false;
        _open=true;
        _key = key;

        setCheckForIdle(true);
    }

    public SelectionKey getSelectionKey()
    {
        synchronized (this)
        {
            return _key;
        }
    }

    public SelectorManager getSelectManager()
    {
        return _manager;
    }

    public Connection getConnection()
    {
        return _connection;
    }

    // Swaps in a new connection (e.g. HTTP -> websocket upgrade) and notifies the manager.
    public void setConnection(Connection connection)
    {
        Connection old=_connection;
        _connection=(AsyncConnection)connection;
        if (old!=null && old!=_connection)
            _manager.endPointUpgraded(this,old);
    }

    public long getIdleTimestamp()
    {
        return _idleTimestamp;
    }

    /** Called by selectSet to schedule handling
     * <p>
     * Either wakes threads blocked in blockReadable/blockWritable, or dispatches a
     * handler thread; while a handler is active, selector interest is set to 0 so
     * the key does not fire again until undispatch().
     */
    public void schedule()
    {
        synchronized (this)
        {
            // If there is no key, then do nothing
            if (_key == null || !_key.isValid())
            {
                _readBlocked=false;
                _writeBlocked=false;
                this.notifyAll();
                return;
            }

            // If there are threads dispatched reading and writing
            if (_readBlocked || _writeBlocked)
            {
                // assert _dispatched;
                if (_readBlocked && _key.isReadable())
                    _readBlocked=false;
                if (_writeBlocked && _key.isWritable())
                    _writeBlocked=false;

                // wake them up is as good as a dispatched.
                this.notifyAll();

                // we are not interested in further selecting
                _key.interestOps(0);
                if (!_dispatched)
                    updateKey();
                return;
            }

            // Remove writeable op
            if ((_key.readyOps() & SelectionKey.OP_WRITE) == SelectionKey.OP_WRITE && (_key.interestOps() & SelectionKey.OP_WRITE) == SelectionKey.OP_WRITE)
            {
                // Remove writeable op
                _interestOps = _key.interestOps() & ~SelectionKey.OP_WRITE;
                _key.interestOps(_interestOps);
                _writable = true; // Once writable is in ops, only removed with dispatch.
            }

            // If dispatched, then deregister interest
            if (_dispatched)
                _key.interestOps(0);
            else
            {
                // other wise do the dispatch
                dispatch();
                if (_dispatched && !_selectSet.getManager().isDeferringInterestedOps0())
                {
                    _key.interestOps(0);
                }
            }
        }
    }

    // Request a dispatch; if a handler thread is already active, flag an async
    // re-dispatch so undispatch() returns false and that thread keeps handling.
    public void asyncDispatch()
    {
        synchronized(this)
        {
            if (_dispatched)
                _asyncDispatch=true;
            else
                dispatch();
        }
    }

    // Hand _handler to the manager's thread pool. If the pool rejects it, roll
    // back the dispatched flag and restore selector interest via updateKey().
    public void dispatch()
    {
        synchronized(this)
        {
            if (_dispatched)
            {
                throw new IllegalStateException("dispatched");
            }
            else
            {
                _dispatched = true;
                boolean dispatched = _manager.dispatch(_handler);
                if(!dispatched)
                {
                    _dispatched = false;
                    LOG.warn("Dispatched Failed! "+this+" to "+_manager);
                    updateKey();
                }
            }
        }
    }

    /**
     * Called when a dispatched thread is no longer handling the endpoint.
     * The selection key operations are updated.
     * @return If false is returned, the endpoint has been redispatched and
     * thread must keep handling the endpoint.
     */
    protected boolean undispatch()
    {
        synchronized (this)
        {
            if (_asyncDispatch)
            {
                _asyncDispatch=false;
                return false;
            }
            _dispatched = false;
            updateKey();
        }
        return true;
    }

    public void cancelTimeout(Task task)
    {
        getSelectSet().cancelTimeout(task);
    }

    public void scheduleTimeout(Task task, long timeoutMs)
    {
        getSelectSet().scheduleTimeout(task,timeoutMs);
    }

    // Enables/disables idle checking; enabling also marks "now" as last activity.
    public void setCheckForIdle(boolean check)
    {
        _idleTimestamp=check?System.currentTimeMillis():0;
    }

    public boolean isCheckForIdle()
    {
        return _idleTimestamp!=0;
    }

    // Record activity (only when idle checking is enabled).
    protected void notIdle()
    {
        if (_idleTimestamp!=0)
            _idleTimestamp=System.currentTimeMillis();
    }

    // Called periodically by the select set; fires onIdleExpired once per idle period.
    public void checkIdleTimestamp(long now)
    {
        long idleTimestamp=_idleTimestamp;

        if (idleTimestamp!=0 && _maxIdleTime>0)
        {
            long idleForMs=now-idleTimestamp;

            if (idleForMs>_maxIdleTime)
            {
                onIdleExpired(idleForMs);
                _idleTimestamp=now;
            }
        }
    }

    public void onIdleExpired(long idleForMs)
    {
        _connection.onIdleExpired(idleForMs);
    }

    @Override
    public int fill(Buffer buffer) throws IOException
    {
        int fill=super.fill(buffer);
        if (fill>0)
            notIdle();
        return fill;
    }

    @Override
    public int flush(Buffer header, Buffer buffer, Buffer trailer) throws IOException
    {
        int l = super.flush(header, buffer, trailer);

        // If there was something to write and it wasn't written, then we are not writable.
        if (l==0 && ( header!=null && header.hasContent() || buffer!=null && buffer.hasContent() || trailer!=null && trailer.hasContent()))
        {
            synchronized (this)
            {
                _writable=false;
                if (!_dispatched)
                    updateKey();
            }
        }
        else if (l>0)
        {
            _writable=true;
            notIdle();
        }
        return l;
    }

    @Override
    public int flush(Buffer buffer) throws IOException
    {
        int l = super.flush(buffer);

        // If there was something to write and it wasn't written, then we are not writable.
        if (l==0 && buffer!=null && buffer.hasContent())
        {
            synchronized (this)
            {
                _writable=false;
                if (!_dispatched)
                    updateKey();
            }
        }
        else if (l>0)
        {
            _writable=true;
            notIdle();
        }

        return l;
    }

    /*
     * Allows thread to block waiting for further events.
     * Returns false on timeout; waits are woken by schedule() / notifyAll().
     */
    @Override
    public boolean blockReadable(long timeoutMs) throws IOException
    {
        synchronized (this)
        {
            if (isInputShutdown())
                throw new EofException();

            long now=_selectSet.getNow();
            long end=now+timeoutMs;
            boolean check=isCheckForIdle();
            setCheckForIdle(true);
            try
            {
                _readBlocked=true;
                while (!isInputShutdown() && _readBlocked)
                {
                    try
                    {
                        updateKey();
                        this.wait(timeoutMs>=0?(end-now):10000);
                    }
                    catch (InterruptedException e)
                    {
                        LOG.warn(e);
                    }
                    finally
                    {
                        now=_selectSet.getNow();
                    }

                    if (_readBlocked && timeoutMs>0 && now>=end)
                        return false;
                }
            }
            finally
            {
                _readBlocked=false;
                setCheckForIdle(check);
            }
        }
        return true;
    }

    /*
     * Allows thread to block waiting for further events.
     * Mirror of blockReadable for the write side.
     */
    @Override
    public boolean blockWritable(long timeoutMs) throws IOException
    {
        synchronized (this)
        {
            if (isOutputShutdown())
                throw new EofException();

            long now=_selectSet.getNow();
            long end=now+timeoutMs;
            boolean check=isCheckForIdle();
            setCheckForIdle(true);
            try
            {
                _writeBlocked=true;
                while (_writeBlocked && !isOutputShutdown())
                {
                    try
                    {
                        updateKey();
                        this.wait(timeoutMs>=0?(end-now):10000);
                    }
                    catch (InterruptedException e)
                    {
                        LOG.warn(e);
                    }
                    finally
                    {
                        now=_selectSet.getNow();
                    }
                    if (_writeBlocked && timeoutMs>0 && now>=end)
                        return false;
                }
            }
            finally
            {
                _writeBlocked=false;
                setCheckForIdle(check);
            }
        }
        return true;
    }

    /* short cut for busyselectChannelServerTest */
    public void clearWritable()
    {
        _writable=false;
    }

    /**
     * @see org.eclipse.jetty.io.AsyncEndPoint#scheduleWrite()
     */
    public void scheduleWrite()
    {
        if (_writable==true)
            LOG.debug("Required scheduleWrite {}",this);

        _writable=false;
        updateKey();
    }

    public boolean isWritable()
    {
        return _writable;
    }

    public boolean hasProgressed()
    {
        return false;
    }

    /**
     * Updates selection key. Adds operations types to the selection key as needed. No operations
     * are removed as this is only done during dispatch. This method records the new key and
     * schedules a call to doUpdateKey to do the keyChange
     */
    private void updateKey()
    {
        final boolean changed;
        synchronized (this)
        {
            int current_ops=-1;
            if (getChannel().isOpen())
            {
                // Interested in reads/writes only when not dispatched (or a thread is blocked waiting).
                boolean read_interest = _readBlocked || (!_dispatched && !_connection.isSuspended());
                boolean write_interest= _writeBlocked || (!_dispatched && !_writable);

                _interestOps =
                    ((!_socket.isInputShutdown() && read_interest ) ? SelectionKey.OP_READ : 0)
                |   ((!_socket.isOutputShutdown()&& write_interest) ? SelectionKey.OP_WRITE : 0);
                try
                {
                    current_ops = ((_key!=null && _key.isValid())?_key.interestOps():-1);
                }
                catch(Exception e)
                {
                    _key=null;
                    LOG.ignore(e);
                }
            }
            changed=_interestOps!=current_ops;
        }

        // Hand the change to the select set outside any key manipulation; the
        // selector thread applies it via doUpdateKey().
        if(changed)
        {
            _selectSet.addChange(this);
            _selectSet.wakeup();
        }
    }

    /**
     * Synchronize the interestOps with the actual key. Call is scheduled by a call to updateKey
     * <p>
     * Runs on the selector thread; (re)registers or cancels the key and destroys
     * the endpoint when the channel is no longer open.
     */
    void doUpdateKey()
    {
        synchronized (this)
        {
            if (getChannel().isOpen())
            {
                if (_interestOps>0)
                {
                    if (_key==null || !_key.isValid())
                    {
                        SelectableChannel sc = (SelectableChannel)getChannel();
                        if (sc.isRegistered())
                        {
                            updateKey();
                        }
                        else
                        {
                            try
                            {
                                _key=((SelectableChannel)getChannel()).register(_selectSet.getSelector(),_interestOps,this);
                            }
                            catch (Exception e)
                            {
                                LOG.ignore(e);
                                if (_key!=null && _key.isValid())
                                {
                                    _key.cancel();
                                }

                                if (_open)
                                {
                                    _selectSet.destroyEndPoint(this);
                                }
                                _open=false;
                                _key = null;
                            }
                        }
                    }
                    else
                    {
                        _key.interestOps(_interestOps);
                    }
                }
                else
                {
                    if (_key!=null && _key.isValid())
                        _key.interestOps(0);
                    else
                        _key=null;
                }
            }
            else
            {
                if (_key!=null && _key.isValid())
                    _key.cancel();

                if (_open)
                {
                    _open=false;
                    _selectSet.destroyEndPoint(this);
                }
                _key = null;
            }
        }
    }

    /**
     * Run by a dispatched thread: repeatedly hands the endpoint to the connection,
     * following connection upgrades, translating IO failures into close(), and
     * delivering the one-shot onInputShutdown() notification. Loops until
     * undispatch() succeeds (i.e. no async re-dispatch is pending).
     */
    protected void handle()
    {
        boolean dispatched=true;
        try
        {
            while(dispatched)
            {
                try
                {
                    while(true)
                    {
                        final AsyncConnection next = (AsyncConnection)_connection.handle();
                        if (next!=_connection)
                        {
                            LOG.debug("{} replaced {}",next,_connection);
                            Connection old=_connection;
                            _connection=next;
                            _manager.endPointUpgraded(this,old);
                            continue;
                        }
                        break;
                    }
                }
                catch (ClosedChannelException e)
                {
                    LOG.ignore(e);
                }
                catch (EofException e)
                {
                    LOG.debug("EOF", e);
                    try{close();}
                    catch(IOException e2){LOG.ignore(e2);}
                }
                catch (IOException e)
                {
                    LOG.warn(e.toString());
                    LOG.debug(e);
                    try{close();}
                    catch(IOException e2){LOG.ignore(e2);}
                }
                catch (Throwable e)
                {
                    LOG.warn("handle failed", e);
                    try{close();}
                    catch(IOException e2){LOG.ignore(e2);}
                }
                finally
                {
                    if (!_ishut && isInputShutdown() && isOpen())
                    {
                        _ishut=true;
                        try
                        {
                            _connection.onInputShutdown();
                        }
                        catch(Throwable x)
                        {
                            LOG.warn("onInputShutdown failed", x);
                            try{close();}
                            catch(IOException e2){LOG.ignore(e2);}
                        }
                        finally
                        {
                            updateKey();
                        }
                    }
                    dispatched=!undispatch();
                }
            }
        }
        finally
        {
            if (dispatched)
            {
                dispatched=!undispatch();
                while (dispatched)
                {
                    LOG.warn("SCEP.run() finally DISPATCHED");
                    dispatched=!undispatch();
                }
            }
        }
    }

    /*
     * @see org.eclipse.io.nio.ChannelEndPoint#close()
     */
    @Override
    public void close() throws IOException
    {
        try
        {
            super.close();
        }
        catch (IOException e)
        {
            LOG.ignore(e);
        }
        finally
        {
            updateKey();
        }
    }

    @Override
    public String toString()
    {
        // Do NOT use synchronized (this)
        // because it's very easy to deadlock when debugging is enabled.
        // We do a best effort to print the right toString() and that's it.
        SelectionKey key = _key;
        String keyString = "";
        if (key != null)
        {
            if (key.isValid())
            {
                if (key.isReadable())
                    keyString += "r";
                if (key.isWritable())
                    keyString += "w";
            }
            else
            {
                keyString += "!";
            }
        }
        else
        {
            keyString += "-";
        }
        return String.format("SCEP@%x{l(%s)<->r(%s),d=%b,open=%b,ishut=%b,oshut=%b,rb=%b,wb=%b,w=%b,i=%d%s}-{%s}",
                hashCode(),
                _socket.getRemoteSocketAddress(),
                _socket.getLocalSocketAddress(),
                _dispatched,
                isOpen(),
                isInputShutdown(),
                isOutputShutdown(),
                _readBlocked,
                _writeBlocked,
                _writable,
                _interestOps,
                keyString,
                _connection);
    }

    public SelectSet getSelectSet()
    {
        return _selectSet;
    }

    /**
     * Don't set the SoTimeout
     * @see org.eclipse.jetty.io.nio.ChannelEndPoint#setMaxIdleTime(int)
     */
    @Override
    public void setMaxIdleTime(int timeMs) throws IOException
    {
        _maxIdleTime=timeMs;
    }
}
package org.mtransit.android.commons.data; import java.util.Comparator; import org.mtransit.android.commons.ColorUtils; import org.mtransit.android.commons.ComparatorUtils; import org.mtransit.android.commons.MTLog; import org.mtransit.android.commons.TimeUtils; import org.mtransit.android.commons.provider.NewsProvider; import android.content.ContentValues; import android.database.Cursor; import android.text.TextUtils; public class News implements MTLog.Loggable { private static final String TAG = News.class.getSimpleName(); @Override public String getLogTag() { return TAG; } public static final NewsComparator NEWS_COMPARATOR = new NewsComparator(); private Integer id; // internal DB ID (useful to delete) OR NULL private String uuid; private long lastUpdateInMs; private long maxValidityInMs; private long createdAtInMs; private String targetUUID; private String color; private String authorName; private String authorUsername; private String authorPictureURL; private String authorProfileURL; private String text; private String textHTML; private String webURL; private String language; private String sourceId; private String sourceLabel; public News(Integer optId, String uuid, long lastUpdateInMs, long maxValidityInMs, long createdAtInMs, String targetUUID, String color, String authorName, String authorUsername, String authorPictureURL, String authorProfileURL, String text, String optTextHTML, String webURL, String language, String sourceId, String sourceLabel) { this.id = optId; this.uuid = uuid; this.lastUpdateInMs = lastUpdateInMs; this.maxValidityInMs = maxValidityInMs; this.createdAtInMs = createdAtInMs; this.targetUUID = targetUUID; setColor(color); this.authorName = authorName; this.authorUsername = authorUsername; this.authorPictureURL = authorPictureURL; this.authorProfileURL = authorProfileURL; this.text = text; this.textHTML = optTextHTML; this.webURL = webURL; this.language = language; this.sourceId = sourceId; this.sourceLabel = sourceLabel; } @Override 
public String toString() { return new StringBuilder(ServiceUpdate.class.getSimpleName()).append('[') .append("id:").append(this.id) .append(',') .append("uuid:").append(this.uuid) .append(',') .append("targetUUID:").append(this.targetUUID) .append(',') .append("text:").append(this.text) .append(']').toString(); } public Integer getId() { return this.id; } public String getUUID() { return uuid; } public boolean isUseful() { return this.lastUpdateInMs + this.maxValidityInMs >= TimeUtils.currentTimeMillis(); } public String getAuthorOneLine() { if (TextUtils.isEmpty(this.authorUsername)) { return this.authorName; } return this.authorName + " (" + this.authorUsername + ")"; } public String getAuthorProfileURL() { return authorProfileURL; } public long getLastUpdateInMs() { return lastUpdateInMs; } public long getCreatedAtInMs() { return createdAtInMs; } public String getColor() { return color; } private void setColor(String color) { this.color = color; this.colorInt = null; } private Integer colorInt = null; public int getColorInt() { if (colorInt == null) { colorInt = ColorUtils.parseColor(getColor()); } return colorInt; } public String getText() { return text; } public String getSourceLabel() { return sourceLabel; } public String getTextHTML() { if (TextUtils.isEmpty(textHTML)) { return getText(); } return textHTML; } public String getWebURL() { return webURL; } public static News fromCursor(Cursor cursor) { int idIdx = cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_ID); Integer id = cursor.isNull(idIdx) ? 
null : cursor.getInt(idIdx); String uuid = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_UUID)); long lastUpdateInMs = cursor.getLong(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_LAST_UPDATE)); long maxValidityInMs = cursor.getLong(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_MAX_VALIDITY_IN_MS)); long createdAtInMs = cursor.getLong(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_CREATED_AT)); String targetUUID = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_TARGET_UUID)); String color = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_COLOR)); String authorName = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_AUTHOR_NAME)); String authorUsername = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_AUTHOR_USERNAME)); String authorPictureURL = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_AUTHOR_PICTURE_URL)); String authorProfileURL = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_AUTHOR_PROFILE_URL)); String text = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_TEXT)); String textHTML = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_TEXT_HTML)); String webURL = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_WEB_URL)); String language = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_LANGUAGE)); String sourceId = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_SOURCE_ID)); String sourceLabel = cursor.getString(cursor.getColumnIndexOrThrow(NewsProvider.NewsColumns.T_NEWS_K_SOURCE_LABEL)); return new News(id, uuid, lastUpdateInMs, maxValidityInMs, createdAtInMs, targetUUID, color, authorName, authorUsername, authorPictureURL, 
authorProfileURL, text, textHTML, webURL, language, sourceId, sourceLabel); } public ContentValues toContentValues() { ContentValues contentValues = new ContentValues(); if (this.id != null) { contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_ID, this.id); } // ELSE AUTO INCREMENT contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_UUID, this.uuid); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_LAST_UPDATE, this.lastUpdateInMs); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_MAX_VALIDITY_IN_MS, this.maxValidityInMs); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_CREATED_AT, this.createdAtInMs); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_TARGET_UUID, this.targetUUID); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_COLOR, getColor()); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_AUTHOR_NAME, this.authorName); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_AUTHOR_USERNAME, this.authorUsername); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_AUTHOR_PICTURE_URL, this.authorPictureURL); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_AUTHOR_PROFILE_URL, this.authorProfileURL); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_TEXT, this.text); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_TEXT_HTML, this.textHTML); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_WEB_URL, this.webURL); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_LANGUAGE, this.language); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_SOURCE_ID, this.sourceId); contentValues.put(NewsProvider.NewsColumns.T_NEWS_K_SOURCE_LABEL, this.sourceLabel); return contentValues; } /** * {@link NewsProvider#PROJECTION_NEWS} */ public Object[] getCursorRow() { return new Object[] { id, uuid, lastUpdateInMs, maxValidityInMs, createdAtInMs, targetUUID, getColor(), authorName, authorUsername, authorPictureURL, authorProfileURL, text, textHTML, webURL, language, sourceId, sourceLabel }; } private static class NewsComparator implements 
Comparator<News> { @Override public int compare(News lhs, News rhs) { long lCreatedAtInMs = lhs == null ? 0l : lhs.getCreatedAtInMs(); long rCreatedAtInMs = rhs == null ? 0l : rhs.getCreatedAtInMs(); if (lCreatedAtInMs > rCreatedAtInMs) { return ComparatorUtils.BEFORE; } else if (rCreatedAtInMs > lCreatedAtInMs) { return ComparatorUtils.AFTER; } else { return ComparatorUtils.SAME; } } } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package pl.umk.mat.zawodyweb.compiler.classes; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.util.Properties; import java.util.Vector; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpException; import org.apache.commons.httpclient.NameValuePair; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.httpclient.methods.PostMethod; import org.apache.commons.httpclient.params.HttpClientParams; import pl.umk.mat.zawodyweb.checker.TestInput; import pl.umk.mat.zawodyweb.checker.TestOutput; import pl.umk.mat.zawodyweb.compiler.CompilerInterface; import pl.umk.mat.zawodyweb.database.CheckerErrors; /** * * @author Jakub Prabucki */ public class LanguageACM implements CompilerInterface { private Properties properties; @Override public void setProperties(Properties properties) { this.properties = properties; } @Override public TestOutput runTest(String path, TestInput input) { TestOutput result = new TestOutput(null); String acmSite = "http://uva.onlinejudge.org/"; String login = properties.getProperty("acm_uva.login"); String password = properties.getProperty("acm_uva.password"); HttpClient client = new HttpClient(); GetMethod logging = new GetMethod(acmSite); InputStream firstGet = null; HttpClientParams params = client.getParams(); params.setParameter("http.useragent", "Opera/9.64 (Windows NT 6.0; U; pl) Presto/2.1.1"); client.setParams(params); try { client.executeMethod(logging); firstGet = logging.getResponseBodyAsStream(); } catch (HttpException e) { result.setResult(CheckerErrors.UNDEF); result.setResultDesc(e.getMessage()); result.setText("HttpException"); logging.releaseConnection(); return result; } catch (IOException e) { result.setResult(CheckerErrors.UNDEF); 
result.setResultDesc(e.getMessage()); result.setText("IOException"); logging.releaseConnection(); return result; } BufferedReader br = null; try { br = new BufferedReader(new InputStreamReader(firstGet, "UTF-8")); } catch (UnsupportedEncodingException e) { } String line, name, value; Vector<NameValuePair> vectorLoginData = new Vector<NameValuePair>(); vectorLoginData.addElement(new NameValuePair("username", login)); vectorLoginData.addElement(new NameValuePair("passwd", password)); try { line = br.readLine(); while (line != null && !line.matches(".*class=\"mod_login\".*")) { line = br.readLine(); } while (line != null && !line.matches("(?i).*submit.*login.*")) { if (line.matches(".*hidden.*name=\".*value=\".*")) { name = line.split("name=\"")[1].split("\"")[0]; value = line.split("value=\"")[1].split("\"")[0]; vectorLoginData.addElement(new NameValuePair(name, value)); // FIXME: check if it's neccesary: URLDecoder.decode(value, "UTF-8")); } line = br.readLine(); } vectorLoginData.addElement(new NameValuePair("remember", "yes")); vectorLoginData.addElement(new NameValuePair("Submit", "Login")); } catch (IOException e) { result.setResult(CheckerErrors.UNDEF); result.setResultDesc(e.getMessage()); result.setText("IOException"); logging.releaseConnection(); return result; } logging.releaseConnection(); PostMethod sendAnswer = new PostMethod("http://uva.onlinejudge.org/index.php?option=com_comprofiler&task=login"); sendAnswer.setRequestHeader("Referer", acmSite); NameValuePair[] loginData = new NameValuePair[0]; loginData = vectorLoginData.toArray(loginData); sendAnswer.setRequestBody(loginData); try { client.executeMethod(sendAnswer); //br = new BufferedReader(new InputStreamReader(sendAnswer.getResponseBodyAsStream(), "UTF-8")); } catch (HttpException e) { result.setResult(CheckerErrors.UNDEF); result.setResultDesc(e.getMessage()); result.setText("HttpException"); sendAnswer.releaseConnection(); return result; } catch (IOException e) { 
result.setResult(CheckerErrors.UNDEF); result.setResultDesc(e.getMessage()); result.setText("IOException"); sendAnswer.releaseConnection(); return result; } sendAnswer.releaseConnection(); sendAnswer = new PostMethod("http://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=25&page=save_submission"); String lang = properties.getProperty("CODEFILE_EXTENSION"); if (lang.equals("c")) { lang = "1"; } else if (lang.equals("java")) { lang = "2"; } else if (lang.equals("cpp")) { lang = "3"; } else if (lang.equals("pas")) { lang = "4"; } NameValuePair[] dataSendAnswer = { new NameValuePair("problemid", ""), new NameValuePair("category", ""), new NameValuePair("localid", input.getText()), new NameValuePair("language", lang), new NameValuePair("code", path), new NameValuePair("submit", "Submit") }; sendAnswer.setRequestBody(dataSendAnswer); try { client.executeMethod(sendAnswer); } catch (HttpException e) { result.setResult(CheckerErrors.UNDEF); result.setResultDesc(e.getMessage()); result.setText("HttpException"); sendAnswer.releaseConnection(); return result; } catch (IOException e) { result.setResult(CheckerErrors.UNDEF); result.setResultDesc(e.getMessage()); result.setText("IOException"); sendAnswer.releaseConnection(); return result; } return result; } @Override public byte[] precompile(byte[] code) { return code; } @Override public String compile(byte[] code) { return new String(code); } @Override public String postcompile(String path) { return path; } @Override public void closeProgram(String path) { } }
package org.pentaho.di.trans.step;

import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleAttribute;
import org.pentaho.di.core.KettleAttributeInterface;
import org.pentaho.di.core.SQLStatement;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.logging.LoggingObjectType;
import org.pentaho.di.core.logging.SimpleLoggingObject;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.ObjectRevision;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.trans.DatabaseImpact;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.errorhandling.StreamInterface;
import org.w3c.dom.Document;
import org.w3c.dom.Node;

/**
 * Base class for step metadata implementations.  Provides do-nothing defaults for the
 * metadata contract (XML serialization, field analysis, impact analysis, SQL generation,
 * resource export) plus shared infrastructure: a lazily created log channel, and loading
 * of the step's attribute definitions from an optional {@code step-attributes.xml}
 * resource on the classpath next to the concrete subclass.
 */
public class BaseStepMeta implements Cloneable, StepAttributesInterface {

  /** Shared logging context used when no step-specific context is available. */
  public static final LoggingObjectInterface loggingObject =
      new SimpleLoggingObject("Step metadata", LoggingObjectType.STEPMETA, null);

  /** Classpath resource (relative to the subclass) describing the step's attributes. */
  public static final String STEP_ATTRIBUTES_FILE = "step-attributes.xml";

  /** Dirty flag: set when the step's metadata was modified since the last save/load. */
  private boolean changed;

  /** database connection object to use for searching fields &amp; checking steps */
  protected Database databases[];

  /** The repository that is being used for this step */
  protected Repository repository;

  /** The step this metadata belongs to; set by the owning transformation. */
  protected StepMeta parentStepMeta;

  /** Lazily built I/O description of this step; see {@link #getStepIOMeta()}. */
  protected StepIOMetaInterface ioMeta;

  /**
   * Creates the metadata object in an unchanged state and attempts to load the
   * step-attributes resource.  A failure to load is only printed, not propagated,
   * so a missing/broken attributes file does not prevent step construction.
   */
  public BaseStepMeta() {
    changed = false;

    try {
      loadStepAttributes();
    } catch(Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Shallow clone via {@link Object#clone()}.  Returns null instead of throwing when
   * cloning is unsupported (cannot happen in practice since we implement Cloneable).
   */
  public Object clone() {
    try {
      Object retval = super.clone();
      return retval;
    } catch(CloneNotSupportedException e) {
      return null;
    }
  }

  /** @param ch the new value of the changed flag */
  public void setChanged(boolean ch) {
    changed=ch;
  }

  /** Marks this metadata as changed. */
  public void setChanged() {
    changed=true;
  }

  /** @return true if this metadata was modified since the last save/load */
  public boolean hasChanged() {
    return changed;
  }

  /**
   * @return the fields of the table this step reads/writes, or null.
   * Default implementation: no table fields.
   */
  public RowMetaInterface getTableFields() {
    return null;
  }

  /**
   * Produces the XML string that describes this step's information.
   *
   * @return String containing the XML describing this step (empty by default).
   * @throws KettleException in case there is an XML conversion or encoding error
   */
  public String getXML() throws KettleException {
    String retval="";

    return retval;
  }

  /**
   * Determines which fields are added to / removed from / renamed / changed in the
   * row stream by this step.  The default implementation leaves the row unchanged.
   *
   * @param inputRowMeta Row containing fields that are used as input for the step;
   *                     implementations modify this in place.
   * @param name Name of the step
   * @param info Fields used as extra lookup information
   * @param nextStep the step receiving this step's output, if any
   * @param space variable space for variable substitution
   */
  public void getFields(RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException {
    // Default: no values are added to the row in the step
  }

  /**
   * Each step must be able to report on the impact it has on a database, table field, etc.
   * The default implementation reports no impact.
   *
   * @param impact The list of impacts @see org.pentaho.di.transMeta.DatabaseImpact
   * @param transMeta The transformation information
   * @param stepMeta The step information
   * @param prev The fields entering this step
   * @param input The previous step names
   * @param output The output step names
   * @param info The fields used as information by this step
   */
  public void analyseImpact(List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info) throws KettleStepException {
  }

  /**
   * Standard method to return one or more SQLStatement objects that the step needs in order to work correctly.
   * This can mean "create table", "create index" statements but also "alter table ... add/drop/modify" statements.
   *
   * @return The SQL Statements for this step or null if an error occurred. If nothing has to be done, the SQLStatement.getSQL() == null.
   * @param transMeta TransInfo object containing the complete transformation
   * @param stepMeta StepMeta object containing the complete step
   * @param prev Row containing meta-data for the input fields (no data)
   */
  public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev) throws KettleStepException {
    // default: this doesn't require any SQL statements to be executed!
    return new SQLStatement(stepMeta.getName(), null, null);
  }

  /**
   * Call this to cancel trailing database queries (too long running, etc)
   */
  public void cancelQueries() throws KettleDatabaseException {
    //
    // Cancel all defined queries...
    //
    if (databases!=null) {
      for (int i=0;i<databases.length;i++) {
        if (databases[i]!=null) databases[i].cancelQuery();
      }
    }
  }

  /**
   * Default a step doesn't use any arguments.
   * Implement this to notify the GUI that a window has to be displayed BEFORE launching a transformation.
   *
   * @return A row of argument values. (name and optionally a default value)
   */
  public Map<String,String> getUsedArguments() {
    return null;
  }

  /**
   * The natural way of data flow in a transformation is source-to-target.
   * However, this makes mapping to target tables difficult to do.
   * To help out here, we supply information to the transformation meta-data model about which fields are required for a step.
   * This allows us to automate certain tasks like the mapping to pre-defined tables.
   * The Table Output step in this case will output the fields in the target table using this method.
   *
   * This default implementation returns an empty row meaning that no fields are required for this step to operate.
   * @return the required fields for this steps meta data.
   * @throws KettleException in case the required fields can't be determined
   * @deprecated use {@code getRequiredFields(VariableSpace)} instead
   */
  public RowMetaInterface getRequiredFields() throws KettleException {
    return new RowMeta();
  }

  /**
   * The natural way of data flow in a transformation is source-to-target.
   * However, this makes mapping to target tables difficult to do.
   * To help out here, we supply information to the transformation meta-data model about which fields are required for a step.
   * This allows us to automate certain tasks like the mapping to pre-defined tables.
   * The Table Output step in this case will output the fields in the target table using this method.
   *
   * This default implementation returns an empty row meaning that no fields are required for this step to operate.
   * @param space the variable space to use to do variable substitution.
   * @return the required fields for this steps meta data.
   * @throws KettleException in case the required fields can't be determined
   */
  public RowMetaInterface getRequiredFields(VariableSpace space) throws KettleException {
    return new RowMeta();
  }

  /**
   * This method returns all the database connections that are used by the step.
   * @return an array of database connections meta-data.
   *         Return an empty array if no connections are used.
   */
  public DatabaseMeta[] getUsedDatabaseConnections() {
    return new DatabaseMeta[] { };
  }

  /**
   * @return the libraries that this step or plug-in uses.
   */
  public String[] getUsedLibraries() {
    return new String[] {};
  }

  /**
   * @return true if this step supports error "reporting" on rows: the ability to send rows to a certain target step.
   */
  public boolean supportsErrorHandling() {
    return false;
  }

  /**
   * This method is added to exclude certain steps from layout checking.
   * @since 2.5.0
   */
  public boolean excludeFromRowLayoutVerification() {
    return false;
  }

  /**
   * This method is added to exclude certain steps from copy/distribute checking.
   * @since 4.0.0
   */
  public boolean excludeFromCopyDistributeVerification() {
    return false;
  }

  /**
   * Get a list of all the resource dependencies that the step is depending on.
   * The base implementation returns a single (empty) reference for this step.
   *
   * @return a list of all the resource dependencies that the step is depending on
   */
  public List<ResourceReference> getResourceDependencies(TransMeta transMeta, StepMeta stepInfo) {
    List<ResourceReference> references = new ArrayList<ResourceReference>(5); // Lower the initial capacity - unusual to have more than 1 actually
    ResourceReference reference = new ResourceReference(stepInfo);
    references.add(reference);
    return references;
  }

  /**
   * Export any file-based resources this step uses; no-op (returns null) by default.
   */
  public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository) throws KettleException {
    return null;
  }

  /**
   * This returns the expected name for the dialog that edits a job entry.
   * The expected name is in the org.pentaho.di.ui tree and has a class name
   * that is the name of the job entry with 'Dialog' added to the end.
   *
   * e.g. if the job entry is org.pentaho.di.job.entries.zipfile.JobEntryZipFile
   * the dialog would be org.pentaho.di.ui.job.entries.zipfile.JobEntryZipFileDialog
   *
   * If the dialog class for a job entry does not match this pattern it should
   * override this method and return the appropriate class name
   *
   * @return full class name of the dialog
   */
  public String getDialogClassName() {
    String className = getClass().getCanonicalName();
    className = className.replaceFirst("\\.di\\.", ".di.ui.");
    if( className.endsWith("Meta") ) {
      // Strip the trailing "Meta" before appending "Dialog".
      className = className.substring(0, className.length()-4 );
    }
    className += "Dialog";
    return className;
  }

  /** @return the step this metadata belongs to */
  public StepMeta getParentStepMeta() {
    return parentStepMeta;
  }

  /** @param parentStepMeta the step this metadata belongs to */
  public void setParentStepMeta(StepMeta parentStepMeta) {
    this.parentStepMeta = parentStepMeta;
  }

  // TODO find a way to factor out these methods...
  //

  /** Lazily created log channel; always access through {@link #getLog()}. */
  protected LogChannelInterface log;

  /** Attribute definitions loaded from {@link #STEP_ATTRIBUTES_FILE}, in file order. */
  protected ArrayList<KettleAttributeInterface> attributes;

  // Late init to prevent us from logging blank step names, etc.
  public LogChannelInterface getLog() {
    if (log==null) {
      log = new LogChannel(this);
    }
    return log;
  }

  /** @return true if basic-level logging is enabled */
  public boolean isBasic() { return getLog().isBasic(); }

  /** @return true if detailed-level logging is enabled */
  public boolean isDetailed() { return getLog().isDetailed(); }

  /** @return true if debug-level logging is enabled */
  public boolean isDebug() { return getLog().isDebug(); }

  /** @return true if row-level logging is enabled */
  public boolean isRowLevel() { return getLog().isRowLevel(); }

  // Convenience delegates that forward to the lazily created log channel.

  public void logMinimal(String message) { getLog().logMinimal(message); }

  public void logMinimal(String message, Object...arguments) { getLog().logMinimal(message, arguments); }

  public void logBasic(String message) { getLog().logBasic(message); }

  public void logBasic(String message, Object...arguments) { getLog().logBasic(message, arguments); }

  public void logDetailed(String message) { getLog().logDetailed(message); }

  public void logDetailed(String message, Object...arguments) { getLog().logDetailed(message, arguments); }

  public void logDebug(String message) { getLog().logDebug(message); }

  public void logDebug(String message, Object...arguments) { getLog().logDebug(message, arguments); }

  public void logRowlevel(String message) { getLog().logRowlevel(message); }

  public void logRowlevel(String message, Object...arguments) { getLog().logRowlevel(message, arguments); }

  public void logError(String message) { getLog().logError(message); }

  public void logError(String message, Throwable e) { getLog().logError(message, e); }

  public void logError(String message, Object...arguments) { getLog().logError(message, arguments); }

  // LoggingObjectInterface-style accessors; all return null at this level because
  // the metadata object itself has no logging identity of its own.

  public String getLogChannelId() {
    return null;
  }

  public String getName() {
    return null;
  }

  public String getObjectCopy() {
    return null;
  }

  public ObjectId getObjectId() {
    return null;
  }

  public ObjectRevision getObjectRevision() {
    return null;
  }

  public LoggingObjectType getObjectType() {
    return null;
  }

  public LoggingObjectInterface getParent() {
    return null;
  }

  public RepositoryDirectory getRepositoryDirectory() {
    return null;
  }

  /**
   * Returns the Input/Output metadata for this step.
   * By default, each step produces and accepts optional input.
   */
  public StepIOMetaInterface getStepIOMeta() {
    if (ioMeta==null) {
      // Defaults: accepts input, produces output, optional input allowed;
      // no info streams, no target streams, not output-dynamic.
      ioMeta = new StepIOMeta(true, true, true, false, false, false);
    }
    return ioMeta;
  }

  /**
   * @return The list of optional input streams.
   * It allows the user to select from a list of possible actions like "New target step"
   */
  public List<StreamInterface> getOptionalStreams() {
    List<StreamInterface> list = new ArrayList<StreamInterface>();
    return list;
  }

  /**
   * When an optional stream is selected, this method is called to handled the ETL metadata implications of that.
   * @param stream The optional stream to handle.
   */
  public void handleStreamSelection(StreamInterface stream) {
  }

  /** Discards the cached I/O metadata so it is rebuilt on next access. */
  public void resetStepIoMeta() {
    ioMeta=null;
  }

  /**
   * Change step names into step objects to allow them to be name-changed etc.
   * No-op by default.
   * @param steps the steps to reference
   */
  public void searchInfoAndTargetSteps(List<StepMeta> steps) {
  }

  /**
   * @return Optional interface that allows an external program to inject step metadata in a standardized fasion.
   * This method will return null if the interface is not available for this step.
   */
  public StepMetaInjectionInterface getStepMetaInjectionInterface() {
    return null;
  }

  /**
   * Recursively searches an injection-entry tree for the entry with the given key.
   *
   * @param entries entries to search (including their nested details)
   * @param key the attribute key to find
   * @return the matching entry, or null if the key is not present anywhere in the tree
   */
  protected StepInjectionMetaEntry findParentEntry(List<StepInjectionMetaEntry> entries, String key) {
    for (StepInjectionMetaEntry look : entries) {
      if (look.getKey().equals(key)) return look;
      StepInjectionMetaEntry check = findParentEntry(look.getDetails(), key);
      if (check!=null) return check;
    }
    return null;
  }

  /**
   * Builds an injection entry for one attribute, localizing its description via the
   * given message package class.
   */
  protected StepInjectionMetaEntry createEntry(KettleAttributeInterface attr, Class<?> PKG) {
    return new StepInjectionMetaEntry(attr.getKey(), attr.getType(), BaseMessages.getString(PKG, attr.getDescription()));
  }

  /**
   * Describe the metadata attributes that can be injected into this step metadata object.
   * Top-level attributes become root entries; attributes with a parent are nested under
   * their parent's entry.
   *
   * @throws RuntimeException if an attribute references a parent that was not found
   */
  public List<StepInjectionMetaEntry> getStepInjectionMetadataEntries(Class<?> PKG) {
    List<StepInjectionMetaEntry> entries = new ArrayList<StepInjectionMetaEntry>();
    for (KettleAttributeInterface attr : attributes) {
      if (attr.getParent()==null) {
        entries.add(createEntry(attr, PKG));
      } else {
        StepInjectionMetaEntry entry = createEntry(attr, PKG);
        StepInjectionMetaEntry parentEntry = findParentEntry(entries, attr.getParent().getKey());
        if (parentEntry==null) {
          throw new RuntimeException("An error was detected in the step attributes' definition: the parent was not found for attribute "+attr);
        }
        parentEntry.getDetails().add(entry);
      }
    }
    return entries;
  }

  /**
   * Loads this step's attribute definitions from the {@link #STEP_ATTRIBUTES_FILE}
   * classpath resource, if present.  Each &lt;attribute&gt; node yields one
   * {@link KettleAttribute}; parents are resolved against attributes already read,
   * so a parent must appear before its children in the file.
   *
   * @throws KettleException if the resource exists but cannot be parsed
   */
  protected void loadStepAttributes() throws KettleException {
    try {
      InputStream inputStream = getClass().getResourceAsStream(STEP_ATTRIBUTES_FILE);
      if (inputStream!=null) {
        Document document = XMLHandler.loadXMLFile(inputStream);
        Node attrsNode = XMLHandler.getSubNode(document, "attributes");
        List<Node> nodes = XMLHandler.getNodes(attrsNode, "attribute");
        attributes = new ArrayList<KettleAttributeInterface>();
        for (Node node : nodes) {
          String key = XMLHandler.getTagAttribute(node, "id");
          String xmlCode = XMLHandler.getTagValue(node, "xmlcode");
          String repCode = XMLHandler.getTagValue(node, "repcode");
          String description = XMLHandler.getTagValue(node, "description");
          String tooltip = XMLHandler.getTagValue(node, "tooltip");
          int valueType = ValueMeta.getType( XMLHandler.getTagValue(node, "valuetype") );
          String parentId = XMLHandler.getTagValue(node, "parentid");

          KettleAttribute attribute = new KettleAttribute(key, xmlCode, repCode, description, tooltip, valueType, findParent(attributes, parentId));
          attributes.add(attribute);
        }
      }
    } catch(Exception e) {
      throw new KettleException("Unable to load file "+STEP_ATTRIBUTES_FILE, e);
    }
  }

  /**
   * Finds an already-loaded attribute by key; used to resolve parent references
   * while loading.  Returns null for an empty parentId or when not found.
   */
  public KettleAttributeInterface findParent(List<KettleAttributeInterface> attributes, String parentId) {
    if (Const.isEmpty(parentId)) {
      return null;
    }
    for (KettleAttributeInterface attribute : attributes) {
      if (attribute.getKey().equals(parentId)) {
        return attribute;
      }
    }
    return null;
  }

  /**
   * Linear search of the loaded attributes by key.
   * NOTE(review): returns null when the key is unknown, so the accessors below
   * will NPE on an unknown key — callers appear to rely on keys being valid.
   */
  public KettleAttributeInterface findAttribute(String key) {
    for (KettleAttributeInterface attribute : attributes) {
      if (attribute.getKey().equals(key)) {
        return attribute;
      }
    }
    return null;
  }

  /** @return the XML tag for the given attribute key */
  public String getXmlCode(String attributeKey) {
    return findAttribute(attributeKey).getXmlCode();
  }

  /** @return the repository code for the key, falling back to the XML code if blank */
  public String getRepCode(String attributeKey) {
    KettleAttributeInterface attr = findAttribute(attributeKey);
    return Const.isEmpty(attr.getRepCode())?attr.getXmlCode():attr.getRepCode();
  }

  /** @return the (message-key) description for the given attribute key */
  public String getDescription(String attributeKey) {
    return findAttribute(attributeKey).getDescription();
  }

  /** @return the (message-key) tooltip for the given attribute key */
  public String getTooltip(String attributeKey) {
    return findAttribute(attributeKey).getTooltip();
  }
}
package org.usfirst.frc.team1699.auto;

/**
 * An autonomous path backed by a file on disk.
 *
 * On construction the file at {@code path} is parsed into an int array via
 * {@code Utils.loadFileAsArray}, using {@code width} as the row width.
 * {@link #generateDirections()} is a placeholder and currently does nothing.
 */
public class AutoPath {

	/** Location of the file this path was loaded from. */
	private String path;
	/** Row width used when parsing the file. */
	private int width;
	/** File contents flattened into an int array (one value per cell). */
	private int[] pathData;

	/**
	 * Loads the path file immediately.
	 *
	 * @param path  location of the path file
	 * @param width row width to use when parsing
	 */
	public AutoPath(String path, int width) {
		this.width = width;
		this.path = path;
		this.pathData = Utils.loadFileAsArray(path, width);
	}

	/** Placeholder: intended to derive drive directions from the loaded data. */
	public void generateDirections() {
	}
}
package com.intellij.codeInsight.lookup.impl;

import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.lookup.*;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.EditorFontType;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.util.Iconable;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.meta.PsiMetaData;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.ui.RowIcon;
import com.intellij.ui.StrikeoutLabel;
import com.intellij.util.IconUtil;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;

/**
 * Renders one row of the code-completion lookup list.  A single shared panel of
 * four labels is reused for every cell: the highlighted prefix (label0), the rest
 * of the item name (label1), the tail text such as parameters (label2), and the
 * type text (label3).  Widths are measured in multiples of the bold 'W' glyph
 * width (FONT_WIDTH) and capped at MAX_LENGTH characters.
 */
class LookupCellRenderer implements ListCellRenderer {
  /** Icon flags passed to Iconable.getIcon; includes visibility when signatures are shown. */
  private final int ICON_FLAGS;
  /** Placeholder icon used so all rows align even when an item has no icon. */
  private final Icon EMPTY_ICON;
  public final int ICON_WIDTH;
  private final Font NORMAL_FONT;
  private final Font BOLD_FONT;
  private final Font SMALL_FONT;
  /** Width in pixels of a bold 'W' — used as the per-character budget everywhere below. */
  private final int FONT_WIDTH;

  public static final Color BACKGROUND_COLOR = new Color(235, 244, 254);
  private static final Color FOREGROUND_COLOR = Color.black;
  private static final Color GRAYED_FOREGROUND_COLOR = new Color(160, 160, 160);
  private static final Color SELECTED_BACKGROUND_COLOR = new Color(0, 82, 164);
  private static final Color SELECTED_FOREGROUND_COLOR = Color.white;
  private static final Color SELECTED_GRAYED_FOREGROUND_COLOR = Color.white;
  private static final Color PREFIX_FOREGROUND_COLOR = new Color(176, 0, 176);
  private static final Color SELECTED_PREFIX_FOREGROUND_COLOR = new Color(249, 236, 204);
  private static final Color EMPTY_ITEM_FOREGROUND_COLOR = FOREGROUND_COLOR;
  /** Maximum rendered width of a row, in 'W'-character units. */
  private static final int MAX_LENGTH = 70;

  private final boolean SHOW_SIGNATURES;

  private LookupImpl myLookup;

  private StrikeoutLabel myLabel0; // highlighted part of name
  private StrikeoutLabel myLabel1; // rest of name
  private StrikeoutLabel myLabel2; // parameters and tail text
  private JLabel myLabel3; // type
  private JPanel myPanel;

  private LookupElementPresentationImpl myLookupElementPresentation = new LookupElementPresentationImpl();

  public LookupCellRenderer(LookupImpl lookup) {
    EditorColorsScheme scheme = EditorColorsManager.getInstance().getGlobalScheme();
    NORMAL_FONT = scheme.getFont(EditorFontType.PLAIN);
    BOLD_FONT = scheme.getFont(EditorFontType.BOLD);
    SMALL_FONT = NORMAL_FONT;

    CodeInsightSettings settings = CodeInsightSettings.getInstance();
    SHOW_SIGNATURES = settings.SHOW_SIGNATURES_IN_LOOKUPS;
    ICON_FLAGS = SHOW_SIGNATURES ? Iconable.ICON_FLAG_VISIBILITY : 0;
    EMPTY_ICON = IconUtil.getEmptyIcon(SHOW_SIGNATURES);
    ICON_WIDTH = EMPTY_ICON.getIconWidth();

    myLookup = lookup;
    myLabel0 = new StrikeoutLabel("", SwingConstants.LEFT);
    myLabel0.setOpaque(true);
    myLabel1 = new StrikeoutLabel("", SwingConstants.LEFT);
    myLabel1.setOpaque(true);
    myLabel2 = new StrikeoutLabel("", SwingConstants.LEFT);
    myLabel2.setOpaque(true);
    myLabel3 = new JLabel("", SwingConstants.LEFT);
    myLabel3.setOpaque(true);

    // Panel overrides paint only to enable IDE-wide text antialiasing settings.
    myPanel = new JPanel(new BorderLayout()){
      public void paint(Graphics g){
        UISettings.setupAntialiasing(g);
        super.paint(g);
      }
    };
    myPanel.add(myLabel0, BorderLayout.WEST);
    JPanel panel = new JPanel(new BorderLayout());
    myPanel.add(panel, BorderLayout.CENTER);
    panel.add(myLabel1, BorderLayout.WEST);
    panel.add(myLabel2, BorderLayout.CENTER);
    panel.add(myLabel3, BorderLayout.EAST);

    JLabel label = myLabel0;
    //noinspection HardCodedStringLiteral
    label.setText("W"); //the widest letter known to me
    label.setIcon(null);
    label.setFont(BOLD_FONT);
    // Measure once; all later width math is expressed in units of this glyph width.
    FONT_WIDTH = label.getPreferredSize().width;

    final LookupItem[] items = lookup.getItems();
    if (items.length > 0) lookup.getList().setPrototypeCellValue(items[0]);
    myLookupElementPresentation.setItems(items);

    UIUtil.removeQuaquaVisualMarginsIn(myPanel);
  }

  /**
   * Fills the shared label panel for the given item and returns it.
   * Preferred (pre-sorted) items that are not selected get a light green background.
   * If an {@link ElementLookupRenderer} extension claims the item, rendering is
   * delegated to it; otherwise the name/tail/type labels are filled directly.
   */
  public Component getListCellRendererComponent(
      final JList list,
      Object value,
      int index,
      boolean isSelected,
      boolean hasFocus) {

    final LookupItem item = (LookupItem)value;
    Color background = isSelected ? SELECTED_BACKGROUND_COLOR : BACKGROUND_COLOR;
    final Color foreground = isSelected ? SELECTED_FOREGROUND_COLOR : FOREGROUND_COLOR;
    final int preferredCount = myLookup.getPreferredItemsCount();
    if (index <= preferredCount - 1 && preferredCount < list.getModel().getSize() - 1) {
      background = isSelected ? SELECTED_BACKGROUND_COLOR : new Color(220, 245, 220);
    }

    myLookupElementPresentation.setContext(item, background, foreground, list, isSelected);
    ElementLookupRenderer renderer = getRendererForItem(item);
    if (renderer != null) {
      myLabel2.setBackground(background);
      renderer.renderElement(item, item.getObject(), myLookupElementPresentation);
    } else {
      setItemTextLabels(item, background, foreground, isSelected, getName(item), isToStrikeout(item));
      setTailTextLabel(item, background, foreground, isSelected, getText2(item), null, isToStrikeout(item));
      setTypeTextLabel(item, background, foreground, list, getText3(item), null);
    }
    return myPanel;
  }

  /** @return the first extension renderer that handles this item's object, or null */
  @Nullable
  private static ElementLookupRenderer getRendererForItem(final LookupItem item) {
    for(ElementLookupRenderer renderer: Extensions.getExtensions(ElementLookupRenderer.EP_NAME)) {
      if (renderer.handlesItem(item.getObject())) return renderer;
    }
    return null;
  }

  /**
   * Splits the item name into the typed prefix (label0, prefix color) and the
   * remainder (label1).  The icon is attached to whichever label holds the start
   * of the visible text.
   */
  private void setItemTextLabels(LookupItem item, final Color background, final Color foreground, final boolean selected, final String name,
                                 final boolean toStrikeout){
    final String prefix = myLookup.getPrefix();
    String text;
    Icon icon;
    if (prefix.length() > 0 && StringUtil.startsWithIgnoreCase(name, prefix)){
      text = name.substring(0, prefix.length());
      icon = getIcon(item);
      setItemTextLabel(item, background, text, icon, myLabel0, selected ? SELECTED_PREFIX_FOREGROUND_COLOR : PREFIX_FOREGROUND_COLOR, toStrikeout);

      text = name.substring(prefix.length());
      icon = null;
      setItemTextLabel(item, background, text, icon, myLabel1, foreground, toStrikeout);
    }
    else{
      text = "";
      icon = null;
      setItemTextLabel(item, background, text, icon, myLabel0, selected ? SELECTED_PREFIX_FOREGROUND_COLOR : PREFIX_FOREGROUND_COLOR, toStrikeout);

      text = name;
      icon = getIcon(item);
      setItemTextLabel(item, background, text, icon, myLabel1, foreground, toStrikeout);
    }
  }

  /** Applies text, icon, font (bold when highlighted), strikeout and colors to one label. */
  private void setItemTextLabel(final LookupItem item, final Color background, final String text, final Icon icon, final StrikeoutLabel label,
                                final Color fg, final boolean strikeout) {
    boolean bold = item.getAttribute(LookupItem.HIGHLIGHTED_ATTR) != null;

    label.setText(text);
    label.setIcon(icon);
    label.setFont(bold ? BOLD_FONT : NORMAL_FONT);
    label.setStrikeout(strikeout);
    label.setBackground(background);
    label.setForeground(fg);
  }

  /**
   * Fills the tail-text label (label2), truncating with "..." when the full row
   * would exceed MAX_LENGTH character-widths.  "Small" tail text is grayed
   * (or kept white when selected).
   */
  private void setTailTextLabel(final LookupItem item,
                                final Color background,
                                Color foreground,
                                final boolean selected,
                                final String text,
                                final Font forceFont, final boolean strikeout) {
    StrikeoutLabel label = myLabel2;
    if (text != null){
      String s = text;
      int width = getTextWidth(item);
      // n = pixels over budget; convert back to characters to decide how much to cut.
      int n = width - MAX_LENGTH * FONT_WIDTH;
      if (n > 0){
        n = Math.min(n, (s.length() - 7) * FONT_WIDTH);
        if (n >= 0){
          s = s.substring(0, s.length() - n / FONT_WIDTH - 3) + "...";
        }
      }
      label.setText(s);
    }
    else{
      label.setText("");
    }

    boolean isSmall = item.getAttribute(LookupItem.TAIL_TEXT_SMALL_ATTR) != null;
    Font font = forceFont;
    if (font == null) font = isSmall ? SMALL_FONT : NORMAL_FONT;

    label.setStrikeout(strikeout);
    label.setBackground(background);
    label.setForeground(foreground);
    label.setFont(font);
    if (isSmall){
      label.setForeground(selected ? SELECTED_GRAYED_FOREGROUND_COLOR : GRAYED_FOREGROUND_COLOR);
    }
  }

  /** @return the item's tail text attribute (parameters etc.), or null */
  private static String getText2(final LookupItem item) {
    return (String)item.getAttribute(LookupItem.TAIL_TEXT_ATTR);
  }

  /**
   * Fills the type label (label3).  Over-long type text has generic arguments
   * collapsed to {@code <...>} until it fits the remaining cell width.  Items
   * carrying a UI color hint get that color as a one-space swatch; "empty" items
   * get the empty-item foreground.
   */
  private void setTypeTextLabel(LookupItem item, final Color background, Color foreground, JList list, final String text3, final Icon icon){
    myLabel3.setHorizontalTextPosition(SwingConstants.RIGHT);
    myLabel3.setIcon(icon);
    String text = text3;

    // Remaining budget in characters after the name/tail labels.
    final int maxWidth = (list.getFixedCellWidth() - myLabel0.getPreferredSize().width - myLabel1.getPreferredSize().width - myLabel2.getPreferredSize().width) / FONT_WIDTH - 3;
    JLabel label = myLabel3;
    if (text == null) text = "";
    else text += " ";
    while (text.length() > maxWidth) {
      // Collapse the innermost generic argument list; stop if nothing changes.
      String repl = text.replaceFirst("<((<\\.\\.\\.>)|[^<>])*>", "<...>");
      if (repl.equals(text)) {
        //text = "...";
        break;
      }
      text = repl;
    }

    label.setText(text);
    label.setFont(NORMAL_FONT);

    Color sampleBackground = background;

    Object o = item.getObject();
    if (o instanceof LookupValueWithUIHint && label.getText().length() == 0) {
      Color proposedBackground = ((LookupValueWithUIHint)o).getColorHint();

      if (proposedBackground == null) {
        proposedBackground = BACKGROUND_COLOR;
      }

      sampleBackground = proposedBackground;
      label.setText("  ");
    }

    if (item.getAttribute(LookupImpl.EMPTY_ITEM_ATTRIBUTE) != null){
      foreground = EMPTY_ITEM_FOREGROUND_COLOR;
    }
    label.setBackground(sampleBackground);
    label.setForeground(foreground);
  }

  /** @return the type hint text for the item, from UI hint or attribute; may be null */
  @Nullable
  private static String getText3(final LookupItem item) {
    Object o = item.getObject();
    String text;
    if (o instanceof LookupValueWithUIHint) {
      text = ((LookupValueWithUIHint)o).getTypeHint();
    }
    else {
      text = (String)item.getAttribute(LookupItem.TYPE_TEXT_ATTR);
    }
    return text;
  }

  /**
   * Resolves the display name for an item: explicit presentable text, PSI element
   * name, meta-data name, presentable lookup value, or String.valueOf fallback.
   * Never returns null.
   */
  private static String getName(final LookupItem item){
    final String presentableText = item.getPresentableText();
    if (presentableText != null) return presentableText;
    final Object o = item.getObject();
    String name = null;
    if (o instanceof PsiElement) {
      final PsiElement element = (PsiElement)o;
      if (element.isValid()) {
        name = PsiUtilBase.getName(element);
      }
    }
    else if (o instanceof PsiMetaData) {
      name = ((PsiMetaData)o).getName();
    }
    else if (o instanceof PresentableLookupValue ) {
      name = ((PresentableLookupValue)o).getPresentation();
    }
    else {
      name = String.valueOf(o);
    }
    if (name == null){
      name = "";
    }

    return name;
  }

  /**
   * Resolves the icon: explicit attribute, Iconable object, or PSI element icon.
   * Falls back to the empty placeholder; narrower icons are right-padded with an
   * EmptyIcon so text columns stay aligned.
   */
  private Icon getIcon(LookupItem item){
    Icon icon = (Icon)item.getAttribute(LookupItem.ICON_ATTR);
    if (icon == null) {
      Object o = item.getObject();

      if (o instanceof Iconable && !(o instanceof PsiElement)) {
        icon = ((Iconable)o).getIcon(ICON_FLAGS);
      }
      else {
        if (o instanceof LookupValueWithPsiElement) {
          o = ((LookupValueWithPsiElement)o).getElement();
        }
        if (o instanceof PsiElement) {
          final PsiElement element = (PsiElement)o;
          if (element.isValid()) {
            icon = element.getIcon(ICON_FLAGS);
          }
        }
      }
    }
    if (icon == null){
      icon = EMPTY_ICON;
    }
    else if (icon.getIconWidth() < EMPTY_ICON.getIconWidth()) {
      final RowIcon rowIcon = new RowIcon(2);
      rowIcon.setIcon(icon, 0);
      rowIcon.setIcon(new EmptyIcon(EMPTY_ICON.getIconWidth() - icon.getIconWidth()), 1);
      icon = rowIcon;
    }
    return icon;
  }

  /**
   * @return the pixel width needed to display the widest of the given items,
   * capped at MAX_LENGTH characters, plus room for icon and gap.
   */
  public int getMaximumWidth(final LookupItem[] items){
    int maxWidth = 0;
    for (LookupItem item : items) {
      maxWidth = Math.max(maxWidth, getTextWidth(item));
    }
    maxWidth = Math.min(maxWidth, MAX_LENGTH * FONT_WIDTH);
    return maxWidth + EMPTY_ICON.getIconWidth() + myLabel0.getIconTextGap() + FONT_WIDTH;
  }

  /**
   * Should be called in atomic action.
   *
   * @return width in pixels
   */
  private int getTextWidth(LookupItem item){
    ElementLookupRenderer renderer = getRendererForItem(item);
    if (renderer != null) {
      // Let the extension "render" into a measuring presentation that only sums widths.
      WidthCalculatingPresentation p = new WidthCalculatingPresentation(myLookupElementPresentation);
      renderer.renderElement(item, item.getObject(), p);
      return p.myTotalWidth;
    }

    String text = getName(item);

    // Reserve space equivalent to "XXX" between tail and type text.
    final @NonNls String TYPE_GAP = "XXX";
    text += getText3(item) + TYPE_GAP;

    int width = myPanel.getFontMetrics(NORMAL_FONT).stringWidth(text) + 2;

    String text2 = getText2(item);
    if (text2 != null){
      boolean isSmall = item.getAttribute(LookupItem.TAIL_TEXT_SMALL_ATTR) != null;
      FontMetrics fontMetrics = myPanel.getFontMetrics(isSmall ? SMALL_FONT : NORMAL_FONT);
      width += fontMetrics.stringWidth(text2);
    }

    return width;
  }

  /** @return true if the item's value asks to be rendered struck-through */
  private static boolean isToStrikeout(LookupItem item) {
    Object o = item.getObject();
    return o instanceof LookupValueWithUIHint2 && ((LookupValueWithUIHint2)o).isStrikeout();
  }

  /**
   * Presentation handed to {@link ElementLookupRenderer} extensions: every setter
   * writes straight into the shared labels using the context captured by
   * {@link #setContext}.
   */
  private class LookupElementPresentationImpl extends UserDataHolderBase implements LookupElementPresentation {
    private LookupItem myItem;
    private Color myBackground;
    private Color myForeground;
    private JList myList;
    private boolean mySelected;
    private LookupItem[] myItems;

    public void setItemText(final String text) {
      setItemTextLabels(myItem, myBackground, myForeground, mySelected, text, false);
    }

    public void setItemText(final String text, boolean strikeout) {
      setItemTextLabels(myItem, myBackground, myForeground, mySelected, text, strikeout);
    }

    public void setTailText(final String text) {
      setTailTextLabel(myItem, myBackground, myForeground, mySelected, text, null, false);
    }

    public void setTailText(final String text, final boolean strikeout) {
      setTailTextLabel(myItem, myBackground, myForeground, mySelected, text, null, strikeout);
    }

    public void setTailText(final String text, final Color foreground, final boolean bold) {
      setTailTextLabel(myItem, myBackground, mySelected ? SELECTED_FOREGROUND_COLOR : foreground, mySelected, text, bold ? BOLD_FONT : null, isToStrikeout(myItem));
    }

    public void setTypeText(final String text) {
      setTypeTextLabel(myItem, myBackground, myForeground, myList, text, null);
    }

    public void setTypeText(final String text, final Icon icon) {
      setTypeTextLabel(myItem, myBackground, myForeground, myList, text, icon);
    }

    /** Captures the per-cell rendering context before delegating to an extension. */
    public void setContext(final LookupItem item, final Color background, final Color foreground, final JList list, final boolean selected) {
      myItem = item;
      myBackground = background;
      myForeground = foreground;
      myList = list;
      mySelected = selected;
    }

    public void setItems(final LookupItem[] items) {
      myItems = items;
    }

    public LookupItem[] getItems() {
      return myItems;
    }

    public int getMaxLength() {
      return MAX_LENGTH;
    }

    public boolean trimText() {
      return false;
    }
  }

  /**
   * A "presentation" that does not paint anything: each setter just accumulates
   * the text's pixel width into {@link #myTotalWidth}, so extensions can be used
   * for measurement as well as rendering.
   */
  private class WidthCalculatingPresentation extends LookupElementPresentationImpl {
    private LookupElementPresentationImpl myBasePresentation;
    private int myTotalWidth = 2;

    private WidthCalculatingPresentation(final LookupElementPresentationImpl basePresentation) {
      myBasePresentation = basePresentation;
    }

    public void setItemText(final String text) {
      addWidth(text);
    }

    public void setItemText(final String text, final boolean strikeout) {
      addWidth(text);
    }

    public void setTailText(final String text) {
      addWidth(text);
    }

    public void setTailText(final String text, final boolean strikeout) {
      addWidth(text);
    }

    public void setTypeText(final String text) {
      // Mirror getTextWidth's "XXX" gap between tail and type text.
      addWidth(text + "XXX");
    }

    public void setTailText(final String text, final Color foreground, final boolean bold) {
      addWidth(text);
    }

    public void setTypeText(final String text, final Icon icon) {
      setTypeText(text);
      myTotalWidth += icon.getIconWidth()+2;
    }

    public boolean trimText() {
      return true;
    }

    private void addWidth(final String text) {
      myTotalWidth += myPanel.getFontMetrics(NORMAL_FONT).stringWidth(text);
    }

    public LookupItem[] getItems() {
      return myBasePresentation.getItems();
    }

    public <T> T getUserData(final Key<T> key) {
      return myBasePresentation.getUserData(key);
    }

    public <T> void putUserData(final Key<T> key, final T value) {
      myBasePresentation.putUserData(key, value);
    }
  }
}
package org.mtransit.parser.ca_central_fraser_valley_transit_system_bus; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.mtransit.parser.CleanUtils; import org.mtransit.parser.DefaultAgencyTools; import org.mtransit.parser.Pair; import org.mtransit.parser.SplitUtils; import org.mtransit.parser.SplitUtils.RouteTripSpec; import org.mtransit.parser.Utils; import org.mtransit.parser.gtfs.data.GCalendar; import org.mtransit.parser.gtfs.data.GCalendarDate; import org.mtransit.parser.gtfs.data.GRoute; import org.mtransit.parser.gtfs.data.GSpec; import org.mtransit.parser.gtfs.data.GStop; import org.mtransit.parser.gtfs.data.GTrip; import org.mtransit.parser.gtfs.data.GTripStop; import org.mtransit.parser.mt.data.MAgency; import org.mtransit.parser.mt.data.MDirectionType; import org.mtransit.parser.mt.data.MRoute; import org.mtransit.parser.mt.data.MTrip; import org.mtransit.parser.mt.data.MTripStop; // https://bctransit.com/*/footer/open-data public class CentralFraserValleyTransitSystemBusAgencyTools extends DefaultAgencyTools { public static void main(String[] args) { if (args == null || args.length == 0) { args = new String[3]; args[0] = "input/gtfs.zip"; args[1] = "../../mtransitapps/ca-central-fraser-valley-transit-system-bus-android/res/raw/"; args[2] = ""; // files-prefix } new CentralFraserValleyTransitSystemBusAgencyTools().start(args); } private HashSet<String> serviceIds; @Override public void start(String[] args) { System.out.printf("\nGenerating CFV Transit System bus data..."); long start = System.currentTimeMillis(); this.serviceIds = extractUsefulServiceIds(args, this, true); super.start(args); System.out.printf("\nGenerating CFV Transit System bus data... 
DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start)); } @Override public boolean excludeCalendar(GCalendar gCalendar) { if (this.serviceIds != null) { return excludeUselessCalendar(gCalendar, this.serviceIds); } return super.excludeCalendar(gCalendar); } @Override public boolean excludeCalendarDate(GCalendarDate gCalendarDates) { if (this.serviceIds != null) { return excludeUselessCalendarDate(gCalendarDates, this.serviceIds); } return super.excludeCalendarDate(gCalendarDates); } private static final String INCLUDE_AGENCY_ID = "6"; // CFV Transit System only @Override public boolean excludeRoute(GRoute gRoute) { if (!INCLUDE_AGENCY_ID.equals(gRoute.getAgencyId())) { return true; } return super.excludeRoute(gRoute); } @Override public boolean excludeTrip(GTrip gTrip) { if (this.serviceIds != null) { return excludeUselessTrip(gTrip, this.serviceIds); } return super.excludeTrip(gTrip); } @Override public Integer getAgencyRouteType() { return MAgency.ROUTE_TYPE_BUS; } @Override public long getRouteId(GRoute gRoute) { return Long.parseLong(gRoute.getRouteShortName()); // use route short name as route ID } @Override public String getRouteLongName(GRoute gRoute) { String routeLongName = gRoute.getRouteLongName(); routeLongName = CleanUtils.cleanSlashes(routeLongName); routeLongName = CleanUtils.cleanNumbers(routeLongName); routeLongName = CleanUtils.cleanStreetTypes(routeLongName); return CleanUtils.cleanLabel(routeLongName); } private static final String AGENCY_COLOR_GREEN = "34B233";// GREEN (from PDF Corporate Graphic Standards) private static final String AGENCY_COLOR_BLUE = "002C77"; // BLUE (from PDF Corporate Graphic Standards) private static final String AGENCY_COLOR = AGENCY_COLOR_GREEN; @Override public String getAgencyColor() { return AGENCY_COLOR; } private static final String COLOR_8CC63F = "8CC63F"; private static final String COLOR_8077B6 = "8077B6"; private static final String COLOR_F8931E = "F8931E"; private static final String 
COLOR_AC5C3B = "AC5C3B"; private static final String COLOR_A54499 = "A54499"; private static final String COLOR_00AEEF = "00AEEF"; private static final String COLOR_00AA4F = "00AA4F"; private static final String COLOR_0073AE = "0073AE"; private static final String COLOR_B3AA7E = "B3AA7E"; private static final String COLOR_77AE99 = "77AE99"; private static final String COLOR_7C3F25 = "7C3F25"; private static final String COLOR_FFC20E = "FFC20E"; private static final String COLOR_A3BADC = "A3BADC"; private static final String COLOR_ED1D8F = "ED1D8F"; private static final String COLOR_F49AC1 = "F49AC1"; private static final String COLOR_BF83B9 = "BF83B9"; private static final String COLOR_EC1D8D = "EC1D8D"; private static final String COLOR_367D0F = "367D0F"; private static final String COLOR_FFC10E = "FFC10E"; private static final String COLOR_F78B1F = "F78B1F"; private static final String COLOR_0073AD = "0073AD"; private static final String COLOR_49176D = "49176D"; private static final String COLOR_0D4D8B = "0D4D8B"; @Override public String getRouteColor(GRoute gRoute) { if (StringUtils.isEmpty(gRoute.getRouteColor())) { int rsn = Integer.parseInt(gRoute.getRouteShortName()); switch (rsn) { // @formatter:off case 1: return COLOR_8CC63F; case 2: return COLOR_8077B6; case 3: return COLOR_F8931E; case 4: return COLOR_AC5C3B; case 5: return COLOR_A54499; case 6: return COLOR_00AEEF; case 7: return COLOR_00AA4F; case 12: return COLOR_0073AE; case 15: return COLOR_49176D; case 16: return COLOR_B3AA7E; case 17: return COLOR_77AE99; case 21: return COLOR_7C3F25; case 22: return COLOR_FFC20E; case 23: return COLOR_A3BADC; case 24: return COLOR_ED1D8F; case 26: return COLOR_F49AC1; case 31: return COLOR_BF83B9; case 32: return COLOR_EC1D8D; case 33: return COLOR_367D0F; case 34: return COLOR_FFC10E; case 35: return COLOR_F78B1F; case 39: return COLOR_0073AD; case 40: return COLOR_49176D; case 66: return COLOR_0D4D8B; // @formatter:on default: if (isGoodEnoughAccepted()) { 
return AGENCY_COLOR_BLUE; } System.out.printf("\n%s: Unexpected route color: %s!\n", gRoute); System.exit(-1); return null; } } return super.getRouteColor(gRoute); } private static final String EXCHANGE_SHORT = "Exch"; private static final String BOURQUIN_EXCHANGE = "Bourquin " + EXCHANGE_SHORT; private static final String BLUERIDGE = "Blueridge"; private static final String DOWNTOWN = "Downtown"; private static final String BLUEJAY = "Bluejay"; private static final String HUNTINGDON = "Huntingdon"; private static final String BARRONS_WAY = "Barrons Way"; private static final String MC_KEE = "McKee"; private static final String UFV = "UFV"; private static final String CLEARBROOK = "Clearbrook"; private static final String SADDLE = "Saddle"; private static final String SANDY_HILL = "Sandy Hl"; private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2; static { HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>(); map2.put(5L, new RouteTripSpec(5L, MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.EAST.getId(), MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.WEST.getId()) .addTripSort(MDirectionType.EAST.intValue(), Arrays.asList(new String[] { "107273", // Westbound South Fraser at Countess "107499", // Bourquin Exchange Bay A })) .addTripSort(MDirectionType.WEST.intValue(), Arrays.asList(new String[] { "108262", // Bourquin Exchange Bay D "107022", "107023", "107024", "107199", "107082", "107258", "107273", // Westbound South Fraser at Countess })) .compileBothTripSort()); map2.put(16L, new RouteTripSpec(16L, MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.EAST.getId(), MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.WEST.getId()) .addTripSort(MDirectionType.EAST.intValue(), Arrays.asList(new String[] { "107080", // Downtown Exchange Bay B "107400", // Southbound North Parallel at Whatcom })) 
.addTripSort(MDirectionType.WEST.intValue(), Arrays.asList(new String[] { "107400", // Southbound North Parallel at Whatcom "107080", // Downtown Exchange Bay B })) .compileBothTripSort()); map2.put(23L, new RouteTripSpec(23L, MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.EAST.getId(), // Bourquin Exchance MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.WEST.getId()) // Highstreet Mall .addTripSort(MDirectionType.EAST.intValue(), Arrays.asList(new String[] { "120001", // != Highstreet Mall "107115", "120002", // != Highstreet Mall Bay A "105736", "107116", "107020", // Bourquin Exchange Bay E })) .addTripSort(MDirectionType.WEST.intValue(), Arrays.asList(new String[] { "107501", // Bourquin Exchange Bay C "107090", "107171", "107000", "120001", // != Highstreet Mall "107172", "120002", // != Highstreet Mall Bay A })) .compileBothTripSort()); map2.put(24L, new RouteTripSpec(24L, 0, MTrip.HEADSIGN_TYPE_STRING, "CW", 1, MTrip.HEADSIGN_TYPE_STRING, "CCW") .addTripSort(0, Arrays.asList(new String[] { "107500", // Bourquin Exchange Bay B "107021", "107013", "107166", "107020", // Bourquin Exchange Bay E })) .addTripSort(1, Arrays.asList(new String[] { "107500", // Bourquin Exchange Bay B "107122", "107303", "107085", "107499", // Bourquin Exchange Bay A })) .compileBothTripSort()); map2.put(26L, new RouteTripSpec(26L, 0, MTrip.HEADSIGN_TYPE_STRING, SANDY_HILL, 1, MTrip.HEADSIGN_TYPE_STRING, BOURQUIN_EXCHANGE) .addTripSort(0, Arrays.asList(new String[] { "108262", // Bourquin Exchange Bay D "107039", "120016", "105727", // != Eastbound 34970 block Old Clayburn "120017", // != Eastbound Sandy Hill at Old Clayburn => Bourquin Ex "107040", "107048", // != Eastbound McKinley Drive at McKinley Place "107049", // != Northbound McKinley at Sandy Hill "107053", // != Southbound Old Clayburn at Sandy Hill => Bourquin Ex })) .addTripSort(1, Arrays.asList(new String[] { "120017", // != Eastbound Sandy Hill at Old Clayburn 
<= START "107390", // Southbound McKee at Selkirk "107067", "107053", // != Southbound Old Clayburn at Sandy Hill <= START "107054", // != Southbound Old Clayburn at Burnside "120015", "107068", "107085", "108262", // !+ Bourquin Exchange Bay D => Sandy Hl "107499", // != Bourquin Exchange Bay A })) .compileBothTripSort()); map2.put(34L, new RouteTripSpec(34L, MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) .addTripSort(MDirectionType.NORTH.intValue(), Arrays.asList(new String[] { "107784", // == != Downtown Exchange Bay B "107756", "107819", "107834", // Southbound Stave Lake at Dewdney Trunk })) .addTripSort(MDirectionType.SOUTH.intValue(), Arrays.asList(new String[] { "107834", // Southbound Stave Lake at Dewdney Trunk "107847", "107756", "107784", // == != Downtown Exchange Bay B })) .compileBothTripSort()); map2.put(35L, new RouteTripSpec(35L, MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.EAST.getId(), MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.WEST.getId()) .addTripSort(MDirectionType.EAST.intValue(), Arrays.asList(new String[] { "107784", // Downtown Exchange Bay B "107855", // Northbound Draper at Douglas })) .addTripSort(MDirectionType.WEST.intValue(), Arrays.asList(new String[] { "107855", // Northbound Draper at Douglas "107784", // Downtown Exchange Bay B })) .compileBothTripSort()); ALL_ROUTE_TRIPS2 = map2; } @Override public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) { if (ALL_ROUTE_TRIPS2.containsKey(routeId)) { return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop); } return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop); } @Override public ArrayList<MTrip> splitTrip(MRoute mRoute, 
GTrip gTrip, GSpec gtfs) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return ALL_ROUTE_TRIPS2.get(mRoute.getId()).getAllTrips(); } return super.splitTrip(mRoute, gTrip, gtfs); } @Override public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, ALL_ROUTE_TRIPS2.get(mRoute.getId())); } return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS); } @Override public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return; // split } mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), gTrip.getDirectionId()); } @Override public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) { List<String> headsignsValues = Arrays.asList(mTrip.getHeadsignValue(), mTripToMerge.getHeadsignValue()); if (mTrip.getRouteId() == 1L) { if (Arrays.asList( BOURQUIN_EXCHANGE, DOWNTOWN, BLUERIDGE ).containsAll(headsignsValues)) { mTrip.setHeadsignString(BLUERIDGE, mTrip.getHeadsignId()); return true; } else if (Arrays.asList( BOURQUIN_EXCHANGE, MC_KEE ).containsAll(headsignsValues)) { mTrip.setHeadsignString(MC_KEE, mTrip.getHeadsignId()); return true; } } else if (mTrip.getRouteId() == 2L) { if (Arrays.asList( BARRONS_WAY, BOURQUIN_EXCHANGE, HUNTINGDON ).containsAll(headsignsValues)) { mTrip.setHeadsignString(HUNTINGDON, mTrip.getHeadsignId()); return true; } else if (Arrays.asList( BLUEJAY, DOWNTOWN ).containsAll(headsignsValues)) { mTrip.setHeadsignString(BLUEJAY, mTrip.getHeadsignId()); return true; } } else if (mTrip.getRouteId() == 3L) { if (Arrays.asList( CLEARBROOK, BOURQUIN_EXCHANGE ).containsAll(headsignsValues)) { mTrip.setHeadsignString(CLEARBROOK, mTrip.getHeadsignId()); return true; } else if (Arrays.asList( UFV, BOURQUIN_EXCHANGE 
).containsAll(headsignsValues)) { mTrip.setHeadsignString(UFV, mTrip.getHeadsignId()); return true; } } else if (mTrip.getRouteId() == 4L) { // TODO split if (Arrays.asList( SADDLE, DOWNTOWN ).containsAll(headsignsValues)) { mTrip.setHeadsignString(SADDLE, mTrip.getHeadsignId()); return true; } } if (isGoodEnoughAccepted()) { return super.mergeHeadsign(mTrip, mTripToMerge); } System.out.printf("\n%s: Unexpected trips to merge: %s & %s!\n", mTrip.getRouteId(), mTrip, mTripToMerge); System.exit(-1); return false; } private static final Pattern EXCHANGE = Pattern.compile("((^|\\W){1}(exchange)(\\W|$){1})", Pattern.CASE_INSENSITIVE); private static final String EXCHANGE_REPLACEMENT = "$2" + EXCHANGE_SHORT + "$4"; private static final Pattern UFV_ = Pattern.compile("((^|\\W){1}(ufv)(\\W|$){1})", Pattern.CASE_INSENSITIVE); private static final String UFV_REPLACEMENT = "$2UFV$4"; private static final Pattern STARTS_WITH_NUMBER = Pattern.compile("(^[\\d]+[\\S]*)", Pattern.CASE_INSENSITIVE); private static final Pattern ENDS_WITH_CONNECTOR = Pattern.compile("( connector$)", Pattern.CASE_INSENSITIVE); private static final Pattern ENDS_WITH_VIA = Pattern.compile("( via .*$)", Pattern.CASE_INSENSITIVE); private static final Pattern STARTS_WITH_TO = Pattern.compile("(^.* to )", Pattern.CASE_INSENSITIVE); @Override public String cleanTripHeadsign(String tripHeadsign) { if (Utils.isUppercaseOnly(tripHeadsign, true, true)) { tripHeadsign = tripHeadsign.toLowerCase(Locale.ENGLISH); } tripHeadsign = EXCHANGE.matcher(tripHeadsign).replaceAll(EXCHANGE_REPLACEMENT); tripHeadsign = UFV_.matcher(tripHeadsign).replaceAll(UFV_REPLACEMENT); tripHeadsign = ENDS_WITH_VIA.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY); tripHeadsign = STARTS_WITH_TO.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY); tripHeadsign = ENDS_WITH_CONNECTOR.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY); tripHeadsign = CleanUtils.CLEAN_AND.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT); 
tripHeadsign = STARTS_WITH_NUMBER.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY); tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign); return CleanUtils.cleanLabel(tripHeadsign); } private static final Pattern STARTS_WITH_BOUND = Pattern.compile("(^(east|west|north|south)bound)", Pattern.CASE_INSENSITIVE); @Override public String cleanStopName(String gStopName) { gStopName = STARTS_WITH_BOUND.matcher(gStopName).replaceAll(StringUtils.EMPTY); gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT); gStopName = EXCHANGE.matcher(gStopName).replaceAll(EXCHANGE_REPLACEMENT); gStopName = CleanUtils.cleanStreetTypes(gStopName); return CleanUtils.cleanLabel(gStopName); } }
package org.eclipse.birt.report.service.api; import java.util.Collection; /** * Viewer representation of a parameter definition * */ public class ParameterDefinition { // These are copied from IScalarParameterDefinition... public static final int TEXT_BOX = 0; public static final int LIST_BOX = 1; public static final int RADIO_BUTTON = 2; public static final int CHECK_BOX = 3; public static final int AUTO = 0; public static final int LEFT = 1; public static final int CENTER = 2; public static final int RIGHT = 3; public static final int TYPE_ANY = 0; public static final int TYPE_STRING = 1; public static final int TYPE_FLOAT = 2; public static final int TYPE_DECIMAL = 3; public static final int TYPE_DATE_TIME = 4; public static final int TYPE_BOOLEAN = 5; public static final int TYPE_INTEGER = 6; public static final int TYPE_DATE = 7; public static final int TYPE_TIME = 8; public static final int SELECTION_LIST_NONE = 0; public static final int SELECTION_LIST_DYNAMIC = 1; public static final int SELECTION_LIST_STATIC = 2; private long id; private String name; private String pattern; private String displayFormat; private String displayName; private String helpText; private String promptText; private int dataType; private String valueExpr; private int controlType; private boolean hidden; private boolean allowNull; private boolean allowBlank; private boolean isRequired; private boolean mustMatch; private boolean concealValue; private boolean distinct; private boolean isMultiValue; private ParameterGroupDefinition group; private Collection selectionList; public ParameterDefinition( long id, String name, String pattern, String displayFormat, String displayName, String helpText, String promptText, int dataType, String valueExpr, int controlType, boolean hidden, boolean allowNull, boolean allowBlank, boolean isRequired, boolean mustMatch, boolean concealValue, boolean distinct, boolean isMultiValue, ParameterGroupDefinition group, Collection selectionList ) { this.id = 
id; this.name = name; this.pattern = pattern; this.displayFormat = displayFormat; this.displayName = displayName; this.helpText = helpText; this.promptText = promptText; this.dataType = dataType; this.valueExpr = valueExpr; this.controlType = controlType; this.hidden = hidden; this.allowNull = allowNull; this.allowBlank = allowBlank; this.isRequired = isRequired; this.mustMatch = mustMatch; this.concealValue = concealValue; this.distinct = distinct; this.isMultiValue = isMultiValue; this.group = group; this.selectionList = selectionList; if ( group != null && group.cascade( ) ) { this.isMultiValue = false; } } /** * @return the id */ public long getId( ) { return id; } public String getName( ) { return name; } public String getPattern( ) { return pattern; } public String getDisplayFormat( ) { return displayFormat; } public String getDisplayName( ) { return displayName; } public String getHelpText( ) { return helpText; } public String getPromptText( ) { return promptText; } public int getDataType( ) { return dataType; } public String getValueExpr( ) { return valueExpr; } public int getControlType( ) { return controlType; } public boolean isHidden( ) { return hidden; } /** * @deprecated * @return */ public boolean allowNull( ) { return allowNull; } /** * @deprecated * @return */ public boolean allowBlank( ) { return allowBlank; } public boolean isRequired( ) { return isRequired; } public boolean mustMatch( ) { return mustMatch; } public boolean concealValue( ) { return concealValue; } public boolean isDistinct( ) { return distinct; } /** * @return the isMultiValue */ public boolean isMultiValue( ) { return isMultiValue; } public ParameterGroupDefinition getGroup( ) { return group; } public Collection getSelectionList( ) { return selectionList; } public boolean equals( Object obj ) { if ( name == null || !( obj instanceof ParameterDefinition ) ) return false; ParameterDefinition other = (ParameterDefinition) obj; return name.equals( other.getName( ) ); } public int 
hashCode( ) { if ( name == null ) return 0; return name.hashCode( ); } }
package org.opendaylight.ovsdb.lib;

import io.netty.channel.Channel;
import java.net.InetAddress;
import java.util.Collection;
import javax.net.ssl.SSLContext;

/**
 * OvsDBConnection Interface provides OVSDB connection management APIs which includes
 * both Active and Passive connections.
 * From the Library perspective, Active OVSDB connections are those that are initiated from
 * the Controller towards the ovsdb-manager.
 * While Passive OVSDB connections are those that are initiated from the ovs towards
 * the controller.
 *
 * <p>Applications that use OvsDBConnectionService can use the connect APIs to initiate Active
 * connections and can listen to the asynchronous Passive connections via registerConnectionListener
 * listener API.
 */
public interface OvsdbConnection {
    /**
     * connect API can be used by the applications to initiate Active connection from
     * the controller towards ovsdb-server.
     * @param address IP Address of the remote server that hosts the ovsdb server.
     * @param port Layer 4 port on which the remote ovsdb server is listening on.
     * @return OvsDBClient The primary Client interface for the ovsdb connection.
     */
    OvsdbClient connect(InetAddress address, int port);

    /**
     * connect API can be used by the applications to initiate Active ssl
     * connection from the controller towards ovsdb-server.
     * @param address IP Address of the remote server that hosts the ovsdb server.
     * @param port Layer 4 port on which the remote ovsdb server is listening on.
     * @param sslContext Netty sslContext for channel configuration
     * @return OvsDBClient The primary Client interface for the ovsdb connection.
     */
    OvsdbClient connectWithSsl(InetAddress address, int port, SSLContext sslContext);

    /**
     * Method to disconnect an existing connection.
     * @param client that represents the ovsdb connection.
     */
    void disconnect(OvsdbClient client);

    /**
     * Method to start ovsdb server for passive connection.
     * @return true if the passive-connection manager was started successfully.
     */
    boolean startOvsdbManager();

    /**
     * Method to start ovsdb server for passive connection with SSL.
     * @param ovsdbListenPort Layer 4 port on which to listen for passive connections.
     * @param sslContext SSL context used to configure accepted channels.
     * @param protocols enabled SSL/TLS protocol names.
     * @param cipherSuites enabled cipher suite names.
     * @return true if the SSL passive-connection manager was started successfully.
     */
    boolean startOvsdbManagerWithSsl(int ovsdbListenPort, SSLContext sslContext,
            String[] protocols, String[] cipherSuites);

    /**
     * Method to register a Passive Connection Listener with the ConnectionService.
     * @param listener Passive Connection listener interested in Passive OVSDB connection requests.
     */
    void registerConnectionListener(OvsdbConnectionListener listener);

    /**
     * Method to unregister a Passive Connection Listener with the ConnectionService.
     * @param listener the previously registered listener to remove.
     */
    void unregisterConnectionListener(OvsdbConnectionListener listener);

    /**
     * Returns a Collection of all the active OVSDB Connections.
     *
     * @return Collection of all the active OVSDB Connections
     */
    Collection<OvsdbClient> getConnections();

    /**
     * Returns the client associated with the given Netty channel.
     * @param channel the Netty channel of an existing connection.
     * @return the OvsdbClient bound to that channel.
     */
    OvsdbClient getClient(Channel channel);
}
package com.kennyc.adapters; import android.content.Context; import android.content.res.Resources; import android.support.annotation.ColorInt; import android.support.annotation.ColorRes; import android.support.annotation.LayoutRes; import android.support.annotation.Nullable; import android.support.annotation.StringRes; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import java.util.ArrayList; import java.util.List; public abstract class BaseRecyclerAdapter<VH extends RecyclerView.ViewHolder, T> extends RecyclerView.Adapter<VH> { protected final String TAG = getClass().getSimpleName(); private List<T> mItems; private LayoutInflater mInflater; private Resources mResources; /** * Simple constructor for creating a BaseRecyclerAdapter * * @param context The context the adapter is running in * @param collection A list of items to populate the adapter with, can be null. If passing a null list, * {@link #addItem(Object)} will throw an exception as the list type is undefined. 
The list * needs to be created first with {@link #addItems(List)} */ public BaseRecyclerAdapter(Context context, List<T> collection) { mItems = collection; mInflater = LayoutInflater.from(context); mResources = context.getResources(); } /** * Adds an item to the list, {@link #notifyItemRangeInserted(int, int)} will be called * * @param object Object to add to the adapter */ public void addItem(T object) { // An exception is thrown instead of creating a List object since the type of list in unknown if (mItems == null) throw new NullPointerException("Adapter list has not been initialized"); mItems.add(object); notifyItemInserted(mItems.size()); } /** * Adds an item to the list at the given position, {@link #notifyItemRangeInserted(int, int)} will be called * * @param object Object to add to the adapter * @param position Position to add the object */ public void addItem(T object, int position) { // An exception is thrown instead of creating a List object since the type of list in unknown if (mItems == null) throw new NullPointerException("Adapter list has not been initialized"); mItems.add(position, object); notifyItemRangeInserted(position, 1); } /** * Adds a list of items to the adapter list, {@link #notifyItemRangeInserted(int, int)} will be called * * @param items List of items to add to the adapter */ public void addItems(List<T> items) { if (items == null || items.isEmpty()) { return; } int startingSize = 0; int endSize = 0; if (mItems == null) { mItems = items; } else { startingSize = mItems.size(); mItems.addAll(items); } endSize = mItems.size(); notifyItemRangeInserted(startingSize, endSize); } /** * Adds a list of items to the adapter list at the given position, {@link #notifyItemRangeInserted(int, int)} will be called * * @param items List of items to add to the adapter * @param position The position to add the items into the adapter */ public void addItems(List<T> items, int position) { if (items == null || items.isEmpty()) { return; } 
mItems.addAll(position, items); notifyItemRangeInserted(position, items.size()); } /** * Removes an object from the list, {@link #notifyItemRangeRemoved(int, int)} (int, int)} will be called * * @param object The object to remove from the adapter * @return If the object was removed */ public boolean removeItem(T object) { if (mItems != null) { int position = mItems.indexOf(object); return position >= 0 && removeItem(position) != null; } return false; } /** * Removes an item at the given position, {@link #notifyItemRemoved(int)} will be called * * @param position The position to remove from the adapter * @return The item removed */ public T removeItem(int position) { if (mItems != null) { T removedItem = mItems.remove(position); notifyItemRemoved(position); return removedItem; } return null; } /** * Removes a range of items from the adapter, {@link #notifyItemRangeRemoved(int, int)} will be called * * @param start Starting position of removal * @param end Ending position of removal */ public void removeItems(int start, int end) { mItems.subList(start, end).clear(); notifyItemRangeRemoved(start, end - start); } /** * Returns the index of the item in regards to the backing list. If not found, {@link RecyclerView#NO_POSITION} will be returned * * @param object The object to search for * @return The index of the item. {@link RecyclerView#NO_POSITION} will be returned if not found */ public int indexOf(T object) { return mItems != null && !mItems.isEmpty() ? mItems.indexOf(object) : RecyclerView.NO_POSITION; } /** * Removes all items from the list, {@link #notifyItemRangeRemoved(int, int)} will be called */ public void clear() { if (mItems != null) { int size = mItems.size(); mItems.clear(); notifyItemRangeRemoved(0, size); } } /** * Returns the entire list. This is <b><i>not</i></b> a copy of the list. 
If a copy of the list is * needed, see {@link #retainItems()} * * @return The entire list of items in the adapter */ protected List<T> getAllItems() { return mItems; } /** * Returns the object for the given position * * @param position The position to return * @return The item at the given position */ public T getItem(int position) { return mItems.get(position); } /** * Returns an ArrayList of the items in the adapter, used for saving the items for configuration changes * * @return A copy of the items in the adapter */ public ArrayList<T> retainItems() { return new ArrayList<>(mItems); } @Override public int getItemCount() { return mItems != null ? mItems.size() : 0; } /** * Returns if the adapter is empty * * @return If the adapter is empty */ public boolean isEmpty() { return getItemCount() <= 0; } /** * Returns the color for the given color resource * * @param color Color resource id * @return */ @ColorInt protected int getColor(@ColorRes int color) { return mResources.getColor(color); } /** * Returns the string for the given string resource * * @param string String resource * @return */ protected String getString(@StringRes int string) { return mResources.getString(string); } /** * Inflates a view from the given layout resource * * @param layoutId Layout resource to inflate * @param parent Optional parent view * @return */ protected View inflateView(@LayoutRes int layoutId, @Nullable ViewGroup parent) { if (parent == null) { return mInflater.inflate(layoutId, null); } return mInflater.inflate(layoutId, parent, false); } /** * Frees up any resources tied to the adapter. Should be called in an activities onDestroy lifecycle method if needed */ public void onDestroy() { } }
package fr.prcaen.externalresources.model;

import org.junit.Test;

import java.io.StringReader;
import java.util.HashMap;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertTrue;

/**
 * Unit tests for {@link Resources}.
 *
 * Each test builds its own backing map: the previous shared static {@code EMPTY_HASH_MAP}
 * was mutated by several tests (and aliased by both instances in {@code testMerge}), which
 * made the assertions depend on test execution order.
 */
public final class ResourcesTest {

  /** Builds a fresh single-entry map so tests never share mutable state. */
  private static HashMap<String, Resource> mapOf(String key, Resource value) {
    HashMap<String, Resource> map = new HashMap<>();
    map.put(key, value);
    return map;
  }

  @Test
  public void testAdd() throws Exception {
    Resources resources = new Resources();
    resources.add("foo", new Resource("bar"));

    assertEquals(1, resources.members.size());
  }

  @Test
  public void testEntrySet() throws Exception {
    Resources resources = new Resources(mapOf("foo", new Resource("bar")));

    assertEquals(1, resources.entrySet().size());
  }

  @Test
  public void testHas() throws Exception {
    Resources resources = new Resources(mapOf("foo", new Resource("bar")));

    assertTrue(resources.has("foo"));
  }

  @Test
  public void testGet() throws Exception {
    Resources resources = new Resources(mapOf("foo", new Resource("bar")));

    assertEquals(new Resource("bar").getAsString(), resources.get("foo").getAsString());
  }

  @Test
  public void testMerge() throws Exception {
    Resources firstResources = new Resources(new HashMap<String, Resource>());
    firstResources.add("foo", new Resource("bar"));
    firstResources.add("month", new Resource("june"));

    Resources lastResources = new Resources(new HashMap<String, Resource>());
    lastResources.add("hello", new Resource("world"));
    lastResources.add("month", new Resource("may"));

    firstResources.merge(lastResources);

    // The merged instance keeps its own entries and gains the other's;
    // on key collision ("month") the merged-in value wins.
    assertEquals(3, firstResources.members.size());
    assertTrue(firstResources.has("foo"));
    assertTrue(firstResources.has("hello"));
    assertTrue(firstResources.has("month"));
    assertEquals("may", firstResources.get("month").getAsString());
  }

  @Test
  public void testFromJsonReader() throws Exception {
    assertNotNull(Resources.fromJson(new StringReader("{\"foo\":\"bar\"}")));
  }

  @Test
  public void testFromJsonString() throws Exception {
    assertNotNull(Resources.fromJson("{\"foo\":\"bar\"}"));
  }

  @Test
  public void testFromJsonInputStream() throws Exception {
    assertNotNull(Resources.fromJson(getClass().getResourceAsStream("/test.json")));
  }

  @Test
  public void testFromXmlReader() throws Exception {
    assertNotNull(Resources.fromXml(new StringReader("<?xml version=\"1.0\" encoding=\"utf-8\"?><resources><integer name=\"max_speed\">75</integer></resources>")));
  }

  @Test
  public void testFromXmlString() throws Exception {
    assertNotNull(Resources.fromXml("<?xml version=\"1.0\" encoding=\"utf-8\"?><resources><integer name=\"max_speed\">75</integer></resources>"));
  }

  @Test
  public void testFromXmlInputStream() throws Exception {
    assertNotNull(Resources.fromXml(getClass().getResourceAsStream("/test.xml")));
  }
}
package liquibase.database.core;

import liquibase.changelog.column.ColumnChangeLog;
import liquibase.database.AbstractJdbcDatabase;
import liquibase.database.DatabaseConnection;
import liquibase.database.ObjectQuotingStrategy;
import liquibase.database.jvm.JdbcConnection;
import liquibase.logging.Logger;
import liquibase.structure.DatabaseObject;
import liquibase.exception.DatabaseException;
import liquibase.executor.ExecutorService;
import liquibase.logging.LogFactory;
import liquibase.statement.SqlStatement;
import liquibase.statement.core.RawCallStatement;
import liquibase.statement.core.RawSqlStatement;
import liquibase.structure.core.Table;
import liquibase.util.JdbcUtils;
import liquibase.util.StringUtils;

import java.math.BigInteger;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.*;

/**
 * Encapsulates PostgreSQL database support.
 */
public class PostgresDatabase extends AbstractJdbcDatabase {
    public static final String PRODUCT_NAME = "PostgreSQL";

    private Set<String> systemTablesAndViews = new HashSet<String>();

    // Upper-cased PostgreSQL reserved words, consulted by isReservedWord(String).
    private Set<String> reservedWords = new HashSet<String>();

    public PostgresDatabase() {
        super.setCurrentDateTimeFunction("NOW()");
        // Word list per the PostgreSQL keyword documentation.
        reservedWords.addAll(Arrays.asList("ALL", "ANALYSE", "AND", "ANY", "ARRAY", "AS", "ASC", "ASYMMETRIC", "AUTHORIZATION", "BINARY", "BOTH", "CASE", "CAST", "CHECK",
                "COLLATE", "COLLATION", "COLUMN", "CONCURRENTLY", "CONSTRAINT", "CREATE", "CURRENT_CATALOG", "CURRENT_DATE", "CURRENT_ROLE", "CURRENT_SCHEMA", "CURRENT_TIME",
                "CURRENT_TIMESTAMP", "CURRENT_USER", "DEFAULT", "DEFERRABLE", "DESC", "DISTINCT", "DO", "ELSE", "END", "EXCEPT", "FALSE", "FETCH", "FOR", "FOREIGN", "FROM",
                "FULL", "GRANT", "GROUP", "HAVING", "ILIKE", "IN", "INITIALLY", "INTERSECT", "INTO", "IS", "ISNULL", "JOIN", "LEADING", "LEFT", "LIKE", "LIMIT", "LITERAL",
                "LOCALTIME", "LOCALTIMESTAMP", "NOT", "NULL", "OFFSET", "ON", "ONLY", "OR", "ORDER", "OUTER", "OVER", "OVERLAPS", "PLACING", "PRIMARY", "REFERENCES",
                "RETURNING", "RIGHT", "SELECT", "SESSION_USER", "SIMILAR", "SOME", "SYMMETRIC", "TABLE", "THEN", "TO", "TRAILING", "TRUE", "UNION", "UNIQUE", "USER", "USING",
                "VARIADIC", "VERBOSE", "WHEN", "WHERE", "WINDOW", "WITH"));
        super.sequenceNextValueFunction = "nextval('%s')";
        super.sequenceCurrentValueFunction = "currval('%s')";
        super.unmodifiableDataTypes.addAll(Arrays.asList("bool", "int4", "int8", "float4", "float8", "bigserial", "serial", "bytea", "timestamptz", "text"));
        // PostgreSQL folds unquoted identifiers to lower case, unlike the SQL standard.
        super.unquotedObjectsAreUppercased = false;
    }

    @Override
    public String getShortName() {
        return "postgresql";
    }

    @Override
    protected String getDefaultDatabaseProductName() {
        return "PostgreSQL";
    }

    @Override
    public Integer getDefaultPort() {
        return 5432;
    }

    @Override
    public Set<String> getSystemViews() {
        return systemTablesAndViews;
    }

    @Override
    public int getPriority() {
        return PRIORITY_DEFAULT;
    }

    @Override
    public boolean supportsInitiallyDeferrableColumns() {
        return true;
    }

    @Override
    public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException {
        return PRODUCT_NAME.equalsIgnoreCase(conn.getDatabaseProductName());
    }

    @Override
    public String getDefaultDriver(String url) {
        if (url.startsWith("jdbc:postgresql:")) {
            return "org.postgresql.Driver";
        }
        return null;
    }

    @Override
    public boolean supportsCatalogInObjectName(Class<? extends DatabaseObject> type) {
        return false;
    }

    @Override
    public boolean supportsSequences() {
        return true;
    }

    @Override
    public String getDatabaseChangeLogTableName() {
        // Lower-cased to match PostgreSQL's unquoted-identifier folding.
        return super.getDatabaseChangeLogTableName().toLowerCase();
    }

    @Override
    public String getDatabaseChangeLogLockTableName() {
        return super.getDatabaseChangeLogLockTableName().toLowerCase();
    }

    /**
     * In addition to the base behavior, warns when connected to an EnterpriseDB instance
     * configured with edb_redwood_date=on, which silently converts DATE columns to TIMESTAMPs.
     */
    @Override
    public void setConnection(DatabaseConnection conn) {
        super.setConnection(conn);

        Logger log = LogFactory.getInstance().getLog();

        if (conn instanceof JdbcConnection) {
            Statement statement = null;
            ResultSet resultSet = null;
            try {
                statement = ((JdbcConnection) conn).createStatement();
                resultSet = statement.executeQuery("select setting from pg_settings where name = 'edb_redwood_date'");
                if (resultSet.next()) {
                    String setting = resultSet.getString(1);
                    if (setting != null && setting.equals("on")) {
                        log.warning("EnterpriseDB " + conn.getURL() + " does not store DATE columns. Auto-converts them to TIMESTAMPs. (edb_redwood_date=true)");
                    }
                }
            } catch (Exception e) {
                // Best effort only; the setting does not exist on plain PostgreSQL.
                log.info("Cannot check pg_settings", e);
            } finally {
                JdbcUtils.close(resultSet, statement);
            }
        }
    }

    /**
     * Treats tables in the pg_catalog and pg_toast schemas as system objects.
     */
    @Override
    public boolean isSystemObject(DatabaseObject example) {
        if (example instanceof Table) {
            if (example.getSchema() != null) {
                if ("pg_catalog".equals(example.getSchema().getName())
                        || "pg_toast".equals(example.getSchema().getName())) {
                    return true;
                }
            }
        }
        return super.isSystemObject(example);
    }

    @Override
    public boolean supportsTablespaces() {
        return true;
    }

    @Override
    public String getAutoIncrementClause() {
        // PostgreSQL auto-increment is expressed via serial/sequence types, not a clause.
        return "";
    }

    @Override
    public boolean generateAutoIncrementStartWith(BigInteger startWith) {
        return false;
    }

    @Override
    public boolean generateAutoIncrementBy(BigInteger incrementBy) {
        return false;
    }

    @Override
    public String escapeObjectName(String objectName, Class<? extends DatabaseObject> objectType) {
        if (quotingStrategy == ObjectQuotingStrategy.LEGACY && hasMixedCase(objectName)) {
            // Mixed-case identifiers must be quoted or PostgreSQL would fold them to lower case.
            return "\"" + objectName + "\"";
        } else if (objectType != null && objectType.isAssignableFrom(ColumnChangeLog.class)) {
            return (objectName != null && !objectName.isEmpty()) ? objectName.trim() : objectName;
        }

        return super.escapeObjectName(objectName, objectType);
    }

    @Override
    public String correctObjectName(String objectName, Class<? extends DatabaseObject> objectType) {
        if (objectName == null || quotingStrategy != ObjectQuotingStrategy.LEGACY) {
            return super.correctObjectName(objectName, objectType);
        }

        if (objectName.contains("-") || hasMixedCase(objectName) || startsWithNumeric(objectName) || isReservedWord(objectName)) {
            // Names that would need quoting are kept verbatim.
            return objectName;
        } else {
            return objectName.toLowerCase();
        }
    }

    /*
     * Check if given string has case problems according to postgresql documentation.
     * If there are at least one characters with upper case while all other are in lower case (or vice versa) this string should be escaped.
     *
     * Note: This may make postgres support more case sensitive than normally is, but needs to be left in for backwards compatibility.
     * Method is public so a subclass extension can override it to always return false.
     */
    protected boolean hasMixedCase(String tableName) {
        if (tableName == null) {
            return false;
        }
        return StringUtils.hasUpperCase(tableName) && StringUtils.hasLowerCase(tableName);
    }

    @Override
    public boolean isReservedWord(String tableName) {
        return reservedWords.contains(tableName.toUpperCase());
    }

    /*
     * Get the current search paths
     */
    private List<String> getSearchPaths() {
        List<String> searchPaths = null;

        try {
            DatabaseConnection con = getConnection();

            if (con != null) {
                String searchPathResult = (String) ExecutorService.getInstance().getExecutor(this).queryForObject(new RawSqlStatement("SHOW search_path"), String.class);

                if (searchPathResult != null) {
                    String dirtySearchPaths[] = searchPathResult.split("\\,");
                    searchPaths = new ArrayList<String>();
                    for (String searchPath : dirtySearchPaths) {
                        searchPath = searchPath.trim();

                        // Ensure there is consistency ..
                        if (searchPath.equals("\"$user\"")) {
                            searchPath = "$user";
                        }

                        searchPaths.add(searchPath);
                    }
                }
            }
        } catch (Exception e) {
            // Best effort: log (with the cause) and return whatever was collected so far.
            LogFactory.getLogger().severe("Failed to get default catalog name from postgres", e);
        }

        return searchPaths;
    }

    @Override
    protected SqlStatement getConnectionSchemaNameCallStatement() {
        return new RawCallStatement("select current_schema()");
    }

    // NOTE(review): catalogName/schemaName are concatenated into SQL. Callers supply internal
    // identifiers, but a parameterized statement would be safer if RawSqlStatement gains support.
    private boolean catalogExists(String catalogName) throws DatabaseException {
        return catalogName != null && runExistsQuery(
                "select count(*) from information_schema.schemata where catalog_name='" + catalogName + "'");
    }

    private boolean schemaExists(String schemaName) throws DatabaseException {
        return schemaName != null && runExistsQuery("select count(*) from information_schema.schemata where schema_name='" + schemaName + "'");
    }

    private boolean runExistsQuery(String query) throws DatabaseException {
        Long count = ExecutorService.getInstance().getExecutor(this).queryForLong(new RawSqlStatement(query));
        return count != null && count > 0;
    }
}
package brooklyn.location.jclouds;

import static brooklyn.util.GroovyJavaMethods.truth;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.jclouds.aws.ec2.reference.AWSEC2Constants.PROPERTY_EC2_AMI_QUERY;
import static org.jclouds.aws.ec2.reference.AWSEC2Constants.PROPERTY_EC2_CC_AMI_QUERY;
import static org.jclouds.compute.util.ComputeServiceUtils.execHttpResponse;
import static org.jclouds.scriptbuilder.domain.Statements.*;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.jclouds.Constants;
import org.jclouds.ContextBuilder;
import org.jclouds.aws.ec2.AWSEC2Api;
import org.jclouds.compute.ComputeService;
import org.jclouds.compute.ComputeServiceContext;
import org.jclouds.compute.RunScriptOnNodesException;
import org.jclouds.compute.domain.ExecResponse;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.compute.domain.OperatingSystem;
import org.jclouds.compute.options.RunScriptOptions;
import org.jclouds.compute.predicates.OperatingSystemPredicates;
import org.jclouds.domain.LoginCredentials;
import org.jclouds.ec2.compute.domain.PasswordDataAndPrivateKey;
import org.jclouds.ec2.compute.functions.WindowsLoginCredentialsFromEncryptedData;
import org.jclouds.ec2.domain.PasswordData;
import org.jclouds.ec2.features.WindowsApi;
import org.jclouds.encryption.bouncycastle.config.BouncyCastleCryptoModule;
import org.jclouds.logging.slf4j.config.SLF4JLoggingModule;
import org.jclouds.scriptbuilder.domain.Statement;
import org.jclouds.scriptbuilder.domain.Statements;
import org.jclouds.ssh.SshClient;
import org.jclouds.sshj.config.SshjSshClientModule;
import org.jclouds.util.Predicates2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import brooklyn.entity.basic.Entities;
import brooklyn.location.jclouds.config.BrooklynStandardJcloudsGuiceModule;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.config.ConfigBag;

import com.google.common.base.Charsets;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import com.google.inject.Module;

/**
 * Utility methods for working with jclouds: compute-service caching, node predicates,
 * install scripts, and AWS Windows password retrieval.
 */
public class JcloudsUtil implements JcloudsLocationConfig {

    // TODO Review what utility methods are needed, and what is now supported in jclouds 1.1

    private static final Logger LOG = LoggerFactory.getLogger(JcloudsUtil.class);

    public static String APT_INSTALL = "apt-get install -f -y -qq --force-yes";

    /** Returns a shell command that installs {@code cmd} via apt (updating first) only if it is not already present. */
    public static String installAfterUpdatingIfNotPresent(String cmd) {
        String aptInstallCmd = APT_INSTALL + " " + cmd;
        return String.format("which %s || (%s || (apt-get update && %s))", cmd, aptInstallCmd, aptInstallCmd);
    }

    public static Predicate<NodeMetadata> predicateMatchingById(final NodeMetadata node) {
        return predicateMatchingById(node.getId());
    }

    /** Predicate matching a node purely by id; works around NodeMetadata's equals/hashCode. */
    public static Predicate<NodeMetadata> predicateMatchingById(final String id) {
        Predicate<NodeMetadata> nodePredicate = new Predicate<NodeMetadata>() {
            @Override
            public boolean apply(NodeMetadata arg0) {
                return id.equals(arg0.getId());
            }

            @Override
            public String toString() {
                return "node.id==" + id;
            }
        };
        return nodePredicate;
    }

    public static Statement authorizePortInIpTables(int port) {
        // TODO gogrid rules only allow ports 22, 3389, 80 and 443.
        // the first rule will be ignored, so we have to apply this
        // directly
        return Statements.newStatementList(
                // just in case iptables are being used, try to open 8080
                exec("iptables -I INPUT 1 -p tcp --dport " + port + " -j ACCEPT"),
                exec("iptables -I RH-Firewall-1-INPUT 1 -p tcp --dport " + port + " -j ACCEPT"),
                exec("iptables-save"));
    }

    /**
     * Runs a script on a single node, failing fast if zero or multiple nodes match.
     */
    public static ExecResponse runScriptOnNode(ComputeService computeService, NodeMetadata node, Statement statement, String scriptName) throws RunScriptOnNodesException {
        // TODO Includes workaround for NodeMetadata's equals/hashcode method being wrong.

        Map<? extends NodeMetadata, ExecResponse> scriptResults = computeService.runScriptOnNodesMatching(
                JcloudsUtil.predicateMatchingById(node),
                statement,
                new RunScriptOptions().nameTask(scriptName));
        if (scriptResults.isEmpty()) {
            throw new IllegalStateException("No matching node found when executing script " + scriptName + ": expected=" + node);
        } else if (scriptResults.size() > 1) {
            throw new IllegalStateException("Multiple nodes matched predicate: id=" + node.getId() + "; expected=" + node + "; actual=" + scriptResults.keySet());
        } else {
            return Iterables.getOnlyElement(scriptResults.values());
        }
    }

    public static final Statement APT_RUN_SCRIPT = newStatementList(
            exec(installAfterUpdatingIfNotPresent("curl")),
            exec("(which java && java -fullversion 2>&1|egrep -q 1.6 ) ||"),
            execHttpResponse(URI.create("http://whirr.s3.amazonaws.com/0.2.0-incubating-SNAPSHOT/sun/java/install")),
            exec(new StringBuilder()
                    .append("echo nameserver 208.67.222.222 >> /etc/resolv.conf\n")
                    // jeos hasn't enough room!
                    .append("rm -rf /var/cache/apt /usr/lib/vmware-tools\n")
                    .append("echo \"export PATH=\\\"$JAVA_HOME/bin/:$PATH\\\"\" >> /root/.bashrc")
                    .toString()));

    public static final Statement YUM_RUN_SCRIPT = newStatementList(
            exec("which curl ||yum --nogpgcheck -y install curl"),
            exec("(which java && java -fullversion 2>&1|egrep -q 1.6 ) ||"),
            execHttpResponse(URI.create("http://whirr.s3.amazonaws.com/0.2.0-incubating-SNAPSHOT/sun/java/install")),
            exec(new StringBuilder()
                    .append("echo nameserver 208.67.222.222 >> /etc/resolv.conf\n")
                    .append("echo \"export PATH=\\\"$JAVA_HOME/bin/:$PATH\\\"\" >> /root/.bashrc")
                    .toString()));

    public static final Statement ZYPPER_RUN_SCRIPT = exec(new StringBuilder()
            .append("echo nameserver 208.67.222.222 >> /etc/resolv.conf\n")
            .append("which curl || zypper install curl\n")
            .append("(which java && java -fullversion 2>&1|egrep -q 1.6 ) || zypper install java-1.6.0-openjdk\n")
            .toString());

    // Code taken from RunScriptData
    public static Statement installJavaAndCurl(OperatingSystem os) {
        if (os == null || OperatingSystemPredicates.supportsApt().apply(os))
            return APT_RUN_SCRIPT;
        else if (OperatingSystemPredicates.supportsYum().apply(os))
            return YUM_RUN_SCRIPT;
        else if (OperatingSystemPredicates.supportsZypper().apply(os))
            return ZYPPER_RUN_SCRIPT;
        else
            throw new IllegalArgumentException("don't know how to handle" + os.toString());
    }

    // Cache of compute services keyed by (properties + provider/identity/credential/endpoint).
    static Map<Map<?, ?>, ComputeService> cachedComputeServices = new ConcurrentHashMap<Map<?, ?>, ComputeService>();

    private static final Object createComputeServicesMutex = new Object();

    public static ComputeService findComputeService(ConfigBag conf) {
        return findComputeService(conf, true);
    }

    /**
     * Finds (or creates) a jclouds ComputeService for the given config.
     *
     * @param conf      provider/identity/credential plus optional overrides
     * @param allowReuse when true, a previously created service with the same effective
     *                  configuration is returned from the cache
     */
    public static ComputeService findComputeService(ConfigBag conf, boolean allowReuse) {
        String provider = checkNotNull(conf.get(CLOUD_PROVIDER), "provider must not be null");
        String identity = checkNotNull(conf.get(ACCESS_IDENTITY), "identity must not be null");
        String credential = checkNotNull(conf.get(ACCESS_CREDENTIAL), "credential must not be null");

        Properties properties = new Properties();
        properties.setProperty(Constants.PROPERTY_TRUST_ALL_CERTS, Boolean.toString(true));
        properties.setProperty(Constants.PROPERTY_RELAX_HOSTNAME, Boolean.toString(true));
        properties.setProperty("jclouds.ssh.max-retries", conf.getStringKey("jclouds.ssh.max-retries") != null ?
                conf.getStringKey("jclouds.ssh.max-retries").toString() : "50");

        // Enable aws-ec2 lazy image fetching, if given a specific imageId; otherwise customize for specific owners; or all as a last resort
        if ("aws-ec2".equals(provider)) {
            // TODO convert AWS-only flags to config keys
            if (truth(conf.get(IMAGE_ID))) {
                properties.setProperty(PROPERTY_EC2_AMI_QUERY, "");
                properties.setProperty(PROPERTY_EC2_CC_AMI_QUERY, "");
            } else if (truth(conf.getStringKey("imageOwner"))) {
                properties.setProperty(PROPERTY_EC2_AMI_QUERY, "owner-id=" + conf.getStringKey("imageOwner") + ";state=available;image-type=machine");
            } else if (truth(conf.getStringKey("anyOwner"))) {
                // set `anyOwner: true` to override the default query (which is restricted to certain owners as per below),
                // allowing the AMI query to bind to any machine
                // (note however, we sometimes pick defaults in JcloudsLocationFactory);
                // (and be careful, this can give a LOT of data back, taking several minutes,
                // and requiring extra memory allocated on the command-line)
                properties.setProperty(PROPERTY_EC2_AMI_QUERY, "state=available;image-type=machine");
                /*
                 * by default the following filters are applied:
                 * Filter.1.Name=owner-id&Filter.1.Value.1=137112412989&
                 * Filter.1.Value.2=063491364108&
                 * Filter.1.Value.3=099720109477&
                 * Filter.1.Value.4=411009282317&
                 * Filter.2.Name=state&Filter.2.Value.1=available&
                 * Filter.3.Name=image-type&Filter.3.Value.1=machine&
                 */
            }
        }

        // FIXME Deprecated mechanism, should have a ConfigKey for overrides
        Map<String, Object> extra = Maps.filterKeys(conf.getAllConfig(), Predicates.containsPattern("^jclouds\\."));
        if (extra.size() > 0) {
            LOG.warn("Jclouds using deprecated property overrides: " + Entities.sanitize(extra));
        }
        properties.putAll(extra);

        String endpoint = conf.get(CLOUD_ENDPOINT);
        if (!truth(endpoint)) endpoint = getDeprecatedProperty(conf, Constants.PROPERTY_ENDPOINT);
        if (truth(endpoint)) properties.setProperty(Constants.PROPERTY_ENDPOINT, endpoint);

        // Fixed key typo ("identify" -> "identity"); the key is only used within this
        // in-process cache map, so the rename is not externally visible.
        Map<?, ?> cacheKey = MutableMap.builder()
                .putAll(properties)
                .put("provider", provider)
                .put("identity", identity)
                .put("credential", credential)
                .putIfNotNull("endpoint", endpoint)
                .build()
                .toImmutable();

        if (allowReuse) {
            ComputeService result = cachedComputeServices.get(cacheKey);
            if (result != null) {
                LOG.debug("jclouds ComputeService cache hit for compute service, for " + Entities.sanitize(properties));
                return result;
            }
            LOG.debug("jclouds ComputeService cache miss for compute service, creating, for " + Entities.sanitize(properties));
        }

        Iterable<Module> modules = ImmutableSet.<Module>of(
                new SshjSshClientModule(),
                new SLF4JLoggingModule(),
                new BouncyCastleCryptoModule(),
                new BrooklynStandardJcloudsGuiceModule());

        // Synchronizing to avoid deadlock from sun.reflect.annotation.AnnotationType.
        ComputeServiceContext computeServiceContext;
        synchronized (createComputeServicesMutex) {
            computeServiceContext = ContextBuilder.newBuilder(provider)
                    .modules(modules)
                    .credentials(identity, credential)
                    .overrides(properties)
                    .build(ComputeServiceContext.class);
        }
        final ComputeService computeService = computeServiceContext.getComputeService();
        if (allowReuse) {
            synchronized (cachedComputeServices) {
                // Re-check under the lock: another thread may have created and cached one.
                ComputeService result = cachedComputeServices.get(cacheKey);
                if (result != null) {
                    LOG.debug("jclouds ComputeService cache recovery for compute service, for " + Entities.sanitize(cacheKey));
                    //keep the old one, discard the new one
                    computeService.getContext().close();
                    return result;
                }
                LOG.debug("jclouds ComputeService created " + computeService + ", adding to cache, for " + Entities.sanitize(properties));
                cachedComputeServices.put(cacheKey, computeService);
            }
        }
        return computeService;
    }

    protected static String getDeprecatedProperty(ConfigBag conf, String key) {
        if (conf.containsKey(key)) {
            LOG.warn("Jclouds using deprecated brooklyn-jclouds property " + key + ": " + Entities.sanitize(conf.getAllConfig()));
            return (String) conf.getStringKey(key);
        } else {
            return null;
        }
    }

    // Do this so that if there's a problem with our USERNAME's ssh key, we can still get in to check
    // TODO Once we're really confident there are not going to be regular problems, then delete this
    public static Statement addAuthorizedKeysToRoot(File publicKeyFile) throws IOException {
        String publicKey = Files.toString(publicKeyFile, Charsets.UTF_8);
        return addAuthorizedKeysToRoot(publicKey);
    }

    public static Statement addAuthorizedKeysToRoot(String publicKey) {
        return newStatementList(
                appendFile("/root/.ssh/authorized_keys", Splitter.on('\n').split(publicKey)),
                interpret("chmod 600 /root/.ssh/authorized_keys"));
    }

    public static String getFirstReachableAddress(ComputeServiceContext context, NodeMetadata node) {
        // To pick the address, it relies on jclouds `sshForNode().apply(Node)` to check all IPs of node (private+public),
        // to find one that is reachable. It does `openSocketFinder.findOpenSocketOnNode(node, node.getLoginPort(), ...)`.
        // This keeps trying for time org.jclouds.compute.reference.ComputeServiceConstants.Timeouts.portOpen.
        // TODO Want to configure this timeout here.
        //
        // TODO We could perhaps instead just set `templateOptions.blockOnPort(loginPort, 120)`, but need
        // to be careful to only set that if config WAIT_FOR_SSHABLE is true. For some advanced networking examples
        // (e.g. using DNAT on CloudStack), the brooklyn machine won't be able to reach the VM until some additional
        // setup steps have been done. See links from Andrea:
        //     jclouds.ssh.max-retries
        //     jclouds.ssh.retry-auth

        final SshClient client = context.utils().sshForNode().apply(node);
        return client.getHostAddress();
    }

    // Suggest at least 15 minutes for timeout
    public static String waitForPasswordOnAws(ComputeService computeService, final NodeMetadata node, long timeout, TimeUnit timeUnit) throws TimeoutException {
        ComputeServiceContext computeServiceContext = computeService.getContext();
        AWSEC2Api ec2Client = computeServiceContext.unwrapApi(AWSEC2Api.class);
        final WindowsApi client = ec2Client.getWindowsApi().get();
        final String region = node.getLocation().getParent().getId();

        // The Administrator password will take some time before it is ready - Amazon says sometimes 15 minutes.
        // So we create a predicate that tests if the password is ready, and wrap it in a retryable predicate.
        Predicate<String> passwordReady = new Predicate<String>() {
            @Override
            public boolean apply(String s) {
                if (Strings.isNullOrEmpty(s)) return false;
                PasswordData data = client.getPasswordDataInRegion(region, s);
                if (data == null) return false;
                return !Strings.isNullOrEmpty(data.getPasswordData());
            }
        };

        LOG.info("Waiting for password, for " + node.getProviderId() + ":" + node.getId());
        // Parameterized (was a raw Predicate): Predicates2.retry preserves the element type.
        Predicate<String> passwordReadyRetryable = Predicates2.retry(passwordReady, timeUnit.toMillis(timeout), 10 * 1000, TimeUnit.MILLISECONDS);
        boolean ready = passwordReadyRetryable.apply(node.getProviderId());
        if (!ready) throw new TimeoutException("Password not available for " + node + " in region " + region + " after " + timeout + " " + timeUnit.name());

        // Now pull together Amazon's encrypted password blob, and the private key that jclouds generated
        PasswordDataAndPrivateKey dataAndKey = new PasswordDataAndPrivateKey(
                client.getPasswordDataInRegion(region, node.getProviderId()),
                node.getCredentials().getPrivateKey());

        // And apply it to the decryption function
        WindowsLoginCredentialsFromEncryptedData f = computeServiceContext.utils().injector().getInstance(WindowsLoginCredentialsFromEncryptedData.class);
        LoginCredentials credentials = f.apply(dataAndKey);

        return credentials.getPassword();
    }

}
package com.xenonteam.xenonlib.common.networking; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandler.Sharable; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.BlockPos; import net.minecraftforge.fml.common.network.NetworkRegistry; import net.minecraftforge.fml.common.network.internal.FMLProxyPacket; import net.minecraftforge.fml.relauncher.Side; import com.xenonteam.xenonlib.config.Refs; import com.xenonteam.xenonlib.tileentity.GenericTileEntity; import com.xenonteam.xenonlib.util.XUtils; /** * @author tim4242 * @author philipas * */ @Sharable public class DescriptionHandler extends SimpleChannelInboundHandler<FMLProxyPacket>{ public static final String CHANNEL = Refs.MOD_ID + "Description"; static { NetworkRegistry.INSTANCE.newChannel(CHANNEL, new DescriptionHandler()); } @Deprecated public static void init(){ } @Override protected void channelRead0(ChannelHandlerContext ctx, FMLProxyPacket msg) throws Exception{ ByteBuf buf = msg.payload(); int x = buf.readInt(); int y = buf.readInt(); int z = buf.readInt(); TileEntity te = XUtils.getClientPlayer().worldObj.getTileEntity(new BlockPos(x, y, z)); if(te instanceof GenericTileEntity) { ((GenericTileEntity)te).readFromPacket(buf); } } public static enum XSide { CLIENT(new Side[] {Side.CLIENT}), SERVER(new Side[] {Side.SERVER}), BOTH(new Side[] {Side.CLIENT, Side.SERVER}); private Side[] m_sides; XSide(Side[] sides) { m_sides = sides; } public Side[] getSides() { return m_sides; } } }
package com.wakatime.android.dashboard; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.design.widget.NavigationView; import android.support.v4.app.Fragment; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.view.Menu; import android.view.MenuItem; import android.widget.FrameLayout; import com.google.android.gms.analytics.HitBuilders; import com.google.android.gms.analytics.Tracker; import com.wakatime.android.AboutActivity; import com.wakatime.android.R; import com.wakatime.android.WakatimeApplication; import com.wakatime.android.dashboard.environment.EnvironmentFragment; import com.wakatime.android.dashboard.leaderboard.Leader; import com.wakatime.android.dashboard.leaderboard.LeaderProfileFragment; import com.wakatime.android.dashboard.leaderboard.LeaderboardFragment; import com.wakatime.android.dashboard.model.Project; import com.wakatime.android.dashboard.project.ProjectFragment; import com.wakatime.android.dashboard.project.SingleProjectFragment; import com.wakatime.android.support.view.NavigationHeaderView; import com.wakatime.android.user.UserStartActivity; import javax.inject.Inject; import butterknife.BindView; import butterknife.ButterKnife; import uk.co.chrisjenx.calligraphy.CalligraphyContextWrapper; public class DashboardActivity extends AppCompatActivity implements LogoutActionView, NavigationView.OnNavigationItemSelectedListener, EnvironmentFragment.OnProgrammingFragmentInteractionListener, ProjectFragment.OnProjectFragmentInteractionListener, LeaderboardFragment.OnLeaderListFragmentInteractionListener, SingleProjectFragment.OnSingleProjectInteractionListener { @BindView(R.id.toolbar) Toolbar toolbar; @BindView(R.id.content_dashboard) FrameLayout contentDashboard; @BindView(R.id.nav_view) 
NavigationView navView; @BindView(R.id.drawer_layout) DrawerLayout drawerLayout; @Inject NavigationHeaderView navigationHeaderView; @Inject LogoutHandler mLogoutHandler; private Tracker mTracker; private Fragment programmingFragment; private Fragment projectFragment; private Fragment leaderboardFragment; @Override protected void attachBaseContext(Context newBase) { super.attachBaseContext(CalligraphyContextWrapper.wrap(newBase)); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_dashboard); ButterKnife.bind(this); ((WakatimeApplication) this.getApplication()).useNetworkComponent() .inject(this); toolbar.setTitle(R.string.title_activity_dashboard); setSupportActionBar(toolbar); restoreFragments(savedInstanceState); ActionBarDrawerToggle toggle = new ActionBarDrawerToggle( this, drawerLayout, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawerLayout.addDrawerListener(toggle); toggle.syncState(); navView.setNavigationItemSelectedListener(this); navigationHeaderView.on(navView.getHeaderView(0)).load(); // Called new fragment when there is no other to saved if (savedInstanceState == null) { changeToDefaultFragment(); } mTracker = ((WakatimeApplication) getApplication()).getTracker(); } @Override protected void onResume() { super.onResume(); mTracker.setScreenName("Dashboard"); mTracker.send(new HitBuilders.ScreenViewBuilder().build()); } @Override public void onBackPressed() { if (drawerLayout.isDrawerOpen(GravityCompat.START)) { drawerLayout.closeDrawer(GravityCompat.START); } else { super.onBackPressed(); } } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); if (programmingFragment != null && programmingFragment.isAdded()) { getSupportFragmentManager() .putFragment(outState, EnvironmentFragment.KEY, this.programmingFragment); } if (projectFragment != null && projectFragment.isAdded()) { 
getSupportFragmentManager() .putFragment(outState, ProjectFragment.KEY, this.projectFragment); } if (leaderboardFragment != null && leaderboardFragment.isAdded()) { getSupportFragmentManager() .putFragment(outState, LeaderboardFragment.KEY, this.leaderboardFragment); } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.dashboard, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); return super.onOptionsItemSelected(item); } @SuppressWarnings("StatementWithEmptyBody") @Override public boolean onNavigationItemSelected(MenuItem item) { // Handle navigation view item clicks here. int id = item.getItemId(); if (id == R.id.drawer_environment) { changeToDefaultFragment(); } else if (id == R.id.drawer_projects) { this.projectFragment = ProjectFragment.newInstance(); changeFragment(this.projectFragment); } else if (id == R.id.drawer_leaderboard) { this.leaderboardFragment = LeaderboardFragment.newInstance(); changeFragment(this.leaderboardFragment); } else if (id == R.id.drawer_logout) { this.logout(); } else if (id == R.id.drawer_about) { startActivity(new Intent(this, AboutActivity.class)); } else { changeToDefaultFragment(); } DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); drawer.closeDrawer(GravityCompat.START); return true; } private void changeToDefaultFragment() { this.programmingFragment = EnvironmentFragment.newInstance(); changeFragment(this.programmingFragment); } private void restoreFragments(Bundle bundle) { if (bundle == null) return; this.programmingFragment = getSupportFragmentManager() .getFragment(bundle, EnvironmentFragment.KEY); this.projectFragment = getSupportFragmentManager() 
.getFragment(bundle, ProjectFragment.KEY); this.leaderboardFragment = getSupportFragmentManager() .getFragment(bundle, LeaderboardFragment.KEY); } private void changeFragment(Fragment fragment) { this.getSupportFragmentManager().beginTransaction() .replace(R.id.content_dashboard, fragment) .commit(); } @Override public void logout() { mLogoutHandler.clearData(() -> { startActivity(new Intent(this, UserStartActivity.class)); finish(); }); } @Override public void onListFragmentInteraction(Leader leader) { getSupportFragmentManager().beginTransaction() .replace(R.id.content_dashboard, LeaderProfileFragment.newInstance(leader), leader.getUser().getName()) .addToBackStack(leader.getUser().getName()) .commit(); } @Override public void showProjectPage(Project project) { getSupportFragmentManager().beginTransaction() .replace(R.id.content_dashboard, SingleProjectFragment.newInstance(project.getName()), project.getName()) .addToBackStack(project.getName()) .commit(); } }
package advancedhud;

import advancedhud.ahuditem.DefaultHudItems;
import advancedhud.ahuditem.HudItem;
import advancedhud.ahuditem.HudItemChat;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.EntityClientPlayerMP;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.Gui;
import net.minecraft.client.gui.GuiIngame;
import net.minecraft.client.gui.GuiNewChat;
import net.minecraft.client.gui.GuiPlayerInfo;
import net.minecraft.client.multiplayer.NetClientHandler;
import net.minecraft.client.multiplayer.PlayerControllerMP;
import net.minecraft.client.renderer.EntityRenderer;
import net.minecraft.client.renderer.RenderEngine;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.settings.GameSettings;
import net.minecraft.client.settings.KeyBinding;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.potion.Potion;
import net.minecraft.profiler.Profiler;
import net.minecraft.util.Icon;
import org.lwjgl.opengl.GL11;

/**
 * Replacement in-game GUI that renders the HUD from AHud's configurable list
 * of {@link HudItem}s instead of the vanilla fixed overlay.
 *
 * Raw GL enums used throughout (per the OpenGL 1.1 constant values):
 * 3042 = GL_BLEND, 2929 = GL_DEPTH_TEST, 3008 = GL_ALPHA_TEST,
 * 3553 = GL_TEXTURE_2D, 770 = GL_SRC_ALPHA, 771 = GL_ONE_MINUS_SRC_ALPHA,
 * 769 = GL_ONE_MINUS_SRC_COLOR, 0 = GL_ZERO.
 */
public class AGuiInGame extends GuiIngame {
    // Shadows the superclass' Minecraft reference so this class can use it directly.
    // NOTE(review): presumably hides a same-named field in GuiIngame — confirm.
    protected Minecraft mc;
    // Appears unused in this class.
    float a;

    public AGuiInGame(Minecraft mc) {
        super(mc);
        // Start the vignette fully bright; renderVignette() eases toward the target.
        prevVignetteBrightness = 1.0F;
        this.mc = mc;
    }

    /**
     * Renders the full HUD overlay each frame: portal/pumpkin/vignette/sleep
     * effects first, then every active HudItem, then the player list.
     *
     * @param f        partial-tick interpolation factor
     * @param isActive unused here
     * @param mouseX   unused here
     * @param mouseY   unused here
     */
    public void renderGameOverlay(float f, boolean isActive, int mouseX, int mouseY) {
        Profiler profiler = mc.mcProfiler;
        profiler.startSection("Advanced Hud");
        // Recompute cached screen dimensions in case the window was resized.
        AHud.checkForResize();
        mc.entityRenderer.setupOverlayRendering();
        GL11.glEnable(3042); // GL_BLEND
        GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
        GL11.glBlendFunc(770, 771); // standard alpha blending
        if (!mc.thePlayer.isPotionActive(Potion.confusion)) {
            // Interpolate portal time between ticks for a smooth fade.
            float time = mc.thePlayer.prevTimeInPortal + (mc.thePlayer.timeInPortal - mc.thePlayer.prevTimeInPortal) * f;
            if (time > 0.0F) {
                renderPortalOverlay(time, AHud.screenWidth, AHud.screenHeight);
            }
        }
        renderPumpkinBlur(AHud.screenWidth, AHud.screenHeight);
        renderVignette(mc.thePlayer.getBrightness(f), AHud.screenWidth, AHud.screenHeight);
        renderSleepOverlay();
        // shouldDrawHUD() is false in creative; items can opt in via isRenderedInCreative().
        boolean isSurvival = mc.playerController.shouldDrawHUD();
        for (Object item_ : AHud.getActiveHudItemList()) {
            HudItem item = (HudItem) item_;
            if ((isSurvival) || (item.isRenderedInCreative())) {
                profiler.startSection(item.getName());
                item.render(f);
                profiler.endSection();
            }
        }
        renderPlayerList();
        profiler.endSection();
    }

    /**
     * Per-tick update: opens the settings screen on the configured key press
     * and forwards tick() to every HudItem that is currently rendered.
     */
    public void updateTick() {
        Profiler profiler = mc.mcProfiler;
        profiler.startSection("Advanced Hud");
        AHud.updateCounter += 1;
        if (mc.currentScreen == null) {
            // isPressed() consumes one queued press per call, hence the while loop.
            while (AHud.keyBinding.isPressed())
                mc.displayGuiScreen(new GuiScreenSettings());
        }
        if (mc.theWorld != null) {
            for (Object item_ : AHud.getActiveHudItemList()) {
                HudItem item = (HudItem) item_;
                if ((mc.playerController.shouldDrawHUD()) || (item.isRenderedInCreative())) {
                    profiler.startSection(item.getName());
                    item.tick();
                    profiler.endSection();
                }
            }
        }
        super.updateTick();
        profiler.endSection();
    }

    /**
     * Draws the full-screen pumpkin-mask blur when the player wears a pumpkin
     * (armor slot 3) in first-person view.
     *
     * @param par1 screen width
     * @param par2 screen height
     */
    protected void renderPumpkinBlur(int par1, int par2) {
        ItemStack itemstack = mc.thePlayer.inventory.armorItemInSlot(3);
        if ((mc.gameSettings.thirdPersonView == 0) && (itemstack != null) && (itemstack.itemID == Block.pumpkin.blockID)) {
            GL11.glDisable(2929); // GL_DEPTH_TEST
            GL11.glDepthMask(false);
            GL11.glBlendFunc(770, 771);
            GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
            GL11.glDisable(3008); // GL_ALPHA_TEST
            GL11.glBindTexture(3553, mc.renderEngine.getTexture("%blur%/misc/pumpkinblur.png")); // GL_TEXTURE_2D
            // One full-screen quad at z = -90 (behind other HUD elements).
            Tessellator tessellator = Tessellator.instance;
            tessellator.startDrawingQuads();
            tessellator.addVertexWithUV(0.0D, par2, -90.0D, 0.0D, 1.0D);
            tessellator.addVertexWithUV(par1, par2, -90.0D, 1.0D, 1.0D);
            tessellator.addVertexWithUV(par1, 0.0D, -90.0D, 1.0D, 0.0D);
            tessellator.addVertexWithUV(0.0D, 0.0D, -90.0D, 0.0D, 0.0D);
            tessellator.draw();
            // Restore GL state for subsequent HUD rendering.
            GL11.glDepthMask(true);
            GL11.glEnable(2929);
            GL11.glEnable(3008);
            GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
        }
    }

    /**
     * Draws the darkened screen-edge vignette, eased toward the current
     * brightness so it fades rather than snapping.
     *
     * @param par1 player brightness (0..1); inverted to get vignette strength
     * @param par2 screen width
     * @param par3 screen height
     */
    protected void renderVignette(float par1, int par2, int par3) {
        if (!Minecraft.isFancyGraphicsEnabled()) {
            return;
        }
        // Darker surroundings -> stronger vignette; clamp to [0, 1].
        par1 = 1.0F - par1;
        if (par1 < 0.0F) {
            par1 = 0.0F;
        }
        if (par1 > 1.0F) {
            par1 = 1.0F;
        }
        // Exponential smoothing (1% per frame) toward the target brightness.
        prevVignetteBrightness = ((float)(prevVignetteBrightness + (par1 - prevVignetteBrightness) * 0.01D));
        GL11.glDisable(2929); // GL_DEPTH_TEST
        GL11.glDepthMask(false);
        GL11.glBlendFunc(0, 769); // GL_ZERO, GL_ONE_MINUS_SRC_COLOR: multiplicative darkening
        GL11.glColor4f(prevVignetteBrightness, prevVignetteBrightness, prevVignetteBrightness, 1.0F);
        GL11.glBindTexture(3553, mc.renderEngine.getTexture("%blur%/misc/vignette.png"));
        Tessellator tessellator = Tessellator.instance;
        tessellator.startDrawingQuads();
        tessellator.addVertexWithUV(0.0D, par3, -90.0D, 0.0D, 1.0D);
        tessellator.addVertexWithUV(par2, par3, -90.0D, 1.0D, 1.0D);
        tessellator.addVertexWithUV(par2, 0.0D, -90.0D, 1.0D, 0.0D);
        tessellator.addVertexWithUV(0.0D, 0.0D, -90.0D, 0.0D, 0.0D);
        tessellator.draw();
        GL11.glDepthMask(true);
        GL11.glEnable(2929);
        GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
        GL11.glBlendFunc(770, 771); // restore standard alpha blending
    }

    /**
     * Draws the swirling nether-portal overlay using the portal block texture.
     *
     * @param par1 portal time in [0, 1]; remapped below so the effect never
     *             fully disappears while active
     * @param par2 screen width
     * @param par3 screen height
     */
    protected void renderPortalOverlay(float par1, int par2, int par3) {
        if (par1 < 1.0F) {
            // Quartic ease-in, rescaled to [0.2, 1.0].
            par1 *= par1;
            par1 *= par1;
            par1 = par1 * 0.8F + 0.2F;
        }

        GL11.glDisable(GL11.GL_ALPHA_TEST);
        GL11.glDisable(GL11.GL_DEPTH_TEST);
        GL11.glDepthMask(false);
        GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
        GL11.glColor4f(1.0F, 1.0F, 1.0F, par1);
        this.mc.renderEngine.bindTexture("/terrain.png");
        // Use the portal block's top-face texture coordinates from the atlas.
        Icon icon = Block.portal.getBlockTextureFromSide(1);
        float f1 = icon.getMinU();
        float f2 = icon.getMinV();
        float f3 = icon.getMaxU();
        float f4 = icon.getMaxV();
        Tessellator tessellator = Tessellator.instance;
        tessellator.startDrawingQuads();
        tessellator.addVertexWithUV(0.0D, (double)par3, -90.0D, (double)f1, (double)f4);
        tessellator.addVertexWithUV((double)par2, (double)par3, -90.0D, (double)f3, (double)f4);
        tessellator.addVertexWithUV((double)par2, 0.0D, -90.0D, (double)f3, (double)f2);
        tessellator.addVertexWithUV(0.0D, 0.0D, -90.0D, (double)f1, (double)f2);
        tessellator.draw();
        GL11.glDepthMask(true);
        GL11.glEnable(GL11.GL_DEPTH_TEST);
        GL11.glEnable(GL11.GL_ALPHA_TEST);
        GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
    }

    /**
     * Fades the screen to a dark blue while the player is falling asleep
     * (timer counts up; past 100 the overlay fades back out).
     */
    protected void renderSleepOverlay() {
        if (mc.thePlayer.getSleepTimer() > 0) {
            GL11.glDisable(2929); // GL_DEPTH_TEST
            GL11.glDisable(3008); // GL_ALPHA_TEST
            int k = mc.thePlayer.getSleepTimer();
            float f1 = k / 100.0F;

            if (f1 > 1.0F) {
                f1 = 1.0F - (k - 100) / 10.0F;
            }

            // Alpha scaled by f1 in the top byte, over a dark blue base color.
            int j1 = (int)(220.0F * f1) << 24 | 0x101020;
            drawRect(0, 0, AHud.screenWidth, AHud.screenHeight, j1);
            GL11.glEnable(3008);
            GL11.glEnable(2929);
        }
    }

    /**
     * Draws the multiplayer tab player list (names plus ping icons) while the
     * player-list key is held, laying entries out in up-to-20-row columns.
     */
    protected void renderPlayerList() {
        if ((mc.gameSettings.keyBindPlayerList.pressed) && ((!mc.isIntegratedServerRunning()) || (mc.thePlayer.sendQueue.playerInfoList.size() > 1))) {
            NetClientHandler netClientHandler = mc.thePlayer.sendQueue;
            List playerList = netClientHandler.playerInfoList;
            int maxPlayers = netClientHandler.currentServerMaxPlayers;
            // j3 = rows, i4 = columns: add columns until no column exceeds 20 rows.
            int j3 = maxPlayers;
            int i4 = 1;
            for (; j3 > 20; j3 = (maxPlayers + i4 - 1) / i4) {
                i4++;
            }
            // Column width: 300px split across columns, capped at 150px.
            int k4 = 300 / i4;
            if (k4 > 150) {
                k4 = 150;
            }
            // Center the table horizontally; byte0 is the top margin.
            int i5 = (AHud.screenWidth - i4 * k4) / 2;
            byte byte0 = 10;
            // Semi-transparent background (0x80000000) behind the whole table.
            drawRect(i5 - 1, byte0 - 1, i5 + k4 * i4, byte0 + 9 * j3, -2147483648);
            for (int k6 = 0; k6 < maxPlayers; k6++) {
                // Cell position: fill column-count cells per row, 9px per row.
                int j7 = i5 + k6 % i4 * k4;
                int i8 = byte0 + k6 / i4 * 9;
                drawRect(j7, i8, j7 + k4 - 1, i8 + 8, 553648127);
                GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
                GL11.glEnable(3008); // GL_ALPHA_TEST
                if (k6 < playerList.size()) {
                    GuiPlayerInfo guiplayerinfo = (GuiPlayerInfo)playerList.get(k6);
                    mc.fontRenderer.drawStringWithShadow(guiplayerinfo.name, j7, i8, 16777215);
                    mc.renderEngine.bindTexture("/gui/icons.png");
                    int l9 = 0;
                    // Map response time to one of the 5 ping-bar icons (5 = no connection).
                    byte byte1 = 0;
                    if (guiplayerinfo.responseTime < 0) byte1 = 5;
                    else if (guiplayerinfo.responseTime < 150) byte1 = 0;
                    else if (guiplayerinfo.responseTime < 300) byte1 = 1;
                    else if (guiplayerinfo.responseTime < 600) byte1 = 2;
                    else if (guiplayerinfo.responseTime < 1000) byte1 = 3;
                    else {
                        byte1 = 4;
                    }
                    // Temporarily raise zLevel so the icon draws above the name.
                    zLevel += 100.0F;
                    drawTexturedModalRect(j7 + k4 - 12, i8, 0 + l9 * 10, 176 + byte1 * 8, 10, 8);
                    zLevel -= 100.0F;
                }
            }
        }
    }

    /** Returns the chat GUI owned by the chat HudItem instead of the vanilla one. */
    public GuiNewChat getChatGUI() {
        return DefaultHudItems.chat.getChatGUI();
    }

    /** Returns AHud's global tick counter (incremented in updateTick()). */
    public int getUpdateCounter() {
        return AHud.updateCounter;
    }
}
package org.jeo.cli.cmd;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.net.URI;
import java.util.List;

import jline.console.ConsoleReader;

import org.jeo.cli.JeoCLI;
import org.jeo.data.Dataset;
import org.jeo.data.DirectoryRegistry;
import org.jeo.data.Drivers;
import org.jeo.data.Query;
import org.jeo.data.Registry;
import org.jeo.data.TileGrid;
import org.jeo.data.TilePyramid;
import org.jeo.data.TileSet;
import org.jeo.data.VectorData;
import org.jeo.data.Workspace;
import org.jeo.feature.Field;
import org.jeo.feature.Schema;
import org.jeo.proj.Proj;
import org.osgeo.proj4j.CoordinateReferenceSystem;

import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import com.google.common.base.Strings;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Ints;
import com.vividsolutions.jts.geom.Envelope;

/**
 * CLI "info" command: prints a human-readable summary (driver, bounds, CRS,
 * schema / tile grids / dataset listing) for each data source argument.
 */
@Parameters(commandNames="info", commandDescription="Provides information about a data source")
public class InfoCmd extends JeoCmd {

    // One or more data source URIs/paths, as given on the command line.
    @Parameter(description="datasource", required=true)
    List<String> datas;

    @Override
    protected void doCommand(JeoCLI cli) throws Exception {
        // NOTE(review): this local is unused; each print() fetches the console itself.
        ConsoleReader console = cli.getConsole();

        for (String data : datas) {
            URI uri = parseDataURI(data);

            try {
                Object obj = Drivers.open(uri);
                if (obj == null) {
                    throw new IllegalArgumentException("Unable to open data source: " + uri);
                }

                print(obj, cli);
            }
            catch(Exception e) {
                // Fallback: if the URI is actually a directory, treat it as a
                // registry and print every item in it; otherwise re-throw the
                // original failure.
                // NOTE(review): new File(uri) itself throws for non-file URIs,
                // which would mask the original exception — confirm intended.
                File f = new File(uri);
                if (f.exists() && f.isDirectory()) {
                    DirectoryRegistry reg = new DirectoryRegistry(f);
                    for (Registry.Item it : reg.list()) {
                        print(reg.get(it.getName()), cli);
                    }
                }
                else {
                    throw e;
                }
            }
        }
    }

    /**
     * Dispatches to the type-specific printer; rejects unknown object types.
     */
    void print(Object obj, JeoCLI cli) throws IOException {
        if (obj instanceof Workspace) {
            print((Workspace)obj, cli);
        }
        else if (obj instanceof VectorData) {
            print((VectorData)obj, cli);
        }
        else if (obj instanceof TileSet) {
            print((TileSet)obj, cli);
        }
        else {
            throw new IllegalArgumentException(
                "Object " + obj.getClass().getName() + " not supported");
        }
        // Blank line between consecutive data source summaries.
        cli.getConsole().println();
    }

    /**
     * Prints the properties common to all datasets: name, driver, bounds, CRS.
     */
    void print(Dataset dataset, JeoCLI cli) throws IOException {
        ConsoleReader console = cli.getConsole();
        console.println("Name: " + dataset.getName());
        console.println("Driver: " + dataset.getDriver().getName());

        Envelope bbox = dataset.bounds();
        console.println("Bounds: " + String.format("%f, %f, %f, %f",
            bbox.getMinX(), bbox.getMinY(), bbox.getMaxX(), bbox.getMaxY()));

        CoordinateReferenceSystem crs = dataset.getCRS();
        console.println("CRS: " + (crs != null ? crs.getName() : "None"));
        if (crs != null) {
            print(crs, cli);
        }
    }

    /**
     * Prints a vector dataset: common properties, feature count, and the
     * schema with field names right-aligned to the longest name.
     */
    void print(VectorData dataset, JeoCLI cli) throws IOException {
        ConsoleReader console = cli.getConsole();
        try {
            print((Dataset) dataset, cli);
            console.println("Count: " + dataset.count(new Query()));

            console.println("Schema:");
            Schema schema = dataset.getSchema();
            // Pad field names to the longest name (+2) for aligned output.
            int size = new Ordering<Field>() {
                public int compare(Field left, Field right) {
                    return Ints.compare(left.getName().length(), right.getName().length());
                };
            }.max(schema.getFields()).getName().length() + 2;

            for (Field fld : schema ) {
                console.print(Strings.padStart(fld.getName(), size, ' '));
                console.println(" : " + fld.getType().getSimpleName());
            }
        }
        finally {
            // Always release the dataset, even if printing fails.
            dataset.close();
        }
    }

    /**
     * Prints a tile dataset: common properties, tile size, and one line per
     * zoom-level grid (dimensions, tile count, resolutions).
     */
    void print(TileSet dataset, JeoCLI cli) throws IOException {
        ConsoleReader console = cli.getConsole();
        try {
            print((Dataset) dataset, cli);

            TilePyramid pyr = dataset.getPyramid();
            console.println(
                String.format("Tilesize: %d, %d", pyr.getTileWidth(), pyr.getTileHeight()));

            console.println("Tilesets:");
            for (TileGrid grid : dataset.getPyramid().getGrids()) {
                int width = grid.getWidth();
                int height = grid.getHeight();
                console.print("\t");
                console.println(String.format("%d: %d x %d (%d); %f, %f", grid.getZ(),
                    width, height, width*height, grid.getXRes(), grid.getYRes()));
            }
        }
        finally {
            dataset.close();
        }
    }

    /**
     * Prints a workspace: its driver and the names of the datasets it contains.
     */
    void print(Workspace workspace, JeoCLI cli) throws IOException {
        ConsoleReader console = cli.getConsole();
        try {
            console.println("Driver: " + workspace.getDriver().getName());
            console.println("Datasets:");

            for (String l : workspace.list()) {
                console.print("\t");
                console.println(l);
            }
        }
        finally {
            workspace.close();
        }
    }

    /**
     * Prints the CRS as pretty (multi-line) WKT, indented two tab stops.
     */
    void print(CoordinateReferenceSystem crs, JeoCLI cli) throws IOException {
        ConsoleReader console = cli.getConsole();
        String wkt = Proj.toWKT(crs, true);
        BufferedReader r = new BufferedReader(new StringReader(wkt));

        String line = null;
        while ((line = r.readLine()) != null) {
            console.print("\t\t");
            console.println(line);
        }
    }
}
package www.ctrip.com.androiddatepickertimepicker;

import android.app.DatePickerDialog;
import android.app.TimePickerDialog;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.DatePicker;
import android.widget.TimePicker;

import java.util.Calendar;

/**
 * Demo activity wiring inline DatePicker/TimePicker widgets plus the matching
 * modal dialogs; each selection change is mirrored into the activity title.
 */
public class MainActivity extends AppCompatActivity {

    private TimePicker mTimePicker;
    private DatePicker mDatePicker;
    private Calendar mCalendar;
    private int year;
    private int month;
    private int day;
    private int hour;
    private int minute;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        captureCurrentDateTime();
        setTitle(year + "-" + month + "-" + day + "-" + hour + "-" + minute);

        mDatePicker = (DatePicker) findViewById(R.id.datePicker);
        mTimePicker = (TimePicker) findViewById(R.id.timePicker);
        attachPickerListeners();
        showPickerDialogs();
    }

    /** Snapshots "now" into the calendar and the cached date/time fields. */
    private void captureCurrentDateTime() {
        mCalendar = Calendar.getInstance();
        year = mCalendar.get(Calendar.YEAR);
        month = mCalendar.get(Calendar.MONTH) + 1; // Calendar.MONTH is zero-based
        day = mCalendar.get(Calendar.DAY_OF_MONTH);
        hour = mCalendar.get(Calendar.HOUR_OF_DAY);
        minute = mCalendar.get(Calendar.MINUTE);
    }

    /** Mirrors changes from the inline pickers into the activity title. */
    private void attachPickerListeners() {
        mDatePicker.init(year, mCalendar.get(Calendar.MONTH), day, new DatePicker.OnDateChangedListener() {
            @Override
            public void onDateChanged(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
                setTitle(year + "-" + (monthOfYear + 1) + "-" + dayOfMonth);
            }
        });
        mTimePicker.setOnTimeChangedListener(new TimePicker.OnTimeChangedListener() {
            @Override
            public void onTimeChanged(TimePicker view, int hourOfDay, int minute) {
                setTitle(hourOfDay + ":" + minute);
            }
        });
    }

    /** Pops up the modal date and then time picker dialogs on startup. */
    private void showPickerDialogs() {
        new DatePickerDialog(this, new DatePickerDialog.OnDateSetListener() {
            @Override
            public void onDateSet(DatePicker view, int year, int month, int dayOfMonth) {
                setTitle(year + "-" + (month + 1) + "-" + dayOfMonth);
            }
        }, year, mCalendar.get(Calendar.MONTH), day).show();

        new TimePickerDialog(this, new TimePickerDialog.OnTimeSetListener() {
            @Override
            public void onTimeSet(TimePicker view, int hourOfDay, int minute) {
                setTitle(hourOfDay + ":" + minute);
            }
        }, hour, minute, true).show();
    }
}
package org.csstudio.trends.databrowser2.model;

import static org.junit.Assert.*;

import java.util.Timer;

import org.csstudio.swt.xygraph.dataprovider.IDataProvider;
import org.csstudio.swt.xygraph.linearscale.Range;
import org.csstudio.utility.pv.PVFactory;
import org.junit.Test;

/** [Headless] JUnit Plug-In test of the PVItem
 *
 *  Timing-based: each test lets a PVItem collect live samples for
 *  RUNTIME_SECS and then checks the approximate sample count, so the
 *  assertions use generous deltas.
 *
 *  @author Kay Kasemir
 */
@SuppressWarnings("nls")
public class PVItemHeadlessTest
{
    /** Time in seconds for each test */
    private static final double RUNTIME_SECS = 10.0;

    /** Sanity check: PVFactory is only available when run as a plug-in test. */
    @Test
    public void checkPV() throws Exception
    {
        try
        {
            PVFactory.getSupportedPrefixes();
        }
        catch (Exception ex)
        {
            ex.printStackTrace();
            fail("Must run as JUnit *Plug-In* test to use PVFactory");
        }
    }

    /** Check if PVItem scans its PV */
    @Test
    public void testScannedPVItem() throws Exception
    {
        System.out.println("Scanned samples: (" + RUNTIME_SECS + " secs)");
        final Timer scan_timer = new Timer();
        // Scan period 1.0 sec -> periodic sampling driven by the timer.
        final PVItem pv = new PVItem("sim://sine(0,10,10,1)", 1.0);
        pv.start(scan_timer);
        Thread.sleep((long) (RUNTIME_SECS * 1000));
        pv.stop();
        // Should have about 1 sample per second
        final IDataProvider samples = pv.getSamples();
        System.out.println(samples);
        assertEquals(RUNTIME_SECS, samples.getSize(), 2.0);
        checkMinMax(samples);
    }

    /** Check if PVItem logs received PV monitors (value updates) */
    @Test
    public void testMonitoredPVItem() throws Exception
    {
        System.out.println("Monitored samples: (" + RUNTIME_SECS + " secs)");
        // Period 0.0 -> monitor mode; no scan timer is needed.
        final Timer unused_timer = null;
        final PVItem pv = new PVItem("sim://sine(0,10,10,1)", 0.0);
        pv.start(unused_timer);
        Thread.sleep((long) (RUNTIME_SECS * 1000));
        pv.stop();
        // Should have about 1 sample per second
        final IDataProvider samples = pv.getSamples();
        System.out.println(samples);
        assertEquals(RUNTIME_SECS, samples.getSize(), 2.0);
        checkMinMax(samples);
    }

    /** Check if scan period can be changed while running */
    @Test
    public void testScanPeriodChange() throws Exception
    {
        System.out.println("Scan time change: (" + 2 * RUNTIME_SECS + " secs)");
        final Timer scan_timer = new Timer();
        final PVItem pv = new PVItem("sim://sine(0,10,10,1)", 1.0);
        pv.start(scan_timer);
        Thread.sleep((long) (RUNTIME_SECS * 1000));
        // Leave PV running. Should have about 1 sample per second
        System.out.println("Samples while scanned at 1 second");
        IDataProvider samples = pv.getSamples();
        System.out.println(samples);
        assertEquals(RUNTIME_SECS, samples.getSize(), 2.0);

        // Change to 2 second scan
        System.out.println("Changing scan to 2 seconds...");
        pv.setScanPeriod(2.0);
        Thread.sleep((long) (RUNTIME_SECS * 1000));
        // Should have about 1 sample per second + 0.5 per second
        samples = pv.getSamples();
        System.out.println(samples);
        assertEquals(RUNTIME_SECS + RUNTIME_SECS/2, samples.getSize(), 4.0);
        pv.stop();
    }

    /** Check if value min..max is correct
     *  (compares the provider's cached Y range against a manual scan).
     */
    private void checkMinMax(final IDataProvider samples)
    {
        double min = Double.MAX_VALUE;
        double max = -Double.MAX_VALUE;
        for (int i=0; i<samples.getSize(); ++i)
        {
            final double value = samples.getSample(i).getYValue();
            if (value < min)
                min = value;
            if (value > max)
                max = value;
        }
        assertEquals(new Range(min, max), samples.getYDataMinMax());
    }

    /** Check if PVItem correctly handles waveform index */
    @Test
    public void testWaveformIndex() throws Exception
    {
        System.out.println("Scanned samples: (" + RUNTIME_SECS + " secs)");
        final Timer scan_timer = new Timer();
        // Constant 3-element waveform; start by tracking element [1].
        final PVItem pv = new PVItem("const://(1.1,2.2,3.3)", 1.0);
        pv.setWaveformIndex(1);
        pv.start(scan_timer);
        Thread.sleep((long) (RUNTIME_SECS * 1000));
        pv.stop();
        // Should have about 1 sample per second
        final IDataProvider samples = pv.getSamples();
        System.out.println(samples);
        assertEquals(RUNTIME_SECS, samples.getSize(), 2.0);

        // Check if the samples indicate the second element
        assertEquals(2.2, samples.getSample(0).getYValue(), 0.000001);
        assertEquals(2.2, samples.getSample(1).getYValue(), 0.000001);
        assertEquals(new Range(2.2,2.2), samples.getYDataMinMax());

        // Check if the samples indicate the third element
        pv.setWaveformIndex(2);
        assertEquals(3.3, samples.getSample(0).getYValue(), 0.000001);
        assertEquals(3.3, samples.getSample(1).getYValue(), 0.000001);
        assertEquals(new Range(3.3,3.3), samples.getYDataMinMax());

        // An index beyond the waveform length yields NaN and no Y range
        pv.setWaveformIndex(4);
        assertEquals(Double.NaN, samples.getSample(0).getYValue(), 0.000001);
        assertEquals(Double.NaN, samples.getSample(1).getYValue(), 0.000001);
        assertNull(samples.getYDataMinMax());

        // Check if the samples indicate the first element
        // (negative index falls back to element [0])
        pv.setWaveformIndex(-1);
        assertEquals(1.1, samples.getSample(0).getYValue(), 0.000001);
        assertEquals(1.1, samples.getSample(1).getYValue(), 0.000001);
        assertEquals(new Range(1.1,1.1), samples.getYDataMinMax());
    }
}
package org.openhab.binding.zwave.internal.protocol; import gnu.io.CommPort; import gnu.io.CommPortIdentifier; import gnu.io.NoSuchPortException; import gnu.io.PortInUseException; import gnu.io.SerialPort; import gnu.io.UnsupportedCommOperationException; import java.io.IOException; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessageClass; import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessagePriority; import org.openhab.binding.zwave.internal.protocol.SerialMessage.SerialMessageType; import org.openhab.binding.zwave.internal.protocol.NodeStage; import org.openhab.binding.zwave.internal.protocol.commandclass.ZWaveCommandClass.CommandClass; import org.openhab.binding.zwave.internal.protocol.commandclass.ZWaveCommandClass; import org.openhab.binding.zwave.internal.protocol.commandclass.ZWaveCommandClassDynamicState; import org.openhab.binding.zwave.internal.protocol.commandclass.ZWaveMultiInstanceCommandClass; import org.openhab.binding.zwave.internal.protocol.commandclass.ZWaveWakeUpCommandClass; import org.openhab.binding.zwave.internal.protocol.event.ZWaveEvent; import org.openhab.binding.zwave.internal.protocol.event.ZWaveInclusionEvent; import org.openhab.binding.zwave.internal.protocol.event.ZWaveInitializationCompletedEvent; import org.openhab.binding.zwave.internal.protocol.event.ZWaveNetworkEvent; import org.openhab.binding.zwave.internal.protocol.event.ZWaveNodeStatusEvent; import org.openhab.binding.zwave.internal.protocol.event.ZWaveTransactionCompletedEvent; import org.openhab.binding.zwave.internal.protocol.event.ZWaveNetworkEvent.State; import 
org.openhab.binding.zwave.internal.protocol.initialization.ZWaveNodeSerializer; import org.openhab.binding.zwave.internal.protocol.serialmessage.AddNodeMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.AssignReturnRouteMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.AssignSucReturnRouteMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.DeleteReturnRouteMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.EnableSucMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.GetControllerCapabilitiesMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.GetSucNodeIdMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.IdentifyNodeMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.IsFailedNodeMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.RemoveNodeMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.RequestNodeNeighborUpdateMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.RemoveFailedNodeMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.RequestNodeInfoMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.GetRoutingInfoMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.SendDataMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.SerialApiSoftResetMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.SetSucNodeMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.ZWaveCommandProcessor; import org.openhab.binding.zwave.internal.protocol.serialmessage.GetVersionMessageClass; import org.openhab.binding.zwave.internal.protocol.serialmessage.MemoryGetIdMessageClass; import 
org.openhab.binding.zwave.internal.protocol.serialmessage.SerialApiGetCapabilitiesMessageClass;
import org.openhab.binding.zwave.internal.protocol.serialmessage.SerialApiGetInitDataMessageClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * ZWave controller class. Implements communication with the Z-Wave
 * controller stick using serial messages.
 * @author Victor Belov
 * @author Brian Crosby
 * @author Chris Jackson
 * @since 1.3.0
 */
public class ZWaveController {

	private static final Logger logger = LoggerFactory.getLogger(ZWaveController.class);

	// Timeouts and tuning constants (all in milliseconds unless noted).
	private static final int QUERY_STAGE_TIMEOUT = 120000;
	private static final int ZWAVE_RESPONSE_TIMEOUT = 5000; // 5000 ms ZWAVE_RESPONSE TIMEOUT
	private static final int ZWAVE_RECEIVE_TIMEOUT = 1000; // 1000 ms ZWAVE_RECEIVE_TIMEOUT
	private static final int INITIAL_QUEUE_SIZE = 128;
	private static final long WATCHDOG_TIMER_PERIOD = 10000; // 10 seconds watchdog timer

	// Z-Wave transmit option flags OR-ed together when sending data frames.
	private static final int TRANSMIT_OPTION_ACK = 0x01;
	private static final int TRANSMIT_OPTION_AUTO_ROUTE = 0x04;
	private static final int TRANSMIT_OPTION_EXPLORE = 0x20;

	// Known nodes keyed by node id, and the registered event listeners.
	private final Map<Integer, ZWaveNode> zwaveNodes = new HashMap<Integer, ZWaveNode>();
	private final ArrayList<ZWaveEventListener> zwaveEventListeners = new ArrayList<ZWaveEventListener>();

	// Outgoing messages, ordered by SerialMessage priority.
	private final PriorityBlockingQueue<SerialMessage> sendQueue = new PriorityBlockingQueue<SerialMessage>(INITIAL_QUEUE_SIZE, new SerialMessage.SerialMessageComparator(this));
	private ZWaveSendThread sendThread;
	private ZWaveReceiveThread receiveThread;

	// Signals the send thread that the current transaction finished;
	// released by the receive/processing side.
	private final Semaphore transactionCompleted = new Semaphore(1);
	private volatile SerialMessage lastSentMessage = null;
	private long lastMessageStartTime = 0;
	private long longestResponseTime = 0;
	private SerialPort serialPort;
	private int zWaveResponseTimeout = ZWAVE_RESPONSE_TIMEOUT;
	private Timer watchdog;

	// Controller identity, populated from the stick during initialization.
	private String zWaveVersion = "Unknown";
	private String serialAPIVersion = "Unknown";
	private int homeId = 0;
	private int ownNodeId = 0;
	private int manufactureId = 0;
	private int deviceType = 0;
	private int deviceId = 0;
	private int ZWaveLibraryType = 0;

	// Rolling callback id for SendData frames (1..0xFF).
	private int sentDataPointer = 1;
	// Whether this controller should act as the network's SUC.
	private boolean setSUC = false;
	private ZWaveDeviceType controllerType = ZWaveDeviceType.UNKNOWN;
	private int sucID = 0;

	// Frame statistics counters.
	private int SOFCount = 0;
	private int CANCount = 0;
	private int NAKCount = 0;
	private int ACKCount = 0;
	private int OOFCount = 0;
	private AtomicInteger timeOutCount = new AtomicInteger(0);

	private boolean initializationComplete = false;
	private boolean isConnected;

	// Constructors

	/**
	 * Constructor. Creates a new instance of the Z-Wave controller class.
	 * Connects to the serial port and starts the watchdog that respawns
	 * the I/O threads if they die.
	 * @param isSUC true if this controller should act as the Static Update Controller
	 * @param serialPortName the serial port name to use for
	 * communication with the Z-Wave controller stick.
	 * @param timeout response timeout in ms; only honoured when within [1500, 10000]
	 * @throws SerialInterfaceException when a connection error occurs.
	 */
	public ZWaveController(final boolean isSUC, final String serialPortName, final Integer timeout) throws SerialInterfaceException {
		logger.info("Starting Z-Wave controller");
		this.setSUC = isSUC;
		// Only accept a sane, bounded timeout override.
		if(timeout != null && timeout >= 1500 && timeout <= 10000) {
			zWaveResponseTimeout = timeout;
		}
		logger.info("Z-Wave timeout is set to {}ms.", zWaveResponseTimeout);
		connect(serialPortName);
		this.watchdog = new Timer(true);
		this.watchdog.schedule(
				new WatchDogTimerTask(serialPortName),
				WATCHDOG_TIMER_PERIOD, WATCHDOG_TIMER_PERIOD);
	}

	// Incoming message handlers

	/**
	 * Handles incoming Serial Messages. Serial messages can either be messages
	 * that are a response to our own requests, or the stick asking us information.
	 * @param incomingMessage the incoming message to process.
*/ private void handleIncomingMessage(SerialMessage incomingMessage) { logger.trace("Incoming message to process"); logger.debug(incomingMessage.toString()); switch (incomingMessage.getMessageType()) { case Request: handleIncomingRequestMessage(incomingMessage); break; case Response: handleIncomingResponseMessage(incomingMessage); break; default: logger.warn("Unsupported incomingMessageType: 0x%02X", incomingMessage.getMessageType()); } } /** * Handles an incoming request message. * An incoming request message is a message initiated by a node or the controller. * @param incomingMessage the incoming message to process. */ private void handleIncomingRequestMessage(SerialMessage incomingMessage) { logger.trace("Message type = REQUEST"); ZWaveCommandProcessor processor = ZWaveCommandProcessor.getMessageDispatcher(incomingMessage.getMessageClass()); if(processor != null) { processor.handleRequest(this, lastSentMessage, incomingMessage); if(processor.isTransactionComplete()) { notifyEventListeners(new ZWaveTransactionCompletedEvent(this.lastSentMessage)); transactionCompleted.release(); logger.trace("Released. Transaction completed permit count -> {}", transactionCompleted.availablePermits()); } } else { logger.warn(String.format("TODO: Implement processing of Request Message = %s (0x%02X)", incomingMessage.getMessageClass().getLabel(), incomingMessage.getMessageClass().getKey())); } } /** * Handles a failed SendData request. This can either be because of the stick actively reporting it * or because of a time-out of the transaction in the send thread. * @param originalMessage the original message that was sent */ private void handleFailedSendDataRequest(SerialMessage originalMessage) { new SendDataMessageClass().handleFailedSendDataRequest(this, originalMessage); } /** * Handles an incoming response message. * An incoming response message is a response, based one of our own requests. * @param incomingMessage the response message to process. 
*/ private void handleIncomingResponseMessage(SerialMessage incomingMessage) { logger.trace("Message type = RESPONSE"); ZWaveCommandProcessor processor = ZWaveCommandProcessor.getMessageDispatcher(incomingMessage.getMessageClass()); if(processor != null) { processor.handleResponse(this, lastSentMessage, incomingMessage); if(processor.isTransactionComplete()) { notifyEventListeners(new ZWaveTransactionCompletedEvent(this.lastSentMessage)); transactionCompleted.release(); logger.trace("Released. Transaction completed permit count -> {}", transactionCompleted.availablePermits()); } } else { logger.warn(String.format("TODO: Implement processing of Response Message = %s (0x%02X)", incomingMessage.getMessageClass().getLabel(), incomingMessage.getMessageClass().getKey())); } switch (incomingMessage.getMessageClass()) { case GetVersion: this.zWaveVersion = ((GetVersionMessageClass)processor).getVersion(); this.ZWaveLibraryType = ((GetVersionMessageClass)processor).getLibraryType(); break; case MemoryGetId: this.ownNodeId = ((MemoryGetIdMessageClass)processor).getNodeId(); this.homeId = ((MemoryGetIdMessageClass)processor).getHomeId(); break; case SerialApiGetInitData: this.isConnected = true; for(Integer nodeId : ((SerialApiGetInitDataMessageClass)processor).getNodes()) { if(nodeId != 10) continue; ZWaveNode node = null; try { ZWaveNodeSerializer nodeSerializer = new ZWaveNodeSerializer(); node = nodeSerializer.DeserializeNode(nodeId); } catch (Exception e) { logger.error("NODE {}: Error deserialising XML file. {}", nodeId, e.toString()); node = null; } String name = null; String location = null; // Did the node deserialise ok? 
if (node != null) { // Remember the name and location - in case we decide the file was invalid name = node.getName(); location = node.getLocation(); // Sanity check the data from the file if (node.getManufacturer() == Integer.MAX_VALUE || node.getHomeId() != this.homeId || node.getNodeId() != nodeId) { logger.warn("NODE {}: Config file data is invalid, ignoring config.", nodeId); node = null; } else { // The restore was ok, but we have some work to set up the links that aren't // made as the deserialiser doesn't call the constructor logger.debug("NODE {}: Restored from config.", nodeId); node.setRestoredFromConfigfile(this); // Set the controller and node references for all command classes for (ZWaveCommandClass commandClass : node.getCommandClasses()) { commandClass.setController(this); commandClass.setNode(node); // Handle event handlers if (commandClass instanceof ZWaveEventListener) { this.addEventListener((ZWaveEventListener)commandClass); } // If this is the multi-instance class, add all command classes for the endpoints if (commandClass instanceof ZWaveMultiInstanceCommandClass) { for (ZWaveEndpoint endPoint : ((ZWaveMultiInstanceCommandClass) commandClass) .getEndpoints()) { for (ZWaveCommandClass endpointCommandClass : endPoint.getCommandClasses()) { endpointCommandClass.setController(this); endpointCommandClass.setNode(node); endpointCommandClass.setEndpoint(endPoint); // Handle event handlers if (endpointCommandClass instanceof ZWaveEventListener) { this.addEventListener((ZWaveEventListener)endpointCommandClass); } } } } } } } // Create a new node if it wasn't deserialised ok if(node == null) { node = new ZWaveNode(this.homeId, nodeId, this); // Try to maintain the name and location (user supplied data) // even if the XML file was considered corrupt and we reload data from the device. node.setName(name); node.setLocation(location); } if(nodeId == this.ownNodeId) { // This is the controller node. 
// We already know the device type, id, manufacturer so set it here // It won't be set later as we probably won't request the manufacturer specific data node.setDeviceId(this.getDeviceId()); node.setDeviceType(this.getDeviceType()); node.setManufacturer(this.getManufactureId()); } // Place nodes in the local ZWave Controller this.zwaveNodes.put(nodeId, node); node.initialiseNode(); } break; case GetSucNodeId: // Remember the SUC ID this.sucID = ((GetSucNodeIdMessageClass)processor).getSucNodeId(); // If we want to be the SUC, enable it here if(this.setSUC == true && this.sucID == 0) { // We want to be SUC this.enqueue(new EnableSucMessageClass().doRequest(EnableSucMessageClass.SUCType.SERVER)); this.enqueue(new SetSucNodeMessageClass().doRequest(this.ownNodeId, SetSucNodeMessageClass.SUCType.SERVER)); } else if(this.setSUC == false && this.sucID == this.ownNodeId) { // We don't want to be SUC, but we currently are! // Disable SERVER functionality, and set the node to 0 this.enqueue(new EnableSucMessageClass().doRequest(EnableSucMessageClass.SUCType.NONE)); this.enqueue(new SetSucNodeMessageClass().doRequest(this.ownNodeId, SetSucNodeMessageClass.SUCType.NONE)); } this.enqueue(new GetControllerCapabilitiesMessageClass().doRequest()); break; case SerialApiGetCapabilities: this.serialAPIVersion = ((SerialApiGetCapabilitiesMessageClass)processor).getSerialAPIVersion(); this.manufactureId = ((SerialApiGetCapabilitiesMessageClass)processor).getManufactureId(); this.deviceId = ((SerialApiGetCapabilitiesMessageClass)processor).getDeviceId(); this.deviceType = ((SerialApiGetCapabilitiesMessageClass)processor).getDeviceType(); this.enqueue(new SerialApiGetInitDataMessageClass().doRequest()); break; case GetControllerCapabilities: this.controllerType = ((GetControllerCapabilitiesMessageClass)processor).getDeviceType(); break; default: break; } } // Controller methods /** * Connects to the comm port and starts send and receive threads. 
	 * @param serialPortName the port name to open
	 * @throws SerialInterfaceException when a connection error occurs.
	 */
	public void connect(final String serialPortName)
			throws SerialInterfaceException {
		logger.info("Connecting to serial port {}", serialPortName);
		try {
			CommPortIdentifier portIdentifier = CommPortIdentifier.getPortIdentifier(serialPortName);
			CommPort commPort = portIdentifier.open("org.openhab.binding.zwave",2000);
			this.serialPort = (SerialPort) commPort;
			// Z-Wave sticks talk 115200 8N1.
			this.serialPort.setSerialPortParams(115200,SerialPort.DATABITS_8,SerialPort.STOPBITS_1,SerialPort.PARITY_NONE);
			// Return from read() as soon as one byte is available, or after the receive timeout.
			this.serialPort.enableReceiveThreshold(1);
			this.serialPort.enableReceiveTimeout(ZWAVE_RECEIVE_TIMEOUT);
			this.receiveThread = new ZWaveReceiveThread();
			this.receiveThread.start();
			this.sendThread = new ZWaveSendThread();
			this.sendThread.start();

			logger.info("Serial port is initialized");
		} catch (NoSuchPortException e) {
			logger.error(String.format("Port %s does not exist", serialPortName));
			throw new SerialInterfaceException(String.format("Port %s does not exist", serialPortName), e);
		} catch (PortInUseException e) {
			logger.error(String.format("Port %s in use.", serialPortName));
			throw new SerialInterfaceException(String.format("Port %s in use.", serialPortName), e);
		} catch (UnsupportedCommOperationException e) {
			logger.error(String.format("Unsupported comm operation on Port %s.", serialPortName));
			throw new SerialInterfaceException(String.format("Unsupported comm operation on Port %s.", serialPortName), e);
		}
	}

	/**
	 * Closes the connection to the Z-Wave controller.
*/ public void close() { if (watchdog != null) { watchdog.cancel(); watchdog = null; } disconnect(); // clear nodes collection and send queue ArrayList<ZWaveEventListener> copy = new ArrayList<ZWaveEventListener>(this.zwaveEventListeners); for (Object listener : copy.toArray()) { if (!(listener instanceof ZWaveNode)) continue; this.zwaveEventListeners.remove(listener); } this.zwaveNodes.clear(); this.sendQueue.clear(); logger.info("Stopped Z-Wave controller"); } /** * Disconnects from the serial interface and stops * send and receive threads. */ public void disconnect() { if (sendThread != null) { sendThread.interrupt(); try { sendThread.join(); } catch (InterruptedException e) { } sendThread = null; } if (receiveThread != null) { receiveThread.interrupt(); try { receiveThread.join(); } catch (InterruptedException e) { } receiveThread = null; } if(transactionCompleted.availablePermits() < 0) transactionCompleted.release(transactionCompleted.availablePermits()); transactionCompleted.drainPermits(); logger.trace("Transaction completed permit count -> {}", transactionCompleted.availablePermits()); if (this.serialPort != null) { this.serialPort.close(); this.serialPort = null; } logger.info("Disconnected from serial port"); } /** * Enqueues a message for sending on the send queue. * @param serialMessage the serial message to enqueue. */ public void enqueue(SerialMessage serialMessage) { this.sendQueue.add(serialMessage); logger.debug("Enqueueing message. Queue length = {}", this.sendQueue.size()); } /** * Returns the size of the send queue. */ public int getSendQueueLength() { return this.sendQueue.size(); } /** * Notify our own event listeners of a Z-Wave event. * @param event the event to send. 
	 */
	public void notifyEventListeners(ZWaveEvent event) {
		logger.debug("Notifying event listeners");
		// Iterate over a snapshot so listeners may (de)register themselves
		// from within the callback without a ConcurrentModificationException.
		ArrayList<ZWaveEventListener> copy = new ArrayList<ZWaveEventListener>(this.zwaveEventListeners);
		for (ZWaveEventListener listener : copy) {
			logger.trace("Notifying {}", listener.toString());
			listener.ZWaveIncomingEvent(event);
		}

		// We also need to handle the inclusion internally within the controller
		if(event instanceof ZWaveInclusionEvent) {
			ZWaveInclusionEvent incEvent = (ZWaveInclusionEvent)event;
			switch(incEvent.getEvent()) {
			case IncludeDone:
				logger.debug("NODE {}: Including node.", incEvent.getNodeId());
				// First make sure this isn't an existing node
				if(getNode(incEvent.getNodeId()) != null) {
					logger.debug("NODE {}: Newly included node already exists - not initialising.", incEvent.getNodeId());
					break;
				}

				// Initialise the new node
				ZWaveNode node = new ZWaveNode(this.homeId, incEvent.getNodeId(), this);

				this.zwaveNodes.put(incEvent.getNodeId(), node);
				node.initialiseNode();
				break;
			case ExcludeDone:
				logger.debug("NODE {}: Excluding node.", incEvent.getNodeId());
				// Remove the node from the controller
				if(getNode(incEvent.getNodeId()) == null) {
					logger.debug("NODE {}: Excluding node that doesn't exist.", incEvent.getNodeId());
					break;
				}
				this.zwaveNodes.remove(incEvent.getNodeId());

				// Remove the XML file
				ZWaveNodeSerializer nodeSerializer = new ZWaveNodeSerializer();
				nodeSerializer.DeleteNode(event.getNodeId());
				break;
			default:
				break;
			}
		}

		if(event instanceof ZWaveNetworkEvent) {
			ZWaveNetworkEvent networkEvent = (ZWaveNetworkEvent)event;
			switch(networkEvent.getEvent()) {
			case FailedNode:
				if(getNode(networkEvent.getNodeId()) == null) {
					logger.debug("NODE {}: Deleting a node that doesn't exist.", networkEvent.getNodeId());
					break;
				}
				if (networkEvent.getState() == State.Success) {
					logger.warn("NODE {}: Marking node as FAILED because its on the controllers failed node list.", networkEvent.getNodeId());
					getNode(networkEvent.getNodeId()).setFailed();

					ZWaveEvent zEvent = new ZWaveNodeStatusEvent(networkEvent.getNodeId(), ZWaveNodeStatusEvent.State.Failed);
					this.notifyEventListeners(zEvent);
					break;
				}
				// NOTE(review): when the state is not Success the FailedNode case
				// falls through into DeleteNode and removes the node - confirm
				// this fall-through is intentional.
			case DeleteNode:
				if(getNode(networkEvent.getNodeId()) == null) {
					logger.debug("NODE {}: Deleting a node that doesn't exist.", networkEvent.getNodeId());
					break;
				}
				this.zwaveNodes.remove(networkEvent.getNodeId());

				//Remove the XML file
				ZWaveNodeSerializer nodeSerializer = new ZWaveNodeSerializer();
				nodeSerializer.DeleteNode(event.getNodeId());
				break;
			default:
				break;
			}
		}
	}

	/**
	 * Initializes communication with the Z-Wave controller stick.
	 */
	public void initialize() {
		this.enqueue(new GetVersionMessageClass().doRequest());
		this.enqueue(new MemoryGetIdMessageClass().doRequest());
		this.enqueue(new SerialApiGetCapabilitiesMessageClass().doRequest());
		this.enqueue(new GetSucNodeIdMessageClass().doRequest());
	}

	/**
	 * Send Identify Node message to the controller.
	 * @param nodeId the nodeId of the node to identify
	 */
	public void identifyNode(int nodeId) {
		this.enqueue(new IdentifyNodeMessageClass().doRequest(nodeId));
	}

	/**
	 * Send Request Node info message to the controller.
	 * @param nodeId the nodeId of the node to identify
	 */
	public void requestNodeInfo(int nodeId) {
		this.enqueue(new RequestNodeInfoMessageClass().doRequest(nodeId));
	}

	/**
	 * Checks for dead or sleeping nodes during Node initialization.
	 * JwS: merged checkInitComplete and checkForDeadOrSleepingNodes to prevent possibly looping nodes multiple times.
	 */
	public void checkForDeadOrSleepingNodes(){
		int completeCount = 0;

		if (zwaveNodes.isEmpty())
			return;

		// There are still nodes waiting to get a ping.
		// So skip the dead node checking.
		for (SerialMessage serialMessage : sendQueue) {
			if (serialMessage.getPriority() == SerialMessagePriority.Low)
				return;
		}

		logger.trace("Checking for Dead or Sleeping Nodes.");
		for (Map.Entry<Integer, ZWaveNode> entry : zwaveNodes.entrySet()){
			if (entry.getValue().getNodeStage() == NodeStage.EMPTYNODE)
				continue;

			logger.debug(String.format("NODE %d: Has been in Stage %s since %s", entry.getKey(), entry.getValue().getNodeStage().getLabel(), entry.getValue().getQueryStageTimeStamp().toString()));

			// A node counts as complete when it is fully initialised, dead,
			// or a battery device that will finish when it next wakes.
			if(entry.getValue().getNodeStage() == NodeStage.DONE || entry.getValue().getNodeStage() == NodeStage.DEAD
					|| (!entry.getValue().isListening() && !entry.getValue().isFrequentlyListening())) {
				completeCount++;
				continue;
			}

			logger.trace("NODE {}: Checking if {} miliseconds have passed in current stage.", entry.getKey(), QUERY_STAGE_TIMEOUT);
			if(Calendar.getInstance().getTimeInMillis() < (entry.getValue().getQueryStageTimeStamp().getTime() + QUERY_STAGE_TIMEOUT))
				continue;

			logger.warn(String.format("NODE %d: May be dead, marking node DEAD.", entry.getKey()));
			entry.getValue().setDead();
			completeCount++;
		}

		// If all nodes are completed, then we say the binding is ready for business
		if(this.zwaveNodes.size() == completeCount && initializationComplete == false) {
			// We only want this event once!
			initializationComplete = true;

			ZWaveEvent zEvent = new ZWaveInitializationCompletedEvent(this.ownNodeId);
			this.notifyEventListeners(zEvent);

			// If there are DEAD nodes, send a Node Status event
			// We do that here to avoid messing with the binding initialisation
			for(ZWaveNode node : this.getNodes()) {
				if (node.isDead()) {
					logger.debug("NODE {}: DEAD node.", node.getNodeId());

					zEvent = new ZWaveNodeStatusEvent(node.getNodeId(), ZWaveNodeStatusEvent.State.Dead);
					this.notifyEventListeners(zEvent);
				}
			}
		}
	}

	/**
	 * Polls a node for any dynamic information.
	 * Walks the node's command classes (including per-endpoint classes behind
	 * a multi-instance class) and sends the dynamic-value queries of each.
	 * @param node the node to poll
	 */
	public void pollNode(ZWaveNode node) {
		for (ZWaveCommandClass zwaveCommandClass : node.getCommandClasses()) {
			logger.trace("NODE {}: Inspecting command class {}", node.getNodeId(), zwaveCommandClass.getCommandClass().getLabel());
			if (zwaveCommandClass instanceof ZWaveCommandClassDynamicState) {
				logger.debug("NODE {}: Found dynamic state command class {}", node.getNodeId(), zwaveCommandClass.getCommandClass()
						.getLabel());
				ZWaveCommandClassDynamicState zdds = (ZWaveCommandClassDynamicState) zwaveCommandClass;
				int instances = zwaveCommandClass.getInstances();
				if (instances == 0) {
					Collection<SerialMessage> dynamicQueries = zdds.getDynamicValues(true);
					for (SerialMessage serialMessage : dynamicQueries) {
						sendData(serialMessage);
					}
				} else {
					// Multi-instance: query each instance separately, wrapped in
					// a multi-instance encapsulation frame.
					for (int i = 1; i <= instances; i++) {
						Collection<SerialMessage> dynamicQueries = zdds.getDynamicValues(true);
						for (SerialMessage serialMessage : dynamicQueries) {
							sendData(node.encapsulate(serialMessage, zwaveCommandClass, i));
						}
					}
				}
			} else if (zwaveCommandClass instanceof ZWaveMultiInstanceCommandClass) {
				ZWaveMultiInstanceCommandClass multiInstanceCommandClass = (ZWaveMultiInstanceCommandClass) zwaveCommandClass;
				for (ZWaveEndpoint endpoint : multiInstanceCommandClass.getEndpoints()) {
					for (ZWaveCommandClass endpointCommandClass : endpoint.getCommandClasses()) {
						logger.trace(String.format("NODE %d: Inspecting command class %s for endpoint %d", node.getNodeId(),
								endpointCommandClass.getCommandClass().getLabel(), endpoint.getEndpointId()));
						if (endpointCommandClass instanceof ZWaveCommandClassDynamicState) {
							logger.debug("NODE {}: Found dynamic state command class {}", node.getNodeId(), endpointCommandClass
									.getCommandClass().getLabel());
							ZWaveCommandClassDynamicState zdds2 = (ZWaveCommandClassDynamicState) endpointCommandClass;
							Collection<SerialMessage> dynamicQueries = zdds2.getDynamicValues(true);
							for (SerialMessage serialMessage : dynamicQueries) {
								// Encapsulate for the specific endpoint before sending.
								sendData(node.encapsulate(serialMessage, endpointCommandClass, endpoint.getEndpointId()));
							}
						}
					}
				}
			}
		}
	}

	/**
	 * Request the node routing information.
	 *
	 * @param nodeId The address of the node to update
	 */
	public void requestNodeRoutingInfo(int nodeId) {
		this.enqueue(new GetRoutingInfoMessageClass().doRequest(nodeId));
	}

	/**
	 * Request the node neighbor list to be updated for the specified node.
	 * Once this is complete, the requestNodeRoutingInfo will be called
	 * automatically to update the data in the binding.
	 *
	 * @param nodeId The address of the node to update
	 */
	public void requestNodeNeighborUpdate(int nodeId) {
		this.enqueue(new RequestNodeNeighborUpdateMessageClass().doRequest(nodeId));
	}

	/**
	 * Puts the controller into inclusion mode to add new nodes
	 */
	public void requestAddNodesStart() {
		this.enqueue(new AddNodeMessageClass().doRequestStart(true));
	}

	/**
	 * Terminates the inclusion mode
	 */
	public void requestAddNodesStop() {
		this.enqueue(new AddNodeMessageClass().doRequestStop());
	}

	/**
	 * Puts the controller into exclusion mode to remove new nodes
	 */
	public void requestRemoveNodesStart() {
		this.enqueue(new RemoveNodeMessageClass().doRequestStart(true));
	}

	/**
	 * Terminates the exclusion mode
	 */
	public void requestRemoveNodesStop() {
		this.enqueue(new RemoveNodeMessageClass().doRequestStop());
	}

	/**
	 * Sends a request to perform a soft reset on the controller.
	 * This will just reset the controller - probably similar to a power cycle.
	 * It doesn't reinitialise the network, or change the network configuration.
	 *
	 * NOTE: At least for some (most!) sticks, this doesn't return a response.
	 * Therefore, the number of retries is set to 1.
	 * NOTE: On some (most!) ZWave-Plus sticks, this can cause the stick to hang.
	 */
	public void requestSoftReset() {
		SerialMessage msg = new SerialApiSoftResetMessageClass().doRequest();
		msg.attempts = 1;
		this.enqueue(msg);
	}

	/**
	 * Request if the node is currently marked as failed by the controller.
	 * @param nodeId The address of the node to check
	 */
	public void requestIsFailedNode(int nodeId) {
		this.enqueue(new IsFailedNodeMessageClass().doRequest(nodeId));
	}

	/**
	 * Removes a failed node from the network.
	 * Note that this won't remove nodes that have not failed.
	 * @param nodeId The address of the node to remove
	 */
	public void requestRemoveFailedNode(int nodeId) {
		this.enqueue(new RemoveFailedNodeMessageClass().doRequest(nodeId));
	}

	/**
	 * Delete all return nodes from the specified node. This should be performed
	 * before updating the routes
	 *
	 * @param nodeId
	 */
	public void requestDeleteAllReturnRoutes(int nodeId) {
		this.enqueue(new DeleteReturnRouteMessageClass().doRequest(nodeId));
	}

	/**
	 * Request the controller to set the return route between two nodes
	 *
	 * @param nodeId
	 *            Source node
	 * @param destinationId
	 *            Destination node
	 */
	public void requestAssignReturnRoute(int nodeId, int destinationId) {
		this.enqueue(new AssignReturnRouteMessageClass().doRequest(nodeId, destinationId, getCallbackId()));
	}

	/**
	 * Request the controller to set the return route from a node to the controller
	 *
	 * @param nodeId
	 *            Source node
	 */
	public void requestAssignSucReturnRoute(int nodeId) {
		this.enqueue(new AssignSucReturnRouteMessageClass().doRequest(nodeId, getCallbackId()));
	}

	/**
	 * Returns the next callback ID. The counter wraps from 0xFF back to 1
	 * so that 0 is never used as a callback id.
	 * @return callback ID
	 */
	public int getCallbackId() {
		if (++sentDataPointer > 0xFF)
			sentDataPointer = 1;
		logger.debug("Callback ID = {}", sentDataPointer);
		return sentDataPointer;
	}

	/**
* Transmits the SerialMessage to a single Z-Wave Node. * Sets the transmission options as well. * @param serialMessage the Serial message to send. */ public void sendData(SerialMessage serialMessage) { if (serialMessage.getMessageClass() != SerialMessageClass.SendData) { logger.error(String.format("Invalid message class %s (0x%02X) for sendData", serialMessage.getMessageClass().getLabel(), serialMessage.getMessageClass().getKey())); return; } if (serialMessage.getMessageType() != SerialMessageType.Request) { logger.error("Only request messages can be sent"); return; } ZWaveNode node = this.getNode(serialMessage.getMessageNode()); // Keep track of the number of packets sent to this device node.incrementSendCount(); if (!node.isListening() && !node.isFrequentlyListening() && serialMessage.getPriority() != SerialMessagePriority.Low) { ZWaveWakeUpCommandClass wakeUpCommandClass = (ZWaveWakeUpCommandClass)node.getCommandClass(CommandClass.WAKE_UP); // If it's a battery operated device, check if it's awake or place in wake-up queue. if (wakeUpCommandClass != null && !wakeUpCommandClass.processOutgoingWakeupMessage(serialMessage)) { return; } } serialMessage.setTransmitOptions(TRANSMIT_OPTION_ACK | TRANSMIT_OPTION_AUTO_ROUTE | TRANSMIT_OPTION_EXPLORE); serialMessage.setCallbackId(getCallbackId()); this.enqueue(serialMessage); } /** * Add a listener for Z-Wave events to this controller. * @param eventListener the event listener to add. */ public void addEventListener(ZWaveEventListener eventListener) { this.zwaveEventListeners.add(eventListener); } /** * Remove a listener for Z-Wave events to this controller. * @param eventListener the event listener to remove. */ public void removeEventListener(ZWaveEventListener eventListener) { this.zwaveEventListeners.remove(eventListener); } /** * Gets the API Version of the controller. * @return the serialAPIVersion */ public String getSerialAPIVersion() { return serialAPIVersion; } /** * Gets the zWave Version of the controller. 
	 * @return the zWaveVersion
	 */
	public String getZWaveVersion() {
		return zWaveVersion;
	}

	/**
	 * Gets the Manufacturer ID of the controller.
	 * @return the manufactureId
	 */
	public int getManufactureId() {
		return manufactureId;
	}

	/**
	 * Gets the device type of the controller;
	 * @return the deviceType
	 */
	public int getDeviceType() {
		return deviceType;
	}

	/**
	 * Gets the device ID of the controller.
	 * @return the deviceId
	 */
	public int getDeviceId() {
		return deviceId;
	}

	/**
	 * Gets the node ID of the controller.
	 * @return the deviceId
	 */
	public int getOwnNodeId() {
		return ownNodeId;
	}

	/**
	 * Gets the device type of the controller.
	 * @return the device type
	 */
	public ZWaveDeviceType getControllerType() {
		return controllerType;
	}

	/**
	 * Gets the networks SUC controller ID.
	 * @return the device id of the SUC, or 0 if none exists
	 */
	public int getSucId() {
		return sucID;
	}

	/**
	 * Gets the node object using it's node ID as key.
	 * Returns null if the node is not found
	 * @param nodeId the Node ID of the node to get.
	 * @return node object
	 */
	public ZWaveNode getNode(int nodeId) {
		return this.zwaveNodes.get(nodeId);
	}

	/**
	 * Gets the node list
	 * @return
	 */
	public Collection<ZWaveNode> getNodes() {
		return this.zwaveNodes.values();
	}

	/**
	 * Indicates a working connection to the
	 * Z-Wave controller stick and initialization complete
	 * @return isConnected;
	 */
	public boolean isConnected() {
		return isConnected && initializationComplete;
	}

	/**
	 * Gets the number of Start Of Frames received.
	 * @return the sOFCount
	 */
	public int getSOFCount() {
		return SOFCount;
	}

	/**
	 * Gets the number of Canceled Frames received.
	 * @return the cANCount
	 */
	public int getCANCount() {
		return CANCount;
	}

	/**
	 * Gets the number of Not Acknowledged Frames received.
	 * @return the nAKCount
	 */
	public int getNAKCount() {
		return NAKCount;
	}

	/**
	 * Gets the number of Acknowledged Frames received.
	 * @return the aCKCount
	 */
	public int getACKCount() {
		return ACKCount;
	}

	/**
	 * Returns the number of Out of Order frames received.
	 * @return the oOFCount
	 */
	public int getOOFCount() {
		return OOFCount;
	}

	/**
	 * Returns the number of Time-Outs while sending.
	 * @return the oOFCount
	 */
	public int getTimeOutCount() {
		return timeOutCount.get();
	}

	// Nested classes and enumerations

	/**
	 * Z-Wave controller Send Thread. Takes care of sending all messages.
	 * It uses a semaphore to synchronize communication with the receiving thread.
	 * @author Jan-Willem Spuij
	 * @since 1.3.0
	 */
	private class ZWaveSendThread extends Thread {

		private final Logger logger = LoggerFactory.getLogger(ZWaveSendThread.class);

		/**
		 * Run method. Runs the actual sending process.
		 */
		@Override
		public void run() {
			logger.debug("Starting Z-Wave send thread");
			try {
				while (!interrupted()) {
					try {
						// Blocks until a message is available.
						lastSentMessage = sendQueue.take();
						logger.debug("Took message from queue for sending. Queue length = {}", sendQueue.size());
					} catch (InterruptedException e1) {
						break;
					}

					if (lastSentMessage == null)
						continue;

					// If this message is a data packet to a node
					// then make sure the node is not a battery device.
					// If it's a battery device, it needs to be awake, or we queue the frame until it is.
					if (lastSentMessage.getMessageClass() == SerialMessageClass.SendData) {
						ZWaveNode node = getNode(lastSentMessage.getMessageNode());

						if (node != null && !node.isListening() && !node.isFrequentlyListening() && lastSentMessage.getPriority() != SerialMessagePriority.Low) {
							ZWaveWakeUpCommandClass wakeUpCommandClass = (ZWaveWakeUpCommandClass)node.getCommandClass(CommandClass.WAKE_UP);

							// If it's a battery operated device, check if it's awake or place in wake-up queue.
							if (wakeUpCommandClass != null && !wakeUpCommandClass.processOutgoingWakeupMessage(lastSentMessage)) {
								continue;
							}
						}
					}

					// Clear the semaphore used to acknowledge the response.
					transactionCompleted.drainPermits();

					// Send the message to the controller
					byte[] buffer = lastSentMessage.getMessageBuffer();
					logger.debug("Sending Message = " + SerialMessage.bb2hex(buffer));
					lastMessageStartTime = System.currentTimeMillis();
					try {
						synchronized (serialPort.getOutputStream()) {
							serialPort.getOutputStream().write(buffer);
							serialPort.getOutputStream().flush();
						}
					} catch (IOException e) {
						logger.error("Got I/O exception {} during sending. exiting thread.", e.getLocalizedMessage());
						break;
					}

					// Now wait for the response...
					try {
						if (!transactionCompleted.tryAcquire(1, zWaveResponseTimeout, TimeUnit.MILLISECONDS)) {
							timeOutCount.incrementAndGet();
							// Timed out waiting for the transaction to finish.
							// For SendData frames, tell the stick to abort first.
							if (lastSentMessage.getMessageClass() == SerialMessageClass.SendData) {
								buffer = new SerialMessage(SerialMessageClass.SendDataAbort, SerialMessageType.Request, SerialMessageClass.SendData, SerialMessagePriority.High).getMessageBuffer();
								logger.debug("Sending Message = " + SerialMessage.bb2hex(buffer));
								try {
									synchronized (serialPort.getOutputStream()) {
										serialPort.getOutputStream().write(buffer);
										serialPort.getOutputStream().flush();
									}
								} catch (IOException e) {
									logger.error("Got I/O exception {} during sending. exiting thread.", e.getLocalizedMessage());
									break;
								}
							}

							// Retry while attempts remain; otherwise drop the message.
							if (--lastSentMessage.attempts >= 0) {
								logger.error("NODE {}: Timeout while sending message. Requeueing", lastSentMessage.getMessageNode());
								if (lastSentMessage.getMessageClass() == SerialMessageClass.SendData)
									handleFailedSendDataRequest(lastSentMessage);
								else
									enqueue(lastSentMessage);
							} else {
								logger.warn("NODE {}: Discarding message: {}", lastSentMessage.getMessageNode(), lastSentMessage.toString());
							}
							continue;
						}
						long responseTime = System.currentTimeMillis() - lastMessageStartTime;
						if(responseTime > longestResponseTime)
							longestResponseTime = responseTime;
						logger.debug("Response processed after {}ms/{}ms.", responseTime, longestResponseTime);
						logger.trace("Acquired. Transaction completed permit count -> {}", transactionCompleted.availablePermits());
					} catch (InterruptedException e) {
						break;
					}
				}
			} catch (Exception e) {
				logger.error("Got an exception during sending. exiting thread.", e);
			}
			logger.debug("Stopped Z-Wave send thread");
		}
	}

	/**
	 * Z-Wave controller Receive Thread. Takes care of receiving all messages.
	 * It uses a semaphore to synchronize communication with the sending thread.
	 * @author Jan-Willem Spuij
	 * @since 1.3.0
	 */
	private class ZWaveReceiveThread extends Thread {

		// Serial framing control bytes of the Z-Wave Serial API.
		private static final int SOF = 0x01;
		private static final int ACK = 0x06;
		private static final int NAK = 0x15;
		private static final int CAN = 0x18;

		private final Logger logger = LoggerFactory.getLogger(ZWaveReceiveThread.class);

		/**
		 * Sends 1 byte frame response.
		 * @param response the response code to send.
		 */
		private void sendResponse(int response) {
			try {
				synchronized (serialPort.getOutputStream()) {
					serialPort.getOutputStream().write(response);
					serialPort.getOutputStream().flush();
				}
			} catch (IOException e) {
				logger.error(e.getMessage());
			}
		}

		/**
		 * Processes incoming message and notifies event handlers.
		 * ACKs valid frames; invalid frames are silently dropped.
		 * @param buffer the buffer to process.
		 */
		private void processIncomingMessage(byte[] buffer) {
			SerialMessage serialMessage = new SerialMessage(buffer);
			if (serialMessage.isValid) {
				logger.trace("Message is valid, sending ACK");
				sendResponse(ACK);
			} else {
				logger.error("Message is not valid, discarding");
				return;
			}

			handleIncomingMessage(serialMessage);
		}

		/**
		 * Run method. Runs the actual receiving process.
		 */
		@Override
		public void run() {
			logger.debug("Starting Z-Wave receive thread");
			try {
				// Send a NAK to resynchronise communications
				sendResponse(NAK);

				while (!interrupted()) {
					int nextByte;

					try {
						nextByte = serialPort.getInputStream().read();

						// Read timeout (see enableReceiveTimeout in connect()).
						if (nextByte == -1)
							continue;
					} catch (IOException e) {
						logger.error("Got I/O exception {} during receiving. exiting thread.", e.getLocalizedMessage());
						break;
					}

					switch (nextByte) {
						case SOF:
							// Frame start: next byte is the payload length,
							// then the payload itself.
							int messageLength;
							try {
								messageLength = serialPort.getInputStream().read();
							} catch (IOException e) {
								logger.error("Got I/O exception {} during receiving. exiting thread.", e.getLocalizedMessage());
								break;
							}

							byte[] buffer = new byte[messageLength + 2];
							buffer[0] = SOF;
							buffer[1] = (byte)messageLength;
							int total = 0;

							while (total < messageLength) {
								try {
									int read = serialPort.getInputStream().read(buffer, total + 2, messageLength - total);
									total += (read > 0 ? read : 0);
								} catch (IOException e) {
									logger.error("Got I/O exception {} during receiving. exiting thread.", e.getLocalizedMessage());
									return;
								}
							}

							logger.trace("Reading message finished" );
							logger.debug("Receive Message = {}", SerialMessage.bb2hex(buffer));
							processIncomingMessage(buffer);
							SOFCount++;
							break;
						case ACK:
							logger.trace("Received ACK");
							ACKCount++;
							break;
						case NAK:
							logger.error("Message not acklowledged by controller (NAK), discarding");
							transactionCompleted.release();
							logger.trace("Released. Transaction completed permit count -> {}", transactionCompleted.availablePermits());
							NAKCount++;
							break;
						case CAN:
							logger.error("Message cancelled by controller (CAN), resending");
							try {
								Thread.sleep(100);
							} catch (InterruptedException e) {
								break;
							}
							enqueue(lastSentMessage);
							transactionCompleted.release();
							logger.trace("Released. Transaction completed permit count -> {}", transactionCompleted.availablePermits());
							CANCount++;
							break;
						default:
							logger.warn(String.format("Out of Frame flow. Got 0x%02X. Sending NAK.", nextByte));
							sendResponse(NAK);
							OOFCount++;
							break;
					}
				}
			} catch (Exception e) {
				logger.error("Got an exception during receiving. exiting thread.", e);
			}
			logger.debug("Stopped Z-Wave receive thread");
		}
	}

	/**
	 * WatchDogTimerTask class. Acts as a watch dog and
	 * checks the serial threads to see whether they are
	 * still running.
	 * @author Jan-Willem Spuij
	 * @since 1.3.0
	 */
	private class WatchDogTimerTask extends TimerTask {

		private final Logger logger = LoggerFactory.getLogger(WatchDogTimerTask.class);
		private final String serialPortName;

		/**
		 * Creates a new instance of the WatchDogTimerTask class.
		 * @param serialPortName the serial port name to reconnect to
		 * in case the serial threads have died.
		 */
		public WatchDogTimerTask(String serialPortName) {
			this.serialPortName = serialPortName;
		}

		/**
		 * {@inheritDoc}
		 */
		@Override
		public void run() {
			logger.trace("Watchdog: Checking Serial threads");
			// Respawn the serial I/O threads if either one has died.
			if ((receiveThread != null && !receiveThread.isAlive()) ||
					(sendThread != null && !sendThread.isAlive()))
			{
				logger.warn("Threads not alive, respawning");
				disconnect();
				try {
					connect(serialPortName);
				} catch (SerialInterfaceException e) {
					logger.error("unable to restart Serial threads: {}", e.getLocalizedMessage());
				}
			}
		}
	}
}
package pitt.search.semanticvectors; import java.io.IOException; import java.util.Arrays; import java.util.logging.Logger; import pitt.search.semanticvectors.vectors.VectorType; /** * Command line utility for creating semantic vector indexes. */ public class BuildIndex { public static Logger logger = Logger.getLogger("pitt.search.semanticvectors"); public static String usageMessage = "\nBuildIndex class in package pitt.search.semanticvectors" + "\nUsage: java pitt.search.semanticvectors.BuildIndex PATH_TO_LUCENE_INDEX" + "\nBuildIndex creates termvectors and docvectors files in local directory." + "\nOther parameters that can be changed include number of dimensions, " + "vector type (real, binary or complex), seed length (number of non-zero entries in" + "basic vectors), minimum term frequency, and number of iterative training cycles." + "\nTo change these use the command line arguments " + "\n -vectortpe [real, complex or binary]" + "\n -dimension [number of dimension]" + "\n -seedlength [seed length]" + "\n -minfrequency [minimum term frequency]" + "\n -maxnonalphabetchars [number non-alphabet characters (-1 for any number)]" + "\n -trainingcycles [training cycles]" + "\n -docindexing [incremental|inmemory|none] Switch between building doc vectors incrementally" + "\n (requires positional index), all in memory (default case), or not at all"; /** * Builds term vector and document vector stores from a Lucene index. * @param args [command line options to be parsed] then path to Lucene index */ public static void main (String[] args) throws IllegalArgumentException { try { args = Flags.parseCommandLineFlags(args); } catch (IllegalArgumentException e) { System.err.println(usageMessage); throw e; } // Only one argument should remain, the path to the Lucene index. 
if (args.length != 1) { System.out.println(usageMessage); throw (new IllegalArgumentException("After parsing command line flags, there were " + args.length + " arguments, instead of the expected 1.")); } String luceneIndex = args[0]; VerbatimLogger.info("Seedlength: " + Flags.seedlength + ", Dimension: " + Flags.dimension + ", Vector type: " + Flags.vectortype + ", Minimum frequency: " + Flags.minfrequency + ", Maximum frequency: " + Flags.maxfrequency + ", Number non-alphabet characters: " + Flags.maxnonalphabetchars + ", Contents fields are: " + Arrays.toString(Flags.contentsfields) + "\n"); String termFile = Flags.termvectorsfile; String docFile = Flags.docvectorsfile; try{ TermVectorsFromLucene vecStore; if (Flags.initialtermvectors.length() > 0) { // If Flags.initialtermvectors="random" create elemental (random index) // term vectors. Recommended to iterate at least once (i.e. -trainingcycles = 2) to // obtain semantic term vectors. // Otherwise attempt to load pre-existing semantic term vectors. VerbatimLogger.info("Creating term vectors ... \n"); vecStore = TermVectorsFromLucene.createTermBasedRRIVectors( luceneIndex, VectorType.valueOf(Flags.vectortype.toUpperCase()), Flags.dimension, Flags.seedlength, Flags.minfrequency, Flags.maxfrequency, Flags.maxnonalphabetchars, Flags.initialtermvectors, Flags.contentsfields); } else { VerbatimLogger.info("Creating elemental document vectors ... \n"); vecStore = TermVectorsFromLucene.createTermVectorsFromLucene( luceneIndex, VectorType.valueOf(Flags.vectortype.toUpperCase()), Flags.dimension, Flags.seedlength, Flags.minfrequency, Flags.maxfrequency, Flags.maxnonalphabetchars, null, Flags.contentsfields); } // Create doc vectors and write vectors to disk. 
if (Flags.docindexing.equals("incremental")) { VectorStoreWriter.writeVectors(termFile, vecStore); IncrementalDocVectors.createIncrementalDocVectors( vecStore, luceneIndex, Flags.contentsfields, "incremental_"+docFile); IncrementalTermVectors itermVectors = null; for (int i = 1; i < Flags.trainingcycles; ++i) { itermVectors = new IncrementalTermVectors( luceneIndex, VectorType.valueOf(Flags.vectortype.toUpperCase()), Flags.dimension, Flags.contentsfields, "incremental_"+docFile+".bin"); VectorStoreWriter.writeVectors( "incremental_termvectors"+Flags.trainingcycles+".bin", itermVectors); // Write over previous cycle's docvectors until final // iteration, then rename according to number cycles if (i == Flags.trainingcycles-1) docFile = "docvectors"+Flags.trainingcycles+".bin"; IncrementalDocVectors.createIncrementalDocVectors( itermVectors, luceneIndex, Flags.contentsfields, "incremental_"+docFile); } } else if (Flags.docindexing.equals("inmemory")) { DocVectors docVectors = new DocVectors(vecStore); for (int i = 1; i < Flags.trainingcycles; ++i) { VerbatimLogger.info("\nRetraining with learned document vectors ..."); vecStore = TermVectorsFromLucene.createTermVectorsFromLucene( luceneIndex, VectorType.valueOf(Flags.vectortype.toUpperCase()), Flags.dimension, Flags.seedlength, Flags.minfrequency, Flags.maxfrequency, Flags.maxnonalphabetchars, docVectors, Flags.contentsfields); docVectors = new DocVectors(vecStore); } // At end of training, convert document vectors from ID keys to pathname keys. 
VectorStore writeableDocVectors = docVectors.makeWriteableVectorStore(); if (Flags.trainingcycles > 1) { termFile = "termvectors" + Flags.trainingcycles + ".bin"; docFile = "docvectors" + Flags.trainingcycles + ".bin"; } VerbatimLogger.info("Writing term vectors to " + termFile + "\n"); VectorStoreWriter.writeVectors(termFile, vecStore); VerbatimLogger.info("Writing doc vectors to " + docFile + "\n"); VectorStoreWriter.writeVectors(docFile, writeableDocVectors); } else { // Write term vectors to disk even if there are no docvectors to output. VerbatimLogger.info("Writing term vectors to " + termFile + "\n"); VectorStoreWriter.writeVectors(termFile, vecStore); } } catch (IOException e) { e.printStackTrace(); } } }
package org.eclipse.birt.chart.ui.swt.wizard.data;

import org.eclipse.birt.chart.exception.ChartException;
import org.eclipse.birt.chart.model.ChartWithAxes;
import org.eclipse.birt.chart.model.data.DataPackage;
import org.eclipse.birt.chart.model.data.Query;
import org.eclipse.birt.chart.model.data.SeriesDefinition;
import org.eclipse.birt.chart.model.data.impl.QueryImpl;
import org.eclipse.birt.chart.ui.extension.i18n.Messages;
import org.eclipse.birt.chart.ui.swt.ColorPalette;
import org.eclipse.birt.chart.ui.swt.ColumnBindingInfo;
import org.eclipse.birt.chart.ui.swt.DataDefinitionTextManager;
import org.eclipse.birt.chart.ui.swt.DataTextDropListener;
import org.eclipse.birt.chart.ui.swt.DefaultSelectDataComponent;
import org.eclipse.birt.chart.ui.swt.IQueryExpressionManager;
import org.eclipse.birt.chart.ui.swt.SimpleTextTransfer;
import org.eclipse.birt.chart.ui.swt.composites.BaseGroupSortingDialog;
import org.eclipse.birt.chart.ui.swt.composites.GroupSortingDialog;
import org.eclipse.birt.chart.ui.swt.interfaces.IChartDataSheet;
import org.eclipse.birt.chart.ui.swt.interfaces.IDataServiceProvider;
import org.eclipse.birt.chart.ui.swt.interfaces.IUIServiceProvider;
import org.eclipse.birt.chart.ui.swt.wizard.ChartWizardContext;
import org.eclipse.birt.chart.ui.util.ChartUIConstants;
import org.eclipse.birt.chart.ui.util.ChartUIUtil;
import org.eclipse.birt.chart.ui.util.UIHelper;
import org.eclipse.birt.chart.util.ChartUtil;
import org.eclipse.birt.chart.util.PluginSettings;
import org.eclipse.birt.core.data.ExpressionUtil;
import org.eclipse.birt.core.data.IColumnBinding;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.core.ui.frameworks.taskwizard.WizardBase;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.dnd.DropTarget;
import org.eclipse.swt.dnd.Transfer;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;

/**
 * UI component that edits one data definition (a {@link Query} expression) of
 * a series definition in the chart wizard. Depending on whether the data
 * service provider supplies predefined query expressions, the input control is
 * either a {@link CCombo} or a plain {@link Text}. Optional buttons invoke the
 * expression builder, group/sort dialog, or aggregation editor.
 */
public class BaseDataDefinitionComponent extends DefaultSelectDataComponent implements
		SelectionListener,
		ModifyListener,
		FocusListener,
		KeyListener,
		IQueryExpressionManager
{

	protected Composite cmpTop;

	// Expression input when predefined queries exist (may be null otherwise).
	private CCombo cmbDefinition;

	// Expression input when no predefined queries exist (may be null otherwise).
	protected Text txtDefinition = null;

	private Button btnBuilder = null;

	private Button btnGroup = null;

	protected Query query = null;

	protected SeriesDefinition seriesdefinition = null;

	protected ChartWizardContext context = null;

	// Title passed to the expression builder dialog.
	private String sTitle = null;

	private String description = ""; //$NON-NLS-1$

	private String tooltipWhenBlank = Messages.getString( "BaseDataDefinitionComponent.Tooltip.InputValueExpression" ); //$NON-NLS-1$

	// Set on each modify event; saveQuery() only persists when this is true.
	protected boolean isQueryModified;

	private final String queryType;

	private int style = BUTTON_NONE;

	private AggregateEditorComposite fAggEditorComposite;

	/** Indicates no button */
	public static final int BUTTON_NONE = 0;

	/** Indicates button for group sorting will be created */
	public static final int BUTTON_GROUP = 1;

	/** Indicates button for aggregation will be created */
	public static final int BUTTON_AGGREGATION = 2;

	/**
	 * Convenience constructor: no extra buttons (BUTTON_NONE).
	 *
	 * @param queryType
	 * @param seriesdefinition
	 * @param query
	 * @param context
	 * @param sTitle
	 */
	public BaseDataDefinitionComponent( String queryType,
			SeriesDefinition seriesdefinition, Query query,
			ChartWizardContext context, String sTitle )
	{
		this( BUTTON_NONE, queryType, seriesdefinition, query, context, sTitle );
	}

	/**
	 *
	 * @param style
	 *            Specify buttons by using '|'. See {@link #BUTTON_GROUP},
	 *            {@link #BUTTON_NONE}, {@link #BUTTON_AGGREGATION}
	 * @param queryType
	 *            query type. See {@link ChartUIConstants#QUERY_CATEGORY},
	 *            {@link ChartUIConstants#QUERY_VALUE},
	 *            {@link ChartUIConstants#QUERY_OPTIONAL}
	 * @param seriesdefinition
	 * @param query
	 * @param context
	 * @param sTitle
	 */
	public BaseDataDefinitionComponent( int style, String queryType,
			SeriesDefinition seriesdefinition, Query query,
			ChartWizardContext context, String sTitle )
	{
		super( );
		assert query != null;
		this.query = query;
		this.queryType = queryType;
		this.seriesdefinition = seriesdefinition;
		this.context = context;
		// Fall back to the default title when none was supplied.
		this.sTitle = ( sTitle == null || sTitle.length( ) == 0 )
				? Messages.getString( "BaseDataDefinitionComponent.Text.SpecifyDataDefinition" ) //$NON-NLS-1$
				: sTitle;
		this.style = style;
	}

	/**
	 * Builds the component's widgets: optional description label, aggregation
	 * editor, the expression input (combo or text), the expression-builder
	 * button and the optional group/sort button. Also wires listeners, DND
	 * support and enablement rules for shared-binding / cube states.
	 */
	public Composite createArea( Composite parent )
	{
		int numColumns = 2;
		if ( description != null && description.length( ) > 0 )
		{
			numColumns++;
		}
		if ( ( style & BUTTON_AGGREGATION ) == BUTTON_AGGREGATION )
		{
			numColumns++;
		}
		if ( ( style & BUTTON_GROUP ) == BUTTON_GROUP )
		{
			numColumns++;
		}

		cmpTop = new Composite( parent, SWT.NONE );
		{
			GridLayout glContent = new GridLayout( );
			glContent.numColumns = numColumns;
			glContent.marginHeight = 0;
			glContent.marginWidth = 0;
			glContent.horizontalSpacing = 2;
			cmpTop.setLayout( glContent );
			GridData gd = new GridData( GridData.FILL_HORIZONTAL );
			cmpTop.setLayoutData( gd );
		}

		if ( description != null && description.length( ) > 0 )
		{
			Label lblDesc = new Label( cmpTop, SWT.NONE );
			lblDesc.setText( description );
			lblDesc.setToolTipText( tooltipWhenBlank );
		}

		if ( ( style & BUTTON_AGGREGATION ) == BUTTON_AGGREGATION )
		{
			createAggregationItem( cmpTop );
		}

		final Object[] predefinedQuery = context.getPredefinedQuery( queryType );
		if ( predefinedQuery != null )
		{
			// A non-null predefined query list means a combo is used; it is
			// read-only when the chart is part of another report item.
			cmbDefinition = new CCombo( cmpTop, context.getDataServiceProvider( )
					.checkState( IDataServiceProvider.PART_CHART ) ? SWT.READ_ONLY | SWT.BORDER
					: SWT.BORDER );
			GridData gd = new GridData( GridData.FILL_HORIZONTAL );
			gd.widthHint = 80;
			gd.grabExcessHorizontalSpace = true;
			cmbDefinition.setLayoutData( gd );
			if ( predefinedQuery.length > 0 )
			{
				populateExprComboItems( predefinedQuery );
			}
			else if ( getQuery( ).getDefinition( ) == null
					|| getQuery( ).getDefinition( ).equals( "" ) ) //$NON-NLS-1$
			{
				cmbDefinition.setEnabled( false );
			}

			cmbDefinition.addListener( SWT.Selection, new Listener( ) {

				public void handleEvent( Event event )
				{
					String oldQuery = query.getDefinition( ) == null ? "" : query.getDefinition( ); //$NON-NLS-1$
					// Combo may be disposed, so cache the text first
					String text = cmbDefinition.getText( );

					// Do nothing for the same query
					if ( !isTableSharedBinding( ) && text.equals( oldQuery ) )
					{
						return;
					}

					updateQuery( text );

					// Set category/Y optional expression by value series
					// expression if it is crosstab sharing.
					// NOTE(review): queryType is compared with == here; works
					// only if callers pass the ChartUIConstants constant
					// itself — confirm, or prefer equals().
					if ( !oldQuery.equals( text )
							&& queryType == ChartUIConstants.QUERY_VALUE )
					{
						if ( context.getDataServiceProvider( )
								.update( ChartUIConstants.QUERY_VALUE, text ) )
						{
							Event e = new Event( );
							e.data = BaseDataDefinitionComponent.this;
							e.widget = cmbDefinition;
							e.type = IChartDataSheet.EVENT_QUERY;
							context.getDataSheet( ).notifyListeners( e );
						}
					}

					// Change direction once category query is changed in xtab
					// case
					if ( context.getDataServiceProvider( )
							.checkState( IDataServiceProvider.PART_CHART )
							&& ChartUIConstants.QUERY_CATEGORY.equals( queryType )
							&& context.getModel( ) instanceof ChartWithAxes )
					{
						( (ChartWithAxes) context.getModel( ) ).setTransposed( cmbDefinition.getSelectionIndex( ) > 0 );
					}

					if ( predefinedQuery.length == 0
							&& ( getQuery( ).getDefinition( ) == null || getQuery( ).getDefinition( )
									.equals( "" ) ) ) //$NON-NLS-1$
					{
						cmbDefinition.setEnabled( false );
					}
				}
			} );
			cmbDefinition.addModifyListener( this );
			cmbDefinition.addFocusListener( this );
			cmbDefinition.addKeyListener( this );

			initComboExprText( );
		}
		else
		{
			txtDefinition = new Text( cmpTop, SWT.BORDER | SWT.SINGLE );
			GridData gdTXTDefinition = new GridData( GridData.FILL_HORIZONTAL );
			gdTXTDefinition.widthHint = 80;
			gdTXTDefinition.grabExcessHorizontalSpace = true;
			txtDefinition.setLayoutData( gdTXTDefinition );
			if ( query != null && query.getDefinition( ) != null )
			{
				txtDefinition.setText( query.getDefinition( ) );
				txtDefinition.setToolTipText( getTooltipForDataText( query.getDefinition( ) ) );
			}
			txtDefinition.addModifyListener( this );
			txtDefinition.addFocusListener( this );
			txtDefinition.addKeyListener( this );
		}

		// Listener for handling dropping of custom table header
		Control dropControl = getInputControl( );
		DropTarget target = new DropTarget( dropControl, DND.DROP_COPY );
		Transfer[] types = new Transfer[]{
			SimpleTextTransfer.getInstance( )
		};
		target.setTransfer( types );
		// Add drop support
		target.addDropListener( new DataTextDropListener( dropControl ) );
		// Add color manager
		DataDefinitionTextManager.getInstance( )
				.addDataDefinitionText( dropControl, this );

		btnBuilder = new Button( cmpTop, SWT.PUSH );
		{
			GridData gdBTNBuilder = new GridData( );
			ChartUIUtil.setChartImageButtonSizeByPlatform( gdBTNBuilder );
			btnBuilder.setLayoutData( gdBTNBuilder );
			btnBuilder.setImage( UIHelper.getImage( "icons/obj16/expressionbuilder.gif" ) ); //$NON-NLS-1$
			btnBuilder.addSelectionListener( this );
			btnBuilder.setToolTipText( Messages.getString( "DataDefinitionComposite.Tooltip.InvokeExpressionBuilder" ) ); //$NON-NLS-1$
			btnBuilder.getImage( ).setBackground( btnBuilder.getBackground( ) );
			btnBuilder.setEnabled( context.getUIServiceProvider( )
					.isInvokingSupported( ) );
			btnBuilder.setVisible( context.getUIServiceProvider( )
					.isEclipseModeSupported( ) );
		}

		if ( ( style & BUTTON_GROUP ) == BUTTON_GROUP )
		{
			btnGroup = new Button( cmpTop, SWT.PUSH );
			GridData gdBTNGroup = new GridData( );
			ChartUIUtil.setChartImageButtonSizeByPlatform( gdBTNGroup );
			btnGroup.setLayoutData( gdBTNGroup );
			btnGroup.setImage( UIHelper.getImage( "icons/obj16/group.gif" ) ); //$NON-NLS-1$
			btnGroup.addSelectionListener( this );
			btnGroup.setToolTipText( Messages.getString( "BaseDataDefinitionComponent.Label.EditGroupSorting" ) ); //$NON-NLS-1$
		}

		// Updates color setting
		setColor( );

		// In shared binding, only support predefined query
		IDataServiceProvider provider = context.getDataServiceProvider( );
		boolean isCubeNoMultiDimensions = ( provider.checkState( IDataServiceProvider.HAS_CUBE ) || provider.checkState( IDataServiceProvider.SHARE_CROSSTAB_QUERY ) )
				&& !provider.checkState( IDataServiceProvider.MULTI_CUBE_DIMENSIONS );
		if ( context.getDataServiceProvider( )
				.checkState( IDataServiceProvider.PART_CHART )
				|| context.getDataServiceProvider( )
						.checkState( IDataServiceProvider.SHARE_QUERY ) )
		{
			// Sharing query with crosstab allows user to edit category and Y
			// optional expression, so here doesn't disable the text field if it
			// is SHARE_CROSSTAB_QUERY.
			if ( txtDefinition != null
					&& !context.getDataServiceProvider( )
							.checkState( IDataServiceProvider.SHARE_CROSSTAB_QUERY ) )
			{
				txtDefinition.setEnabled( false );
			}
			btnBuilder.setEnabled( false );
			if ( btnGroup != null )
			{
				btnGroup.setEnabled( false );
			}
		}

		if ( cmbDefinition != null
				&& ChartUIConstants.QUERY_OPTIONAL.equals( queryType )
				&& isCubeNoMultiDimensions )
		{
			cmbDefinition.setEnabled( false );
			btnBuilder.setEnabled( false );
		}

		setTooltipForInputControl( );
		return cmpTop;
	}

	/**
	 * Initialize combo text and data.
	 */
	private void initComboExprText( )
	{
		if ( isTableSharedBinding( ) )
		{
			initComboExprTextForSharedBinding( );
		}
		else
		{
			cmbDefinition.setText( query.getDefinition( ) );
		}
	}

	/**
	 * Check if current is using table shared binding.
	 *
	 * @return
	 * @since 2.3
	 */
	private boolean isTableSharedBinding( )
	{
		return context.getDataServiceProvider( )
				.checkState( IDataServiceProvider.SHARE_QUERY )
				&& cmbDefinition != null
				&& cmbDefinition.getData( ) != null;
	}

	/**
	 * Initialize combo text and data for shared binding.
	 */
	private void initComboExprTextForSharedBinding( )
	{
		setUITextForSharedBinding( cmbDefinition, query.getDefinition( ) );
	}

	/**
	 * Populate expression items for combo. Items may come either as
	 * {display-name, data} pairs (Object[] elements) or as plain strings.
	 *
	 * @param predefinedQuery
	 */
	private void populateExprComboItems( Object[] predefinedQuery )
	{
		if ( predefinedQuery[0] instanceof Object[] )
		{
			String[] items = new String[predefinedQuery.length];
			Object[] data = new Object[predefinedQuery.length];
			for ( int i = 0; i < items.length; i++ )
			{
				items[i] = (String) ( (Object[]) predefinedQuery[i] )[0];
				data[i] = ( (Object[]) predefinedQuery[i] )[1];
			}
			cmbDefinition.setItems( items );
			cmbDefinition.setData( data );
		}
		else if ( predefinedQuery[0] instanceof String )
		{
			String[] items = new String[predefinedQuery.length];
			for ( int i = 0; i < items.length; i++ )
			{
				items[i] = (String) predefinedQuery[i];
			}
			cmbDefinition.setItems( items );
		}
	}

	/**
	 * Rebinds this component to another (seriesdefinition, query) pair when a
	 * different area is selected in the data sheet.
	 */
	public void selectArea( boolean selected, Object data )
	{
		if ( data instanceof Object[] )
		{
			Object[] array = (Object[]) data;
			seriesdefinition = (SeriesDefinition) array[0];
			query = (Query) array[1];
			setUIText( getInputControl( ), query.getDefinition( ) );
			DataDefinitionTextManager.getInstance( )
					.addDataDefinitionText( getInputControl( ), this );
			if ( fAggEditorComposite != null )
			{
				fAggEditorComposite.setSeriesDefinition( seriesdefinition );
			}
		}
		setColor( );
	}

	// Applies the ColorPalette color associated with the current expression
	// as the input control's background.
	private void setColor( )
	{
		if ( query != null )
		{
			Color cColor = ColorPalette.getInstance( )
					.getColor( getDisplayExpression( ) );
			if ( getInputControl( ) != null )
			{
				ChartUIUtil.setBackgroundColor( getInputControl( ), true, cColor );
			}
		}
	}

	public void dispose( )
	{
		if ( getInputControl( ) != null )
		{
			DataDefinitionTextManager.getInstance( )
					.removeDataDefinitionText( getInputControl( ) );
		}
		super.dispose( );
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.swt.events.SelectionListener#widgetSelected(org.eclipse.swt.events.SelectionEvent)
	 */
	public void widgetSelected( SelectionEvent e )
	{
		if ( e.getSource( ).equals( btnBuilder ) )
		{
			handleBuilderAction( );
		}
		else if ( e.getSource( ).equals( btnGroup ) )
		{
			handleGroupAction( );
		}
	}

	/**
	 * Handle grouping/sorting action. The dialog edits a copy of the series
	 * definition; on OK the sorting/grouping settings are copied back.
	 */
	protected void handleGroupAction( )
	{
		SeriesDefinition sdBackup = (SeriesDefinition) EcoreUtil.copy( seriesdefinition );
		GroupSortingDialog groupDialog = createGroupSortingDialog( sdBackup );

		if ( groupDialog.open( ) == Window.OK )
		{
			if ( !sdBackup.eIsSet( DataPackage.eINSTANCE.getSeriesDefinition_Sorting( ) ) )
			{
				seriesdefinition.eUnset( DataPackage.eINSTANCE.getSeriesDefinition_Sorting( ) );
			}
			else
			{
				seriesdefinition.setSorting( sdBackup.getSorting( ) );
			}
			seriesdefinition.setSortKey( sdBackup.getSortKey( ) );
			// Re-attach notification adapters to the copied-in EMF objects.
			seriesdefinition.getSortKey( )
					.eAdapters( )
					.addAll( seriesdefinition.eAdapters( ) );
			seriesdefinition.setGrouping( sdBackup.getGrouping( ) );
			seriesdefinition.getGrouping( )
					.eAdapters( )
					.addAll( seriesdefinition.eAdapters( ) );
			ChartUIUtil.checkGroupType( context, context.getModel( ) );
			ChartUIUtil.isValidAggregation( context,
					seriesdefinition.getGrouping( ),
					true );
		}
	}

	/**
	 * Handle builder dialog action: opens the expression builder and applies
	 * the returned expression to the input control and the query.
	 */
	protected void handleBuilderAction( )
	{
		try
		{
			String sExpr = context.getUIServiceProvider( )
					.invoke( IUIServiceProvider.COMMAND_EXPRESSION_DATA_BINDINGS,
							getExpression( getInputControl( ) ),
							context.getExtendedItem( ),
							sTitle );
			boolean isSuccess = setUIText( getInputControl( ), sExpr );
			updateQuery( sExpr );
			if ( !isSuccess )
			{
				// Control was disposed: notify the data sheet instead.
				Event event = new Event( );
				event.type = IChartDataSheet.EVENT_QUERY;
				event.data = queryType;
				context.getDataSheet( ).notifyListeners( event );
			}
		}
		catch ( ChartException e1 )
		{
			WizardBase.displayException( e1 );
		}
	}

	/**
	 * Create instance of <code>GroupSortingDialog</code> for base series or Y
	 * series.
	 *
	 * @param sdBackup
	 * @return
	 */
	protected GroupSortingDialog createGroupSortingDialog( SeriesDefinition sdBackup )
	{
		return new BaseGroupSortingDialog( cmpTop.getShell( ), context, sdBackup );
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.swt.events.SelectionListener#widgetDefaultSelected(org.eclipse.swt.events.SelectionEvent)
	 */
	public void widgetDefaultSelected( SelectionEvent e )
	{
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.swt.events.ModifyListener#modifyText(org.eclipse.swt.events.ModifyEvent)
	 */
	public void modifyText( ModifyEvent e )
	{
		if ( e.getSource( ).equals( getInputControl( ) ) )
		{
			isQueryModified = true;
			// Reset tooltip
			setTooltipForInputControl( );
		}
	}

	/**
	 * Set tooltip for input control.
	 */
	private void setTooltipForInputControl( )
	{
		getInputControl( ).setToolTipText( getTooltipForDataText( getExpression( getInputControl( ) ) ) );
	}

	/**
	 * Sets the description in the left of data text box.
	 *
	 * @param description
	 */
	public void setDescription( String description )
	{
		this.description = description;
	}

	public void focusGained( FocusEvent e )
	{
		// TODO Auto-generated method stub
	}

	public void focusLost( FocusEvent e )
	{
		// Null event is fired by Drop Listener manually
		if ( e == null || e.widget.equals( getInputControl( ) ) )
		{
			saveQuery( );
		}
	}

	// Persists the current input-control text into the query, if modified
	// since the last save, and fires an EVENT (type 0) to component listeners.
	protected void saveQuery( )
	{
		if ( isQueryModified )
		{
			updateQuery( ChartUIUtil.getText( getInputControl( ) ) );
			// Refresh color from ColorPalette
			setColor( );
			getInputControl( ).getParent( ).layout( );
			Event e = new Event( );
			e.text = query.getDefinition( ) == null ? "" //$NON-NLS-1$
					: query.getDefinition( );
			e.data = e.text;
			e.widget = getInputControl( );
			e.type = 0;
			fireEvent( e );
			isQueryModified = false;
		}
	}

	// Returns a tooltip for the given expression text: the column-binding
	// tooltip for group/aggregate shared-binding items, the blank tooltip for
	// empty text, otherwise the expression itself.
	private String getTooltipForDataText( String queryText )
	{
		if ( isTableSharedBinding( ) )
		{
			int index = cmbDefinition.getSelectionIndex( );
			if ( index >= 0 )
			{
				ColumnBindingInfo cbi = (ColumnBindingInfo) ( (Object[]) cmbDefinition.getData( ) )[index];
				if ( cbi.getColumnType( ) == ColumnBindingInfo.GROUP_COLUMN
						|| cbi.getColumnType( ) == ColumnBindingInfo.AGGREGATE_COLUMN )
				{
					return cbi.getTooltip( );
				}
			}
		}
		if ( queryText.trim( ).length( ) == 0 )
		{
			return tooltipWhenBlank;
		}
		return queryText;
	}

	public void keyPressed( KeyEvent e )
	{
		// Enter commits the current expression.
		if ( e.keyCode == SWT.CR || e.keyCode == SWT.KEYPAD_CR )
		{
			saveQuery( );
		}
	}

	public void keyReleased( KeyEvent e )
	{
		// TODO Auto-generated method stub
	}

	public void setTooltipWhenBlank( String tootipWhenBlank )
	{
		this.tooltipWhenBlank = tootipWhenBlank;
	}

	// Creates the aggregation editor; enabled only when grouping is supported
	// and either running in Eclipse or base-series grouping is on.
	private void createAggregationItem( Composite composite )
	{
		SeriesDefinition baseSD = ChartUIUtil.getBaseSeriesDefinitions( context.getModel( ) )
				.get( 0 );
		boolean enabled = ChartUIUtil.isGroupingSupported( context )
				&& ( PluginSettings.instance( ).inEclipseEnv( ) || baseSD.getGrouping( )
						.isEnabled( ) );
		fAggEditorComposite = new AggregateEditorComposite( composite,
				seriesdefinition,
				context,
				enabled );
	}

	// Whichever of txtDefinition/cmbDefinition was created; null before
	// createArea() runs.
	private Control getInputControl( )
	{
		if ( txtDefinition != null )
		{
			return txtDefinition;
		}
		if ( cmbDefinition != null )
		{
			return cmbDefinition;
		}
		return null;
	}

	private String getExpression( Control control )
	{
		return getActualExpression( control );
	}

	// Pushes an expression into the input control; returns false if the
	// control is null or already disposed.
	private boolean setUIText( Control control, String expression )
	{
		if ( control == null || control.isDisposed( ) )
		{
			return false;
		}
		if ( control instanceof Text )
		{
			( (Text) control ).setText( expression );
		}
		else if ( control instanceof CCombo )
		{
			if ( isTableSharedBinding( ) )
			{
				setUITextForSharedBinding( (CCombo) control, expression );
			}
			else
			{
				( (CCombo) control ).setText( expression );
			}
		}
		return true;
	}

	/**
	 * @param control
	 * @param expression
	 */
	private void setUITextForSharedBinding( CCombo control, String expression )
	{
		Object[] data = (Object[]) control.getData( );
		if ( data == null || data.length == 0 )
		{
			control.setText( expression );
		}
		else
		{
			String expr = getDisplayExpressionForSharedBinding( control,
					expression );
			control.setText( expr );
		}
	}

	/**
	 * Update query by specified expression.
	 * <p>
	 * Under shared binding case, update grouping/aggregate attributes of chart
	 * model if the selected item is group/aggregate expression.
	 */
	public void updateQuery( String expression )
	{
		if ( getInputControl( ) instanceof CCombo )
		{
			String oldQuery = query.getDefinition( ) == null ? "" : query.getDefinition( ); //$NON-NLS-1$
			Object checkResult = context.getDataServiceProvider( )
					.checkData( queryType, expression );
			if ( checkResult != null && checkResult instanceof Boolean )
			{
				if ( !( (Boolean) checkResult ).booleanValue( ) )
				{
					// Can't select expressions of one dimension to set
					// on category series and Y optional at one time.
					WizardBase.showException( Messages.getString( "BaseDataDefinitionComponent.WarningMessage.ExpressionsForbidden" ) ); //$NON-NLS-1$
					setUIText( getInputControl( ), oldQuery );
					return;
				}
				else
				{
					WizardBase.removeException( );
				}
			}
		}

		if ( !isTableSharedBinding( ) )
		{
			setQueryExpression( expression );
			return;
		}
		updateQueryForSharedBinding( expression );
		// Binding color to input control by expression and refresh color of
		// preview table.
		String regex = "\\Qrow[\"\\E.*\\Q\"]\\E"; //$NON-NLS-1$
		if ( expression.matches( regex ) )
		{
			DataDefinitionTextManager.getInstance( )
					.updateControlBackground( getInputControl( ), expression );
			final Event e = new Event( );
			e.data = BaseDataDefinitionComponent.this;
			e.widget = getInputControl( );
			e.type = IChartDataSheet.EVENT_QUERY;
			e.detail = IChartDataSheet.DETAIL_UPDATE_COLOR;
			// Use async thread to update UI to prevent control disposed
			Display.getCurrent( ).asyncExec( new Runnable( ) {

				public void run( )
				{
					context.getDataSheet( ).notifyListeners( e );
				}
			} );
		}
		else
		{
			getInputControl( ).setBackground( null );
		}
	}

	/**
	 * Update query expression for sharing query with table. Maps display
	 * expressions back to the underlying column-binding expressions and toggles
	 * series grouping / aggregation accordingly.
	 *
	 * @param expression
	 */
	private void updateQueryForSharedBinding( String expression )
	{
		Object[] data = (Object[]) cmbDefinition.getData( );
		if ( data != null && data.length > 0 )
		{
			String expr = expression;
			if ( ChartUIConstants.QUERY_CATEGORY.equals( queryType )
					|| ChartUIConstants.QUERY_OPTIONAL.equals( queryType ) )
			{
				boolean isGroupExpr = false;
				for ( int i = 0; i < data.length; i++ )
				{
					ColumnBindingInfo chi = (ColumnBindingInfo) data[i];
					int type = chi.getColumnType( );
					if ( type == ColumnBindingInfo.GROUP_COLUMN )
					{
						String groupRegex = ChartUtil.createRegularRowExpression( chi.getName( ),
								false );
						String regex = ChartUtil.createRegularRowExpression( chi.getName( ),
								true );
						if ( expression.matches( regex ) )
						{
							isGroupExpr = true;
							expr = expression.replaceAll( groupRegex,
									chi.getExpression( ) );
							break;
						}
					}
				}

				if ( ChartUIConstants.QUERY_CATEGORY.equals( queryType ) )
				{
					if ( isGroupExpr )
					{
						seriesdefinition.getGrouping( ).setEnabled( true );
					}
					else
					{
						seriesdefinition.getGrouping( ).setEnabled( false );
					}
				}
			}
			else if ( ChartUIConstants.QUERY_VALUE.equals( queryType ) )
			{
				boolean isAggregationExpr = false;
				ColumnBindingInfo chi = null;
				for ( int i = 0; i < data.length; i++ )
				{
					chi = (ColumnBindingInfo) data[i];
					int type = chi.getColumnType( );
					if ( type == ColumnBindingInfo.AGGREGATE_COLUMN )
					{
						String aggRegex = ChartUtil.createRegularRowExpression( chi.getName( ),
								false );
						String regex = ChartUtil.createRegularRowExpression( chi.getName( ),
								true );
						if ( expression.matches( regex ) )
						{
							isAggregationExpr = true;
							expr = expression.replaceAll( aggRegex,
									chi.getExpression( ) );
							break;
						}
					}
				}

				if ( isAggregationExpr )
				{
					seriesdefinition.getGrouping( ).setEnabled( true );
					seriesdefinition.getGrouping( )
							.setAggregateExpression( chi.getChartAggExpression( ) );
				}
				else
				{
					seriesdefinition.getGrouping( ).setEnabled( false );
					seriesdefinition.getGrouping( )
							.setAggregateExpression( null );
				}
			}

			setQueryExpression( expr );
		}
		else
		{
			setQueryExpression( expression );
		}
	}

	// Stores the expression in the query, creating the query lazily when the
	// component was handed a null one.
	private void setQueryExpression( String expression )
	{
		if ( query != null )
		{
			query.setDefinition( expression );
		}
		else
		{
			query = QueryImpl.create( expression );
			query.eAdapters( ).addAll( seriesdefinition.eAdapters( ) );
			// Since the data query must be non-null, it's created in
			// ChartUIUtil.getDataQuery(), assume current null is a grouping
			// query
			seriesdefinition.setQuery( query );
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.birt.chart.ui.swt.IQueryExpressionManager#getQuery()
	 */
	public Query getQuery( )
	{
		if ( query == null )
		{
			query = QueryImpl.create( getExpression( getInputControl( ) ) );
			query.eAdapters( ).addAll( seriesdefinition.eAdapters( ) );
			// Since the data query must be non-null, it's created in
			// ChartUIUtil.getDataQuery(), assume current null is a grouping
			// query
			seriesdefinition.setQuery( query );
		}

		return query;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.birt.chart.ui.swt.IQueryExpressionManager#getDisplayExpression()
	 */
	public String getDisplayExpression( )
	{
		if ( cmbDefinition != null && isTableSharedBinding( ) )
		{
			return getDisplayExpressionForSharedBinding( cmbDefinition,
					query.getDefinition( ) );
		}
		else
		{
			String expr = query.getDefinition( );
			return ( expr == null ) ? "" : expr; //$NON-NLS-1$
		}
	}

	// Converts a stored column-binding expression into its row["..."] display
	// form for the shared-binding combo.
	// NOTE(review): queryType is compared with == against the constants here
	// as well — confirm callers always pass the constant instances.
	private String getDisplayExpressionForSharedBinding( CCombo combo,
			String expression )
	{
		String expr = expression;
		Object[] data = (Object[]) combo.getData( );
		for ( int i = 0; data != null && i < data.length; i++ )
		{
			ColumnBindingInfo chi = (ColumnBindingInfo) data[i];
			if ( chi.getExpression( ) == null )
			{
				continue;
			}
			String columnExpr = null;
			try
			{
				columnExpr = ( (IColumnBinding) ExpressionUtil.extractColumnExpressions( chi.getExpression( ) )
						.get( 0 ) ).getResultSetColumnName( );
			}
			catch ( BirtException e )
			{
				// Unparseable binding expression: skip this candidate.
				continue;
			}

			String columnRegex = ChartUtil.createRegularRowExpression( columnExpr,
					false );
			String regex = ChartUtil.createRegularRowExpression( columnExpr,
					true );
			if ( expression != null && expression.matches( regex ) )
			{
				if ( queryType == ChartUIConstants.QUERY_CATEGORY )
				{
					boolean sdGrouped = seriesdefinition.getGrouping( )
							.isEnabled( );
					boolean groupedBinding = ( chi.getColumnType( ) == ColumnBindingInfo.GROUP_COLUMN );
					if ( sdGrouped && groupedBinding )
					{
						expr = expression.replaceAll( columnRegex,
								ExpressionUtil.createJSRowExpression( chi.getName( ) ) );
						break;
					}
				}
				else if ( queryType == ChartUIConstants.QUERY_OPTIONAL )
				{
					expr = expression.replaceAll( columnRegex,
							ExpressionUtil.createJSRowExpression( chi.getName( ) ) );
					break;
				}
			}
		}

		return ( expr == null ) ? "" : expr; //$NON-NLS-1$
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.birt.chart.ui.swt.IQueryExpressionManager#isValidExpression(java.lang.String)
	 */
	public boolean isValidExpression( String expression )
	{
		if ( context.getDataServiceProvider( )
				.checkState( IDataServiceProvider.SHARE_QUERY ) )
		{
			// Under shared query, only expressions present in the combo are valid.
			int index = cmbDefinition.indexOf( expression );
			if ( index < 0 )
			{
				return false;
			}
			return true;
		}

		return true;
	}

	/**
	 * The method is used to get actual expression from input control.For shared
	 * binding case, the expression is stored in data field of combo widget.
	 *
	 * @param control
	 * @return
	 * @since 2.3
	 */
	private String getActualExpression( Control control )
	{
		if ( control instanceof Text )
		{
			return ( (Text) control ).getText( );
		}
		if ( control instanceof CCombo )
		{
			Object[] data = (Object[]) control.getData( );
			if ( data != null
					&& data.length > 0
					&& data[0] instanceof ColumnBindingInfo )
			{
				// Map the displayed item text back to its stored expression.
				String txt = ( (CCombo) control ).getText( );
				String[] items = ( (CCombo) control ).getItems( );
				int index = 0;
				for ( ; items != null && items.length > 0 && index < items.length; index++ )
				{
					if ( items[index].equals( txt ) )
					{
						break;
					}
				}

				if ( items != null && index >= 0 && index < items.length )
				{
					return ( (ColumnBindingInfo) data[index] ).getExpression( );
				}
			}

			return ( (CCombo) control ).getText( );
		}
		return ""; //$NON-NLS-1$
	}
}
package com.codenvy.ide.ext.datasource.client.store;

import java.util.SortedMap;
import java.util.TreeMap;

import javax.inject.Inject;
import javax.validation.constraints.NotNull;

import com.codenvy.ide.dto.DtoFactory;
import com.codenvy.ide.ext.datasource.shared.ColumnDTO;
import com.codenvy.ide.ext.datasource.shared.DatabaseDTO;
import com.codenvy.ide.ext.datasource.shared.SchemaDTO;
import com.codenvy.ide.ext.datasource.shared.TableDTO;

/**
 * Pre-store processor that returns a deep copy of the database metadata with
 * its schemas, tables and columns sorted by name. The input DTO is never
 * modified.
 */
public class SortMetadataProcessor implements PreStoreProcessor {

    /** The client version of the DTO factory. */
    private final DtoFactory dtoFactory;

    @Inject
    public SortMetadataProcessor(final @NotNull DtoFactory dtoFactory) {
        this.dtoFactory = dtoFactory;
    }

    @Override
    public DatabaseDTO execute(final DatabaseDTO databaseDto) throws PreStoreProcessorException {
        if (databaseDto == null) {
            return null;
        }
        // Deep-copy through a JSON round-trip so the caller's DTO stays untouched.
        final String serialized = this.dtoFactory.toJson(databaseDto);
        final DatabaseDTO copy = this.dtoFactory.createDtoFromJson(serialized, DatabaseDTO.class);
        sortSchemas(copy);
        return copy;
    }

    /**
     * Sort the schemas in the database metadata DTO (and, recursively, their contents).
     *
     * @param database the metadata DTO.
     */
    private void sortSchemas(final DatabaseDTO database) {
        for (final SchemaDTO child : database.getSchemas().values()) {
            sortTables(child);
        }
        final SortedMap<String, SchemaDTO> byName = new TreeMap<String, SchemaDTO>(database.getSchemas());
        database.setSchemas(byName);
    }

    /**
     * Sort the tables in the schema metadata DTO (and, recursively, their columns).
     *
     * @param schema the metadata DTO
     */
    private void sortTables(final SchemaDTO schema) {
        for (final TableDTO child : schema.getTables().values()) {
            sortColumns(child);
        }
        final SortedMap<String, TableDTO> byName = new TreeMap<String, TableDTO>(schema.getTables());
        schema.setTables(byName);
    }

    /**
     * Sort the columns in the table metadata DTO.
     *
     * @param table the metadata DTO
     */
    private void sortColumns(final TableDTO table) {
        final SortedMap<String, ColumnDTO> byName = new TreeMap<String, ColumnDTO>(table.getColumns());
        table.setColumns(byName);
    }
}
package com.yahoo.vespa.hosted.controller.restapi.application;

import com.google.common.base.Joiner;
import com.yahoo.component.Version;
import com.yahoo.config.application.api.DeploymentInstanceSpec;
import com.yahoo.config.application.api.DeploymentSpec;
import com.yahoo.config.provision.ApplicationId;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.zone.ZoneId;
import com.yahoo.container.jdisc.HttpResponse;
import com.yahoo.restapi.MessageResponse;
import com.yahoo.restapi.SlimeJsonResponse;
import com.yahoo.slime.Cursor;
import com.yahoo.slime.Slime;
import com.yahoo.vespa.hosted.controller.Application;
import com.yahoo.vespa.hosted.controller.Controller;
import com.yahoo.vespa.hosted.controller.Instance;
import com.yahoo.vespa.hosted.controller.NotExistsException;
import com.yahoo.vespa.hosted.controller.api.integration.LogEntry;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.ApplicationVersion;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobId;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.RunId;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.SourceRevision;
import com.yahoo.vespa.hosted.controller.application.ApplicationPackage;
import com.yahoo.vespa.hosted.controller.application.Change;
import com.yahoo.vespa.hosted.controller.application.Deployment;
import com.yahoo.vespa.hosted.controller.application.TenantAndApplicationId;
import com.yahoo.vespa.hosted.controller.deployment.ConvergenceSummary;
import com.yahoo.vespa.hosted.controller.deployment.DeploymentStatus;
import com.yahoo.vespa.hosted.controller.deployment.DeploymentSteps;
import com.yahoo.vespa.hosted.controller.deployment.JobController;
import com.yahoo.vespa.hosted.controller.deployment.JobList;
import com.yahoo.vespa.hosted.controller.deployment.JobStatus;
import com.yahoo.vespa.hosted.controller.deployment.Run;
import com.yahoo.vespa.hosted.controller.deployment.RunLog;
import com.yahoo.vespa.hosted.controller.deployment.RunStatus;
import com.yahoo.vespa.hosted.controller.deployment.Step;
import com.yahoo.vespa.hosted.controller.deployment.Versions;
import com.yahoo.vespa.hosted.controller.versions.VespaVersion;

import java.net.URI;
import java.util.ArrayDeque;
import java.util.Comparator;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import static com.yahoo.config.application.api.DeploymentSpec.UpgradePolicy.conservative;
import static com.yahoo.config.application.api.DeploymentSpec.UpgradePolicy.defaultPolicy;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.stagingTest;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.systemTest;
import static com.yahoo.vespa.hosted.controller.deployment.Step.Status.succeeded;
import static com.yahoo.vespa.hosted.controller.deployment.Step.Status.unfinished;
import static com.yahoo.vespa.hosted.controller.deployment.Step.deployReal;
import static com.yahoo.vespa.hosted.controller.deployment.Step.installInitialReal;
import static com.yahoo.vespa.hosted.controller.deployment.Step.installReal;
import static com.yahoo.vespa.hosted.controller.versions.VespaVersion.Confidence.broken;
import static com.yahoo.vespa.hosted.controller.versions.VespaVersion.Confidence.high;
import static com.yahoo.vespa.hosted.controller.versions.VespaVersion.Confidence.normal;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;

/**
 * Implements the REST API for the job controller delegated from the Application API.
 *
 * @see JobController
 * @see ApplicationApiHandler
 *
 * @author smorgrav
 * @author jonmv
 */
class JobControllerApiHandlerHelper {

    /**
     * @return Response with all job types that have recorded runs for the application _and_ the status for the last run of that type
     */
    static HttpResponse jobTypeResponse(Controller controller, ApplicationId id, URI baseUriForJobs) {
        Application application = controller.applications().requireApplication(TenantAndApplicationId.from(id));
        DeploymentStatus deploymentStatus = controller.jobController().deploymentStatus(application);
        Instance instance = application.require(id.instance());
        Change change = instance.change();
        Optional<DeploymentInstanceSpec> spec = application.deploymentSpec().instance(id.instance());
        Optional<DeploymentSteps> steps = spec.map(s -> new DeploymentSteps(s, controller::system));
        // Only job steps belonging to the requested instance are reported.
        List<JobType> jobs = deploymentStatus.jobSteps().keySet().stream()
                                             .filter(jobId -> id.equals(jobId.application()))
                                             .map(JobId::type)
                                             .collect(Collectors.toUnmodifiableList());
        List<JobType> productionJobs = jobs.stream()
                                           .filter(JobType::isProduction)
                                           .collect(Collectors.toUnmodifiableList());
        Map<JobType, JobStatus> status = deploymentStatus.instanceJobs(id.instance());

        // The logic for pending runs imitates DeploymentTrigger logic; not good, but the trigger wiring must be re-written to reuse :S
        Map<JobType, Versions> pendingProduction =
                productionJobs.stream()
                              .filter(type -> ! controller.applications().deploymentTrigger().isComplete(change, change, instance, type, status.get(type)))
                              .collect(Collectors.toMap(type -> type,
                                                        type -> Versions.from(change,
                                                                              application,
                                                                              Optional.ofNullable(instance.deployments().get(type.zone(controller.system()))),
                                                                              controller.systemVersion()),
                                                        // Each type appears once, so a merge would mean a programming error.
                                                        (v1, v2) -> { throw new IllegalStateException("Entries '" + v1 + "' and '" + v2 + "' have the same key!"); },
                                                        LinkedHashMap::new));

        // Jobs whose last run has not yet ended.
        Map<JobType, Run> running = jobs.stream()
                                        .map(type -> controller.jobController().last(id, type))
                                        .flatMap(Optional::stream)
                                        .filter(run -> ! run.hasEnded())
                                        .collect(toMap(run -> run.id().type(), run -> run));

        Slime slime = new Slime();
        Cursor responseObject = slime.setObject();

        Cursor lastVersionsObject = responseObject.setObject("lastVersions");
        if (application.latestVersion().isPresent()) {
            lastPlatformToSlime(lastVersionsObject.setObject("platform"), controller, application, instance, status, change, productionJobs, spec);
            lastApplicationToSlime(lastVersionsObject.setObject("application"), application, instance, status, change, productionJobs, controller);
        }

        Cursor deployingObject = responseObject.setObject("deploying");
        if ( ! change.isEmpty()) {
            change.platform().ifPresent(version -> deployingObject.setString("platform", version.toString()));
            change.application().ifPresent(version -> applicationVersionToSlime(deployingObject.setObject("application"), version));
        }

        // One array entry per production step, keyed by region, with the current deployment in each zone.
        Cursor deploymentsArray = responseObject.setArray("deployments");
        steps.ifPresent(deploymentSteps -> deploymentSteps.production().forEach(step -> {
            if (step.isTest()) return;
            Cursor deploymentsObject = deploymentsArray.addObject();
            deploymentSteps.toJobs(step).forEach(type -> {
                ZoneId zone = type.zone(controller.system());
                Deployment deployment = instance.deployments().get(zone);
                if (deployment != null)
                    deploymentToSlime(deploymentsObject.setObject(zone.region().value()),
                                      change, pendingProduction, running, type, status.get(type), deployment);
            });
        }));

        Cursor jobsObject = responseObject.setObject("jobs");
        steps.ifPresent(deploymentSteps -> jobs.forEach(type -> {
            jobTypeToSlime(jobsObject.setObject(shortNameOf(type, controller.system())),
                           controller, application, instance, status, type, deploymentSteps,
                           pendingProduction, running,
                           baseUriForJobs.resolve(baseUriForJobs.getPath() + "/" + type.jobName()).normalize());
        }));

        // Manually deployed (dev/perf) jobs are reported separately with just their last run.
        Cursor devJobsObject = responseObject.setObject("devJobs");
        for (JobType type : JobType.allIn(controller.system()))
            if ( type.environment() != null && type.environment().isManuallyDeployed())
                controller.jobController().last(instance.id(), type)
                          .ifPresent(last -> {
                              Cursor devJobObject = devJobsObject.setObject(type.jobName());
                              runToSlime(devJobObject.setArray("runs").addObject(),
                                         last,
                                         baseUriForJobs.resolve(baseUriForJobs.getPath() + "/" + type.jobName()).normalize());
                              devJobObject.setString("url",
                                                     baseUriForJobs.resolve(baseUriForJobs.getPath() + "/" + type.jobName()).normalize().toString());
                          });

        return new SlimeJsonResponse(slime);
    }

    /** Renders the latest platform (Vespa) version the instance may upgrade to, and the progress towards it. */
    private static void lastPlatformToSlime(Cursor lastPlatformObject, Controller controller, Application application,
                                            Instance instance, Map<JobType, JobStatus> status, Change change,
                                            List<JobType> productionJobs, Optional<DeploymentInstanceSpec> instanceSpec) {
        VespaVersion lastVespa = controller.versionStatus().version(controller.systemVersion());
        // Map the instance's upgrade policy to the minimum version confidence it requires.
        VespaVersion.Confidence targetConfidence = instanceSpec.map(spec -> Map.of(defaultPolicy, normal,
                                                                                   conservative, high)
                                                                               .getOrDefault(spec.upgradePolicy(), broken))
                                                               .orElse(normal);
        // Pick the newest version not beyond the system version which meets the confidence requirement.
        for (VespaVersion version : controller.versionStatus().versions())
            if ( ! version.versionNumber().isAfter(controller.systemVersion())
                 && version.confidence().equalOrHigherThan(targetConfidence))
                lastVespa = version;

        Version lastPlatform = lastVespa.versionNumber();
        lastPlatformObject.setString("platform", lastPlatform.toString());
        lastPlatformObject.setLong("at", lastVespa.committedAt().toEpochMilli());
        long completed = productionJobs.stream()
                                       .filter(type -> ! type.isTest())
                                       .filter(type -> controller.applications().deploymentTrigger().isComplete(Change.of(lastPlatform), change.withoutPlatform().withoutPin().with(lastPlatform), instance, type, status.get(type)))
                                       .count();
        long total = productionJobs.stream().filter(type -> ! type.isTest()).count();
        if (Optional.of(lastPlatform).equals(change.platform()))
            lastPlatformObject.setString("deploying", completed + " of " + total + " complete");
        else if (completed == total)
            lastPlatformObject.setString("completed", completed + " of " + total + " complete");
        else if ( ! application.deploymentSpec().requireInstance(instance.name()).canUpgradeAt(controller.clock().instant())) {
            // Upgrade currently blocked: report the active change-blocker window.
            lastPlatformObject.setString("blocked", application.deploymentSpec().instances().stream()
                                                               .flatMap(spec -> spec.changeBlocker().stream())
                                                               .filter(blocker -> blocker.blocksVersions())
                                                               .filter(blocker -> blocker.window().includes(controller.clock().instant()))
                                                               .findAny().map(blocker -> blocker.window().toString()).get());
        }
        else
            lastPlatformObject.setString("pending",
                                         instance.change().isEmpty()
                                                 ? "Waiting for upgrade slot"
                                                 : "Waiting for " + instance.change() + " to complete");
    }

    /** Renders the latest application version and the progress of rolling it out to production. */
    private static void lastApplicationToSlime(Cursor lastApplicationObject, Application application, Instance instance,
                                               Map<JobType, JobStatus> status, Change change,
                                               List<JobType> productionJobs, Controller controller) {
        // Caller guarantees presence: only invoked when application.latestVersion().isPresent().
        ApplicationVersion lastApplication = application.latestVersion().get();
        applicationVersionToSlime(lastApplicationObject.setObject("application"), lastApplication);
        lastApplicationObject.setLong("at", lastApplication.buildTime().get().toEpochMilli());
        long completed = productionJobs.stream()
                                       .filter(type -> ! type.isTest())
                                       .filter(type -> controller.applications().deploymentTrigger().isComplete(Change.of(lastApplication), change.withoutApplication().with(lastApplication), instance, type, status.get(type)))
                                       .count();
        long total = productionJobs.stream().filter(type -> ! type.isTest()).count();
        if (Optional.of(lastApplication).equals(change.application()))
            lastApplicationObject.setString("deploying", completed + " of " + total + " complete");
        else if (completed == total)
            lastApplicationObject.setString("completed", completed + " of " + total + " complete");
        else if ( ! application.deploymentSpec().instances().stream()
                               .allMatch(spec -> spec.canChangeRevisionAt(controller.clock().instant()))) {
            // Revision change currently blocked: report the active change-blocker window.
            lastApplicationObject.setString("blocked", application.deploymentSpec().instances().stream()
                                                                  .flatMap(spec -> spec.changeBlocker().stream())
                                                                  .filter(blocker -> blocker.blocksRevisions())
                                                                  .filter(blocker -> blocker.window().includes(controller.clock().instant()))
                                                                  .findAny().map(blocker -> blocker.window().toString()).get());
        }
        else
            lastApplicationObject.setString("pending", "Waiting for current deployment to complete");
    }

    /** Renders one zone deployment: versions, verification state, and deploy/verify/pending/completed status. */
    private static void deploymentToSlime(Cursor deploymentObject, Change change,
                                          Map<JobType, Versions> pendingProduction, Map<JobType, Run> running,
                                          JobType type, JobStatus jobStatus, Deployment deployment) {
        deploymentObject.setLong("at", deployment.at().toEpochMilli());
        deploymentObject.setString("platform", deployment.version().toString());
        applicationVersionToSlime(deploymentObject.setObject("application"), deployment.applicationVersion());
        // "verified" means the last successful run targeted exactly what is deployed.
        deploymentObject.setBool("verified", jobStatus.lastSuccess()
                                                      .map(Run::versions)
                                                      .filter(run -> run.targetPlatform().equals(deployment.version())
                                                                     && run.targetApplication().equals(deployment.applicationVersion()))
                                                      .isPresent());
        if (running.containsKey(type))
            deploymentObject.setString("status", running.get(type).stepStatus(deployReal).equals(Optional.of(unfinished)) ? "deploying" : "verifying");
        else if (change.hasTargets())
            deploymentObject.setString("status", pendingProduction.containsKey(type) ? "pending" : "completed");
    }

    /** Renders one job type: pause state, pending runs (with reasons), and up to 10 recorded runs. */
    private static void jobTypeToSlime(Cursor jobObject, Controller controller, Application application, Instance instance, Map<JobType, JobStatus> status, JobType type, DeploymentSteps steps,
                                       Map<JobType, Versions> pendingProduction, Map<JobType, Run> running, URI baseUriForJob) {
        instance.jobPause(type).ifPresent(until -> jobObject.setLong("pausedUntil", until.toEpochMilli()));
        int runs = 0;
        Cursor runArray = jobObject.setArray("runs");
        JobList jobList = JobList.from(status.values());
        if (type.environment().isTest()) {
            // A test job has one pending run per distinct version set awaiting production,
            // newest first, excluding versions already tested or already in production.
            Deque<List<JobType>> pending = new ArrayDeque<>();
            pendingProduction.entrySet().stream()
                             .filter(typeVersions -> jobList.type(type).successOn(typeVersions.getValue()).isEmpty())
                             .filter(typeVersions -> jobList.production().triggeredOn(typeVersions.getValue()).isEmpty())
                             .collect(groupingBy(Map.Entry::getValue,
                                                 LinkedHashMap::new,
                                                 Collectors.mapping(Map.Entry::getKey, toList())))
                             .forEach((versions, types) -> pending.addFirst(types));
            for (List<JobType> productionTypes : pending) {
                Versions versions = pendingProduction.get(productionTypes.get(0));
                if (statusOf(controller, instance.id(), type, versions).equals("running"))
                    continue;
                runs++;
                Cursor runObject = runArray.addObject();
                runObject.setString("status", "pending");
                versionsToSlime(runObject, versions);
                if ( ! controller.applications().deploymentTrigger().triggerAt(controller.clock().instant(), type, status.get(type), versions, instance, application.deploymentSpec()))
                    runObject.setObject("tasks").setString("cooldown", "failed");
                else
                    runObject.setObject("tasks").setString("capacity", "running");
                runObject.setString("reason", "Testing for " + Joiner.on(", ").join(productionTypes));
            }
        }
        else if (    pendingProduction.containsKey(type)
                 && ! running.containsKey(type)) {
            // A production job with a pending version set: explain what it is waiting for.
            Versions versions = pendingProduction.get(type);
            runs++;
            Cursor runObject = runArray.addObject();
            runObject.setString("status", "pending");
            versionsToSlime(runObject, pendingProduction.get(type));
            Cursor pendingObject = runObject.setObject("tasks");
            if (instance.jobPauses().containsKey(type))
                pendingObject.setString("paused", "pending");
            else if ( ! controller.applications().deploymentTrigger().triggerAt(controller.clock().instant(), type, status.get(type), versions, instance, application.deploymentSpec()))
                pendingObject.setString("cooldown", "failed");
            else {
                int pending = 0;
                // NOTE(review): this bare call has no visible effect here -- presumably
                // a leftover; confirm against DeploymentTrigger before removing.
                controller.applications().deploymentTrigger();
                if (jobList.production().triggeredOn(versions).isEmpty()) {
                    if (jobList.type(systemTest).successOn(versions).isEmpty()) {
                        pending++;
                        pendingObject.setString(shortNameOf(systemTest, controller.system()), statusOf(controller, instance.id(), systemTest, versions));
                    }
                    if (jobList.type(stagingTest).successOn(versions).isEmpty()) {
                        pending++;
                        pendingObject.setString(shortNameOf(stagingTest, controller.system()), statusOf(controller, instance.id(), stagingTest, versions));
                    }
                }
                // Report up to 3 earlier production steps this job is waiting on.
                steps: for (DeploymentSpec.Step step : steps.production()) {
                    if (steps.toJobs(step).contains(type))
                        break;
                    for (JobType stepType : steps.toJobs(step)) {
                        if (pendingProduction.containsKey(stepType)) {
                            Versions jobVersions = Versions.from(instance.change(),
                                                                 application,
                                                                 Optional.ofNullable(instance.deployments().get(stepType.zone(controller.system()))),
                                                                 controller.systemVersion());
                            pendingObject.setString(shortNameOf(stepType, controller.system()), statusOf(controller, instance.id(), stepType, jobVersions));
                            if (++pending == 3)
                                break steps;
                        }
                    }
                }
                if (pending == 0)
                    pendingObject.setString("delay", "running");
            }
        }

        // Fill the remainder (up to 10 entries total) with the most recent recorded runs.
        controller.jobController().runs(instance.id(), type).values().stream()
                  .sorted(Comparator.comparing(run -> -run.id().number()))
                  .limit(Math.max(0, 10 - runs))
                  .forEach(run -> runToSlime(runArray.addObject(), run, baseUriForJob));
        jobObject.setString("url", baseUriForJob.toString());
    }

    /** Returns the task status of the last run of the given type targeting the given versions, or "pending" if none. */
    private static String statusOf(Controller controller, ApplicationId id, JobType type, Versions versions) {
        return controller.jobController().last(id, type)
                         .filter(run -> run.versions().targetsMatch(versions))
                         .filter(run -> type != stagingTest || run.versions().sourcesMatchIfPresent(versions))
                         .map(JobControllerApiHandlerHelper::taskStatusOf)
                         .orElse("pending");
    }

    /** Strips the "production-" prefix from a job name; the system parameter is currently unused. */
    private static String shortNameOf(JobType type, SystemName system) {
        return type.jobName().replaceFirst("production-", "");
    }

    /** Collapses a run status to one of "running", "succeeded" or "failed". */
    private static String taskStatusOf(Run run) {
        switch (run.status()) {
            case running: return "running";
            case success: return "succeeded";
            default: return "failed";
        }
    }

    /** Renders a single run: id, status, timestamps, versions, step statuses, task statuses and log URL. */
    private static void runToSlime(Cursor runObject, Run run, URI baseUriForJobType) {
        runObject.setLong("id", run.id().number());
        runObject.setString("status", nameOf(run.status()));
        runObject.setLong("start", run.start().toEpochMilli());
        run.end().ifPresent(instant -> runObject.setLong("end", instant.toEpochMilli()));

        versionsToSlime(runObject, run.versions());

        Cursor stepsObject = runObject.setObject("steps");
        run.steps().forEach((step, info) -> stepsObject.setString(step.name(), info.status().name()));

        Cursor tasksObject = runObject.setObject("tasks");
        taskStatus(deployReal, run).ifPresent(status -> tasksObject.setString("deploy", status));
        taskStatus(Step.installReal, run).ifPresent(status -> tasksObject.setString("install", status));
        taskStatus(Step.endTests, run).ifPresent(status -> tasksObject.setString("test", status));

        runObject.setString("log", baseUriForJobType.resolve(baseUriForJobType.getPath() + "/run/" + run.id().number()).normalize().toString());
    }

    /** Returns the status of the task represented by the given step, if it has started. */
    private static Optional<String> taskStatus(Step step, Run run) {
        return run.readySteps().contains(step) ? Optional.of("running")
                                               : Optional.ofNullable(run.steps().get(step))
                                                         .filter(info -> info.status() != unfinished)
                                                         .map(info -> info.status().name());
    }

    /** Returns a response with the runs for the given job type. */
    static HttpResponse runResponse(Map<RunId, Run> runs, URI baseUriForJobType) {
        Slime slime = new Slime();
        Cursor cursor = slime.setObject();

        runs.forEach((runid, run) -> runToSlime(cursor.setObject(Long.toString(runid.number())), run, baseUriForJobType));

        return new SlimeJsonResponse(slime);
    }

    /** Renders wanted and (when present) current platform and application versions of a run. */
    private static void versionsToSlime(Cursor runObject, Versions versions) {
        runObject.setString("wantedPlatform", versions.targetPlatform().toString());
        applicationVersionToSlime(runObject.setObject("wantedApplication"), versions.targetApplication());
        versions.sourcePlatform().ifPresent(version -> runObject.setString("currentPlatform", version.toString()));
        versions.sourceApplication().ifPresent(version -> applicationVersionToSlime(runObject.setObject("currentApplication"), version));
    }

    /** Renders an application version: hash, and (when known) build number, source revision, URL and commit. */
    static void applicationVersionToSlime(Cursor versionObject, ApplicationVersion version) {
        versionObject.setString("hash", version.id());
        if (version.isUnknown()) return;
        versionObject.setLong("build", version.buildNumber().getAsLong());
        Cursor sourceObject = versionObject.setObject("source");
        version.source().ifPresent(source -> {
            sourceObject.setString("gitRepository", source.repository());
            sourceObject.setString("gitBranch", source.branch());
            sourceObject.setString("gitCommit", source.commit());
        });
        version.sourceUrl().ifPresent(url -> versionObject.setString("sourceUrl", url));
        version.commit().ifPresent(commit -> versionObject.setString("commit", commit));
    }

    /**
     * @return Response with logs from a single run
     */
    static HttpResponse runDetailsResponse(JobController jobController, RunId runId, String after) {
        Slime slime = new Slime();
        Cursor detailsObject = slime.setObject();

        Run run = jobController.run(runId)
                               .orElseThrow(() -> new IllegalStateException("Unknown run '" + runId + "'"));
        detailsObject.setBool("active", ! run.hasEnded());
        detailsObject.setString("status", nameOf(run.status()));
        try {
            jobController.updateTestLog(runId);
            jobController.updateVespaLog(runId);
        }
        catch (RuntimeException ignored) { } // Return response when this fails, which it does when, e.g., logserver is booting.

        // "after" is an optional log-entry id; when given, only newer entries are fetched.
        RunLog runLog = (after == null ? jobController.details(runId) : jobController.details(runId, Long.parseLong(after)))
                .orElseThrow(() -> new NotExistsException(String.format(
                        "No run details exist for application: %s, job type: %s, number: %d",
                        runId.application().toShortString(), runId.type().jobName(), runId.number())));

        Cursor logObject = detailsObject.setObject("log");
        for (Step step : Step.values()) {
            if ( ! runLog.get(step).isEmpty())
                toSlime(logObject.setArray(step.name()), runLog.get(step));
        }
        runLog.lastId().ifPresent(id -> detailsObject.setLong("lastId", id));

        Cursor stepsObject = detailsObject.setObject("steps");
        run.steps().forEach((step, info) -> {
            Cursor stepCursor = stepsObject.setObject(step.name());
            stepCursor.setString("status", info.status().name());
            info.startTime().ifPresent(startTime -> stepCursor.setLong("startMillis", startTime.toEpochMilli()));
            run.convergenceSummary().ifPresent(summary -> {
                // If initial installation never succeeded, but is part of the job, summary concerns it.
                // If initial succeeded, or is not part of this job, summary concerns upgrade installation.
                if (   step == installInitialReal && info.status() != succeeded
                    || step == installReal && run.stepStatus(installInitialReal).map(status -> status == succeeded).orElse(true))
                    toSlime(stepCursor.setObject("convergence"), summary);
            });
        });

        return new SlimeJsonResponse(slime);
    }

    /** Renders a node/service convergence summary. */
    private static void toSlime(Cursor summaryObject, ConvergenceSummary summary) {
        summaryObject.setLong("nodes", summary.nodes());
        summaryObject.setLong("down", summary.down());
        summaryObject.setLong("needPlatformUpgrade", summary.needPlatformUpgrade());
        summaryObject.setLong("upgrading", summary.upgradingPlatform());
        summaryObject.setLong("needReboot", summary.needReboot());
        summaryObject.setLong("rebooting", summary.rebooting());
        summaryObject.setLong("needRestart", summary.needRestart());
        summaryObject.setLong("restarting", summary.restarting());
        summaryObject.setLong("upgradingOs", summary.upgradingOs());
        summaryObject.setLong("upgradingFirmware", summary.upgradingFirmware());
        summaryObject.setLong("services", summary.services());
        summaryObject.setLong("needNewConfig", summary.needNewConfig());
    }

    /** Renders a list of log entries into the given array cursor. */
    private static void toSlime(Cursor entryArray, List<LogEntry> entries) {
        entries.forEach(entry -> toSlime(entryArray.addObject(), entry));
    }

    /** Renders a single log entry: timestamp, type and message. */
    private static void toSlime(Cursor entryObject, LogEntry entry) {
        entryObject.setLong("at", entry.at().toEpochMilli());
        entryObject.setString("type", entry.type().name());
        entryObject.setString("message", entry.message());
    }

    /**
     * Unpack payload and submit to job controller. Defaults instance to 'default' and renders the
     * application version on success.
     *
     * @return Response with the new application version
     */
    static HttpResponse submitResponse(JobController jobController, String tenant, String application,
                                       Optional<SourceRevision> sourceRevision, Optional<String> authorEmail,
                                       Optional<String> sourceUrl, Optional<String> commit, long projectId,
                                       ApplicationPackage applicationPackage, byte[] testPackage) {
        ApplicationVersion version = jobController.submit(TenantAndApplicationId.from(tenant, application),
                                                          sourceRevision, authorEmail, sourceUrl, commit,
                                                          projectId, applicationPackage, testPackage);

        return new MessageResponse(version.toString());
    }

    /** Aborts any job of the given type. */
    static HttpResponse abortJobResponse(JobController jobs, ApplicationId id, JobType type) {
        Slime slime = new Slime();
        Cursor responseObject = slime.setObject();
        Optional<Run> run = jobs.last(id, type).flatMap(last -> jobs.active(last.id()));
        if (run.isPresent()) {
            jobs.abort(run.get().id());
            responseObject.setString("message", "Aborting " + run.get().id());
        }
        else
            responseObject.setString("message", "Nothing to abort.");
        return new SlimeJsonResponse(slime);
    }

    /** Maps a run status to its JSON name; throws on unknown statuses so new enum values are not silently mis-reported. */
    private static String nameOf(RunStatus status) {
        switch (status) {
            case running:            return "running";
            case aborted:            return "aborted";
            case error:              return "error";
            case testFailure:        return "testFailure";
            case outOfCapacity:      return "outOfCapacity";
            case installationFailed: return "installationFailed";
            case deploymentFailed:   return "deploymentFailed";
            case success:            return "success";
            default:                 throw new IllegalArgumentException("Unexpected status '" + status + "'");
        }
    }

    /**
     * @return Response with all job types that have recorded runs for the application _and_ the status for the last run of that type
     */
    static HttpResponse overviewResponse(Controller controller, TenantAndApplicationId id, URI baseUriForDeployments) {
        Application application = controller.applications().requireApplication(id);
        DeploymentStatus status = controller.jobController().deploymentStatus(application);

        Slime slime = new Slime();
        Cursor responseObject = slime.setObject();
        responseObject.setString("tenant", id.tenant().value());
        responseObject.setString("application", id.application().value());

        Map<JobId, List<Versions>> jobsToRun = status.jobsToRun();
        Cursor stepsArray = responseObject.setArray("steps");
        for (DeploymentStatus.StepStatus stepStatus : status.allSteps()) {
            Change change = status.application().require(stepStatus.instance()).change();
            Cursor stepObject = stepsArray.addObject();
            stepObject.setString("type", stepStatus.type().name());
            // Dependencies are reported as indexes into this same "steps" array.
            stepStatus.dependencies().stream()
                      .map(status.allSteps()::indexOf)
                      .forEach(stepObject.setArray("dependencies")::addLong);
            stepObject.setBool("declared", stepStatus.isDeclared());
            stepObject.setString("instance", stepStatus.instance().value());
            stepStatus.job().ifPresent(job -> {
                stepObject.setString("jobName", job.type().jobName());
                String baseUriForJob = baseUriForDeployments.resolve(baseUriForDeployments.getPath() +
                                                                     "/../instance/" + job.application().instance().value() +
                                                                     "/job/" + job.type().jobName()).normalize().toString();
                stepObject.setString("url", baseUriForJob);
                stepObject.setString("environment", job.type().environment().value());
                stepObject.setString("region", job.type().zone(controller.system()).value());

                if (job.type().isProduction() && job.type().isDeployment()) {
                    status.deploymentFor(job).ifPresent(deployment -> {
                        stepObject.setString("currentPlatform", deployment.version().toFullString());
                        toSlime(stepObject.setObject("currentApplication"), deployment.applicationVersion());
                    });
                }

                JobStatus jobStatus = status.jobs().get(job).get();
                Cursor toRunArray = stepObject.setArray("toRun");
                for (Versions versions : jobsToRun.getOrDefault(job, List.of())) {
                    boolean running = jobStatus.lastTriggered()
                                               .map(run -> jobStatus.isRunning()
                                                           && versions.targetsMatch(run.versions())
                                                           && (job.type().isProduction() || versions.sourcesMatchIfPresent(run.versions())))
                                               .orElse(false);
                    if (running) continue; // Run will be contained in the "runs" array.

                    Cursor runObject = toRunArray.addObject();
                    toSlime(runObject.setObject("versions"), versions);
                }

                stepStatus.readyAt(change).ifPresent(ready -> stepObject.setLong("readyAt", ready.toEpochMilli()));
                stepStatus.readyAt(change)
                          .filter(controller.clock().instant()::isBefore)
                          .ifPresent(until -> stepObject.setLong("delayedUntil", until.toEpochMilli()));
                stepStatus.pausedUntil().ifPresent(until -> stepObject.setLong("pausedUntil", until.toEpochMilli()));
                stepStatus.coolingDownUntil(change).ifPresent(until -> stepObject.setLong("coolingDownUntil", until.toEpochMilli()));
                stepStatus.blockedUntil(change).ifPresent(until -> stepObject.setLong("blockedUntil", until.toEpochMilli()));

                Cursor runsArray = stepObject.setArray("runs");
                jobStatus.runs().descendingMap().values().stream().limit(10).forEach(run -> {
                    Cursor runObject = runsArray.addObject();
                    runObject.setLong("id", run.id().number());
                    runObject.setString("url", baseUriForJob + "/run/" + run.id());
                    runObject.setLong("start", run.start().toEpochMilli());
                    run.end().ifPresent(end -> runObject.setLong("end", end.toEpochMilli()));
                    runObject.setString("status", run.status().name());
                    toSlime(runObject.setObject("versions"), run.versions());
                    Cursor runStepsArray = runObject.setArray("steps");
                    run.steps().forEach((step, info) -> {
                        Cursor runStepObject = runStepsArray.addObject();
                        runStepObject.setString("name", step.name());
                        runStepObject.setString("status", info.status().name());
                    });
                });
            });
        }
        // TODO jonmv: Add latest platform and application status.

        return new SlimeJsonResponse(slime);
    }

    /** Renders a short application version: build id, commit, compile version and source URL, when present. */
    private static void toSlime(Cursor versionObject, ApplicationVersion version) {
        version.buildNumber().ifPresent(id -> versionObject.setLong("id", id));
        version.source().ifPresent(source -> versionObject.setString("commit", source.commit()));
        version.compileVersion().ifPresent(platform -> versionObject.setString("compileVersion", platform.toFullString()));
        version.sourceUrl().ifPresent(url -> versionObject.setString("sourceUrl", url));
        version.commit().ifPresent(commit -> versionObject.setString("commit", commit));
    }

    /** Renders target and (when present) source versions of a prospective run. */
    private static void toSlime(Cursor versionsObject, Versions versions) {
        versionsObject.setString("targetPlatform", versions.targetPlatform().toFullString());
        toSlime(versionsObject.setObject("targetApplication"), versions.targetApplication());
        versions.sourcePlatform().ifPresent(platform -> versionsObject.setString("sourcePlatform", platform.toFullString()));
        versions.sourceApplication().ifPresent(application -> toSlime(versionsObject.setObject("sourceApplication"), application));
    }

}
package rest; import java.util.List; import javax.servlet.ServletContext; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import tm.RotondAndesTM; import vos.Ingrediente; import vos.Menu; import vos.PedidoProducto; import vos.Usuario; import vos.VOVerificacionMenu; @Path("PedidoProducto") public class RotondAndesServicesPedidoProducto { /** * Atributo que usa la anotacion @Context para tener el ServletContext de la conexion actual. */ @Context private ServletContext context; /** * Metodo que retorna el path de la carpeta WEB-INF/ConnectionData en el deploy actual dentro del servidor. * @return path de la carpeta WEB-INF/ConnectionData en el deploy actual. */ private String getPath() { return context.getRealPath("WEB-INF/ConnectionData"); } private String doErrorMessage(Exception e){ return "{ \"ERROR\": \""+ e.getMessage() + "\"}" ; } @GET @Produces({ MediaType.APPLICATION_JSON }) public Response getPedidosProductos() { RotondAndesTM tm = new RotondAndesTM(getPath()); List<PedidoProducto> pedidosProductos; try { pedidosProductos = tm.darPedidoProductos(); } catch (Exception e) { return Response.status(500).entity(doErrorMessage(e)).build(); } return Response.status(200).entity(pedidosProductos).build(); } @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response addMenuByRestaurante(PedidoProducto pedidoProducto) { try { RotondAndesTM tm = new RotondAndesTM(getPath()); tm.addPedidoProducto(pedidoProducto); return Response.status(200).entity(pedidoProducto).build(); } catch(Exception e) { e.printStackTrace(); return Response.status(500).entity(doErrorMessage(e)).build(); } } @GET @Path( "{nombre}" ) @Produces( { MediaType.APPLICATION_JSON } ) public Response getPedidoProductoName( 
@QueryParam("nombre") String name) { RotondAndesTM tm = new RotondAndesTM(getPath()); PedidoProducto pedidoProducto; try { if (name == null || name.length() == 0) throw new Exception("Nombre del producto no valido"); pedidoProducto = tm.buscarPedidoProductoPorName(name); } catch (Exception e) { return Response.status(500).entity(doErrorMessage(e)).build(); } return Response.status(200).entity(pedidoProducto).build(); } @GET @Path( "{id: \\d+}" ) @Produces( { MediaType.APPLICATION_JSON } ) public Response getPedidoProducto( @PathParam( "id" ) Long id ) { RotondAndesTM tm = new RotondAndesTM( getPath( ) ); try { PedidoProducto v = tm.buscarPedidoProductoPorID(id); return Response.status( 200 ).entity( v ).build( ); } catch( Exception e ) { return Response.status( 500 ).entity( doErrorMessage( e ) ).build( ); } } }
package org.hisp.dhis.client.models.program; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.google.auto.value.AutoValue; import org.hisp.dhis.client.models.common.BaseIdentifiableObject; import org.hisp.dhis.client.models.common.FormType; import java.util.Collections; import java.util.List; import javax.annotation.Nullable; @AutoValue @JsonDeserialize(builder = AutoValue_ProgramStage.Builder.class) public abstract class ProgramStage extends BaseIdentifiableObject { private static final String EXECUTION_DATE_LABEL = "executionDateLabel"; private static final String ALLOW_GENERATE_NEXT_VISIT = "allowGenerateNextVisit"; private static final String VALID_COMPLETE_ONLY = "validCompleteOnly"; private static final String REPORT_DATE_TO_USE = "reportDateToUse"; private static final String OPEN_AFTER_ENROLLMENT = "openAfterEnrollment"; private static final String PROGRAM_STAGE_DATA_ELEMENTS = "programStageDataElements"; private static final String REPEATABLE = "repeatable"; private static final String CAPTURE_COORDINATES = "captureCoordinates"; private static final String FORM_TYPE = "formType"; private static final String DISPLAY_GENERATE_EVENT_BOX = "displayGenerateEventBox"; private static final String GENERATED_BY_ENROLMENT_DATE = "generatedByEnrollmentDate"; private static final String AUTO_GENERATE_EVENT = "autoGenerateEvent"; private static final String SORT_ORDER = "sortOrder"; private static final String HIDE_DUE_DATE = "hideDueDate"; private static final String BLOCK_ENTRY_FORM = "blockEntryForm"; private static final String MIN_DAYS_FROM_START = "minDaysFromStart"; private static final String STANDARD_INTERVAL = "standardInterval"; private static final String PROGRAM_STAGE_SECTIONS = "programStageSections"; @Nullable @JsonProperty(EXECUTION_DATE_LABEL) public abstract String executionDateLabel(); @Nullable @JsonProperty(ALLOW_GENERATE_NEXT_VISIT) public abstract Boolean 
allowGenerateNextVisit(); @Nullable @JsonProperty(VALID_COMPLETE_ONLY) public abstract Boolean validCompleteOnly(); @Nullable @JsonProperty(REPORT_DATE_TO_USE) public abstract String reportDateToUse(); @Nullable @JsonProperty(OPEN_AFTER_ENROLLMENT) public abstract Boolean openAfterEnrollment(); @Nullable @JsonProperty(PROGRAM_STAGE_DATA_ELEMENTS) public abstract List<ProgramStageDataElement> programStageDataElements(); @Nullable @JsonProperty(REPEATABLE) public abstract Boolean repeatable(); @Nullable @JsonProperty(CAPTURE_COORDINATES) public abstract Boolean captureCoordinates(); @Nullable @JsonProperty(FORM_TYPE) public abstract FormType formType(); @Nullable @JsonProperty(DISPLAY_GENERATE_EVENT_BOX) public abstract Boolean displayGenerateEventBox(); @Nullable @JsonProperty(GENERATED_BY_ENROLMENT_DATE) public abstract Boolean generatedByEnrollmentDate(); @Nullable @JsonProperty(AUTO_GENERATE_EVENT) public abstract Boolean autoGenerateEvent(); @Nullable @JsonProperty(SORT_ORDER) public abstract Integer sortOrder(); @Nullable @JsonProperty(HIDE_DUE_DATE) public abstract Boolean hideDueDate(); @Nullable @JsonProperty(BLOCK_ENTRY_FORM) public abstract Boolean blockEntryForm(); @Nullable @JsonProperty(MIN_DAYS_FROM_START) public abstract Integer minDaysFromStart(); @Nullable @JsonProperty(STANDARD_INTERVAL) public abstract Integer standardInterval(); @Nullable @JsonProperty(PROGRAM_STAGE_SECTIONS) public abstract List<ProgramStageSection> programStageSections(); @AutoValue.Builder public static abstract class Builder extends BaseIdentifiableObject.Builder<Builder> { @JsonProperty(EXECUTION_DATE_LABEL) public abstract Builder executionDateLabel(@Nullable String executionDateLabel); @JsonProperty(ALLOW_GENERATE_NEXT_VISIT) public abstract Builder allowGenerateNextVisit(@Nullable Boolean allowGenerateNextVisit); @JsonProperty(VALID_COMPLETE_ONLY) public abstract Builder validCompleteOnly(@Nullable Boolean validCompleteOnly); @JsonProperty(REPORT_DATE_TO_USE) public 
abstract Builder reportDateToUse(@Nullable String reportDateToUse); @JsonProperty(OPEN_AFTER_ENROLLMENT) public abstract Builder openAfterEnrollment(@Nullable Boolean openAfterEnrollment); @JsonProperty(PROGRAM_STAGE_DATA_ELEMENTS) public abstract Builder programStageDataElements(@Nullable List<ProgramStageDataElement> programStageDataElements); @JsonProperty(REPEATABLE) public abstract Builder repeatable(@Nullable Boolean repeatable); @JsonProperty(CAPTURE_COORDINATES) public abstract Builder captureCoordinates(@Nullable Boolean captureCoordinates); @JsonProperty(FORM_TYPE) public abstract Builder formType(@Nullable FormType formType); @JsonProperty(DISPLAY_GENERATE_EVENT_BOX) public abstract Builder displayGenerateEventBox(@Nullable Boolean displayGenerateEventBox); @JsonProperty(GENERATED_BY_ENROLMENT_DATE) public abstract Builder generatedByEnrollmentDate(@Nullable Boolean generatedByEnrollmentDate); @JsonProperty(AUTO_GENERATE_EVENT) public abstract Builder autoGenerateEvent(@Nullable Boolean autoGenerateEvent); @JsonProperty(SORT_ORDER) public abstract Builder sortOrder(@Nullable Integer sortOrder); @JsonProperty(HIDE_DUE_DATE) public abstract Builder hideDueDate(@Nullable Boolean hideDueDate); @JsonProperty(BLOCK_ENTRY_FORM) public abstract Builder blockEntryForm(@Nullable Boolean blockEntryForm); @JsonProperty(MIN_DAYS_FROM_START) public abstract Builder minDaysFromStart(@Nullable Integer minDaysFromStart); @JsonProperty(STANDARD_INTERVAL) public abstract Builder standardInterval(@Nullable Integer standardInterval); @JsonProperty(PROGRAM_STAGE_SECTIONS) public abstract Builder programStageSections(@Nullable List<ProgramStageSection> programStageSections); abstract ProgramStage autoBuild(); abstract List<ProgramStageSection> programStageSections(); public ProgramStage build() { if (programStageSections() != null) { programStageSections(Collections.unmodifiableList(programStageSections())); } return autoBuild(); } } }
package com.haulmont.cuba.core.global;

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Exception signalling a programming (development-time) error, carrying an
 * optional ordered map of named parameters that describe the faulty context.
 *
 * @author krivopustov
 * @version $Id$
 */
@SupportedByClient
public class DevelopmentException extends RuntimeException {

    // Insertion-ordered so parameters print in the order they were supplied.
    protected final Map<String, Object> params = new LinkedHashMap<>(1);

    /** Creates an exception with a message and no parameters. */
    public DevelopmentException(String message) {
        super(message);
    }

    /** Creates an exception with a message and a single named parameter. */
    public DevelopmentException(String message, String paramKey, Object paramValue) {
        super(message);
        params.put(paramKey, paramValue);
    }

    /** Creates an exception with a message and a map of named parameters. */
    public DevelopmentException(String message, Map<String, Object> params) {
        super(message);
        this.params.putAll(params);
    }

    /** Creates an exception with a message and an underlying cause. */
    public DevelopmentException(String message, Throwable cause) {
        super(message, cause);
    }

    /** @return the (possibly empty) parameter map attached to this exception */
    public Map<String, Object> getParams() {
        return params;
    }

    @Override
    public String toString() {
        String base = super.toString();
        if (params.isEmpty()) {
            return base;
        }
        return base + ", params=" + params;
    }
}